From c1666978b2da9c40c4713f86835d1a6cc084fe01 Mon Sep 17 00:00:00 2001
From: j
Date: Sun, 31 Jan 2016 20:14:46 +0530
Subject: [PATCH] win32 platform

---
 .gitignore | 7 +
 COPYING | 339 +
 COPYING3 | 674 +
 Lib/site-packages/Crypto/Cipher/AES.py | 115 +
 Lib/site-packages/Crypto/Cipher/ARC2.py | 130 +
 Lib/site-packages/Crypto/Cipher/ARC4.py | 120 +
 Lib/site-packages/Crypto/Cipher/Blowfish.py | 121 +
 Lib/site-packages/Crypto/Cipher/CAST.py | 123 +
 Lib/site-packages/Crypto/Cipher/DES.py | 118 +
 Lib/site-packages/Crypto/Cipher/DES3.py | 133 +
 Lib/site-packages/Crypto/Cipher/PKCS1_OAEP.py | 255 +
 Lib/site-packages/Crypto/Cipher/PKCS1_v1_5.py | 226 +
 Lib/site-packages/Crypto/Cipher/XOR.py | 86 +
 Lib/site-packages/Crypto/Cipher/__init__.py | 83 +
 Lib/site-packages/Crypto/Cipher/blockalgo.py | 296 +
 Lib/site-packages/Crypto/Hash/HMAC.py | 212 +
 Lib/site-packages/Crypto/Hash/MD2.py | 91 +
 Lib/site-packages/Crypto/Hash/MD4.py | 91 +
 Lib/site-packages/Crypto/Hash/MD5.py | 97 +
 Lib/site-packages/Crypto/Hash/RIPEMD.py | 94 +
 Lib/site-packages/Crypto/Hash/SHA.py | 98 +
 Lib/site-packages/Crypto/Hash/SHA224.py | 95 +
 Lib/site-packages/Crypto/Hash/SHA256.py | 95 +
 Lib/site-packages/Crypto/Hash/SHA384.py | 96 +
 Lib/site-packages/Crypto/Hash/SHA512.py | 95 +
 Lib/site-packages/Crypto/Hash/__init__.py | 56 +
 Lib/site-packages/Crypto/Hash/hashalgo.py | 116 +
 .../Crypto/Protocol/AllOrNothing.py | 320 +
 Lib/site-packages/Crypto/Protocol/Chaffing.py | 245 +
 Lib/site-packages/Crypto/Protocol/KDF.py | 123 +
 Lib/site-packages/Crypto/Protocol/__init__.py | 41 +
 Lib/site-packages/Crypto/PublicKey/DSA.py | 379 +
 Lib/site-packages/Crypto/PublicKey/ElGamal.py | 373 +
 Lib/site-packages/Crypto/PublicKey/RSA.py | 719 +
 Lib/site-packages/Crypto/PublicKey/_DSA.py | 115 +
 Lib/site-packages/Crypto/PublicKey/_RSA.py | 81 +
 .../Crypto/PublicKey/__init__.py | 41 +
 .../Crypto/PublicKey/_slowmath.py | 187 +
 Lib/site-packages/Crypto/PublicKey/pubkey.py | 240 +
 .../Random/Fortuna/FortunaAccumulator.py | 171 +
 .../Crypto/Random/Fortuna/FortunaGenerator.py | 132 +
 .../Crypto/Random/Fortuna/SHAd256.py | 98 +
 .../Crypto/Random/Fortuna/__init__.py | 0
 .../Crypto/Random/OSRNG/__init__.py | 40 +
 .../Crypto/Random/OSRNG/fallback.py | 46 +
 Lib/site-packages/Crypto/Random/OSRNG/nt.py | 74 +
 .../Crypto/Random/OSRNG/posix.py | 86 +
 .../Crypto/Random/OSRNG/rng_base.py | 88 +
 .../Crypto/Random/_UserFriendlyRNG.py | 230 +
 Lib/site-packages/Crypto/Random/__init__.py | 43 +
 Lib/site-packages/Crypto/Random/random.py | 142 +
 .../Crypto/SelfTest/Cipher/__init__.py | 48 +
 .../Crypto/SelfTest/Cipher/common.py | 399 +
 .../Crypto/SelfTest/Cipher/test_AES.py | 1433 ++
 .../Crypto/SelfTest/Cipher/test_ARC2.py | 124 +
 .../Crypto/SelfTest/Cipher/test_ARC4.py | 81 +
 .../Crypto/SelfTest/Cipher/test_Blowfish.py | 113 +
 .../Crypto/SelfTest/Cipher/test_CAST.py | 57 +
 .../Crypto/SelfTest/Cipher/test_DES.py | 339 +
 .../Crypto/SelfTest/Cipher/test_DES3.py | 333 +
 .../Crypto/SelfTest/Cipher/test_XOR.py | 72 +
 .../Crypto/SelfTest/Cipher/test_pkcs1_15.py | 174 +
 .../Crypto/SelfTest/Cipher/test_pkcs1_oaep.py | 372 +
 .../Crypto/SelfTest/Hash/__init__.py | 52 +
 .../Crypto/SelfTest/Hash/common.py | 197 +
 .../Crypto/SelfTest/Hash/test_HMAC.py | 223 +
 .../Crypto/SelfTest/Hash/test_MD2.py | 64 +
 .../Crypto/SelfTest/Hash/test_MD4.py | 64 +
 .../Crypto/SelfTest/Hash/test_MD5.py | 64 +
 .../Crypto/SelfTest/Hash/test_RIPEMD.py | 73 +
 .../Crypto/SelfTest/Hash/test_SHA.py | 64 +
 .../Crypto/SelfTest/Hash/test_SHA224.py | 65 +
 .../Crypto/SelfTest/Hash/test_SHA256.py | 96 +
.../Crypto/SelfTest/Hash/test_SHA384.py | 63 + .../Crypto/SelfTest/Hash/test_SHA512.py | 60 + .../Crypto/SelfTest/Protocol/__init__.py | 41 + .../SelfTest/Protocol/test_AllOrNothing.py | 76 + .../Crypto/SelfTest/Protocol/test_KDF.py | 98 + .../Crypto/SelfTest/Protocol/test_chaffing.py | 74 + .../Crypto/SelfTest/Protocol/test_rfc1751.py | 62 + .../Crypto/SelfTest/PublicKey/__init__.py | 44 + .../Crypto/SelfTest/PublicKey/test_DSA.py | 244 + .../Crypto/SelfTest/PublicKey/test_ElGamal.py | 210 + .../Crypto/SelfTest/PublicKey/test_RSA.py | 415 + .../SelfTest/PublicKey/test_importKey.py | 345 + .../SelfTest/Random/Fortuna/__init__.py | 44 + .../Random/Fortuna/test_FortunaAccumulator.py | 189 + .../Random/Fortuna/test_FortunaGenerator.py | 83 + .../SelfTest/Random/Fortuna/test_SHAd256.py | 55 + .../Crypto/SelfTest/Random/OSRNG/__init__.py | 49 + .../SelfTest/Random/OSRNG/test_fallback.py | 48 + .../SelfTest/Random/OSRNG/test_generic.py | 48 + .../Crypto/SelfTest/Random/OSRNG/test_nt.py | 48 + .../SelfTest/Random/OSRNG/test_posix.py | 48 + .../SelfTest/Random/OSRNG/test_winrandom.py | 48 + .../Crypto/SelfTest/Random/__init__.py | 43 + .../SelfTest/Random/test__UserFriendlyRNG.py | 171 + .../Crypto/SelfTest/Random/test_random.py | 171 + .../SelfTest/Random/test_rpoolcompat.py | 55 + .../Crypto/SelfTest/Signature/__init__.py | 40 + .../SelfTest/Signature/test_pkcs1_15.py | 219 + .../SelfTest/Signature/test_pkcs1_pss.py | 446 + .../Crypto/SelfTest/Util/__init__.py | 44 + .../Crypto/SelfTest/Util/test_Counter.py | 165 + .../Crypto/SelfTest/Util/test_asn1.py | 293 + .../Crypto/SelfTest/Util/test_number.py | 295 + .../Crypto/SelfTest/Util/test_winrandom.py | 48 + Lib/site-packages/Crypto/SelfTest/__init__.py | 92 + .../Crypto/SelfTest/st_common.py | 62 + .../Crypto/Signature/PKCS1_PSS.py | 355 + .../Crypto/Signature/PKCS1_v1_5.py | 236 + .../Crypto/Signature/__init__.py | 31 + Lib/site-packages/Crypto/Util/Counter.py | 127 + Lib/site-packages/Crypto/Util/RFC1751.py | 365 + Lib/site-packages/Crypto/Util/__init__.py | 37 + Lib/site-packages/Crypto/Util/_number_new.py | 119 + Lib/site-packages/Crypto/Util/asn1.py | 286 + Lib/site-packages/Crypto/Util/number.py | 1456 ++ Lib/site-packages/Crypto/Util/py3compat.py | 107 + Lib/site-packages/Crypto/Util/randpool.py | 82 + Lib/site-packages/Crypto/Util/winrandom.py | 28 + Lib/site-packages/Crypto/__init__.py | 51 + Lib/site-packages/Crypto/pct_warnings.py | 60 + Lib/site-packages/OpenSSL/SSL.py | 1948 ++ Lib/site-packages/OpenSSL/__init__.py | 12 + Lib/site-packages/OpenSSL/_util.py | 127 + Lib/site-packages/OpenSSL/crypto.py | 2639 +++ Lib/site-packages/OpenSSL/rand.py | 180 + Lib/site-packages/OpenSSL/test/__init__.py | 6 + Lib/site-packages/OpenSSL/test/test_crypto.py | 3671 ++++ Lib/site-packages/OpenSSL/test/test_rand.py | 223 + Lib/site-packages/OpenSSL/test/test_ssl.py | 3775 ++++ Lib/site-packages/OpenSSL/test/test_tsafe.py | 24 + Lib/site-packages/OpenSSL/test/test_util.py | 17 + Lib/site-packages/OpenSSL/test/util.py | 463 + Lib/site-packages/OpenSSL/tsafe.py | 28 + Lib/site-packages/OpenSSL/version.py | 9 + Lib/site-packages/PIL/BdfFontFile.py | 132 + Lib/site-packages/PIL/BmpImagePlugin.py | 288 + Lib/site-packages/PIL/BufrStubImagePlugin.py | 72 + Lib/site-packages/PIL/ContainerIO.py | 117 + Lib/site-packages/PIL/CurImagePlugin.py | 88 + Lib/site-packages/PIL/DcxImagePlugin.py | 86 + Lib/site-packages/PIL/EpsImagePlugin.py | 428 + Lib/site-packages/PIL/ExifTags.py | 193 + Lib/site-packages/PIL/FitsStubImagePlugin.py | 76 + 
Lib/site-packages/PIL/FliImagePlugin.py | 188 + Lib/site-packages/PIL/FontFile.py | 115 + Lib/site-packages/PIL/FpxImagePlugin.py | 226 + Lib/site-packages/PIL/GbrImagePlugin.py | 71 + Lib/site-packages/PIL/GdImageFile.py | 92 + Lib/site-packages/PIL/GifImagePlugin.py | 685 + Lib/site-packages/PIL/GimpGradientFile.py | 137 + Lib/site-packages/PIL/GimpPaletteFile.py | 62 + Lib/site-packages/PIL/GribStubImagePlugin.py | 72 + Lib/site-packages/PIL/Hdf5StubImagePlugin.py | 73 + Lib/site-packages/PIL/IcnsImagePlugin.py | 366 + Lib/site-packages/PIL/IcoImagePlugin.py | 283 + Lib/site-packages/PIL/ImImagePlugin.py | 355 + Lib/site-packages/PIL/Image.py | 2500 +++ Lib/site-packages/PIL/ImageChops.py | 283 + Lib/site-packages/PIL/ImageCms.py | 970 + Lib/site-packages/PIL/ImageColor.py | 279 + Lib/site-packages/PIL/ImageDraw.py | 407 + Lib/site-packages/PIL/ImageDraw2.py | 111 + Lib/site-packages/PIL/ImageEnhance.py | 100 + Lib/site-packages/PIL/ImageFile.py | 513 + Lib/site-packages/PIL/ImageFilter.py | 275 + Lib/site-packages/PIL/ImageFont.py | 437 + Lib/site-packages/PIL/ImageGrab.py | 61 + Lib/site-packages/PIL/ImageMath.py | 270 + Lib/site-packages/PIL/ImageMode.py | 52 + Lib/site-packages/PIL/ImageMorph.py | 251 + Lib/site-packages/PIL/ImageOps.py | 461 + Lib/site-packages/PIL/ImagePalette.py | 237 + Lib/site-packages/PIL/ImagePath.py | 66 + Lib/site-packages/PIL/ImageQt.py | 198 + Lib/site-packages/PIL/ImageSequence.py | 42 + Lib/site-packages/PIL/ImageShow.py | 179 + Lib/site-packages/PIL/ImageStat.py | 147 + Lib/site-packages/PIL/ImageTk.py | 292 + Lib/site-packages/PIL/ImageTransform.py | 103 + Lib/site-packages/PIL/ImageWin.py | 239 + Lib/site-packages/PIL/ImtImagePlugin.py | 95 + Lib/site-packages/PIL/IptcImagePlugin.py | 267 + Lib/site-packages/PIL/Jpeg2KImagePlugin.py | 276 + Lib/site-packages/PIL/JpegImagePlugin.py | 753 + Lib/site-packages/PIL/JpegPresets.py | 241 + Lib/site-packages/PIL/McIdasImagePlugin.py | 74 + Lib/site-packages/PIL/MicImagePlugin.py | 103 + Lib/site-packages/PIL/MpegImagePlugin.py | 86 + Lib/site-packages/PIL/MpoImagePlugin.py | 99 + Lib/site-packages/PIL/MspImagePlugin.py | 104 + Lib/site-packages/PIL/OleFileIO-README.md | 180 + Lib/site-packages/PIL/OleFileIO.py | 2305 ++ Lib/site-packages/PIL/PSDraw.py | 235 + Lib/site-packages/PIL/PaletteFile.py | 55 + Lib/site-packages/PIL/PalmImagePlugin.py | 241 + Lib/site-packages/PIL/PcdImagePlugin.py | 59 + Lib/site-packages/PIL/PcfFontFile.py | 252 + Lib/site-packages/PIL/PcxImagePlugin.py | 187 + Lib/site-packages/PIL/PdfImagePlugin.py | 258 + Lib/site-packages/PIL/PixarImagePlugin.py | 68 + Lib/site-packages/PIL/PngImagePlugin.py | 809 + Lib/site-packages/PIL/PpmImagePlugin.py | 174 + Lib/site-packages/PIL/PsdImagePlugin.py | 312 + Lib/site-packages/PIL/PyAccess.py | 317 + Lib/site-packages/PIL/SgiImagePlugin.py | 89 + Lib/site-packages/PIL/SpiderImagePlugin.py | 322 + Lib/site-packages/PIL/SunImagePlugin.py | 81 + Lib/site-packages/PIL/TarIO.py | 57 + Lib/site-packages/PIL/TgaImagePlugin.py | 198 + Lib/site-packages/PIL/TiffImagePlugin.py | 1476 ++ Lib/site-packages/PIL/TiffTags.py | 402 + Lib/site-packages/PIL/WalImageFile.py | 128 + Lib/site-packages/PIL/WebPImagePlugin.py | 80 + Lib/site-packages/PIL/WmfImagePlugin.py | 173 + Lib/site-packages/PIL/XVThumbImagePlugin.py | 75 + Lib/site-packages/PIL/XbmImagePlugin.py | 96 + Lib/site-packages/PIL/XpmImagePlugin.py | 130 + Lib/site-packages/PIL/__init__.py | 58 + Lib/site-packages/PIL/_binary.py | 76 + Lib/site-packages/PIL/_util.py | 27 + 
Lib/site-packages/PIL/features.py | 67 + .../Pillow-3.1.0.dist-info/DESCRIPTION.rst | 62 + .../Pillow-3.1.0.dist-info/METADATA | 89 + .../Pillow-3.1.0.dist-info/RECORD | 213 + .../Pillow-3.1.0.dist-info/WHEEL | 5 + .../Pillow-3.1.0.dist-info/metadata.json | 1 + .../Pillow-3.1.0.dist-info/top_level.txt | 1 + .../Pillow-3.1.0.dist-info/zip-safe | 1 + .../DESCRIPTION.rst | 137 + .../SQLAlchemy-1.0.11.dist-info/INSTALLER | 1 + .../SQLAlchemy-1.0.11.dist-info/METADATA | 157 + .../SQLAlchemy-1.0.11.dist-info/RECORD | 376 + .../SQLAlchemy-1.0.11.dist-info/WHEEL | 5 + .../SQLAlchemy-1.0.11.dist-info/metadata.json | 1 + .../SQLAlchemy-1.0.11.dist-info/top_level.txt | 1 + Lib/site-packages/_markerlib/__init__.py | 16 + Lib/site-packages/_markerlib/markers.py | 119 + .../cffi-1.5.0.dist-info/DESCRIPTION.rst | 13 + .../cffi-1.5.0.dist-info/INSTALLER | 1 + .../cffi-1.5.0.dist-info/METADATA | 34 + Lib/site-packages/cffi-1.5.0.dist-info/RECORD | 42 + Lib/site-packages/cffi-1.5.0.dist-info/WHEEL | 5 + .../cffi-1.5.0.dist-info/entry_points.txt | 3 + .../cffi-1.5.0.dist-info/metadata.json | 1 + .../cffi-1.5.0.dist-info/top_level.txt | 2 + Lib/site-packages/cffi/__init__.py | 13 + Lib/site-packages/cffi/_cffi_include.h | 236 + Lib/site-packages/cffi/_embedding.h | 517 + Lib/site-packages/cffi/api.py | 827 + Lib/site-packages/cffi/backend_ctypes.py | 1068 + Lib/site-packages/cffi/cffi_opcode.py | 179 + Lib/site-packages/cffi/commontypes.py | 76 + Lib/site-packages/cffi/cparser.py | 839 + Lib/site-packages/cffi/ffiplatform.py | 121 + Lib/site-packages/cffi/gc_weakref.py | 22 + Lib/site-packages/cffi/lock.py | 30 + Lib/site-packages/cffi/model.py | 602 + Lib/site-packages/cffi/parse_c_type.h | 177 + Lib/site-packages/cffi/recompiler.py | 1485 ++ Lib/site-packages/cffi/setuptools_ext.py | 161 + Lib/site-packages/cffi/vengine_cpy.py | 1007 + Lib/site-packages/cffi/vengine_gen.py | 668 + Lib/site-packages/cffi/verifier.py | 313 + .../DESCRIPTION.rst | 57 + .../cryptography-1.2.2.dist-info/INSTALLER | 1 + .../cryptography-1.2.2.dist-info/METADATA | 92 + .../cryptography-1.2.2.dist-info/RECORD | 143 + .../cryptography-1.2.2.dist-info/WHEEL | 5 + .../entry_points.txt | 3 + .../metadata.json | 1 + .../top_level.txt | 4 + Lib/site-packages/cryptography/__about__.py | 23 + Lib/site-packages/cryptography/__init__.py | 27 + Lib/site-packages/cryptography/exceptions.py | 56 + Lib/site-packages/cryptography/fernet.py | 141 + .../cryptography/hazmat/__init__.py | 5 + .../cryptography/hazmat/backends/__init__.py | 42 + .../hazmat/backends/commoncrypto/__init__.py | 10 + .../hazmat/backends/commoncrypto/backend.py | 245 + .../hazmat/backends/commoncrypto/ciphers.py | 193 + .../hazmat/backends/commoncrypto/hashes.py | 55 + .../hazmat/backends/commoncrypto/hmac.py | 59 + .../hazmat/backends/interfaces.py | 359 + .../hazmat/backends/multibackend.py | 404 + .../hazmat/backends/openssl/__init__.py | 10 + .../hazmat/backends/openssl/backend.py | 2299 ++ .../hazmat/backends/openssl/ciphers.py | 213 + .../hazmat/backends/openssl/cmac.py | 80 + .../hazmat/backends/openssl/dsa.py | 218 + .../hazmat/backends/openssl/ec.py | 304 + .../hazmat/backends/openssl/hashes.py | 62 + .../hazmat/backends/openssl/hmac.py | 81 + .../hazmat/backends/openssl/rsa.py | 605 + .../hazmat/backends/openssl/utils.py | 26 + .../hazmat/backends/openssl/x509.py | 1067 + .../cryptography/hazmat/bindings/__init__.py | 5 + .../hazmat/bindings/commoncrypto/__init__.py | 5 + .../hazmat/bindings/commoncrypto/binding.py | 15 + .../hazmat/bindings/openssl/__init__.py | 
5 + .../hazmat/bindings/openssl/_conditional.py | 414 + .../hazmat/bindings/openssl/binding.py | 213 + .../hazmat/primitives/__init__.py | 5 + .../hazmat/primitives/asymmetric/__init__.py | 40 + .../hazmat/primitives/asymmetric/dh.py | 166 + .../hazmat/primitives/asymmetric/dsa.py | 229 + .../hazmat/primitives/asymmetric/ec.py | 346 + .../hazmat/primitives/asymmetric/padding.py | 67 + .../hazmat/primitives/asymmetric/rsa.py | 352 + .../hazmat/primitives/asymmetric/utils.py | 73 + .../hazmat/primitives/ciphers/__init__.py | 20 + .../hazmat/primitives/ciphers/algorithms.py | 140 + .../hazmat/primitives/ciphers/base.py | 203 + .../hazmat/primitives/ciphers/modes.py | 164 + .../cryptography/hazmat/primitives/cmac.py | 66 + .../hazmat/primitives/constant_time.py | 26 + .../cryptography/hazmat/primitives/hashes.py | 163 + .../cryptography/hazmat/primitives/hmac.py | 69 + .../hazmat/primitives/interfaces/__init__.py | 37 + .../hazmat/primitives/kdf/__init__.py | 26 + .../hazmat/primitives/kdf/concatkdf.py | 125 + .../hazmat/primitives/kdf/hkdf.py | 116 + .../hazmat/primitives/kdf/pbkdf2.py | 58 + .../hazmat/primitives/kdf/x963kdf.py | 70 + .../cryptography/hazmat/primitives/keywrap.py | 85 + .../cryptography/hazmat/primitives/padding.py | 124 + .../hazmat/primitives/serialization.py | 188 + .../hazmat/primitives/twofactor/__init__.py | 9 + .../hazmat/primitives/twofactor/hotp.py | 67 + .../hazmat/primitives/twofactor/totp.py | 39 + .../hazmat/primitives/twofactor/utils.py | 30 + Lib/site-packages/cryptography/utils.py | 129 + .../cryptography/x509/__init__.py | 173 + Lib/site-packages/cryptography/x509/base.py | 676 + .../cryptography/x509/extensions.py | 1108 + .../cryptography/x509/general_name.py | 271 + Lib/site-packages/cryptography/x509/name.py | 78 + Lib/site-packages/cryptography/x509/oid.py | 251 + Lib/site-packages/easy_install.py | 5 + .../idna-2.0.dist-info/DESCRIPTION.rst | 144 + .../idna-2.0.dist-info/INSTALLER | 1 + Lib/site-packages/idna-2.0.dist-info/METADATA | 168 + Lib/site-packages/idna-2.0.dist-info/RECORD | 20 + Lib/site-packages/idna-2.0.dist-info/WHEEL | 6 + .../idna-2.0.dist-info/metadata.json | 1 + Lib/site-packages/idna-2.0.dist-info/pbr.json | 1 + .../idna-2.0.dist-info/top_level.txt | 1 + Lib/site-packages/idna/__init__.py | 1 + Lib/site-packages/idna/codec.py | 118 + Lib/site-packages/idna/compat.py | 12 + Lib/site-packages/idna/core.py | 386 + Lib/site-packages/idna/idnadata.py | 3573 ++++ Lib/site-packages/idna/uts46data.py | 7267 +++++++ .../lxml-3.5.0.dist-info/DESCRIPTION.rst | 46 + .../lxml-3.5.0.dist-info/INSTALLER | 1 + .../lxml-3.5.0.dist-info/METADATA | 82 + Lib/site-packages/lxml-3.5.0.dist-info/RECORD | 85 + Lib/site-packages/lxml-3.5.0.dist-info/WHEEL | 5 + .../lxml-3.5.0.dist-info/metadata.json | 1 + .../lxml-3.5.0.dist-info/top_level.txt | 1 + Lib/site-packages/lxml/ElementInclude.py | 223 + Lib/site-packages/lxml/__init__.py | 20 + Lib/site-packages/lxml/_elementpath.py | 315 + Lib/site-packages/lxml/builder.py | 246 + Lib/site-packages/lxml/cssselect.py | 102 + Lib/site-packages/lxml/doctestcompare.py | 508 + Lib/site-packages/lxml/html/ElementSoup.py | 10 + Lib/site-packages/lxml/html/__init__.py | 1923 ++ Lib/site-packages/lxml/html/_diffcommand.py | 87 + Lib/site-packages/lxml/html/_html5builder.py | 100 + Lib/site-packages/lxml/html/_setmixin.py | 52 + Lib/site-packages/lxml/html/builder.py | 133 + Lib/site-packages/lxml/html/clean.py | 732 + Lib/site-packages/lxml/html/defs.py | 137 + Lib/site-packages/lxml/html/diff.py | 881 + 
Lib/site-packages/lxml/html/formfill.py | 299 + Lib/site-packages/lxml/html/html5parser.py | 207 + Lib/site-packages/lxml/html/soupparser.py | 303 + Lib/site-packages/lxml/html/usedoctest.py | 13 + Lib/site-packages/lxml/includes/__init__.py | 0 Lib/site-packages/lxml/includes/c14n.pxd | 26 + Lib/site-packages/lxml/includes/config.pxd | 3 + Lib/site-packages/lxml/includes/dtdvalid.pxd | 18 + Lib/site-packages/lxml/includes/etree_defs.h | 373 + .../lxml/includes/etreepublic.pxd | 234 + .../lxml/includes/htmlparser.pxd | 56 + .../lxml/includes/lxml-version.h | 3 + Lib/site-packages/lxml/includes/relaxng.pxd | 64 + .../lxml/includes/schematron.pxd | 34 + Lib/site-packages/lxml/includes/tree.pxd | 474 + Lib/site-packages/lxml/includes/uri.pxd | 5 + Lib/site-packages/lxml/includes/xinclude.pxd | 22 + Lib/site-packages/lxml/includes/xmlerror.pxd | 850 + Lib/site-packages/lxml/includes/xmlparser.pxd | 248 + Lib/site-packages/lxml/includes/xmlschema.pxd | 35 + Lib/site-packages/lxml/includes/xpath.pxd | 135 + Lib/site-packages/lxml/includes/xslt.pxd | 176 + .../lxml/isoschematron/__init__.py | 334 + .../resources/rng/iso-schematron.rng | 622 + .../resources/xsl/RNG2Schtrn.xsl | 75 + .../resources/xsl/XSD2Schtrn.xsl | 77 + .../iso_abstract_expand.xsl | 296 + .../iso-schematron-xslt1/iso_dsdl_include.xsl | 1160 + .../iso_schematron_message.xsl | 55 + .../iso_schematron_skeleton_for_xslt1.xsl | 1796 ++ .../iso_svrl_for_xslt1.xsl | 588 + .../xsl/iso-schematron-xslt1/readme.txt | 83 + Lib/site-packages/lxml/lxml.etree.h | 219 + Lib/site-packages/lxml/lxml.etree_api.h | 230 + Lib/site-packages/lxml/pyclasslookup.py | 3 + Lib/site-packages/lxml/sax.py | 248 + Lib/site-packages/lxml/usedoctest.py | 13 + .../pip-8.0.2.dist-info/DESCRIPTION.rst | 34 + .../pip-8.0.2.dist-info/METADATA | 62 + Lib/site-packages/pip-8.0.2.dist-info/RECORD | 476 + Lib/site-packages/pip-8.0.2.dist-info/WHEEL | 6 + .../pip-8.0.2.dist-info/entry_points.txt | 5 + .../pip-8.0.2.dist-info/metadata.json | 1 + .../pip-8.0.2.dist-info/top_level.txt | 1 + Lib/site-packages/pip/__init__.py | 315 + Lib/site-packages/pip/__main__.py | 19 + Lib/site-packages/pip/_vendor/__init__.py | 100 + .../pip/_vendor/_markerlib/__init__.py | 16 + .../pip/_vendor/_markerlib/markers.py | 119 + .../pip/_vendor/cachecontrol/__init__.py | 11 + .../pip/_vendor/cachecontrol/_cmd.py | 60 + .../pip/_vendor/cachecontrol/adapter.py | 117 + .../pip/_vendor/cachecontrol/cache.py | 39 + .../_vendor/cachecontrol/caches/__init__.py | 18 + .../_vendor/cachecontrol/caches/file_cache.py | 116 + .../cachecontrol/caches/redis_cache.py | 41 + .../pip/_vendor/cachecontrol/compat.py | 20 + .../pip/_vendor/cachecontrol/controller.py | 353 + .../pip/_vendor/cachecontrol/filewrapper.py | 63 + .../pip/_vendor/cachecontrol/heuristics.py | 138 + .../pip/_vendor/cachecontrol/serialize.py | 190 + .../pip/_vendor/cachecontrol/wrapper.py | 21 + .../pip/_vendor/colorama/__init__.py | 7 + .../pip/_vendor/colorama/ansi.py | 102 + .../pip/_vendor/colorama/ansitowin32.py | 232 + .../pip/_vendor/colorama/initialise.py | 81 + .../pip/_vendor/colorama/win32.py | 154 + .../pip/_vendor/colorama/winterm.py | 162 + .../pip/_vendor/distlib/__init__.py | 23 + .../pip/_vendor/distlib/_backport/__init__.py | 6 + .../pip/_vendor/distlib/_backport/misc.py | 41 + .../pip/_vendor/distlib/_backport/shutil.py | 761 + .../_vendor/distlib/_backport/sysconfig.cfg | 84 + .../_vendor/distlib/_backport/sysconfig.py | 788 + .../pip/_vendor/distlib/_backport/tarfile.py | 2607 +++ .../pip/_vendor/distlib/compat.py | 
1102 + .../pip/_vendor/distlib/database.py | 1305 ++ .../pip/_vendor/distlib/index.py | 513 + .../pip/_vendor/distlib/locators.py | 1264 ++ .../pip/_vendor/distlib/manifest.py | 367 + .../pip/_vendor/distlib/markers.py | 190 + .../pip/_vendor/distlib/metadata.py | 1058 + .../pip/_vendor/distlib/resources.py | 350 + .../pip/_vendor/distlib/scripts.py | 365 + Lib/site-packages/pip/_vendor/distlib/t32.exe | Bin 0 -> 89088 bytes Lib/site-packages/pip/_vendor/distlib/t64.exe | Bin 0 -> 97792 bytes Lib/site-packages/pip/_vendor/distlib/util.py | 1579 ++ .../pip/_vendor/distlib/version.py | 742 + Lib/site-packages/pip/_vendor/distlib/w32.exe | Bin 0 -> 85504 bytes Lib/site-packages/pip/_vendor/distlib/w64.exe | Bin 0 -> 94208 bytes .../pip/_vendor/distlib/wheel.py | 976 + .../pip/_vendor/html5lib/__init__.py | 25 + .../pip/_vendor/html5lib/constants.py | 3102 +++ .../pip/_vendor/html5lib/filters/__init__.py | 0 .../pip/_vendor/html5lib/filters/_base.py | 12 + .../filters/alphabeticalattributes.py | 20 + .../html5lib/filters/inject_meta_charset.py | 65 + .../pip/_vendor/html5lib/filters/lint.py | 90 + .../_vendor/html5lib/filters/optionaltags.py | 205 + .../pip/_vendor/html5lib/filters/sanitizer.py | 12 + .../_vendor/html5lib/filters/whitespace.py | 38 + .../pip/_vendor/html5lib/html5parser.py | 2724 +++ .../pip/_vendor/html5lib/ihatexml.py | 285 + .../pip/_vendor/html5lib/inputstream.py | 903 + .../pip/_vendor/html5lib/sanitizer.py | 300 + .../_vendor/html5lib/serializer/__init__.py | 16 + .../html5lib/serializer/htmlserializer.py | 317 + .../pip/_vendor/html5lib/tokenizer.py | 1731 ++ .../_vendor/html5lib/treeadapters/__init__.py | 0 .../pip/_vendor/html5lib/treeadapters/sax.py | 44 + .../_vendor/html5lib/treebuilders/__init__.py | 76 + .../_vendor/html5lib/treebuilders/_base.py | 377 + .../pip/_vendor/html5lib/treebuilders/dom.py | 227 + .../_vendor/html5lib/treebuilders/etree.py | 337 + .../html5lib/treebuilders/etree_lxml.py | 369 + .../_vendor/html5lib/treewalkers/__init__.py | 147 + .../pip/_vendor/html5lib/treewalkers/_base.py | 200 + .../pip/_vendor/html5lib/treewalkers/dom.py | 43 + .../pip/_vendor/html5lib/treewalkers/etree.py | 136 + .../html5lib/treewalkers/genshistream.py | 69 + .../_vendor/html5lib/treewalkers/lxmletree.py | 201 + .../_vendor/html5lib/treewalkers/pulldom.py | 63 + .../pip/_vendor/html5lib/trie/__init__.py | 12 + .../pip/_vendor/html5lib/trie/_base.py | 37 + .../pip/_vendor/html5lib/trie/datrie.py | 44 + .../pip/_vendor/html5lib/trie/py.py | 67 + .../pip/_vendor/html5lib/utils.py | 103 + Lib/site-packages/pip/_vendor/ipaddress.py | 2417 +++ .../pip/_vendor/lockfile/__init__.py | 347 + .../pip/_vendor/lockfile/linklockfile.py | 73 + .../pip/_vendor/lockfile/mkdirlockfile.py | 84 + .../pip/_vendor/lockfile/pidlockfile.py | 190 + .../pip/_vendor/lockfile/sqlitelockfile.py | 156 + .../pip/_vendor/lockfile/symlinklockfile.py | 70 + .../pip/_vendor/packaging/__about__.py | 21 + .../pip/_vendor/packaging/__init__.py | 14 + .../pip/_vendor/packaging/_compat.py | 30 + .../pip/_vendor/packaging/_structures.py | 68 + .../pip/_vendor/packaging/markers.py | 275 + .../pip/_vendor/packaging/specifiers.py | 776 + .../pip/_vendor/packaging/version.py | 393 + .../pip/_vendor/pkg_resources/__init__.py | 3159 +++ .../pip/_vendor/progress/__init__.py | 123 + Lib/site-packages/pip/_vendor/progress/bar.py | 83 + .../pip/_vendor/progress/counter.py | 47 + .../pip/_vendor/progress/helpers.py | 91 + .../pip/_vendor/progress/spinner.py | 40 + Lib/site-packages/pip/_vendor/pyparsing.py | 3801 
++++ Lib/site-packages/pip/_vendor/re-vendor.py | 34 + .../pip/_vendor/requests/__init__.py | 83 + .../pip/_vendor/requests/adapters.py | 453 + Lib/site-packages/pip/_vendor/requests/api.py | 145 + .../pip/_vendor/requests/auth.py | 223 + .../pip/_vendor/requests/cacert.pem | 5616 +++++ .../pip/_vendor/requests/certs.py | 25 + .../pip/_vendor/requests/compat.py | 62 + .../pip/_vendor/requests/cookies.py | 487 + .../pip/_vendor/requests/exceptions.py | 114 + .../pip/_vendor/requests/hooks.py | 34 + .../pip/_vendor/requests/models.py | 851 + .../pip/_vendor/requests/packages/__init__.py | 36 + .../requests/packages/chardet/__init__.py | 32 + .../requests/packages/chardet/big5freq.py | 925 + .../requests/packages/chardet/big5prober.py | 42 + .../requests/packages/chardet/chardetect.py | 80 + .../packages/chardet/chardistribution.py | 231 + .../packages/chardet/charsetgroupprober.py | 106 + .../packages/chardet/charsetprober.py | 62 + .../packages/chardet/codingstatemachine.py | 61 + .../requests/packages/chardet/compat.py | 34 + .../requests/packages/chardet/constants.py | 39 + .../requests/packages/chardet/cp949prober.py | 44 + .../requests/packages/chardet/escprober.py | 86 + .../requests/packages/chardet/escsm.py | 242 + .../requests/packages/chardet/eucjpprober.py | 90 + .../requests/packages/chardet/euckrfreq.py | 596 + .../requests/packages/chardet/euckrprober.py | 42 + .../requests/packages/chardet/euctwfreq.py | 428 + .../requests/packages/chardet/euctwprober.py | 41 + .../requests/packages/chardet/gb2312freq.py | 472 + .../requests/packages/chardet/gb2312prober.py | 41 + .../requests/packages/chardet/hebrewprober.py | 283 + .../requests/packages/chardet/jisfreq.py | 569 + .../requests/packages/chardet/jpcntx.py | 227 + .../packages/chardet/langbulgarianmodel.py | 229 + .../packages/chardet/langcyrillicmodel.py | 329 + .../packages/chardet/langgreekmodel.py | 225 + .../packages/chardet/langhebrewmodel.py | 201 + .../packages/chardet/langhungarianmodel.py | 225 + .../packages/chardet/langthaimodel.py | 200 + .../requests/packages/chardet/latin1prober.py | 139 + .../packages/chardet/mbcharsetprober.py | 86 + .../packages/chardet/mbcsgroupprober.py | 54 + .../requests/packages/chardet/mbcssm.py | 572 + .../packages/chardet/sbcharsetprober.py | 120 + .../packages/chardet/sbcsgroupprober.py | 69 + .../requests/packages/chardet/sjisprober.py | 91 + .../packages/chardet/universaldetector.py | 170 + .../requests/packages/chardet/utf8prober.py | 76 + .../requests/packages/urllib3/__init__.py | 93 + .../requests/packages/urllib3/_collections.py | 324 + .../requests/packages/urllib3/connection.py | 288 + .../packages/urllib3/connectionpool.py | 818 + .../packages/urllib3/contrib/__init__.py | 0 .../packages/urllib3/contrib/appengine.py | 223 + .../packages/urllib3/contrib/ntlmpool.py | 115 + .../packages/urllib3/contrib/pyopenssl.py | 310 + .../requests/packages/urllib3/exceptions.py | 201 + .../requests/packages/urllib3/fields.py | 178 + .../requests/packages/urllib3/filepost.py | 94 + .../packages/urllib3/packages/__init__.py | 5 + .../packages/urllib3/packages/ordered_dict.py | 259 + .../requests/packages/urllib3/packages/six.py | 385 + .../packages/ssl_match_hostname/__init__.py | 13 + .../ssl_match_hostname/_implementation.py | 105 + .../requests/packages/urllib3/poolmanager.py | 281 + .../requests/packages/urllib3/request.py | 151 + .../requests/packages/urllib3/response.py | 514 + .../packages/urllib3/util/__init__.py | 44 + .../packages/urllib3/util/connection.py | 101 + 
.../requests/packages/urllib3/util/request.py | 72 + .../packages/urllib3/util/response.py | 74 + .../requests/packages/urllib3/util/retry.py | 286 + .../requests/packages/urllib3/util/ssl_.py | 317 + .../requests/packages/urllib3/util/timeout.py | 242 + .../requests/packages/urllib3/util/url.py | 217 + .../pip/_vendor/requests/sessions.py | 680 + .../pip/_vendor/requests/status_codes.py | 90 + .../pip/_vendor/requests/structures.py | 104 + .../pip/_vendor/requests/utils.py | 721 + Lib/site-packages/pip/_vendor/retrying.py | 267 + Lib/site-packages/pip/_vendor/six.py | 868 + Lib/site-packages/pip/basecommand.py | 325 + Lib/site-packages/pip/baseparser.py | 292 + Lib/site-packages/pip/cmdoptions.py | 618 + Lib/site-packages/pip/commands/__init__.py | 85 + Lib/site-packages/pip/commands/completion.py | 68 + Lib/site-packages/pip/commands/download.py | 136 + Lib/site-packages/pip/commands/freeze.py | 71 + Lib/site-packages/pip/commands/hash.py | 57 + Lib/site-packages/pip/commands/help.py | 35 + Lib/site-packages/pip/commands/install.py | 386 + Lib/site-packages/pip/commands/list.py | 209 + Lib/site-packages/pip/commands/search.py | 139 + Lib/site-packages/pip/commands/show.py | 131 + Lib/site-packages/pip/commands/uninstall.py | 76 + Lib/site-packages/pip/commands/wheel.py | 204 + Lib/site-packages/pip/compat/__init__.py | 158 + Lib/site-packages/pip/compat/dictconfig.py | 565 + Lib/site-packages/pip/download.py | 894 + Lib/site-packages/pip/exceptions.py | 236 + Lib/site-packages/pip/index.py | 1033 + Lib/site-packages/pip/locations.py | 182 + Lib/site-packages/pip/models/__init__.py | 4 + Lib/site-packages/pip/models/index.py | 16 + Lib/site-packages/pip/operations/__init__.py | 0 Lib/site-packages/pip/operations/freeze.py | 112 + Lib/site-packages/pip/pep425tags.py | 222 + Lib/site-packages/pip/req/__init__.py | 10 + Lib/site-packages/pip/req/req_file.py | 338 + Lib/site-packages/pip/req/req_install.py | 1217 ++ Lib/site-packages/pip/req/req_set.py | 745 + Lib/site-packages/pip/req/req_uninstall.py | 195 + Lib/site-packages/pip/status_codes.py | 8 + Lib/site-packages/pip/utils/__init__.py | 834 + Lib/site-packages/pip/utils/appdirs.py | 224 + Lib/site-packages/pip/utils/build.py | 42 + Lib/site-packages/pip/utils/deprecation.py | 76 + Lib/site-packages/pip/utils/filesystem.py | 28 + Lib/site-packages/pip/utils/hashes.py | 92 + Lib/site-packages/pip/utils/logging.py | 130 + Lib/site-packages/pip/utils/outdated.py | 162 + .../pip/utils/setuptools_build.py | 6 + Lib/site-packages/pip/utils/ui.py | 339 + Lib/site-packages/pip/vcs/__init__.py | 363 + Lib/site-packages/pip/vcs/bazaar.py | 116 + Lib/site-packages/pip/vcs/git.py | 274 + Lib/site-packages/pip/vcs/mercurial.py | 103 + Lib/site-packages/pip/vcs/subversion.py | 249 + Lib/site-packages/pip/wheel.py | 854 + Lib/site-packages/pkg_resources/__init__.py | 3148 +++ .../pkg_resources/_vendor/__init__.py | 0 .../_vendor/packaging/__about__.py | 31 + .../_vendor/packaging/__init__.py | 24 + .../_vendor/packaging/_compat.py | 40 + .../_vendor/packaging/_structures.py | 78 + .../_vendor/packaging/specifiers.py | 784 + .../_vendor/packaging/version.py | 403 + .../pkg_resources/_vendor/six.py | 868 + .../pkg_resources/extern/__init__.py | 71 + .../DESCRIPTION.rst | 7 + .../pyOpenSSL-0.15.1.dist-info/INSTALLER | 1 + .../pyOpenSSL-0.15.1.dist-info/METADATA | 36 + .../pyOpenSSL-0.15.1.dist-info/RECORD | 35 + .../pyOpenSSL-0.15.1.dist-info/WHEEL | 6 + .../pyOpenSSL-0.15.1.dist-info/metadata.json | 1 + .../pyOpenSSL-0.15.1.dist-info/top_level.txt | 1 
+ .../pyasn1-0.1.9.dist-info/DESCRIPTION.rst | 3 + .../pyasn1-0.1.9.dist-info/INSTALLER | 1 + .../pyasn1-0.1.9.dist-info/METADATA | 29 + .../pyasn1-0.1.9.dist-info/RECORD | 64 + .../pyasn1-0.1.9.dist-info/WHEEL | 6 + .../pyasn1-0.1.9.dist-info/metadata.json | 1 + .../pyasn1-0.1.9.dist-info/top_level.txt | 1 + .../pyasn1-0.1.9.dist-info/zip-safe | 1 + Lib/site-packages/pyasn1/__init__.py | 8 + Lib/site-packages/pyasn1/codec/__init__.py | 1 + .../pyasn1/codec/ber/__init__.py | 1 + Lib/site-packages/pyasn1/codec/ber/decoder.py | 841 + Lib/site-packages/pyasn1/codec/ber/encoder.py | 433 + Lib/site-packages/pyasn1/codec/ber/eoo.py | 8 + .../pyasn1/codec/cer/__init__.py | 1 + Lib/site-packages/pyasn1/codec/cer/decoder.py | 35 + Lib/site-packages/pyasn1/codec/cer/encoder.py | 130 + .../pyasn1/codec/der/__init__.py | 1 + Lib/site-packages/pyasn1/codec/der/decoder.py | 9 + Lib/site-packages/pyasn1/codec/der/encoder.py | 32 + Lib/site-packages/pyasn1/compat/__init__.py | 1 + Lib/site-packages/pyasn1/compat/binary.py | 10 + Lib/site-packages/pyasn1/compat/octets.py | 22 + Lib/site-packages/pyasn1/debug.py | 110 + Lib/site-packages/pyasn1/error.py | 3 + Lib/site-packages/pyasn1/type/__init__.py | 1 + Lib/site-packages/pyasn1/type/base.py | 278 + Lib/site-packages/pyasn1/type/char.py | 64 + Lib/site-packages/pyasn1/type/constraint.py | 200 + Lib/site-packages/pyasn1/type/error.py | 3 + Lib/site-packages/pyasn1/type/namedtype.py | 149 + Lib/site-packages/pyasn1/type/namedval.py | 58 + Lib/site-packages/pyasn1/type/tag.py | 128 + Lib/site-packages/pyasn1/type/tagmap.py | 66 + Lib/site-packages/pyasn1/type/univ.py | 1156 + Lib/site-packages/pyasn1/type/useful.py | 17 + .../pycparser-2.14.dist-info/DESCRIPTION.rst | 7 + .../pycparser-2.14.dist-info/INSTALLER | 1 + .../pycparser-2.14.dist-info/METADATA | 19 + .../pycparser-2.14.dist-info/RECORD | 40 + .../pycparser-2.14.dist-info/WHEEL | 5 + .../pycparser-2.14.dist-info/metadata.json | 1 + .../pycparser-2.14.dist-info/top_level.txt | 1 + Lib/site-packages/pycparser/__init__.py | 93 + Lib/site-packages/pycparser/_ast_gen.py | 278 + Lib/site-packages/pycparser/_build_tables.py | 33 + Lib/site-packages/pycparser/_c_ast.cfg | 189 + Lib/site-packages/pycparser/ast_transforms.py | 105 + Lib/site-packages/pycparser/c_ast.py | 797 + Lib/site-packages/pycparser/c_generator.py | 399 + Lib/site-packages/pycparser/c_lexer.py | 485 + Lib/site-packages/pycparser/c_parser.py | 1701 ++ Lib/site-packages/pycparser/lextab.py | 9 + Lib/site-packages/pycparser/ply/__init__.py | 4 + Lib/site-packages/pycparser/ply/cpp.py | 898 + Lib/site-packages/pycparser/ply/ctokens.py | 133 + Lib/site-packages/pycparser/ply/lex.py | 1058 + Lib/site-packages/pycparser/ply/yacc.py | 3276 +++ Lib/site-packages/pycparser/plyparser.py | 55 + Lib/site-packages/pycparser/yacctab.py | 292 + .../pycrypto-2.6.1.dist-info/DESCRIPTION.rst | 3 + .../pycrypto-2.6.1.dist-info/INSTALLER | 1 + .../pycrypto-2.6.1.dist-info/METADATA | 22 + .../pycrypto-2.6.1.dist-info/RECORD | 265 + .../pycrypto-2.6.1.dist-info/WHEEL | 5 + .../pycrypto-2.6.1.dist-info/metadata.json | 1 + .../pycrypto-2.6.1.dist-info/top_level.txt | 1 + .../setuptools-19.3.dist-info/DESCRIPTION.rst | 238 + .../setuptools-19.3.dist-info/METADATA | 268 + .../setuptools-19.3.dist-info/RECORD | 128 + .../setuptools-19.3.dist-info/WHEEL | 6 + .../dependency_links.txt | 2 + .../entry_points.txt | 61 + .../setuptools-19.3.dist-info/metadata.json | 1 + .../setuptools-19.3.dist-info/top_level.txt | 4 + .../setuptools-19.3.dist-info/zip-safe | 1 + 
Lib/site-packages/setuptools/__init__.py | 169 + Lib/site-packages/setuptools/archive_util.py | 170 + Lib/site-packages/setuptools/cli-32.exe | Bin 0 -> 65536 bytes Lib/site-packages/setuptools/cli-64.exe | Bin 0 -> 74752 bytes Lib/site-packages/setuptools/cli-arm-32.exe | Bin 0 -> 69120 bytes Lib/site-packages/setuptools/cli.exe | Bin 0 -> 65536 bytes .../setuptools/command/__init__.py | 18 + Lib/site-packages/setuptools/command/alias.py | 78 + .../setuptools/command/bdist_egg.py | 471 + .../setuptools/command/bdist_rpm.py | 43 + .../setuptools/command/bdist_wininst.py | 21 + .../setuptools/command/build_ext.py | 296 + .../setuptools/command/build_py.py | 222 + .../setuptools/command/develop.py | 196 + .../setuptools/command/easy_install.py | 2308 ++ .../setuptools/command/egg_info.py | 481 + .../setuptools/command/install.py | 125 + .../setuptools/command/install_egg_info.py | 116 + .../setuptools/command/install_lib.py | 120 + .../setuptools/command/install_scripts.py | 60 + .../setuptools/command/launcher manifest.xml | 15 + .../setuptools/command/register.py | 10 + .../setuptools/command/rotate.py | 62 + .../setuptools/command/saveopts.py | 22 + Lib/site-packages/setuptools/command/sdist.py | 196 + .../setuptools/command/setopt.py | 150 + Lib/site-packages/setuptools/command/test.py | 195 + .../setuptools/command/upload_docs.py | 192 + Lib/site-packages/setuptools/depends.py | 217 + Lib/site-packages/setuptools/dist.py | 871 + Lib/site-packages/setuptools/extension.py | 55 + .../setuptools/extern/__init__.py | 5 + Lib/site-packages/setuptools/gui-32.exe | Bin 0 -> 65536 bytes Lib/site-packages/setuptools/gui-64.exe | Bin 0 -> 75264 bytes Lib/site-packages/setuptools/gui-arm-32.exe | Bin 0 -> 69120 bytes Lib/site-packages/setuptools/gui.exe | Bin 0 -> 65536 bytes Lib/site-packages/setuptools/lib2to3_ex.py | 58 + Lib/site-packages/setuptools/msvc9_support.py | 63 + Lib/site-packages/setuptools/package_index.py | 1069 + Lib/site-packages/setuptools/py26compat.py | 22 + Lib/site-packages/setuptools/py27compat.py | 15 + Lib/site-packages/setuptools/py31compat.py | 52 + Lib/site-packages/setuptools/sandbox.py | 492 + .../setuptools/script (dev).tmpl | 5 + Lib/site-packages/setuptools/script.tmpl | 3 + Lib/site-packages/setuptools/site-patch.py | 76 + Lib/site-packages/setuptools/ssl_support.py | 242 + Lib/site-packages/setuptools/unicode_utils.py | 41 + Lib/site-packages/setuptools/utils.py | 11 + Lib/site-packages/setuptools/version.py | 1 + .../setuptools/windows_support.py | 29 + .../DESCRIPTION.rst | 31 + .../simplejson-3.8.1.dist-info/INSTALLER | 1 + .../simplejson-3.8.1.dist-info/METADATA | 56 + .../simplejson-3.8.1.dist-info/RECORD | 80 + .../simplejson-3.8.1.dist-info/WHEEL | 5 + .../simplejson-3.8.1.dist-info/metadata.json | 1 + .../simplejson-3.8.1.dist-info/top_level.txt | 1 + Lib/site-packages/simplejson/__init__.py | 575 + Lib/site-packages/simplejson/compat.py | 46 + Lib/site-packages/simplejson/decoder.py | 400 + Lib/site-packages/simplejson/encoder.py | 681 + Lib/site-packages/simplejson/ordered_dict.py | 119 + Lib/site-packages/simplejson/scanner.py | 133 + .../simplejson/tests/__init__.py | 90 + .../simplejson/tests/test_bigint_as_string.py | 67 + .../tests/test_bitsize_int_as_string.py | 73 + .../simplejson/tests/test_check_circular.py | 30 + .../simplejson/tests/test_decimal.py | 71 + .../simplejson/tests/test_decode.py | 99 + .../simplejson/tests/test_default.py | 9 + .../simplejson/tests/test_dump.py | 130 + .../tests/test_encode_basestring_ascii.py | 47 + 
.../simplejson/tests/test_encode_for_html.py | 30 + .../simplejson/tests/test_errors.py | 51 + .../simplejson/tests/test_fail.py | 176 + .../simplejson/tests/test_float.py | 35 + .../simplejson/tests/test_for_json.py | 97 + .../simplejson/tests/test_indent.py | 86 + .../simplejson/tests/test_item_sort_key.py | 20 + .../simplejson/tests/test_iterable.py | 31 + .../simplejson/tests/test_namedtuple.py | 122 + .../simplejson/tests/test_pass1.py | 71 + .../simplejson/tests/test_pass2.py | 14 + .../simplejson/tests/test_pass3.py | 20 + .../simplejson/tests/test_recursion.py | 67 + .../simplejson/tests/test_scanstring.py | 194 + .../simplejson/tests/test_separators.py | 42 + .../simplejson/tests/test_speedups.py | 39 + .../simplejson/tests/test_subclass.py | 37 + .../simplejson/tests/test_tool.py | 97 + .../simplejson/tests/test_tuple.py | 47 + .../simplejson/tests/test_unicode.py | 153 + Lib/site-packages/simplejson/tool.py | 42 + .../six-1.10.0.dist-info/DESCRIPTION.rst | 18 + .../six-1.10.0.dist-info/INSTALLER | 1 + .../six-1.10.0.dist-info/METADATA | 34 + Lib/site-packages/six-1.10.0.dist-info/RECORD | 9 + Lib/site-packages/six-1.10.0.dist-info/WHEEL | 6 + .../six-1.10.0.dist-info/metadata.json | 1 + .../six-1.10.0.dist-info/top_level.txt | 1 + Lib/site-packages/six.py | 868 + Lib/site-packages/sqlalchemy/__init__.py | 138 + .../sqlalchemy/connectors/__init__.py | 10 + .../sqlalchemy/connectors/mxodbc.py | 150 + .../sqlalchemy/connectors/pyodbc.py | 183 + .../sqlalchemy/connectors/zxJDBC.py | 60 + .../sqlalchemy/databases/__init__.py | 30 + .../sqlalchemy/dialects/__init__.py | 45 + .../sqlalchemy/dialects/firebird/__init__.py | 21 + .../sqlalchemy/dialects/firebird/base.py | 738 + .../sqlalchemy/dialects/firebird/fdb.py | 118 + .../dialects/firebird/kinterbasdb.py | 184 + .../sqlalchemy/dialects/mssql/__init__.py | 27 + .../sqlalchemy/dialects/mssql/adodbapi.py | 80 + .../sqlalchemy/dialects/mssql/base.py | 1929 ++ .../dialects/mssql/information_schema.py | 136 + .../sqlalchemy/dialects/mssql/mxodbc.py | 112 + .../sqlalchemy/dialects/mssql/pymssql.py | 96 + .../sqlalchemy/dialects/mssql/pyodbc.py | 265 + .../sqlalchemy/dialects/mssql/zxjdbc.py | 69 + .../sqlalchemy/dialects/mysql/__init__.py | 31 + .../sqlalchemy/dialects/mysql/base.py | 3438 +++ .../sqlalchemy/dialects/mysql/cymysql.py | 87 + .../sqlalchemy/dialects/mysql/gaerdbms.py | 102 + .../dialects/mysql/mysqlconnector.py | 176 + .../sqlalchemy/dialects/mysql/mysqldb.py | 198 + .../sqlalchemy/dialects/mysql/oursql.py | 254 + .../sqlalchemy/dialects/mysql/pymysql.py | 57 + .../sqlalchemy/dialects/mysql/pyodbc.py | 79 + .../sqlalchemy/dialects/mysql/zxjdbc.py | 117 + .../sqlalchemy/dialects/oracle/__init__.py | 24 + .../sqlalchemy/dialects/oracle/base.py | 1546 ++ .../sqlalchemy/dialects/oracle/cx_oracle.py | 989 + .../sqlalchemy/dialects/oracle/zxjdbc.py | 237 + .../sqlalchemy/dialects/postgres.py | 18 + .../dialects/postgresql/__init__.py | 31 + .../sqlalchemy/dialects/postgresql/base.py | 2940 +++ .../dialects/postgresql/constraints.py | 98 + .../sqlalchemy/dialects/postgresql/hstore.py | 376 + .../sqlalchemy/dialects/postgresql/json.py | 358 + .../sqlalchemy/dialects/postgresql/pg8000.py | 264 + .../dialects/postgresql/psycopg2.py | 726 + .../dialects/postgresql/psycopg2cffi.py | 61 + .../dialects/postgresql/pypostgresql.py | 97 + .../sqlalchemy/dialects/postgresql/ranges.py | 168 + .../sqlalchemy/dialects/postgresql/zxjdbc.py | 46 + .../sqlalchemy/dialects/sqlite/__init__.py | 20 + .../sqlalchemy/dialects/sqlite/base.py | 1476 ++ 
.../sqlalchemy/dialects/sqlite/pysqlcipher.py | 116 + .../sqlalchemy/dialects/sqlite/pysqlite.py | 377 + .../sqlalchemy/dialects/sybase/__init__.py | 28 + .../sqlalchemy/dialects/sybase/base.py | 825 + .../sqlalchemy/dialects/sybase/mxodbc.py | 33 + .../sqlalchemy/dialects/sybase/pyodbc.py | 86 + .../sqlalchemy/dialects/sybase/pysybase.py | 102 + .../sqlalchemy/engine/__init__.py | 433 + Lib/site-packages/sqlalchemy/engine/base.py | 2134 ++ .../sqlalchemy/engine/default.py | 1023 + .../sqlalchemy/engine/interfaces.py | 1152 + .../sqlalchemy/engine/reflection.py | 788 + Lib/site-packages/sqlalchemy/engine/result.py | 1273 ++ .../sqlalchemy/engine/strategies.py | 262 + .../sqlalchemy/engine/threadlocal.py | 138 + Lib/site-packages/sqlalchemy/engine/url.py | 253 + Lib/site-packages/sqlalchemy/engine/util.py | 74 + .../sqlalchemy/event/__init__.py | 11 + Lib/site-packages/sqlalchemy/event/api.py | 188 + Lib/site-packages/sqlalchemy/event/attr.py | 373 + Lib/site-packages/sqlalchemy/event/base.py | 289 + Lib/site-packages/sqlalchemy/event/legacy.py | 169 + .../sqlalchemy/event/registry.py | 262 + Lib/site-packages/sqlalchemy/events.py | 1101 + Lib/site-packages/sqlalchemy/exc.py | 374 + Lib/site-packages/sqlalchemy/ext/__init__.py | 11 + .../sqlalchemy/ext/associationproxy.py | 1068 + Lib/site-packages/sqlalchemy/ext/automap.py | 1038 + Lib/site-packages/sqlalchemy/ext/baked.py | 523 + Lib/site-packages/sqlalchemy/ext/compiler.py | 451 + .../sqlalchemy/ext/declarative/__init__.py | 18 + .../sqlalchemy/ext/declarative/api.py | 671 + .../sqlalchemy/ext/declarative/base.py | 657 + .../sqlalchemy/ext/declarative/clsregistry.py | 328 + .../sqlalchemy/ext/horizontal_shard.py | 131 + Lib/site-packages/sqlalchemy/ext/hybrid.py | 810 + .../sqlalchemy/ext/instrumentation.py | 414 + Lib/site-packages/sqlalchemy/ext/mutable.py | 689 + .../sqlalchemy/ext/orderinglist.py | 380 + .../sqlalchemy/ext/serializer.py | 159 + Lib/site-packages/sqlalchemy/inspection.py | 93 + Lib/site-packages/sqlalchemy/interfaces.py | 312 + Lib/site-packages/sqlalchemy/log.py | 217 + Lib/site-packages/sqlalchemy/orm/__init__.py | 275 + .../sqlalchemy/orm/attributes.py | 1598 ++ Lib/site-packages/sqlalchemy/orm/base.py | 540 + .../sqlalchemy/orm/collections.py | 1580 ++ .../sqlalchemy/orm/dependency.py | 1175 + .../sqlalchemy/orm/deprecated_interfaces.py | 487 + .../sqlalchemy/orm/descriptor_props.py | 699 + Lib/site-packages/sqlalchemy/orm/dynamic.py | 370 + Lib/site-packages/sqlalchemy/orm/evaluator.py | 134 + Lib/site-packages/sqlalchemy/orm/events.py | 1801 ++ Lib/site-packages/sqlalchemy/orm/exc.py | 165 + Lib/site-packages/sqlalchemy/orm/identity.py | 314 + .../sqlalchemy/orm/instrumentation.py | 528 + .../sqlalchemy/orm/interfaces.py | 640 + Lib/site-packages/sqlalchemy/orm/loading.py | 669 + Lib/site-packages/sqlalchemy/orm/mapper.py | 2909 +++ .../sqlalchemy/orm/path_registry.py | 291 + .../sqlalchemy/orm/persistence.py | 1408 ++ .../sqlalchemy/orm/properties.py | 276 + Lib/site-packages/sqlalchemy/orm/query.py | 4019 ++++ .../sqlalchemy/orm/relationships.py | 2861 +++ Lib/site-packages/sqlalchemy/orm/scoping.py | 177 + Lib/site-packages/sqlalchemy/orm/session.py | 2792 +++ Lib/site-packages/sqlalchemy/orm/state.py | 729 + .../sqlalchemy/orm/strategies.py | 1618 ++ .../sqlalchemy/orm/strategy_options.py | 1037 + Lib/site-packages/sqlalchemy/orm/sync.py | 140 + .../sqlalchemy/orm/unitofwork.py | 656 + Lib/site-packages/sqlalchemy/orm/util.py | 1034 + Lib/site-packages/sqlalchemy/pool.py | 1367 ++ 
Lib/site-packages/sqlalchemy/processors.py | 155 + Lib/site-packages/sqlalchemy/schema.py | 65 + Lib/site-packages/sqlalchemy/sql/__init__.py | 92 + .../sqlalchemy/sql/annotation.py | 196 + Lib/site-packages/sqlalchemy/sql/base.py | 647 + Lib/site-packages/sqlalchemy/sql/compiler.py | 2816 +++ Lib/site-packages/sqlalchemy/sql/crud.py | 570 + Lib/site-packages/sqlalchemy/sql/ddl.py | 1095 + .../sqlalchemy/sql/default_comparator.py | 287 + Lib/site-packages/sqlalchemy/sql/dml.py | 846 + Lib/site-packages/sqlalchemy/sql/elements.py | 3952 ++++ .../sqlalchemy/sql/expression.py | 137 + Lib/site-packages/sqlalchemy/sql/functions.py | 618 + Lib/site-packages/sqlalchemy/sql/naming.py | 146 + Lib/site-packages/sqlalchemy/sql/operators.py | 902 + Lib/site-packages/sqlalchemy/sql/schema.py | 3787 ++++ .../sqlalchemy/sql/selectable.py | 3436 +++ Lib/site-packages/sqlalchemy/sql/sqltypes.py | 1714 ++ Lib/site-packages/sqlalchemy/sql/type_api.py | 1186 ++ Lib/site-packages/sqlalchemy/sql/util.py | 612 + Lib/site-packages/sqlalchemy/sql/visitors.py | 328 + .../sqlalchemy/testing/__init__.py | 36 + .../sqlalchemy/testing/assertions.py | 491 + .../sqlalchemy/testing/assertsql.py | 372 + .../sqlalchemy/testing/config.py | 92 + .../sqlalchemy/testing/distutils_run.py | 11 + .../sqlalchemy/testing/engines.py | 346 + .../sqlalchemy/testing/entities.py | 101 + .../sqlalchemy/testing/exclusions.py | 441 + .../sqlalchemy/testing/fixtures.py | 386 + Lib/site-packages/sqlalchemy/testing/mock.py | 21 + .../sqlalchemy/testing/pickleable.py | 143 + .../sqlalchemy/testing/plugin/__init__.py | 0 .../sqlalchemy/testing/plugin/bootstrap.py | 44 + .../sqlalchemy/testing/plugin/noseplugin.py | 107 + .../sqlalchemy/testing/plugin/plugin_base.py | 552 + .../sqlalchemy/testing/plugin/pytestplugin.py | 181 + .../sqlalchemy/testing/profiling.py | 260 + .../sqlalchemy/testing/provision.py | 201 + .../sqlalchemy/testing/replay_fixture.py | 172 + .../sqlalchemy/testing/requirements.py | 709 + .../sqlalchemy/testing/runner.py | 50 + .../sqlalchemy/testing/schema.py | 98 + .../sqlalchemy/testing/suite/__init__.py | 10 + .../sqlalchemy/testing/suite/test_ddl.py | 65 + .../sqlalchemy/testing/suite/test_dialect.py | 41 + .../sqlalchemy/testing/suite/test_insert.py | 269 + .../testing/suite/test_reflection.py | 647 + .../sqlalchemy/testing/suite/test_results.py | 220 + .../sqlalchemy/testing/suite/test_select.py | 192 + .../sqlalchemy/testing/suite/test_sequence.py | 126 + .../sqlalchemy/testing/suite/test_types.py | 594 + .../testing/suite/test_update_delete.py | 63 + Lib/site-packages/sqlalchemy/testing/util.py | 280 + .../sqlalchemy/testing/warnings.py | 34 + Lib/site-packages/sqlalchemy/types.py | 78 + Lib/site-packages/sqlalchemy/util/__init__.py | 49 + .../sqlalchemy/util/_collections.py | 1043 + Lib/site-packages/sqlalchemy/util/compat.py | 263 + .../sqlalchemy/util/deprecations.py | 146 + .../sqlalchemy/util/langhelpers.py | 1377 ++ Lib/site-packages/sqlalchemy/util/queue.py | 199 + .../sqlalchemy/util/topological.py | 100 + .../wheel-0.26.0.dist-info/DESCRIPTION.rst | 288 + .../wheel-0.26.0.dist-info/LICENSE.txt | 22 + .../wheel-0.26.0.dist-info/METADATA | 317 + .../wheel-0.26.0.dist-info/RECORD | 81 + .../wheel-0.26.0.dist-info/WHEEL | 6 + .../wheel-0.26.0.dist-info/entry_points.txt | 5 + .../wheel-0.26.0.dist-info/metadata.json | 1 + .../wheel-0.26.0.dist-info/top_level.txt | 1 + Lib/site-packages/wheel/__init__.py | 2 + Lib/site-packages/wheel/__main__.py | 17 + Lib/site-packages/wheel/archive.py | 61 + 
Lib/site-packages/wheel/bdist_wheel.py | 448 + Lib/site-packages/wheel/decorator.py | 19 + Lib/site-packages/wheel/egg2wheel.py | 73 + Lib/site-packages/wheel/eggnames.txt | 87 + Lib/site-packages/wheel/install.py | 480 + Lib/site-packages/wheel/metadata.py | 310 + Lib/site-packages/wheel/paths.py | 41 + Lib/site-packages/wheel/pep425tags.py | 100 + Lib/site-packages/wheel/pkginfo.py | 44 + .../wheel/signatures/__init__.py | 106 + Lib/site-packages/wheel/signatures/djbec.py | 270 + .../wheel/signatures/ed25519py.py | 52 + Lib/site-packages/wheel/signatures/keys.py | 99 + Lib/site-packages/wheel/test/__init__.py | 1 + .../test/complex-dist/complexdist/__init__.py | 2 + .../wheel/test/complex-dist/setup.py | 30 + .../wheel/test/headers.dist/header.h | 0 .../wheel/test/headers.dist/headersdist.py | 0 .../wheel/test/headers.dist/setup.py | 16 + .../wheel/test/pydist-schema.json | 362 + .../wheel/test/simple.dist/setup.py | 17 + .../test/simple.dist/simpledist/__init__.py | 0 .../test/test-1.0-py2.py3-none-win32.whl | Bin 0 -> 5226 bytes Lib/site-packages/wheel/test/test_basic.py | 176 + Lib/site-packages/wheel/test/test_install.py | 55 + Lib/site-packages/wheel/test/test_keys.py | 98 + Lib/site-packages/wheel/test/test_paths.py | 6 + Lib/site-packages/wheel/test/test_ranking.py | 43 + .../wheel/test/test_signatures.py | 47 + Lib/site-packages/wheel/test/test_tagopt.py | 112 + Lib/site-packages/wheel/test/test_tool.py | 28 + .../wheel/test/test_wheelfile.py | 69 + Lib/site-packages/wheel/tool/__init__.py | 362 + Lib/site-packages/wheel/util.py | 167 + Lib/site-packages/wheel/wininst2wheel.py | 187 + POPPLER_AUTHORS | 5 + POPPLER_BINARIES | 23 + POPPLER_README | 39 + POPPLER_README-XPDF | 423 + README.md | 9 + Scripts/createfontdatachunk.py | 18 + Scripts/easy_install-3.5.exe | Bin 0 -> 89468 bytes Scripts/easy_install.exe | Bin 0 -> 89468 bytes Scripts/explode.py | 112 + Scripts/painter.py | 82 + Scripts/pip.exe | Bin 0 -> 89438 bytes Scripts/pip3.5.exe | Bin 0 -> 89438 bytes Scripts/pip3.exe | Bin 0 -> 89438 bytes Scripts/player.py | 102 + Scripts/thresholder.py | 74 + Scripts/viewer.py | 54 + Scripts/wheel.exe | Bin 0 -> 89447 bytes freetype6.dll | Bin 0 -> 553382 bytes get-pip.py | 17759 ++++++++++++++++ jpeg62.dll | Bin 0 -> 127488 bytes libcairo-2.dll | Bin 0 -> 1138880 bytes libexpat-1.dll | Bin 0 -> 440258 bytes libfontconfig-1.dll | Bin 0 -> 279059 bytes libgcc_s_dw2-1.dll | Bin 0 -> 112142 bytes libpixman-1-0.dll | Bin 0 -> 538008 bytes libpng16-16.dll | Bin 0 -> 194157 bytes libpoppler-cpp.dll | Bin 0 -> 1220228 bytes libpoppler.dll | Bin 0 -> 10151389 bytes libstdc++-6.dll | Bin 0 -> 1000974 bytes libtiff3.dll | Bin 0 -> 376832 bytes pdftocairo.exe | Bin 0 -> 894233 bytes pdftotext.exe | Bin 0 -> 399836 bytes python.exe | Bin 0 -> 38680 bytes python3.dll | Bin 0 -> 51480 bytes python35.dll | Bin 0 -> 3122968 bytes python35.zip | Bin 0 -> 2260580 bytes pythonw.exe | Bin 0 -> 38680 bytes pyvenv.cfg | 1 + sqlite3.dll | Bin 0 -> 601880 bytes vcruntime140.dll | Bin 0 -> 85328 bytes zlib1.dll | Bin 0 -> 103424 bytes 1122 files changed, 348397 insertions(+) create mode 100644 .gitignore create mode 100644 COPYING create mode 100644 COPYING3 create mode 100644 Lib/site-packages/Crypto/Cipher/AES.py create mode 100644 Lib/site-packages/Crypto/Cipher/ARC2.py create mode 100644 Lib/site-packages/Crypto/Cipher/ARC4.py create mode 100644 Lib/site-packages/Crypto/Cipher/Blowfish.py create mode 100644 Lib/site-packages/Crypto/Cipher/CAST.py create mode 100644 Lib/site-packages/Crypto/Cipher/DES.py 
create mode 100644 Lib/site-packages/Crypto/Cipher/DES3.py create mode 100644 Lib/site-packages/Crypto/Cipher/PKCS1_OAEP.py create mode 100644 Lib/site-packages/Crypto/Cipher/PKCS1_v1_5.py create mode 100644 Lib/site-packages/Crypto/Cipher/XOR.py create mode 100644 Lib/site-packages/Crypto/Cipher/__init__.py create mode 100644 Lib/site-packages/Crypto/Cipher/blockalgo.py create mode 100644 Lib/site-packages/Crypto/Hash/HMAC.py create mode 100644 Lib/site-packages/Crypto/Hash/MD2.py create mode 100644 Lib/site-packages/Crypto/Hash/MD4.py create mode 100644 Lib/site-packages/Crypto/Hash/MD5.py create mode 100644 Lib/site-packages/Crypto/Hash/RIPEMD.py create mode 100644 Lib/site-packages/Crypto/Hash/SHA.py create mode 100644 Lib/site-packages/Crypto/Hash/SHA224.py create mode 100644 Lib/site-packages/Crypto/Hash/SHA256.py create mode 100644 Lib/site-packages/Crypto/Hash/SHA384.py create mode 100644 Lib/site-packages/Crypto/Hash/SHA512.py create mode 100644 Lib/site-packages/Crypto/Hash/__init__.py create mode 100644 Lib/site-packages/Crypto/Hash/hashalgo.py create mode 100644 Lib/site-packages/Crypto/Protocol/AllOrNothing.py create mode 100644 Lib/site-packages/Crypto/Protocol/Chaffing.py create mode 100644 Lib/site-packages/Crypto/Protocol/KDF.py create mode 100644 Lib/site-packages/Crypto/Protocol/__init__.py create mode 100644 Lib/site-packages/Crypto/PublicKey/DSA.py create mode 100644 Lib/site-packages/Crypto/PublicKey/ElGamal.py create mode 100644 Lib/site-packages/Crypto/PublicKey/RSA.py create mode 100644 Lib/site-packages/Crypto/PublicKey/_DSA.py create mode 100644 Lib/site-packages/Crypto/PublicKey/_RSA.py create mode 100644 Lib/site-packages/Crypto/PublicKey/__init__.py create mode 100644 Lib/site-packages/Crypto/PublicKey/_slowmath.py create mode 100644 Lib/site-packages/Crypto/PublicKey/pubkey.py create mode 100644 Lib/site-packages/Crypto/Random/Fortuna/FortunaAccumulator.py create mode 100644 Lib/site-packages/Crypto/Random/Fortuna/FortunaGenerator.py create mode 100644 Lib/site-packages/Crypto/Random/Fortuna/SHAd256.py create mode 100644 Lib/site-packages/Crypto/Random/Fortuna/__init__.py create mode 100644 Lib/site-packages/Crypto/Random/OSRNG/__init__.py create mode 100644 Lib/site-packages/Crypto/Random/OSRNG/fallback.py create mode 100644 Lib/site-packages/Crypto/Random/OSRNG/nt.py create mode 100644 Lib/site-packages/Crypto/Random/OSRNG/posix.py create mode 100644 Lib/site-packages/Crypto/Random/OSRNG/rng_base.py create mode 100644 Lib/site-packages/Crypto/Random/_UserFriendlyRNG.py create mode 100644 Lib/site-packages/Crypto/Random/__init__.py create mode 100644 Lib/site-packages/Crypto/Random/random.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Cipher/__init__.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Cipher/common.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Cipher/test_AES.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Cipher/test_ARC2.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Cipher/test_ARC4.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Cipher/test_Blowfish.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Cipher/test_CAST.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Cipher/test_DES.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Cipher/test_DES3.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Cipher/test_XOR.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Cipher/test_pkcs1_15.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Cipher/test_pkcs1_oaep.py 
create mode 100644 Lib/site-packages/Crypto/SelfTest/Hash/__init__.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Hash/common.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Hash/test_HMAC.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Hash/test_MD2.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Hash/test_MD4.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Hash/test_MD5.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Hash/test_RIPEMD.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Hash/test_SHA.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Hash/test_SHA224.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Hash/test_SHA256.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Hash/test_SHA384.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Hash/test_SHA512.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Protocol/__init__.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Protocol/test_AllOrNothing.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Protocol/test_KDF.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Protocol/test_chaffing.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Protocol/test_rfc1751.py create mode 100644 Lib/site-packages/Crypto/SelfTest/PublicKey/__init__.py create mode 100644 Lib/site-packages/Crypto/SelfTest/PublicKey/test_DSA.py create mode 100644 Lib/site-packages/Crypto/SelfTest/PublicKey/test_ElGamal.py create mode 100644 Lib/site-packages/Crypto/SelfTest/PublicKey/test_RSA.py create mode 100644 Lib/site-packages/Crypto/SelfTest/PublicKey/test_importKey.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Random/Fortuna/__init__.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Random/Fortuna/test_FortunaAccumulator.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Random/Fortuna/test_FortunaGenerator.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Random/Fortuna/test_SHAd256.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Random/OSRNG/__init__.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Random/OSRNG/test_fallback.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Random/OSRNG/test_generic.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Random/OSRNG/test_nt.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Random/OSRNG/test_posix.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Random/OSRNG/test_winrandom.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Random/__init__.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Random/test__UserFriendlyRNG.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Random/test_random.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Random/test_rpoolcompat.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Signature/__init__.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Signature/test_pkcs1_15.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Signature/test_pkcs1_pss.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Util/__init__.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Util/test_Counter.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Util/test_asn1.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Util/test_number.py create mode 100644 Lib/site-packages/Crypto/SelfTest/Util/test_winrandom.py create mode 100644 Lib/site-packages/Crypto/SelfTest/__init__.py create mode 100644 Lib/site-packages/Crypto/SelfTest/st_common.py create mode 100644 
Lib/site-packages/Crypto/Signature/PKCS1_PSS.py create mode 100644 Lib/site-packages/Crypto/Signature/PKCS1_v1_5.py create mode 100644 Lib/site-packages/Crypto/Signature/__init__.py create mode 100644 Lib/site-packages/Crypto/Util/Counter.py create mode 100644 Lib/site-packages/Crypto/Util/RFC1751.py create mode 100644 Lib/site-packages/Crypto/Util/__init__.py create mode 100644 Lib/site-packages/Crypto/Util/_number_new.py create mode 100644 Lib/site-packages/Crypto/Util/asn1.py create mode 100644 Lib/site-packages/Crypto/Util/number.py create mode 100644 Lib/site-packages/Crypto/Util/py3compat.py create mode 100644 Lib/site-packages/Crypto/Util/randpool.py create mode 100644 Lib/site-packages/Crypto/Util/winrandom.py create mode 100644 Lib/site-packages/Crypto/__init__.py create mode 100644 Lib/site-packages/Crypto/pct_warnings.py create mode 100644 Lib/site-packages/OpenSSL/SSL.py create mode 100644 Lib/site-packages/OpenSSL/__init__.py create mode 100644 Lib/site-packages/OpenSSL/_util.py create mode 100644 Lib/site-packages/OpenSSL/crypto.py create mode 100644 Lib/site-packages/OpenSSL/rand.py create mode 100644 Lib/site-packages/OpenSSL/test/__init__.py create mode 100644 Lib/site-packages/OpenSSL/test/test_crypto.py create mode 100644 Lib/site-packages/OpenSSL/test/test_rand.py create mode 100644 Lib/site-packages/OpenSSL/test/test_ssl.py create mode 100644 Lib/site-packages/OpenSSL/test/test_tsafe.py create mode 100644 Lib/site-packages/OpenSSL/test/test_util.py create mode 100644 Lib/site-packages/OpenSSL/test/util.py create mode 100644 Lib/site-packages/OpenSSL/tsafe.py create mode 100644 Lib/site-packages/OpenSSL/version.py create mode 100644 Lib/site-packages/PIL/BdfFontFile.py create mode 100644 Lib/site-packages/PIL/BmpImagePlugin.py create mode 100644 Lib/site-packages/PIL/BufrStubImagePlugin.py create mode 100644 Lib/site-packages/PIL/ContainerIO.py create mode 100644 Lib/site-packages/PIL/CurImagePlugin.py create mode 100644 Lib/site-packages/PIL/DcxImagePlugin.py create mode 100644 Lib/site-packages/PIL/EpsImagePlugin.py create mode 100644 Lib/site-packages/PIL/ExifTags.py create mode 100644 Lib/site-packages/PIL/FitsStubImagePlugin.py create mode 100644 Lib/site-packages/PIL/FliImagePlugin.py create mode 100644 Lib/site-packages/PIL/FontFile.py create mode 100644 Lib/site-packages/PIL/FpxImagePlugin.py create mode 100644 Lib/site-packages/PIL/GbrImagePlugin.py create mode 100644 Lib/site-packages/PIL/GdImageFile.py create mode 100644 Lib/site-packages/PIL/GifImagePlugin.py create mode 100644 Lib/site-packages/PIL/GimpGradientFile.py create mode 100644 Lib/site-packages/PIL/GimpPaletteFile.py create mode 100644 Lib/site-packages/PIL/GribStubImagePlugin.py create mode 100644 Lib/site-packages/PIL/Hdf5StubImagePlugin.py create mode 100644 Lib/site-packages/PIL/IcnsImagePlugin.py create mode 100644 Lib/site-packages/PIL/IcoImagePlugin.py create mode 100644 Lib/site-packages/PIL/ImImagePlugin.py create mode 100644 Lib/site-packages/PIL/Image.py create mode 100644 Lib/site-packages/PIL/ImageChops.py create mode 100644 Lib/site-packages/PIL/ImageCms.py create mode 100644 Lib/site-packages/PIL/ImageColor.py create mode 100644 Lib/site-packages/PIL/ImageDraw.py create mode 100644 Lib/site-packages/PIL/ImageDraw2.py create mode 100644 Lib/site-packages/PIL/ImageEnhance.py create mode 100644 Lib/site-packages/PIL/ImageFile.py create mode 100644 Lib/site-packages/PIL/ImageFilter.py create mode 100644 Lib/site-packages/PIL/ImageFont.py create mode 100644 
Lib/site-packages/PIL/ImageGrab.py create mode 100644 Lib/site-packages/PIL/ImageMath.py create mode 100644 Lib/site-packages/PIL/ImageMode.py create mode 100644 Lib/site-packages/PIL/ImageMorph.py create mode 100644 Lib/site-packages/PIL/ImageOps.py create mode 100644 Lib/site-packages/PIL/ImagePalette.py create mode 100644 Lib/site-packages/PIL/ImagePath.py create mode 100644 Lib/site-packages/PIL/ImageQt.py create mode 100644 Lib/site-packages/PIL/ImageSequence.py create mode 100644 Lib/site-packages/PIL/ImageShow.py create mode 100644 Lib/site-packages/PIL/ImageStat.py create mode 100644 Lib/site-packages/PIL/ImageTk.py create mode 100644 Lib/site-packages/PIL/ImageTransform.py create mode 100644 Lib/site-packages/PIL/ImageWin.py create mode 100644 Lib/site-packages/PIL/ImtImagePlugin.py create mode 100644 Lib/site-packages/PIL/IptcImagePlugin.py create mode 100644 Lib/site-packages/PIL/Jpeg2KImagePlugin.py create mode 100644 Lib/site-packages/PIL/JpegImagePlugin.py create mode 100644 Lib/site-packages/PIL/JpegPresets.py create mode 100644 Lib/site-packages/PIL/McIdasImagePlugin.py create mode 100644 Lib/site-packages/PIL/MicImagePlugin.py create mode 100644 Lib/site-packages/PIL/MpegImagePlugin.py create mode 100644 Lib/site-packages/PIL/MpoImagePlugin.py create mode 100644 Lib/site-packages/PIL/MspImagePlugin.py create mode 100644 Lib/site-packages/PIL/OleFileIO-README.md create mode 100644 Lib/site-packages/PIL/OleFileIO.py create mode 100644 Lib/site-packages/PIL/PSDraw.py create mode 100644 Lib/site-packages/PIL/PaletteFile.py create mode 100644 Lib/site-packages/PIL/PalmImagePlugin.py create mode 100644 Lib/site-packages/PIL/PcdImagePlugin.py create mode 100644 Lib/site-packages/PIL/PcfFontFile.py create mode 100644 Lib/site-packages/PIL/PcxImagePlugin.py create mode 100644 Lib/site-packages/PIL/PdfImagePlugin.py create mode 100644 Lib/site-packages/PIL/PixarImagePlugin.py create mode 100644 Lib/site-packages/PIL/PngImagePlugin.py create mode 100644 Lib/site-packages/PIL/PpmImagePlugin.py create mode 100644 Lib/site-packages/PIL/PsdImagePlugin.py create mode 100644 Lib/site-packages/PIL/PyAccess.py create mode 100644 Lib/site-packages/PIL/SgiImagePlugin.py create mode 100644 Lib/site-packages/PIL/SpiderImagePlugin.py create mode 100644 Lib/site-packages/PIL/SunImagePlugin.py create mode 100644 Lib/site-packages/PIL/TarIO.py create mode 100644 Lib/site-packages/PIL/TgaImagePlugin.py create mode 100644 Lib/site-packages/PIL/TiffImagePlugin.py create mode 100644 Lib/site-packages/PIL/TiffTags.py create mode 100644 Lib/site-packages/PIL/WalImageFile.py create mode 100644 Lib/site-packages/PIL/WebPImagePlugin.py create mode 100644 Lib/site-packages/PIL/WmfImagePlugin.py create mode 100644 Lib/site-packages/PIL/XVThumbImagePlugin.py create mode 100644 Lib/site-packages/PIL/XbmImagePlugin.py create mode 100644 Lib/site-packages/PIL/XpmImagePlugin.py create mode 100644 Lib/site-packages/PIL/__init__.py create mode 100644 Lib/site-packages/PIL/_binary.py create mode 100644 Lib/site-packages/PIL/_util.py create mode 100644 Lib/site-packages/PIL/features.py create mode 100644 Lib/site-packages/Pillow-3.1.0.dist-info/DESCRIPTION.rst create mode 100644 Lib/site-packages/Pillow-3.1.0.dist-info/METADATA create mode 100644 Lib/site-packages/Pillow-3.1.0.dist-info/RECORD create mode 100644 Lib/site-packages/Pillow-3.1.0.dist-info/WHEEL create mode 100644 Lib/site-packages/Pillow-3.1.0.dist-info/metadata.json create mode 100644 Lib/site-packages/Pillow-3.1.0.dist-info/top_level.txt create mode 
100644 Lib/site-packages/Pillow-3.1.0.dist-info/zip-safe create mode 100644 Lib/site-packages/SQLAlchemy-1.0.11.dist-info/DESCRIPTION.rst create mode 100644 Lib/site-packages/SQLAlchemy-1.0.11.dist-info/INSTALLER create mode 100644 Lib/site-packages/SQLAlchemy-1.0.11.dist-info/METADATA create mode 100644 Lib/site-packages/SQLAlchemy-1.0.11.dist-info/RECORD create mode 100644 Lib/site-packages/SQLAlchemy-1.0.11.dist-info/WHEEL create mode 100644 Lib/site-packages/SQLAlchemy-1.0.11.dist-info/metadata.json create mode 100644 Lib/site-packages/SQLAlchemy-1.0.11.dist-info/top_level.txt create mode 100644 Lib/site-packages/_markerlib/__init__.py create mode 100644 Lib/site-packages/_markerlib/markers.py create mode 100644 Lib/site-packages/cffi-1.5.0.dist-info/DESCRIPTION.rst create mode 100644 Lib/site-packages/cffi-1.5.0.dist-info/INSTALLER create mode 100644 Lib/site-packages/cffi-1.5.0.dist-info/METADATA create mode 100644 Lib/site-packages/cffi-1.5.0.dist-info/RECORD create mode 100644 Lib/site-packages/cffi-1.5.0.dist-info/WHEEL create mode 100644 Lib/site-packages/cffi-1.5.0.dist-info/entry_points.txt create mode 100644 Lib/site-packages/cffi-1.5.0.dist-info/metadata.json create mode 100644 Lib/site-packages/cffi-1.5.0.dist-info/top_level.txt create mode 100644 Lib/site-packages/cffi/__init__.py create mode 100644 Lib/site-packages/cffi/_cffi_include.h create mode 100644 Lib/site-packages/cffi/_embedding.h create mode 100644 Lib/site-packages/cffi/api.py create mode 100644 Lib/site-packages/cffi/backend_ctypes.py create mode 100644 Lib/site-packages/cffi/cffi_opcode.py create mode 100644 Lib/site-packages/cffi/commontypes.py create mode 100644 Lib/site-packages/cffi/cparser.py create mode 100644 Lib/site-packages/cffi/ffiplatform.py create mode 100644 Lib/site-packages/cffi/gc_weakref.py create mode 100644 Lib/site-packages/cffi/lock.py create mode 100644 Lib/site-packages/cffi/model.py create mode 100644 Lib/site-packages/cffi/parse_c_type.h create mode 100644 Lib/site-packages/cffi/recompiler.py create mode 100644 Lib/site-packages/cffi/setuptools_ext.py create mode 100644 Lib/site-packages/cffi/vengine_cpy.py create mode 100644 Lib/site-packages/cffi/vengine_gen.py create mode 100644 Lib/site-packages/cffi/verifier.py create mode 100644 Lib/site-packages/cryptography-1.2.2.dist-info/DESCRIPTION.rst create mode 100644 Lib/site-packages/cryptography-1.2.2.dist-info/INSTALLER create mode 100644 Lib/site-packages/cryptography-1.2.2.dist-info/METADATA create mode 100644 Lib/site-packages/cryptography-1.2.2.dist-info/RECORD create mode 100644 Lib/site-packages/cryptography-1.2.2.dist-info/WHEEL create mode 100644 Lib/site-packages/cryptography-1.2.2.dist-info/entry_points.txt create mode 100644 Lib/site-packages/cryptography-1.2.2.dist-info/metadata.json create mode 100644 Lib/site-packages/cryptography-1.2.2.dist-info/top_level.txt create mode 100644 Lib/site-packages/cryptography/__about__.py create mode 100644 Lib/site-packages/cryptography/__init__.py create mode 100644 Lib/site-packages/cryptography/exceptions.py create mode 100644 Lib/site-packages/cryptography/fernet.py create mode 100644 Lib/site-packages/cryptography/hazmat/__init__.py create mode 100644 Lib/site-packages/cryptography/hazmat/backends/__init__.py create mode 100644 Lib/site-packages/cryptography/hazmat/backends/commoncrypto/__init__.py create mode 100644 Lib/site-packages/cryptography/hazmat/backends/commoncrypto/backend.py create mode 100644 Lib/site-packages/cryptography/hazmat/backends/commoncrypto/ciphers.py 
create mode 100644 Lib/site-packages/cryptography/hazmat/backends/commoncrypto/hashes.py create mode 100644 Lib/site-packages/cryptography/hazmat/backends/commoncrypto/hmac.py create mode 100644 Lib/site-packages/cryptography/hazmat/backends/interfaces.py create mode 100644 Lib/site-packages/cryptography/hazmat/backends/multibackend.py create mode 100644 Lib/site-packages/cryptography/hazmat/backends/openssl/__init__.py create mode 100644 Lib/site-packages/cryptography/hazmat/backends/openssl/backend.py create mode 100644 Lib/site-packages/cryptography/hazmat/backends/openssl/ciphers.py create mode 100644 Lib/site-packages/cryptography/hazmat/backends/openssl/cmac.py create mode 100644 Lib/site-packages/cryptography/hazmat/backends/openssl/dsa.py create mode 100644 Lib/site-packages/cryptography/hazmat/backends/openssl/ec.py create mode 100644 Lib/site-packages/cryptography/hazmat/backends/openssl/hashes.py create mode 100644 Lib/site-packages/cryptography/hazmat/backends/openssl/hmac.py create mode 100644 Lib/site-packages/cryptography/hazmat/backends/openssl/rsa.py create mode 100644 Lib/site-packages/cryptography/hazmat/backends/openssl/utils.py create mode 100644 Lib/site-packages/cryptography/hazmat/backends/openssl/x509.py create mode 100644 Lib/site-packages/cryptography/hazmat/bindings/__init__.py create mode 100644 Lib/site-packages/cryptography/hazmat/bindings/commoncrypto/__init__.py create mode 100644 Lib/site-packages/cryptography/hazmat/bindings/commoncrypto/binding.py create mode 100644 Lib/site-packages/cryptography/hazmat/bindings/openssl/__init__.py create mode 100644 Lib/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py create mode 100644 Lib/site-packages/cryptography/hazmat/bindings/openssl/binding.py create mode 100644 Lib/site-packages/cryptography/hazmat/primitives/__init__.py create mode 100644 Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py create mode 100644 Lib/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py create mode 100644 Lib/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py create mode 100644 Lib/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py create mode 100644 Lib/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py create mode 100644 Lib/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py create mode 100644 Lib/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py create mode 100644 Lib/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py create mode 100644 Lib/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py create mode 100644 Lib/site-packages/cryptography/hazmat/primitives/ciphers/base.py create mode 100644 Lib/site-packages/cryptography/hazmat/primitives/ciphers/modes.py create mode 100644 Lib/site-packages/cryptography/hazmat/primitives/cmac.py create mode 100644 Lib/site-packages/cryptography/hazmat/primitives/constant_time.py create mode 100644 Lib/site-packages/cryptography/hazmat/primitives/hashes.py create mode 100644 Lib/site-packages/cryptography/hazmat/primitives/hmac.py create mode 100644 Lib/site-packages/cryptography/hazmat/primitives/interfaces/__init__.py create mode 100644 Lib/site-packages/cryptography/hazmat/primitives/kdf/__init__.py create mode 100644 Lib/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py create mode 100644 Lib/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py create mode 100644 Lib/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py create 
mode 100644 Lib/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py create mode 100644 Lib/site-packages/cryptography/hazmat/primitives/keywrap.py create mode 100644 Lib/site-packages/cryptography/hazmat/primitives/padding.py create mode 100644 Lib/site-packages/cryptography/hazmat/primitives/serialization.py create mode 100644 Lib/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py create mode 100644 Lib/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py create mode 100644 Lib/site-packages/cryptography/hazmat/primitives/twofactor/totp.py create mode 100644 Lib/site-packages/cryptography/hazmat/primitives/twofactor/utils.py create mode 100644 Lib/site-packages/cryptography/utils.py create mode 100644 Lib/site-packages/cryptography/x509/__init__.py create mode 100644 Lib/site-packages/cryptography/x509/base.py create mode 100644 Lib/site-packages/cryptography/x509/extensions.py create mode 100644 Lib/site-packages/cryptography/x509/general_name.py create mode 100644 Lib/site-packages/cryptography/x509/name.py create mode 100644 Lib/site-packages/cryptography/x509/oid.py create mode 100644 Lib/site-packages/easy_install.py create mode 100644 Lib/site-packages/idna-2.0.dist-info/DESCRIPTION.rst create mode 100644 Lib/site-packages/idna-2.0.dist-info/INSTALLER create mode 100644 Lib/site-packages/idna-2.0.dist-info/METADATA create mode 100644 Lib/site-packages/idna-2.0.dist-info/RECORD create mode 100644 Lib/site-packages/idna-2.0.dist-info/WHEEL create mode 100644 Lib/site-packages/idna-2.0.dist-info/metadata.json create mode 100644 Lib/site-packages/idna-2.0.dist-info/pbr.json create mode 100644 Lib/site-packages/idna-2.0.dist-info/top_level.txt create mode 100644 Lib/site-packages/idna/__init__.py create mode 100644 Lib/site-packages/idna/codec.py create mode 100644 Lib/site-packages/idna/compat.py create mode 100644 Lib/site-packages/idna/core.py create mode 100644 Lib/site-packages/idna/idnadata.py create mode 100644 Lib/site-packages/idna/uts46data.py create mode 100644 Lib/site-packages/lxml-3.5.0.dist-info/DESCRIPTION.rst create mode 100644 Lib/site-packages/lxml-3.5.0.dist-info/INSTALLER create mode 100644 Lib/site-packages/lxml-3.5.0.dist-info/METADATA create mode 100644 Lib/site-packages/lxml-3.5.0.dist-info/RECORD create mode 100644 Lib/site-packages/lxml-3.5.0.dist-info/WHEEL create mode 100644 Lib/site-packages/lxml-3.5.0.dist-info/metadata.json create mode 100644 Lib/site-packages/lxml-3.5.0.dist-info/top_level.txt create mode 100644 Lib/site-packages/lxml/ElementInclude.py create mode 100644 Lib/site-packages/lxml/__init__.py create mode 100644 Lib/site-packages/lxml/_elementpath.py create mode 100644 Lib/site-packages/lxml/builder.py create mode 100644 Lib/site-packages/lxml/cssselect.py create mode 100644 Lib/site-packages/lxml/doctestcompare.py create mode 100644 Lib/site-packages/lxml/html/ElementSoup.py create mode 100644 Lib/site-packages/lxml/html/__init__.py create mode 100644 Lib/site-packages/lxml/html/_diffcommand.py create mode 100644 Lib/site-packages/lxml/html/_html5builder.py create mode 100644 Lib/site-packages/lxml/html/_setmixin.py create mode 100644 Lib/site-packages/lxml/html/builder.py create mode 100644 Lib/site-packages/lxml/html/clean.py create mode 100644 Lib/site-packages/lxml/html/defs.py create mode 100644 Lib/site-packages/lxml/html/diff.py create mode 100644 Lib/site-packages/lxml/html/formfill.py create mode 100644 Lib/site-packages/lxml/html/html5parser.py create mode 100644 
Lib/site-packages/lxml/html/soupparser.py create mode 100644 Lib/site-packages/lxml/html/usedoctest.py create mode 100644 Lib/site-packages/lxml/includes/__init__.py create mode 100644 Lib/site-packages/lxml/includes/c14n.pxd create mode 100644 Lib/site-packages/lxml/includes/config.pxd create mode 100644 Lib/site-packages/lxml/includes/dtdvalid.pxd create mode 100644 Lib/site-packages/lxml/includes/etree_defs.h create mode 100644 Lib/site-packages/lxml/includes/etreepublic.pxd create mode 100644 Lib/site-packages/lxml/includes/htmlparser.pxd create mode 100644 Lib/site-packages/lxml/includes/lxml-version.h create mode 100644 Lib/site-packages/lxml/includes/relaxng.pxd create mode 100644 Lib/site-packages/lxml/includes/schematron.pxd create mode 100644 Lib/site-packages/lxml/includes/tree.pxd create mode 100644 Lib/site-packages/lxml/includes/uri.pxd create mode 100644 Lib/site-packages/lxml/includes/xinclude.pxd create mode 100644 Lib/site-packages/lxml/includes/xmlerror.pxd create mode 100644 Lib/site-packages/lxml/includes/xmlparser.pxd create mode 100644 Lib/site-packages/lxml/includes/xmlschema.pxd create mode 100644 Lib/site-packages/lxml/includes/xpath.pxd create mode 100644 Lib/site-packages/lxml/includes/xslt.pxd create mode 100644 Lib/site-packages/lxml/isoschematron/__init__.py create mode 100644 Lib/site-packages/lxml/isoschematron/resources/rng/iso-schematron.rng create mode 100644 Lib/site-packages/lxml/isoschematron/resources/xsl/RNG2Schtrn.xsl create mode 100644 Lib/site-packages/lxml/isoschematron/resources/xsl/XSD2Schtrn.xsl create mode 100644 Lib/site-packages/lxml/isoschematron/resources/xsl/iso-schematron-xslt1/iso_abstract_expand.xsl create mode 100644 Lib/site-packages/lxml/isoschematron/resources/xsl/iso-schematron-xslt1/iso_dsdl_include.xsl create mode 100644 Lib/site-packages/lxml/isoschematron/resources/xsl/iso-schematron-xslt1/iso_schematron_message.xsl create mode 100644 Lib/site-packages/lxml/isoschematron/resources/xsl/iso-schematron-xslt1/iso_schematron_skeleton_for_xslt1.xsl create mode 100644 Lib/site-packages/lxml/isoschematron/resources/xsl/iso-schematron-xslt1/iso_svrl_for_xslt1.xsl create mode 100644 Lib/site-packages/lxml/isoschematron/resources/xsl/iso-schematron-xslt1/readme.txt create mode 100644 Lib/site-packages/lxml/lxml.etree.h create mode 100644 Lib/site-packages/lxml/lxml.etree_api.h create mode 100644 Lib/site-packages/lxml/pyclasslookup.py create mode 100644 Lib/site-packages/lxml/sax.py create mode 100644 Lib/site-packages/lxml/usedoctest.py create mode 100644 Lib/site-packages/pip-8.0.2.dist-info/DESCRIPTION.rst create mode 100644 Lib/site-packages/pip-8.0.2.dist-info/METADATA create mode 100644 Lib/site-packages/pip-8.0.2.dist-info/RECORD create mode 100644 Lib/site-packages/pip-8.0.2.dist-info/WHEEL create mode 100644 Lib/site-packages/pip-8.0.2.dist-info/entry_points.txt create mode 100644 Lib/site-packages/pip-8.0.2.dist-info/metadata.json create mode 100644 Lib/site-packages/pip-8.0.2.dist-info/top_level.txt create mode 100644 Lib/site-packages/pip/__init__.py create mode 100644 Lib/site-packages/pip/__main__.py create mode 100644 Lib/site-packages/pip/_vendor/__init__.py create mode 100644 Lib/site-packages/pip/_vendor/_markerlib/__init__.py create mode 100644 Lib/site-packages/pip/_vendor/_markerlib/markers.py create mode 100644 Lib/site-packages/pip/_vendor/cachecontrol/__init__.py create mode 100644 Lib/site-packages/pip/_vendor/cachecontrol/_cmd.py create mode 100644 Lib/site-packages/pip/_vendor/cachecontrol/adapter.py create 
mode 100644 Lib/site-packages/pip/_vendor/cachecontrol/cache.py create mode 100644 Lib/site-packages/pip/_vendor/cachecontrol/caches/__init__.py create mode 100644 Lib/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py create mode 100644 Lib/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py create mode 100644 Lib/site-packages/pip/_vendor/cachecontrol/compat.py create mode 100644 Lib/site-packages/pip/_vendor/cachecontrol/controller.py create mode 100644 Lib/site-packages/pip/_vendor/cachecontrol/filewrapper.py create mode 100644 Lib/site-packages/pip/_vendor/cachecontrol/heuristics.py create mode 100644 Lib/site-packages/pip/_vendor/cachecontrol/serialize.py create mode 100644 Lib/site-packages/pip/_vendor/cachecontrol/wrapper.py create mode 100644 Lib/site-packages/pip/_vendor/colorama/__init__.py create mode 100644 Lib/site-packages/pip/_vendor/colorama/ansi.py create mode 100644 Lib/site-packages/pip/_vendor/colorama/ansitowin32.py create mode 100644 Lib/site-packages/pip/_vendor/colorama/initialise.py create mode 100644 Lib/site-packages/pip/_vendor/colorama/win32.py create mode 100644 Lib/site-packages/pip/_vendor/colorama/winterm.py create mode 100644 Lib/site-packages/pip/_vendor/distlib/__init__.py create mode 100644 Lib/site-packages/pip/_vendor/distlib/_backport/__init__.py create mode 100644 Lib/site-packages/pip/_vendor/distlib/_backport/misc.py create mode 100644 Lib/site-packages/pip/_vendor/distlib/_backport/shutil.py create mode 100644 Lib/site-packages/pip/_vendor/distlib/_backport/sysconfig.cfg create mode 100644 Lib/site-packages/pip/_vendor/distlib/_backport/sysconfig.py create mode 100644 Lib/site-packages/pip/_vendor/distlib/_backport/tarfile.py create mode 100644 Lib/site-packages/pip/_vendor/distlib/compat.py create mode 100644 Lib/site-packages/pip/_vendor/distlib/database.py create mode 100644 Lib/site-packages/pip/_vendor/distlib/index.py create mode 100644 Lib/site-packages/pip/_vendor/distlib/locators.py create mode 100644 Lib/site-packages/pip/_vendor/distlib/manifest.py create mode 100644 Lib/site-packages/pip/_vendor/distlib/markers.py create mode 100644 Lib/site-packages/pip/_vendor/distlib/metadata.py create mode 100644 Lib/site-packages/pip/_vendor/distlib/resources.py create mode 100644 Lib/site-packages/pip/_vendor/distlib/scripts.py create mode 100644 Lib/site-packages/pip/_vendor/distlib/t32.exe create mode 100644 Lib/site-packages/pip/_vendor/distlib/t64.exe create mode 100644 Lib/site-packages/pip/_vendor/distlib/util.py create mode 100644 Lib/site-packages/pip/_vendor/distlib/version.py create mode 100644 Lib/site-packages/pip/_vendor/distlib/w32.exe create mode 100644 Lib/site-packages/pip/_vendor/distlib/w64.exe create mode 100644 Lib/site-packages/pip/_vendor/distlib/wheel.py create mode 100644 Lib/site-packages/pip/_vendor/html5lib/__init__.py create mode 100644 Lib/site-packages/pip/_vendor/html5lib/constants.py create mode 100644 Lib/site-packages/pip/_vendor/html5lib/filters/__init__.py create mode 100644 Lib/site-packages/pip/_vendor/html5lib/filters/_base.py create mode 100644 Lib/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.py create mode 100644 Lib/site-packages/pip/_vendor/html5lib/filters/inject_meta_charset.py create mode 100644 Lib/site-packages/pip/_vendor/html5lib/filters/lint.py create mode 100644 Lib/site-packages/pip/_vendor/html5lib/filters/optionaltags.py create mode 100644 Lib/site-packages/pip/_vendor/html5lib/filters/sanitizer.py create mode 100644 
Lib/site-packages/pip/_vendor/html5lib/filters/whitespace.py create mode 100644 Lib/site-packages/pip/_vendor/html5lib/html5parser.py create mode 100644 Lib/site-packages/pip/_vendor/html5lib/ihatexml.py create mode 100644 Lib/site-packages/pip/_vendor/html5lib/inputstream.py create mode 100644 Lib/site-packages/pip/_vendor/html5lib/sanitizer.py create mode 100644 Lib/site-packages/pip/_vendor/html5lib/serializer/__init__.py create mode 100644 Lib/site-packages/pip/_vendor/html5lib/serializer/htmlserializer.py create mode 100644 Lib/site-packages/pip/_vendor/html5lib/tokenizer.py create mode 100644 Lib/site-packages/pip/_vendor/html5lib/treeadapters/__init__.py create mode 100644 Lib/site-packages/pip/_vendor/html5lib/treeadapters/sax.py create mode 100644 Lib/site-packages/pip/_vendor/html5lib/treebuilders/__init__.py create mode 100644 Lib/site-packages/pip/_vendor/html5lib/treebuilders/_base.py create mode 100644 Lib/site-packages/pip/_vendor/html5lib/treebuilders/dom.py create mode 100644 Lib/site-packages/pip/_vendor/html5lib/treebuilders/etree.py create mode 100644 Lib/site-packages/pip/_vendor/html5lib/treebuilders/etree_lxml.py create mode 100644 Lib/site-packages/pip/_vendor/html5lib/treewalkers/__init__.py create mode 100644 Lib/site-packages/pip/_vendor/html5lib/treewalkers/_base.py create mode 100644 Lib/site-packages/pip/_vendor/html5lib/treewalkers/dom.py create mode 100644 Lib/site-packages/pip/_vendor/html5lib/treewalkers/etree.py create mode 100644 Lib/site-packages/pip/_vendor/html5lib/treewalkers/genshistream.py create mode 100644 Lib/site-packages/pip/_vendor/html5lib/treewalkers/lxmletree.py create mode 100644 Lib/site-packages/pip/_vendor/html5lib/treewalkers/pulldom.py create mode 100644 Lib/site-packages/pip/_vendor/html5lib/trie/__init__.py create mode 100644 Lib/site-packages/pip/_vendor/html5lib/trie/_base.py create mode 100644 Lib/site-packages/pip/_vendor/html5lib/trie/datrie.py create mode 100644 Lib/site-packages/pip/_vendor/html5lib/trie/py.py create mode 100644 Lib/site-packages/pip/_vendor/html5lib/utils.py create mode 100644 Lib/site-packages/pip/_vendor/ipaddress.py create mode 100644 Lib/site-packages/pip/_vendor/lockfile/__init__.py create mode 100644 Lib/site-packages/pip/_vendor/lockfile/linklockfile.py create mode 100644 Lib/site-packages/pip/_vendor/lockfile/mkdirlockfile.py create mode 100644 Lib/site-packages/pip/_vendor/lockfile/pidlockfile.py create mode 100644 Lib/site-packages/pip/_vendor/lockfile/sqlitelockfile.py create mode 100644 Lib/site-packages/pip/_vendor/lockfile/symlinklockfile.py create mode 100644 Lib/site-packages/pip/_vendor/packaging/__about__.py create mode 100644 Lib/site-packages/pip/_vendor/packaging/__init__.py create mode 100644 Lib/site-packages/pip/_vendor/packaging/_compat.py create mode 100644 Lib/site-packages/pip/_vendor/packaging/_structures.py create mode 100644 Lib/site-packages/pip/_vendor/packaging/markers.py create mode 100644 Lib/site-packages/pip/_vendor/packaging/specifiers.py create mode 100644 Lib/site-packages/pip/_vendor/packaging/version.py create mode 100644 Lib/site-packages/pip/_vendor/pkg_resources/__init__.py create mode 100644 Lib/site-packages/pip/_vendor/progress/__init__.py create mode 100644 Lib/site-packages/pip/_vendor/progress/bar.py create mode 100644 Lib/site-packages/pip/_vendor/progress/counter.py create mode 100644 Lib/site-packages/pip/_vendor/progress/helpers.py create mode 100644 Lib/site-packages/pip/_vendor/progress/spinner.py create mode 100644 
Lib/site-packages/pip/_vendor/pyparsing.py create mode 100644 Lib/site-packages/pip/_vendor/re-vendor.py create mode 100644 Lib/site-packages/pip/_vendor/requests/__init__.py create mode 100644 Lib/site-packages/pip/_vendor/requests/adapters.py create mode 100644 Lib/site-packages/pip/_vendor/requests/api.py create mode 100644 Lib/site-packages/pip/_vendor/requests/auth.py create mode 100644 Lib/site-packages/pip/_vendor/requests/cacert.pem create mode 100644 Lib/site-packages/pip/_vendor/requests/certs.py create mode 100644 Lib/site-packages/pip/_vendor/requests/compat.py create mode 100644 Lib/site-packages/pip/_vendor/requests/cookies.py create mode 100644 Lib/site-packages/pip/_vendor/requests/exceptions.py create mode 100644 Lib/site-packages/pip/_vendor/requests/hooks.py create mode 100644 Lib/site-packages/pip/_vendor/requests/models.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/__init__.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/chardet/__init__.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/chardet/big5freq.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/chardet/big5prober.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/chardet/chardetect.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/chardet/chardistribution.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/chardet/charsetgroupprober.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/chardet/charsetprober.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/chardet/codingstatemachine.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/chardet/compat.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/chardet/constants.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/chardet/cp949prober.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/chardet/escprober.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/chardet/escsm.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/chardet/eucjpprober.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/chardet/euckrfreq.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/chardet/euckrprober.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/chardet/euctwfreq.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/chardet/euctwprober.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/chardet/gb2312freq.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/chardet/gb2312prober.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/chardet/hebrewprober.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/chardet/jisfreq.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/chardet/jpcntx.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/chardet/langbulgarianmodel.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/chardet/langcyrillicmodel.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/chardet/langgreekmodel.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/chardet/langhebrewmodel.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/chardet/langhungarianmodel.py create mode 100644 
Lib/site-packages/pip/_vendor/requests/packages/chardet/langthaimodel.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/chardet/latin1prober.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/chardet/mbcharsetprober.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/chardet/mbcsgroupprober.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/chardet/mbcssm.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/chardet/sbcharsetprober.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/chardet/sbcsgroupprober.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/chardet/sjisprober.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/chardet/universaldetector.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/chardet/utf8prober.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/urllib3/__init__.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/urllib3/_collections.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/urllib3/connection.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/urllib3/connectionpool.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/urllib3/contrib/__init__.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/urllib3/contrib/appengine.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/urllib3/contrib/ntlmpool.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/urllib3/contrib/pyopenssl.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/urllib3/exceptions.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/urllib3/fields.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/urllib3/filepost.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/urllib3/packages/__init__.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/urllib3/packages/ordered_dict.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/urllib3/packages/six.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/urllib3/poolmanager.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/urllib3/request.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/urllib3/response.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/urllib3/util/__init__.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/urllib3/util/connection.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/urllib3/util/request.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/urllib3/util/response.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/urllib3/util/retry.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/urllib3/util/ssl_.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/urllib3/util/timeout.py create mode 100644 Lib/site-packages/pip/_vendor/requests/packages/urllib3/util/url.py create mode 100644 Lib/site-packages/pip/_vendor/requests/sessions.py create mode 100644 
Lib/site-packages/pip/_vendor/requests/status_codes.py create mode 100644 Lib/site-packages/pip/_vendor/requests/structures.py create mode 100644 Lib/site-packages/pip/_vendor/requests/utils.py create mode 100644 Lib/site-packages/pip/_vendor/retrying.py create mode 100644 Lib/site-packages/pip/_vendor/six.py create mode 100644 Lib/site-packages/pip/basecommand.py create mode 100644 Lib/site-packages/pip/baseparser.py create mode 100644 Lib/site-packages/pip/cmdoptions.py create mode 100644 Lib/site-packages/pip/commands/__init__.py create mode 100644 Lib/site-packages/pip/commands/completion.py create mode 100644 Lib/site-packages/pip/commands/download.py create mode 100644 Lib/site-packages/pip/commands/freeze.py create mode 100644 Lib/site-packages/pip/commands/hash.py create mode 100644 Lib/site-packages/pip/commands/help.py create mode 100644 Lib/site-packages/pip/commands/install.py create mode 100644 Lib/site-packages/pip/commands/list.py create mode 100644 Lib/site-packages/pip/commands/search.py create mode 100644 Lib/site-packages/pip/commands/show.py create mode 100644 Lib/site-packages/pip/commands/uninstall.py create mode 100644 Lib/site-packages/pip/commands/wheel.py create mode 100644 Lib/site-packages/pip/compat/__init__.py create mode 100644 Lib/site-packages/pip/compat/dictconfig.py create mode 100644 Lib/site-packages/pip/download.py create mode 100644 Lib/site-packages/pip/exceptions.py create mode 100644 Lib/site-packages/pip/index.py create mode 100644 Lib/site-packages/pip/locations.py create mode 100644 Lib/site-packages/pip/models/__init__.py create mode 100644 Lib/site-packages/pip/models/index.py create mode 100644 Lib/site-packages/pip/operations/__init__.py create mode 100644 Lib/site-packages/pip/operations/freeze.py create mode 100644 Lib/site-packages/pip/pep425tags.py create mode 100644 Lib/site-packages/pip/req/__init__.py create mode 100644 Lib/site-packages/pip/req/req_file.py create mode 100644 Lib/site-packages/pip/req/req_install.py create mode 100644 Lib/site-packages/pip/req/req_set.py create mode 100644 Lib/site-packages/pip/req/req_uninstall.py create mode 100644 Lib/site-packages/pip/status_codes.py create mode 100644 Lib/site-packages/pip/utils/__init__.py create mode 100644 Lib/site-packages/pip/utils/appdirs.py create mode 100644 Lib/site-packages/pip/utils/build.py create mode 100644 Lib/site-packages/pip/utils/deprecation.py create mode 100644 Lib/site-packages/pip/utils/filesystem.py create mode 100644 Lib/site-packages/pip/utils/hashes.py create mode 100644 Lib/site-packages/pip/utils/logging.py create mode 100644 Lib/site-packages/pip/utils/outdated.py create mode 100644 Lib/site-packages/pip/utils/setuptools_build.py create mode 100644 Lib/site-packages/pip/utils/ui.py create mode 100644 Lib/site-packages/pip/vcs/__init__.py create mode 100644 Lib/site-packages/pip/vcs/bazaar.py create mode 100644 Lib/site-packages/pip/vcs/git.py create mode 100644 Lib/site-packages/pip/vcs/mercurial.py create mode 100644 Lib/site-packages/pip/vcs/subversion.py create mode 100644 Lib/site-packages/pip/wheel.py create mode 100644 Lib/site-packages/pkg_resources/__init__.py create mode 100644 Lib/site-packages/pkg_resources/_vendor/__init__.py create mode 100644 Lib/site-packages/pkg_resources/_vendor/packaging/__about__.py create mode 100644 Lib/site-packages/pkg_resources/_vendor/packaging/__init__.py create mode 100644 Lib/site-packages/pkg_resources/_vendor/packaging/_compat.py create mode 100644 
Lib/site-packages/pkg_resources/_vendor/packaging/_structures.py create mode 100644 Lib/site-packages/pkg_resources/_vendor/packaging/specifiers.py create mode 100644 Lib/site-packages/pkg_resources/_vendor/packaging/version.py create mode 100644 Lib/site-packages/pkg_resources/_vendor/six.py create mode 100644 Lib/site-packages/pkg_resources/extern/__init__.py create mode 100644 Lib/site-packages/pyOpenSSL-0.15.1.dist-info/DESCRIPTION.rst create mode 100644 Lib/site-packages/pyOpenSSL-0.15.1.dist-info/INSTALLER create mode 100644 Lib/site-packages/pyOpenSSL-0.15.1.dist-info/METADATA create mode 100644 Lib/site-packages/pyOpenSSL-0.15.1.dist-info/RECORD create mode 100644 Lib/site-packages/pyOpenSSL-0.15.1.dist-info/WHEEL create mode 100644 Lib/site-packages/pyOpenSSL-0.15.1.dist-info/metadata.json create mode 100644 Lib/site-packages/pyOpenSSL-0.15.1.dist-info/top_level.txt create mode 100644 Lib/site-packages/pyasn1-0.1.9.dist-info/DESCRIPTION.rst create mode 100644 Lib/site-packages/pyasn1-0.1.9.dist-info/INSTALLER create mode 100644 Lib/site-packages/pyasn1-0.1.9.dist-info/METADATA create mode 100644 Lib/site-packages/pyasn1-0.1.9.dist-info/RECORD create mode 100644 Lib/site-packages/pyasn1-0.1.9.dist-info/WHEEL create mode 100644 Lib/site-packages/pyasn1-0.1.9.dist-info/metadata.json create mode 100644 Lib/site-packages/pyasn1-0.1.9.dist-info/top_level.txt create mode 100644 Lib/site-packages/pyasn1-0.1.9.dist-info/zip-safe create mode 100644 Lib/site-packages/pyasn1/__init__.py create mode 100644 Lib/site-packages/pyasn1/codec/__init__.py create mode 100644 Lib/site-packages/pyasn1/codec/ber/__init__.py create mode 100644 Lib/site-packages/pyasn1/codec/ber/decoder.py create mode 100644 Lib/site-packages/pyasn1/codec/ber/encoder.py create mode 100644 Lib/site-packages/pyasn1/codec/ber/eoo.py create mode 100644 Lib/site-packages/pyasn1/codec/cer/__init__.py create mode 100644 Lib/site-packages/pyasn1/codec/cer/decoder.py create mode 100644 Lib/site-packages/pyasn1/codec/cer/encoder.py create mode 100644 Lib/site-packages/pyasn1/codec/der/__init__.py create mode 100644 Lib/site-packages/pyasn1/codec/der/decoder.py create mode 100644 Lib/site-packages/pyasn1/codec/der/encoder.py create mode 100644 Lib/site-packages/pyasn1/compat/__init__.py create mode 100644 Lib/site-packages/pyasn1/compat/binary.py create mode 100644 Lib/site-packages/pyasn1/compat/octets.py create mode 100644 Lib/site-packages/pyasn1/debug.py create mode 100644 Lib/site-packages/pyasn1/error.py create mode 100644 Lib/site-packages/pyasn1/type/__init__.py create mode 100644 Lib/site-packages/pyasn1/type/base.py create mode 100644 Lib/site-packages/pyasn1/type/char.py create mode 100644 Lib/site-packages/pyasn1/type/constraint.py create mode 100644 Lib/site-packages/pyasn1/type/error.py create mode 100644 Lib/site-packages/pyasn1/type/namedtype.py create mode 100644 Lib/site-packages/pyasn1/type/namedval.py create mode 100644 Lib/site-packages/pyasn1/type/tag.py create mode 100644 Lib/site-packages/pyasn1/type/tagmap.py create mode 100644 Lib/site-packages/pyasn1/type/univ.py create mode 100644 Lib/site-packages/pyasn1/type/useful.py create mode 100644 Lib/site-packages/pycparser-2.14.dist-info/DESCRIPTION.rst create mode 100644 Lib/site-packages/pycparser-2.14.dist-info/INSTALLER create mode 100644 Lib/site-packages/pycparser-2.14.dist-info/METADATA create mode 100644 Lib/site-packages/pycparser-2.14.dist-info/RECORD create mode 100644 Lib/site-packages/pycparser-2.14.dist-info/WHEEL create mode 100644 
Lib/site-packages/pycparser-2.14.dist-info/metadata.json create mode 100644 Lib/site-packages/pycparser-2.14.dist-info/top_level.txt create mode 100644 Lib/site-packages/pycparser/__init__.py create mode 100644 Lib/site-packages/pycparser/_ast_gen.py create mode 100644 Lib/site-packages/pycparser/_build_tables.py create mode 100644 Lib/site-packages/pycparser/_c_ast.cfg create mode 100644 Lib/site-packages/pycparser/ast_transforms.py create mode 100644 Lib/site-packages/pycparser/c_ast.py create mode 100644 Lib/site-packages/pycparser/c_generator.py create mode 100644 Lib/site-packages/pycparser/c_lexer.py create mode 100644 Lib/site-packages/pycparser/c_parser.py create mode 100644 Lib/site-packages/pycparser/lextab.py create mode 100644 Lib/site-packages/pycparser/ply/__init__.py create mode 100644 Lib/site-packages/pycparser/ply/cpp.py create mode 100644 Lib/site-packages/pycparser/ply/ctokens.py create mode 100644 Lib/site-packages/pycparser/ply/lex.py create mode 100644 Lib/site-packages/pycparser/ply/yacc.py create mode 100644 Lib/site-packages/pycparser/plyparser.py create mode 100644 Lib/site-packages/pycparser/yacctab.py create mode 100644 Lib/site-packages/pycrypto-2.6.1.dist-info/DESCRIPTION.rst create mode 100644 Lib/site-packages/pycrypto-2.6.1.dist-info/INSTALLER create mode 100644 Lib/site-packages/pycrypto-2.6.1.dist-info/METADATA create mode 100644 Lib/site-packages/pycrypto-2.6.1.dist-info/RECORD create mode 100644 Lib/site-packages/pycrypto-2.6.1.dist-info/WHEEL create mode 100644 Lib/site-packages/pycrypto-2.6.1.dist-info/metadata.json create mode 100644 Lib/site-packages/pycrypto-2.6.1.dist-info/top_level.txt create mode 100644 Lib/site-packages/setuptools-19.3.dist-info/DESCRIPTION.rst create mode 100644 Lib/site-packages/setuptools-19.3.dist-info/METADATA create mode 100644 Lib/site-packages/setuptools-19.3.dist-info/RECORD create mode 100644 Lib/site-packages/setuptools-19.3.dist-info/WHEEL create mode 100644 Lib/site-packages/setuptools-19.3.dist-info/dependency_links.txt create mode 100644 Lib/site-packages/setuptools-19.3.dist-info/entry_points.txt create mode 100644 Lib/site-packages/setuptools-19.3.dist-info/metadata.json create mode 100644 Lib/site-packages/setuptools-19.3.dist-info/top_level.txt create mode 100644 Lib/site-packages/setuptools-19.3.dist-info/zip-safe create mode 100644 Lib/site-packages/setuptools/__init__.py create mode 100644 Lib/site-packages/setuptools/archive_util.py create mode 100644 Lib/site-packages/setuptools/cli-32.exe create mode 100644 Lib/site-packages/setuptools/cli-64.exe create mode 100644 Lib/site-packages/setuptools/cli-arm-32.exe create mode 100644 Lib/site-packages/setuptools/cli.exe create mode 100644 Lib/site-packages/setuptools/command/__init__.py create mode 100644 Lib/site-packages/setuptools/command/alias.py create mode 100644 Lib/site-packages/setuptools/command/bdist_egg.py create mode 100644 Lib/site-packages/setuptools/command/bdist_rpm.py create mode 100644 Lib/site-packages/setuptools/command/bdist_wininst.py create mode 100644 Lib/site-packages/setuptools/command/build_ext.py create mode 100644 Lib/site-packages/setuptools/command/build_py.py create mode 100644 Lib/site-packages/setuptools/command/develop.py create mode 100644 Lib/site-packages/setuptools/command/easy_install.py create mode 100644 Lib/site-packages/setuptools/command/egg_info.py create mode 100644 Lib/site-packages/setuptools/command/install.py create mode 100644 Lib/site-packages/setuptools/command/install_egg_info.py create mode 100644 
Lib/site-packages/setuptools/command/install_lib.py create mode 100644 Lib/site-packages/setuptools/command/install_scripts.py create mode 100644 Lib/site-packages/setuptools/command/launcher manifest.xml create mode 100644 Lib/site-packages/setuptools/command/register.py create mode 100644 Lib/site-packages/setuptools/command/rotate.py create mode 100644 Lib/site-packages/setuptools/command/saveopts.py create mode 100644 Lib/site-packages/setuptools/command/sdist.py create mode 100644 Lib/site-packages/setuptools/command/setopt.py create mode 100644 Lib/site-packages/setuptools/command/test.py create mode 100644 Lib/site-packages/setuptools/command/upload_docs.py create mode 100644 Lib/site-packages/setuptools/depends.py create mode 100644 Lib/site-packages/setuptools/dist.py create mode 100644 Lib/site-packages/setuptools/extension.py create mode 100644 Lib/site-packages/setuptools/extern/__init__.py create mode 100644 Lib/site-packages/setuptools/gui-32.exe create mode 100644 Lib/site-packages/setuptools/gui-64.exe create mode 100644 Lib/site-packages/setuptools/gui-arm-32.exe create mode 100644 Lib/site-packages/setuptools/gui.exe create mode 100644 Lib/site-packages/setuptools/lib2to3_ex.py create mode 100644 Lib/site-packages/setuptools/msvc9_support.py create mode 100644 Lib/site-packages/setuptools/package_index.py create mode 100644 Lib/site-packages/setuptools/py26compat.py create mode 100644 Lib/site-packages/setuptools/py27compat.py create mode 100644 Lib/site-packages/setuptools/py31compat.py create mode 100644 Lib/site-packages/setuptools/sandbox.py create mode 100644 Lib/site-packages/setuptools/script (dev).tmpl create mode 100644 Lib/site-packages/setuptools/script.tmpl create mode 100644 Lib/site-packages/setuptools/site-patch.py create mode 100644 Lib/site-packages/setuptools/ssl_support.py create mode 100644 Lib/site-packages/setuptools/unicode_utils.py create mode 100644 Lib/site-packages/setuptools/utils.py create mode 100644 Lib/site-packages/setuptools/version.py create mode 100644 Lib/site-packages/setuptools/windows_support.py create mode 100644 Lib/site-packages/simplejson-3.8.1.dist-info/DESCRIPTION.rst create mode 100644 Lib/site-packages/simplejson-3.8.1.dist-info/INSTALLER create mode 100644 Lib/site-packages/simplejson-3.8.1.dist-info/METADATA create mode 100644 Lib/site-packages/simplejson-3.8.1.dist-info/RECORD create mode 100644 Lib/site-packages/simplejson-3.8.1.dist-info/WHEEL create mode 100644 Lib/site-packages/simplejson-3.8.1.dist-info/metadata.json create mode 100644 Lib/site-packages/simplejson-3.8.1.dist-info/top_level.txt create mode 100644 Lib/site-packages/simplejson/__init__.py create mode 100644 Lib/site-packages/simplejson/compat.py create mode 100644 Lib/site-packages/simplejson/decoder.py create mode 100644 Lib/site-packages/simplejson/encoder.py create mode 100644 Lib/site-packages/simplejson/ordered_dict.py create mode 100644 Lib/site-packages/simplejson/scanner.py create mode 100644 Lib/site-packages/simplejson/tests/__init__.py create mode 100644 Lib/site-packages/simplejson/tests/test_bigint_as_string.py create mode 100644 Lib/site-packages/simplejson/tests/test_bitsize_int_as_string.py create mode 100644 Lib/site-packages/simplejson/tests/test_check_circular.py create mode 100644 Lib/site-packages/simplejson/tests/test_decimal.py create mode 100644 Lib/site-packages/simplejson/tests/test_decode.py create mode 100644 Lib/site-packages/simplejson/tests/test_default.py create mode 100644 Lib/site-packages/simplejson/tests/test_dump.py 
create mode 100644 Lib/site-packages/simplejson/tests/test_encode_basestring_ascii.py create mode 100644 Lib/site-packages/simplejson/tests/test_encode_for_html.py create mode 100644 Lib/site-packages/simplejson/tests/test_errors.py create mode 100644 Lib/site-packages/simplejson/tests/test_fail.py create mode 100644 Lib/site-packages/simplejson/tests/test_float.py create mode 100644 Lib/site-packages/simplejson/tests/test_for_json.py create mode 100644 Lib/site-packages/simplejson/tests/test_indent.py create mode 100644 Lib/site-packages/simplejson/tests/test_item_sort_key.py create mode 100644 Lib/site-packages/simplejson/tests/test_iterable.py create mode 100644 Lib/site-packages/simplejson/tests/test_namedtuple.py create mode 100644 Lib/site-packages/simplejson/tests/test_pass1.py create mode 100644 Lib/site-packages/simplejson/tests/test_pass2.py create mode 100644 Lib/site-packages/simplejson/tests/test_pass3.py create mode 100644 Lib/site-packages/simplejson/tests/test_recursion.py create mode 100644 Lib/site-packages/simplejson/tests/test_scanstring.py create mode 100644 Lib/site-packages/simplejson/tests/test_separators.py create mode 100644 Lib/site-packages/simplejson/tests/test_speedups.py create mode 100644 Lib/site-packages/simplejson/tests/test_subclass.py create mode 100644 Lib/site-packages/simplejson/tests/test_tool.py create mode 100644 Lib/site-packages/simplejson/tests/test_tuple.py create mode 100644 Lib/site-packages/simplejson/tests/test_unicode.py create mode 100644 Lib/site-packages/simplejson/tool.py create mode 100644 Lib/site-packages/six-1.10.0.dist-info/DESCRIPTION.rst create mode 100644 Lib/site-packages/six-1.10.0.dist-info/INSTALLER create mode 100644 Lib/site-packages/six-1.10.0.dist-info/METADATA create mode 100644 Lib/site-packages/six-1.10.0.dist-info/RECORD create mode 100644 Lib/site-packages/six-1.10.0.dist-info/WHEEL create mode 100644 Lib/site-packages/six-1.10.0.dist-info/metadata.json create mode 100644 Lib/site-packages/six-1.10.0.dist-info/top_level.txt create mode 100644 Lib/site-packages/six.py create mode 100644 Lib/site-packages/sqlalchemy/__init__.py create mode 100644 Lib/site-packages/sqlalchemy/connectors/__init__.py create mode 100644 Lib/site-packages/sqlalchemy/connectors/mxodbc.py create mode 100644 Lib/site-packages/sqlalchemy/connectors/pyodbc.py create mode 100644 Lib/site-packages/sqlalchemy/connectors/zxJDBC.py create mode 100644 Lib/site-packages/sqlalchemy/databases/__init__.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/__init__.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/firebird/__init__.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/firebird/base.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/firebird/fdb.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/firebird/kinterbasdb.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/mssql/__init__.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/mssql/adodbapi.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/mssql/base.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/mssql/information_schema.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/mssql/mxodbc.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/mssql/pymssql.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/mssql/pyodbc.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/mssql/zxjdbc.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/mysql/__init__.py create mode 100644 
Lib/site-packages/sqlalchemy/dialects/mysql/base.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/mysql/cymysql.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/mysql/gaerdbms.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/mysql/mysqlconnector.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/mysql/mysqldb.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/mysql/oursql.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/mysql/pymysql.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/mysql/pyodbc.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/mysql/zxjdbc.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/oracle/__init__.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/oracle/base.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/oracle/cx_oracle.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/oracle/zxjdbc.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/postgres.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/postgresql/__init__.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/postgresql/base.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/postgresql/constraints.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/postgresql/hstore.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/postgresql/json.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/postgresql/pg8000.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/postgresql/psycopg2.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/postgresql/psycopg2cffi.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/postgresql/pypostgresql.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/postgresql/ranges.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/postgresql/zxjdbc.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/sqlite/__init__.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/sqlite/base.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/sqlite/pysqlcipher.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/sqlite/pysqlite.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/sybase/__init__.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/sybase/base.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/sybase/mxodbc.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/sybase/pyodbc.py create mode 100644 Lib/site-packages/sqlalchemy/dialects/sybase/pysybase.py create mode 100644 Lib/site-packages/sqlalchemy/engine/__init__.py create mode 100644 Lib/site-packages/sqlalchemy/engine/base.py create mode 100644 Lib/site-packages/sqlalchemy/engine/default.py create mode 100644 Lib/site-packages/sqlalchemy/engine/interfaces.py create mode 100644 Lib/site-packages/sqlalchemy/engine/reflection.py create mode 100644 Lib/site-packages/sqlalchemy/engine/result.py create mode 100644 Lib/site-packages/sqlalchemy/engine/strategies.py create mode 100644 Lib/site-packages/sqlalchemy/engine/threadlocal.py create mode 100644 Lib/site-packages/sqlalchemy/engine/url.py create mode 100644 Lib/site-packages/sqlalchemy/engine/util.py create mode 100644 Lib/site-packages/sqlalchemy/event/__init__.py create mode 100644 Lib/site-packages/sqlalchemy/event/api.py create mode 100644 Lib/site-packages/sqlalchemy/event/attr.py create mode 100644 Lib/site-packages/sqlalchemy/event/base.py create mode 100644 Lib/site-packages/sqlalchemy/event/legacy.py create 
mode 100644 Lib/site-packages/sqlalchemy/event/registry.py create mode 100644 Lib/site-packages/sqlalchemy/events.py create mode 100644 Lib/site-packages/sqlalchemy/exc.py create mode 100644 Lib/site-packages/sqlalchemy/ext/__init__.py create mode 100644 Lib/site-packages/sqlalchemy/ext/associationproxy.py create mode 100644 Lib/site-packages/sqlalchemy/ext/automap.py create mode 100644 Lib/site-packages/sqlalchemy/ext/baked.py create mode 100644 Lib/site-packages/sqlalchemy/ext/compiler.py create mode 100644 Lib/site-packages/sqlalchemy/ext/declarative/__init__.py create mode 100644 Lib/site-packages/sqlalchemy/ext/declarative/api.py create mode 100644 Lib/site-packages/sqlalchemy/ext/declarative/base.py create mode 100644 Lib/site-packages/sqlalchemy/ext/declarative/clsregistry.py create mode 100644 Lib/site-packages/sqlalchemy/ext/horizontal_shard.py create mode 100644 Lib/site-packages/sqlalchemy/ext/hybrid.py create mode 100644 Lib/site-packages/sqlalchemy/ext/instrumentation.py create mode 100644 Lib/site-packages/sqlalchemy/ext/mutable.py create mode 100644 Lib/site-packages/sqlalchemy/ext/orderinglist.py create mode 100644 Lib/site-packages/sqlalchemy/ext/serializer.py create mode 100644 Lib/site-packages/sqlalchemy/inspection.py create mode 100644 Lib/site-packages/sqlalchemy/interfaces.py create mode 100644 Lib/site-packages/sqlalchemy/log.py create mode 100644 Lib/site-packages/sqlalchemy/orm/__init__.py create mode 100644 Lib/site-packages/sqlalchemy/orm/attributes.py create mode 100644 Lib/site-packages/sqlalchemy/orm/base.py create mode 100644 Lib/site-packages/sqlalchemy/orm/collections.py create mode 100644 Lib/site-packages/sqlalchemy/orm/dependency.py create mode 100644 Lib/site-packages/sqlalchemy/orm/deprecated_interfaces.py create mode 100644 Lib/site-packages/sqlalchemy/orm/descriptor_props.py create mode 100644 Lib/site-packages/sqlalchemy/orm/dynamic.py create mode 100644 Lib/site-packages/sqlalchemy/orm/evaluator.py create mode 100644 Lib/site-packages/sqlalchemy/orm/events.py create mode 100644 Lib/site-packages/sqlalchemy/orm/exc.py create mode 100644 Lib/site-packages/sqlalchemy/orm/identity.py create mode 100644 Lib/site-packages/sqlalchemy/orm/instrumentation.py create mode 100644 Lib/site-packages/sqlalchemy/orm/interfaces.py create mode 100644 Lib/site-packages/sqlalchemy/orm/loading.py create mode 100644 Lib/site-packages/sqlalchemy/orm/mapper.py create mode 100644 Lib/site-packages/sqlalchemy/orm/path_registry.py create mode 100644 Lib/site-packages/sqlalchemy/orm/persistence.py create mode 100644 Lib/site-packages/sqlalchemy/orm/properties.py create mode 100644 Lib/site-packages/sqlalchemy/orm/query.py create mode 100644 Lib/site-packages/sqlalchemy/orm/relationships.py create mode 100644 Lib/site-packages/sqlalchemy/orm/scoping.py create mode 100644 Lib/site-packages/sqlalchemy/orm/session.py create mode 100644 Lib/site-packages/sqlalchemy/orm/state.py create mode 100644 Lib/site-packages/sqlalchemy/orm/strategies.py create mode 100644 Lib/site-packages/sqlalchemy/orm/strategy_options.py create mode 100644 Lib/site-packages/sqlalchemy/orm/sync.py create mode 100644 Lib/site-packages/sqlalchemy/orm/unitofwork.py create mode 100644 Lib/site-packages/sqlalchemy/orm/util.py create mode 100644 Lib/site-packages/sqlalchemy/pool.py create mode 100644 Lib/site-packages/sqlalchemy/processors.py create mode 100644 Lib/site-packages/sqlalchemy/schema.py create mode 100644 Lib/site-packages/sqlalchemy/sql/__init__.py create mode 100644 
Lib/site-packages/sqlalchemy/sql/annotation.py create mode 100644 Lib/site-packages/sqlalchemy/sql/base.py create mode 100644 Lib/site-packages/sqlalchemy/sql/compiler.py create mode 100644 Lib/site-packages/sqlalchemy/sql/crud.py create mode 100644 Lib/site-packages/sqlalchemy/sql/ddl.py create mode 100644 Lib/site-packages/sqlalchemy/sql/default_comparator.py create mode 100644 Lib/site-packages/sqlalchemy/sql/dml.py create mode 100644 Lib/site-packages/sqlalchemy/sql/elements.py create mode 100644 Lib/site-packages/sqlalchemy/sql/expression.py create mode 100644 Lib/site-packages/sqlalchemy/sql/functions.py create mode 100644 Lib/site-packages/sqlalchemy/sql/naming.py create mode 100644 Lib/site-packages/sqlalchemy/sql/operators.py create mode 100644 Lib/site-packages/sqlalchemy/sql/schema.py create mode 100644 Lib/site-packages/sqlalchemy/sql/selectable.py create mode 100644 Lib/site-packages/sqlalchemy/sql/sqltypes.py create mode 100644 Lib/site-packages/sqlalchemy/sql/type_api.py create mode 100644 Lib/site-packages/sqlalchemy/sql/util.py create mode 100644 Lib/site-packages/sqlalchemy/sql/visitors.py create mode 100644 Lib/site-packages/sqlalchemy/testing/__init__.py create mode 100644 Lib/site-packages/sqlalchemy/testing/assertions.py create mode 100644 Lib/site-packages/sqlalchemy/testing/assertsql.py create mode 100644 Lib/site-packages/sqlalchemy/testing/config.py create mode 100644 Lib/site-packages/sqlalchemy/testing/distutils_run.py create mode 100644 Lib/site-packages/sqlalchemy/testing/engines.py create mode 100644 Lib/site-packages/sqlalchemy/testing/entities.py create mode 100644 Lib/site-packages/sqlalchemy/testing/exclusions.py create mode 100644 Lib/site-packages/sqlalchemy/testing/fixtures.py create mode 100644 Lib/site-packages/sqlalchemy/testing/mock.py create mode 100644 Lib/site-packages/sqlalchemy/testing/pickleable.py create mode 100644 Lib/site-packages/sqlalchemy/testing/plugin/__init__.py create mode 100644 Lib/site-packages/sqlalchemy/testing/plugin/bootstrap.py create mode 100644 Lib/site-packages/sqlalchemy/testing/plugin/noseplugin.py create mode 100644 Lib/site-packages/sqlalchemy/testing/plugin/plugin_base.py create mode 100644 Lib/site-packages/sqlalchemy/testing/plugin/pytestplugin.py create mode 100644 Lib/site-packages/sqlalchemy/testing/profiling.py create mode 100644 Lib/site-packages/sqlalchemy/testing/provision.py create mode 100644 Lib/site-packages/sqlalchemy/testing/replay_fixture.py create mode 100644 Lib/site-packages/sqlalchemy/testing/requirements.py create mode 100644 Lib/site-packages/sqlalchemy/testing/runner.py create mode 100644 Lib/site-packages/sqlalchemy/testing/schema.py create mode 100644 Lib/site-packages/sqlalchemy/testing/suite/__init__.py create mode 100644 Lib/site-packages/sqlalchemy/testing/suite/test_ddl.py create mode 100644 Lib/site-packages/sqlalchemy/testing/suite/test_dialect.py create mode 100644 Lib/site-packages/sqlalchemy/testing/suite/test_insert.py create mode 100644 Lib/site-packages/sqlalchemy/testing/suite/test_reflection.py create mode 100644 Lib/site-packages/sqlalchemy/testing/suite/test_results.py create mode 100644 Lib/site-packages/sqlalchemy/testing/suite/test_select.py create mode 100644 Lib/site-packages/sqlalchemy/testing/suite/test_sequence.py create mode 100644 Lib/site-packages/sqlalchemy/testing/suite/test_types.py create mode 100644 Lib/site-packages/sqlalchemy/testing/suite/test_update_delete.py create mode 100644 Lib/site-packages/sqlalchemy/testing/util.py create mode 100644 
Lib/site-packages/sqlalchemy/testing/warnings.py create mode 100644 Lib/site-packages/sqlalchemy/types.py create mode 100644 Lib/site-packages/sqlalchemy/util/__init__.py create mode 100644 Lib/site-packages/sqlalchemy/util/_collections.py create mode 100644 Lib/site-packages/sqlalchemy/util/compat.py create mode 100644 Lib/site-packages/sqlalchemy/util/deprecations.py create mode 100644 Lib/site-packages/sqlalchemy/util/langhelpers.py create mode 100644 Lib/site-packages/sqlalchemy/util/queue.py create mode 100644 Lib/site-packages/sqlalchemy/util/topological.py create mode 100644 Lib/site-packages/wheel-0.26.0.dist-info/DESCRIPTION.rst create mode 100644 Lib/site-packages/wheel-0.26.0.dist-info/LICENSE.txt create mode 100644 Lib/site-packages/wheel-0.26.0.dist-info/METADATA create mode 100644 Lib/site-packages/wheel-0.26.0.dist-info/RECORD create mode 100644 Lib/site-packages/wheel-0.26.0.dist-info/WHEEL create mode 100644 Lib/site-packages/wheel-0.26.0.dist-info/entry_points.txt create mode 100644 Lib/site-packages/wheel-0.26.0.dist-info/metadata.json create mode 100644 Lib/site-packages/wheel-0.26.0.dist-info/top_level.txt create mode 100644 Lib/site-packages/wheel/__init__.py create mode 100644 Lib/site-packages/wheel/__main__.py create mode 100644 Lib/site-packages/wheel/archive.py create mode 100644 Lib/site-packages/wheel/bdist_wheel.py create mode 100644 Lib/site-packages/wheel/decorator.py create mode 100644 Lib/site-packages/wheel/egg2wheel.py create mode 100644 Lib/site-packages/wheel/eggnames.txt create mode 100644 Lib/site-packages/wheel/install.py create mode 100644 Lib/site-packages/wheel/metadata.py create mode 100644 Lib/site-packages/wheel/paths.py create mode 100644 Lib/site-packages/wheel/pep425tags.py create mode 100644 Lib/site-packages/wheel/pkginfo.py create mode 100644 Lib/site-packages/wheel/signatures/__init__.py create mode 100644 Lib/site-packages/wheel/signatures/djbec.py create mode 100644 Lib/site-packages/wheel/signatures/ed25519py.py create mode 100644 Lib/site-packages/wheel/signatures/keys.py create mode 100644 Lib/site-packages/wheel/test/__init__.py create mode 100644 Lib/site-packages/wheel/test/complex-dist/complexdist/__init__.py create mode 100644 Lib/site-packages/wheel/test/complex-dist/setup.py create mode 100644 Lib/site-packages/wheel/test/headers.dist/header.h create mode 100644 Lib/site-packages/wheel/test/headers.dist/headersdist.py create mode 100644 Lib/site-packages/wheel/test/headers.dist/setup.py create mode 100644 Lib/site-packages/wheel/test/pydist-schema.json create mode 100644 Lib/site-packages/wheel/test/simple.dist/setup.py create mode 100644 Lib/site-packages/wheel/test/simple.dist/simpledist/__init__.py create mode 100644 Lib/site-packages/wheel/test/test-1.0-py2.py3-none-win32.whl create mode 100644 Lib/site-packages/wheel/test/test_basic.py create mode 100644 Lib/site-packages/wheel/test/test_install.py create mode 100644 Lib/site-packages/wheel/test/test_keys.py create mode 100644 Lib/site-packages/wheel/test/test_paths.py create mode 100644 Lib/site-packages/wheel/test/test_ranking.py create mode 100644 Lib/site-packages/wheel/test/test_signatures.py create mode 100644 Lib/site-packages/wheel/test/test_tagopt.py create mode 100644 Lib/site-packages/wheel/test/test_tool.py create mode 100644 Lib/site-packages/wheel/test/test_wheelfile.py create mode 100644 Lib/site-packages/wheel/tool/__init__.py create mode 100644 Lib/site-packages/wheel/util.py create mode 100644 Lib/site-packages/wheel/wininst2wheel.py create mode 100644 
POPPLER_AUTHORS create mode 100644 POPPLER_BINARIES create mode 100644 POPPLER_README create mode 100644 POPPLER_README-XPDF create mode 100644 README.md create mode 100644 Scripts/createfontdatachunk.py create mode 100644 Scripts/easy_install-3.5.exe create mode 100644 Scripts/easy_install.exe create mode 100644 Scripts/explode.py create mode 100644 Scripts/painter.py create mode 100644 Scripts/pip.exe create mode 100644 Scripts/pip3.5.exe create mode 100644 Scripts/pip3.exe create mode 100644 Scripts/player.py create mode 100644 Scripts/thresholder.py create mode 100644 Scripts/viewer.py create mode 100644 Scripts/wheel.exe create mode 100644 freetype6.dll create mode 100644 get-pip.py create mode 100644 jpeg62.dll create mode 100644 libcairo-2.dll create mode 100644 libexpat-1.dll create mode 100644 libfontconfig-1.dll create mode 100644 libgcc_s_dw2-1.dll create mode 100644 libpixman-1-0.dll create mode 100644 libpng16-16.dll create mode 100644 libpoppler-cpp.dll create mode 100644 libpoppler.dll create mode 100644 libstdc++-6.dll create mode 100644 libtiff3.dll create mode 100644 pdftocairo.exe create mode 100644 pdftotext.exe create mode 100644 python.exe create mode 100644 python3.dll create mode 100644 python35.dll create mode 100644 python35.zip create mode 100644 pythonw.exe create mode 100644 pyvenv.cfg create mode 100644 sqlite3.dll create mode 100644 vcruntime140.dll create mode 100644 zlib1.dll diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..10f128e --- /dev/null +++ b/.gitignore @@ -0,0 +1,7 @@ +*.swp +*.pyc +*.pyo +*.pyd +__pycache__ +pip_cache +.DS_Store diff --git a/COPYING b/COPYING new file mode 100644 index 0000000..82fa1da --- /dev/null +++ b/COPYING @@ -0,0 +1,339 @@ + GNU GENERAL PUBLIC LICENSE + Version 2, June 1991 + + Copyright (C) 1989, 1991 Free Software Foundation, Inc., + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The licenses for most software are designed to take away your +freedom to share and change it. By contrast, the GNU General Public +License is intended to guarantee your freedom to share and change free +software--to make sure the software is free for all its users. This +General Public License applies to most of the Free Software +Foundation's software and to any other program whose authors commit to +using it. (Some other Free Software Foundation software is covered by +the GNU Lesser General Public License instead.) You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +this service if you wish), that you receive source code or can get it +if you want it, that you can change the software or use pieces of it +in new free programs; and that you know you can do these things. + + To protect your rights, we need to make restrictions that forbid +anyone to deny you these rights or to ask you to surrender the rights. +These restrictions translate to certain responsibilities for you if you +distribute copies of the software, or if you modify it. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must give the recipients all the rights that +you have. You must make sure that they, too, receive or can get the +source code. 
And you must show them these terms so they know their +rights. + + We protect your rights with two steps: (1) copyright the software, and +(2) offer you this license which gives you legal permission to copy, +distribute and/or modify the software. + + Also, for each author's protection and ours, we want to make certain +that everyone understands that there is no warranty for this free +software. If the software is modified by someone else and passed on, we +want its recipients to know that what they have is not the original, so +that any problems introduced by others will not reflect on the original +authors' reputations. + + Finally, any free program is threatened constantly by software +patents. We wish to avoid the danger that redistributors of a free +program will individually obtain patent licenses, in effect making the +program proprietary. To prevent this, we have made it clear that any +patent must be licensed for everyone's free use or not licensed at all. + + The precise terms and conditions for copying, distribution and +modification follow. + + GNU GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License applies to any program or other work which contains +a notice placed by the copyright holder saying it may be distributed +under the terms of this General Public License. The "Program", below, +refers to any such program or work, and a "work based on the Program" +means either the Program or any derivative work under copyright law: +that is to say, a work containing the Program or a portion of it, +either verbatim or with modifications and/or translated into another +language. (Hereinafter, translation is included without limitation in +the term "modification".) Each licensee is addressed as "you". + +Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running the Program is not restricted, and the output from the Program +is covered only if its contents constitute a work based on the +Program (independent of having been made by running the Program). +Whether that is true depends on what the Program does. + + 1. You may copy and distribute verbatim copies of the Program's +source code as you receive it, in any medium, provided that you +conspicuously and appropriately publish on each copy an appropriate +copyright notice and disclaimer of warranty; keep intact all the +notices that refer to this License and to the absence of any warranty; +and give any other recipients of the Program a copy of this License +along with the Program. + +You may charge a fee for the physical act of transferring a copy, and +you may at your option offer warranty protection in exchange for a fee. + + 2. You may modify your copy or copies of the Program or any portion +of it, thus forming a work based on the Program, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) You must cause the modified files to carry prominent notices + stating that you changed the files and the date of any change. + + b) You must cause any work that you distribute or publish, that in + whole or in part contains or is derived from the Program or any + part thereof, to be licensed as a whole at no charge to all third + parties under the terms of this License. 
+ + c) If the modified program normally reads commands interactively + when run, you must cause it, when started running for such + interactive use in the most ordinary way, to print or display an + announcement including an appropriate copyright notice and a + notice that there is no warranty (or else, saying that you provide + a warranty) and that users may redistribute the program under + these conditions, and telling the user how to view a copy of this + License. (Exception: if the Program itself is interactive but + does not normally print such an announcement, your work based on + the Program is not required to print an announcement.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Program, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. But when you +distribute the same sections as part of a whole which is a work based +on the Program, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Program. + +In addition, mere aggregation of another work not based on the Program +with the Program (or with a work based on the Program) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. You may copy and distribute the Program (or a work based on it, +under Section 2) in object code or executable form under the terms of +Sections 1 and 2 above provided that you also do one of the following: + + a) Accompany it with the complete corresponding machine-readable + source code, which must be distributed under the terms of Sections + 1 and 2 above on a medium customarily used for software interchange; or, + + b) Accompany it with a written offer, valid for at least three + years, to give any third party, for a charge no more than your + cost of physically performing source distribution, a complete + machine-readable copy of the corresponding source code, to be + distributed under the terms of Sections 1 and 2 above on a medium + customarily used for software interchange; or, + + c) Accompany it with the information you received as to the offer + to distribute corresponding source code. (This alternative is + allowed only for noncommercial distribution and only if you + received the program in object code or executable form with such + an offer, in accord with Subsection b above.) + +The source code for a work means the preferred form of the work for +making modifications to it. For an executable work, complete source +code means all the source code for all modules it contains, plus any +associated interface definition files, plus the scripts used to +control compilation and installation of the executable. However, as a +special exception, the source code distributed need not include +anything that is normally distributed (in either source or binary +form) with the major components (compiler, kernel, and so on) of the +operating system on which the executable runs, unless that component +itself accompanies the executable. 
+ +If distribution of executable or object code is made by offering +access to copy from a designated place, then offering equivalent +access to copy the source code from the same place counts as +distribution of the source code, even though third parties are not +compelled to copy the source along with the object code. + + 4. You may not copy, modify, sublicense, or distribute the Program +except as expressly provided under this License. Any attempt +otherwise to copy, modify, sublicense or distribute the Program is +void, and will automatically terminate your rights under this License. +However, parties who have received copies, or rights, from you under +this License will not have their licenses terminated so long as such +parties remain in full compliance. + + 5. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Program or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Program (or any work based on the +Program), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Program or works based on it. + + 6. Each time you redistribute the Program (or any work based on the +Program), the recipient automatically receives a license from the +original licensor to copy, distribute or modify the Program subject to +these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties to +this License. + + 7. If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Program at all. For example, if a patent +license would not permit royalty-free redistribution of the Program by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Program. + +If any portion of this section is held invalid or unenforceable under +any particular circumstance, the balance of the section is intended to +apply and the section as a whole is intended to apply in other +circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system, which is +implemented by public license practices. Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 8. 
If the distribution and/or use of the Program is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Program under this License +may add an explicit geographical distribution limitation excluding +those countries, so that distribution is permitted only in or among +countries not thus excluded. In such case, this License incorporates +the limitation as if written in the body of this License. + + 9. The Free Software Foundation may publish revised and/or new versions +of the General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + +Each version is given a distinguishing version number. If the Program +specifies a version number of this License which applies to it and "any +later version", you have the option of following the terms and conditions +either of that version or of any later version published by the Free +Software Foundation. If the Program does not specify a version number of +this License, you may choose any version ever published by the Free Software +Foundation. + + 10. If you wish to incorporate parts of the Program into other free +programs whose distribution conditions are different, write to the author +to ask for permission. For software which is copyrighted by the Free +Software Foundation, write to the Free Software Foundation; we sometimes +make exceptions for this. Our decision will be guided by the two goals +of preserving the free status of all derivatives of our free software and +of promoting the sharing and reuse of software generally. + + NO WARRANTY + + 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY +FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN +OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES +PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED +OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS +TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE +PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, +REPAIR OR CORRECTION. + + 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR +REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, +INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING +OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED +TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY +YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER +PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE +POSSIBILITY OF SUCH DAMAGES. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +convey the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. 
+ + + Copyright (C) + + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation; either version 2 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License along + with this program; if not, write to the Free Software Foundation, Inc., + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +Also add information on how to contact you by electronic and paper mail. + +If the program is interactive, make it output a short notice like this +when it starts in an interactive mode: + + Gnomovision version 69, Copyright (C) year name of author + Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, the commands you use may +be called something other than `show w' and `show c'; they could even be +mouse-clicks or menu items--whatever suits your program. + +You should also get your employer (if you work as a programmer) or your +school, if any, to sign a "copyright disclaimer" for the program, if +necessary. Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the program + `Gnomovision' (which makes passes at compilers) written by James Hacker. + + , 1 April 1989 + Ty Coon, President of Vice + +This General Public License does not permit incorporating your program into +proprietary programs. If your program is a subroutine library, you may +consider it more useful to permit linking proprietary applications with the +library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. diff --git a/COPYING3 b/COPYING3 new file mode 100644 index 0000000..818433e --- /dev/null +++ b/COPYING3 @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. 
Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. 
+ + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. 
This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. 
This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. 
+ + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. 
+ + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. 
+ + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. 
+ + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. 
If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. 
Interpretation of Sections 15 and 16.
+
+ If the disclaimer of warranty and limitation of liability provided
+above cannot be given local legal effect according to their terms,
+reviewing courts shall apply local law that most closely approximates
+an absolute waiver of all civil liability in connection with the
+Program, unless a warranty or assumption of liability accompanies a
+copy of the Program in return for a fee.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Programs
+
+ If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+ To do so, attach the following notices to the program. It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+ <one line to give the program's name and a brief idea of what it does.>
+ Copyright (C) <year> <name of author>
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation, either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+ If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+ <program> Copyright (C) <year> <name of author>
+ This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+ This is free software, and you are welcome to redistribute it
+ under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License. Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+ You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<http://www.gnu.org/licenses/>.
+
+ The GNU General Public License does not permit incorporating your program
+into proprietary programs. If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library. If this is what you want to do, use the GNU Lesser General
+Public License instead of this License. But first, please read
+<http://www.gnu.org/licenses/why-not-lgpl.html>.
diff --git a/Lib/site-packages/Crypto/Cipher/AES.py b/Lib/site-packages/Crypto/Cipher/AES.py
new file mode 100644
index 0000000..14f68d8
--- /dev/null
+++ b/Lib/site-packages/Crypto/Cipher/AES.py
@@ -0,0 +1,115 @@
+# -*- coding: utf-8 -*-
+#
+# Cipher/AES.py : AES
+#
+# ===================================================================
+# The contents of this file are dedicated to the public domain. To
+# the extent that dedication to the public domain is not available,
+# everyone is granted a worldwide, perpetual, royalty-free,
+# non-exclusive license to exercise all rights associated with the
+# contents of this file for any purpose whatsoever.
+# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== +"""AES symmetric cipher + +AES `(Advanced Encryption Standard)`__ is a symmetric block cipher standardized +by NIST_ . It has a fixed data block size of 16 bytes. +Its keys can be 128, 192, or 256 bits long. + +AES is very fast and secure, and it is the de facto standard for symmetric +encryption. + +As an example, encryption can be done as follows: + + >>> from Crypto.Cipher import AES + >>> from Crypto import Random + >>> + >>> key = b'Sixteen byte key' + >>> iv = Random.new().read(AES.block_size) + >>> cipher = AES.new(key, AES.MODE_CFB, iv) + >>> msg = iv + cipher.encrypt(b'Attack at dawn') + +.. __: http://en.wikipedia.org/wiki/Advanced_Encryption_Standard +.. _NIST: http://csrc.nist.gov/publications/fips/fips197/fips-197.pdf + +:undocumented: __revision__, __package__ +""" + +__revision__ = "$Id$" + +from Crypto.Cipher import blockalgo +from Crypto.Cipher import _AES + +class AESCipher (blockalgo.BlockAlgo): + """AES cipher object""" + + def __init__(self, key, *args, **kwargs): + """Initialize an AES cipher object + + See also `new()` at the module level.""" + blockalgo.BlockAlgo.__init__(self, _AES, key, *args, **kwargs) + +def new(key, *args, **kwargs): + """Create a new AES cipher + + :Parameters: + key : byte string + The secret key to use in the symmetric cipher. + It must be 16 (*AES-128*), 24 (*AES-192*), or 32 (*AES-256*) bytes long. + :Keywords: + mode : a *MODE_** constant + The chaining mode to use for encryption or decryption. + Default is `MODE_ECB`. + IV : byte string + The initialization vector to use for encryption or decryption. + + It is ignored for `MODE_ECB` and `MODE_CTR`. + + For `MODE_OPENPGP`, IV must be `block_size` bytes long for encryption + and `block_size` +2 bytes for decryption (in the latter case, it is + actually the *encrypted* IV which was prefixed to the ciphertext). + It is mandatory. + + For all other modes, it must be `block_size` bytes longs. It is optional and + when not present it will be given a default value of all zeroes. + counter : callable + (*Only* `MODE_CTR`). A stateful function that returns the next + *counter block*, which is a byte string of `block_size` bytes. + For better performance, use `Crypto.Util.Counter`. + segment_size : integer + (*Only* `MODE_CFB`).The number of bits the plaintext and ciphertext + are segmented in. + It must be a multiple of 8. If 0 or not specified, it will be assumed to be 8. + + :Return: an `AESCipher` object + """ + return AESCipher(key, *args, **kwargs) + +#: Electronic Code Book (ECB). See `blockalgo.MODE_ECB`. +MODE_ECB = 1 +#: Cipher-Block Chaining (CBC). See `blockalgo.MODE_CBC`. +MODE_CBC = 2 +#: Cipher FeedBack (CFB). See `blockalgo.MODE_CFB`. +MODE_CFB = 3 +#: This mode should not be used. +MODE_PGP = 4 +#: Output FeedBack (OFB). See `blockalgo.MODE_OFB`. +MODE_OFB = 5 +#: CounTer Mode (CTR). See `blockalgo.MODE_CTR`. +MODE_CTR = 6 +#: OpenPGP Mode. See `blockalgo.MODE_OPENPGP`. 
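+
+# A minimal, illustrative sketch of the MODE_CTR keywords documented in
+# new() above (not part of the original module text; the key and message
+# are arbitrary):
+#
+#     >>> from Crypto.Cipher import AES
+#     >>> from Crypto.Util import Counter
+#     >>> from Crypto import Random
+#     >>>
+#     >>> key = Random.new().read(16)
+#     >>> ctr = Counter.new(AES.block_size * 8)    # one full-size counter block
+#     >>> cipher = AES.new(key, AES.MODE_CTR, counter=ctr)
+#     >>> msg = cipher.encrypt(b'Attack at dawn')
+#     >>> # decryption needs a fresh counter with the same parameters
+#     >>> plain = AES.new(key, AES.MODE_CTR, counter=Counter.new(AES.block_size * 8)).decrypt(msg)
+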
+MODE_OPENPGP = 7 +#: Size of a data block (in bytes) +block_size = 16 +#: Size of a key (in bytes) +key_size = ( 16, 24, 32 ) + diff --git a/Lib/site-packages/Crypto/Cipher/ARC2.py b/Lib/site-packages/Crypto/Cipher/ARC2.py new file mode 100644 index 0000000..95f5da5 --- /dev/null +++ b/Lib/site-packages/Crypto/Cipher/ARC2.py @@ -0,0 +1,130 @@ +# -*- coding: utf-8 -*- +# +# Cipher/ARC2.py : ARC2.py +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== +"""RC2 symmetric cipher + +RC2_ (Rivest's Cipher version 2) is a symmetric block cipher designed +by Ron Rivest in 1987. The cipher started as a proprietary design, +that was reverse engineered and anonymously posted on Usenet in 1996. +For this reason, the algorithm was first called *Alleged* RC2 (ARC2), +since the company that owned RC2 (RSA Data Inc.) did not confirm whether +the details leaked into public domain were really correct. + +The company eventually published its full specification in RFC2268_. + +RC2 has a fixed data block size of 8 bytes. Length of its keys can vary from +8 to 128 bits. One particular property of RC2 is that the actual +cryptographic strength of the key (*effective key length*) can be reduced +via a parameter. + +Even though RC2 is not cryptographically broken, it has not been analyzed as +thoroughly as AES, which is also faster than RC2. + +New designs should not use RC2. + +As an example, encryption can be done as follows: + + >>> from Crypto.Cipher import ARC2 + >>> from Crypto import Random + >>> + >>> key = b'Sixteen byte key' + >>> iv = Random.new().read(ARC2.block_size) + >>> cipher = ARC2.new(key, ARC2.MODE_CFB, iv) + >>> msg = iv + cipher.encrypt(b'Attack at dawn') + +.. _RC2: http://en.wikipedia.org/wiki/RC2 +.. _RFC2268: http://tools.ietf.org/html/rfc2268 + +:undocumented: __revision__, __package__ +""" + +__revision__ = "$Id$" + +from Crypto.Cipher import blockalgo +from Crypto.Cipher import _ARC2 + +class RC2Cipher (blockalgo.BlockAlgo): + """RC2 cipher object""" + + def __init__(self, key, *args, **kwargs): + """Initialize an ARC2 cipher object + + See also `new()` at the module level.""" + blockalgo.BlockAlgo.__init__(self, _ARC2, key, *args, **kwargs) + +def new(key, *args, **kwargs): + """Create a new RC2 cipher + + :Parameters: + key : byte string + The secret key to use in the symmetric cipher. + Its length can vary from 1 to 128 bytes. + :Keywords: + mode : a *MODE_** constant + The chaining mode to use for encryption or decryption. + Default is `MODE_ECB`. + IV : byte string + The initialization vector to use for encryption or decryption. 
+ + It is ignored for `MODE_ECB` and `MODE_CTR`. + + For `MODE_OPENPGP`, IV must be `block_size` bytes long for encryption + and `block_size` +2 bytes for decryption (in the latter case, it is + actually the *encrypted* IV which was prefixed to the ciphertext). + It is mandatory. + + For all other modes, it must be `block_size` bytes longs. It is optional and + when not present it will be given a default value of all zeroes. + counter : callable + (*Only* `MODE_CTR`). A stateful function that returns the next + *counter block*, which is a byte string of `block_size` bytes. + For better performance, use `Crypto.Util.Counter`. + segment_size : integer + (*Only* `MODE_CFB`).The number of bits the plaintext and ciphertext + are segmented in. + It must be a multiple of 8. If 0 or not specified, it will be assumed to be 8. + effective_keylen : integer + Maximum cryptographic strength of the key, in bits. + It can vary from 0 to 1024. The default value is 1024. + + :Return: an `RC2Cipher` object + """ + return RC2Cipher(key, *args, **kwargs) + +#: Electronic Code Book (ECB). See `blockalgo.MODE_ECB`. +MODE_ECB = 1 +#: Cipher-Block Chaining (CBC). See `blockalgo.MODE_CBC`. +MODE_CBC = 2 +#: Cipher FeedBack (CFB). See `blockalgo.MODE_CFB`. +MODE_CFB = 3 +#: This mode should not be used. +MODE_PGP = 4 +#: Output FeedBack (OFB). See `blockalgo.MODE_OFB`. +MODE_OFB = 5 +#: CounTer Mode (CTR). See `blockalgo.MODE_CTR`. +MODE_CTR = 6 +#: OpenPGP Mode. See `blockalgo.MODE_OPENPGP`. +MODE_OPENPGP = 7 +#: Size of a data block (in bytes) +block_size = 8 +#: Size of a key (in bytes) +key_size = range(1,16+1) + diff --git a/Lib/site-packages/Crypto/Cipher/ARC4.py b/Lib/site-packages/Crypto/Cipher/ARC4.py new file mode 100644 index 0000000..12850c6 --- /dev/null +++ b/Lib/site-packages/Crypto/Cipher/ARC4.py @@ -0,0 +1,120 @@ +# -*- coding: utf-8 -*- +# +# Cipher/ARC4.py : ARC4 +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== +"""ARC4 symmetric cipher + +ARC4_ (Alleged RC4) is an implementation of RC4 (Rivest's Cipher version 4), +a symmetric stream cipher designed by Ron Rivest in 1987. + +The cipher started as a proprietary design, that was reverse engineered and +anonymously posted on Usenet in 1994. The company that owns RC4 (RSA Data +Inc.) never confirmed the correctness of the leaked algorithm. + +Unlike RC2, the company has never published the full specification of RC4, +of whom it still holds the trademark. + +ARC4 keys can vary in length from 40 to 2048 bits. + +One problem of ARC4 is that it does not take a nonce or an IV. 
If it is required +to encrypt multiple messages with the same long-term key, a distinct +independent nonce must be created for each message, and a short-term key must +be derived from the combination of the long-term key and the nonce. +Due to the weak key scheduling algorithm of RC2, the combination must be carried +out with a complex function (e.g. a cryptographic hash) and not by simply +concatenating key and nonce. + +New designs should not use ARC4. A good alternative is AES +(`Crypto.Cipher.AES`) in any of the modes that turn it into a stream cipher (OFB, CFB, or CTR). + +As an example, encryption can be done as follows: + + >>> from Crypto.Cipher import ARC4 + >>> from Crypto.Hash import SHA + >>> from Crypto import Random + >>> + >>> key = b'Very long and confidential key' + >>> nonce = Random.new().read(16) + >>> tempkey = SHA.new(key+nonce).digest() + >>> cipher = ARC4.new(tempkey) + >>> msg = nonce + cipher.encrypt(b'Open the pod bay doors, HAL') + +.. _ARC4: http://en.wikipedia.org/wiki/RC4 + +:undocumented: __revision__, __package__ +""" + +__revision__ = "$Id$" + +from Crypto.Cipher import _ARC4 + +class ARC4Cipher: + """ARC4 cipher object""" + + + def __init__(self, key, *args, **kwargs): + """Initialize an ARC4 cipher object + + See also `new()` at the module level.""" + + self._cipher = _ARC4.new(key, *args, **kwargs) + self.block_size = self._cipher.block_size + self.key_size = self._cipher.key_size + + def encrypt(self, plaintext): + """Encrypt a piece of data. + + :Parameters: + plaintext : byte string + The piece of data to encrypt. It can be of any size. + :Return: the encrypted data (byte string, as long as the + plaintext). + """ + return self._cipher.encrypt(plaintext) + + def decrypt(self, ciphertext): + """Decrypt a piece of data. + + :Parameters: + ciphertext : byte string + The piece of data to decrypt. It can be of any size. + :Return: the decrypted data (byte string, as long as the + ciphertext). + """ + return self._cipher.decrypt(ciphertext) + +def new(key, *args, **kwargs): + """Create a new ARC4 cipher + + :Parameters: + key : byte string + The secret key to use in the symmetric cipher. + It can have any length, with a minimum of 40 bytes. + Its cryptograpic strength is always capped to 2048 bits (256 bytes). + + :Return: an `ARC4Cipher` object + """ + return ARC4Cipher(key, *args, **kwargs) + +#: Size of a data block (in bytes) +block_size = 1 +#: Size of a key (in bytes) +key_size = range(1,256+1) + diff --git a/Lib/site-packages/Crypto/Cipher/Blowfish.py b/Lib/site-packages/Crypto/Cipher/Blowfish.py new file mode 100644 index 0000000..2953f3e --- /dev/null +++ b/Lib/site-packages/Crypto/Cipher/Blowfish.py @@ -0,0 +1,121 @@ +# -*- coding: utf-8 -*- +# +# Cipher/Blowfish.py : Blowfish +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== +"""Blowfish symmetric cipher + +Blowfish_ is a symmetric block cipher designed by Bruce Schneier. + +It has a fixed data block size of 8 bytes and its keys can vary in length +from 32 to 448 bits (4 to 56 bytes). + +Blowfish is deemed secure and it is fast. However, its keys should be chosen +to be big enough to withstand a brute force attack (e.g. at least 16 bytes). + +As an example, encryption can be done as follows: + + >>> from Crypto.Cipher import Blowfish + >>> from Crypto import Random + >>> from struct import pack + >>> + >>> bs = Blowfish.block_size + >>> key = b'An arbitrarily long key' + >>> iv = Random.new().read(bs) + >>> cipher = Blowfish.new(key, Blowfish.MODE_CBC, iv) + >>> plaintext = b'docendo discimus ' + >>> plen = bs - divmod(len(plaintext),bs)[1] + >>> padding = [plen]*plen + >>> padding = pack('b'*plen, *padding) + >>> msg = iv + cipher.encrypt(plaintext + padding) + +.. _Blowfish: http://www.schneier.com/blowfish.html + +:undocumented: __revision__, __package__ +""" + +__revision__ = "$Id$" + +from Crypto.Cipher import blockalgo +from Crypto.Cipher import _Blowfish + +class BlowfishCipher (blockalgo.BlockAlgo): + """Blowfish cipher object""" + + def __init__(self, key, *args, **kwargs): + """Initialize a Blowfish cipher object + + See also `new()` at the module level.""" + blockalgo.BlockAlgo.__init__(self, _Blowfish, key, *args, **kwargs) + +def new(key, *args, **kwargs): + """Create a new Blowfish cipher + + :Parameters: + key : byte string + The secret key to use in the symmetric cipher. + Its length can vary from 4 to 56 bytes. + :Keywords: + mode : a *MODE_** constant + The chaining mode to use for encryption or decryption. + Default is `MODE_ECB`. + IV : byte string + The initialization vector to use for encryption or decryption. + + It is ignored for `MODE_ECB` and `MODE_CTR`. + + For `MODE_OPENPGP`, IV must be `block_size` bytes long for encryption + and `block_size` +2 bytes for decryption (in the latter case, it is + actually the *encrypted* IV which was prefixed to the ciphertext). + It is mandatory. + + For all other modes, it must be `block_size` bytes longs. It is optional and + when not present it will be given a default value of all zeroes. + counter : callable + (*Only* `MODE_CTR`). A stateful function that returns the next + *counter block*, which is a byte string of `block_size` bytes. + For better performance, use `Crypto.Util.Counter`. + segment_size : integer + (*Only* `MODE_CFB`).The number of bits the plaintext and ciphertext + are segmented in. + It must be a multiple of 8. If 0 or not specified, it will be assumed to be 8. + + :Return: a `BlowfishCipher` object + """ + return BlowfishCipher(key, *args, **kwargs) + +#: Electronic Code Book (ECB). See `blockalgo.MODE_ECB`. +MODE_ECB = 1 +#: Cipher-Block Chaining (CBC). See `blockalgo.MODE_CBC`. +MODE_CBC = 2 +#: Cipher FeedBack (CFB). See `blockalgo.MODE_CFB`. +MODE_CFB = 3 +#: This mode should not be used. +MODE_PGP = 4 +#: Output FeedBack (OFB). See `blockalgo.MODE_OFB`. +MODE_OFB = 5 +#: CounTer Mode (CTR). See `blockalgo.MODE_CTR`. +MODE_CTR = 6 +#: OpenPGP Mode. See `blockalgo.MODE_OPENPGP`. 
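+
+# A small companion sketch for the CBC example in the module docstring above,
+# showing decryption and removal of the same padding (illustrative only;
+# `key` and `msg` are assumed to come from that example):
+#
+#     >>> iv, body = msg[:Blowfish.block_size], msg[Blowfish.block_size:]
+#     >>> cipher = Blowfish.new(key, Blowfish.MODE_CBC, iv)
+#     >>> padded = cipher.decrypt(body)
+#     >>> plaintext = padded[:-ord(padded[-1:])]    # last byte stores the pad length
+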
+MODE_OPENPGP = 7 +#: Size of a data block (in bytes) +block_size = 8 +#: Size of a key (in bytes) +key_size = range(4,56+1) + diff --git a/Lib/site-packages/Crypto/Cipher/CAST.py b/Lib/site-packages/Crypto/Cipher/CAST.py new file mode 100644 index 0000000..db85328 --- /dev/null +++ b/Lib/site-packages/Crypto/Cipher/CAST.py @@ -0,0 +1,123 @@ +# -*- coding: utf-8 -*- +# +# Cipher/CAST.py : CAST +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== +"""CAST-128 symmetric cipher + +CAST-128_ (or CAST5) is a symmetric block cipher specified in RFC2144_. + +It has a fixed data block size of 8 bytes. Its key can vary in length +from 40 to 128 bits. + +CAST is deemed to be cryptographically secure, but its usage is not widespread. +Keys of sufficient length should be used to prevent brute force attacks +(128 bits are recommended). + +As an example, encryption can be done as follows: + + >>> from Crypto.Cipher import CAST + >>> from Crypto import Random + >>> + >>> key = b'Sixteen byte key' + >>> iv = Random.new().read(CAST.block_size) + >>> cipher = CAST.new(key, CAST.MODE_OPENPGP, iv) + >>> plaintext = b'sona si latine loqueris ' + >>> msg = cipher.encrypt(plaintext) + >>> + ... + >>> eiv = msg[:CAST.block_size+2] + >>> ciphertext = msg[CAST.block_size+2:] + >>> cipher = CAST.new(key, CAST.MODE_OPENPGP, eiv) + >>> print cipher.decrypt(ciphertext) + +.. _CAST-128: http://en.wikipedia.org/wiki/CAST-128 +.. _RFC2144: http://tools.ietf.org/html/rfc2144 + +:undocumented: __revision__, __package__ +""" + +__revision__ = "$Id$" + +from Crypto.Cipher import blockalgo +from Crypto.Cipher import _CAST + +class CAST128Cipher(blockalgo.BlockAlgo): + """CAST-128 cipher object""" + + def __init__(self, key, *args, **kwargs): + """Initialize a CAST-128 cipher object + + See also `new()` at the module level.""" + blockalgo.BlockAlgo.__init__(self, _CAST, key, *args, **kwargs) + +def new(key, *args, **kwargs): + """Create a new CAST-128 cipher + + :Parameters: + key : byte string + The secret key to use in the symmetric cipher. + Its length may vary from 5 to 16 bytes. + :Keywords: + mode : a *MODE_** constant + The chaining mode to use for encryption or decryption. + Default is `MODE_ECB`. + IV : byte string + The initialization vector to use for encryption or decryption. + + It is ignored for `MODE_ECB` and `MODE_CTR`. + + For `MODE_OPENPGP`, IV must be `block_size` bytes long for encryption + and `block_size` +2 bytes for decryption (in the latter case, it is + actually the *encrypted* IV which was prefixed to the ciphertext). + It is mandatory. 
+ + For all other modes, it must be `block_size` bytes longs. It is optional and + when not present it will be given a default value of all zeroes. + counter : callable + (*Only* `MODE_CTR`). A stateful function that returns the next + *counter block*, which is a byte string of `block_size` bytes. + For better performance, use `Crypto.Util.Counter`. + segment_size : integer + (*Only* `MODE_CFB`).The number of bits the plaintext and ciphertext + are segmented in. + It must be a multiple of 8. If 0 or not specified, it will be assumed to be 8. + + :Return: an `CAST128Cipher` object + """ + return CAST128Cipher(key, *args, **kwargs) + +#: Electronic Code Book (ECB). See `blockalgo.MODE_ECB`. +MODE_ECB = 1 +#: Cipher-Block Chaining (CBC). See `blockalgo.MODE_CBC`. +MODE_CBC = 2 +#: Cipher FeedBack (CFB). See `blockalgo.MODE_CFB`. +MODE_CFB = 3 +#: This mode should not be used. +MODE_PGP = 4 +#: Output FeedBack (OFB). See `blockalgo.MODE_OFB`. +MODE_OFB = 5 +#: CounTer Mode (CTR). See `blockalgo.MODE_CTR`. +MODE_CTR = 6 +#: OpenPGP Mode. See `blockalgo.MODE_OPENPGP`. +MODE_OPENPGP = 7 +#: Size of a data block (in bytes) +block_size = 8 +#: Size of a key (in bytes) +key_size = range(5,16+1) diff --git a/Lib/site-packages/Crypto/Cipher/DES.py b/Lib/site-packages/Crypto/Cipher/DES.py new file mode 100644 index 0000000..2fae42f --- /dev/null +++ b/Lib/site-packages/Crypto/Cipher/DES.py @@ -0,0 +1,118 @@ +# -*- coding: utf-8 -*- +# +# Cipher/DES.py : DES +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== +"""DES symmetric cipher + +DES `(Data Encryption Standard)`__ is a symmetric block cipher standardized +by NIST_ . It has a fixed data block size of 8 bytes. +Its keys are 64 bits long, even though 8 bits were used for integrity (now they +are ignored) and do not contribute to securty. + +DES is cryptographically secure, but its key length is too short by nowadays +standards and it could be brute forced with some effort. + +DES should not be used for new designs. Use `AES`. + +As an example, encryption can be done as follows: + + >>> from Crypto.Cipher import DES3 + >>> from Crypto import Random + >>> + >>> key = b'Sixteen byte key' + >>> iv = Random.new().read(DES3.block_size) + >>> cipher = DES3.new(key, DES3.MODE_OFB, iv) + >>> plaintext = b'sona si latine loqueris ' + >>> msg = iv + cipher.encrypt(plaintext) + +.. __: http://en.wikipedia.org/wiki/Data_Encryption_Standard +.. 
_NIST: http://csrc.nist.gov/publications/fips/fips46-3/fips46-3.pdf + +:undocumented: __revision__, __package__ +""" + +__revision__ = "$Id$" + +from Crypto.Cipher import blockalgo +from Crypto.Cipher import _DES + +class DESCipher(blockalgo.BlockAlgo): + """DES cipher object""" + + def __init__(self, key, *args, **kwargs): + """Initialize a DES cipher object + + See also `new()` at the module level.""" + blockalgo.BlockAlgo.__init__(self, _DES, key, *args, **kwargs) + +def new(key, *args, **kwargs): + """Create a new DES cipher + + :Parameters: + key : byte string + The secret key to use in the symmetric cipher. + It must be 8 byte long. The parity bits will be ignored. + :Keywords: + mode : a *MODE_** constant + The chaining mode to use for encryption or decryption. + Default is `MODE_ECB`. + IV : byte string + The initialization vector to use for encryption or decryption. + + It is ignored for `MODE_ECB` and `MODE_CTR`. + + For `MODE_OPENPGP`, IV must be `block_size` bytes long for encryption + and `block_size` +2 bytes for decryption (in the latter case, it is + actually the *encrypted* IV which was prefixed to the ciphertext). + It is mandatory. + + For all other modes, it must be `block_size` bytes longs. It is optional and + when not present it will be given a default value of all zeroes. + counter : callable + (*Only* `MODE_CTR`). A stateful function that returns the next + *counter block*, which is a byte string of `block_size` bytes. + For better performance, use `Crypto.Util.Counter`. + segment_size : integer + (*Only* `MODE_CFB`).The number of bits the plaintext and ciphertext + are segmented in. + It must be a multiple of 8. If 0 or not specified, it will be assumed to be 8. + + :Return: an `DESCipher` object + """ + return DESCipher(key, *args, **kwargs) + +#: Electronic Code Book (ECB). See `blockalgo.MODE_ECB`. +MODE_ECB = 1 +#: Cipher-Block Chaining (CBC). See `blockalgo.MODE_CBC`. +MODE_CBC = 2 +#: Cipher FeedBack (CFB). See `blockalgo.MODE_CFB`. +MODE_CFB = 3 +#: This mode should not be used. +MODE_PGP = 4 +#: Output FeedBack (OFB). See `blockalgo.MODE_OFB`. +MODE_OFB = 5 +#: CounTer Mode (CTR). See `blockalgo.MODE_CTR`. +MODE_CTR = 6 +#: OpenPGP Mode. See `blockalgo.MODE_OPENPGP`. +MODE_OPENPGP = 7 +#: Size of a data block (in bytes) +block_size = 8 +#: Size of a key (in bytes) +key_size = 8 diff --git a/Lib/site-packages/Crypto/Cipher/DES3.py b/Lib/site-packages/Crypto/Cipher/DES3.py new file mode 100644 index 0000000..7fedac8 --- /dev/null +++ b/Lib/site-packages/Crypto/Cipher/DES3.py @@ -0,0 +1,133 @@ +# -*- coding: utf-8 -*- +# +# Cipher/DES3.py : DES3 +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== +"""Triple DES symmetric cipher + +`Triple DES`__ (or TDES or TDEA or 3DES) is a symmetric block cipher standardized by NIST_. +It has a fixed data block size of 8 bytes. Its keys are 128 (*Option 1*) or 192 +bits (*Option 2*) long. +However, 1 out of 8 bits is used for redundancy and do not contribute to +security. The effective key length is respectively 112 or 168 bits. + +TDES consists of the concatenation of 3 simple `DES` ciphers. + +The plaintext is first DES encrypted with *K1*, then decrypted with *K2*, +and finally encrypted again with *K3*. The ciphertext is decrypted in the reverse manner. + +The 192 bit key is a bundle of three 64 bit independent subkeys: *K1*, *K2*, and *K3*. + +The 128 bit key is split into *K1* and *K2*, whereas *K1=K3*. + +It is important that all subkeys are different, otherwise TDES would degrade to +single `DES`. + +TDES is cryptographically secure, even though it is neither as secure nor as fast +as `AES`. + +As an example, encryption can be done as follows: + + >>> from Crypto.Cipher import DES + >>> from Crypto import Random + >>> from Crypto.Util import Counter + >>> + >>> key = b'-8B key-' + >>> nonce = Random.new().read(DES.block_size/2) + >>> ctr = Counter.new(DES.block_size*8/2, prefix=nonce) + >>> cipher = DES.new(key, DES.MODE_CTR, counter=ctr) + >>> plaintext = b'We are no longer the knights who say ni!' + >>> msg = nonce + cipher.encrypt(plaintext) + +.. __: http://en.wikipedia.org/wiki/Triple_DES +.. _NIST: http://csrc.nist.gov/publications/nistpubs/800-67/SP800-67.pdf + +:undocumented: __revision__, __package__ +""" + +__revision__ = "$Id$" + +from Crypto.Cipher import blockalgo +from Crypto.Cipher import _DES3 + +class DES3Cipher(blockalgo.BlockAlgo): + """TDES cipher object""" + + def __init__(self, key, *args, **kwargs): + """Initialize a TDES cipher object + + See also `new()` at the module level.""" + blockalgo.BlockAlgo.__init__(self, _DES3, key, *args, **kwargs) + +def new(key, *args, **kwargs): + """Create a new TDES cipher + + :Parameters: + key : byte string + The secret key to use in the symmetric cipher. + It must be 16 or 24 bytes long. The parity bits will be ignored. + :Keywords: + mode : a *MODE_** constant + The chaining mode to use for encryption or decryption. + Default is `MODE_ECB`. + IV : byte string + The initialization vector to use for encryption or decryption. + + It is ignored for `MODE_ECB` and `MODE_CTR`. + + For `MODE_OPENPGP`, IV must be `block_size` bytes long for encryption + and `block_size` +2 bytes for decryption (in the latter case, it is + actually the *encrypted* IV which was prefixed to the ciphertext). + It is mandatory. + + For all other modes, it must be `block_size` bytes longs. It is optional and + when not present it will be given a default value of all zeroes. + counter : callable + (*Only* `MODE_CTR`). A stateful function that returns the next + *counter block*, which is a byte string of `block_size` bytes. + For better performance, use `Crypto.Util.Counter`. + segment_size : integer + (*Only* `MODE_CFB`).The number of bits the plaintext and ciphertext + are segmented in. + It must be a multiple of 8. If 0 or not specified, it will be assumed to be 8. + + :Attention: it is important that all 8 byte subkeys are different, + otherwise TDES would degrade to single `DES`. + :Return: an `DES3Cipher` object + """ + return DES3Cipher(key, *args, **kwargs) + +#: Electronic Code Book (ECB). See `blockalgo.MODE_ECB`. 
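+
+# A minimal, illustrative sketch of TDES itself with a 16-byte (two-key)
+# bundle, complementing the docstring above (key and message are arbitrary):
+#
+#     >>> from Crypto.Cipher import DES3
+#     >>> from Crypto import Random
+#     >>>
+#     >>> key = b'Sixteen byte key'    # K1 and K2 differ, so the bundle is valid
+#     >>> iv = Random.new().read(DES3.block_size)
+#     >>> cipher = DES3.new(key, DES3.MODE_CFB, iv)
+#     >>> msg = iv + cipher.encrypt(b'Attack at dawn')
+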
+MODE_ECB = 1 +#: Cipher-Block Chaining (CBC). See `blockalgo.MODE_CBC`. +MODE_CBC = 2 +#: Cipher FeedBack (CFB). See `blockalgo.MODE_CFB`. +MODE_CFB = 3 +#: This mode should not be used. +MODE_PGP = 4 +#: Output FeedBack (OFB). See `blockalgo.MODE_OFB`. +MODE_OFB = 5 +#: CounTer Mode (CTR). See `blockalgo.MODE_CTR`. +MODE_CTR = 6 +#: OpenPGP Mode. See `blockalgo.MODE_OPENPGP`. +MODE_OPENPGP = 7 +#: Size of a data block (in bytes) +block_size = 8 +#: Size of a key (in bytes) +key_size = ( 16, 24 ) diff --git a/Lib/site-packages/Crypto/Cipher/PKCS1_OAEP.py b/Lib/site-packages/Crypto/Cipher/PKCS1_OAEP.py new file mode 100644 index 0000000..a3130ba --- /dev/null +++ b/Lib/site-packages/Crypto/Cipher/PKCS1_OAEP.py @@ -0,0 +1,255 @@ +# -*- coding: utf-8 -*- +# +# Cipher/PKCS1_OAEP.py : PKCS#1 OAEP +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""RSA encryption protocol according to PKCS#1 OAEP + +See RFC3447__ or the `original RSA Labs specification`__ . + +This scheme is more properly called ``RSAES-OAEP``. + +As an example, a sender may encrypt a message in this way: + + >>> from Crypto.Cipher import PKCS1_OAEP + >>> from Crypto.PublicKey import RSA + >>> + >>> message = 'To be encrypted' + >>> key = RSA.importKey(open('pubkey.der').read()) + >>> cipher = PKCS1_OAEP.new(key) + >>> ciphertext = cipher.encrypt(message) + +At the receiver side, decryption can be done using the private part of +the RSA key: + + >>> key = RSA.importKey(open('privkey.der').read()) + >>> cipher = PKCS1_OAP.new(key) + >>> message = cipher.decrypt(ciphertext) + +:undocumented: __revision__, __package__ + +.. __: http://www.ietf.org/rfc/rfc3447.txt +.. __: http://www.rsa.com/rsalabs/node.asp?id=2125. +""" + + + +__revision__ = "$Id$" +__all__ = [ 'new', 'PKCS1OAEP_Cipher' ] + +import Crypto.Signature.PKCS1_PSS +import Crypto.Hash.SHA + +from Crypto.Util.py3compat import * +import Crypto.Util.number +from Crypto.Util.number import ceil_div +from Crypto.Util.strxor import strxor + +class PKCS1OAEP_Cipher: + """This cipher can perform PKCS#1 v1.5 OAEP encryption or decryption.""" + + def __init__(self, key, hashAlgo, mgfunc, label): + """Initialize this PKCS#1 OAEP cipher object. + + :Parameters: + key : an RSA key object + If a private half is given, both encryption and decryption are possible. + If a public half is given, only encryption is possible. + hashAlgo : hash object + The hash function to use. This can be a module under `Crypto.Hash` + or an existing hash object created from any of such modules. If not specified, + `Crypto.Hash.SHA` (that is, SHA-1) is used. 
+ mgfunc : callable + A mask generation function that accepts two parameters: a string to + use as seed, and the lenth of the mask to generate, in bytes. + If not specified, the standard MGF1 is used (a safe choice). + label : string + A label to apply to this particular encryption. If not specified, + an empty string is used. Specifying a label does not improve + security. + + :attention: Modify the mask generation function only if you know what you are doing. + Sender and receiver must use the same one. + """ + self._key = key + + if hashAlgo: + self._hashObj = hashAlgo + else: + self._hashObj = Crypto.Hash.SHA + + if mgfunc: + self._mgf = mgfunc + else: + self._mgf = lambda x,y: Crypto.Signature.PKCS1_PSS.MGF1(x,y,self._hashObj) + + self._label = label + + def can_encrypt(self): + """Return True/1 if this cipher object can be used for encryption.""" + return self._key.can_encrypt() + + def can_decrypt(self): + """Return True/1 if this cipher object can be used for decryption.""" + return self._key.can_decrypt() + + def encrypt(self, message): + """Produce the PKCS#1 OAEP encryption of a message. + + This function is named ``RSAES-OAEP-ENCRYPT``, and is specified in + section 7.1.1 of RFC3447. + + :Parameters: + message : string + The message to encrypt, also known as plaintext. It can be of + variable length, but not longer than the RSA modulus (in bytes) + minus 2, minus twice the hash output size. + + :Return: A string, the ciphertext in which the message is encrypted. + It is as long as the RSA modulus (in bytes). + :Raise ValueError: + If the RSA key length is not sufficiently long to deal with the given + message. + """ + # TODO: Verify the key is RSA + + randFunc = self._key._randfunc + + # See 7.1.1 in RFC3447 + modBits = Crypto.Util.number.size(self._key.n) + k = ceil_div(modBits,8) # Convert from bits to bytes + hLen = self._hashObj.digest_size + mLen = len(message) + + # Step 1b + ps_len = k-mLen-2*hLen-2 + if ps_len<0: + raise ValueError("Plaintext is too long.") + # Step 2a + lHash = self._hashObj.new(self._label).digest() + # Step 2b + ps = bchr(0x00)*ps_len + # Step 2c + db = lHash + ps + bchr(0x01) + message + # Step 2d + ros = randFunc(hLen) + # Step 2e + dbMask = self._mgf(ros, k-hLen-1) + # Step 2f + maskedDB = strxor(db, dbMask) + # Step 2g + seedMask = self._mgf(maskedDB, hLen) + # Step 2h + maskedSeed = strxor(ros, seedMask) + # Step 2i + em = bchr(0x00) + maskedSeed + maskedDB + # Step 3a (OS2IP), step 3b (RSAEP), part of step 3c (I2OSP) + m = self._key.encrypt(em, 0)[0] + # Complete step 3c (I2OSP) + c = bchr(0x00)*(k-len(m)) + m + return c + + def decrypt(self, ct): + """Decrypt a PKCS#1 OAEP ciphertext. + + This function is named ``RSAES-OAEP-DECRYPT``, and is specified in + section 7.1.2 of RFC3447. + + :Parameters: + ct : string + The ciphertext that contains the message to recover. + + :Return: A string, the original message. + :Raise ValueError: + If the ciphertext length is incorrect, or if the decryption does not + succeed. + :Raise TypeError: + If the RSA key has no private half. 
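+
+ As a short, illustrative sketch (reusing the names of the module-level
+ example, so not normative), a decryption failure should be caught and
+ reported as one generic error:
+
+ >>> try:
+ ...     message = cipher.decrypt(ciphertext)
+ ... except ValueError:
+ ...     message = None    # reject, without revealing why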
+ """ + # TODO: Verify the key is RSA + + # See 7.1.2 in RFC3447 + modBits = Crypto.Util.number.size(self._key.n) + k = ceil_div(modBits,8) # Convert from bits to bytes + hLen = self._hashObj.digest_size + + # Step 1b and 1c + if len(ct) != k or k>> from Crypto.Cipher import PKCS1_v1_5 + >>> from Crypto.PublicKey import RSA + >>> from Crypto.Hash import SHA + >>> + >>> message = 'To be encrypted' + >>> h = SHA.new(message) + >>> + >>> key = RSA.importKey(open('pubkey.der').read()) + >>> cipher = PKCS1_v1_5.new(key) + >>> ciphertext = cipher.encrypt(message+h.digest()) + +At the receiver side, decryption can be done using the private part of +the RSA key: + + >>> From Crypto.Hash import SHA + >>> from Crypto import Random + >>> + >>> key = RSA.importKey(open('privkey.der').read()) + >>> + >>> dsize = SHA.digest_size + >>> sentinel = Random.new().read(15+dsize) # Let's assume that average data length is 15 + >>> + >>> cipher = PKCS1_v1_5.new(key) + >>> message = cipher.decrypt(ciphertext, sentinel) + >>> + >>> digest = SHA.new(message[:-dsize]).digest() + >>> if digest==message[-dsize:]: # Note how we DO NOT look for the sentinel + >>> print "Encryption was correct." + >>> else: + >>> print "Encryption was not correct." + +:undocumented: __revision__, __package__ + +.. __: http://www.ietf.org/rfc/rfc3447.txt +.. __: http://www.rsa.com/rsalabs/node.asp?id=2125. +""" + +__revision__ = "$Id$" +__all__ = [ 'new', 'PKCS115_Cipher' ] + +from Crypto.Util.number import ceil_div +from Crypto.Util.py3compat import * +import Crypto.Util.number + +class PKCS115_Cipher: + """This cipher can perform PKCS#1 v1.5 RSA encryption or decryption.""" + + def __init__(self, key): + """Initialize this PKCS#1 v1.5 cipher object. + + :Parameters: + key : an RSA key object + If a private half is given, both encryption and decryption are possible. + If a public half is given, only encryption is possible. + """ + self._key = key + + def can_encrypt(self): + """Return True if this cipher object can be used for encryption.""" + return self._key.can_encrypt() + + def can_decrypt(self): + """Return True if this cipher object can be used for decryption.""" + return self._key.can_decrypt() + + def encrypt(self, message): + """Produce the PKCS#1 v1.5 encryption of a message. + + This function is named ``RSAES-PKCS1-V1_5-ENCRYPT``, and is specified in + section 7.2.1 of RFC3447. + For a complete example see `Crypto.Cipher.PKCS1_v1_5`. + + :Parameters: + message : byte string + The message to encrypt, also known as plaintext. It can be of + variable length, but not longer than the RSA modulus (in bytes) minus 11. + + :Return: A byte string, the ciphertext in which the message is encrypted. + It is as long as the RSA modulus (in bytes). + :Raise ValueError: + If the RSA key length is not sufficiently long to deal with the given + message. 
+ + """ + # TODO: Verify the key is RSA + + randFunc = self._key._randfunc + + # See 7.2.1 in RFC3447 + modBits = Crypto.Util.number.size(self._key.n) + k = ceil_div(modBits,8) # Convert from bits to bytes + mLen = len(message) + + # Step 1 + if mLen > k-11: + raise ValueError("Plaintext is too long.") + # Step 2a + class nonZeroRandByte: + def __init__(self, rf): self.rf=rf + def __call__(self, c): + while bord(c)==0x00: c=self.rf(1)[0] + return c + ps = tobytes(list(map(nonZeroRandByte(randFunc), randFunc(k-mLen-3)))) + # Step 2b + em = b('\x00\x02') + ps + bchr(0x00) + message + # Step 3a (OS2IP), step 3b (RSAEP), part of step 3c (I2OSP) + m = self._key.encrypt(em, 0)[0] + # Complete step 3c (I2OSP) + c = bchr(0x00)*(k-len(m)) + m + return c + + def decrypt(self, ct, sentinel): + """Decrypt a PKCS#1 v1.5 ciphertext. + + This function is named ``RSAES-PKCS1-V1_5-DECRYPT``, and is specified in + section 7.2.2 of RFC3447. + For a complete example see `Crypto.Cipher.PKCS1_v1_5`. + + :Parameters: + ct : byte string + The ciphertext that contains the message to recover. + sentinel : any type + The object to return to indicate that an error was detected during decryption. + + :Return: A byte string. It is either the original message or the ``sentinel`` (in case of an error). + :Raise ValueError: + If the ciphertext length is incorrect + :Raise TypeError: + If the RSA key has no private half. + + :attention: + You should **never** let the party who submitted the ciphertext know that + this function returned the ``sentinel`` value. + Armed with such knowledge (for a fair amount of carefully crafted but invalid ciphertexts), + an attacker is able to recontruct the plaintext of any other encryption that were carried out + with the same RSA public key (see `Bleichenbacher's`__ attack). + + In general, it should not be possible for the other party to distinguish + whether processing at the server side failed because the value returned + was a ``sentinel`` as opposed to a random, invalid message. + + In fact, the second option is not that unlikely: encryption done according to PKCS#1 v1.5 + embeds no good integrity check. There is roughly one chance + in 2^16 for a random ciphertext to be returned as a valid message + (although random looking). + + It is therefore advisabled to: + + 1. Select as ``sentinel`` a value that resembles a plausable random, invalid message. + 2. Not report back an error as soon as you detect a ``sentinel`` value. + Put differently, you should not explicitly check if the returned value is the ``sentinel`` or not. + 3. Cover all possible errors with a single, generic error indicator. + 4. Embed into the definition of ``message`` (at the protocol level) a digest (e.g. ``SHA-1``). + It is recommended for it to be the rightmost part ``message``. + 5. Where possible, monitor the number of errors due to ciphertexts originating from the same party, + and slow down the rate of the requests from such party (or even blacklist it altogether). + + **If you are designing a new protocol, consider using the more robust PKCS#1 OAEP.** + + .. 
__: http://www.bell-labs.com/user/bleichen/papers/pkcs.ps + + """ + + # TODO: Verify the key is RSA + + # See 7.2.1 in RFC3447 + modBits = Crypto.Util.number.size(self._key.n) + k = ceil_div(modBits,8) # Convert from bits to bytes + + # Step 1 + if len(ct) != k: + raise ValueError("Ciphertext with incorrect length.") + # Step 2a (O2SIP), 2b (RSADP), and part of 2c (I2OSP) + m = self._key.decrypt(ct) + # Complete step 2c (I2OSP) + em = bchr(0x00)*(k-len(m)) + m + # Step 3 + sep = em.find(bchr(0x00),2) + if not em.startswith(b('\x00\x02')) or sep<10: + return sentinel + # Step 4 + return em[sep+1:] + +def new(key): + """Return a cipher object `PKCS115_Cipher` that can be used to perform PKCS#1 v1.5 encryption or decryption. + + :Parameters: + key : RSA key object + The key to use to encrypt or decrypt the message. This is a `Crypto.PublicKey.RSA` object. + Decryption is only possible if *key* is a private RSA key. + + """ + return PKCS115_Cipher(key) + diff --git a/Lib/site-packages/Crypto/Cipher/XOR.py b/Lib/site-packages/Crypto/Cipher/XOR.py new file mode 100644 index 0000000..7e0e69d --- /dev/null +++ b/Lib/site-packages/Crypto/Cipher/XOR.py @@ -0,0 +1,86 @@ +# -*- coding: utf-8 -*- +# +# Cipher/XOR.py : XOR +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== +"""XOR toy cipher + +XOR is one the simplest stream ciphers. Encryption and decryption are +performed by XOR-ing data with a keystream made by contatenating +the key. + +Do not use it for real applications! + +:undocumented: __revision__, __package__ +""" + +__revision__ = "$Id$" + +from Crypto.Cipher import _XOR + +class XORCipher: + """XOR cipher object""" + + def __init__(self, key, *args, **kwargs): + """Initialize a XOR cipher object + + See also `new()` at the module level.""" + self._cipher = _XOR.new(key, *args, **kwargs) + self.block_size = self._cipher.block_size + self.key_size = self._cipher.key_size + + def encrypt(self, plaintext): + """Encrypt a piece of data. + + :Parameters: + plaintext : byte string + The piece of data to encrypt. It can be of any size. + :Return: the encrypted data (byte string, as long as the + plaintext). + """ + return self._cipher.encrypt(plaintext) + + def decrypt(self, ciphertext): + """Decrypt a piece of data. + + :Parameters: + ciphertext : byte string + The piece of data to decrypt. It can be of any size. + :Return: the decrypted data (byte string, as long as the + ciphertext). + """ + return self._cipher.decrypt(ciphertext) + +def new(key, *args, **kwargs): + """Create a new XOR cipher + + :Parameters: + key : byte string + The secret key to use in the symmetric cipher. 
+ Its length may vary from 1 to 32 bytes. + + :Return: an `XORCipher` object + """ + return XORCipher(key, *args, **kwargs) + +#: Size of a data block (in bytes) +block_size = 1 +#: Size of a key (in bytes) +key_size = range(1,32+1) + diff --git a/Lib/site-packages/Crypto/Cipher/__init__.py b/Lib/site-packages/Crypto/Cipher/__init__.py new file mode 100644 index 0000000..7afed2d --- /dev/null +++ b/Lib/site-packages/Crypto/Cipher/__init__.py @@ -0,0 +1,83 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Symmetric- and asymmetric-key encryption algorithms. + +Encryption algorithms transform plaintext in some way that +is dependent on a key or key pair, producing ciphertext. + +Symmetric algorithms +-------------------- + +Encryption can easily be reversed, if (and, hopefully, only if) +one knows the same key. +In other words, sender and receiver share the same key. + +The symmetric encryption modules here all support the interface described in PEP +272, "API for Block Encryption Algorithms". + +If you don't know which algorithm to choose, use AES because it's +standard and has undergone a fair bit of examination. + +======================== ======= ======================== +Module name Type Description +======================== ======= ======================== +`Crypto.Cipher.AES` Block Advanced Encryption Standard +`Crypto.Cipher.ARC2` Block Alleged RC2 +`Crypto.Cipher.ARC4` Stream Alleged RC4 +`Crypto.Cipher.Blowfish` Block Blowfish +`Crypto.Cipher.CAST` Block CAST +`Crypto.Cipher.DES` Block The Data Encryption Standard. + Very commonly used in the past, + but today its 56-bit keys are too small. +`Crypto.Cipher.DES3` Block Triple DES. +`Crypto.Cipher.XOR` Stream The simple XOR cipher. +======================== ======= ======================== + + +Asymmetric algorithms +--------------------- + +For asymmetric algorithms, the key to be used for decryption is totally +different and cannot be derived in a feasible way from the key used +for encryption. Put differently, sender and receiver each own one half +of a key pair. The encryption key is often called ``public`` whereas +the decryption key is called ``private``. 
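+
+A minimal sketch of the asymmetric flow (illustrative only; the 2048-bit
+key size and the message are arbitrary choices):
+
+ >>> from Crypto.PublicKey import RSA
+ >>> from Crypto.Cipher import PKCS1_OAEP
+ >>>
+ >>> key = RSA.generate(2048)
+ >>> ciphertext = PKCS1_OAEP.new(key.publickey()).encrypt(b'secret')
+ >>> plaintext = PKCS1_OAEP.new(key).decrypt(ciphertext)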
+ +========================== ======================= +Module name Description +========================== ======================= +`Crypto.Cipher.PKCS1_v1_5` PKCS#1 v1.5 encryption, based on RSA key pairs +`Crypto.Cipher.PKCS1_OAEP` PKCS#1 OAEP encryption, based on RSA key pairs +========================== ======================= + +:undocumented: __revision__, __package__, _AES, _ARC2, _ARC4, _Blowfish + _CAST, _DES, _DES3, _XOR +""" + +__all__ = ['AES', 'ARC2', 'ARC4', + 'Blowfish', 'CAST', 'DES', 'DES3', + 'XOR', + 'PKCS1_v1_5', 'PKCS1_OAEP' + ] + +__revision__ = "$Id$" + + diff --git a/Lib/site-packages/Crypto/Cipher/blockalgo.py b/Lib/site-packages/Crypto/Cipher/blockalgo.py new file mode 100644 index 0000000..dd183dc --- /dev/null +++ b/Lib/site-packages/Crypto/Cipher/blockalgo.py @@ -0,0 +1,296 @@ +# -*- coding: utf-8 -*- +# +# Cipher/blockalgo.py +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== +"""Module with definitions common to all block ciphers.""" + +import sys +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * +from Crypto.Util.py3compat import * + +#: *Electronic Code Book (ECB)*. +#: This is the simplest encryption mode. Each of the plaintext blocks +#: is directly encrypted into a ciphertext block, independently of +#: any other block. This mode exposes frequency of symbols +#: in your plaintext. Other modes (e.g. *CBC*) should be used instead. +#: +#: See `NIST SP800-38A`_ , Section 6.1 . +#: +#: .. _`NIST SP800-38A` : http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf +MODE_ECB = 1 + +#: *Cipher-Block Chaining (CBC)*. Each of the ciphertext blocks depends +#: on the current and all previous plaintext blocks. An Initialization Vector +#: (*IV*) is required. +#: +#: The *IV* is a data block to be transmitted to the receiver. +#: The *IV* can be made public, but it must be authenticated by the receiver and +#: it should be picked randomly. +#: +#: See `NIST SP800-38A`_ , Section 6.2 . +#: +#: .. _`NIST SP800-38A` : http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf +MODE_CBC = 2 + +#: *Cipher FeedBack (CFB)*. This mode is similar to CBC, but it transforms +#: the underlying block cipher into a stream cipher. Plaintext and ciphertext +#: are processed in *segments* of **s** bits. The mode is therefore sometimes +#: labelled **s**-bit CFB. An Initialization Vector (*IV*) is required. +#: +#: When encrypting, each ciphertext segment contributes to the encryption of +#: the next plaintext segment. +#: +#: This *IV* is a data block to be transmitted to the receiver. 
+#: The *IV* can be made public, but it should be picked randomly. +#: Reusing the same *IV* for encryptions done with the same key lead to +#: catastrophic cryptographic failures. +#: +#: See `NIST SP800-38A`_ , Section 6.3 . +#: +#: .. _`NIST SP800-38A` : http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf +MODE_CFB = 3 + +#: This mode should not be used. +MODE_PGP = 4 + +#: *Output FeedBack (OFB)*. This mode is very similar to CBC, but it +#: transforms the underlying block cipher into a stream cipher. +#: The keystream is the iterated block encryption of an Initialization Vector (*IV*). +#: +#: The *IV* is a data block to be transmitted to the receiver. +#: The *IV* can be made public, but it should be picked randomly. +#: +#: Reusing the same *IV* for encryptions done with the same key lead to +#: catastrophic cryptograhic failures. +#: +#: See `NIST SP800-38A`_ , Section 6.4 . +#: +#: .. _`NIST SP800-38A` : http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf +MODE_OFB = 5 + +#: *CounTeR (CTR)*. This mode is very similar to ECB, in that +#: encryption of one block is done independently of all other blocks. +#: Unlike ECB, the block *position* contributes to the encryption and no +#: information leaks about symbol frequency. +#: +#: Each message block is associated to a *counter* which must be unique +#: across all messages that get encrypted with the same key (not just within +#: the same message). The counter is as big as the block size. +#: +#: Counters can be generated in several ways. The most straightword one is +#: to choose an *initial counter block* (which can be made public, similarly +#: to the *IV* for the other modes) and increment its lowest **m** bits by +#: one (modulo *2^m*) for each block. In most cases, **m** is chosen to be half +#: the block size. +#: +#: Reusing the same *initial counter block* for encryptions done with the same +#: key lead to catastrophic cryptograhic failures. +#: +#: See `NIST SP800-38A`_ , Section 6.5 (for the mode) and Appendix B (for how +#: to manage the *initial counter block*). +#: +#: .. _`NIST SP800-38A` : http://csrc.nist.gov/publications/nistpubs/800-38a/sp800-38a.pdf +MODE_CTR = 6 + +#: OpenPGP. This mode is a variant of CFB, and it is only used in PGP and OpenPGP_ applications. +#: An Initialization Vector (*IV*) is required. +#: +#: Unlike CFB, the IV is not transmitted to the receiver. Instead, the *encrypted* IV is. +#: The IV is a random data block. Two of its bytes are duplicated to act as a checksum +#: for the correctness of the key. The encrypted IV is therefore 2 bytes longer than +#: the clean IV. +#: +#: .. _OpenPGP: http://tools.ietf.org/html/rfc4880 +MODE_OPENPGP = 7 + +def _getParameter(name, index, args, kwargs, default=None): + """Find a parameter in tuple and dictionary arguments a function receives""" + param = kwargs.get(name) + if len(args)>index: + if param: + raise ValueError("Parameter '%s' is specified twice" % name) + param = args[index] + return param or default + +class BlockAlgo: + """Class modelling an abstract block cipher.""" + + def __init__(self, factory, key, *args, **kwargs): + self.mode = _getParameter('mode', 0, args, kwargs, default=MODE_ECB) + self.block_size = factory.block_size + + if self.mode != MODE_OPENPGP: + self._cipher = factory.new(key, *args, **kwargs) + self.IV = self._cipher.IV + else: + # OPENPGP mode. For details, see 13.9 in RCC4880. 
+ # + # A few members are specifically created for this mode: + # - _encrypted_iv, set in this constructor + # - _done_first_block, set to True after the first encryption + # - _done_last_block, set to True after a partial block is processed + + self._done_first_block = False + self._done_last_block = False + self.IV = _getParameter('iv', 1, args, kwargs) + if not self.IV: + raise ValueError("MODE_OPENPGP requires an IV") + + # Instantiate a temporary cipher to process the IV + IV_cipher = factory.new(key, MODE_CFB, + b('\x00')*self.block_size, # IV for CFB + segment_size=self.block_size*8) + + # The cipher will be used for... + if len(self.IV) == self.block_size: + # ... encryption + self._encrypted_IV = IV_cipher.encrypt( + self.IV + self.IV[-2:] + # Plaintext + b('\x00')*(self.block_size-2) # Padding + )[:self.block_size+2] + elif len(self.IV) == self.block_size+2: + # ... decryption + self._encrypted_IV = self.IV + self.IV = IV_cipher.decrypt(self.IV + # Ciphertext + b('\x00')*(self.block_size-2) # Padding + )[:self.block_size+2] + if self.IV[-2:] != self.IV[-4:-2]: + raise ValueError("Failed integrity check for OPENPGP IV") + self.IV = self.IV[:-2] + else: + raise ValueError("Length of IV must be %d or %d bytes for MODE_OPENPGP" + % (self.block_size, self.block_size+2)) + + # Instantiate the cipher for the real PGP data + self._cipher = factory.new(key, MODE_CFB, + self._encrypted_IV[-self.block_size:], + segment_size=self.block_size*8) + + def encrypt(self, plaintext): + """Encrypt data with the key and the parameters set at initialization. + + The cipher object is stateful; encryption of a long block + of data can be broken up in two or more calls to `encrypt()`. + That is, the statement: + + >>> c.encrypt(a) + c.encrypt(b) + + is always equivalent to: + + >>> c.encrypt(a+b) + + That also means that you cannot reuse an object for encrypting + or decrypting other data with the same key. + + This function does not perform any padding. + + - For `MODE_ECB`, `MODE_CBC`, and `MODE_OFB`, *plaintext* length + (in bytes) must be a multiple of *block_size*. + + - For `MODE_CFB`, *plaintext* length (in bytes) must be a multiple + of *segment_size*/8. + + - For `MODE_CTR`, *plaintext* can be of any length. + + - For `MODE_OPENPGP`, *plaintext* must be a multiple of *block_size*, + unless it is the last chunk of the message. + + :Parameters: + plaintext : byte string + The piece of data to encrypt. + :Return: + the encrypted data, as a byte string. It is as long as + *plaintext* with one exception: when encrypting the first message + chunk with `MODE_OPENPGP`, the encypted IV is prepended to the + returned ciphertext. + """ + + if self.mode == MODE_OPENPGP: + padding_length = (self.block_size - len(plaintext) % self.block_size) % self.block_size + if padding_length>0: + # CFB mode requires ciphertext to have length multiple of block size, + # but PGP mode allows the last block to be shorter + if self._done_last_block: + raise ValueError("Only the last chunk is allowed to have length not multiple of %d bytes", + self.block_size) + self._done_last_block = True + padded = plaintext + b('\x00')*padding_length + res = self._cipher.encrypt(padded)[:len(plaintext)] + else: + res = self._cipher.encrypt(plaintext) + if not self._done_first_block: + res = self._encrypted_IV + res + self._done_first_block = True + return res + + return self._cipher.encrypt(plaintext) + + def decrypt(self, ciphertext): + """Decrypt data with the key and the parameters set at initialization. 
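A hedged round-trip sketch of the MODE_OPENPGP behaviour implemented above (illustrative only, not part of the patched file): the encrypting side supplies a clean IV of block_size bytes and gets the encrypted IV (block_size + 2 bytes, since two checksum bytes are appended before encryption) prepended to the first returned chunk, while the decrypting side passes that encrypted IV back in.

    from Crypto.Cipher import AES
    from Crypto import Random

    key = Random.new().read(16)
    iv = Random.new().read(AES.block_size)              # clean IV: block_size bytes
    enc = AES.new(key, AES.MODE_OPENPGP, iv)
    ct = enc.encrypt(b'sixteen byte msg')                # first chunk: encrypted IV + ciphertext
    encrypted_iv = ct[:AES.block_size + 2]               # 18 bytes for AES
    body = ct[AES.block_size + 2:]
    dec = AES.new(key, AES.MODE_OPENPGP, encrypted_iv)   # decryption receives the encrypted IV
    assert dec.decrypt(body) == b'sixteen byte msg'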
+ + The cipher object is stateful; decryption of a long block + of data can be broken up in two or more calls to `decrypt()`. + That is, the statement: + + >>> c.decrypt(a) + c.decrypt(b) + + is always equivalent to: + + >>> c.decrypt(a+b) + + That also means that you cannot reuse an object for encrypting + or decrypting other data with the same key. + + This function does not perform any padding. + + - For `MODE_ECB`, `MODE_CBC`, and `MODE_OFB`, *ciphertext* length + (in bytes) must be a multiple of *block_size*. + + - For `MODE_CFB`, *ciphertext* length (in bytes) must be a multiple + of *segment_size*/8. + + - For `MODE_CTR`, *ciphertext* can be of any length. + + - For `MODE_OPENPGP`, *plaintext* must be a multiple of *block_size*, + unless it is the last chunk of the message. + + :Parameters: + ciphertext : byte string + The piece of data to decrypt. + :Return: the decrypted data (byte string, as long as *ciphertext*). + """ + if self.mode == MODE_OPENPGP: + padding_length = (self.block_size - len(ciphertext) % self.block_size) % self.block_size + if padding_length>0: + # CFB mode requires ciphertext to have length multiple of block size, + # but PGP mode allows the last block to be shorter + if self._done_last_block: + raise ValueError("Only the last chunk is allowed to have length not multiple of %d bytes", + self.block_size) + self._done_last_block = True + padded = ciphertext + b('\x00')*padding_length + res = self._cipher.decrypt(padded)[:len(ciphertext)] + else: + res = self._cipher.decrypt(ciphertext) + return res + + return self._cipher.decrypt(ciphertext) + diff --git a/Lib/site-packages/Crypto/Hash/HMAC.py b/Lib/site-packages/Crypto/Hash/HMAC.py new file mode 100644 index 0000000..9ced373 --- /dev/null +++ b/Lib/site-packages/Crypto/Hash/HMAC.py @@ -0,0 +1,212 @@ +# HMAC.py - Implements the HMAC algorithm as described by RFC 2104. +# +# =================================================================== +# Portions Copyright (c) 2001, 2002, 2003 Python Software Foundation; +# All Rights Reserved +# +# This file contains code from the Python 2.2 hmac.py module (the +# "Original Code"), with modifications made after it was incorporated +# into PyCrypto (the "Modifications"). +# +# To the best of our knowledge, the Python Software Foundation is the +# copyright holder of the Original Code, and has licensed it under the +# Python 2.2 license. See the file LEGAL/copy/LICENSE.python-2.2 for +# details. +# +# The Modifications to this file are dedicated to the public domain. +# To the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. No rights are +# reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + + +"""HMAC (Hash-based Message Authentication Code) algorithm + +HMAC is a MAC defined in RFC2104_ and FIPS-198_ and constructed using +a cryptograpic hash algorithm. 
+It is usually named *HMAC-X*, where *X* is the hash algorithm; for +instance *HMAC-SHA1* or *HMAC-MD5*. + +The strength of an HMAC depends on: + + - the strength of the hash algorithm + - the length and entropy of the secret key + +An example of possible usage is the following: + + >>> from Crypto.Hash import HMAC + >>> + >>> secret = b'Swordfish' + >>> h = HMAC.new(secret) + >>> h.update(b'Hello') + >>> print h.hexdigest() + +.. _RFC2104: http://www.ietf.org/rfc/rfc2104.txt +.. _FIPS-198: http://csrc.nist.gov/publications/fips/fips198/fips-198a.pdf +""" + +# This is just a copy of the Python 2.2 HMAC module, modified to work when +# used on versions of Python before 2.2. + +__revision__ = "$Id$" + +__all__ = ['new', 'digest_size', 'HMAC' ] + +from Crypto.Util.strxor import strxor_c +from Crypto.Util.py3compat import * + +#: The size of the authentication tag produced by the MAC. +#: It matches the digest size on the underlying +#: hashing module used. +digest_size = None + +class HMAC: + """Class that implements HMAC""" + + #: The size of the authentication tag produced by the MAC. + #: It matches the digest size on the underlying + #: hashing module used. + digest_size = None + + def __init__(self, key, msg = None, digestmod = None): + """Create a new HMAC object. + + :Parameters: + key : byte string + secret key for the MAC object. + It must be long enough to match the expected security level of the + MAC. However, there is no benefit in using keys longer than the + `digest_size` of the underlying hash algorithm. + msg : byte string + The very first chunk of the message to authenticate. + It is equivalent to an early call to `update()`. Optional. + :Parameter digestmod: + The hash algorithm the HMAC is based on. + Default is `Crypto.Hash.MD5`. + :Type digestmod: + A hash module or object instantiated from `Crypto.Hash` + """ + if digestmod is None: + from . import MD5 + digestmod = MD5 + + self.digestmod = digestmod + self.outer = digestmod.new() + self.inner = digestmod.new() + try: + self.digest_size = digestmod.digest_size + except AttributeError: + self.digest_size = len(self.outer.digest()) + + try: + # The block size is 128 bytes for SHA384 and SHA512 and 64 bytes + # for the others hash function + blocksize = digestmod.block_size + except AttributeError: + blocksize = 64 + + ipad = 0x36 + opad = 0x5C + + if len(key) > blocksize: + key = digestmod.new(key).digest() + + key = key + bchr(0) * (blocksize - len(key)) + self.outer.update(strxor_c(key, opad)) + self.inner.update(strxor_c(key, ipad)) + if (msg): + self.update(msg) + + def update(self, msg): + """Continue authentication of a message by consuming the next chunk of data. + + Repeated calls are equivalent to a single call with the concatenation + of all the arguments. In other words: + + >>> m.update(a); m.update(b) + + is equivalent to: + + >>> m.update(a+b) + + :Parameters: + msg : byte string + The next chunk of the message being authenticated + """ + + self.inner.update(msg) + + def copy(self): + """Return a copy ("clone") of the MAC object. + + The copy will have the same internal state as the original MAC + object. + This can be used to efficiently compute the MAC of strings that + share a common initial substring. + + :Returns: An `HMAC` object + """ + other = HMAC(b("")) + other.digestmod = self.digestmod + other.inner = self.inner.copy() + other.outer = self.outer.copy() + return other + + def digest(self): + """Return the **binary** (non-printable) MAC of the message that has + been authenticated so far. 
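A short usage sketch of the class above (illustrative only, not part of the patched file); SHA256 is chosen here in place of the MD5 default.

    from Crypto.Hash import HMAC, SHA256

    secret = b'Swordfish'
    mac = HMAC.new(secret, digestmod=SHA256)   # the default digestmod is MD5; SHA256 is stronger
    mac.update(b'Hello')
    tag = mac.hexdigest()                      # 64 hex digits for a 32-byte digest

    # The receiver recomputes the tag with the shared secret and compares it.
    check = HMAC.new(secret, b'Hello', digestmod=SHA256)
    assert check.hexdigest() == tag            # note: '==' is not a constant-time comparison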
+ + This method does not change the state of the MAC object. + You can continue updating the object after calling this function. + + :Return: A byte string of `digest_size` bytes. It may contain non-ASCII + characters, including null bytes. + """ + h = self.outer.copy() + h.update(self.inner.digest()) + return h.digest() + + def hexdigest(self): + """Return the **printable** MAC of the message that has been + authenticated so far. + + This method does not change the state of the MAC object. + + :Return: A string of 2* `digest_size` bytes. It contains only + hexadecimal ASCII digits. + """ + return "".join(["%02x" % bord(x) + for x in tuple(self.digest())]) + +def new(key, msg = None, digestmod = None): + """Create a new HMAC object. + + :Parameters: + key : byte string + key for the MAC object. + It must be long enough to match the expected security level of the + MAC. However, there is no benefit in using keys longer than the + `digest_size` of the underlying hash algorithm. + msg : byte string + The very first chunk of the message to authenticate. + It is equivalent to an early call to `HMAC.update()`. + Optional. + :Parameter digestmod: + The hash to use to implement the HMAC. Default is `Crypto.Hash.MD5`. + :Type digestmod: + A hash module or instantiated object from `Crypto.Hash` + :Returns: An `HMAC` object + """ + return HMAC(key, msg, digestmod) + diff --git a/Lib/site-packages/Crypto/Hash/MD2.py b/Lib/site-packages/Crypto/Hash/MD2.py new file mode 100644 index 0000000..dac959e --- /dev/null +++ b/Lib/site-packages/Crypto/Hash/MD2.py @@ -0,0 +1,91 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""MD2 cryptographic hash algorithm. + +MD2 is specified in RFC1319_ and it produces the 128 bit digest of a message. + + >>> from Crypto.Hash import MD2 + >>> + >>> h = MD2.new() + >>> h.update(b'Hello') + >>> print h.hexdigest() + +MD2 stand for Message Digest version 2, and it was invented by Rivest in 1989. + +This algorithm is both slow and insecure. Do not use it for new designs. + +.. 
_RFC1319: http://tools.ietf.org/html/rfc1319 +""" + +_revision__ = "$Id$" + +__all__ = ['new', 'digest_size', 'MD2Hash' ] + +from Crypto.Util.py3compat import * +from Crypto.Hash.hashalgo import HashAlgo + +import Crypto.Hash._MD2 as _MD2 +hashFactory = _MD2 + +class MD2Hash(HashAlgo): + """Class that implements an MD2 hash + + :undocumented: block_size + """ + + #: ASN.1 Object identifier (OID):: + #: + #: id-md2 OBJECT IDENTIFIER ::= { + #: iso(1) member-body(2) us(840) rsadsi(113549) + #: digestAlgorithm(2) 2 + #: } + #: + #: This value uniquely identifies the MD2 algorithm. + oid = b('\x06\x08\x2a\x86\x48\x86\xf7\x0d\x02\x02') + + digest_size = 16 + block_size = 16 + + def __init__(self, data=None): + HashAlgo.__init__(self, hashFactory, data) + + def new(self, data=None): + return MD2Hash(data) + +def new(data=None): + """Return a fresh instance of the hash object. + + :Parameters: + data : byte string + The very first chunk of the message to hash. + It is equivalent to an early call to `MD2Hash.update()`. + Optional. + + :Return: An `MD2Hash` object + """ + return MD2Hash().new(data) + +#: The size of the resulting hash in bytes. +digest_size = MD2Hash.digest_size + +#: The internal block size of the hash algorithm in bytes. +block_size = MD2Hash.block_size + diff --git a/Lib/site-packages/Crypto/Hash/MD4.py b/Lib/site-packages/Crypto/Hash/MD4.py new file mode 100644 index 0000000..e28a201 --- /dev/null +++ b/Lib/site-packages/Crypto/Hash/MD4.py @@ -0,0 +1,91 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""MD4 cryptographic hash algorithm. + +MD4 is specified in RFC1320_ and produces the 128 bit digest of a message. + + >>> from Crypto.Hash import MD4 + >>> + >>> h = MD4.new() + >>> h.update(b'Hello') + >>> print h.hexdigest() + +MD4 stand for Message Digest version 4, and it was invented by Rivest in 1990. + +This algorithm is insecure. Do not use it for new designs. + +.. _RFC1320: http://tools.ietf.org/html/rfc1320 +""" + +_revision__ = "$Id$" + +__all__ = ['new', 'digest_size', 'MD4Hash' ] + +from Crypto.Util.py3compat import * +from Crypto.Hash.hashalgo import HashAlgo + +import Crypto.Hash._MD4 as _MD4 +hashFactory = _MD4 + +class MD4Hash(HashAlgo): + """Class that implements an MD4 hash + + :undocumented: block_size + """ + + #: ASN.1 Object identifier (OID):: + #: + #: id-md2 OBJECT IDENTIFIER ::= { + #: iso(1) member-body(2) us(840) rsadsi(113549) + #: digestAlgorithm(2) 4 + #: } + #: + #: This value uniquely identifies the MD4 algorithm. 
+ oid = b('\x06\x08\x2a\x86\x48\x86\xf7\x0d\x02\x04') + + digest_size = 16 + block_size = 64 + + def __init__(self, data=None): + HashAlgo.__init__(self, hashFactory, data) + + def new(self, data=None): + return MD4Hash(data) + +def new(data=None): + """Return a fresh instance of the hash object. + + :Parameters: + data : byte string + The very first chunk of the message to hash. + It is equivalent to an early call to `MD4Hash.update()`. + Optional. + + :Return: A `MD4Hash` object + """ + return MD4Hash().new(data) + +#: The size of the resulting hash in bytes. +digest_size = MD4Hash.digest_size + +#: The internal block size of the hash algorithm in bytes. +block_size = MD4Hash.block_size + diff --git a/Lib/site-packages/Crypto/Hash/MD5.py b/Lib/site-packages/Crypto/Hash/MD5.py new file mode 100644 index 0000000..b6ae23d --- /dev/null +++ b/Lib/site-packages/Crypto/Hash/MD5.py @@ -0,0 +1,97 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""MD5 cryptographic hash algorithm. + +MD5 is specified in RFC1321_ and produces the 128 bit digest of a message. + + >>> from Crypto.Hash import MD5 + >>> + >>> h = MD5.new() + >>> h.update(b'Hello') + >>> print h.hexdigest() + +MD5 stand for Message Digest version 5, and it was invented by Rivest in 1991. + +This algorithm is insecure. Do not use it for new designs. + +.. _RFC1321: http://tools.ietf.org/html/rfc1321 +""" + +_revision__ = "$Id$" + +__all__ = ['new', 'digest_size', 'MD5Hash' ] + +from Crypto.Util.py3compat import * +from Crypto.Hash.hashalgo import HashAlgo + +try: + # The md5 module is deprecated in Python 2.6, so use hashlib when possible. + import hashlib + hashFactory = hashlib.md5 + +except ImportError: + from . import md5 + hashFactory = md5 + +class MD5Hash(HashAlgo): + """Class that implements an MD5 hash + + :undocumented: block_size + """ + + #: ASN.1 Object identifier (OID):: + #: + #: id-md5 OBJECT IDENTIFIER ::= { + #: iso(1) member-body(2) us(840) rsadsi(113549) + #: digestAlgorithm(2) 5 + #: } + #: + #: This value uniquely identifies the MD5 algorithm. + oid = b('\x06\x08\x2a\x86\x48\x86\xf7\x0d\x02\x05') + + digest_size = 16 + block_size = 64 + + def __init__(self, data=None): + HashAlgo.__init__(self, hashFactory, data) + + def new(self, data=None): + return MD5Hash(data) + +def new(data=None): + """Return a fresh instance of the hash object. + + :Parameters: + data : byte string + The very first chunk of the message to hash. + It is equivalent to an early call to `MD5Hash.update()`. + Optional. 
+ + :Return: A `MD5Hash` object + """ + return MD5Hash().new(data) + +#: The size of the resulting hash in bytes. +digest_size = MD5Hash.digest_size + +#: The internal block size of the hash algorithm in bytes. +block_size = MD5Hash.block_size + diff --git a/Lib/site-packages/Crypto/Hash/RIPEMD.py b/Lib/site-packages/Crypto/Hash/RIPEMD.py new file mode 100644 index 0000000..33099cb --- /dev/null +++ b/Lib/site-packages/Crypto/Hash/RIPEMD.py @@ -0,0 +1,94 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""RIPEMD-160 cryptographic hash algorithm. + +RIPEMD-160_ produces the 160 bit digest of a message. + + >>> from Crypto.Hash import RIPEMD + >>> + >>> h = RIPEMD.new() + >>> h.update(b'Hello') + >>> print h.hexdigest() + +RIPEMD-160 stands for RACE Integrity Primitives Evaluation Message Digest +with a 160 bit digest. It was invented by Dobbertin, Bosselaers, and Preneel. + +This algorithm is considered secure, although it has not been scrutinized as +extensively as SHA-1. Moreover, it provides an informal security level of just +80bits. + +.. _RIPEMD-160: http://homes.esat.kuleuven.be/~bosselae/ripemd160.html +""" + +_revision__ = "$Id$" + +__all__ = ['new', 'digest_size', 'RIPEMD160Hash' ] + +from Crypto.Util.py3compat import * +from Crypto.Hash.hashalgo import HashAlgo + +import Crypto.Hash._RIPEMD160 as _RIPEMD160 +hashFactory = _RIPEMD160 + +class RIPEMD160Hash(HashAlgo): + """Class that implements a RIPMD-160 hash + + :undocumented: block_size + """ + + #: ASN.1 Object identifier (OID):: + #: + #: id-ripemd160 OBJECT IDENTIFIER ::= { + #: iso(1) identified-organization(3) teletrust(36) + #: algorithm(3) hashAlgorithm(2) ripemd160(1) + #: } + #: + #: This value uniquely identifies the RIPMD-160 algorithm. + oid = b("\x06\x05\x2b\x24\x03\x02\x01") + + digest_size = 20 + block_size = 64 + + def __init__(self, data=None): + HashAlgo.__init__(self, hashFactory, data) + + def new(self, data=None): + return RIPEMD160Hash(data) + +def new(data=None): + """Return a fresh instance of the hash object. + + :Parameters: + data : byte string + The very first chunk of the message to hash. + It is equivalent to an early call to `RIPEMD160Hash.update()`. + Optional. + + :Return: A `RIPEMD160Hash` object + """ + return RIPEMD160Hash().new(data) + +#: The size of the resulting hash in bytes. +digest_size = RIPEMD160Hash.digest_size + +#: The internal block size of the hash algorithm in bytes. 
+block_size = RIPEMD160Hash.block_size + diff --git a/Lib/site-packages/Crypto/Hash/SHA.py b/Lib/site-packages/Crypto/Hash/SHA.py new file mode 100644 index 0000000..03bf224 --- /dev/null +++ b/Lib/site-packages/Crypto/Hash/SHA.py @@ -0,0 +1,98 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""SHA-1 cryptographic hash algorithm. + +SHA-1_ produces the 160 bit digest of a message. + + >>> from Crypto.Hash import SHA + >>> + >>> h = SHA.new() + >>> h.update(b'Hello') + >>> print h.hexdigest() + +*SHA* stands for Secure Hash Algorithm. + +This algorithm is not considered secure. Do not use it for new designs. + +.. _SHA-1: http://csrc.nist.gov/publications/fips/fips180-2/fips180-2.pdf +""" + +_revision__ = "$Id$" + +__all__ = ['new', 'digest_size', 'SHA1Hash' ] + +from Crypto.Util.py3compat import * +from Crypto.Hash.hashalgo import HashAlgo + +try: + # The sha module is deprecated in Python 2.6, so use hashlib when possible. + import hashlib + hashFactory = hashlib.sha1 + +except ImportError: + from . import sha + hashFactory = sha + +class SHA1Hash(HashAlgo): + """Class that implements a SHA-1 hash + + :undocumented: block_size + """ + + #: ASN.1 Object identifier (OID):: + #: + #: id-sha1 OBJECT IDENTIFIER ::= { + #: iso(1) identified-organization(3) oiw(14) secsig(3) + #: algorithms(2) 26 + #: } + #: + #: This value uniquely identifies the SHA-1 algorithm. + oid = b('\x06\x05\x2b\x0e\x03\x02\x1a') + + digest_size = 20 + block_size = 64 + + def __init__(self, data=None): + HashAlgo.__init__(self, hashFactory, data) + + def new(self, data=None): + return SHA1Hash(data) + +def new(data=None): + """Return a fresh instance of the hash object. + + :Parameters: + data : byte string + The very first chunk of the message to hash. + It is equivalent to an early call to `SHA1Hash.update()`. + Optional. + + :Return: A `SHA1Hash` object + """ + return SHA1Hash().new(data) + +#: The size of the resulting hash in bytes. +digest_size = SHA1Hash.digest_size + +#: The internal block size of the hash algorithm in bytes. +block_size = SHA1Hash.block_size + + diff --git a/Lib/site-packages/Crypto/Hash/SHA224.py b/Lib/site-packages/Crypto/Hash/SHA224.py new file mode 100644 index 0000000..959b56d --- /dev/null +++ b/Lib/site-packages/Crypto/Hash/SHA224.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. 
To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""SHA-224 cryptographic hash algorithm. + +SHA-224 belongs to the SHA-2_ family of cryptographic hashes. +It produces the 224 bit digest of a message. + + >>> from Crypto.Hash import SHA224 + >>> + >>> h = SHA224.new() + >>> h.update(b'Hello') + >>> print h.hexdigest() + +*SHA* stands for Secure Hash Algorithm. + +.. _SHA-2: http://csrc.nist.gov/publications/fips/fips180-2/fips180-2.pdf +""" + +_revision__ = "$Id$" + +__all__ = ['new', 'digest_size', 'SHA224Hash' ] + +from Crypto.Util.py3compat import * +from Crypto.Hash.hashalgo import HashAlgo + +try: + import hashlib + hashFactory = hashlib.sha224 + +except ImportError: + from Crypto.Hash import _SHA224 + hashFactory = _SHA224 + +class SHA224Hash(HashAlgo): + """Class that implements a SHA-224 hash + + :undocumented: block_size + """ + + #: ASN.1 Object identifier (OID):: + #: + #: id-sha224 OBJECT IDENTIFIER ::= { + #: joint-iso-itu-t(2) country(16) us(840) organization(1) gov(101) csor(3) + #: nistalgorithm(4) hashalgs(2) 4 + #: } + #: + #: This value uniquely identifies the SHA-224 algorithm. + oid = b('\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x04') + + digest_size = 28 + block_size = 64 + + def __init__(self, data=None): + HashAlgo.__init__(self, hashFactory, data) + + def new(self, data=None): + return SHA224Hash(data) + +def new(data=None): + """Return a fresh instance of the hash object. + + :Parameters: + data : byte string + The very first chunk of the message to hash. + It is equivalent to an early call to `SHA224Hash.update()`. + Optional. + + :Return: A `SHA224Hash` object + """ + return SHA224Hash().new(data) + +#: The size of the resulting hash in bytes. +digest_size = SHA224Hash.digest_size + +#: The internal block size of the hash algorithm in bytes. +block_size = SHA224Hash.block_size + diff --git a/Lib/site-packages/Crypto/Hash/SHA256.py b/Lib/site-packages/Crypto/Hash/SHA256.py new file mode 100644 index 0000000..b0a99b3 --- /dev/null +++ b/Lib/site-packages/Crypto/Hash/SHA256.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""SHA-256 cryptographic hash algorithm. + +SHA-256 belongs to the SHA-2_ family of cryptographic hashes. +It produces the 256 bit digest of a message. + + >>> from Crypto.Hash import SHA256 + >>> + >>> h = SHA256.new() + >>> h.update(b'Hello') + >>> print h.hexdigest() + +*SHA* stands for Secure Hash Algorithm. + +.. _SHA-2: http://csrc.nist.gov/publications/fips/fips180-2/fips180-2.pdf +""" + +_revision__ = "$Id$" + +__all__ = ['new', 'digest_size', 'SHA256Hash' ] + +from Crypto.Util.py3compat import * +from Crypto.Hash.hashalgo import HashAlgo + +try: + import hashlib + hashFactory = hashlib.sha256 + +except ImportError: + from Crypto.Hash import _SHA256 + hashFactory = _SHA256 + +class SHA256Hash(HashAlgo): + """Class that implements a SHA-256 hash + + :undocumented: block_size + """ + + #: ASN.1 Object identifier (OID):: + #: + #: id-sha256 OBJECT IDENTIFIER ::= { + #: joint-iso-itu-t(2) country(16) us(840) organization(1) + #: gov(101) csor(3) nistalgorithm(4) hashalgs(2) 1 + #: } + #: + #: This value uniquely identifies the SHA-256 algorithm. + oid = b('\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x01') + + digest_size = 32 + block_size = 64 + + def __init__(self, data=None): + HashAlgo.__init__(self, hashFactory, data) + + def new(self, data=None): + return SHA256Hash(data) + +def new(data=None): + """Return a fresh instance of the hash object. + + :Parameters: + data : byte string + The very first chunk of the message to hash. + It is equivalent to an early call to `SHA256Hash.update()`. + Optional. + + :Return: A `SHA256Hash` object + """ + return SHA256Hash().new(data) + +#: The size of the resulting hash in bytes. +digest_size = SHA256Hash.digest_size + +#: The internal block size of the hash algorithm in bytes. +block_size = SHA256Hash.block_size + diff --git a/Lib/site-packages/Crypto/Hash/SHA384.py b/Lib/site-packages/Crypto/Hash/SHA384.py new file mode 100644 index 0000000..3490b02 --- /dev/null +++ b/Lib/site-packages/Crypto/Hash/SHA384.py @@ -0,0 +1,96 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""SHA-384 cryptographic hash algorithm. + +SHA-384 belongs to the SHA-2_ family of cryptographic hashes. +It produces the 384 bit digest of a message. 
+ + >>> from Crypto.Hash import SHA384 + >>> + >>> h = SHA384.new() + >>> h.update(b'Hello') + >>> print h.hexdigest() + +*SHA* stands for Secure Hash Algorithm. + +.. _SHA-2: http://csrc.nist.gov/publications/fips/fips180-2/fips180-2.pdf +""" + +_revision__ = "$Id$" + +__all__ = ['new', 'digest_size', 'SHA384Hash' ] + +from Crypto.Util.py3compat import * +from Crypto.Hash.hashalgo import HashAlgo + +try: + import hashlib + hashFactory = hashlib.sha384 + +except ImportError: + from Crypto.Hash import _SHA384 + hashFactory = _SHA384 + +class SHA384Hash(HashAlgo): + """Class that implements a SHA-384 hash + + :undocumented: block_size + """ + + #: ASN.1 Object identifier (OID):: + #: + #: id-sha384 OBJECT IDENTIFIER ::= { + #: joint-iso-itu-t(2) country(16) us(840) organization(1) gov(101) csor(3) + #: nistalgorithm(4) hashalgs(2) 2 + #: } + #: + #: This value uniquely identifies the SHA-384 algorithm. + oid = b('\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x02') + + digest_size = 48 + block_size = 128 + + def __init__(self, data=None): + HashAlgo.__init__(self, hashFactory, data) + + def new(self, data=None): + return SHA384Hash(data) + +def new(data=None): + """Return a fresh instance of the hash object. + + :Parameters: + data : byte string + The very first chunk of the message to hash. + It is equivalent to an early call to `SHA384Hash.update()`. + Optional. + + :Return: A `SHA384Hash` object + """ + return SHA384Hash().new(data) + +#: The size of the resulting hash in bytes. +digest_size = SHA384Hash.digest_size + +#: The internal block size of the hash algorithm in bytes. +block_size = SHA384Hash.block_size + + diff --git a/Lib/site-packages/Crypto/Hash/SHA512.py b/Lib/site-packages/Crypto/Hash/SHA512.py new file mode 100644 index 0000000..d57548d --- /dev/null +++ b/Lib/site-packages/Crypto/Hash/SHA512.py @@ -0,0 +1,95 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""SHA-512 cryptographic hash algorithm. + +SHA-512 belongs to the SHA-2_ family of cryptographic hashes. +It produces the 512 bit digest of a message. + + >>> from Crypto.Hash import SHA512 + >>> + >>> h = SHA512.new() + >>> h.update(b'Hello') + >>> print h.hexdigest() + +*SHA* stands for Secure Hash Algorithm. + +.. 
_SHA-2: http://csrc.nist.gov/publications/fips/fips180-2/fips180-2.pdf +""" + +_revision__ = "$Id$" + +__all__ = ['new', 'digest_size', 'SHA512Hash' ] + +from Crypto.Util.py3compat import * +from Crypto.Hash.hashalgo import HashAlgo + +try: + import hashlib + hashFactory = hashlib.sha512 + +except ImportError: + from Crypto.Hash import _SHA512 + hashFactory = _SHA512 + +class SHA512Hash(HashAlgo): + """Class that implements a SHA-512 hash + + :undocumented: block_size + """ + + #: ASN.1 Object identifier (OID):: + #: + #: id-sha512 OBJECT IDENTIFIER ::= { + #: joint-iso-itu-t(2) + #: country(16) us(840) organization(1) gov(101) csor(3) nistalgorithm(4) hashalgs(2) 3 + #: } + #: + #: This value uniquely identifies the SHA-512 algorithm. + oid = b('\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x03') + + digest_size = 64 + block_size = 128 + + def __init__(self, data=None): + HashAlgo.__init__(self, hashFactory, data) + + def new(self, data=None): + return SHA512Hash(data) + +def new(data=None): + """Return a fresh instance of the hash object. + + :Parameters: + data : byte string + The very first chunk of the message to hash. + It is equivalent to an early call to `SHA512Hash.update()`. + Optional. + + :Return: A `SHA512Hash` object + """ + return SHA512Hash().new(data) + +#: The size of the resulting hash in bytes. +digest_size = SHA512Hash.digest_size + +#: The internal block size of the hash algorithm in bytes. +block_size = SHA512Hash.block_size + diff --git a/Lib/site-packages/Crypto/Hash/__init__.py b/Lib/site-packages/Crypto/Hash/__init__.py new file mode 100644 index 0000000..4582c66 --- /dev/null +++ b/Lib/site-packages/Crypto/Hash/__init__.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Hashing algorithms + +Hash functions take arbitrary binary strings as input, and produce a random-like output +of fixed size that is dependent on the input; it should be practically infeasible +to derive the original input data given only the hash function's +output. In other words, the hash function is *one-way*. + +It should also not be practically feasible to find a second piece of data +(a *second pre-image*) whose hash is the same as the original message +(*weak collision resistance*). + +Finally, it should not be feasible to find two arbitrary messages with the +same hash (*strong collision resistance*). + +The output of the hash function is called the *digest* of the input message. +In general, the security of a hash function is related to the length of the +digest. 
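As an illustration of the common interface these hashing modules expose (a sketch added for illustration, not part of the patched file): incremental updates are equivalent to hashing the concatenation, and copy() makes digests of messages sharing a prefix cheap.

    from Crypto.Hash import SHA256

    h = SHA256.new()
    h.update(b'Hello, ')
    h.update(b'world')                # same as a single update with b'Hello, world'
    digest_hex = h.hexdigest()        # 64 hex digits: the 256-bit digest

    h2 = h.copy()                     # clone the internal hashing state
    h2.update(b'!')
    assert h2.hexdigest() != digest_hex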
If the digest is *n* bits long, its security level is roughly comparable +to the the one offered by an *n/2* bit encryption algorithm. + +Hash functions can be used simply as a integrity check, or, in +association with a public-key algorithm, can be used to implement +digital signatures. + +The hashing modules here all support the interface described in `PEP +247`_ , "API for Cryptographic Hash Functions". + +.. _`PEP 247` : http://www.python.org/dev/peps/pep-0247/ + +:undocumented: _MD2, _MD4, _RIPEMD160, _SHA224, _SHA256, _SHA384, _SHA512 +""" + +__all__ = ['HMAC', 'MD2', 'MD4', 'MD5', 'RIPEMD', 'SHA', + 'SHA224', 'SHA256', 'SHA384', 'SHA512'] +__revision__ = "$Id$" + + diff --git a/Lib/site-packages/Crypto/Hash/hashalgo.py b/Lib/site-packages/Crypto/Hash/hashalgo.py new file mode 100644 index 0000000..b38b3a6 --- /dev/null +++ b/Lib/site-packages/Crypto/Hash/hashalgo.py @@ -0,0 +1,116 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +from binascii import hexlify + +class HashAlgo: + """A generic class for an abstract cryptographic hash algorithm. + + :undocumented: block_size + """ + + #: The size of the resulting hash in bytes. + digest_size = None + #: The internal block size of the hash algorithm in bytes. + block_size = None + + def __init__(self, hashFactory, data=None): + """Initialize the hash object. + + :Parameters: + hashFactory : callable + An object that will generate the actual hash implementation. + *hashFactory* must have a *new()* method, or must be directly + callable. + data : byte string + The very first chunk of the message to hash. + It is equivalent to an early call to `update()`. + """ + if hasattr(hashFactory, 'new'): + self._hash = hashFactory.new() + else: + self._hash = hashFactory() + if data: + self.update(data) + + def update(self, data): + """Continue hashing of a message by consuming the next chunk of data. + + Repeated calls are equivalent to a single call with the concatenation + of all the arguments. In other words: + + >>> m.update(a); m.update(b) + + is equivalent to: + + >>> m.update(a+b) + + :Parameters: + data : byte string + The next chunk of the message being hashed. + """ + return self._hash.update(data) + + def digest(self): + """Return the **binary** (non-printable) digest of the message that has been hashed so far. + + This method does not change the state of the hash object. + You can continue updating the object after calling this function. + + :Return: A byte string of `digest_size` bytes. It may contain non-ASCII + characters, including null bytes. 
+ """ + return self._hash.digest() + + def hexdigest(self): + """Return the **printable** digest of the message that has been hashed so far. + + This method does not change the state of the hash object. + + :Return: A string of 2* `digest_size` characters. It contains only + hexadecimal ASCII digits. + """ + return self._hash.hexdigest() + + def copy(self): + """Return a copy ("clone") of the hash object. + + The copy will have the same internal state as the original hash + object. + This can be used to efficiently compute the digests of strings that + share a common initial substring. + + :Return: A hash object of the same type + """ + return self._hash.copy() + + def new(self, data=None): + """Return a fresh instance of the hash object. + + Unlike the `copy` method, the internal state of the object is empty. + + :Parameters: + data : byte string + The next chunk of the message being hashed. + + :Return: A hash object of the same type + """ + pass + diff --git a/Lib/site-packages/Crypto/Protocol/AllOrNothing.py b/Lib/site-packages/Crypto/Protocol/AllOrNothing.py new file mode 100644 index 0000000..dfcf975 --- /dev/null +++ b/Lib/site-packages/Crypto/Protocol/AllOrNothing.py @@ -0,0 +1,320 @@ +# +# AllOrNothing.py : all-or-nothing package transformations +# +# Part of the Python Cryptography Toolkit +# +# Written by Andrew M. Kuchling and others +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""This file implements all-or-nothing package transformations. + +An all-or-nothing package transformation is one in which some text is +transformed into message blocks, such that all blocks must be obtained before +the reverse transformation can be applied. Thus, if any blocks are corrupted +or lost, the original message cannot be reproduced. + +An all-or-nothing package transformation is not encryption, although a block +cipher algorithm is used. The encryption key is randomly generated and is +extractable from the message blocks. + +This class implements the All-Or-Nothing package transformation algorithm +described in: + +Ronald L. Rivest. "All-Or-Nothing Encryption and The Package Transform" +http://theory.lcs.mit.edu/~rivest/fusion.pdf + +""" + +__revision__ = "$Id$" + +import operator +import sys +from Crypto.Util.number import bytes_to_long, long_to_bytes +from Crypto.Util.py3compat import * +from functools import reduce + +def isInt(x): + test = 0 + try: + test += x + except TypeError: + return 0 + return 1 + +class AllOrNothing: + """Class implementing the All-or-Nothing package transform. 
+ + Methods for subclassing: + + _inventkey(key_size): + Returns a randomly generated key. Subclasses can use this to + implement better random key generating algorithms. The default + algorithm is probably not very cryptographically secure. + + """ + + def __init__(self, ciphermodule, mode=None, IV=None): + """AllOrNothing(ciphermodule, mode=None, IV=None) + + ciphermodule is a module implementing the cipher algorithm to + use. It must provide the PEP272 interface. + + Note that the encryption key is randomly generated + automatically when needed. Optional arguments mode and IV are + passed directly through to the ciphermodule.new() method; they + are the feedback mode and initialization vector to use. All + three arguments must be the same for the object used to create + the digest, and to undigest'ify the message blocks. + """ + + self.__ciphermodule = ciphermodule + self.__mode = mode + self.__IV = IV + self.__key_size = ciphermodule.key_size + if not isInt(self.__key_size) or self.__key_size==0: + self.__key_size = 16 + + __K0digit = bchr(0x69) + + def digest(self, text): + """digest(text:string) : [string] + + Perform the All-or-Nothing package transform on the given + string. Output is a list of message blocks describing the + transformed text, where each block is a string of bit length equal + to the ciphermodule's block_size. + """ + + # generate a random session key and K0, the key used to encrypt the + # hash blocks. Rivest calls this a fixed, publically-known encryption + # key, but says nothing about the security implications of this key or + # how to choose it. + key = self._inventkey(self.__key_size) + K0 = self.__K0digit * self.__key_size + + # we need two cipher objects here, one that is used to encrypt the + # message blocks and one that is used to encrypt the hashes. The + # former uses the randomly generated key, while the latter uses the + # well-known key. + mcipher = self.__newcipher(key) + hcipher = self.__newcipher(K0) + + # Pad the text so that its length is a multiple of the cipher's + # block_size. Pad with trailing spaces, which will be eliminated in + # the undigest() step. + block_size = self.__ciphermodule.block_size + padbytes = block_size - (len(text) % block_size) + text = text + b(' ') * padbytes + + # Run through the algorithm: + # s: number of message blocks (size of text / block_size) + # input sequence: m1, m2, ... ms + # random key K' (`key' in the code) + # Compute output sequence: m'1, m'2, ... m's' for s' = s + 1 + # Let m'i = mi ^ E(K', i) for i = 1, 2, 3, ..., s + # Let m's' = K' ^ h1 ^ h2 ^ ... hs + # where hi = E(K0, m'i ^ i) for i = 1, 2, ... 
s + # + # The one complication I add is that the last message block is hard + # coded to the number of padbytes added, so that these can be stripped + # during the undigest() step + s = divmod(len(text), block_size)[0] + blocks = [] + hashes = [] + for i in range(1, s+1): + start = (i-1) * block_size + end = start + block_size + mi = text[start:end] + assert len(mi) == block_size + cipherblock = mcipher.encrypt(long_to_bytes(i, block_size)) + mticki = bytes_to_long(mi) ^ bytes_to_long(cipherblock) + blocks.append(mticki) + # calculate the hash block for this block + hi = hcipher.encrypt(long_to_bytes(mticki ^ i, block_size)) + hashes.append(bytes_to_long(hi)) + + # Add the padbytes length as a message block + i = i + 1 + cipherblock = mcipher.encrypt(long_to_bytes(i, block_size)) + mticki = padbytes ^ bytes_to_long(cipherblock) + blocks.append(mticki) + + # calculate this block's hash + hi = hcipher.encrypt(long_to_bytes(mticki ^ i, block_size)) + hashes.append(bytes_to_long(hi)) + + # Now calculate the last message block of the sequence 1..s'. This + # will contain the random session key XOR'd with all the hash blocks, + # so that for undigest(), once all the hash blocks are calculated, the + # session key can be trivially extracted. Calculating all the hash + # blocks requires that all the message blocks be received, thus the + # All-or-Nothing algorithm succeeds. + mtick_stick = bytes_to_long(key) ^ reduce(operator.xor, hashes) + blocks.append(mtick_stick) + + # we convert the blocks to strings since in Python, byte sequences are + # always represented as strings. This is more consistent with the + # model that encryption and hash algorithms always operate on strings. + return [long_to_bytes(i,self.__ciphermodule.block_size) for i in blocks] + + + def undigest(self, blocks): + """undigest(blocks : [string]) : string + + Perform the reverse package transformation on a list of message + blocks. Note that the ciphermodule used for both transformations + must be the same. blocks is a list of strings of bit length + equal to the ciphermodule's block_size. + """ + + # better have at least 2 blocks, for the padbytes package and the hash + # block accumulator + if len(blocks) < 2: + raise ValueError("List must be at least length 2.") + + # blocks is a list of strings. We need to deal with them as long + # integers + blocks = list(map(bytes_to_long, blocks)) + + # Calculate the well-known key, to which the hash blocks are + # encrypted, and create the hash cipher. + K0 = self.__K0digit * self.__key_size + hcipher = self.__newcipher(K0) + block_size = self.__ciphermodule.block_size + + # Since we have all the blocks (or this method would have been called + # prematurely), we can calculate all the hash blocks. + hashes = [] + for i in range(1, len(blocks)): + mticki = blocks[i-1] ^ i + hi = hcipher.encrypt(long_to_bytes(mticki, block_size)) + hashes.append(bytes_to_long(hi)) + + # now we can calculate K' (key). 
remember the last block contains + # m's' which we don't include here + key = blocks[-1] ^ reduce(operator.xor, hashes) + + # and now we can create the cipher object + mcipher = self.__newcipher(long_to_bytes(key, self.__key_size)) + + # And we can now decode the original message blocks + parts = [] + for i in range(1, len(blocks)): + cipherblock = mcipher.encrypt(long_to_bytes(i, block_size)) + mi = blocks[i-1] ^ bytes_to_long(cipherblock) + parts.append(mi) + + # The last message block contains the number of pad bytes appended to + # the original text string, such that its length was an even multiple + # of the cipher's block_size. This number should be small enough that + # the conversion from long integer to integer should never overflow + padbytes = int(parts[-1]) + text = b('').join(map(long_to_bytes, parts[:-1])) + return text[:-padbytes] + + def _inventkey(self, key_size): + # Return key_size random bytes + from Crypto import Random + return Random.new().read(key_size) + + def __newcipher(self, key): + if self.__mode is None and self.__IV is None: + return self.__ciphermodule.new(key) + elif self.__IV is None: + return self.__ciphermodule.new(key, self.__mode) + else: + return self.__ciphermodule.new(key, self.__mode, self.__IV) + + + +if __name__ == '__main__': + import sys + import getopt + import base64 + + usagemsg = '''\ +Test module usage: %(program)s [-c cipher] [-l] [-h] + +Where: + --cipher module + -c module + Cipher module to use. Default: %(ciphermodule)s + + --aslong + -l + Print the encoded message blocks as long integers instead of base64 + encoded strings + + --help + -h + Print this help message +''' + + ciphermodule = 'AES' + aslong = 0 + + def usage(code, msg=None): + if msg: + print(msg) + print(usagemsg % {'program': sys.argv[0], + 'ciphermodule': ciphermodule}) + sys.exit(code) + + try: + opts, args = getopt.getopt(sys.argv[1:], + 'c:l', ['cipher=', 'aslong']) + except getopt.error as msg: + usage(1, msg) + + if args: + usage(1, 'Too many arguments') + + for opt, arg in opts: + if opt in ('-h', '--help'): + usage(0) + elif opt in ('-c', '--cipher'): + ciphermodule = arg + elif opt in ('-l', '--aslong'): + aslong = 1 + + # ugly hack to force __import__ to give us the end-path module + module = __import__('Crypto.Cipher.'+ciphermodule, None, None, ['new']) + + x = AllOrNothing(module) + print('Original text:\n==========') + print(__doc__) + print('==========') + msgblocks = x.digest(b(__doc__)) + print('message blocks:') + for i, blk in zip(list(range(len(msgblocks))), msgblocks): + # base64 adds a trailing newline + print(' %3d' % i, end=' ') + if aslong: + print(bytes_to_long(blk)) + else: + print(base64.encodestring(blk)[:-1]) + # + # get a new undigest-only object so there's no leakage + y = AllOrNothing(module) + text = y.undigest(msgblocks) + if text == b(__doc__): + print('They match!') + else: + print('They differ!') diff --git a/Lib/site-packages/Crypto/Protocol/Chaffing.py b/Lib/site-packages/Crypto/Protocol/Chaffing.py new file mode 100644 index 0000000..373eabb --- /dev/null +++ b/Lib/site-packages/Crypto/Protocol/Chaffing.py @@ -0,0 +1,245 @@ +# +# Chaffing.py : chaffing & winnowing support +# +# Part of the Python Cryptography Toolkit +# +# Written by Andrew M. Kuchling, Barry A. Warsaw, and others +# +# =================================================================== +# The contents of this file are dedicated to the public domain. 
To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== +# +"""This file implements the chaffing algorithm. + +Winnowing and chaffing is a technique for enhancing privacy without requiring +strong encryption. In short, the technique takes a set of authenticated +message blocks (the wheat) and adds a number of chaff blocks which have +randomly chosen data and MAC fields. This means that to an adversary, the +chaff blocks look as valid as the wheat blocks, and so the authentication +would have to be performed on every block. By tailoring the number of chaff +blocks added to the message, the sender can make breaking the message +computationally infeasible. There are many other interesting properties of +the winnow/chaff technique. + +For example, say Alice is sending a message to Bob. She packetizes the +message and performs an all-or-nothing transformation on the packets. Then +she authenticates each packet with a message authentication code (MAC). The +MAC is a hash of the data packet, and there is a secret key which she must +share with Bob (key distribution is an exercise left to the reader). She then +adds a serial number to each packet, and sends the packets to Bob. + +Bob receives the packets, and using the shared secret authentication key, +authenticates the MACs for each packet. Those packets that have bad MACs are +simply discarded. The remainder are sorted by serial number, and passed +through the reverse all-or-nothing transform. The transform means that an +eavesdropper (say Eve) must acquire all the packets before any of the data can +be read. If even one packet is missing, the data is useless. + +There's one twist: by adding chaff packets, Alice and Bob can make Eve's job +much harder, since Eve now has to break the shared secret key, or try every +combination of wheat and chaff packet to read any of the message. The cool +thing is that Bob doesn't need to add any additional code; the chaff packets +are already filtered out because their MACs don't match (in all likelihood -- +since the data and MACs for the chaff packets are randomly chosen it is +possible, but very unlikely that a chaff MAC will match the chaff data). And +Alice need not even be the party adding the chaff! She could be completely +unaware that a third party, say Charles, is adding chaff packets to her +messages as they are transmitted. + +For more information on winnowing and chaffing see this paper: + +Ronald L. Rivest, "Chaffing and Winnowing: Confidentiality without Encryption" +http://theory.lcs.mit.edu/~rivest/chaffing.txt + +""" + +__revision__ = "$Id$" + +from Crypto.Util.number import bytes_to_long + +class Chaff: + """Class implementing the chaff adding algorithm. 
+ + Methods for subclasses: + + _randnum(size): + Returns a randomly generated number with a byte-length equal + to size. Subclasses can use this to implement better random + data and MAC generating algorithms. The default algorithm is + probably not very cryptographically secure. It is most + important that the chaff data does not contain any patterns + that can be used to discern it from wheat data without running + the MAC. + + """ + + def __init__(self, factor=1.0, blocksper=1): + """Chaff(factor:float, blocksper:int) + + factor is the number of message blocks to add chaff to, + expressed as a percentage between 0.0 and 1.0. blocksper is + the number of chaff blocks to include for each block being + chaffed. Thus the defaults add one chaff block to every + message block. By changing the defaults, you can adjust how + computationally difficult it could be for an adversary to + brute-force crack the message. The difficulty is expressed + as: + + pow(blocksper, int(factor * number-of-blocks)) + + For ease of implementation, when factor < 1.0, only the first + int(factor*number-of-blocks) message blocks are chaffed. + """ + + if not (0.0<=factor<=1.0): + raise ValueError("'factor' must be between 0.0 and 1.0") + if blocksper < 0: + raise ValueError("'blocksper' must be zero or more") + + self.__factor = factor + self.__blocksper = blocksper + + + def chaff(self, blocks): + """chaff( [(serial-number:int, data:string, MAC:string)] ) + : [(int, string, string)] + + Add chaff to message blocks. blocks is a list of 3-tuples of the + form (serial-number, data, MAC). + + Chaff is created by choosing a random number of the same + byte-length as data, and another random number of the same + byte-length as MAC. The message block's serial number is + placed on the chaff block and all the packet's chaff blocks + are randomly interspersed with the single wheat block. This + method then returns a list of 3-tuples of the same form. + Chaffed blocks will contain multiple instances of 3-tuples + with the same serial number, but the only way to figure out + which blocks are wheat and which are chaff is to perform the + MAC hash and compare values. + """ + + chaffedblocks = [] + + # count is the number of blocks to add chaff to. blocksper is the + # number of chaff blocks to add per message block that is being + # chaffed. 
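As the docstring above notes, the receiver separates wheat from chaff simply by recomputing the MAC for each block and discarding mismatches. A minimal winnowing sketch (not part of this module; it assumes the same HMAC-SHA construction used in the demo at the end of this file):

```python
from Crypto.Hash import HMAC, SHA

def winnow(key, blocks):
    # Keep only the (serial, data, MAC) triples whose MAC verifies under
    # the shared key; everything else is chaff and is dropped.
    wheat = []
    for serial, data, mac in blocks:
        if HMAC.new(key, data, digestmod=SHA).digest() == mac:
            wheat.append((serial, data))
    # Sort by serial number so the surviving blocks can be fed back into
    # the reverse all-or-nothing transform in the right order.
    return [data for serial, data in sorted(wheat)]
```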
+ count = len(blocks) * self.__factor + blocksper = list(range(self.__blocksper)) + for i, wheat in zip(list(range(len(blocks))), blocks): + # it shouldn't matter which of the n blocks we add chaff to, so for + # ease of implementation, we'll just add them to the first count + # blocks + if i < count: + serial, data, mac = wheat + datasize = len(data) + macsize = len(mac) + addwheat = 1 + # add chaff to this block + for j in blocksper: + import sys + chaffdata = self._randnum(datasize) + chaffmac = self._randnum(macsize) + chaff = (serial, chaffdata, chaffmac) + # mix up the order, if the 5th bit is on then put the + # wheat on the list + if addwheat and bytes_to_long(self._randnum(16)) & 0x40: + chaffedblocks.append(wheat) + addwheat = 0 + chaffedblocks.append(chaff) + if addwheat: + chaffedblocks.append(wheat) + else: + # just add the wheat + chaffedblocks.append(wheat) + return chaffedblocks + + def _randnum(self, size): + from Crypto import Random + return Random.new().read(size) + + +if __name__ == '__main__': + text = """\ +We hold these truths to be self-evident, that all men are created equal, that +they are endowed by their Creator with certain unalienable Rights, that among +these are Life, Liberty, and the pursuit of Happiness. That to secure these +rights, Governments are instituted among Men, deriving their just powers from +the consent of the governed. That whenever any Form of Government becomes +destructive of these ends, it is the Right of the People to alter or to +abolish it, and to institute new Government, laying its foundation on such +principles and organizing its powers in such form, as to them shall seem most +likely to effect their Safety and Happiness. +""" + print('Original text:\n==========') + print(text) + print('==========') + + # first transform the text into packets + blocks = [] ; size = 40 + for i in range(0, len(text), size): + blocks.append( text[i:i+size] ) + + # now get MACs for all the text blocks. The key is obvious... + print('Calculating MACs...') + from Crypto.Hash import HMAC, SHA + key = 'Jefferson' + macs = [HMAC.new(key, block, digestmod=SHA).digest() + for block in blocks] + + assert len(blocks) == len(macs) + + # put these into a form acceptable as input to the chaffing procedure + source = [] + m = list(zip(list(range(len(blocks))), blocks, macs)) + print(m) + for i, data, mac in m: + source.append((i, data, mac)) + + # now chaff these + print('Adding chaff...') + c = Chaff(factor=0.5, blocksper=2) + chaffed = c.chaff(source) + + from base64 import encodestring + + # print the chaffed message blocks. 
meanwhile, separate the wheat from + # the chaff + + wheat = [] + print('chaffed message blocks:') + for i, data, mac in chaffed: + # do the authentication + h = HMAC.new(key, data, digestmod=SHA) + pmac = h.digest() + if pmac == mac: + tag = '-->' + wheat.append(data) + else: + tag = ' ' + # base64 adds a trailing newline + print(tag, '%3d' % i, \ + repr(data), encodestring(mac)[:-1]) + + # now decode the message packets and check it against the original text + print('Undigesting wheat...') + # PY3K: This is meant to be text, do not change to bytes (data) + newtext = "".join(wheat) + if newtext == text: + print('They match!') + else: + print('They differ!') diff --git a/Lib/site-packages/Crypto/Protocol/KDF.py b/Lib/site-packages/Crypto/Protocol/KDF.py new file mode 100644 index 0000000..a40c501 --- /dev/null +++ b/Lib/site-packages/Crypto/Protocol/KDF.py @@ -0,0 +1,123 @@ +# +# KDF.py : a collection of Key Derivation Functions +# +# Part of the Python Cryptography Toolkit +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""This file contains a collection of standard key derivation functions. + +A key derivation function derives one or more secondary secret keys from +one primary secret (a master key or a pass phrase). + +This is typically done to insulate the secondary keys from each other, +to avoid that leakage of a secondary key compromises the security of the +master key, or to thwart attacks on pass phrases (e.g. via rainbow tables). + +:undocumented: __revision__ +""" + +__revision__ = "$Id$" + +import math +import struct + +from Crypto.Util.py3compat import * +from Crypto.Hash import SHA as SHA1, HMAC +from Crypto.Util.strxor import strxor + +def PBKDF1(password, salt, dkLen, count=1000, hashAlgo=None): + """Derive one key from a password (or passphrase). + + This function performs key derivation according an old version of + the PKCS#5 standard (v1.5). + + This algorithm is called ``PBKDF1``. Even though it is still described + in the latest version of the PKCS#5 standard (version 2, or RFC2898), + newer applications should use the more secure and versatile `PBKDF2` instead. + + :Parameters: + password : string + The secret password or pass phrase to generate the key from. + salt : byte string + An 8 byte string to use for better protection from dictionary attacks. + This value does not need to be kept secret, but it should be randomly + chosen for each derivation. + dkLen : integer + The length of the desired key. Default is 16 bytes, suitable for instance for `Crypto.Cipher.AES`. + count : integer + The number of iterations to carry out. 
It's recommended to use at least 1000.
+ hashAlgo : module
+ The hash algorithm to use, as a module or an object from the `Crypto.Hash` package.
+ The digest length must be no shorter than ``dkLen``.
+ The default algorithm is `SHA1`.
+
+ :Return: A byte string of length `dkLen` that can be used as key.
+ """
+ if not hashAlgo:
+ hashAlgo = SHA1
+ password = tobytes(password)
+ pHash = hashAlgo.new(password+salt)
+ digest = pHash.digest_size
+ if dkLen>digest:
+ raise ValueError("Selected hash algorithm has a too short digest (%d bytes)." % digest)
+ if len(salt)!=8:
+ raise ValueError("Salt is not 8 bytes long.")
+ for i in range(count-1):
+ pHash = pHash.new(pHash.digest())
+ return pHash.digest()[:dkLen]
+
+def PBKDF2(password, salt, dkLen=16, count=1000, prf=None):
+ """Derive one or more keys from a password (or passphrase).
+
+ This performs key derivation according to the PKCS#5 standard (v2.0),
+ by means of the ``PBKDF2`` algorithm.
+
+ :Parameters:
+ password : string
+ The secret password or pass phrase to generate the key from.
+ salt : string
+ A string to use for better protection from dictionary attacks.
+ This value does not need to be kept secret, but it should be randomly
+ chosen for each derivation. It is recommended to be at least 8 bytes long.
+ dkLen : integer
+ The cumulative length of the desired keys. Default is 16 bytes, suitable for instance for `Crypto.Cipher.AES`.
+ count : integer
+ The number of iterations to carry out. It's recommended to use at least 1000.
+ prf : callable
+ A pseudorandom function. It must be a function that returns a pseudorandom string
+ from two parameters: a secret and a salt. If not specified, HMAC-SHA1 is used.
+
+ :Return: A byte string of length `dkLen` that can be used as key material.
+ If you wanted multiple keys, just break up this string into segments of the desired length.
+"""
+ password = tobytes(password)
+ if prf is None:
+ prf = lambda p,s: HMAC.new(p,s,SHA1).digest()
+ key = b('')
+ i = 1
+ while len(key)<dkLen:
+ U = previousU = prf(password,salt+struct.pack(">I", i))
+ for j in range(count-1):
+ previousU = t = prf(password,previousU)
+ U = strxor(U,t)
+ key += U
+ i = i + 1
+ return key[:dkLen]
+
diff --git a/Lib/site-packages/Crypto/Protocol/__init__.py b/Lib/site-packages/Crypto/Protocol/__init__.py
new file mode 100644
index 0000000..cacc685
--- /dev/null
+++ b/Lib/site-packages/Crypto/Protocol/__init__.py
@@ -0,0 +1,41 @@
+# -*- coding: utf-8 -*-
+#
+# ===================================================================
+# The contents of this file are dedicated to the public domain. To
+# the extent that dedication to the public domain is not available,
+# everyone is granted a worldwide, perpetual, royalty-free,
+# non-exclusive license to exercise all rights associated with the
+# contents of this file for any purpose whatsoever.
+# No rights are reserved.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
+# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+# ===================================================================
+
+"""Cryptographic protocols
+
+Implements various cryptographic protocols. (Don't expect to find
+network protocols here.)
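A brief, hypothetical usage sketch of the `PBKDF2` function added above; the passphrase, the iteration count, and the 16-byte output length are arbitrary example choices:

```python
from Crypto.Protocol.KDF import PBKDF2
from Crypto.Cipher import AES
from Crypto import Random

passphrase = "correct horse battery staple"            # example secret only
salt = Random.new().read(8)                            # fresh random salt per derivation
key = PBKDF2(passphrase, salt, dkLen=16, count=5000)   # 16 bytes, enough for AES-128

iv = Random.new().read(AES.block_size)
cipher = AES.new(key, AES.MODE_CFB, iv)
ciphertext = cipher.encrypt(b"attack at dawn")
```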
+ +Crypto.Protocol.AllOrNothing + Transforms a message into a set of message blocks, such that the blocks + can be recombined to get the message back. + +Crypto.Protocol.Chaffing + Takes a set of authenticated message blocks (the wheat) and adds a number + of randomly generated blocks (the chaff). + +Crypto.Protocol.KDF + A collection of standard key derivation functions. + +:undocumented: __revision__ +""" + +__all__ = ['AllOrNothing', 'Chaffing', 'KDF'] +__revision__ = "$Id$" diff --git a/Lib/site-packages/Crypto/PublicKey/DSA.py b/Lib/site-packages/Crypto/PublicKey/DSA.py new file mode 100644 index 0000000..03f5883 --- /dev/null +++ b/Lib/site-packages/Crypto/PublicKey/DSA.py @@ -0,0 +1,379 @@ +# -*- coding: utf-8 -*- +# +# PublicKey/DSA.py : DSA signature primitive +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""DSA public-key signature algorithm. + +DSA_ is a widespread public-key signature algorithm. Its security is +based on the discrete logarithm problem (DLP_). Given a cyclic +group, a generator *g*, and an element *h*, it is hard +to find an integer *x* such that *g^x = h*. The problem is believed +to be difficult, and it has been proved such (and therefore secure) for +more than 30 years. + +The group is actually a sub-group over the integers modulo *p*, with *p* prime. +The sub-group order is *q*, which is prime too; it always holds that *(p-1)* is a multiple of *q*. +The cryptographic strength is linked to the magnitude of *p* and *q*. +The signer holds a value *x* (*0>> from Crypto.Random import random + >>> from Crypto.PublicKey import DSA + >>> from Crypto.Hash import SHA + >>> + >>> message = "Hello" + >>> key = DSA.generate(1024) + >>> h = SHA.new(message).digest() + >>> k = random.StrongRandom().randint(1,key.q-1) + >>> sig = key.sign(h,k) + >>> ... + >>> if key.verify(h,sig): + >>> print "OK" + >>> else: + >>> print "Incorrect signature" + +.. _DSA: http://en.wikipedia.org/wiki/Digital_Signature_Algorithm +.. _DLP: http://www.cosic.esat.kuleuven.be/publications/talk-78.pdf +.. _ECRYPT: http://www.ecrypt.eu.org/documents/D.SPA.17.pdf +""" + +__revision__ = "$Id$" + +__all__ = ['generate', 'construct', 'error', 'DSAImplementation', '_DSAobj'] + +import sys +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * + +from Crypto.PublicKey import _DSA, _slowmath, pubkey +from Crypto import Random + +try: + from Crypto.PublicKey import _fastmath +except ImportError: + _fastmath = None + +class _DSAobj(pubkey.pubkey): + """Class defining an actual DSA key. 
+ + :undocumented: __getstate__, __setstate__, __repr__, __getattr__ + """ + #: Dictionary of DSA parameters. + #: + #: A public key will only have the following entries: + #: + #: - **y**, the public key. + #: - **g**, the generator. + #: - **p**, the modulus. + #: - **q**, the order of the sub-group. + #: + #: A private key will also have: + #: + #: - **x**, the private key. + keydata = ['y', 'g', 'p', 'q', 'x'] + + def __init__(self, implementation, key): + self.implementation = implementation + self.key = key + + def __getattr__(self, attrname): + if attrname in self.keydata: + # For backward compatibility, allow the user to get (not set) the + # DSA key parameters directly from this object. + return getattr(self.key, attrname) + else: + raise AttributeError("%s object has no %r attribute" % (self.__class__.__name__, attrname,)) + + def sign(self, M, K): + """Sign a piece of data with DSA. + + :Parameter M: The piece of data to sign with DSA. It may + not be longer in bit size than the sub-group order (*q*). + :Type M: byte string or long + + :Parameter K: A secret number, chosen randomly in the closed + range *[1,q-1]*. + :Type K: long (recommended) or byte string (not recommended) + + :attention: selection of *K* is crucial for security. Generating a + random number larger than *q* and taking the modulus by *q* is + **not** secure, since smaller values will occur more frequently. + Generating a random number systematically smaller than *q-1* + (e.g. *floor((q-1)/8)* random bytes) is also **not** secure. In general, + it shall not be possible for an attacker to know the value of `any + bit of K`__. + + :attention: The number *K* shall not be reused for any other + operation and shall be discarded immediately. + + :attention: M must be a digest cryptographic hash, otherwise + an attacker may mount an existential forgery attack. + + :Return: A tuple with 2 longs. + + .. __: http://www.di.ens.fr/~pnguyen/pub_NgSh00.htm + """ + return pubkey.pubkey.sign(self, M, K) + + def verify(self, M, signature): + """Verify the validity of a DSA signature. + + :Parameter M: The expected message. + :Type M: byte string or long + + :Parameter signature: The DSA signature to verify. + :Type signature: A tuple with 2 longs as return by `sign` + + :Return: True if the signature is correct, False otherwise. 
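Pulling the sign()/verify() contracts above together, a short illustrative sketch (the 1024-bit key size and the messages are arbitrary; a fresh, secret k is drawn for every signature, as the warnings above require):

```python
from Crypto.PublicKey import DSA
from Crypto.Hash import SHA
from Crypto.Random import random

key = DSA.generate(1024)
h = SHA.new(b"Hello").digest()                     # always sign a digest, not raw data
k = random.StrongRandom().randint(1, key.q - 1)    # one-time secret per signature
sig = key.sign(h, k)

assert key.verify(h, sig)                                   # genuine signature passes
assert not key.verify(SHA.new(b"Hullo").digest(), sig)      # altered message fails
```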
+ """ + return pubkey.pubkey.verify(self, M, signature) + + def _encrypt(self, c, K): + raise TypeError("DSA cannot encrypt") + + def _decrypt(self, c): + raise TypeError("DSA cannot decrypt") + + def _blind(self, m, r): + raise TypeError("DSA cannot blind") + + def _unblind(self, m, r): + raise TypeError("DSA cannot unblind") + + def _sign(self, m, k): + return self.key._sign(m, k) + + def _verify(self, m, sig): + (r, s) = sig + return self.key._verify(m, r, s) + + def has_private(self): + return self.key.has_private() + + def size(self): + return self.key.size() + + def can_blind(self): + return False + + def can_encrypt(self): + return False + + def can_sign(self): + return True + + def publickey(self): + return self.implementation.construct((self.key.y, self.key.g, self.key.p, self.key.q)) + + def __getstate__(self): + d = {} + for k in self.keydata: + try: + d[k] = getattr(self.key, k) + except AttributeError: + pass + return d + + def __setstate__(self, d): + if not hasattr(self, 'implementation'): + self.implementation = DSAImplementation() + t = [] + for k in self.keydata: + if k not in d: + break + t.append(d[k]) + self.key = self.implementation._math.dsa_construct(*tuple(t)) + + def __repr__(self): + attrs = [] + for k in self.keydata: + if k == 'p': + attrs.append("p(%d)" % (self.size()+1,)) + elif hasattr(self.key, k): + attrs.append(k) + if self.has_private(): + attrs.append("private") + # PY3K: This is meant to be text, do not change to bytes (data) + return "<%s @0x%x %s>" % (self.__class__.__name__, id(self), ",".join(attrs)) + +class DSAImplementation(object): + """ + A DSA key factory. + + This class is only internally used to implement the methods of the + `Crypto.PublicKey.DSA` module. + """ + + def __init__(self, **kwargs): + """Create a new DSA key factory. + + :Keywords: + use_fast_math : bool + Specify which mathematic library to use: + + - *None* (default). Use fastest math available. + - *True* . Use fast math. + - *False* . Use slow math. + default_randfunc : callable + Specify how to collect random data: + + - *None* (default). Use Random.new().read(). + - not *None* . Use the specified function directly. + :Raise RuntimeError: + When **use_fast_math** =True but fast math is not available. + """ + use_fast_math = kwargs.get('use_fast_math', None) + if use_fast_math is None: # Automatic + if _fastmath is not None: + self._math = _fastmath + else: + self._math = _slowmath + + elif use_fast_math: # Explicitly select fast math + if _fastmath is not None: + self._math = _fastmath + else: + raise RuntimeError("fast math module not available") + + else: # Explicitly select slow math + self._math = _slowmath + + self.error = self._math.error + + # 'default_randfunc' parameter: + # None (default) - use Random.new().read + # not None - use the specified function + self._default_randfunc = kwargs.get('default_randfunc', None) + self._current_randfunc = None + + def _get_randfunc(self, randfunc): + if randfunc is not None: + return randfunc + elif self._current_randfunc is None: + self._current_randfunc = Random.new().read + return self._current_randfunc + + def generate(self, bits, randfunc=None, progress_func=None): + """Randomly generate a fresh, new DSA key. + + :Parameters: + bits : int + Key length, or size (in bits) of the DSA modulus + *p*. + It must be a multiple of 64, in the closed + interval [512,1024]. + randfunc : callable + Random number generation function; it should accept + a single integer N and return a string of random data + N bytes long. 
+ If not specified, a new one will be instantiated + from ``Crypto.Random``. + progress_func : callable + Optional function that will be called with a short string + containing the key parameter currently being generated; + it's useful for interactive applications where a user is + waiting for a key to be generated. + + :attention: You should always use a cryptographically secure random number generator, + such as the one defined in the ``Crypto.Random`` module; **don't** just use the + current time and the ``random`` module. + + :Return: A DSA key object (`_DSAobj`). + + :Raise ValueError: + When **bits** is too little, too big, or not a multiple of 64. + """ + + # Check against FIPS 186-2, which says that the size of the prime p + # must be a multiple of 64 bits between 512 and 1024 + for i in (0, 1, 2, 3, 4, 5, 6, 7, 8): + if bits == 512 + 64*i: + return self._generate(bits, randfunc, progress_func) + + # The March 2006 draft of FIPS 186-3 also allows 2048 and 3072-bit + # primes, but only with longer q values. Since the current DSA + # implementation only supports a 160-bit q, we don't support larger + # values. + raise ValueError("Number of bits in p must be a multiple of 64 between 512 and 1024, not %d bits" % (bits,)) + + def _generate(self, bits, randfunc=None, progress_func=None): + rf = self._get_randfunc(randfunc) + obj = _DSA.generate_py(bits, rf, progress_func) # TODO: Don't use legacy _DSA module + key = self._math.dsa_construct(obj.y, obj.g, obj.p, obj.q, obj.x) + return _DSAobj(self, key) + + def construct(self, tup): + """Construct a DSA key from a tuple of valid DSA components. + + The modulus *p* must be a prime. + + The following equations must apply: + + - p-1 = 0 mod q + - g^x = y mod p + - 0 < x < q + - 1 < g < p + + :Parameters: + tup : tuple + A tuple of long integers, with 4 or 5 items + in the following order: + + 1. Public key (*y*). + 2. Sub-group generator (*g*). + 3. Modulus, finite field order (*p*). + 4. Sub-group order (*q*). + 5. Private key (*x*). Optional. + + :Return: A DSA key object (`_DSAobj`). + """ + key = self._math.dsa_construct(*tup) + return _DSAobj(self, key) + +_impl = DSAImplementation() +generate = _impl.generate +construct = _impl.construct +error = _impl.error + +# vim:set ts=4 sw=4 sts=4 expandtab: + diff --git a/Lib/site-packages/Crypto/PublicKey/ElGamal.py b/Lib/site-packages/Crypto/PublicKey/ElGamal.py new file mode 100644 index 0000000..99af71c --- /dev/null +++ b/Lib/site-packages/Crypto/PublicKey/ElGamal.py @@ -0,0 +1,373 @@ +# +# ElGamal.py : ElGamal encryption/decryption and signatures +# +# Part of the Python Cryptography Toolkit +# +# Originally written by: A.M. Kuchling +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""ElGamal public-key algorithm (randomized encryption and signature). + +Signature algorithm +------------------- +The security of the ElGamal signature scheme is based (like DSA) on the discrete +logarithm problem (DLP_). Given a cyclic group, a generator *g*, +and an element *h*, it is hard to find an integer *x* such that *g^x = h*. + +The group is the largest multiplicative sub-group of the integers modulo *p*, +with *p* prime. +The signer holds a value *x* (*0>> from Crypto import Random + >>> from Crypto.Random import random + >>> from Crypto.PublicKey import ElGamal + >>> from Crypto.Util.number import GCD + >>> from Crypto.Hash import SHA + >>> + >>> message = "Hello" + >>> key = ElGamal.generate(1024, Random.new().read) + >>> h = SHA.new(message).digest() + >>> while 1: + >>> k = random.StrongRandom().randint(1,key.p-1) + >>> if GCD(k,key.p-1)==1: break + >>> sig = key.sign(h,k) + >>> ... + >>> if key.verify(h,sig): + >>> print "OK" + >>> else: + >>> print "Incorrect signature" + +.. _DLP: http://www.cosic.esat.kuleuven.be/publications/talk-78.pdf +.. _CDH: http://en.wikipedia.org/wiki/Computational_Diffie%E2%80%93Hellman_assumption +.. _ECRYPT: http://www.ecrypt.eu.org/documents/D.SPA.17.pdf +""" + +__revision__ = "$Id$" + +__all__ = ['generate', 'construct', 'error', 'ElGamalobj'] + +from Crypto.PublicKey.pubkey import * +from Crypto.Util import number + +class error (Exception): + pass + +# Generate an ElGamal key with N bits +def generate(bits, randfunc, progress_func=None): + """Randomly generate a fresh, new ElGamal key. + + The key will be safe for use for both encryption and signature + (although it should be used for **only one** purpose). + + :Parameters: + bits : int + Key length, or size (in bits) of the modulus *p*. + Recommended value is 2048. + randfunc : callable + Random number generation function; it should accept + a single integer N and return a string of random data + N bytes long. + progress_func : callable + Optional function that will be called with a short string + containing the key parameter currently being generated; + it's useful for interactive applications where a user is + waiting for a key to be generated. + + :attention: You should always use a cryptographically secure random number generator, + such as the one defined in the ``Crypto.Random`` module; **don't** just use the + current time and the ``random`` module. + + :Return: An ElGamal key object (`ElGamalobj`). 
+ """ + obj=ElGamalobj() + # Generate a safe prime p + # See Algorithm 4.86 in Handbook of Applied Cryptography + if progress_func: + progress_func('p\n') + while 1: + q = bignum(getPrime(bits-1, randfunc)) + obj.p = 2*q+1 + if number.isPrime(obj.p, randfunc=randfunc): + break + # Generate generator g + # See Algorithm 4.80 in Handbook of Applied Cryptography + # Note that the order of the group is n=p-1=2q, where q is prime + if progress_func: + progress_func('g\n') + while 1: + # We must avoid g=2 because of Bleichenbacher's attack described + # in "Generating ElGamal signatures without knowning the secret key", + # 1996 + # + obj.g = number.getRandomRange(3, obj.p, randfunc) + safe = 1 + if pow(obj.g, 2, obj.p)==1: + safe=0 + if safe and pow(obj.g, q, obj.p)==1: + safe=0 + # Discard g if it divides p-1 because of the attack described + # in Note 11.67 (iii) in HAC + if safe and divmod(obj.p-1, obj.g)[1]==0: + safe=0 + # g^{-1} must not divide p-1 because of Khadir's attack + # described in "Conditions of the generator for forging ElGamal + # signature", 2011 + ginv = number.inverse(obj.g, obj.p) + if safe and divmod(obj.p-1, ginv)[1]==0: + safe=0 + if safe: + break + # Generate private key x + if progress_func: + progress_func('x\n') + obj.x=number.getRandomRange(2, obj.p-1, randfunc) + # Generate public key y + if progress_func: + progress_func('y\n') + obj.y = pow(obj.g, obj.x, obj.p) + return obj + +def construct(tup): + """Construct an ElGamal key from a tuple of valid ElGamal components. + + The modulus *p* must be a prime. + + The following conditions must apply: + + - 1 < g < p-1 + - g^{p-1} = 1 mod p + - 1 < x < p-1 + - g^x = y mod p + + :Parameters: + tup : tuple + A tuple of long integers, with 3 or 4 items + in the following order: + + 1. Modulus (*p*). + 2. Generator (*g*). + 3. Public key (*y*). + 4. Private key (*x*). Optional. + + :Return: An ElGamal key object (`ElGamalobj`). + """ + + obj=ElGamalobj() + if len(tup) not in [3,4]: + raise ValueError('argument for construct() wrong length') + for i in range(len(tup)): + field = obj.keydata[i] + setattr(obj, field, tup[i]) + return obj + +class ElGamalobj(pubkey): + """Class defining an ElGamal key. + + :undocumented: __getstate__, __setstate__, __repr__, __getattr__ + """ + + #: Dictionary of ElGamal parameters. + #: + #: A public key will only have the following entries: + #: + #: - **y**, the public key. + #: - **g**, the generator. + #: - **p**, the modulus. + #: + #: A private key will also have: + #: + #: - **x**, the private key. + keydata=['p', 'g', 'y', 'x'] + + def encrypt(self, plaintext, K): + """Encrypt a piece of data with ElGamal. + + :Parameter plaintext: The piece of data to encrypt with ElGamal. + It must be numerically smaller than the module (*p*). + :Type plaintext: byte string or long + + :Parameter K: A secret number, chosen randomly in the closed + range *[1,p-2]*. + :Type K: long (recommended) or byte string (not recommended) + + :Return: A tuple with two items. Each item is of the same type as the + plaintext (string or long). + + :attention: selection of *K* is crucial for security. Generating a + random number larger than *p-1* and taking the modulus by *p-1* is + **not** secure, since smaller values will occur more frequently. + Generating a random number systematically smaller than *p-1* + (e.g. *floor((p-1)/8)* random bytes) is also **not** secure. + In general, it shall not be possible for an attacker to know + the value of any bit of K. 
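The encrypt()/decrypt() primitives of this class are textbook ElGamal, so the algebra can be checked with a pure-integer sketch. The parameters below are tiny, insecure toy values chosen only to keep the numbers readable (real key generation, as in generate() above, also screens the generator):

```python
# Toy ElGamal round trip, illustrating the equations only.
p, g = 2579, 3           # tiny public domain parameters (p prime)
x = 765                  # private key
y = pow(g, x, p)         # public key, y = g^x mod p

M, K = 1299, 853         # message (0 < M < p) and one-time secret K
a = pow(g, K, p)                 # first ciphertext component, a = g^K mod p
b = (M * pow(y, K, p)) % p       # second component, b = M * y^K mod p

# Decryption: M = b * (a^x)^(-1) mod p; p is prime, so Fermat gives the inverse.
recovered = (b * pow(pow(a, x, p), p - 2, p)) % p
assert recovered == M
```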
+ + :attention: The number *K* shall not be reused for any other + operation and shall be discarded immediately. + """ + return pubkey.encrypt(self, plaintext, K) + + def decrypt(self, ciphertext): + """Decrypt a piece of data with ElGamal. + + :Parameter ciphertext: The piece of data to decrypt with ElGamal. + :Type ciphertext: byte string, long or a 2-item tuple as returned + by `encrypt` + + :Return: A byte string if ciphertext was a byte string or a tuple + of byte strings. A long otherwise. + """ + return pubkey.decrypt(self, ciphertext) + + def sign(self, M, K): + """Sign a piece of data with ElGamal. + + :Parameter M: The piece of data to sign with ElGamal. It may + not be longer in bit size than *p-1*. + :Type M: byte string or long + + :Parameter K: A secret number, chosen randomly in the closed + range *[1,p-2]* and such that *gcd(k,p-1)=1*. + :Type K: long (recommended) or byte string (not recommended) + + :attention: selection of *K* is crucial for security. Generating a + random number larger than *p-1* and taking the modulus by *p-1* is + **not** secure, since smaller values will occur more frequently. + Generating a random number systematically smaller than *p-1* + (e.g. *floor((p-1)/8)* random bytes) is also **not** secure. + In general, it shall not be possible for an attacker to know + the value of any bit of K. + + :attention: The number *K* shall not be reused for any other + operation and shall be discarded immediately. + + :attention: M must be be a cryptographic hash, otherwise an + attacker may mount an existential forgery attack. + + :Return: A tuple with 2 longs. + """ + return pubkey.sign(self, M, K) + + def verify(self, M, signature): + """Verify the validity of an ElGamal signature. + + :Parameter M: The expected message. + :Type M: byte string or long + + :Parameter signature: The ElGamal signature to verify. + :Type signature: A tuple with 2 longs as return by `sign` + + :Return: True if the signature is correct, False otherwise. + """ + return pubkey.verify(self, M, signature) + + def _encrypt(self, M, K): + a=pow(self.g, K, self.p) + b=( M*pow(self.y, K, self.p) ) % self.p + return ( a,b ) + + def _decrypt(self, M): + if (not hasattr(self, 'x')): + raise TypeError('Private key not available in this object') + ax=pow(M[0], self.x, self.p) + plaintext=(M[1] * inverse(ax, self.p ) ) % self.p + return plaintext + + def _sign(self, M, K): + if (not hasattr(self, 'x')): + raise TypeError('Private key not available in this object') + p1=self.p-1 + if (GCD(K, p1)!=1): + raise ValueError('Bad K value: GCD(K,p-1)!=1') + a=pow(self.g, K, self.p) + t=(M-self.x*a) % p1 + while t<0: t=t+p1 + b=(t*inverse(K, p1)) % p1 + return (a, b) + + def _verify(self, M, sig): + if sig[0]<1 or sig[0]>self.p-1: + return 0 + v1=pow(self.y, sig[0], self.p) + v1=(v1*pow(sig[0], sig[1], self.p)) % self.p + v2=pow(self.g, M, self.p) + if v1==v2: + return 1 + return 0 + + def size(self): + return number.size(self.p) - 1 + + def has_private(self): + if hasattr(self, 'x'): + return 1 + else: + return 0 + + def publickey(self): + return construct((self.p, self.g, self.y)) + + +object=ElGamalobj diff --git a/Lib/site-packages/Crypto/PublicKey/RSA.py b/Lib/site-packages/Crypto/PublicKey/RSA.py new file mode 100644 index 0000000..26097f8 --- /dev/null +++ b/Lib/site-packages/Crypto/PublicKey/RSA.py @@ -0,0 +1,719 @@ +# -*- coding: utf-8 -*- +# +# PublicKey/RSA.py : RSA public key primitive +# +# Written in 2008 by Dwayne C. 
Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""RSA public-key cryptography algorithm (signature and encryption). + +RSA_ is the most widespread and used public key algorithm. Its security is +based on the difficulty of factoring large integers. The algorithm has +withstood attacks for 30 years, and it is therefore considered reasonably +secure for new designs. + +The algorithm can be used for both confidentiality (encryption) and +authentication (digital signature). It is worth noting that signing and +decryption are significantly slower than verification and encryption. +The cryptograhic strength is primarily linked to the length of the modulus *n*. +In 2012, a sufficient length is deemed to be 2048 bits. For more information, +see the most recent ECRYPT_ report. + +Both RSA ciphertext and RSA signature are as big as the modulus *n* (256 +bytes if *n* is 2048 bit long). + +This module provides facilities for generating fresh, new RSA keys, constructing +them from known components, exporting them, and importing them. + + >>> from Crypto.PublicKey import RSA + >>> + >>> key = RSA.generate(2048) + >>> f = open('mykey.pem','w') + >>> f.write(RSA.exportKey('PEM')) + >>> f.close() + ... + >>> f = open('mykey.pem','r') + >>> key = RSA.importKey(f.read()) + +Even though you may choose to directly use the methods of an RSA key object +to perform the primitive cryptographic operations (e.g. `_RSAobj.encrypt`), +it is recommended to use one of the standardized schemes instead (like +`Crypto.Cipher.PKCS1_v1_5` or `Crypto.Signature.PKCS1_v1_5`). + +.. _RSA: http://en.wikipedia.org/wiki/RSA_%28algorithm%29 +.. _ECRYPT: http://www.ecrypt.eu.org/documents/D.SPA.17.pdf + +:sort: generate,construct,importKey,error +""" + +__revision__ = "$Id$" + +__all__ = ['generate', 'construct', 'error', 'importKey', 'RSAImplementation', '_RSAobj'] + +import sys +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * +from Crypto.Util.py3compat import * +#from Crypto.Util.python_compat import * +from Crypto.Util.number import getRandomRange, bytes_to_long, long_to_bytes + +from Crypto.PublicKey import _RSA, _slowmath, pubkey +from Crypto import Random + +from Crypto.Util.asn1 import DerObject, DerSequence, DerNull +import binascii +import struct + +from Crypto.Util.number import inverse + +from Crypto.Util.number import inverse + +try: + from Crypto.PublicKey import _fastmath +except ImportError: + _fastmath = None + +class _RSAobj(pubkey.pubkey): + """Class defining an actual RSA key. 
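Since the module docstring above steers users toward the standardized schemes rather than the raw encrypt()/decrypt() primitives of this class, here is a small, hypothetical sketch using `Crypto.Cipher.PKCS1_OAEP` (the key size and message are arbitrary):

```python
from Crypto.PublicKey import RSA
from Crypto.Cipher import PKCS1_OAEP

key = RSA.generate(2048)

encryptor = PKCS1_OAEP.new(key.publickey())    # anyone with the public half can encrypt
ciphertext = encryptor.encrypt(b"secret session key")

decryptor = PKCS1_OAEP.new(key)                # only the private key can decrypt
assert decryptor.decrypt(ciphertext) == b"secret session key"
```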
+ + :undocumented: __getstate__, __setstate__, __repr__, __getattr__ + """ + #: Dictionary of RSA parameters. + #: + #: A public key will only have the following entries: + #: + #: - **n**, the modulus. + #: - **e**, the public exponent. + #: + #: A private key will also have: + #: + #: - **d**, the private exponent. + #: - **p**, the first factor of n. + #: - **q**, the second factor of n. + #: - **u**, the CRT coefficient (1/p) mod q. + keydata = ['n', 'e', 'd', 'p', 'q', 'u'] + + def __init__(self, implementation, key, randfunc=None): + self.implementation = implementation + self.key = key + if randfunc is None: + randfunc = Random.new().read + self._randfunc = randfunc + + def __getattr__(self, attrname): + if attrname in self.keydata: + # For backward compatibility, allow the user to get (not set) the + # RSA key parameters directly from this object. + return getattr(self.key, attrname) + else: + raise AttributeError("%s object has no %r attribute" % (self.__class__.__name__, attrname,)) + + def encrypt(self, plaintext, K): + """Encrypt a piece of data with RSA. + + :Parameter plaintext: The piece of data to encrypt with RSA. It may not + be numerically larger than the RSA module (**n**). + :Type plaintext: byte string or long + + :Parameter K: A random parameter (*for compatibility only. This + value will be ignored*) + :Type K: byte string or long + + :attention: this function performs the plain, primitive RSA encryption + (*textbook*). In real applications, you always need to use proper + cryptographic padding, and you should not directly encrypt data with + this method. Failure to do so may lead to security vulnerabilities. + It is recommended to use modules + `Crypto.Cipher.PKCS1_OAEP` or `Crypto.Cipher.PKCS1_v1_5` instead. + + :Return: A tuple with two items. The first item is the ciphertext + of the same type as the plaintext (string or long). The second item + is always None. + """ + return pubkey.pubkey.encrypt(self, plaintext, K) + + def decrypt(self, ciphertext): + """Decrypt a piece of data with RSA. + + Decryption always takes place with blinding. + + :attention: this function performs the plain, primitive RSA decryption + (*textbook*). In real applications, you always need to use proper + cryptographic padding, and you should not directly decrypt data with + this method. Failure to do so may lead to security vulnerabilities. + It is recommended to use modules + `Crypto.Cipher.PKCS1_OAEP` or `Crypto.Cipher.PKCS1_v1_5` instead. + + :Parameter ciphertext: The piece of data to decrypt with RSA. It may + not be numerically larger than the RSA module (**n**). If a tuple, + the first item is the actual ciphertext; the second item is ignored. + + :Type ciphertext: byte string, long or a 2-item tuple as returned by + `encrypt` + + :Return: A byte string if ciphertext was a byte string or a tuple + of byte strings. A long otherwise. + """ + return pubkey.pubkey.decrypt(self, ciphertext) + + def sign(self, M, K): + """Sign a piece of data with RSA. + + Signing always takes place with blinding. + + :attention: this function performs the plain, primitive RSA decryption + (*textbook*). In real applications, you always need to use proper + cryptographic padding, and you should not directly sign data with + this method. Failure to do so may lead to security vulnerabilities. + It is recommended to use modules + `Crypto.Signature.PKCS1_PSS` or `Crypto.Signature.PKCS1_v1_5` instead. + + :Parameter M: The piece of data to sign with RSA. 
It may + not be numerically larger than the RSA module (**n**). + :Type M: byte string or long + + :Parameter K: A random parameter (*for compatibility only. This + value will be ignored*) + :Type K: byte string or long + + :Return: A 2-item tuple. The first item is the actual signature (a + long). The second item is always None. + """ + return pubkey.pubkey.sign(self, M, K) + + def verify(self, M, signature): + """Verify the validity of an RSA signature. + + :attention: this function performs the plain, primitive RSA encryption + (*textbook*). In real applications, you always need to use proper + cryptographic padding, and you should not directly verify data with + this method. Failure to do so may lead to security vulnerabilities. + It is recommended to use modules + `Crypto.Signature.PKCS1_PSS` or `Crypto.Signature.PKCS1_v1_5` instead. + + :Parameter M: The expected message. + :Type M: byte string or long + + :Parameter signature: The RSA signature to verify. The first item of + the tuple is the actual signature (a long not larger than the modulus + **n**), whereas the second item is always ignored. + :Type signature: A 2-item tuple as return by `sign` + + :Return: True if the signature is correct, False otherwise. + """ + return pubkey.pubkey.verify(self, M, signature) + + def _encrypt(self, c, K): + return (self.key._encrypt(c),) + + def _decrypt(self, c): + #(ciphertext,) = c + (ciphertext,) = c[:1] # HACK - We should use the previous line + # instead, but this is more compatible and we're + # going to replace the Crypto.PublicKey API soon + # anyway. + + # Blinded RSA decryption (to prevent timing attacks): + # Step 1: Generate random secret blinding factor r, such that 0 < r < n-1 + r = getRandomRange(1, self.key.n-1, randfunc=self._randfunc) + # Step 2: Compute c' = c * r**e mod n + cp = self.key._blind(ciphertext, r) + # Step 3: Compute m' = c'**d mod n (ordinary RSA decryption) + mp = self.key._decrypt(cp) + # Step 4: Compute m = m**(r-1) mod n + return self.key._unblind(mp, r) + + def _blind(self, m, r): + return self.key._blind(m, r) + + def _unblind(self, m, r): + return self.key._unblind(m, r) + + def _sign(self, m, K=None): + return (self.key._sign(m),) + + def _verify(self, m, sig): + #(s,) = sig + (s,) = sig[:1] # HACK - We should use the previous line instead, but + # this is more compatible and we're going to replace + # the Crypto.PublicKey API soon anyway. 
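The numbered comments in _decrypt() above spell out blinded decryption; the following pure-integer sketch, using tiny textbook values that are not taken from this module (never use such sizes in practice), checks that blinding and unblinding cancel out:

```python
from Crypto.Util.number import inverse

n, e, d = 3233, 17, 2753     # toy RSA key: n = 61 * 53
c = 2790                     # ciphertext of the message 65
r = 7                        # secret blinding factor, gcd(r, n) == 1

cp = (c * pow(r, e, n)) % n          # step 2: c' = c * r**e mod n
mp = pow(cp, d, n)                   # step 3: m' = c'**d mod n
m = (mp * inverse(r, n)) % n         # step 4: unblind, m = m' * r**(-1) mod n

assert m == pow(c, d, n) == 65       # identical to a plain, unblinded decryption
```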
+ return self.key._verify(m, s) + + def has_private(self): + return self.key.has_private() + + def size(self): + return self.key.size() + + def can_blind(self): + return True + + def can_encrypt(self): + return True + + def can_sign(self): + return True + + def publickey(self): + return self.implementation.construct((self.key.n, self.key.e)) + + def __getstate__(self): + d = {} + for k in self.keydata: + try: + d[k] = getattr(self.key, k) + except AttributeError: + pass + return d + + def __setstate__(self, d): + if not hasattr(self, 'implementation'): + self.implementation = RSAImplementation() + t = [] + for k in self.keydata: + if k not in d: + break + t.append(d[k]) + self.key = self.implementation._math.rsa_construct(*tuple(t)) + + def __repr__(self): + attrs = [] + for k in self.keydata: + if k == 'n': + attrs.append("n(%d)" % (self.size()+1,)) + elif hasattr(self.key, k): + attrs.append(k) + if self.has_private(): + attrs.append("private") + # PY3K: This is meant to be text, do not change to bytes (data) + return "<%s @0x%x %s>" % (self.__class__.__name__, id(self), ",".join(attrs)) + + def exportKey(self, format='PEM', passphrase=None, pkcs=1): + """Export this RSA key. + + :Parameter format: The format to use for wrapping the key. + + - *'DER'*. Binary encoding, always unencrypted. + - *'PEM'*. Textual encoding, done according to `RFC1421`_/`RFC1423`_. + Unencrypted (default) or encrypted. + - *'OpenSSH'*. Textual encoding, done according to OpenSSH specification. + Only suitable for public keys (not private keys). + :Type format: string + + :Parameter passphrase: In case of PEM, the pass phrase to derive the encryption key from. + :Type passphrase: string + + :Parameter pkcs: The PKCS standard to follow for assembling the key. + You have two choices: + + - with **1**, the public key is embedded into an X.509 `SubjectPublicKeyInfo` DER SEQUENCE. + The private key is embedded into a `PKCS#1`_ `RSAPrivateKey` DER SEQUENCE. + This mode is the default. + - with **8**, the private key is embedded into a `PKCS#8`_ `PrivateKeyInfo` DER SEQUENCE. + This mode is not available for public keys. + + PKCS standards are not relevant for the *OpenSSH* format. + :Type pkcs: integer + + :Return: A byte string with the encoded public or private half. + :Raise ValueError: + When the format is unknown. + + .. _RFC1421: http://www.ietf.org/rfc/rfc1421.txt + .. _RFC1423: http://www.ietf.org/rfc/rfc1423.txt + .. _`PKCS#1`: http://www.ietf.org/rfc/rfc3447.txt + .. _`PKCS#8`: http://www.ietf.org/rfc/rfc5208.txt + """ + if passphrase is not None: + passphrase = tobytes(passphrase) + if format=='OpenSSH': + eb = long_to_bytes(self.e) + nb = long_to_bytes(self.n) + if bord(eb[0]) & 0x80: eb=bchr(0x00)+eb + if bord(nb[0]) & 0x80: nb=bchr(0x00)+nb + keyparts = [ 'ssh-rsa', eb, nb ] + keystring = ''.join([ struct.pack(">I",len(kp))+kp for kp in keyparts]) + return 'ssh-rsa '+binascii.b2a_base64(keystring)[:-1] + + # DER format is always used, even in case of PEM, which simply + # encodes it into BASE64. 
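As a usage sketch of the export formats documented above (the passphrase is a placeholder; any OpenSSL-compatible pass phrase works):

```python
from Crypto.PublicKey import RSA

key = RSA.generate(2048)

pem_private = key.exportKey('PEM', passphrase='example passphrase')  # encrypted PEM
der_pkcs8 = key.exportKey('DER', pkcs=8)                             # PKCS#8 DER
pem_public = key.publickey().exportKey('PEM')                        # SubjectPublicKeyInfo

# Importing reverses the operation; the same passphrase is needed again
# for the encrypted private key.
restored = RSA.importKey(pem_private, passphrase='example passphrase')
assert restored.n == key.n and restored.d == key.d
```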
+ der = DerSequence() + if self.has_private(): + keyType= { 1: 'RSA PRIVATE', 8: 'PRIVATE' }[pkcs] + der[:] = [ 0, self.n, self.e, self.d, self.p, self.q, + self.d % (self.p-1), self.d % (self.q-1), + inverse(self.q, self.p) ] + if pkcs==8: + derkey = der.encode() + der = DerSequence([0]) + der.append(algorithmIdentifier) + der.append(DerObject('OCTET STRING', derkey).encode()) + else: + keyType = "PUBLIC" + der.append(algorithmIdentifier) + bitmap = DerObject('BIT STRING') + derPK = DerSequence( [ self.n, self.e ] ) + bitmap.payload = bchr(0x00) + derPK.encode() + der.append(bitmap.encode()) + if format=='DER': + return der.encode() + if format=='PEM': + pem = b("-----BEGIN " + keyType + " KEY-----\n") + objenc = None + if passphrase and keyType.endswith('PRIVATE'): + # We only support 3DES for encryption + import Crypto.Hash.MD5 + from Crypto.Cipher import DES3 + from Crypto.Protocol.KDF import PBKDF1 + salt = self._randfunc(8) + key = PBKDF1(passphrase, salt, 16, 1, Crypto.Hash.MD5) + key += PBKDF1(key+passphrase, salt, 8, 1, Crypto.Hash.MD5) + objenc = DES3.new(key, Crypto.Cipher.DES3.MODE_CBC, salt) + pem += b('Proc-Type: 4,ENCRYPTED\n') + pem += b('DEK-Info: DES-EDE3-CBC,') + binascii.b2a_hex(salt).upper() + b('\n\n') + + binaryKey = der.encode() + if objenc: + # Add PKCS#7-like padding + padding = objenc.block_size-len(binaryKey)%objenc.block_size + binaryKey = objenc.encrypt(binaryKey+bchr(padding)*padding) + + # Each BASE64 line can take up to 64 characters (=48 bytes of data) + chunks = [ binascii.b2a_base64(binaryKey[i:i+48]) for i in range(0, len(binaryKey), 48) ] + pem += b('').join(chunks) + pem += b("-----END " + keyType + " KEY-----") + return pem + return ValueError("Unknown key format '%s'. Cannot export the RSA key." % format) + +class RSAImplementation(object): + """ + An RSA key factory. + + This class is only internally used to implement the methods of the `Crypto.PublicKey.RSA` module. + + :sort: __init__,generate,construct,importKey + :undocumented: _g*, _i* + """ + + def __init__(self, **kwargs): + """Create a new RSA key factory. + + :Keywords: + use_fast_math : bool + Specify which mathematic library to use: + + - *None* (default). Use fastest math available. + - *True* . Use fast math. + - *False* . Use slow math. + default_randfunc : callable + Specify how to collect random data: + + - *None* (default). Use Random.new().read(). + - not *None* . Use the specified function directly. + :Raise RuntimeError: + When **use_fast_math** =True but fast math is not available. + """ + use_fast_math = kwargs.get('use_fast_math', None) + if use_fast_math is None: # Automatic + if _fastmath is not None: + self._math = _fastmath + else: + self._math = _slowmath + + elif use_fast_math: # Explicitly select fast math + if _fastmath is not None: + self._math = _fastmath + else: + raise RuntimeError("fast math module not available") + + else: # Explicitly select slow math + self._math = _slowmath + + self.error = self._math.error + + self._default_randfunc = kwargs.get('default_randfunc', None) + self._current_randfunc = None + + def _get_randfunc(self, randfunc): + if randfunc is not None: + return randfunc + elif self._current_randfunc is None: + self._current_randfunc = Random.new().read + return self._current_randfunc + + def generate(self, bits, randfunc=None, progress_func=None, e=65537): + """Randomly generate a fresh, new RSA key. + + :Parameters: + bits : int + Key length, or size (in bits) of the RSA modulus. + It must be a multiple of 256, and no smaller than 1024. 
+ + randfunc : callable + Random number generation function; it should accept + a single integer N and return a string of random data + N bytes long. + If not specified, a new one will be instantiated + from ``Crypto.Random``. + + progress_func : callable + Optional function that will be called with a short string + containing the key parameter currently being generated; + it's useful for interactive applications where a user is + waiting for a key to be generated. + + e : int + Public RSA exponent. It must be an odd positive integer. + It is typically a small number with very few ones in its + binary representation. + The default value 65537 (= ``0b10000000000000001`` ) is a safe + choice: other common values are 5, 7, 17, and 257. + + :attention: You should always use a cryptographically secure random number generator, + such as the one defined in the ``Crypto.Random`` module; **don't** just use the + current time and the ``random`` module. + + :attention: Exponent 3 is also widely used, but it requires very special care when padding + the message. + + :Return: An RSA key object (`_RSAobj`). + + :Raise ValueError: + When **bits** is too little or not a multiple of 256, or when + **e** is not odd or smaller than 2. + """ + if bits < 1024 or (bits & 0xff) != 0: + # pubkey.getStrongPrime doesn't like anything that's not a multiple of 256 and >= 1024 + raise ValueError("RSA modulus length must be a multiple of 256 and >= 1024") + if e%2==0 or e<3: + raise ValueError("RSA public exponent must be a positive, odd integer larger than 2.") + rf = self._get_randfunc(randfunc) + obj = _RSA.generate_py(bits, rf, progress_func, e) # TODO: Don't use legacy _RSA module + key = self._math.rsa_construct(obj.n, obj.e, obj.d, obj.p, obj.q, obj.u) + return _RSAobj(self, key) + + def construct(self, tup): + """Construct an RSA key from a tuple of valid RSA components. + + The modulus **n** must be the product of two primes. + The public exponent **e** must be odd and larger than 1. + + In case of a private key, the following equations must apply: + + - e != 1 + - p*q = n + - e*d = 1 mod (p-1)(q-1) + - p*u = 1 mod q + + :Parameters: + tup : tuple + A tuple of long integers, with at least 2 and no + more than 6 items. The items come in the following order: + + 1. RSA modulus (n). + 2. Public exponent (e). + 3. Private exponent (d). Only required if the key is private. + 4. First factor of n (p). Optional. + 5. Second factor of n (q). Optional. + 6. CRT coefficient, (1/p) mod q (u). Optional. + + :Return: An RSA key object (`_RSAobj`). + """ + key = self._math.rsa_construct(*tup) + return _RSAobj(self, key) + + def _importKeyDER(self, externKey): + """Import an RSA key (public or private half), encoded in DER form.""" + + try: + + der = DerSequence() + der.decode(externKey, True) + + # Try PKCS#1 first, for a private key + if len(der)==9 and der.hasOnlyInts() and der[0]==0: + # ASN.1 RSAPrivateKey element + del der[6:] # Remove d mod (p-1), d mod (q-1), and q^{-1} mod p + der.append(inverse(der[4],der[5])) # Add p^{-1} mod q + del der[0] # Remove version + return self.construct(der[:]) + + # Keep on trying PKCS#1, but now for a public key + if len(der)==2: + # The DER object is an RSAPublicKey SEQUENCE with two elements + if der.hasOnlyInts(): + return self.construct(der[:]) + # The DER object is a SubjectPublicKeyInfo SEQUENCE with two elements: + # an 'algorithm' (or 'algorithmIdentifier') SEQUENCE and a 'subjectPublicKey' BIT STRING. + # 'algorithm' takes the value given a few lines above. 
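A small sketch of the component relationships listed in the construct() documentation above, using textbook-sized numbers for illustration only (real keys must be generated with at least 1024 bits):

    from Crypto.PublicKey import RSA

    p, q, e = 61, 53, 17
    n = p * q                    # 3233
    d = 2753                     # e*d % ((p-1)*(q-1)) == 1
    u = 20                       # (p*u) % q == 1, i.e. u is the inverse of p modulo q

    key = RSA.construct((n, e, d, p, q, u))
    assert key.has_private()
    assert key.publickey().n == n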
+                # 'subjectPublicKey' encapsulates the actual ASN.1 RSAPublicKey element.
+                if der[0]==algorithmIdentifier:
+                    bitmap = DerObject()
+                    bitmap.decode(der[1], True)
+                    if bitmap.isType('BIT STRING') and bord(bitmap.payload[0])==0x00:
+                        der.decode(bitmap.payload[1:], True)
+                        if len(der)==2 and der.hasOnlyInts():
+                            return self.construct(der[:])
+
+            # Try unencrypted PKCS#8
+            if der[0]==0:
+                # The second element in the SEQUENCE is algorithmIdentifier.
+                # It must say RSA (see above for description).
+                if der[1]==algorithmIdentifier:
+                    privateKey = DerObject()
+                    privateKey.decode(der[2], True)
+                    if privateKey.isType('OCTET STRING'):
+                        return self._importKeyDER(privateKey.payload)
+
+        except (ValueError, IndexError):
+            pass
+
+        raise ValueError("RSA key format is not supported")
+
+    def importKey(self, externKey, passphrase=None):
+        """Import an RSA key (public or private half), encoded in standard form.
+
+        :Parameter externKey:
+            The RSA key to import, encoded as a string.
+
+            An RSA public key can be in any of the following formats:
+
+            - X.509 `subjectPublicKeyInfo` DER SEQUENCE (binary or PEM encoding)
+            - `PKCS#1`_ `RSAPublicKey` DER SEQUENCE (binary or PEM encoding)
+            - OpenSSH (textual public key only)
+
+            An RSA private key can be in any of the following formats:
+
+            - PKCS#1 `RSAPrivateKey` DER SEQUENCE (binary or PEM encoding)
+            - `PKCS#8`_ `PrivateKeyInfo` DER SEQUENCE (binary or PEM encoding)
+            - OpenSSH (textual public key only)
+
+            For details about the PEM encoding, see `RFC1421`_/`RFC1423`_.
+
+            In case of PEM encoding, the private key can be encrypted with DES or 3TDES according to a certain ``pass phrase``.
+            Only OpenSSL-compatible pass phrases are supported.
+        :Type externKey: string
+
+        :Parameter passphrase:
+            In case of an encrypted PEM key, this is the pass phrase from which the encryption key is derived.
+        :Type passphrase: string
+
+        :Return: An RSA key object (`_RSAobj`).
+
+        :Raise ValueError/IndexError/TypeError:
+            When the given key cannot be parsed (possibly because the pass phrase is wrong).
+
+        .. _RFC1421: http://www.ietf.org/rfc/rfc1421.txt
+        .. _RFC1423: http://www.ietf.org/rfc/rfc1423.txt
+        .. _`PKCS#1`: http://www.ietf.org/rfc/rfc3447.txt
+        ..
_`PKCS#8`: http://www.ietf.org/rfc/rfc5208.txt + """ + externKey = tobytes(externKey) + if passphrase is not None: + passphrase = tobytes(passphrase) + + if externKey.startswith(b('-----')): + # This is probably a PEM encoded key + lines = externKey.replace(b(" "),b('')).split() + keyobj = None + + # The encrypted PEM format + if lines[1].startswith(b('Proc-Type:4,ENCRYPTED')): + DEK = lines[2].split(b(':')) + if len(DEK)!=2 or DEK[0]!=b('DEK-Info') or not passphrase: + raise ValueError("PEM encryption format not supported.") + algo, salt = DEK[1].split(b(',')) + salt = binascii.a2b_hex(salt) + import Crypto.Hash.MD5 + from Crypto.Cipher import DES, DES3 + from Crypto.Protocol.KDF import PBKDF1 + if algo==b("DES-CBC"): + # This is EVP_BytesToKey in OpenSSL + key = PBKDF1(passphrase, salt, 8, 1, Crypto.Hash.MD5) + keyobj = DES.new(key, Crypto.Cipher.DES.MODE_CBC, salt) + elif algo==b("DES-EDE3-CBC"): + # Note that EVP_BytesToKey is note exactly the same as PBKDF1 + key = PBKDF1(passphrase, salt, 16, 1, Crypto.Hash.MD5) + key += PBKDF1(key+passphrase, salt, 8, 1, Crypto.Hash.MD5) + keyobj = DES3.new(key, Crypto.Cipher.DES3.MODE_CBC, salt) + else: + raise ValueError("Unsupport PEM encryption algorithm.") + lines = lines[2:] + + der = binascii.a2b_base64(b('').join(lines[1:-1])) + if keyobj: + der = keyobj.decrypt(der) + padding = bord(der[-1]) + der = der[:-padding] + return self._importKeyDER(der) + + if externKey.startswith(b('ssh-rsa ')): + # This is probably an OpenSSH key + keystring = binascii.a2b_base64(externKey.split(b(' '))[1]) + keyparts = [] + while len(keystring)>4: + l = struct.unpack(">I",keystring[:4])[0] + keyparts.append(keystring[4:4+l]) + keystring = keystring[4+l:] + e = bytes_to_long(keyparts[1]) + n = bytes_to_long(keyparts[2]) + return self.construct([n, e]) + if bord(externKey[0])==0x30: + # This is probably a DER encoded key + return self._importKeyDER(externKey) + + raise ValueError("RSA key format is not supported") + +#: This is the ASN.1 DER object that qualifies an algorithm as +#: compliant to PKCS#1 (that is, the standard RSA). +# It is found in all 'algorithm' fields (also called 'algorithmIdentifier'). +# It is a SEQUENCE with the oid assigned to RSA and with its parameters (none). +# 0x06 0x09 OBJECT IDENTIFIER, 9 bytes of payload +# 0x2A 0x86 0x48 0x86 0xF7 0x0D 0x01 0x01 0x01 +# rsaEncryption (1 2 840 113549 1 1 1) (PKCS #1) +# 0x05 0x00 NULL +algorithmIdentifier = DerSequence( + [ b('\x06\x09\x2A\x86\x48\x86\xF7\x0D\x01\x01\x01'), + DerNull().encode() ] + ).encode() + +_impl = RSAImplementation() +#: +#: Randomly generate a fresh, new RSA key object. +#: +#: See `RSAImplementation.generate`. +#: +generate = _impl.generate +#: +#: Construct an RSA key object from a tuple of valid RSA components. +#: +#: See `RSAImplementation.construct`. +#: +construct = _impl.construct +#: +#: Import an RSA key (public or private half), encoded in standard form. +#: +#: See `RSAImplementation.importKey`. 
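A short round-trip sketch of importKey()/exportKey() as documented above (the passphrase is illustrative; assumes the PyCrypto 2.x API added by this patch):

    from Crypto.PublicKey import RSA

    key = RSA.generate(1024)
    pem = key.exportKey('PEM', passphrase='letmein')        # PKCS#1, DES-EDE3-CBC encrypted
    restored = RSA.importKey(pem, passphrase='letmein')
    assert restored.n == key.n and restored.d == key.d

    pub = RSA.importKey(key.publickey().exportKey('DER'))   # X.509 SubjectPublicKeyInfo
    assert pub.e == key.e and not pub.has_private()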
+#: +importKey = _impl.importKey +error = _impl.error + +# vim:set ts=4 sw=4 sts=4 expandtab: + diff --git a/Lib/site-packages/Crypto/PublicKey/_DSA.py b/Lib/site-packages/Crypto/PublicKey/_DSA.py new file mode 100644 index 0000000..ce6025e --- /dev/null +++ b/Lib/site-packages/Crypto/PublicKey/_DSA.py @@ -0,0 +1,115 @@ + +# +# DSA.py : Digital Signature Algorithm +# +# Part of the Python Cryptography Toolkit +# +# Written by Andrew Kuchling, Paul Swartz, and others +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== +# + +__revision__ = "$Id$" + +from Crypto.PublicKey.pubkey import * +from Crypto.Util import number +from Crypto.Util.number import bytes_to_long, long_to_bytes +from Crypto.Hash import SHA +from Crypto.Util.py3compat import * + +class error (Exception): + pass + +def generateQ(randfunc): + S=randfunc(20) + hash1=SHA.new(S).digest() + hash2=SHA.new(long_to_bytes(bytes_to_long(S)+1)).digest() + q = bignum(0) + for i in range(0,20): + c=bord(hash1[i])^bord(hash2[i]) + if i==0: + c=c | 128 + if i==19: + c= c | 1 + q=q*256+c + while (not isPrime(q)): + q=q+2 + if pow(2,159) < q < pow(2,160): + return S, q + raise RuntimeError('Bad q value generated') + +def generate_py(bits, randfunc, progress_func=None): + """generate(bits:int, randfunc:callable, progress_func:callable) + + Generate a DSA key of length 'bits', using 'randfunc' to get + random data and 'progress_func', if present, to display + the progress of the key generation. 
+    """
+
+    if bits<160:
+        raise ValueError('Key length < 160 bits')
+    obj=DSAobj()
+    # Generate string S and prime q
+    if progress_func:
+        progress_func('p,q\n')
+    while (1):
+        S, obj.q = generateQ(randfunc)
+        n=divmod(bits-1, 160)[0]
+        C, N, V = 0, 2, {}
+        b=(obj.q >> 5) & 15
+        powb=pow(bignum(2), b)
+        powL1=pow(bignum(2), bits-1)
+        while C<4096:
+            for k in range(0, n+1):
+                V[k]=bytes_to_long(SHA.new(S+bstr(N)+bstr(k)).digest())
+            W=V[n] % powb
+            for k in range(n-1, -1, -1):
+                W=(W<<160)+V[k]
+            X=W+powL1
+            p=X-(X%(2*obj.q)-1)
+            if powL1<=p and isPrime(p):
+                break
+            C, N = C+1, N+n+1
+        if C<4096:
+            break
+        if progress_func:
+            progress_func('4096 multiples failed\n')
+
+    obj.p = p
+    power=divmod(p-1, obj.q)[0]
+    if progress_func:
+        progress_func('h,g\n')
+    while (1):
+        h=bytes_to_long(randfunc(bits)) % (p-1)
+        g=pow(h, power, p)
+        if 1<g<p-1:
+            break
+    obj.g=g
+    if progress_func:
+        progress_func('x,y\n')
+    while (1):
+        x=bytes_to_long(randfunc(20))
+        if 0 < x < obj.q:
+            break
+    obj.x, obj.y = x, pow(g, x, p)
+    return obj
+
+class DSAobj:
+    pass
+
diff --git a/Lib/site-packages/Crypto/PublicKey/_RSA.py b/Lib/site-packages/Crypto/PublicKey/_RSA.py
new file mode 100644
index 0000000..fd882bf
--- /dev/null
+++ b/Lib/site-packages/Crypto/PublicKey/_RSA.py
@@ -0,0 +1,81 @@
+#
+# RSA.py : RSA encryption/decryption
+#
+# Part of the Python Cryptography Toolkit
+#
+# Written by Andrew Kuchling, Paul Swartz, and others
+#
+# ===================================================================
+# The contents of this file are dedicated to the public domain. To
+# the extent that dedication to the public domain is not available,
+# everyone is granted a worldwide, perpetual, royalty-free,
+# non-exclusive license to exercise all rights associated with the
+# contents of this file for any purpose whatsoever.
+# No rights are reserved.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
+# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+# ===================================================================
+#
+
+__revision__ = "$Id$"
+
+from Crypto.PublicKey import pubkey
+from Crypto.Util import number
+
+def generate_py(bits, randfunc, progress_func=None, e=65537):
+    """generate(bits:int, randfunc:callable, progress_func:callable, e:int)
+
+    Generate an RSA key of length 'bits', public exponent 'e' (which must be
+    odd), using 'randfunc' to get random data and 'progress_func',
+    if present, to display the progress of the key generation.
+    """
+    obj=RSAobj()
+    obj.e = int(e)
+
+    # Generate the prime factors of n
+    if progress_func:
+        progress_func('p,q\n')
+    p = q = 1
+    while number.size(p*q) < bits:
+        # Note that q might be one bit longer than p if somebody specifies an odd
+        # number of bits for the key. (Why would anyone do that? You don't get
+        # more security.)
+        p = pubkey.getStrongPrime(bits>>1, obj.e, 1e-12, randfunc)
+        q = pubkey.getStrongPrime(bits - (bits>>1), obj.e, 1e-12, randfunc)
+
+    # It's OK for p to be larger than q, but let's be
+    # kind to the function that will invert it for
+    # the calculation of u.
+ if p > q: + (p, q)=(q, p) + obj.p = p + obj.q = q + + if progress_func: + progress_func('u\n') + obj.u = pubkey.inverse(obj.p, obj.q) + obj.n = obj.p*obj.q + + if progress_func: + progress_func('d\n') + obj.d=pubkey.inverse(obj.e, (obj.p-1)*(obj.q-1)) + + assert bits <= 1+obj.size(), "Generated key is too small" + + return obj + +class RSAobj(pubkey.pubkey): + + def size(self): + """size() : int + Return the maximum number of bits that can be handled by this key. + """ + return number.size(self.n) - 1 + diff --git a/Lib/site-packages/Crypto/PublicKey/__init__.py b/Lib/site-packages/Crypto/PublicKey/__init__.py new file mode 100644 index 0000000..503809f --- /dev/null +++ b/Lib/site-packages/Crypto/PublicKey/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Public-key encryption and signature algorithms. + +Public-key encryption uses two different keys, one for encryption and +one for decryption. The encryption key can be made public, and the +decryption key is kept private. Many public-key algorithms can also +be used to sign messages, and some can *only* be used for signatures. + +======================== ============================================= +Module Description +======================== ============================================= +Crypto.PublicKey.DSA Digital Signature Algorithm (Signature only) +Crypto.PublicKey.ElGamal (Signing and encryption) +Crypto.PublicKey.RSA (Signing, encryption, and blinding) +======================== ============================================= + +:undocumented: _DSA, _RSA, _fastmath, _slowmath, pubkey +""" + +__all__ = ['RSA', 'DSA', 'ElGamal'] +__revision__ = "$Id$" + diff --git a/Lib/site-packages/Crypto/PublicKey/_slowmath.py b/Lib/site-packages/Crypto/PublicKey/_slowmath.py new file mode 100644 index 0000000..e38015f --- /dev/null +++ b/Lib/site-packages/Crypto/PublicKey/_slowmath.py @@ -0,0 +1,187 @@ +# -*- coding: utf-8 -*- +# +# PubKey/RSA/_slowmath.py : Pure Python implementation of the RSA portions of _fastmath +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Pure Python implementation of the RSA-related portions of Crypto.PublicKey._fastmath.""" + +__revision__ = "$Id$" + +__all__ = ['rsa_construct'] + +import sys + +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * +from Crypto.Util.number import size, inverse, GCD + +class error(Exception): + pass + +class _RSAKey(object): + def _blind(self, m, r): + # compute r**e * m (mod n) + return m * pow(r, self.e, self.n) + + def _unblind(self, m, r): + # compute m / r (mod n) + return inverse(r, self.n) * m % self.n + + def _decrypt(self, c): + # compute c**d (mod n) + if not self.has_private(): + raise TypeError("No private key") + if (hasattr(self,'p') and hasattr(self,'q') and hasattr(self,'u')): + m1 = pow(c, self.d % (self.p-1), self.p) + m2 = pow(c, self.d % (self.q-1), self.q) + h = m2 - m1 + if (h<0): + h = h + self.q + h = h*self.u % self.q + return h*self.p+m1 + return pow(c, self.d, self.n) + + def _encrypt(self, m): + # compute m**d (mod n) + return pow(m, self.e, self.n) + + def _sign(self, m): # alias for _decrypt + if not self.has_private(): + raise TypeError("No private key") + return self._decrypt(m) + + def _verify(self, m, sig): + return self._encrypt(sig) == m + + def has_private(self): + return hasattr(self, 'd') + + def size(self): + """Return the maximum number of bits that can be encrypted""" + return size(self.n) - 1 + +def rsa_construct(n, e, d=None, p=None, q=None, u=None): + """Construct an RSAKey object""" + assert isinstance(n, int) + assert isinstance(e, int) + assert isinstance(d, (int, type(None))) + assert isinstance(p, (int, type(None))) + assert isinstance(q, (int, type(None))) + assert isinstance(u, (int, type(None))) + obj = _RSAKey() + obj.n = n + obj.e = e + if d is None: + return obj + obj.d = d + if p is not None and q is not None: + obj.p = p + obj.q = q + else: + # Compute factors p and q from the private exponent d. + # We assume that n has no more than two factors. + # See 8.2.2(i) in Handbook of Applied Cryptography. + ktot = d*e-1 + # The quantity d*e-1 is a multiple of phi(n), even, + # and can be represented as t*2^s. + t = ktot + while t%2==0: + t=divmod(t,2)[0] + # Cycle through all multiplicative inverses in Zn. + # The algorithm is non-deterministic, but there is a 50% chance + # any candidate a leads to successful factoring. + # See "Digitalized Signatures and Public Key Functions as Intractable + # as Factorization", M. Rabin, 1979 + spotted = 0 + a = 2 + while not spotted and a<100: + k = t + # Cycle through all values a^{t*2^i}=a^k + while k +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. 
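A small self-contained check of the CRT arithmetic implemented by _RSAKey above, using textbook-sized numbers (never use values this small for real keys):

    from Crypto.Util.number import inverse
    from Crypto.PublicKey import _slowmath

    p, q, e = 61, 53, 17
    n, d = p * q, 2753                         # d is the inverse of e modulo (p-1)*(q-1)
    key = _slowmath.rsa_construct(n, e, d, p, q, inverse(p, q))

    c = key._encrypt(42)                       # 42**e mod n
    assert key._decrypt(c) == 42               # CRT branch when p, q and u are present
    assert key.size() == 11                    # bit size of n, minus one

rsa_construct() can also be called with only (n, e, d); in that case the factors p and q are recovered from d as described in the comments above.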
+# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +__revision__ = "$Id$" + +import sys +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * +from Crypto.Util.py3compat import * + +from binascii import b2a_hex +import time +import warnings + +from Crypto.pct_warnings import ClockRewindWarning +from . import SHAd256 + +from . import FortunaGenerator + +class FortunaPool(object): + """Fortuna pool type + + This object acts like a hash object, with the following differences: + + - It keeps a count (the .length attribute) of the number of bytes that + have been added to the pool + - It supports a .reset() method for in-place reinitialization + - The method to add bytes to the pool is .append(), not .update(). + """ + + digest_size = SHAd256.digest_size + + def __init__(self): + self.reset() + + def append(self, data): + self._h.update(data) + self.length += len(data) + + def digest(self): + return self._h.digest() + + def hexdigest(self): + if sys.version_info[0] == 2: + return b2a_hex(self.digest()) + else: + return b2a_hex(self.digest()).decode() + + def reset(self): + self._h = SHAd256.new() + self.length = 0 + +def which_pools(r): + """Return a list of pools indexes (in range(32)) that are to be included during reseed number r. + + According to _Practical Cryptography_, chapter 10.5.2 "Pools": + + "Pool P_i is included if 2**i is a divisor of r. Thus P_0 is used + every reseed, P_1 every other reseed, P_2 every fourth reseed, etc." + """ + # This is a separate function so that it can be unit-tested. + assert r >= 1 + retval = [] + mask = 0 + for i in range(32): + # "Pool P_i is included if 2**i is a divisor of [reseed_count]" + if (r & mask) == 0: + retval.append(i) + else: + break # optimization. once this fails, it always fails + mask = (mask << 1) | 1 + return retval + +class FortunaAccumulator(object): + + # An estimate of how many bytes we must append to pool 0 before it will + # contain 128 bits of entropy (with respect to an attack). We reseed the + # generator only after pool 0 contains `min_pool_size` bytes. Note that + # unlike with some other PRNGs, Fortuna's security does not rely on the + # accuracy of this estimate---we can accord to be optimistic here. + min_pool_size = 64 # size in bytes + + # If an attacker can predict some (but not all) of our entropy sources, the + # `min_pool_size` check may not be sufficient to prevent a successful state + # compromise extension attack. To resist this attack, Fortuna spreads the + # input across 32 pools, which are then consumed (to reseed the output + # generator) with exponentially decreasing frequency. + # + # In order to prevent an attacker from gaining knowledge of all 32 pools + # before we have a chance to fill them with enough information that the + # attacker cannot predict, we impose a rate limit of 10 reseeds/second (one + # per 100 ms). This ensures that a hypothetical 33rd pool would only be + # needed after a minimum of 13 years of sustained attack. 
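The pool-selection rule quoted in the which_pools() docstring above can be checked directly; a few verified values, assuming the function as defined in this file:

    from Crypto.Random.Fortuna.FortunaAccumulator import which_pools

    assert which_pools(1) == [0]           # 2**0 divides 1
    assert which_pools(2) == [0, 1]
    assert which_pools(3) == [0]           # odd reseed count: only pool 0
    assert which_pools(8) == [0, 1, 2, 3]  # 2**3 divides 8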
+ reseed_interval = 0.100 # time in seconds + + def __init__(self): + self.reseed_count = 0 + self.generator = FortunaGenerator.AESGenerator() + self.last_reseed = None + + # Initialize 32 FortunaPool instances. + # NB: This is _not_ equivalent to [FortunaPool()]*32, which would give + # us 32 references to the _same_ FortunaPool instance (and cause the + # assertion below to fail). + self.pools = [FortunaPool() for i in range(32)] # 32 pools + assert(self.pools[0] is not self.pools[1]) + + def _forget_last_reseed(self): + # This is not part of the standard Fortuna definition, and using this + # function frequently can weaken Fortuna's ability to resist a state + # compromise extension attack, but we need this in order to properly + # implement Crypto.Random.atfork(). Otherwise, forked child processes + # might continue to use their parent's PRNG state for up to 100ms in + # some cases. (e.g. CVE-2013-1445) + self.last_reseed = None + + def random_data(self, bytes): + current_time = time.time() + if (self.last_reseed is not None and self.last_reseed > current_time): # Avoid float comparison to None to make Py3k happy + warnings.warn("Clock rewind detected. Resetting last_reseed.", ClockRewindWarning) + self.last_reseed = None + if (self.pools[0].length >= self.min_pool_size and + (self.last_reseed is None or + current_time > self.last_reseed + self.reseed_interval)): + self._reseed(current_time) + # The following should fail if we haven't seeded the pool yet. + return self.generator.pseudo_random_data(bytes) + + def _reseed(self, current_time=None): + if current_time is None: + current_time = time.time() + seed = [] + self.reseed_count += 1 + self.last_reseed = current_time + for i in which_pools(self.reseed_count): + seed.append(self.pools[i].digest()) + self.pools[i].reset() + + seed = b("").join(seed) + self.generator.reseed(seed) + + def add_random_event(self, source_number, pool_number, data): + assert 1 <= len(data) <= 32 + assert 0 <= source_number <= 255 + assert 0 <= pool_number <= 31 + self.pools[pool_number].append(bchr(source_number)) + self.pools[pool_number].append(bchr(len(data))) + self.pools[pool_number].append(data) + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/Random/Fortuna/FortunaGenerator.py b/Lib/site-packages/Crypto/Random/Fortuna/FortunaGenerator.py new file mode 100644 index 0000000..ec4d6e5 --- /dev/null +++ b/Lib/site-packages/Crypto/Random/Fortuna/FortunaGenerator.py @@ -0,0 +1,132 @@ +# -*- coding: ascii -*- +# +# FortunaGenerator.py : Fortuna's internal PRNG +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
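A minimal sketch of how the accumulator is driven (normally Crypto.Random._UserFriendlyRNG does this, as shown later in this patch); pool 0 must hold at least min_pool_size (64) bytes before the first reseed:

    import os
    from Crypto.Random.Fortuna.FortunaAccumulator import FortunaAccumulator

    acc = FortunaAccumulator()
    for _ in range(2):
        acc.add_random_event(0, 0, os.urandom(32))   # source 0, pool 0, 32 bytes each
    assert len(acc.random_data(16)) == 16            # the first call triggers a reseed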
+# =================================================================== + +__revision__ = "$Id$" + +import sys +if sys.version_info[0] is 2 and sys.version_info[1] is 1: + from Crypto.Util.py21compat import * +from Crypto.Util.py3compat import * + +import struct + +from Crypto.Util.number import ceil_shift, exact_log2, exact_div +from Crypto.Util import Counter +from Crypto.Cipher import AES + +from . import SHAd256 + +class AESGenerator(object): + """The Fortuna "generator" + + This is used internally by the Fortuna PRNG to generate arbitrary amounts + of pseudorandom data from a smaller amount of seed data. + + The output is generated by running AES-256 in counter mode and re-keying + after every mebibyte (2**16 blocks) of output. + """ + + block_size = AES.block_size # output block size in octets (128 bits) + key_size = 32 # key size in octets (256 bits) + + # Because of the birthday paradox, we expect to find approximately one + # collision for every 2**64 blocks of output from a real random source. + # However, this code generates pseudorandom data by running AES in + # counter mode, so there will be no collisions until the counter + # (theoretically) wraps around at 2**128 blocks. Thus, in order to prevent + # Fortuna's pseudorandom output from deviating perceptibly from a true + # random source, Ferguson and Schneier specify a limit of 2**16 blocks + # without rekeying. + max_blocks_per_request = 2**16 # Allow no more than this number of blocks per _pseudo_random_data request + + _four_kiblocks_of_zeros = b("\0") * block_size * 4096 + + def __init__(self): + self.counter = Counter.new(nbits=self.block_size*8, initial_value=0, little_endian=True) + self.key = None + + # Set some helper constants + self.block_size_shift = exact_log2(self.block_size) + assert (1 << self.block_size_shift) == self.block_size + + self.blocks_per_key = exact_div(self.key_size, self.block_size) + assert self.key_size == self.blocks_per_key * self.block_size + + self.max_bytes_per_request = self.max_blocks_per_request * self.block_size + + def reseed(self, seed): + if self.key is None: + self.key = b("\0") * self.key_size + + self._set_key(SHAd256.new(self.key + seed).digest()) + self.counter() # increment counter + assert len(self.key) == self.key_size + + def pseudo_random_data(self, bytes): + assert bytes >= 0 + + num_full_blocks = bytes >> 20 + remainder = bytes & ((1<<20)-1) + + retval = [] + for i in range(num_full_blocks): + retval.append(self._pseudo_random_data(1<<20)) + retval.append(self._pseudo_random_data(remainder)) + + return b("").join(retval) + + def _set_key(self, key): + self.key = key + self._cipher = AES.new(key, AES.MODE_CTR, counter=self.counter) + + def _pseudo_random_data(self, bytes): + if not (0 <= bytes <= self.max_bytes_per_request): + raise AssertionError("You cannot ask for more than 1 MiB of data per request") + + num_blocks = ceil_shift(bytes, self.block_size_shift) # num_blocks = ceil(bytes / self.block_size) + + # Compute the output + retval = self._generate_blocks(num_blocks)[:bytes] + + # Switch to a new key to avoid later compromises of this output (i.e. 
+ # state compromise extension attacks) + self._set_key(self._generate_blocks(self.blocks_per_key)) + + assert len(retval) == bytes + assert len(self.key) == self.key_size + + return retval + + def _generate_blocks(self, num_blocks): + if self.key is None: + raise AssertionError("generator must be seeded before use") + assert 0 <= num_blocks <= self.max_blocks_per_request + retval = [] + for i in range(num_blocks >> 12): # xrange(num_blocks / 4096) + retval.append(self._cipher.encrypt(self._four_kiblocks_of_zeros)) + remaining_bytes = (num_blocks & 4095) << self.block_size_shift # (num_blocks % 4095) * self.block_size + retval.append(self._cipher.encrypt(self._four_kiblocks_of_zeros[:remaining_bytes])) + return b("").join(retval) + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/Random/Fortuna/SHAd256.py b/Lib/site-packages/Crypto/Random/Fortuna/SHAd256.py new file mode 100644 index 0000000..2e135c9 --- /dev/null +++ b/Lib/site-packages/Crypto/Random/Fortuna/SHAd256.py @@ -0,0 +1,98 @@ +# -*- coding: ascii -*- +# +# Random/Fortuna/SHAd256.py : SHA_d-256 hash function implementation +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""\ +SHA_d-256 hash function implementation. + +This module should comply with PEP 247. +""" + +__revision__ = "$Id$" +__all__ = ['new', 'digest_size'] + +import sys +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * +from Crypto.Util.py3compat import * + +from binascii import b2a_hex + +from Crypto.Hash import SHA256 + +assert SHA256.digest_size == 32 + +class _SHAd256(object): + """SHA-256, doubled. + + Returns SHA-256(SHA-256(data)). + """ + + digest_size = SHA256.digest_size + + _internal = object() + + def __init__(self, internal_api_check, sha256_hash_obj): + if internal_api_check is not self._internal: + raise AssertionError("Do not instantiate this class directly. 
Use %s.new()" % (__name__,)) + self._h = sha256_hash_obj + + # PEP 247 "copy" method + def copy(self): + """Return a copy of this hashing object""" + return _SHAd256(SHAd256._internal, self._h.copy()) + + # PEP 247 "digest" method + def digest(self): + """Return the hash value of this object as a binary string""" + retval = SHA256.new(self._h.digest()).digest() + assert len(retval) == 32 + return retval + + # PEP 247 "hexdigest" method + def hexdigest(self): + """Return the hash value of this object as a (lowercase) hexadecimal string""" + retval = b2a_hex(self.digest()) + assert len(retval) == 64 + if sys.version_info[0] == 2: + return retval + else: + return retval.decode() + + # PEP 247 "update" method + def update(self, data): + self._h.update(data) + +# PEP 247 module-level "digest_size" variable +digest_size = _SHAd256.digest_size + +# PEP 247 module-level "new" function +def new(data=None): + """Return a new SHAd256 hashing object""" + if not data: + data=b("") + sha = _SHAd256(_SHAd256._internal, SHA256.new(data)) + sha.new = globals()['new'] + return sha + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/Random/Fortuna/__init__.py b/Lib/site-packages/Crypto/Random/Fortuna/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/Lib/site-packages/Crypto/Random/OSRNG/__init__.py b/Lib/site-packages/Crypto/Random/OSRNG/__init__.py new file mode 100644 index 0000000..2fbbecb --- /dev/null +++ b/Lib/site-packages/Crypto/Random/OSRNG/__init__.py @@ -0,0 +1,40 @@ +# +# Random/OSRNG/__init__.py : Platform-independent OS RNG API +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Provides a platform-independent interface to the random number generators +supplied by various operating systems.""" + +__revision__ = "$Id$" + +import os + +if os.name == 'posix': + from Crypto.Random.OSRNG.posix import new +elif os.name == 'nt': + from Crypto.Random.OSRNG.nt import new +elif hasattr(os, 'urandom'): + from Crypto.Random.OSRNG.fallback import new +else: + raise ImportError("Not implemented") + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/Random/OSRNG/fallback.py b/Lib/site-packages/Crypto/Random/OSRNG/fallback.py new file mode 100644 index 0000000..f098f01 --- /dev/null +++ b/Lib/site-packages/Crypto/Random/OSRNG/fallback.py @@ -0,0 +1,46 @@ +# +# Random/OSRNG/fallback.py : Fallback entropy source for systems with os.urandom +# +# Written in 2008 by Dwayne C. 
Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + + +__revision__ = "$Id$" +__all__ = ['PythonOSURandomRNG'] + +import os + +from .rng_base import BaseRNG + +class PythonOSURandomRNG(BaseRNG): + + name = "" + + def __init__(self): + self._read = os.urandom + BaseRNG.__init__(self) + + def _close(self): + self._read = None + +def new(*args, **kwargs): + return PythonOSURandomRNG(*args, **kwargs) + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/Random/OSRNG/nt.py b/Lib/site-packages/Crypto/Random/OSRNG/nt.py new file mode 100644 index 0000000..8ffc590 --- /dev/null +++ b/Lib/site-packages/Crypto/Random/OSRNG/nt.py @@ -0,0 +1,74 @@ +# +# Random/OSRNG/nt.py : OS entropy source for MS Windows +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + + +__revision__ = "$Id$" +__all__ = ['WindowsRNG'] + +from . import winrandom +from .rng_base import BaseRNG + +class WindowsRNG(BaseRNG): + + name = "" + + def __init__(self): + self.__winrand = winrandom.new() + BaseRNG.__init__(self) + + def flush(self): + """Work around weakness in Windows RNG. + + The CryptGenRandom mechanism in some versions of Windows allows an + attacker to learn 128 KiB of past and future output. As a workaround, + this function reads 128 KiB of 'random' data from Windows and discards + it. 
+ + For more information about the weaknesses in CryptGenRandom, see + _Cryptanalysis of the Random Number Generator of the Windows Operating + System_, by Leo Dorrendorf and Zvi Gutterman and Benny Pinkas + http://eprint.iacr.org/2007/419 + """ + if self.closed: + raise ValueError("I/O operation on closed file") + data = self.__winrand.get_bytes(128*1024) + assert (len(data) == 128*1024) + BaseRNG.flush(self) + + def _close(self): + self.__winrand = None + + def _read(self, N): + # Unfortunately, research shows that CryptGenRandom doesn't provide + # forward secrecy and fails the next-bit test unless we apply a + # workaround, which we do here. See http://eprint.iacr.org/2007/419 + # for information on the vulnerability. + self.flush() + data = self.__winrand.get_bytes(N) + self.flush() + return data + +def new(*args, **kwargs): + return WindowsRNG(*args, **kwargs) + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/Random/OSRNG/posix.py b/Lib/site-packages/Crypto/Random/OSRNG/posix.py new file mode 100644 index 0000000..810a04a --- /dev/null +++ b/Lib/site-packages/Crypto/Random/OSRNG/posix.py @@ -0,0 +1,86 @@ +# +# Random/OSRNG/posix.py : OS entropy source for POSIX systems +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + + +__revision__ = "$Id$" +__all__ = ['DevURandomRNG'] + +import errno +import os +import stat + +from .rng_base import BaseRNG +from Crypto.Util.py3compat import b + +class DevURandomRNG(BaseRNG): + + def __init__(self, devname=None): + if devname is None: + self.name = "/dev/urandom" + else: + self.name = devname + + # Test that /dev/urandom is a character special device + f = open(self.name, "rb", 0) + fmode = os.fstat(f.fileno())[stat.ST_MODE] + if not stat.S_ISCHR(fmode): + f.close() + raise TypeError("%r is not a character special device" % (self.name,)) + + self.__file = f + + BaseRNG.__init__(self) + + def _close(self): + self.__file.close() + + def _read(self, N): + # Starting with Python 3 open with buffering=0 returns a FileIO object. + # FileIO.read behaves like read(2) and not like fread(3) and thus we + # have to handle the case that read returns less data as requested here + # more carefully. 
+ data = b("") + while len(data) < N: + try: + d = self.__file.read(N - len(data)) + except IOError as e: + # read(2) has been interrupted by a signal; redo the read + if e.errno == errno.EINTR: + continue + raise + + if d is None: + # __file is in non-blocking mode and no data is available + return data + if len(d) == 0: + # __file is in blocking mode and arrived at EOF + return data + + data += d + return data + +def new(*args, **kwargs): + return DevURandomRNG(*args, **kwargs) + + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/Random/OSRNG/rng_base.py b/Lib/site-packages/Crypto/Random/OSRNG/rng_base.py new file mode 100644 index 0000000..9039274 --- /dev/null +++ b/Lib/site-packages/Crypto/Random/OSRNG/rng_base.py @@ -0,0 +1,88 @@ +# +# Random/OSRNG/rng_base.py : Base class for OSRNG +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +__revision__ = "$Id$" + +import sys +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * + +class BaseRNG(object): + + def __init__(self): + self.closed = False + self._selftest() + + def __del__(self): + self.close() + + def _selftest(self): + # Test that urandom can return data + data = self.read(16) + if len(data) != 16: + raise AssertionError("read truncated") + + # Test that we get different data every time (if we don't, the RNG is + # probably malfunctioning) + data2 = self.read(16) + if data == data2: + raise AssertionError("OS RNG returned duplicate data") + + # PEP 343: Support for the "with" statement + def __enter__(self): + pass + def __exit__(self): + """PEP 343 support""" + self.close() + + def close(self): + if not self.closed: + self._close() + self.closed = True + + def flush(self): + pass + + def read(self, N=-1): + """Return N bytes from the RNG.""" + if self.closed: + raise ValueError("I/O operation on closed file") + if not isinstance(N, int): + raise TypeError("an integer is required") + if N < 0: + raise ValueError("cannot read to end of infinite stream") + elif N == 0: + return "" + data = self._read(N) + if len(data) != N: + raise AssertionError("%s produced truncated output (requested %d, got %d)" % (self.name, N, len(data))) + return data + + def _close(self): + raise NotImplementedError("child class must implement this") + + def _read(self, N): + raise NotImplementedError("child class must implement this") + + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/Random/_UserFriendlyRNG.py b/Lib/site-packages/Crypto/Random/_UserFriendlyRNG.py new file mode 100644 index 0000000..fdad10c --- /dev/null +++ b/Lib/site-packages/Crypto/Random/_UserFriendlyRNG.py @@ -0,0 +1,230 @@ +# -*- coding: utf-8 -*- +# +# Random/_UserFriendlyRNG.py : A user-friendly random number generator +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
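Taken together, the OSRNG modules above give a uniform platform RNG; a short usage sketch (which backend new() returns depends on os.name):

    from Crypto.Random import OSRNG

    rng = OSRNG.new()          # DevURandomRNG, WindowsRNG or the os.urandom fallback
    block = rng.read(16)       # BaseRNG.read() length-checks the underlying _read()
    assert len(block) == 16
    rng.close()                # close explicitly when done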
+# =================================================================== + +__revision__ = "$Id$" + +import sys +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * + +import os +import threading +import struct +import time +from math import floor + +from Crypto.Random import OSRNG +from Crypto.Random.Fortuna import FortunaAccumulator + +class _EntropySource(object): + def __init__(self, accumulator, src_num): + self._fortuna = accumulator + self._src_num = src_num + self._pool_num = 0 + + def feed(self, data): + self._fortuna.add_random_event(self._src_num, self._pool_num, data) + self._pool_num = (self._pool_num + 1) & 31 + +class _EntropyCollector(object): + + def __init__(self, accumulator): + self._osrng = OSRNG.new() + self._osrng_es = _EntropySource(accumulator, 255) + self._time_es = _EntropySource(accumulator, 254) + self._clock_es = _EntropySource(accumulator, 253) + + def reinit(self): + # Add 256 bits to each of the 32 pools, twice. (For a total of 16384 + # bits collected from the operating system.) + for i in range(2): + block = self._osrng.read(32*32) + for p in range(32): + self._osrng_es.feed(block[p*32:(p+1)*32]) + block = None + self._osrng.flush() + + def collect(self): + # Collect 64 bits of entropy from the operating system and feed it to Fortuna. + self._osrng_es.feed(self._osrng.read(8)) + + # Add the fractional part of time.time() + t = time.time() + self._time_es.feed(struct.pack("@I", int(2**30 * (t - floor(t))))) + + # Add the fractional part of time.clock() + t = time.clock() + self._clock_es.feed(struct.pack("@I", int(2**30 * (t - floor(t))))) + + +class _UserFriendlyRNG(object): + + def __init__(self): + self.closed = False + self._fa = FortunaAccumulator.FortunaAccumulator() + self._ec = _EntropyCollector(self._fa) + self.reinit() + + def reinit(self): + """Initialize the random number generator and seed it with entropy from + the operating system. + """ + + # Save the pid (helps ensure that Crypto.Random.atfork() gets called) + self._pid = os.getpid() + + # Collect entropy from the operating system and feed it to + # FortunaAccumulator + self._ec.reinit() + + # Override FortunaAccumulator's 100ms minimum re-seed interval. This + # is necessary to avoid a race condition between this function and + # self.read(), which that can otherwise cause forked child processes to + # produce identical output. (e.g. CVE-2013-1445) + # + # Note that if this function can be called frequently by an attacker, + # (and if the bits from OSRNG are insufficiently random) it will weaken + # Fortuna's ability to resist a state compromise extension attack. + self._fa._forget_last_reseed() + + def close(self): + self.closed = True + self._osrng = None + self._fa = None + + def flush(self): + pass + + def read(self, N): + """Return N bytes from the RNG.""" + if self.closed: + raise ValueError("I/O operation on closed file") + if not isinstance(N, int): + raise TypeError("an integer is required") + if N < 0: + raise ValueError("cannot read to end of infinite stream") + + # Collect some entropy and feed it to Fortuna + self._ec.collect() + + # Ask Fortuna to generate some bytes + retval = self._fa.random_data(N) + + # Check that we haven't forked in the meantime. (If we have, we don't + # want to use the data, because it might have been duplicated in the + # parent process. + self._check_pid() + + # Return the random data. 
+ return retval + + def _check_pid(self): + # Lame fork detection to remind developers to invoke Random.atfork() + # after every call to os.fork(). Note that this check is not reliable, + # since process IDs can be reused on most operating systems. + # + # You need to do Random.atfork() in the child process after every call + # to os.fork() to avoid reusing PRNG state. If you want to avoid + # leaking PRNG state to child processes (for example, if you are using + # os.setuid()) then you should also invoke Random.atfork() in the + # *parent* process. + if os.getpid() != self._pid: + raise AssertionError("PID check failed. RNG must be re-initialized after fork(). Hint: Try Random.atfork()") + + +class _LockingUserFriendlyRNG(_UserFriendlyRNG): + def __init__(self): + self._lock = threading.Lock() + _UserFriendlyRNG.__init__(self) + + def close(self): + self._lock.acquire() + try: + return _UserFriendlyRNG.close(self) + finally: + self._lock.release() + + def reinit(self): + self._lock.acquire() + try: + return _UserFriendlyRNG.reinit(self) + finally: + self._lock.release() + + def read(self, bytes): + self._lock.acquire() + try: + return _UserFriendlyRNG.read(self, bytes) + finally: + self._lock.release() + +class RNGFile(object): + def __init__(self, singleton): + self.closed = False + self._singleton = singleton + + # PEP 343: Support for the "with" statement + def __enter__(self): + """PEP 343 support""" + def __exit__(self): + """PEP 343 support""" + self.close() + + def close(self): + # Don't actually close the singleton, just close this RNGFile instance. + self.closed = True + self._singleton = None + + def read(self, bytes): + if self.closed: + raise ValueError("I/O operation on closed file") + return self._singleton.read(bytes) + + def flush(self): + if self.closed: + raise ValueError("I/O operation on closed file") + +_singleton_lock = threading.Lock() +_singleton = None +def _get_singleton(): + global _singleton + _singleton_lock.acquire() + try: + if _singleton is None: + _singleton = _LockingUserFriendlyRNG() + return _singleton + finally: + _singleton_lock.release() + +def new(): + return RNGFile(_get_singleton()) + +def reinit(): + _get_singleton().reinit() + +def get_random_bytes(n): + """Return the specified number of cryptographically-strong random bytes.""" + return _get_singleton().read(n) + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/Random/__init__.py b/Lib/site-packages/Crypto/Random/__init__.py new file mode 100644 index 0000000..659ffee --- /dev/null +++ b/Lib/site-packages/Crypto/Random/__init__.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# +# Random/__init__.py : PyCrypto random number generation +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +__revision__ = "$Id$" +__all__ = ['new'] + +from Crypto.Random import OSRNG +from Crypto.Random import _UserFriendlyRNG + +def new(*args, **kwargs): + """Return a file-like object that outputs cryptographically random bytes.""" + return _UserFriendlyRNG.new(*args, **kwargs) + +def atfork(): + """Call this whenever you call os.fork()""" + _UserFriendlyRNG.reinit() + +def get_random_bytes(n): + """Return the specified number of cryptographically-strong random bytes.""" + return _UserFriendlyRNG.get_random_bytes(n) + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/Random/random.py b/Lib/site-packages/Crypto/Random/random.py new file mode 100644 index 0000000..4d21e0f --- /dev/null +++ b/Lib/site-packages/Crypto/Random/random.py @@ -0,0 +1,142 @@ +# -*- coding: utf-8 -*- +# +# Random/random.py : Strong alternative for the standard 'random' module +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
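The public entry points defined above are what applications are expected to use; a brief usage sketch:

    from Crypto import Random

    rng = Random.new()                      # file-like object backed by Fortuna plus the OS RNG
    session_key = rng.read(32)
    nonce = Random.get_random_bytes(16)
    # After os.fork(), call Random.atfork() in the child (see _check_pid() above).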
+# =================================================================== + +"""A cryptographically strong version of Python's standard "random" module.""" + +__revision__ = "$Id$" +__all__ = ['StrongRandom', 'getrandbits', 'randrange', 'randint', 'choice', 'shuffle', 'sample'] + +from Crypto import Random +import sys +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * + +class StrongRandom(object): + def __init__(self, rng=None, randfunc=None): + if randfunc is None and rng is None: + self._randfunc = None + elif randfunc is not None and rng is None: + self._randfunc = randfunc + elif randfunc is None and rng is not None: + self._randfunc = rng.read + else: + raise ValueError("Cannot specify both 'rng' and 'randfunc'") + + def getrandbits(self, k): + """Return a python long integer with k random bits.""" + if self._randfunc is None: + self._randfunc = Random.new().read + mask = (1 << k) - 1 + return mask & bytes_to_long(self._randfunc(ceil_div(k, 8))) + + def randrange(self, *args): + """randrange([start,] stop[, step]): + Return a randomly-selected element from range(start, stop, step).""" + if len(args) == 3: + (start, stop, step) = args + elif len(args) == 2: + (start, stop) = args + step = 1 + elif len(args) == 1: + (stop,) = args + start = 0 + step = 1 + else: + raise TypeError("randrange expected at most 3 arguments, got %d" % (len(args),)) + if (not isinstance(start, int) + or not isinstance(stop, int) + or not isinstance(step, int)): + raise TypeError("randrange requires integer arguments") + if step == 0: + raise ValueError("randrange step argument must not be zero") + + num_choices = ceil_div(stop - start, step) + if num_choices < 0: + num_choices = 0 + if num_choices < 1: + raise ValueError("empty range for randrange(%r, %r, %r)" % (start, stop, step)) + + # Pick a random number in the range of possible numbers + r = num_choices + while r >= num_choices: + r = self.getrandbits(size(num_choices)) + + return start + (step * r) + + def randint(self, a, b): + """Return a random integer N such that a <= N <= b.""" + if not isinstance(a, int) or not isinstance(b, int): + raise TypeError("randint requires integer arguments") + N = self.randrange(a, b+1) + assert a <= N <= b + return N + + def choice(self, seq): + """Return a random element from a (non-empty) sequence. + + If the seqence is empty, raises IndexError. + """ + if len(seq) == 0: + raise IndexError("empty sequence") + return seq[self.randrange(len(seq))] + + def shuffle(self, x): + """Shuffle the sequence in place.""" + # Make a (copy) of the list of objects we want to shuffle + items = list(x) + + # Choose a random item (without replacement) until all the items have been + # chosen. 
+ for i in range(len(x)): + x[i] = items.pop(self.randrange(len(items))) + + def sample(self, population, k): + """Return a k-length list of unique elements chosen from the population sequence.""" + + num_choices = len(population) + if k > num_choices: + raise ValueError("sample larger than population") + + retval = [] + selected = {} # we emulate a set using a dict here + for i in range(k): + r = None + while r is None or r in selected: + r = self.randrange(num_choices) + retval.append(population[r]) + selected[r] = 1 + return retval + +_r = StrongRandom() +getrandbits = _r.getrandbits +randrange = _r.randrange +randint = _r.randint +choice = _r.choice +shuffle = _r.shuffle +sample = _r.sample + +# These are at the bottom to avoid problems with recursive imports +from Crypto.Util.number import ceil_div, bytes_to_long, long_to_bytes, size + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Cipher/__init__.py b/Lib/site-packages/Crypto/SelfTest/Cipher/__init__.py new file mode 100644 index 0000000..63e9c57 --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Cipher/__init__.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/__init__.py: Self-test for cipher modules +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
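The StrongRandom class and the module-level aliases bound at the bottom of Crypto/Random/random.py above mirror a subset of the standard random module on top of the strong RNG. A minimal sketch of both styles of use (not part of the patch; the concrete values are illustrative only):

from Crypto import Random
from Crypto.Random import random as crypto_random

# Module-level helpers (bound to a shared StrongRandom instance).
token = crypto_random.getrandbits(128)        # 128-bit integer
roll = crypto_random.randint(1, 6)            # inclusive bounds
pick = crypto_random.choice(['a', 'b', 'c'])  # raises IndexError on an empty sequence

deck = list(range(52))
crypto_random.shuffle(deck)                   # in-place, without replacement
hand = crypto_random.sample(deck, 5)          # 5 unique elements

# An explicit RNG can also be supplied; its read() method becomes the randfunc.
sr = crypto_random.StrongRandom(rng=Random.new())
n = sr.randrange(0, 100, 7)                   # an element of range(0, 100, 7)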
+# =================================================================== + +"""Self-test for cipher modules""" + +__revision__ = "$Id$" + +def get_tests(config={}): + tests = [] + from Crypto.SelfTest.Cipher import test_AES; tests += test_AES.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_ARC2; tests += test_ARC2.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_ARC4; tests += test_ARC4.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_Blowfish; tests += test_Blowfish.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_CAST; tests += test_CAST.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_DES3; tests += test_DES3.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_DES; tests += test_DES.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_XOR; tests += test_XOR.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_pkcs1_15; tests += test_pkcs1_15.get_tests(config=config) + from Crypto.SelfTest.Cipher import test_pkcs1_oaep; tests += test_pkcs1_oaep.get_tests(config=config) + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Cipher/common.py b/Lib/site-packages/Crypto/SelfTest/Cipher/common.py new file mode 100644 index 0000000..8e684fe --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Cipher/common.py @@ -0,0 +1,399 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/common.py: Common code for Crypto.SelfTest.Hash +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
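The get_tests() hook defined in SelfTest/Cipher/__init__.py above is how the individual cipher test modules are folded into a single unittest suite (the __main__ block does the same thing). A minimal sketch of driving it directly, assuming the packages added by this patch are importable:

import unittest
from Crypto.SelfTest.Cipher import get_tests

suite = unittest.TestSuite(get_tests(config={}))  # the config dict is passed through to each test module
unittest.TextTestRunner(verbosity=2).run(suite)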
+# =================================================================== + +"""Self-testing for PyCrypto hash modules""" + +__revision__ = "$Id$" + +import sys +import unittest +from binascii import a2b_hex, b2a_hex +from Crypto.Util.py3compat import * + +# For compatibility with Python 2.1 and Python 2.2 +if sys.hexversion < 0x02030000: + # Python 2.1 doesn't have a dict() function + # Python 2.2 dict() function raises TypeError if you do dict(MD5='blah') + def dict(**kwargs): + return kwargs.copy() +else: + dict = dict + +class _NoDefault: pass # sentinel object +def _extract(d, k, default=_NoDefault): + """Get an item from a dictionary, and remove it from the dictionary.""" + try: + retval = d[k] + except KeyError: + if default is _NoDefault: + raise + return default + del d[k] + return retval + +# Generic cipher test case +class CipherSelfTest(unittest.TestCase): + + def __init__(self, module, params): + unittest.TestCase.__init__(self) + self.module = module + + # Extract the parameters + params = params.copy() + self.description = _extract(params, 'description') + self.key = b(_extract(params, 'key')) + self.plaintext = b(_extract(params, 'plaintext')) + self.ciphertext = b(_extract(params, 'ciphertext')) + self.module_name = _extract(params, 'module_name', None) + + mode = _extract(params, 'mode', None) + self.mode_name = str(mode) + if mode is not None: + # Block cipher + self.mode = getattr(self.module, "MODE_" + mode) + self.iv = _extract(params, 'iv', None) + if self.iv is not None: self.iv = b(self.iv) + + # Only relevant for OPENPGP mode + self.encrypted_iv = _extract(params, 'encrypted_iv', None) + if self.encrypted_iv is not None: + self.encrypted_iv = b(self.encrypted_iv) + else: + # Stream cipher + self.mode = None + self.iv = None + + self.extra_params = params + + def shortDescription(self): + return self.description + + def _new(self, do_decryption=0): + params = self.extra_params.copy() + + # Handle CTR mode parameters. 
By default, we use Counter.new(self.module.block_size) + if hasattr(self.module, "MODE_CTR") and self.mode == self.module.MODE_CTR: + from Crypto.Util import Counter + ctr_class = _extract(params, 'ctr_class', Counter.new) + ctr_params = _extract(params, 'ctr_params', {}).copy() + if 'prefix' in ctr_params: ctr_params['prefix'] = a2b_hex(b(ctr_params['prefix'])) + if 'suffix' in ctr_params: ctr_params['suffix'] = a2b_hex(b(ctr_params['suffix'])) + if 'nbits' not in ctr_params: + ctr_params['nbits'] = 8*(self.module.block_size - len(ctr_params.get('prefix', '')) - len(ctr_params.get('suffix', ''))) + params['counter'] = ctr_class(**ctr_params) + + if self.mode is None: + # Stream cipher + return self.module.new(a2b_hex(self.key), **params) + elif self.iv is None: + # Block cipher without iv + return self.module.new(a2b_hex(self.key), self.mode, **params) + else: + # Block cipher with iv + if do_decryption and self.mode == self.module.MODE_OPENPGP: + # In PGP mode, the IV to feed for decryption is the *encrypted* one + return self.module.new(a2b_hex(self.key), self.mode, a2b_hex(self.encrypted_iv), **params) + else: + return self.module.new(a2b_hex(self.key), self.mode, a2b_hex(self.iv), **params) + + def runTest(self): + plaintext = a2b_hex(self.plaintext) + ciphertext = a2b_hex(self.ciphertext) + + ct1 = b2a_hex(self._new().encrypt(plaintext)) + pt1 = b2a_hex(self._new(1).decrypt(ciphertext)) + ct2 = b2a_hex(self._new().encrypt(plaintext)) + pt2 = b2a_hex(self._new(1).decrypt(ciphertext)) + + if hasattr(self.module, "MODE_OPENPGP") and self.mode == self.module.MODE_OPENPGP: + # In PGP mode, data returned by the first encrypt() + # is prefixed with the encrypted IV. + # Here we check it and then remove it from the ciphertexts. + eilen = len(self.encrypted_iv) + self.assertEqual(self.encrypted_iv, ct1[:eilen]) + self.assertEqual(self.encrypted_iv, ct2[:eilen]) + ct1 = ct1[eilen:] + ct2 = ct2[eilen:] + + self.assertEqual(self.ciphertext, ct1) # encrypt + self.assertEqual(self.ciphertext, ct2) # encrypt (second time) + self.assertEqual(self.plaintext, pt1) # decrypt + self.assertEqual(self.plaintext, pt2) # decrypt (second time) + +class CipherStreamingSelfTest(CipherSelfTest): + + def shortDescription(self): + desc = self.module_name + if self.mode is not None: + desc += " in %s mode" % (self.mode_name,) + return "%s should behave like a stream cipher" % (desc,) + + def runTest(self): + plaintext = a2b_hex(self.plaintext) + ciphertext = a2b_hex(self.ciphertext) + + # The cipher should work like a stream cipher + + # Test counter mode encryption, 3 bytes at a time + ct3 = [] + cipher = self._new() + for i in range(0, len(plaintext), 3): + ct3.append(cipher.encrypt(plaintext[i:i+3])) + ct3 = b2a_hex(b("").join(ct3)) + self.assertEqual(self.ciphertext, ct3) # encryption (3 bytes at a time) + + # Test counter mode decryption, 3 bytes at a time + pt3 = [] + cipher = self._new() + for i in range(0, len(ciphertext), 3): + pt3.append(cipher.encrypt(ciphertext[i:i+3])) + # PY3K: This is meant to be text, do not change to bytes (data) + pt3 = b2a_hex(b("").join(pt3)) + self.assertEqual(self.plaintext, pt3) # decryption (3 bytes at a time) + +class CTRSegfaultTest(unittest.TestCase): + + def __init__(self, module, params): + unittest.TestCase.__init__(self) + self.module = module + self.key = b(params['key']) + self.module_name = params.get('module_name', None) + + def shortDescription(self): + return """Regression test: %s.new(key, %s.MODE_CTR) should raise TypeError, not segfault""" % 
(self.module_name, self.module_name) + + def runTest(self): + self.assertRaises(TypeError, self.module.new, a2b_hex(self.key), self.module.MODE_CTR) + +class CTRWraparoundTest(unittest.TestCase): + + def __init__(self, module, params): + unittest.TestCase.__init__(self) + self.module = module + self.key = b(params['key']) + self.module_name = params.get('module_name', None) + + def shortDescription(self): + return """Regression test: %s with MODE_CTR should raise OverflowError on wraparound when shortcut used""" % (self.module_name,) + + def runTest(self): + from Crypto.Util import Counter + + for disable_shortcut in (0, 1): # (False, True) Test CTR-mode shortcut and PyObject_CallObject code paths + for little_endian in (0, 1): # (False, True) Test both endiannesses + ctr = Counter.new(8*self.module.block_size, initial_value=2**(8*self.module.block_size)-1, little_endian=little_endian, disable_shortcut=disable_shortcut) + cipher = self.module.new(a2b_hex(self.key), self.module.MODE_CTR, counter=ctr) + block = b("\x00") * self.module.block_size + cipher.encrypt(block) + self.assertRaises(OverflowError, cipher.encrypt, block) + +class CFBSegmentSizeTest(unittest.TestCase): + + def __init__(self, module, params): + unittest.TestCase.__init__(self) + self.module = module + self.key = b(params['key']) + self.description = params['description'] + + def shortDescription(self): + return self.description + + def runTest(self): + """Regression test: m.new(key, m.MODE_CFB, segment_size=N) should require segment_size to be a multiple of 8 bits""" + for i in range(1, 8): + self.assertRaises(ValueError, self.module.new, a2b_hex(self.key), self.module.MODE_CFB, segment_size=i) + self.module.new(a2b_hex(self.key), self.module.MODE_CFB, "\0"*self.module.block_size, segment_size=8) # should succeed + +class RoundtripTest(unittest.TestCase): + def __init__(self, module, params): + from Crypto import Random + unittest.TestCase.__init__(self) + self.module = module + self.iv = Random.get_random_bytes(module.block_size) + self.key = b(params['key']) + self.plaintext = 100 * b(params['plaintext']) + self.module_name = params.get('module_name', None) + + def shortDescription(self): + return """%s .decrypt() output of .encrypt() should not be garbled""" % (self.module_name,) + + def runTest(self): + for mode in (self.module.MODE_ECB, self.module.MODE_CBC, self.module.MODE_CFB, self.module.MODE_OFB, self.module.MODE_OPENPGP): + encryption_cipher = self.module.new(a2b_hex(self.key), mode, self.iv) + ciphertext = encryption_cipher.encrypt(self.plaintext) + + if mode != self.module.MODE_OPENPGP: + decryption_cipher = self.module.new(a2b_hex(self.key), mode, self.iv) + else: + eiv = ciphertext[:self.module.block_size+2] + ciphertext = ciphertext[self.module.block_size+2:] + decryption_cipher = self.module.new(a2b_hex(self.key), mode, eiv) + decrypted_plaintext = decryption_cipher.decrypt(ciphertext) + self.assertEqual(self.plaintext, decrypted_plaintext) + +class PGPTest(unittest.TestCase): + def __init__(self, module, params): + unittest.TestCase.__init__(self) + self.module = module + self.key = b(params['key']) + + def shortDescription(self): + return "MODE_PGP was implemented incorrectly and insecurely. It's completely banished now." 
+ + def runTest(self): + self.assertRaises(ValueError, self.module.new, a2b_hex(self.key), + self.module.MODE_PGP) + +class IVLengthTest(unittest.TestCase): + def __init__(self, module, params): + unittest.TestCase.__init__(self) + self.module = module + self.key = b(params['key']) + + def shortDescription(self): + return "Check that all modes except MODE_ECB and MODE_CTR require an IV of the proper length" + + def runTest(self): + self.assertRaises(ValueError, self.module.new, a2b_hex(self.key), + self.module.MODE_CBC, "") + self.assertRaises(ValueError, self.module.new, a2b_hex(self.key), + self.module.MODE_CFB, "") + self.assertRaises(ValueError, self.module.new, a2b_hex(self.key), + self.module.MODE_OFB, "") + self.assertRaises(ValueError, self.module.new, a2b_hex(self.key), + self.module.MODE_OPENPGP, "") + self.module.new(a2b_hex(self.key), self.module.MODE_ECB, "") + self.module.new(a2b_hex(self.key), self.module.MODE_CTR, "", counter=self._dummy_counter) + + def _dummy_counter(self): + return "\0" * self.module.block_size + +def make_block_tests(module, module_name, test_data): + tests = [] + extra_tests_added = 0 + for i in range(len(test_data)): + row = test_data[i] + + # Build the "params" dictionary + params = {'mode': 'ECB'} + if len(row) == 3: + (params['plaintext'], params['ciphertext'], params['key']) = row + elif len(row) == 4: + (params['plaintext'], params['ciphertext'], params['key'], params['description']) = row + elif len(row) == 5: + (params['plaintext'], params['ciphertext'], params['key'], params['description'], extra_params) = row + params.update(extra_params) + else: + raise AssertionError("Unsupported tuple size %d" % (len(row),)) + + # Build the display-name for the test + p2 = params.copy() + p_key = _extract(p2, 'key') + p_plaintext = _extract(p2, 'plaintext') + p_ciphertext = _extract(p2, 'ciphertext') + p_description = _extract(p2, 'description', None) + p_mode = p2.get('mode', 'ECB') + if p_mode == 'ECB': + _extract(p2, 'mode', 'ECB') + + if p_description is not None: + description = p_description + elif p_mode == 'ECB' and not p2: + description = "p=%s, k=%s" % (p_plaintext, p_key) + else: + description = "p=%s, k=%s, %r" % (p_plaintext, p_key, p2) + name = "%s #%d: %s" % (module_name, i+1, description) + params['description'] = name + params['module_name'] = module_name + + # Add extra test(s) to the test suite before the current test + if not extra_tests_added: + tests += [ + CTRSegfaultTest(module, params), + CTRWraparoundTest(module, params), + CFBSegmentSizeTest(module, params), + RoundtripTest(module, params), + PGPTest(module, params), + IVLengthTest(module, params), + ] + extra_tests_added = 1 + + # Add the current test to the test suite + tests.append(CipherSelfTest(module, params)) + + # When using CTR mode, test that the interface behaves like a stream cipher + if p_mode == 'CTR': + tests.append(CipherStreamingSelfTest(module, params)) + + # When using CTR mode, test the non-shortcut code path. 
+ if p_mode == 'CTR' and 'ctr_class' not in params: + params2 = params.copy() + params2['description'] += " (shortcut disabled)" + ctr_params2 = params.get('ctr_params', {}).copy() + params2['ctr_params'] = ctr_params2 + if 'disable_shortcut' not in params2['ctr_params']: + params2['ctr_params']['disable_shortcut'] = 1 + tests.append(CipherSelfTest(module, params2)) + return tests + +def make_stream_tests(module, module_name, test_data): + tests = [] + for i in range(len(test_data)): + row = test_data[i] + + # Build the "params" dictionary + params = {} + if len(row) == 3: + (params['plaintext'], params['ciphertext'], params['key']) = row + elif len(row) == 4: + (params['plaintext'], params['ciphertext'], params['key'], params['description']) = row + elif len(row) == 5: + (params['plaintext'], params['ciphertext'], params['key'], params['description'], extra_params) = row + params.update(extra_params) + else: + raise AssertionError("Unsupported tuple size %d" % (len(row),)) + + # Build the display-name for the test + p2 = params.copy() + p_key = _extract(p2, 'key') + p_plaintext = _extract(p2, 'plaintext') + p_ciphertext = _extract(p2, 'ciphertext') + p_description = _extract(p2, 'description', None) + + if p_description is not None: + description = p_description + elif not p2: + description = "p=%s, k=%s" % (p_plaintext, p_key) + else: + description = "p=%s, k=%s, %r" % (p_plaintext, p_key, p2) + name = "%s #%d: %s" % (module_name, i+1, description) + params['description'] = name + params['module_name'] = module_name + + # Add the test to the test suite + tests.append(CipherSelfTest(module, params)) + tests.append(CipherStreamingSelfTest(module, params)) + return tests + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Cipher/test_AES.py b/Lib/site-packages/Crypto/SelfTest/Cipher/test_AES.py new file mode 100644 index 0000000..8ee0b87 --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Cipher/test_AES.py @@ -0,0 +1,1433 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/AES.py: Self-test for the AES cipher +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Cipher.AES""" + +__revision__ = "$Id$" + +from .common import dict # For compatibility with Python 2.1 and 2.2 +from Crypto.Util.py3compat import * +from binascii import hexlify + +# This is a list of (plaintext, ciphertext, key[, description[, params]]) tuples. 
+test_data = [ + # FIPS PUB 197 test vectors + # http://csrc.nist.gov/publications/fips/fips197/fips-197.pdf + + ('00112233445566778899aabbccddeeff', '69c4e0d86a7b0430d8cdb78070b4c55a', + '000102030405060708090a0b0c0d0e0f', 'FIPS 197 C.1 (AES-128)'), + + ('00112233445566778899aabbccddeeff', 'dda97ca4864cdfe06eaf70a0ec0d7191', + '000102030405060708090a0b0c0d0e0f1011121314151617', + 'FIPS 197 C.2 (AES-192)'), + + ('00112233445566778899aabbccddeeff', '8ea2b7ca516745bfeafc49904b496089', + '000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f', + 'FIPS 197 C.3 (AES-256)'), + + # Rijndael128 test vectors + # Downloaded 2008-09-13 from + # http://www.iaik.tugraz.at/Research/krypto/AES/old/~rijmen/rijndael/testvalues.tar.gz + + # ecb_tbl.txt, KEYSIZE=128 + ('506812a45f08c889b97f5980038b8359', 'd8f532538289ef7d06b506a4fd5be9c9', + '00010203050607080a0b0c0d0f101112', + 'ecb-tbl-128: I=1'), + ('5c6d71ca30de8b8b00549984d2ec7d4b', '59ab30f4d4ee6e4ff9907ef65b1fb68c', + '14151617191a1b1c1e1f202123242526', + 'ecb-tbl-128: I=2'), + ('53f3f4c64f8616e4e7c56199f48f21f6', 'bf1ed2fcb2af3fd41443b56d85025cb1', + '28292a2b2d2e2f30323334353738393a', + 'ecb-tbl-128: I=3'), + ('a1eb65a3487165fb0f1c27ff9959f703', '7316632d5c32233edcb0780560eae8b2', + '3c3d3e3f41424344464748494b4c4d4e', + 'ecb-tbl-128: I=4'), + ('3553ecf0b1739558b08e350a98a39bfa', '408c073e3e2538072b72625e68b8364b', + '50515253555657585a5b5c5d5f606162', + 'ecb-tbl-128: I=5'), + ('67429969490b9711ae2b01dc497afde8', 'e1f94dfa776597beaca262f2f6366fea', + '64656667696a6b6c6e6f707173747576', + 'ecb-tbl-128: I=6'), + ('93385c1f2aec8bed192f5a8e161dd508', 'f29e986c6a1c27d7b29ffd7ee92b75f1', + '78797a7b7d7e7f80828384858788898a', + 'ecb-tbl-128: I=7'), + ('b5bf946be19beb8db3983b5f4c6e8ddb', '131c886a57f8c2e713aba6955e2b55b5', + '8c8d8e8f91929394969798999b9c9d9e', + 'ecb-tbl-128: I=8'), + ('41321ee10e21bd907227c4450ff42324', 'd2ab7662df9b8c740210e5eeb61c199d', + 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2', + 'ecb-tbl-128: I=9'), + ('00a82f59c91c8486d12c0a80124f6089', '14c10554b2859c484cab5869bbe7c470', + 'b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', + 'ecb-tbl-128: I=10'), + ('7ce0fd076754691b4bbd9faf8a1372fe', 'db4d498f0a49cf55445d502c1f9ab3b5', + 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9da', + 'ecb-tbl-128: I=11'), + ('23605a8243d07764541bc5ad355b3129', '6d96fef7d66590a77a77bb2056667f7f', + 'dcdddedfe1e2e3e4e6e7e8e9ebecedee', + 'ecb-tbl-128: I=12'), + ('12a8cfa23ea764fd876232b4e842bc44', '316fb68edba736c53e78477bf913725c', + 'f0f1f2f3f5f6f7f8fafbfcfdfe010002', + 'ecb-tbl-128: I=13'), + ('bcaf32415e8308b3723e5fdd853ccc80', '6936f2b93af8397fd3a771fc011c8c37', + '04050607090a0b0c0e0f101113141516', + 'ecb-tbl-128: I=14'), + ('89afae685d801ad747ace91fc49adde0', 'f3f92f7a9c59179c1fcc2c2ba0b082cd', + '2c2d2e2f31323334363738393b3c3d3e', + 'ecb-tbl-128: I=15'), + ('f521d07b484357c4a69e76124a634216', '6a95ea659ee3889158e7a9152ff04ebc', + '40414243454647484a4b4c4d4f505152', + 'ecb-tbl-128: I=16'), + ('3e23b3bc065bcc152407e23896d77783', '1959338344e945670678a5d432c90b93', + '54555657595a5b5c5e5f606163646566', + 'ecb-tbl-128: I=17'), + ('79f0fba002be1744670e7e99290d8f52', 'e49bddd2369b83ee66e6c75a1161b394', + '68696a6b6d6e6f70727374757778797a', + 'ecb-tbl-128: I=18'), + ('da23fe9d5bd63e1d72e3dafbe21a6c2a', 'd3388f19057ff704b70784164a74867d', + '7c7d7e7f81828384868788898b8c8d8e', + 'ecb-tbl-128: I=19'), + ('e3f5698ba90b6a022efd7db2c7e6c823', '23aa03e2d5e4cd24f3217e596480d1e1', + 'a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', + 'ecb-tbl-128: I=20'), + ('bdc2691d4f1b73d2700679c3bcbf9c6e', 
'c84113d68b666ab2a50a8bdb222e91b9', + 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2', + 'ecb-tbl-128: I=21'), + ('ba74e02093217ee1ba1b42bd5624349a', 'ac02403981cd4340b507963db65cb7b6', + '08090a0b0d0e0f10121314151718191a', + 'ecb-tbl-128: I=22'), + ('b5c593b5851c57fbf8b3f57715e8f680', '8d1299236223359474011f6bf5088414', + '6c6d6e6f71727374767778797b7c7d7e', + 'ecb-tbl-128: I=23'), + ('3da9bd9cec072381788f9387c3bbf4ee', '5a1d6ab8605505f7977e55b9a54d9b90', + '80818283858687888a8b8c8d8f909192', + 'ecb-tbl-128: I=24'), + ('4197f3051121702ab65d316b3c637374', '72e9c2d519cf555e4208805aabe3b258', + '94959697999a9b9c9e9fa0a1a3a4a5a6', + 'ecb-tbl-128: I=25'), + ('9f46c62ec4f6ee3f6e8c62554bc48ab7', 'a8f3e81c4a23a39ef4d745dffe026e80', + 'a8a9aaabadaeafb0b2b3b4b5b7b8b9ba', + 'ecb-tbl-128: I=26'), + ('0220673fe9e699a4ebc8e0dbeb6979c8', '546f646449d31458f9eb4ef5483aee6c', + 'bcbdbebfc1c2c3c4c6c7c8c9cbcccdce', + 'ecb-tbl-128: I=27'), + ('b2b99171337ded9bc8c2c23ff6f18867', '4dbe4bc84ac797c0ee4efb7f1a07401c', + 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2', + 'ecb-tbl-128: I=28'), + ('a7facf4e301e984e5efeefd645b23505', '25e10bfb411bbd4d625ac8795c8ca3b3', + 'e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', + 'ecb-tbl-128: I=29'), + ('f7c762e4a9819160fd7acfb6c4eedcdd', '315637405054ec803614e43def177579', + 'f8f9fafbfdfefe00020304050708090a', + 'ecb-tbl-128: I=30'), + ('9b64fc21ea08709f4915436faa70f1be', '60c5bc8a1410247295c6386c59e572a8', + '0c0d0e0f11121314161718191b1c1d1e', + 'ecb-tbl-128: I=31'), + ('52af2c3de07ee6777f55a4abfc100b3f', '01366fc8ca52dfe055d6a00a76471ba6', + '20212223252627282a2b2c2d2f303132', + 'ecb-tbl-128: I=32'), + ('2fca001224386c57aa3f968cbe2c816f', 'ecc46595516ec612449c3f581e7d42ff', + '34353637393a3b3c3e3f404143444546', + 'ecb-tbl-128: I=33'), + ('4149c73658a4a9c564342755ee2c132f', '6b7ffe4c602a154b06ee9c7dab5331c9', + '48494a4b4d4e4f50525354555758595a', + 'ecb-tbl-128: I=34'), + ('af60005a00a1772f7c07a48a923c23d2', '7da234c14039a240dd02dd0fbf84eb67', + '5c5d5e5f61626364666768696b6c6d6e', + 'ecb-tbl-128: I=35'), + ('6fccbc28363759914b6f0280afaf20c6', 'c7dc217d9e3604ffe7e91f080ecd5a3a', + '70717273757677787a7b7c7d7f808182', + 'ecb-tbl-128: I=36'), + ('7d82a43ddf4fefa2fc5947499884d386', '37785901863f5c81260ea41e7580cda5', + '84858687898a8b8c8e8f909193949596', + 'ecb-tbl-128: I=37'), + ('5d5a990eaab9093afe4ce254dfa49ef9', 'a07b9338e92ed105e6ad720fccce9fe4', + '98999a9b9d9e9fa0a2a3a4a5a7a8a9aa', + 'ecb-tbl-128: I=38'), + ('4cd1e2fd3f4434b553aae453f0ed1a02', 'ae0fb9722418cc21a7da816bbc61322c', + 'acadaeafb1b2b3b4b6b7b8b9bbbcbdbe', + 'ecb-tbl-128: I=39'), + ('5a2c9a9641d4299125fa1b9363104b5e', 'c826a193080ff91ffb21f71d3373c877', + 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2', + 'ecb-tbl-128: I=40'), + ('b517fe34c0fa217d341740bfd4fe8dd4', '1181b11b0e494e8d8b0aa6b1d5ac2c48', + 'd4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', + 'ecb-tbl-128: I=41'), + ('014baf2278a69d331d5180103643e99a', '6743c3d1519ab4f2cd9a78ab09a511bd', + 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fa', + 'ecb-tbl-128: I=42'), + ('b529bd8164f20d0aa443d4932116841c', 'dc55c076d52bacdf2eefd952946a439d', + 'fcfdfeff01020304060708090b0c0d0e', + 'ecb-tbl-128: I=43'), + ('2e596dcbb2f33d4216a1176d5bd1e456', '711b17b590ffc72b5c8e342b601e8003', + '10111213151617181a1b1c1d1f202122', + 'ecb-tbl-128: I=44'), + ('7274a1ea2b7ee2424e9a0e4673689143', '19983bb0950783a537e1339f4aa21c75', + '24252627292a2b2c2e2f303133343536', + 'ecb-tbl-128: I=45'), + ('ae20020bd4f13e9d90140bee3b5d26af', '3ba7762e15554169c0f4fa39164c410c', + '38393a3b3d3e3f40424344454748494a', + 'ecb-tbl-128: I=46'), + 
('baac065da7ac26e855e79c8849d75a02', 'a0564c41245afca7af8aa2e0e588ea89', + '4c4d4e4f51525354565758595b5c5d5e', + 'ecb-tbl-128: I=47'), + ('7c917d8d1d45fab9e2540e28832540cc', '5e36a42a2e099f54ae85ecd92e2381ed', + '60616263656667686a6b6c6d6f707172', + 'ecb-tbl-128: I=48'), + ('bde6f89e16daadb0e847a2a614566a91', '770036f878cd0f6ca2268172f106f2fe', + '74757677797a7b7c7e7f808183848586', + 'ecb-tbl-128: I=49'), + ('c9de163725f1f5be44ebb1db51d07fbc', '7e4e03908b716116443ccf7c94e7c259', + '88898a8b8d8e8f90929394959798999a', + 'ecb-tbl-128: I=50'), + ('3af57a58f0c07dffa669572b521e2b92', '482735a48c30613a242dd494c7f9185d', + '9c9d9e9fa1a2a3a4a6a7a8a9abacadae', + 'ecb-tbl-128: I=51'), + ('3d5ebac306dde4604f1b4fbbbfcdae55', 'b4c0f6c9d4d7079addf9369fc081061d', + 'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2', + 'ecb-tbl-128: I=52'), + ('c2dfa91bceb76a1183c995020ac0b556', 'd5810fe0509ac53edcd74f89962e6270', + 'c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', + 'ecb-tbl-128: I=53'), + ('c70f54305885e9a0746d01ec56c8596b', '03f17a16b3f91848269ecdd38ebb2165', + 'd8d9dadbdddedfe0e2e3e4e5e7e8e9ea', + 'ecb-tbl-128: I=54'), + ('c4f81b610e98012ce000182050c0c2b2', 'da1248c3180348bad4a93b4d9856c9df', + 'ecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', + 'ecb-tbl-128: I=55'), + ('eaab86b1d02a95d7404eff67489f97d4', '3d10d7b63f3452c06cdf6cce18be0c2c', + '00010203050607080a0b0c0d0f101112', + 'ecb-tbl-128: I=56'), + ('7c55bdb40b88870b52bec3738de82886', '4ab823e7477dfddc0e6789018fcb6258', + '14151617191a1b1c1e1f202123242526', + 'ecb-tbl-128: I=57'), + ('ba6eaa88371ff0a3bd875e3f2a975ce0', 'e6478ba56a77e70cfdaa5c843abde30e', + '28292a2b2d2e2f30323334353738393a', + 'ecb-tbl-128: I=58'), + ('08059130c4c24bd30cf0575e4e0373dc', '1673064895fbeaf7f09c5429ff75772d', + '3c3d3e3f41424344464748494b4c4d4e', + 'ecb-tbl-128: I=59'), + ('9a8eab004ef53093dfcf96f57e7eda82', '4488033ae9f2efd0ca9383bfca1a94e9', + '50515253555657585a5b5c5d5f606162', + 'ecb-tbl-128: I=60'), + ('0745b589e2400c25f117b1d796c28129', '978f3b8c8f9d6f46626cac3c0bcb9217', + '64656667696a6b6c6e6f707173747576', + 'ecb-tbl-128: I=61'), + ('2f1777781216cec3f044f134b1b92bbe', 'e08c8a7e582e15e5527f1d9e2eecb236', + '78797a7b7d7e7f80828384858788898a', + 'ecb-tbl-128: I=62'), + ('353a779ffc541b3a3805d90ce17580fc', 'cec155b76ac5ffda4cf4f9ca91e49a7a', + '8c8d8e8f91929394969798999b9c9d9e', + 'ecb-tbl-128: I=63'), + ('1a1eae4415cefcf08c4ac1c8f68bea8f', 'd5ac7165763225dd2a38cdc6862c29ad', + 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2', + 'ecb-tbl-128: I=64'), + ('e6e7e4e5b0b3b2b5d4d5aaab16111013', '03680fe19f7ce7275452020be70e8204', + 'b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', + 'ecb-tbl-128: I=65'), + ('f8f9fafbfbf8f9e677767170efe0e1e2', '461df740c9781c388e94bb861ceb54f6', + 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9da', + 'ecb-tbl-128: I=66'), + ('63626160a1a2a3a445444b4a75727370', '451bd60367f96483042742219786a074', + 'dcdddedfe1e2e3e4e6e7e8e9ebecedee', + 'ecb-tbl-128: I=67'), + ('717073720605040b2d2c2b2a05fafbf9', 'e4dfa42671a02e57ef173b85c0ea9f2b', + 'f0f1f2f3f5f6f7f8fafbfcfdfe010002', + 'ecb-tbl-128: I=68'), + ('78797a7beae9e8ef3736292891969794', 'ed11b89e76274282227d854700a78b9e', + '04050607090a0b0c0e0f101113141516', + 'ecb-tbl-128: I=69'), + ('838281803231300fdddcdbdaa0afaead', '433946eaa51ea47af33895f2b90b3b75', + '18191a1b1d1e1f20222324252728292a', + 'ecb-tbl-128: I=70'), + ('18191a1bbfbcbdba75747b7a7f78797a', '6bc6d616a5d7d0284a5910ab35022528', + '2c2d2e2f31323334363738393b3c3d3e', + 'ecb-tbl-128: I=71'), + ('848586879b989996a3a2a5a4849b9a99', 'd2a920ecfe919d354b5f49eae9719c98', + '40414243454647484a4b4c4d4f505152', + 'ecb-tbl-128: 
I=72'), + ('0001020322212027cacbf4f551565754', '3a061b17f6a92885efbd0676985b373d', + '54555657595a5b5c5e5f606163646566', + 'ecb-tbl-128: I=73'), + ('cecfcccdafacadb2515057564a454447', 'fadeec16e33ea2f4688499d157e20d8f', + '68696a6b6d6e6f70727374757778797a', + 'ecb-tbl-128: I=74'), + ('92939091cdcecfc813121d1c80878685', '5cdefede59601aa3c3cda36fa6b1fa13', + '7c7d7e7f81828384868788898b8c8d8e', + 'ecb-tbl-128: I=75'), + ('d2d3d0d16f6c6d6259585f5ed1eeefec', '9574b00039844d92ebba7ee8719265f8', + '90919293959697989a9b9c9d9fa0a1a2', + 'ecb-tbl-128: I=76'), + ('acadaeaf878485820f0e1110d5d2d3d0', '9a9cf33758671787e5006928188643fa', + 'a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', + 'ecb-tbl-128: I=77'), + ('9091929364676619e6e7e0e1757a7b78', '2cddd634c846ba66bb46cbfea4a674f9', + 'b8b9babbbdbebfc0c2c3c4c5c7c8c9ca', + 'ecb-tbl-128: I=78'), + ('babbb8b98a89888f74757a7b92959497', 'd28bae029393c3e7e26e9fafbbb4b98f', + 'cccdcecfd1d2d3d4d6d7d8d9dbdcddde', + 'ecb-tbl-128: I=79'), + ('8d8c8f8e6e6d6c633b3a3d3ccad5d4d7', 'ec27529b1bee0a9ab6a0d73ebc82e9b7', + 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2', + 'ecb-tbl-128: I=80'), + ('86878485010203040808f7f767606162', '3cb25c09472aff6ee7e2b47ccd7ccb17', + 'f4f5f6f7f9fafbfcfefe010103040506', + 'ecb-tbl-128: I=81'), + ('8e8f8c8d656667788a8b8c8d010e0f0c', 'dee33103a7283370d725e44ca38f8fe5', + '08090a0b0d0e0f10121314151718191a', + 'ecb-tbl-128: I=82'), + ('c8c9cacb858687807a7b7475e7e0e1e2', '27f9bcd1aac64bffc11e7815702c1a69', + '1c1d1e1f21222324262728292b2c2d2e', + 'ecb-tbl-128: I=83'), + ('6d6c6f6e5053525d8c8d8a8badd2d3d0', '5df534ffad4ed0749a9988e9849d0021', + '30313233353637383a3b3c3d3f404142', + 'ecb-tbl-128: I=84'), + ('28292a2b393a3b3c0607181903040506', 'a48bee75db04fb60ca2b80f752a8421b', + '44454647494a4b4c4e4f505153545556', + 'ecb-tbl-128: I=85'), + ('a5a4a7a6b0b3b28ddbdadddcbdb2b3b0', '024c8cf70bc86ee5ce03678cb7af45f9', + '58595a5b5d5e5f60626364656768696a', + 'ecb-tbl-128: I=86'), + ('323330316467666130313e3f2c2b2a29', '3c19ac0f8a3a3862ce577831301e166b', + '6c6d6e6f71727374767778797b7c7d7e', + 'ecb-tbl-128: I=87'), + ('27262524080b0a05171611100b141516', 'c5e355b796a57421d59ca6be82e73bca', + '80818283858687888a8b8c8d8f909192', + 'ecb-tbl-128: I=88'), + ('040506074142434435340b0aa3a4a5a6', 'd94033276417abfb05a69d15b6e386e2', + '94959697999a9b9c9e9fa0a1a3a4a5a6', + 'ecb-tbl-128: I=89'), + ('242526271112130c61606766bdb2b3b0', '24b36559ea3a9b9b958fe6da3e5b8d85', + 'a8a9aaabadaeafb0b2b3b4b5b7b8b9ba', + 'ecb-tbl-128: I=90'), + ('4b4a4948252627209e9f9091cec9c8cb', '20fd4feaa0e8bf0cce7861d74ef4cb72', + 'bcbdbebfc1c2c3c4c6c7c8c9cbcccdce', + 'ecb-tbl-128: I=91'), + ('68696a6b6665646b9f9e9998d9e6e7e4', '350e20d5174277b9ec314c501570a11d', + 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2', + 'ecb-tbl-128: I=92'), + ('34353637c5c6c7c0f0f1eeef7c7b7a79', '87a29d61b7c604d238fe73045a7efd57', + 'e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', + 'ecb-tbl-128: I=93'), + ('32333031c2c1c13f0d0c0b0a050a0b08', '2c3164c1cc7d0064816bdc0faa362c52', + 'f8f9fafbfdfefe00020304050708090a', + 'ecb-tbl-128: I=94'), + ('cdcccfcebebdbcbbabaaa5a4181f1e1d', '195fe5e8a05a2ed594f6e4400eee10b3', + '0c0d0e0f11121314161718191b1c1d1e', + 'ecb-tbl-128: I=95'), + ('212023223635343ba0a1a6a7445b5a59', 'e4663df19b9a21a5a284c2bd7f905025', + '20212223252627282a2b2c2d2f303132', + 'ecb-tbl-128: I=96'), + ('0e0f0c0da8abaaad2f2e515002050407', '21b88714cfb4e2a933bd281a2c4743fd', + '34353637393a3b3c3e3f404143444546', + 'ecb-tbl-128: I=97'), + ('070605042a2928378e8f8889bdb2b3b0', 'cbfc3980d704fd0fc54378ab84e17870', + '48494a4b4d4e4f50525354555758595a', + 
'ecb-tbl-128: I=98'), + ('cbcac9c893909196a9a8a7a6a5a2a3a0', 'bc5144baa48bdeb8b63e22e03da418ef', + '5c5d5e5f61626364666768696b6c6d6e', + 'ecb-tbl-128: I=99'), + ('80818283c1c2c3cc9c9d9a9b0cf3f2f1', '5a1dbaef1ee2984b8395da3bdffa3ccc', + '70717273757677787a7b7c7d7f808182', + 'ecb-tbl-128: I=100'), + ('1213101125262720fafbe4e5b1b6b7b4', 'f0b11cd0729dfcc80cec903d97159574', + '84858687898a8b8c8e8f909193949596', + 'ecb-tbl-128: I=101'), + ('7f7e7d7c3033320d97969190222d2c2f', '9f95314acfddc6d1914b7f19a9cc8209', + '98999a9b9d9e9fa0a2a3a4a5a7a8a9aa', + 'ecb-tbl-128: I=102'), + ('4e4f4c4d484b4a4d81808f8e53545556', '595736f6f0f70914a94e9e007f022519', + 'acadaeafb1b2b3b4b6b7b8b9bbbcbdbe', + 'ecb-tbl-128: I=103'), + ('dcdddedfb0b3b2bd15141312a1bebfbc', '1f19f57892cae586fcdfb4c694deb183', + 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2', + 'ecb-tbl-128: I=104'), + ('93929190282b2a2dc4c5fafb92959497', '540700ee1f6f3dab0b3eddf6caee1ef5', + 'd4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', + 'ecb-tbl-128: I=105'), + ('f5f4f7f6c4c7c6d9373631307e717073', '14a342a91019a331687a2254e6626ca2', + 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fa', + 'ecb-tbl-128: I=106'), + ('93929190b6b5b4b364656a6b05020300', '7b25f3c3b2eea18d743ef283140f29ff', + 'fcfdfeff01020304060708090b0c0d0e', + 'ecb-tbl-128: I=107'), + ('babbb8b90d0e0f00a4a5a2a3043b3a39', '46c2587d66e5e6fa7f7ca6411ad28047', + '10111213151617181a1b1c1d1f202122', + 'ecb-tbl-128: I=108'), + ('d8d9dadb7f7c7d7a10110e0f787f7e7d', '09470e72229d954ed5ee73886dfeeba9', + '24252627292a2b2c2e2f303133343536', + 'ecb-tbl-128: I=109'), + ('fefffcfdefeced923b3a3d3c6768696a', 'd77c03de92d4d0d79ef8d4824ef365eb', + '38393a3b3d3e3f40424344454748494a', + 'ecb-tbl-128: I=110'), + ('d6d7d4d58a89888f96979899a5a2a3a0', '1d190219f290e0f1715d152d41a23593', + '4c4d4e4f51525354565758595b5c5d5e', + 'ecb-tbl-128: I=111'), + ('18191a1ba8abaaa5303136379b848586', 'a2cd332ce3a0818769616292e87f757b', + '60616263656667686a6b6c6d6f707172', + 'ecb-tbl-128: I=112'), + ('6b6a6968a4a7a6a1d6d72829b0b7b6b5', 'd54afa6ce60fbf9341a3690e21385102', + '74757677797a7b7c7e7f808183848586', + 'ecb-tbl-128: I=113'), + ('000102038a89889755545352a6a9a8ab', '06e5c364ded628a3f5e05e613e356f46', + '88898a8b8d8e8f90929394959798999a', + 'ecb-tbl-128: I=114'), + ('2d2c2f2eb3b0b1b6b6b7b8b9f2f5f4f7', 'eae63c0e62556dac85d221099896355a', + '9c9d9e9fa1a2a3a4a6a7a8a9abacadae', + 'ecb-tbl-128: I=115'), + ('979695943536373856575051e09f9e9d', '1fed060e2c6fc93ee764403a889985a2', + 'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2', + 'ecb-tbl-128: I=116'), + ('a4a5a6a7989b9a9db1b0afae7a7d7c7f', 'c25235c1a30fdec1c7cb5c5737b2a588', + 'c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', + 'ecb-tbl-128: I=117'), + ('c1c0c3c2686b6a55a8a9aeafeae5e4e7', '796dbef95147d4d30873ad8b7b92efc0', + 'd8d9dadbdddedfe0e2e3e4e5e7e8e9ea', + 'ecb-tbl-128: I=118'), + ('c1c0c3c2141716118c8d828364636261', 'cbcf0fb34d98d0bd5c22ce37211a46bf', + 'ecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', + 'ecb-tbl-128: I=119'), + ('93929190cccfcec196979091e0fffefd', '94b44da6466126cafa7c7fd09063fc24', + '00010203050607080a0b0c0d0f101112', + 'ecb-tbl-128: I=120'), + ('b4b5b6b7f9fafbfc25241b1a6e69686b', 'd78c5b5ebf9b4dbda6ae506c5074c8fe', + '14151617191a1b1c1e1f202123242526', + 'ecb-tbl-128: I=121'), + ('868784850704051ac7c6c1c08788898a', '6c27444c27204b043812cf8cf95f9769', + '28292a2b2d2e2f30323334353738393a', + 'ecb-tbl-128: I=122'), + ('f4f5f6f7aaa9a8affdfcf3f277707172', 'be94524ee5a2aa50bba8b75f4c0aebcf', + '3c3d3e3f41424344464748494b4c4d4e', + 'ecb-tbl-128: I=123'), + ('d3d2d1d00605040bc3c2c5c43e010003', 'a0aeaae91ba9f31f51aeb3588cf3a39e', + 
'50515253555657585a5b5c5d5f606162', + 'ecb-tbl-128: I=124'), + ('73727170424140476a6b74750d0a0b08', '275297779c28266ef9fe4c6a13c08488', + '64656667696a6b6c6e6f707173747576', + 'ecb-tbl-128: I=125'), + ('c2c3c0c10a0908f754555253a1aeafac', '86523d92bb8672cb01cf4a77fd725882', + '78797a7b7d7e7f80828384858788898a', + 'ecb-tbl-128: I=126'), + ('6d6c6f6ef8fbfafd82838c8df8fffefd', '4b8327640e9f33322a04dd96fcbf9a36', + '8c8d8e8f91929394969798999b9c9d9e', + 'ecb-tbl-128: I=127'), + ('f5f4f7f684878689a6a7a0a1d2cdcccf', 'ce52af650d088ca559425223f4d32694', + 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2', + 'ecb-tbl-128: I=128'), + + # ecb_tbl.txt, KEYSIZE=192 + ('2d33eef2c0430a8a9ebf45e809c40bb6', 'dff4945e0336df4c1c56bc700eff837f', + '00010203050607080a0b0c0d0f10111214151617191a1b1c', + 'ecb-tbl-192: I=1'), + ('6aa375d1fa155a61fb72353e0a5a8756', 'b6fddef4752765e347d5d2dc196d1252', + '1e1f20212324252628292a2b2d2e2f30323334353738393a', + 'ecb-tbl-192: I=2'), + ('bc3736518b9490dcb8ed60eb26758ed4', 'd23684e3d963b3afcf1a114aca90cbd6', + '3c3d3e3f41424344464748494b4c4d4e5051525355565758', + 'ecb-tbl-192: I=3'), + ('aa214402b46cffb9f761ec11263a311e', '3a7ac027753e2a18c2ceab9e17c11fd0', + '5a5b5c5d5f60616264656667696a6b6c6e6f707173747576', + 'ecb-tbl-192: I=4'), + ('02aea86e572eeab66b2c3af5e9a46fd6', '8f6786bd007528ba26603c1601cdd0d8', + '78797a7b7d7e7f80828384858788898a8c8d8e8f91929394', + 'ecb-tbl-192: I=5'), + ('e2aef6acc33b965c4fa1f91c75ff6f36', 'd17d073b01e71502e28b47ab551168b3', + '969798999b9c9d9ea0a1a2a3a5a6a7a8aaabacadafb0b1b2', + 'ecb-tbl-192: I=6'), + ('0659df46427162b9434865dd9499f91d', 'a469da517119fab95876f41d06d40ffa', + 'b4b5b6b7b9babbbcbebfc0c1c3c4c5c6c8c9cacbcdcecfd0', + 'ecb-tbl-192: I=7'), + ('49a44239c748feb456f59c276a5658df', '6091aa3b695c11f5c0b6ad26d3d862ff', + 'd2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee', + 'ecb-tbl-192: I=8'), + ('66208f6e9d04525bdedb2733b6a6be37', '70f9e67f9f8df1294131662dc6e69364', + 'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c', + 'ecb-tbl-192: I=9'), + ('3393f8dfc729c97f5480b950bc9666b0', 'd154dcafad8b207fa5cbc95e9996b559', + '0e0f10111314151618191a1b1d1e1f20222324252728292a', + 'ecb-tbl-192: I=10'), + ('606834c8ce063f3234cf1145325dbd71', '4934d541e8b46fa339c805a7aeb9e5da', + '2c2d2e2f31323334363738393b3c3d3e4041424345464748', + 'ecb-tbl-192: I=11'), + ('fec1c04f529bbd17d8cecfcc4718b17f', '62564c738f3efe186e1a127a0c4d3c61', + '4a4b4c4d4f50515254555657595a5b5c5e5f606163646566', + 'ecb-tbl-192: I=12'), + ('32df99b431ed5dc5acf8caf6dc6ce475', '07805aa043986eb23693e23bef8f3438', + '68696a6b6d6e6f70727374757778797a7c7d7e7f81828384', + 'ecb-tbl-192: I=13'), + ('7fdc2b746f3f665296943b83710d1f82', 'df0b4931038bade848dee3b4b85aa44b', + '868788898b8c8d8e90919293959697989a9b9c9d9fa0a1a2', + 'ecb-tbl-192: I=14'), + ('8fba1510a3c5b87e2eaa3f7a91455ca2', '592d5fded76582e4143c65099309477c', + 'a4a5a6a7a9aaabacaeafb0b1b3b4b5b6b8b9babbbdbebfc0', + 'ecb-tbl-192: I=15'), + ('2c9b468b1c2eed92578d41b0716b223b', 'c9b8d6545580d3dfbcdd09b954ed4e92', + 'c2c3c4c5c7c8c9cacccdcecfd1d2d3d4d6d7d8d9dbdcddde', + 'ecb-tbl-192: I=16'), + ('0a2bbf0efc6bc0034f8a03433fca1b1a', '5dccd5d6eb7c1b42acb008201df707a0', + 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2f4f5f6f7f9fafbfc', + 'ecb-tbl-192: I=17'), + ('25260e1f31f4104d387222e70632504b', 'a2a91682ffeb6ed1d34340946829e6f9', + 'fefe01010304050608090a0b0d0e0f10121314151718191a', + 'ecb-tbl-192: I=18'), + ('c527d25a49f08a5228d338642ae65137', 'e45d185b797000348d9267960a68435d', + '1c1d1e1f21222324262728292b2c2d2e3031323335363738', + 'ecb-tbl-192: I=19'), + 
('3b49fc081432f5890d0e3d87e884a69e', '45e060dae5901cda8089e10d4f4c246b', + '3a3b3c3d3f40414244454647494a4b4c4e4f505153545556', + 'ecb-tbl-192: I=20'), + ('d173f9ed1e57597e166931df2754a083', 'f6951afacc0079a369c71fdcff45df50', + '58595a5b5d5e5f60626364656768696a6c6d6e6f71727374', + 'ecb-tbl-192: I=21'), + ('8c2b7cafa5afe7f13562daeae1adede0', '9e95e00f351d5b3ac3d0e22e626ddad6', + '767778797b7c7d7e80818283858687888a8b8c8d8f909192', + 'ecb-tbl-192: I=22'), + ('aaf4ec8c1a815aeb826cab741339532c', '9cb566ff26d92dad083b51fdc18c173c', + '94959697999a9b9c9e9fa0a1a3a4a5a6a8a9aaabadaeafb0', + 'ecb-tbl-192: I=23'), + ('40be8c5d9108e663f38f1a2395279ecf', 'c9c82766176a9b228eb9a974a010b4fb', + 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2e4e5e6e7e9eaebec', + 'ecb-tbl-192: I=24'), + ('0c8ad9bc32d43e04716753aa4cfbe351', 'd8e26aa02945881d5137f1c1e1386e88', + '2a2b2c2d2f30313234353637393a3b3c3e3f404143444546', + 'ecb-tbl-192: I=25'), + ('1407b1d5f87d63357c8dc7ebbaebbfee', 'c0e024ccd68ff5ffa4d139c355a77c55', + '48494a4b4d4e4f50525354555758595a5c5d5e5f61626364', + 'ecb-tbl-192: I=26'), + ('e62734d1ae3378c4549e939e6f123416', '0b18b3d16f491619da338640df391d43', + '84858687898a8b8c8e8f90919394959698999a9b9d9e9fa0', + 'ecb-tbl-192: I=27'), + ('5a752cff2a176db1a1de77f2d2cdee41', 'dbe09ac8f66027bf20cb6e434f252efc', + 'a2a3a4a5a7a8a9aaacadaeafb1b2b3b4b6b7b8b9bbbcbdbe', + 'ecb-tbl-192: I=28'), + ('a9c8c3a4eabedc80c64730ddd018cd88', '6d04e5e43c5b9cbe05feb9606b6480fe', + 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2d4d5d6d7d9dadbdc', + 'ecb-tbl-192: I=29'), + ('ee9b3dbbdb86180072130834d305999a', 'dd1d6553b96be526d9fee0fbd7176866', + '1a1b1c1d1f20212224252627292a2b2c2e2f303133343536', + 'ecb-tbl-192: I=30'), + ('a7fa8c3586b8ebde7568ead6f634a879', '0260ca7e3f979fd015b0dd4690e16d2a', + '38393a3b3d3e3f40424344454748494a4c4d4e4f51525354', + 'ecb-tbl-192: I=31'), + ('37e0f4a87f127d45ac936fe7ad88c10a', '9893734de10edcc8a67c3b110b8b8cc6', + '929394959798999a9c9d9e9fa1a2a3a4a6a7a8a9abacadae', + 'ecb-tbl-192: I=32'), + ('3f77d8b5d92bac148e4e46f697a535c5', '93b30b750516b2d18808d710c2ee84ef', + '464748494b4c4d4e50515253555657585a5b5c5d5f606162', + 'ecb-tbl-192: I=33'), + ('d25ebb686c40f7e2c4da1014936571ca', '16f65fa47be3cb5e6dfe7c6c37016c0e', + '828384858788898a8c8d8e8f91929394969798999b9c9d9e', + 'ecb-tbl-192: I=34'), + ('4f1c769d1e5b0552c7eca84dea26a549', 'f3847210d5391e2360608e5acb560581', + 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2b4b5b6b7b9babbbc', + 'ecb-tbl-192: I=35'), + ('8548e2f882d7584d0fafc54372b6633a', '8754462cd223366d0753913e6af2643d', + 'bebfc0c1c3c4c5c6c8c9cacbcdcecfd0d2d3d4d5d7d8d9da', + 'ecb-tbl-192: I=36'), + ('87d7a336cb476f177cd2a51af2a62cdf', '1ea20617468d1b806a1fd58145462017', + 'dcdddedfe1e2e3e4e6e7e8e9ebecedeef0f1f2f3f5f6f7f8', + 'ecb-tbl-192: I=37'), + ('03b1feac668c4e485c1065dfc22b44ee', '3b155d927355d737c6be9dda60136e2e', + 'fafbfcfdfe01000204050607090a0b0c0e0f101113141516', + 'ecb-tbl-192: I=38'), + ('bda15e66819fa72d653a6866aa287962', '26144f7b66daa91b6333dbd3850502b3', + '18191a1b1d1e1f20222324252728292a2c2d2e2f31323334', + 'ecb-tbl-192: I=39'), + ('4d0c7a0d2505b80bf8b62ceb12467f0a', 'e4f9a4ab52ced8134c649bf319ebcc90', + '363738393b3c3d3e40414243454647484a4b4c4d4f505152', + 'ecb-tbl-192: I=40'), + ('626d34c9429b37211330986466b94e5f', 'b9ddd29ac6128a6cab121e34a4c62b36', + '54555657595a5b5c5e5f60616364656668696a6b6d6e6f70', + 'ecb-tbl-192: I=41'), + ('333c3e6bf00656b088a17e5ff0e7f60a', '6fcddad898f2ce4eff51294f5eaaf5c9', + '727374757778797a7c7d7e7f81828384868788898b8c8d8e', + 'ecb-tbl-192: I=42'), + 
('687ed0cdc0d2a2bc8c466d05ef9d2891', 'c9a6fe2bf4028080bea6f7fc417bd7e3', + '90919293959697989a9b9c9d9fa0a1a2a4a5a6a7a9aaabac', + 'ecb-tbl-192: I=43'), + ('487830e78cc56c1693e64b2a6660c7b6', '6a2026846d8609d60f298a9c0673127f', + 'aeafb0b1b3b4b5b6b8b9babbbdbebfc0c2c3c4c5c7c8c9ca', + 'ecb-tbl-192: I=44'), + ('7a48d6b7b52b29392aa2072a32b66160', '2cb25c005e26efea44336c4c97a4240b', + 'cccdcecfd1d2d3d4d6d7d8d9dbdcdddee0e1e2e3e5e6e7e8', + 'ecb-tbl-192: I=45'), + ('907320e64c8c5314d10f8d7a11c8618d', '496967ab8680ddd73d09a0e4c7dcc8aa', + 'eaebecedeff0f1f2f4f5f6f7f9fafbfcfefe010103040506', + 'ecb-tbl-192: I=46'), + ('b561f2ca2d6e65a4a98341f3ed9ff533', 'd5af94de93487d1f3a8c577cb84a66a4', + '08090a0b0d0e0f10121314151718191a1c1d1e1f21222324', + 'ecb-tbl-192: I=47'), + ('df769380d212792d026f049e2e3e48ef', '84bdac569cae2828705f267cc8376e90', + '262728292b2c2d2e30313233353637383a3b3c3d3f404142', + 'ecb-tbl-192: I=48'), + ('79f374bc445bdabf8fccb8843d6054c6', 'f7401dda5ad5ab712b7eb5d10c6f99b6', + '44454647494a4b4c4e4f50515354555658595a5b5d5e5f60', + 'ecb-tbl-192: I=49'), + ('4e02f1242fa56b05c68dbae8fe44c9d6', '1c9d54318539ebd4c3b5b7e37bf119f0', + '626364656768696a6c6d6e6f71727374767778797b7c7d7e', + 'ecb-tbl-192: I=50'), + ('cf73c93cbff57ac635a6f4ad2a4a1545', 'aca572d65fb2764cffd4a6eca090ea0d', + '80818283858687888a8b8c8d8f90919294959697999a9b9c', + 'ecb-tbl-192: I=51'), + ('9923548e2875750725b886566784c625', '36d9c627b8c2a886a10ccb36eae3dfbb', + '9e9fa0a1a3a4a5a6a8a9aaabadaeafb0b2b3b4b5b7b8b9ba', + 'ecb-tbl-192: I=52'), + ('4888336b723a022c9545320f836a4207', '010edbf5981e143a81d646e597a4a568', + 'bcbdbebfc1c2c3c4c6c7c8c9cbcccdced0d1d2d3d5d6d7d8', + 'ecb-tbl-192: I=53'), + ('f84d9a5561b0608b1160dee000c41ba8', '8db44d538dc20cc2f40f3067fd298e60', + 'dadbdcdddfe0e1e2e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', + 'ecb-tbl-192: I=54'), + ('c23192a0418e30a19b45ae3e3625bf22', '930eb53bc71e6ac4b82972bdcd5aafb3', + 'f8f9fafbfdfefe00020304050708090a0c0d0e0f11121314', + 'ecb-tbl-192: I=55'), + ('b84e0690b28b0025381ad82a15e501a7', '6c42a81edcbc9517ccd89c30c95597b4', + '161718191b1c1d1e20212223252627282a2b2c2d2f303132', + 'ecb-tbl-192: I=56'), + ('acef5e5c108876c4f06269f865b8f0b0', 'da389847ad06df19d76ee119c71e1dd3', + '34353637393a3b3c3e3f40414344454648494a4b4d4e4f50', + 'ecb-tbl-192: I=57'), + ('0f1b3603e0f5ddea4548246153a5e064', 'e018fdae13d3118f9a5d1a647a3f0462', + '525354555758595a5c5d5e5f61626364666768696b6c6d6e', + 'ecb-tbl-192: I=58'), + ('fbb63893450d42b58c6d88cd3c1809e3', '2aa65db36264239d3846180fabdfad20', + '70717273757677787a7b7c7d7f80818284858687898a8b8c', + 'ecb-tbl-192: I=59'), + ('4bef736df150259dae0c91354e8a5f92', '1472163e9a4f780f1ceb44b07ecf4fdb', + '8e8f90919394959698999a9b9d9e9fa0a2a3a4a5a7a8a9aa', + 'ecb-tbl-192: I=60'), + ('7d2d46242056ef13d3c3fc93c128f4c7', 'c8273fdc8f3a9f72e91097614b62397c', + 'acadaeafb1b2b3b4b6b7b8b9bbbcbdbec0c1c2c3c5c6c7c8', + 'ecb-tbl-192: I=61'), + ('e9c1ba2df415657a256edb33934680fd', '66c8427dcd733aaf7b3470cb7d976e3f', + 'cacbcccdcfd0d1d2d4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', + 'ecb-tbl-192: I=62'), + ('e23ee277b0aa0a1dfb81f7527c3514f1', '146131cb17f1424d4f8da91e6f80c1d0', + 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fafcfdfeff01020304', + 'ecb-tbl-192: I=63'), + ('3e7445b0b63caaf75e4a911e12106b4c', '2610d0ad83659081ae085266a88770dc', + '060708090b0c0d0e10111213151617181a1b1c1d1f202122', + 'ecb-tbl-192: I=64'), + ('767774752023222544455a5be6e1e0e3', '38a2b5a974b0575c5d733917fb0d4570', + '24252627292a2b2c2e2f30313334353638393a3b3d3e3f40', + 'ecb-tbl-192: I=65'), + 
('72737475717e7f7ce9e8ebea696a6b6c', 'e21d401ebc60de20d6c486e4f39a588b', + '424344454748494a4c4d4e4f51525354565758595b5c5d5e', + 'ecb-tbl-192: I=66'), + ('dfdedddc25262728c9c8cfcef1eeefec', 'e51d5f88c670b079c0ca1f0c2c4405a2', + '60616263656667686a6b6c6d6f70717274757677797a7b7c', + 'ecb-tbl-192: I=67'), + ('fffe0100707776755f5e5d5c7675746b', '246a94788a642fb3d1b823c8762380c8', + '7e7f80818384858688898a8b8d8e8f90929394959798999a', + 'ecb-tbl-192: I=68'), + ('e0e1e2e3424140479f9e9190292e2f2c', 'b80c391c5c41a4c3b30c68e0e3d7550f', + '9c9d9e9fa1a2a3a4a6a7a8a9abacadaeb0b1b2b3b5b6b7b8', + 'ecb-tbl-192: I=69'), + ('2120272690efeeed3b3a39384e4d4c4b', 'b77c4754fc64eb9a1154a9af0bb1f21c', + 'babbbcbdbfc0c1c2c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', + 'ecb-tbl-192: I=70'), + ('ecedeeef5350516ea1a0a7a6a3acadae', 'fb554de520d159a06bf219fc7f34a02f', + 'd8d9dadbdddedfe0e2e3e4e5e7e8e9eaecedeeeff1f2f3f4', + 'ecb-tbl-192: I=71'), + ('32333c3d25222320e9e8ebeacecdccc3', 'a89fba152d76b4927beed160ddb76c57', + 'f6f7f8f9fbfcfdfe00010203050607080a0b0c0d0f101112', + 'ecb-tbl-192: I=72'), + ('40414243626160678a8bb4b511161714', '5676eab4a98d2e8473b3f3d46424247c', + '14151617191a1b1c1e1f20212324252628292a2b2d2e2f30', + 'ecb-tbl-192: I=73'), + ('94959293f5fafbf81f1e1d1c7c7f7e79', '4e8f068bd7ede52a639036ec86c33568', + '323334353738393a3c3d3e3f41424344464748494b4c4d4e', + 'ecb-tbl-192: I=74'), + ('bebfbcbd191a1b14cfcec9c8546b6a69', 'f0193c4d7aff1791ee4c07eb4a1824fc', + '50515253555657585a5b5c5d5f60616264656667696a6b6c', + 'ecb-tbl-192: I=75'), + ('2c2d3233898e8f8cbbbab9b8333031ce', 'ac8686eeca9ba761afe82d67b928c33f', + '6e6f70717374757678797a7b7d7e7f80828384858788898a', + 'ecb-tbl-192: I=76'), + ('84858687bfbcbdba37363938fdfafbf8', '5faf8573e33b145b6a369cd3606ab2c9', + '8c8d8e8f91929394969798999b9c9d9ea0a1a2a3a5a6a7a8', + 'ecb-tbl-192: I=77'), + ('828384857669686b909192930b08090e', '31587e9944ab1c16b844ecad0df2e7da', + 'aaabacadafb0b1b2b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', + 'ecb-tbl-192: I=78'), + ('bebfbcbd9695948b707176779e919093', 'd017fecd91148aba37f6f3068aa67d8a', + 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9dadcdddedfe1e2e3e4', + 'ecb-tbl-192: I=79'), + ('8b8a85846067666521202322d0d3d2dd', '788ef2f021a73cba2794b616078a8500', + 'e6e7e8e9ebecedeef0f1f2f3f5f6f7f8fafbfcfdfe010002', + 'ecb-tbl-192: I=80'), + ('76777475f1f2f3f4f8f9e6e777707172', '5d1ef20dced6bcbc12131ac7c54788aa', + '04050607090a0b0c0e0f10111314151618191a1b1d1e1f20', + 'ecb-tbl-192: I=81'), + ('a4a5a2a34f404142b4b5b6b727242522', 'b3c8cf961faf9ea05fdde6d1e4d8f663', + '222324252728292a2c2d2e2f31323334363738393b3c3d3e', + 'ecb-tbl-192: I=82'), + ('94959697e1e2e3ec16171011839c9d9e', '143075c70605861c7fac6526199e459f', + '40414243454647484a4b4c4d4f50515254555657595a5b5c', + 'ecb-tbl-192: I=83'), + ('03023d3c06010003dedfdcddfffcfde2', 'a5ae12eade9a87268d898bfc8fc0252a', + '5e5f60616364656668696a6b6d6e6f70727374757778797a', + 'ecb-tbl-192: I=84'), + ('10111213f1f2f3f4cecfc0c1dbdcddde', '0924f7cf2e877a4819f5244a360dcea9', + '7c7d7e7f81828384868788898b8c8d8e9091929395969798', + 'ecb-tbl-192: I=85'), + ('67666160724d4c4f1d1c1f1e73707176', '3d9e9635afcc3e291cc7ab3f27d1c99a', + '9a9b9c9d9fa0a1a2a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', + 'ecb-tbl-192: I=86'), + ('e6e7e4e5a8abaad584858283909f9e9d', '9d80feebf87510e2b8fb98bb54fd788c', + 'b8b9babbbdbebfc0c2c3c4c5c7c8c9cacccdcecfd1d2d3d4', + 'ecb-tbl-192: I=87'), + ('71707f7e565150537d7c7f7e6162636c', '5f9d1a082a1a37985f174002eca01309', + 'd6d7d8d9dbdcdddee0e1e2e3e5e6e7e8eaebecedeff0f1f2', + 'ecb-tbl-192: I=88'), + 
('64656667212223245555aaaa03040506', 'a390ebb1d1403930184a44b4876646e4', + 'f4f5f6f7f9fafbfcfefe01010304050608090a0b0d0e0f10', + 'ecb-tbl-192: I=89'), + ('9e9f9899aba4a5a6cfcecdcc2b28292e', '700fe918981c3195bb6c4bcb46b74e29', + '121314151718191a1c1d1e1f21222324262728292b2c2d2e', + 'ecb-tbl-192: I=90'), + ('c7c6c5c4d1d2d3dc626364653a454447', '907984406f7bf2d17fb1eb15b673d747', + '30313233353637383a3b3c3d3f40414244454647494a4b4c', + 'ecb-tbl-192: I=91'), + ('f6f7e8e9e0e7e6e51d1c1f1e5b585966', 'c32a956dcfc875c2ac7c7cc8b8cc26e1', + '4e4f50515354555658595a5b5d5e5f60626364656768696a', + 'ecb-tbl-192: I=92'), + ('bcbdbebf5d5e5f5868696667f4f3f2f1', '02646e2ebfa9b820cf8424e9b9b6eb51', + '6c6d6e6f71727374767778797b7c7d7e8081828385868788', + 'ecb-tbl-192: I=93'), + ('40414647b0afaead9b9a99989b98999e', '621fda3a5bbd54c6d3c685816bd4ead8', + '8a8b8c8d8f90919294959697999a9b9c9e9fa0a1a3a4a5a6', + 'ecb-tbl-192: I=94'), + ('69686b6a0201001f0f0e0908b4bbbab9', 'd4e216040426dfaf18b152469bc5ac2f', + 'a8a9aaabadaeafb0b2b3b4b5b7b8b9babcbdbebfc1c2c3c4', + 'ecb-tbl-192: I=95'), + ('c7c6c9c8d8dfdedd5a5b5859bebdbcb3', '9d0635b9d33b6cdbd71f5d246ea17cc8', + 'c6c7c8c9cbcccdced0d1d2d3d5d6d7d8dadbdcdddfe0e1e2', + 'ecb-tbl-192: I=96'), + ('dedfdcdd787b7a7dfffee1e0b2b5b4b7', '10abad1bd9bae5448808765583a2cc1a', + 'e4e5e6e7e9eaebeceeeff0f1f3f4f5f6f8f9fafbfdfefe00', + 'ecb-tbl-192: I=97'), + ('4d4c4b4a606f6e6dd0d1d2d3fbf8f9fe', '6891889e16544e355ff65a793c39c9a8', + '020304050708090a0c0d0e0f11121314161718191b1c1d1e', + 'ecb-tbl-192: I=98'), + ('b7b6b5b4d7d4d5dae5e4e3e2e1fefffc', 'cc735582e68072c163cd9ddf46b91279', + '20212223252627282a2b2c2d2f30313234353637393a3b3c', + 'ecb-tbl-192: I=99'), + ('cecfb0b1f7f0f1f2aeafacad3e3d3c23', 'c5c68b9aeeb7f878df578efa562f9574', + '3e3f40414344454648494a4b4d4e4f50525354555758595a', + 'ecb-tbl-192: I=100'), + ('cacbc8c9cdcecfc812131c1d494e4f4c', '5f4764395a667a47d73452955d0d2ce8', + '5c5d5e5f61626364666768696b6c6d6e7071727375767778', + 'ecb-tbl-192: I=101'), + ('9d9c9b9ad22d2c2fb1b0b3b20c0f0e09', '701448331f66106cefddf1eb8267c357', + '7a7b7c7d7f80818284858687898a8b8c8e8f909193949596', + 'ecb-tbl-192: I=102'), + ('7a7b787964676659959493924f404142', 'cb3ee56d2e14b4e1941666f13379d657', + '98999a9b9d9e9fa0a2a3a4a5a7a8a9aaacadaeafb1b2b3b4', + 'ecb-tbl-192: I=103'), + ('aaaba4a5cec9c8cb1f1e1d1caba8a9a6', '9fe16efd18ab6e1981191851fedb0764', + 'b6b7b8b9bbbcbdbec0c1c2c3c5c6c7c8cacbcccdcfd0d1d2', + 'ecb-tbl-192: I=104'), + ('93929190282b2a2dc4c5fafb92959497', '3dc9ba24e1b223589b147adceb4c8e48', + 'd4d5d6d7d9dadbdcdedfe0e1e3e4e5e6e8e9eaebedeeeff0', + 'ecb-tbl-192: I=105'), + ('efeee9e8ded1d0d339383b3a888b8a8d', '1c333032682e7d4de5e5afc05c3e483c', + 'f2f3f4f5f7f8f9fafcfdfeff01020304060708090b0c0d0e', + 'ecb-tbl-192: I=106'), + ('7f7e7d7ca2a1a0af78797e7f112e2f2c', 'd593cc99a95afef7e92038e05a59d00a', + '10111213151617181a1b1c1d1f20212224252627292a2b2c', + 'ecb-tbl-192: I=107'), + ('84859a9b2b2c2d2e868784852625245b', '51e7f96f53b4353923452c222134e1ec', + '2e2f30313334353638393a3b3d3e3f40424344454748494a', + 'ecb-tbl-192: I=108'), + ('b0b1b2b3070405026869666710171615', '4075b357a1a2b473400c3b25f32f81a4', + '4c4d4e4f51525354565758595b5c5d5e6061626365666768', + 'ecb-tbl-192: I=109'), + ('acadaaabbda2a3a00d0c0f0e595a5b5c', '302e341a3ebcd74f0d55f61714570284', + '6a6b6c6d6f70717274757677797a7b7c7e7f808183848586', + 'ecb-tbl-192: I=110'), + ('121310115655544b5253545569666764', '57abdd8231280da01c5042b78cf76522', + '88898a8b8d8e8f90929394959798999a9c9d9e9fa1a2a3a4', + 'ecb-tbl-192: I=111'), + 
('dedfd0d166616063eaebe8e94142434c', '17f9ea7eea17ac1adf0e190fef799e92', + 'a6a7a8a9abacadaeb0b1b2b3b5b6b7b8babbbcbdbfc0c1c2', + 'ecb-tbl-192: I=112'), + ('dbdad9d81417161166677879e0e7e6e5', '2e1bdd563dd87ee5c338dd6d098d0a7a', + 'c4c5c6c7c9cacbcccecfd0d1d3d4d5d6d8d9dadbdddedfe0', + 'ecb-tbl-192: I=113'), + ('6a6b6c6de0efeeed2b2a2928c0c3c2c5', 'eb869996e6f8bfb2bfdd9e0c4504dbb2', + 'e2e3e4e5e7e8e9eaecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', + 'ecb-tbl-192: I=114'), + ('b1b0b3b21714151a1a1b1c1d5649484b', 'c2e01549e9decf317468b3e018c61ba8', + '00010203050607080a0b0c0d0f10111214151617191a1b1c', + 'ecb-tbl-192: I=115'), + ('39380706a3a4a5a6c4c5c6c77271706f', '8da875d033c01dd463b244a1770f4a22', + '1e1f20212324252628292a2b2d2e2f30323334353738393a', + 'ecb-tbl-192: I=116'), + ('5c5d5e5f1013121539383736e2e5e4e7', '8ba0dcf3a186844f026d022f8839d696', + '3c3d3e3f41424344464748494b4c4d4e5051525355565758', + 'ecb-tbl-192: I=117'), + ('43424544ead5d4d72e2f2c2d64676661', 'e9691ff9a6cc6970e51670a0fd5b88c1', + '5a5b5c5d5f60616264656667696a6b6c6e6f707173747576', + 'ecb-tbl-192: I=118'), + ('55545756989b9a65f8f9feff18171615', 'f2baec06faeed30f88ee63ba081a6e5b', + '78797a7b7d7e7f80828384858788898a8c8d8e8f91929394', + 'ecb-tbl-192: I=119'), + ('05040b0a525554573c3d3e3f4a494847', '9c39d4c459ae5753394d6094adc21e78', + '969798999b9c9d9ea0a1a2a3a5a6a7a8aaabacadafb0b1b2', + 'ecb-tbl-192: I=120'), + ('14151617595a5b5c8584fbfa8e89888b', '6345b532a11904502ea43ba99c6bd2b2', + 'b4b5b6b7b9babbbcbebfc0c1c3c4c5c6c8c9cacbcdcecfd0', + 'ecb-tbl-192: I=121'), + ('7c7d7a7bfdf2f3f029282b2a51525354', '5ffae3061a95172e4070cedce1e428c8', + 'd2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee', + 'ecb-tbl-192: I=122'), + ('38393a3b1e1d1c1341404746c23d3c3e', '0a4566be4cdf9adce5dec865b5ab34cd', + 'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c', + 'ecb-tbl-192: I=123'), + ('8d8c939240474645818083827c7f7e41', 'ca17fcce79b7404f2559b22928f126fb', + '0e0f10111314151618191a1b1d1e1f20222324252728292a', + 'ecb-tbl-192: I=124'), + ('3b3a39381a19181f32333c3d45424340', '97ca39b849ed73a6470a97c821d82f58', + '2c2d2e2f31323334363738393b3c3d3e4041424345464748', + 'ecb-tbl-192: I=125'), + ('f0f1f6f738272625828380817f7c7d7a', '8198cb06bc684c6d3e9b7989428dcf7a', + '4a4b4c4d4f50515254555657595a5b5c5e5f606163646566', + 'ecb-tbl-192: I=126'), + ('89888b8a0407061966676061141b1a19', 'f53c464c705ee0f28d9a4c59374928bd', + '68696a6b6d6e6f70727374757778797a7c7d7e7f81828384', + 'ecb-tbl-192: I=127'), + ('d3d2dddcaaadacaf9c9d9e9fe8ebeae5', '9adb3d4cca559bb98c3e2ed73dbf1154', + '868788898b8c8d8e90919293959697989a9b9c9d9fa0a1a2', + 'ecb-tbl-192: I=128'), + + # ecb_tbl.txt, KEYSIZE=256 + ('834eadfccac7e1b30664b1aba44815ab', '1946dabf6a03a2a2c3d0b05080aed6fc', + '00010203050607080a0b0c0d0f10111214151617191a1b1c1e1f202123242526', + 'ecb-tbl-256: I=1'), + ('d9dc4dba3021b05d67c0518f72b62bf1', '5ed301d747d3cc715445ebdec62f2fb4', + '28292a2b2d2e2f30323334353738393a3c3d3e3f41424344464748494b4c4d4e', + 'ecb-tbl-256: I=2'), + ('a291d86301a4a739f7392173aa3c604c', '6585c8f43d13a6beab6419fc5935b9d0', + '50515253555657585a5b5c5d5f60616264656667696a6b6c6e6f707173747576', + 'ecb-tbl-256: I=3'), + ('4264b2696498de4df79788a9f83e9390', '2a5b56a596680fcc0e05f5e0f151ecae', + '78797a7b7d7e7f80828384858788898a8c8d8e8f91929394969798999b9c9d9e', + 'ecb-tbl-256: I=4'), + ('ee9932b3721804d5a83ef5949245b6f6', 'f5d6ff414fd2c6181494d20c37f2b8c4', + 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', + 'ecb-tbl-256: I=5'), + ('e6248f55c5fdcbca9cbbb01c88a2ea77', 
'85399c01f59fffb5204f19f8482f00b8', + 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee', + 'ecb-tbl-256: I=6'), + ('b8358e41b9dff65fd461d55a99266247', '92097b4c88a041ddf98144bc8d22e8e7', + 'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c0e0f101113141516', + 'ecb-tbl-256: I=7'), + ('f0e2d72260af58e21e015ab3a4c0d906', '89bd5b73b356ab412aef9f76cea2d65c', + '18191a1b1d1e1f20222324252728292a2c2d2e2f31323334363738393b3c3d3e', + 'ecb-tbl-256: I=8'), + ('475b8b823ce8893db3c44a9f2a379ff7', '2536969093c55ff9454692f2fac2f530', + '40414243454647484a4b4c4d4f50515254555657595a5b5c5e5f606163646566', + 'ecb-tbl-256: I=9'), + ('688f5281945812862f5f3076cf80412f', '07fc76a872843f3f6e0081ee9396d637', + '68696a6b6d6e6f70727374757778797a7c7d7e7f81828384868788898b8c8d8e', + 'ecb-tbl-256: I=10'), + ('08d1d2bc750af553365d35e75afaceaa', 'e38ba8ec2aa741358dcc93e8f141c491', + '90919293959697989a9b9c9d9fa0a1a2a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', + 'ecb-tbl-256: I=11'), + ('8707121f47cc3efceca5f9a8474950a1', 'd028ee23e4a89075d0b03e868d7d3a42', + 'b8b9babbbdbebfc0c2c3c4c5c7c8c9cacccdcecfd1d2d3d4d6d7d8d9dbdcddde', + 'ecb-tbl-256: I=12'), + ('e51aa0b135dba566939c3b6359a980c5', '8cd9423dfc459e547155c5d1d522e540', + 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2f4f5f6f7f9fafbfcfefe010103040506', + 'ecb-tbl-256: I=13'), + ('069a007fc76a459f98baf917fedf9521', '080e9517eb1677719acf728086040ae3', + '08090a0b0d0e0f10121314151718191a1c1d1e1f21222324262728292b2c2d2e', + 'ecb-tbl-256: I=14'), + ('726165c1723fbcf6c026d7d00b091027', '7c1700211a3991fc0ecded0ab3e576b0', + '30313233353637383a3b3c3d3f40414244454647494a4b4c4e4f505153545556', + 'ecb-tbl-256: I=15'), + ('d7c544de91d55cfcde1f84ca382200ce', 'dabcbcc855839251db51e224fbe87435', + '58595a5b5d5e5f60626364656768696a6c6d6e6f71727374767778797b7c7d7e', + 'ecb-tbl-256: I=16'), + ('fed3c9a161b9b5b2bd611b41dc9da357', '68d56fad0406947a4dd27a7448c10f1d', + '80818283858687888a8b8c8d8f90919294959697999a9b9c9e9fa0a1a3a4a5a6', + 'ecb-tbl-256: I=17'), + ('4f634cdc6551043409f30b635832cf82', 'da9a11479844d1ffee24bbf3719a9925', + 'a8a9aaabadaeafb0b2b3b4b5b7b8b9babcbdbebfc1c2c3c4c6c7c8c9cbcccdce', + 'ecb-tbl-256: I=18'), + ('109ce98db0dfb36734d9f3394711b4e6', '5e4ba572f8d23e738da9b05ba24b8d81', + 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', + 'ecb-tbl-256: I=19'), + ('4ea6dfaba2d8a02ffdffa89835987242', 'a115a2065d667e3f0b883837a6e903f8', + '70717273757677787a7b7c7d7f80818284858687898a8b8c8e8f909193949596', + 'ecb-tbl-256: I=20'), + ('5ae094f54af58e6e3cdbf976dac6d9ef', '3e9e90dc33eac2437d86ad30b137e66e', + '98999a9b9d9e9fa0a2a3a4a5a7a8a9aaacadaeafb1b2b3b4b6b7b8b9bbbcbdbe', + 'ecb-tbl-256: I=21'), + ('764d8e8e0f29926dbe5122e66354fdbe', '01ce82d8fbcdae824cb3c48e495c3692', + 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2d4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', + 'ecb-tbl-256: I=22'), + ('3f0418f888cdf29a982bf6b75410d6a9', '0c9cff163ce936faaf083cfd3dea3117', + 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fafcfdfeff01020304060708090b0c0d0e', + 'ecb-tbl-256: I=23'), + ('e4a3e7cb12cdd56aa4a75197a9530220', '5131ba9bd48f2bba85560680df504b52', + '10111213151617181a1b1c1d1f20212224252627292a2b2c2e2f303133343536', + 'ecb-tbl-256: I=24'), + ('211677684aac1ec1a160f44c4ebf3f26', '9dc503bbf09823aec8a977a5ad26ccb2', + '38393a3b3d3e3f40424344454748494a4c4d4e4f51525354565758595b5c5d5e', + 'ecb-tbl-256: I=25'), + ('d21e439ff749ac8f18d6d4b105e03895', '9a6db0c0862e506a9e397225884041d7', + '60616263656667686a6b6c6d6f70717274757677797a7b7c7e7f808183848586', + 'ecb-tbl-256: I=26'), + ('d9f6ff44646c4725bd4c0103ff5552a7', 
'430bf9570804185e1ab6365fc6a6860c', + '88898a8b8d8e8f90929394959798999a9c9d9e9fa1a2a3a4a6a7a8a9abacadae', + 'ecb-tbl-256: I=27'), + ('0b1256c2a00b976250cfc5b0c37ed382', '3525ebc02f4886e6a5a3762813e8ce8a', + 'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', + 'ecb-tbl-256: I=28'), + ('b056447ffc6dc4523a36cc2e972a3a79', '07fa265c763779cce224c7bad671027b', + 'd8d9dadbdddedfe0e2e3e4e5e7e8e9eaecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', + 'ecb-tbl-256: I=29'), + ('5e25ca78f0de55802524d38da3fe4456', 'e8b72b4e8be243438c9fff1f0e205872', + '00010203050607080a0b0c0d0f10111214151617191a1b1c1e1f202123242526', + 'ecb-tbl-256: I=30'), + ('a5bcf4728fa5eaad8567c0dc24675f83', '109d4f999a0e11ace1f05e6b22cbcb50', + '28292a2b2d2e2f30323334353738393a3c3d3e3f41424344464748494b4c4d4e', + 'ecb-tbl-256: I=31'), + ('814e59f97ed84646b78b2ca022e9ca43', '45a5e8d4c3ed58403ff08d68a0cc4029', + '50515253555657585a5b5c5d5f60616264656667696a6b6c6e6f707173747576', + 'ecb-tbl-256: I=32'), + ('15478beec58f4775c7a7f5d4395514d7', '196865964db3d417b6bd4d586bcb7634', + '78797a7b7d7e7f80828384858788898a8c8d8e8f91929394969798999b9c9d9e', + 'ecb-tbl-256: I=33'), + ('253548ffca461c67c8cbc78cd59f4756', '60436ad45ac7d30d99195f815d98d2ae', + 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', + 'ecb-tbl-256: I=34'), + ('fd7ad8d73b9b0f8cc41600640f503d65', 'bb07a23f0b61014b197620c185e2cd75', + 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee', + 'ecb-tbl-256: I=35'), + ('06199de52c6cbf8af954cd65830bcd56', '5bc0b2850129c854423aff0751fe343b', + 'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c0e0f101113141516', + 'ecb-tbl-256: I=36'), + ('f17c4ffe48e44c61bd891e257e725794', '7541a78f96738e6417d2a24bd2beca40', + '18191a1b1d1e1f20222324252728292a2c2d2e2f31323334363738393b3c3d3e', + 'ecb-tbl-256: I=37'), + ('9a5b4a402a3e8a59be6bf5cd8154f029', 'b0a303054412882e464591f1546c5b9e', + '40414243454647484a4b4c4d4f50515254555657595a5b5c5e5f606163646566', + 'ecb-tbl-256: I=38'), + ('79bd40b91a7e07dc939d441782ae6b17', '778c06d8a355eeee214fcea14b4e0eef', + '68696a6b6d6e6f70727374757778797a7c7d7e7f81828384868788898b8c8d8e', + 'ecb-tbl-256: I=39'), + ('d8ceaaf8976e5fbe1012d8c84f323799', '09614206d15cbace63227d06db6beebb', + '90919293959697989a9b9c9d9fa0a1a2a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', + 'ecb-tbl-256: I=40'), + ('3316e2751e2e388b083da23dd6ac3fbe', '41b97fb20e427a9fdbbb358d9262255d', + 'b8b9babbbdbebfc0c2c3c4c5c7c8c9cacccdcecfd1d2d3d4d6d7d8d9dbdcddde', + 'ecb-tbl-256: I=41'), + ('8b7cfbe37de7dca793521819242c5816', 'c1940f703d845f957652c2d64abd7adf', + 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2f4f5f6f7f9fafbfcfefe010103040506', + 'ecb-tbl-256: I=42'), + ('f23f033c0eebf8ec55752662fd58ce68', 'd2d44fcdae5332343366db297efcf21b', + '08090a0b0d0e0f10121314151718191a1c1d1e1f21222324262728292b2c2d2e', + 'ecb-tbl-256: I=43'), + ('59eb34f6c8bdbacc5fc6ad73a59a1301', 'ea8196b79dbe167b6aa9896e287eed2b', + '30313233353637383a3b3c3d3f40414244454647494a4b4c4e4f505153545556', + 'ecb-tbl-256: I=44'), + ('dcde8b6bd5cf7cc22d9505e3ce81261a', 'd6b0b0c4ba6c7dbe5ed467a1e3f06c2d', + '58595a5b5d5e5f60626364656768696a6c6d6e6f71727374767778797b7c7d7e', + 'ecb-tbl-256: I=45'), + ('e33cf7e524fed781e7042ff9f4b35dc7', 'ec51eb295250c22c2fb01816fb72bcae', + '80818283858687888a8b8c8d8f90919294959697999a9b9c9e9fa0a1a3a4a5a6', + 'ecb-tbl-256: I=46'), + ('27963c8facdf73062867d164df6d064c', 'aded6630a07ce9c7408a155d3bd0d36f', + 'a8a9aaabadaeafb0b2b3b4b5b7b8b9babcbdbebfc1c2c3c4c6c7c8c9cbcccdce', + 'ecb-tbl-256: I=47'), + ('77b1ce386b551b995f2f2a1da994eef8', 
'697c9245b9937f32f5d1c82319f0363a', + 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', + 'ecb-tbl-256: I=48'), + ('f083388b013679efcf0bb9b15d52ae5c', 'aad5ad50c6262aaec30541a1b7b5b19c', + 'f8f9fafbfdfefe00020304050708090a0c0d0e0f11121314161718191b1c1d1e', + 'ecb-tbl-256: I=49'), + ('c5009e0dab55db0abdb636f2600290c8', '7d34b893855341ec625bd6875ac18c0d', + '20212223252627282a2b2c2d2f30313234353637393a3b3c3e3f404143444546', + 'ecb-tbl-256: I=50'), + ('7804881e26cd532d8514d3683f00f1b9', '7ef05105440f83862f5d780e88f02b41', + '48494a4b4d4e4f50525354555758595a5c5d5e5f61626364666768696b6c6d6e', + 'ecb-tbl-256: I=51'), + ('46cddcd73d1eb53e675ca012870a92a3', 'c377c06403382061af2c9c93a8e70df6', + '70717273757677787a7b7c7d7f80818284858687898a8b8c8e8f909193949596', + 'ecb-tbl-256: I=52'), + ('a9fb44062bb07fe130a8e8299eacb1ab', '1dbdb3ffdc052dacc83318853abc6de5', + '98999a9b9d9e9fa0a2a3a4a5a7a8a9aaacadaeafb1b2b3b4b6b7b8b9bbbcbdbe', + 'ecb-tbl-256: I=53'), + ('2b6ff8d7a5cc3a28a22d5a6f221af26b', '69a6eab00432517d0bf483c91c0963c7', + 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2d4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', + 'ecb-tbl-256: I=54'), + ('1a9527c29b8add4b0e3e656dbb2af8b4', '0797f41dc217c80446e1d514bd6ab197', + 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fafcfdfeff01020304060708090b0c0d0e', + 'ecb-tbl-256: I=55'), + ('7f99cf2c75244df015eb4b0c1050aeae', '9dfd76575902a637c01343c58e011a03', + '10111213151617181a1b1c1d1f20212224252627292a2b2c2e2f303133343536', + 'ecb-tbl-256: I=56'), + ('e84ff85b0d9454071909c1381646c4ed', 'acf4328ae78f34b9fa9b459747cc2658', + '38393a3b3d3e3f40424344454748494a4c4d4e4f51525354565758595b5c5d5e', + 'ecb-tbl-256: I=57'), + ('89afd40f99521280d5399b12404f6db4', 'b0479aea12bac4fe2384cf98995150c6', + '60616263656667686a6b6c6d6f70717274757677797a7b7c7e7f808183848586', + 'ecb-tbl-256: I=58'), + ('a09ef32dbc5119a35ab7fa38656f0329', '9dd52789efe3ffb99f33b3da5030109a', + '88898a8b8d8e8f90929394959798999a9c9d9e9fa1a2a3a4a6a7a8a9abacadae', + 'ecb-tbl-256: I=59'), + ('61773457f068c376c7829b93e696e716', 'abbb755e4621ef8f1214c19f649fb9fd', + 'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', + 'ecb-tbl-256: I=60'), + ('a34f0cae726cce41dd498747d891b967', 'da27fb8174357bce2bed0e7354f380f9', + 'd8d9dadbdddedfe0e2e3e4e5e7e8e9eaecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', + 'ecb-tbl-256: I=61'), + ('856f59496c7388ee2d2b1a27b7697847', 'c59a0663f0993838f6e5856593bdc5ef', + '00010203050607080a0b0c0d0f10111214151617191a1b1c1e1f202123242526', + 'ecb-tbl-256: I=62'), + ('cb090c593ef7720bd95908fb93b49df4', 'ed60b264b5213e831607a99c0ce5e57e', + '28292a2b2d2e2f30323334353738393a3c3d3e3f41424344464748494b4c4d4e', + 'ecb-tbl-256: I=63'), + ('a0ac75cd2f1923d460fc4d457ad95baf', 'e50548746846f3eb77b8c520640884ed', + '50515253555657585a5b5c5d5f60616264656667696a6b6c6e6f707173747576', + 'ecb-tbl-256: I=64'), + ('2a2b282974777689e8e9eeef525d5c5f', '28282cc7d21d6a2923641e52d188ef0c', + '78797a7b7d7e7f80828384858788898a8c8d8e8f91929394969798999b9c9d9e', + 'ecb-tbl-256: I=65'), + ('909192939390919e0f0e09089788898a', '0dfa5b02abb18e5a815305216d6d4f8e', + 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', + 'ecb-tbl-256: I=66'), + ('777675748d8e8f907170777649464744', '7359635c0eecefe31d673395fb46fb99', + 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee', + 'ecb-tbl-256: I=67'), + ('717073720605040b2d2c2b2a05fafbf9', '73c679f7d5aef2745c9737bb4c47fb36', + 'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c0e0f101113141516', + 'ecb-tbl-256: I=68'), + ('64656667fefdfcc31b1a1d1ca5aaaba8', 
'b192bd472a4d2eafb786e97458967626', + '18191a1b1d1e1f20222324252728292a2c2d2e2f31323334363738393b3c3d3e', + 'ecb-tbl-256: I=69'), + ('dbdad9d86a696867b5b4b3b2c8d7d6d5', '0ec327f6c8a2b147598ca3fde61dc6a4', + '40414243454647484a4b4c4d4f50515254555657595a5b5c5e5f606163646566', + 'ecb-tbl-256: I=70'), + ('5c5d5e5fe3e0e1fe31303736333c3d3e', 'fc418eb3c41b859b38d4b6f646629729', + '68696a6b6d6e6f70727374757778797a7c7d7e7f81828384868788898b8c8d8e', + 'ecb-tbl-256: I=71'), + ('545556574b48494673727574546b6a69', '30249e5ac282b1c981ea64b609f3a154', + '90919293959697989a9b9c9d9fa0a1a2a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', + 'ecb-tbl-256: I=72'), + ('ecedeeefc6c5c4bb56575051f5fafbf8', '5e6e08646d12150776bb43c2d78a9703', + 'b8b9babbbdbebfc0c2c3c4c5c7c8c9cacccdcecfd1d2d3d4d6d7d8d9dbdcddde', + 'ecb-tbl-256: I=73'), + ('464744452724252ac9c8cfced2cdcccf', 'faeb3d5de652cd3447dceb343f30394a', + 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2f4f5f6f7f9fafbfcfefe010103040506', + 'ecb-tbl-256: I=74'), + ('e6e7e4e54142435c878681801c131211', 'a8e88706823f6993ef80d05c1c7b2cf0', + '08090a0b0d0e0f10121314151718191a1c1d1e1f21222324262728292b2c2d2e', + 'ecb-tbl-256: I=75'), + ('72737071cfcccdc2f9f8fffe710e0f0c', '8ced86677e6e00a1a1b15968f2d3cce6', + '30313233353637383a3b3c3d3f40414244454647494a4b4c4e4f505153545556', + 'ecb-tbl-256: I=76'), + ('505152537370714ec3c2c5c4010e0f0c', '9fc7c23858be03bdebb84e90db6786a9', + '58595a5b5d5e5f60626364656768696a6c6d6e6f71727374767778797b7c7d7e', + 'ecb-tbl-256: I=77'), + ('a8a9aaab5c5f5e51aeafa8a93d222320', 'b4fbd65b33f70d8cf7f1111ac4649c36', + '80818283858687888a8b8c8d8f90919294959697999a9b9c9e9fa0a1a3a4a5a6', + 'ecb-tbl-256: I=78'), + ('dedfdcddf6f5f4eb10111617fef1f0f3', 'c5c32d5ed03c4b53cc8c1bd0ef0dbbf6', + 'a8a9aaabadaeafb0b2b3b4b5b7b8b9babcbdbebfc1c2c3c4c6c7c8c9cbcccdce', + 'ecb-tbl-256: I=79'), + ('bdbcbfbe5e5d5c530b0a0d0cfac5c4c7', 'd1a7f03b773e5c212464b63709c6a891', + 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', + 'ecb-tbl-256: I=80'), + ('8a8b8889050606f8f4f5f2f3636c6d6e', '6b7161d8745947ac6950438ea138d028', + 'f8f9fafbfdfefe00020304050708090a0c0d0e0f11121314161718191b1c1d1e', + 'ecb-tbl-256: I=81'), + ('a6a7a4a54d4e4f40b2b3b4b539262724', 'fd47a9f7e366ee7a09bc508b00460661', + '20212223252627282a2b2c2d2f30313234353637393a3b3c3e3f404143444546', + 'ecb-tbl-256: I=82'), + ('9c9d9e9fe9eaebf40e0f08099b949596', '00d40b003dc3a0d9310b659b98c7e416', + '48494a4b4d4e4f50525354555758595a5c5d5e5f61626364666768696b6c6d6e', + 'ecb-tbl-256: I=83'), + ('2d2c2f2e1013121dcccdcacbed121310', 'eea4c79dcc8e2bda691f20ac48be0717', + '70717273757677787a7b7c7d7f80818284858687898a8b8c8e8f909193949596', + 'ecb-tbl-256: I=84'), + ('f4f5f6f7edeeefd0eaebecedf7f8f9fa', 'e78f43b11c204403e5751f89d05a2509', + '98999a9b9d9e9fa0a2a3a4a5a7a8a9aaacadaeafb1b2b3b4b6b7b8b9bbbcbdbe', + 'ecb-tbl-256: I=85'), + ('3d3c3f3e282b2a2573727574150a0b08', 'd0f0e3d1f1244bb979931e38dd1786ef', + 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2d4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', + 'ecb-tbl-256: I=86'), + ('b6b7b4b5f8fbfae5b4b5b2b3a0afaead', '042e639dc4e1e4dde7b75b749ea6f765', + 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fafcfdfeff01020304060708090b0c0d0e', + 'ecb-tbl-256: I=87'), + ('b7b6b5b4989b9a95878681809ba4a5a6', 'bc032fdd0efe29503a980a7d07ab46a8', + '10111213151617181a1b1c1d1f20212224252627292a2b2c2e2f303133343536', + 'ecb-tbl-256: I=88'), + ('a8a9aaabe5e6e798e9e8efee4748494a', '0c93ac949c0da6446effb86183b6c910', + '38393a3b3d3e3f40424344454748494a4c4d4e4f51525354565758595b5c5d5e', + 'ecb-tbl-256: I=89'), + ('ecedeeefd9dadbd4b9b8bfbe657a7b78', 
'e0d343e14da75c917b4a5cec4810d7c2', + '60616263656667686a6b6c6d6f70717274757677797a7b7c7e7f808183848586', + 'ecb-tbl-256: I=90'), + ('7f7e7d7c696a6b74cacbcccd929d9c9f', '0eafb821748408279b937b626792e619', + '88898a8b8d8e8f90929394959798999a9c9d9e9fa1a2a3a4a6a7a8a9abacadae', + 'ecb-tbl-256: I=91'), + ('08090a0b0605040bfffef9f8b9c6c7c4', 'fa1ac6e02d23b106a1fef18b274a553f', + 'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', + 'ecb-tbl-256: I=92'), + ('08090a0bf1f2f3ccfcfdfafb68676665', '0dadfe019cd12368075507df33c1a1e9', + 'd8d9dadbdddedfe0e2e3e4e5e7e8e9eaecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', + 'ecb-tbl-256: I=93'), + ('cacbc8c93a393837050403020d121310', '3a0879b414465d9ffbaf86b33a63a1b9', + '00010203050607080a0b0c0d0f10111214151617191a1b1c1e1f202123242526', + 'ecb-tbl-256: I=94'), + ('e9e8ebea8281809f8f8e8988343b3a39', '62199fadc76d0be1805d3ba0b7d914bf', + '28292a2b2d2e2f30323334353738393a3c3d3e3f41424344464748494b4c4d4e', + 'ecb-tbl-256: I=95'), + ('515053524645444bd0d1d6d7340b0a09', '1b06d6c5d333e742730130cf78e719b4', + '50515253555657585a5b5c5d5f60616264656667696a6b6c6e6f707173747576', + 'ecb-tbl-256: I=96'), + ('42434041ecefee1193929594c6c9c8cb', 'f1f848824c32e9dcdcbf21580f069329', + '78797a7b7d7e7f80828384858788898a8c8d8e8f91929394969798999b9c9d9e', + 'ecb-tbl-256: I=97'), + ('efeeedecc2c1c0cf76777071455a5b58', '1a09050cbd684f784d8e965e0782f28a', + 'a0a1a2a3a5a6a7a8aaabacadafb0b1b2b4b5b6b7b9babbbcbebfc0c1c3c4c5c6', + 'ecb-tbl-256: I=98'), + ('5f5e5d5c3f3c3d221d1c1b1a19161714', '79c2969e7ded2ba7d088f3f320692360', + 'c8c9cacbcdcecfd0d2d3d4d5d7d8d9dadcdddedfe1e2e3e4e6e7e8e9ebecedee', + 'ecb-tbl-256: I=99'), + ('000102034142434c1c1d1a1b8d727371', '091a658a2f7444c16accb669450c7b63', + 'f0f1f2f3f5f6f7f8fafbfcfdfe01000204050607090a0b0c0e0f101113141516', + 'ecb-tbl-256: I=100'), + ('8e8f8c8db1b2b38c56575051050a0b08', '97c1e3a72cca65fa977d5ed0e8a7bbfc', + '18191a1b1d1e1f20222324252728292a2c2d2e2f31323334363738393b3c3d3e', + 'ecb-tbl-256: I=101'), + ('a7a6a5a4e8ebeae57f7e7978cad5d4d7', '70c430c6db9a17828937305a2df91a2a', + '40414243454647484a4b4c4d4f50515254555657595a5b5c5e5f606163646566', + 'ecb-tbl-256: I=102'), + ('8a8b888994979689454443429f909192', '629553457fbe2479098571c7c903fde8', + '68696a6b6d6e6f70727374757778797a7c7d7e7f81828384868788898b8c8d8e', + 'ecb-tbl-256: I=103'), + ('8c8d8e8fe0e3e2ed45444342f1cecfcc', 'a25b25a61f612669e7d91265c7d476ba', + '90919293959697989a9b9c9d9fa0a1a2a4a5a6a7a9aaabacaeafb0b1b3b4b5b6', + 'ecb-tbl-256: I=104'), + ('fffefdfc4c4f4e31d8d9dedfb6b9b8bb', 'eb7e4e49b8ae0f024570dda293254fed', + 'b8b9babbbdbebfc0c2c3c4c5c7c8c9cacccdcecfd1d2d3d4d6d7d8d9dbdcddde', + 'ecb-tbl-256: I=105'), + ('fdfcfffecccfcec12f2e29286679787b', '38fe15d61cca84516e924adce5014f67', + 'e0e1e2e3e5e6e7e8eaebecedeff0f1f2f4f5f6f7f9fafbfcfefe010103040506', + 'ecb-tbl-256: I=106'), + ('67666564bab9b8a77071767719161714', '3ad208492249108c9f3ebeb167ad0583', + '08090a0b0d0e0f10121314151718191a1c1d1e1f21222324262728292b2c2d2e', + 'ecb-tbl-256: I=107'), + ('9a9b98992d2e2f2084858283245b5a59', '299ba9f9bf5ab05c3580fc26edd1ed12', + '30313233353637383a3b3c3d3f40414244454647494a4b4c4e4f505153545556', + 'ecb-tbl-256: I=108'), + ('a4a5a6a70b0809365c5d5a5b2c232221', '19dc705b857a60fb07717b2ea5717781', + '58595a5b5d5e5f60626364656768696a6c6d6e6f71727374767778797b7c7d7e', + 'ecb-tbl-256: I=109'), + ('464744455754555af3f2f5f4afb0b1b2', 'ffc8aeb885b5efcad06b6dbebf92e76b', + '80818283858687888a8b8c8d8f90919294959697999a9b9c9e9fa0a1a3a4a5a6', + 'ecb-tbl-256: I=110'), + 
('323330317675746b7273747549464744', 'f58900c5e0b385253ff2546250a0142b', + 'a8a9aaabadaeafb0b2b3b4b5b7b8b9babcbdbebfc1c2c3c4c6c7c8c9cbcccdce', + 'ecb-tbl-256: I=111'), + ('a8a9aaab181b1a15808186872b141516', '2ee67b56280bc462429cee6e3370cbc1', + 'd0d1d2d3d5d6d7d8dadbdcdddfe0e1e2e4e5e6e7e9eaebeceeeff0f1f3f4f5f6', + 'ecb-tbl-256: I=112'), + ('e7e6e5e4202323ddaaabacad343b3a39', '20db650a9c8e9a84ab4d25f7edc8f03f', + 'f8f9fafbfdfefe00020304050708090a0c0d0e0f11121314161718191b1c1d1e', + 'ecb-tbl-256: I=113'), + ('a8a9aaab2221202fedecebea1e010003', '3c36da169525cf818843805f25b78ae5', + '20212223252627282a2b2c2d2f30313234353637393a3b3c3e3f404143444546', + 'ecb-tbl-256: I=114'), + ('f9f8fbfa5f5c5d42424344450e010003', '9a781d960db9e45e37779042fea51922', + '48494a4b4d4e4f50525354555758595a5c5d5e5f61626364666768696b6c6d6e', + 'ecb-tbl-256: I=115'), + ('57565554f5f6f7f89697909120dfdedd', '6560395ec269c672a3c288226efdba77', + '70717273757677787a7b7c7d7f80818284858687898a8b8c8e8f909193949596', + 'ecb-tbl-256: I=116'), + ('f8f9fafbcccfcef1dddcdbda0e010003', '8c772b7a189ac544453d5916ebb27b9a', + '98999a9b9d9e9fa0a2a3a4a5a7a8a9aaacadaeafb1b2b3b4b6b7b8b9bbbcbdbe', + 'ecb-tbl-256: I=117'), + ('d9d8dbda7073727d80818687c2dddcdf', '77ca5468cc48e843d05f78eed9d6578f', + 'c0c1c2c3c5c6c7c8cacbcccdcfd0d1d2d4d5d6d7d9dadbdcdedfe0e1e3e4e5e6', + 'ecb-tbl-256: I=118'), + ('c5c4c7c6080b0a1588898e8f68676665', '72cdcc71dc82c60d4429c9e2d8195baa', + 'e8e9eaebedeeeff0f2f3f4f5f7f8f9fafcfdfeff01020304060708090b0c0d0e', + 'ecb-tbl-256: I=119'), + ('83828180dcdfded186878081f0cfcecd', '8080d68ce60e94b40b5b8b69eeb35afa', + '10111213151617181a1b1c1d1f20212224252627292a2b2c2e2f303133343536', + 'ecb-tbl-256: I=120'), + ('98999a9bdddedfa079787f7e0a050407', '44222d3cde299c04369d58ac0eba1e8e', + '38393a3b3d3e3f40424344454748494a4c4d4e4f51525354565758595b5c5d5e', + 'ecb-tbl-256: I=121'), + ('cecfcccd4f4c4d429f9e9998dfc0c1c2', '9b8721b0a8dfc691c5bc5885dbfcb27a', + '60616263656667686a6b6c6d6f70717274757677797a7b7c7e7f808183848586', + 'ecb-tbl-256: I=122'), + ('404142436665647b29282f2eaba4a5a6', '0dc015ce9a3a3414b5e62ec643384183', + '88898a8b8d8e8f90929394959798999a9c9d9e9fa1a2a3a4a6a7a8a9abacadae', + 'ecb-tbl-256: I=123'), + ('33323130e6e5e4eb23222524dea1a0a3', '705715448a8da412025ce38345c2a148', + 'b0b1b2b3b5b6b7b8babbbcbdbfc0c1c2c4c5c6c7c9cacbcccecfd0d1d3d4d5d6', + 'ecb-tbl-256: I=124'), + ('cfcecdccf6f5f4cbe6e7e0e199969794', 'c32b5b0b6fbae165266c569f4b6ecf0b', + 'd8d9dadbdddedfe0e2e3e4e5e7e8e9eaecedeeeff1f2f3f4f6f7f8f9fbfcfdfe', + 'ecb-tbl-256: I=125'), + ('babbb8b97271707fdcdddadb29363734', '4dca6c75192a01ddca9476af2a521e87', + '00010203050607080a0b0c0d0f10111214151617191a1b1c1e1f202123242526', + 'ecb-tbl-256: I=126'), + ('c9c8cbca4447465926272021545b5a59', '058691e627ecbc36ac07b6db423bd698', + '28292a2b2d2e2f30323334353738393a3c3d3e3f41424344464748494b4c4d4e', + 'ecb-tbl-256: I=127'), + ('050407067477767956575051221d1c1f', '7444527095838fe080fc2bcdd30847eb', + '50515253555657585a5b5c5d5f60616264656667696a6b6c6e6f707173747576', + 'ecb-tbl-256: I=128'), + + # FIPS PUB 800-38A test vectors, 2001 edition. Annex F. 
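+ # The entries in this section follow the same layout as the rest of the
+ # table, (plaintext, ciphertext, key[, description[, params]]); the optional
+ # params dict (mode, iv, segment_size, ctr_params) selects the block mode.
+ # As an illustrative sketch only (the actual plumbing is make_block_tests()
+ # in .common, which is not shown in this file), the F.2.1 CBC/AES-128 entry
+ # further down corresponds roughly to:
+ #
+ #   from binascii import unhexlify
+ #   from Crypto.Cipher import AES
+ #   key = unhexlify('2b7e151628aed2a6abf7158809cf4f3c')
+ #   iv  = unhexlify('000102030405060708090a0b0c0d0e0f')
+ #   cipher = AES.new(key, AES.MODE_CBC, iv)
+ #   ct = cipher.encrypt(unhexlify('6bc1bee22e409f96e93d7e117393172a'))
+ #   assert ct == unhexlify('7649abac8119b246cee98e9b12e9197d')   # first CBC block
+ #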
+ + ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', + '3ad77bb40d7a3660a89ecaf32466ef97'+'f5d3d58503b9699de785895a96fdbaaf'+ + '43b1cd7f598ece23881b00e3ed030688'+'7b0c785e27e8ad3f8223207104725dd4', + '2b7e151628aed2a6abf7158809cf4f3c', + 'NIST 800-38A, F.1.1, ECB and AES-128'), + + ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', + 'bd334f1d6e45f25ff712a214571fa5cc'+'974104846d0ad3ad7734ecb3ecee4eef'+ + 'ef7afd2270e2e60adce0ba2face6444e'+'9a4b41ba738d6c72fb16691603c18e0e', + '8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b', + 'NIST 800-38A, F.1.3, ECB and AES-192'), + + ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', + 'f3eed1bdb5d2a03c064b5a7e3db181f8'+'591ccb10d410ed26dc5ba74a31362870'+ + 'b6ed21b99ca6f4f9f153e7b1beafed1d'+'23304b7a39f9f3ff067d8d8f9e24ecc7', + '603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4', + 'NIST 800-38A, F.1.3, ECB and AES-256'), + + ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', + '7649abac8119b246cee98e9b12e9197d'+'5086cb9b507219ee95db113a917678b2'+ + '73bed6b8e3c1743b7116e69e22229516'+'3ff1caa1681fac09120eca307586e1a7', + '2b7e151628aed2a6abf7158809cf4f3c', + 'NIST 800-38A, F.2.1, CBC and AES-128', + dict(mode='CBC', iv='000102030405060708090a0b0c0d0e0f')), + + ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', + '4f021db243bc633d7178183a9fa071e8'+'b4d9ada9ad7dedf4e5e738763f69145a'+ + '571b242012fb7ae07fa9baac3df102e0'+'08b0e27988598881d920a9e64f5615cd', + '8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b', + 'NIST 800-38A, F.2.1, CBC and AES-192', + dict(mode='CBC', iv='000102030405060708090a0b0c0d0e0f')), + + ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', + 'f58c4c04d6e5f1ba779eabfb5f7bfbd6'+'9cfc4e967edb808d679f777bc6702c7d'+ + '39f23369a9d9bacfa530e26304231461'+'b2eb05e2c39be9fcda6c19078c6a9d1b', + '603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4', + 'NIST 800-38A, F.2.1, CBC and AES-256', + dict(mode='CBC', iv='000102030405060708090a0b0c0d0e0f')), + + # Skip CFB-1 since it is not supported by PyCrypto + + ('6bc1bee22e409f96e93d7e117393172aae2d','3b79424c9c0dd436bace9e0ed4586a4f32b9', + '2b7e151628aed2a6abf7158809cf4f3c', + 'NIST 800-38A, F.3.7, CFB-8 and AES-128', + dict(mode='CFB', iv='000102030405060708090a0b0c0d0e0f', segment_size=8)), + + ('6bc1bee22e409f96e93d7e117393172aae2d','cda2521ef0a905ca44cd057cbf0d47a0678a', + '8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b', + 'NIST 800-38A, F.3.9, CFB-8 and AES-192', + dict(mode='CFB', iv='000102030405060708090a0b0c0d0e0f', segment_size=8)), + + ('6bc1bee22e409f96e93d7e117393172aae2d','dc1f1a8520a64db55fcc8ac554844e889700', + '603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4', + 'NIST 800-38A, F.3.11, CFB-8 and AES-256', + dict(mode='CFB', iv='000102030405060708090a0b0c0d0e0f', segment_size=8)), + + ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', + 
'3b3fd92eb72dad20333449f8e83cfb4a'+'c8a64537a0b3a93fcde3cdad9f1ce58b'+ + '26751f67a3cbb140b1808cf187a4f4df'+'c04b05357c5d1c0eeac4c66f9ff7f2e6', + '2b7e151628aed2a6abf7158809cf4f3c', + 'NIST 800-38A, F.3.13, CFB-128 and AES-128', + dict(mode='CFB', iv='000102030405060708090a0b0c0d0e0f', segment_size=128)), + + ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', + 'cdc80d6fddf18cab34c25909c99a4174'+'67ce7f7f81173621961a2b70171d3d7a'+ + '2e1e8a1dd59b88b1c8e60fed1efac4c9'+'c05f9f9ca9834fa042ae8fba584b09ff', + '8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b', + 'NIST 800-38A, F.3.15, CFB-128 and AES-192', + dict(mode='CFB', iv='000102030405060708090a0b0c0d0e0f', segment_size=128)), + + ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', + 'dc7e84bfda79164b7ecd8486985d3860'+'39ffed143b28b1c832113c6331e5407b'+ + 'df10132415e54b92a13ed0a8267ae2f9'+'75a385741ab9cef82031623d55b1e471', + '603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4', + 'NIST 800-38A, F.3.17, CFB-128 and AES-256', + dict(mode='CFB', iv='000102030405060708090a0b0c0d0e0f', segment_size=128)), + + ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', + '3b3fd92eb72dad20333449f8e83cfb4a'+'7789508d16918f03f53c52dac54ed825'+ + '9740051e9c5fecf64344f7a82260edcc'+'304c6528f659c77866a510d9c1d6ae5e', + '2b7e151628aed2a6abf7158809cf4f3c', + 'NIST 800-38A, F.4.1, OFB and AES-128', + dict(mode='OFB', iv='000102030405060708090a0b0c0d0e0f')), + + ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', + 'cdc80d6fddf18cab34c25909c99a4174'+'fcc28b8d4c63837c09e81700c1100401'+ + '8d9a9aeac0f6596f559c6d4daf59a5f2'+'6d9f200857ca6c3e9cac524bd9acc92a', + '8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b', + 'NIST 800-38A, F.4.3, OFB and AES-192', + dict(mode='OFB', iv='000102030405060708090a0b0c0d0e0f')), + + ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', + 'dc7e84bfda79164b7ecd8486985d3860'+'4febdc6740d20b3ac88f6ad82a4fb08d'+ + '71ab47a086e86eedf39d1c5bba97c408'+'0126141d67f37be8538f5a8be740e484', + '603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4', + 'NIST 800-38A, F.4.5, OFB and AES-256', + dict(mode='OFB', iv='000102030405060708090a0b0c0d0e0f')), + + ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', + '874d6191b620e3261bef6864990db6ce'+'9806f66b7970fdff8617187bb9fffdff'+ + '5ae4df3edbd5d35e5b4f09020db03eab'+'1e031dda2fbe03d1792170a0f3009cee', + '2b7e151628aed2a6abf7158809cf4f3c', + 'NIST 800-38A, F.5.1, CTR and AES-128', + dict(mode='CTR', ctr_params=dict(nbits=16, prefix='f0f1f2f3f4f5f6f7f8f9fafbfcfd', initial_value=0xfeff))), + + ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', + '1abc932417521ca24f2b0459fe7e6e0b'+'090339ec0aa6faefd5ccc2c6f4ce8e94'+ + '1e36b26bd1ebc670d1bd1d665620abf7'+'4f78a7f6d29809585a97daec58c6b050', + '8e73b0f7da0e6452c810f32b809079e562f8ead2522c6b7b', + 'NIST 800-38A, F.5.3, CTR and AES-192', + dict(mode='CTR', ctr_params=dict(nbits=16, 
prefix='f0f1f2f3f4f5f6f7f8f9fafbfcfd', initial_value=0xfeff))), + + ('6bc1bee22e409f96e93d7e117393172a'+'ae2d8a571e03ac9c9eb76fac45af8e51'+ + '30c81c46a35ce411e5fbc1191a0a52ef'+'f69f2445df4f9b17ad2b417be66c3710', + '601ec313775789a5b7a7f504bbf3d228'+'f443e3ca4d62b59aca84e990cacaf5c5'+ + '2b0930daa23de94ce87017ba2d84988d'+'dfc9c58db67aada613c2dd08457941a6', + '603deb1015ca71be2b73aef0857d77811f352c073b6108d72d9810a30914dff4', + 'NIST 800-38A, F.5.5, CTR and AES-256', + dict(mode='CTR', ctr_params=dict(nbits=16, prefix='f0f1f2f3f4f5f6f7f8f9fafbfcfd', initial_value=0xfeff))), + + # RFC 3686 test vectors + # This is a list of (plaintext, ciphertext, key[, description[, params]]) tuples. + ('53696e676c6520626c6f636b206d7367', 'e4095d4fb7a7b3792d6175a3261311b8', + 'ae6852f8121067cc4bf7a5765577f39e', + 'RFC 3686 Test Vector #1: Encrypting 16 octets using AES-CTR with 128-bit key', + dict(mode='CTR', ctr_params=dict(nbits=32, prefix='00000030'+'0000000000000000'))), + ('000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f', + '5104a106168a72d9790d41ee8edad388eb2e1efc46da57c8fce630df9141be28', + '7e24067817fae0d743d6ce1f32539163', + 'RFC 3686 Test Vector #2: Encrypting 32 octets using AES-CTR with 128-bit key', + dict(mode='CTR', ctr_params=dict(nbits=32, prefix='006cb6db'+'c0543b59da48d90b'))), + ('000102030405060708090a0b0c0d0e0f'+'101112131415161718191a1b1c1d1e1f'+'20212223', + 'c1cf48a89f2ffdd9cf4652e9efdb72d7'+'4540a42bde6d7836d59a5ceaaef31053'+'25b2072f', + '7691be035e5020a8ac6e618529f9a0dc', + 'RFC 3686 Test Vector #3: Encrypting 36 octets using AES-CTR with 128-bit key', + dict(mode='CTR', ctr_params=dict(nbits=32, prefix='00e0017b'+'27777f3f4a1786f0'))), + ('53696e676c6520626c6f636b206d7367', + '4b55384fe259c9c84e7935a003cbe928', + '16af5b145fc9f579c175f93e3bfb0eed'+'863d06ccfdb78515', + 'RFC 3686 Test Vector #4: Encrypting 16 octets using AES-CTR with 192-bit key', + dict(mode='CTR', ctr_params=dict(nbits=32, prefix='00000048'+'36733c147d6d93cb'))), + ('000102030405060708090a0b0c0d0e0f'+'101112131415161718191a1b1c1d1e1f', + '453243fc609b23327edfaafa7131cd9f'+'8490701c5ad4a79cfc1fe0ff42f4fb00', + '7c5cb2401b3dc33c19e7340819e0f69c'+'678c3db8e6f6a91a', + 'RFC 3686 Test Vector #5: Encrypting 32 octets using AES-CTR with 192-bit key', + dict(mode='CTR', ctr_params=dict(nbits=32, prefix='0096b03b'+'020c6eadc2cb500d'))), + ('000102030405060708090a0b0c0d0e0f'+'101112131415161718191a1b1c1d1e1f'+'20212223', + '96893fc55e5c722f540b7dd1ddf7e758'+'d288bc95c69165884536c811662f2188'+'abee0935', + '02bf391ee8ecb159b959617b0965279b'+'f59b60a786d3e0fe', + 'RFC 3686 Test Vector #6: Encrypting 36 octets using AES-CTR with 192-bit key', + dict(mode='CTR', ctr_params=dict(nbits=32, prefix='0007bdfd'+'5cbd60278dcc0912'))), + ('53696e676c6520626c6f636b206d7367', + '145ad01dbf824ec7560863dc71e3e0c0', + '776beff2851db06f4c8a0542c8696f6c'+'6a81af1eec96b4d37fc1d689e6c1c104', + 'RFC 3686 Test Vector #7: Encrypting 16 octets using AES-CTR with 256-bit key', + dict(mode='CTR', ctr_params=dict(nbits=32, prefix='00000060'+'db5672c97aa8f0b2'))), + ('000102030405060708090a0b0c0d0e0f'+'101112131415161718191a1b1c1d1e1f', + 'f05e231b3894612c49ee000b804eb2a9'+'b8306b508f839d6a5530831d9344af1c', + 'f6d66d6bd52d59bb0796365879eff886'+'c66dd51a5b6a99744b50590c87a23884', + 'RFC 3686 Test Vector #8: Encrypting 32 octets using AES-CTR with 256-bit key', + dict(mode='CTR', ctr_params=dict(nbits=32, prefix='00faac24'+'c1585ef15a43d875'))), + ('000102030405060708090a0b0c0d0e0f'+'101112131415161718191a1b1c1d1e1f'+'20212223', 
+ 'eb6c52821d0bbbf7ce7594462aca4faa'+'b407df866569fd07f48cc0b583d6071f'+'1ec0e6b8', + 'ff7a617ce69148e4f1726e2f43581de2'+'aa62d9f805532edff1eed687fb54153d', + 'RFC 3686 Test Vector #9: Encrypting 36 octets using AES-CTR with 256-bit key', + dict(mode='CTR', ctr_params=dict(nbits=32, prefix='001cc5b7'+'51a51d70a1c11148'))), + + # The following test vectors have been generated with gpg v1.4.0. + # The command line used was: + # + # gpg -c -z 0 --cipher-algo AES --passphrase secret_passphrase \ + # --disable-mdc --s2k-mode 0 --output ct pt + # + # As result, the content of the file 'pt' is encrypted with a key derived + # from 'secret_passphrase' and written to file 'ct'. + # Test vectors must be extracted from 'ct', which is a collection of + # TLVs (see RFC4880 for all details): + # - the encrypted data (with the encrypted IV as prefix) is the payload + # of the TLV with tag 9 (Symmetrical Encrypted Data Packet). + # This is the ciphertext in the test vector. + # - inside the encrypted part, there is a further layer of TLVs. One must + # look for tag 11 (Literal Data Packet); in its payload, after a short + # but time dependent header, there is the content of file 'pt'. + # In the test vector, the plaintext is the complete set of TLVs that gets + # encrypted. It is not just the content of 'pt'. + # - the key is the leftmost 16 bytes of the SHA1 digest of the password. + # The test vector contains such shortened digest. + # + # Note that encryption uses a clear IV, and decryption an encrypted IV + ( 'ac18620270744fb4f647426c61636b4361745768697465436174', # Plaintext, 'BlackCatWhiteCat' + 'dc6b9e1f095de609765c59983db5956ae4f63aea7405389d2ebb', # Ciphertext + '5baa61e4c9b93f3f0682250b6cf8331b', # Key (hash of 'password') + 'GPG Test Vector #1', + dict(mode='OPENPGP', iv='3d7d3e62282add7eb203eeba5c800733', encrypted_iv='fd934601ef49cb58b6d9aebca6056bdb96ef' ) ), +] + +def get_tests(config={}): + from Crypto.Cipher import AES + from .common import make_block_tests + return make_block_tests(AES, "AES", test_data) + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Cipher/test_ARC2.py b/Lib/site-packages/Crypto/SelfTest/Cipher/test_ARC2.py new file mode 100644 index 0000000..d9197f8 --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Cipher/test_ARC2.py @@ -0,0 +1,124 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/ARC2.py: Self-test for the Alleged-RC2 cipher +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-test suite for Crypto.Cipher.ARC2""" + +__revision__ = "$Id$" + +from .common import dict # For compatibility with Python 2.1 and 2.2 + +import unittest +from Crypto.Util.py3compat import * + +# This is a list of (plaintext, ciphertext, key[, description[, extra_params]]) tuples. +test_data = [ + # Test vectors from RFC 2268 + + # 63-bit effective key length + ('0000000000000000', 'ebb773f993278eff', '0000000000000000', + 'RFC2268-1', dict(effective_keylen=63)), + + # 64-bit effective key length + ('ffffffffffffffff', '278b27e42e2f0d49', 'ffffffffffffffff', + 'RFC2268-2', dict(effective_keylen=64)), + ('1000000000000001', '30649edf9be7d2c2', '3000000000000000', + 'RFC2268-3', dict(effective_keylen=64)), + ('0000000000000000', '61a8a244adacccf0', '88', + 'RFC2268-4', dict(effective_keylen=64)), + ('0000000000000000', '6ccf4308974c267f', '88bca90e90875a', + 'RFC2268-5', dict(effective_keylen=64)), + ('0000000000000000', '1a807d272bbe5db1', '88bca90e90875a7f0f79c384627bafb2', + 'RFC2268-6', dict(effective_keylen=64)), + + # 128-bit effective key length + ('0000000000000000', '2269552ab0f85ca6', '88bca90e90875a7f0f79c384627bafb2', + "RFC2268-7", dict(effective_keylen=128)), + ('0000000000000000', '5b78d3a43dfff1f1', + '88bca90e90875a7f0f79c384627bafb216f80a6f85920584c42fceb0be255daf1e', + "RFC2268-8", dict(effective_keylen=129)), + + # Test vectors from PyCrypto 2.0.1's testdata.py + # 1024-bit effective key length + ('0000000000000000', '624fb3e887419e48', '5068696c6970476c617373', + 'PCTv201-0'), + ('ffffffffffffffff', '79cadef44c4a5a85', '5068696c6970476c617373', + 'PCTv201-1'), + ('0001020304050607', '90411525b34e4c2c', '5068696c6970476c617373', + 'PCTv201-2'), + ('0011223344556677', '078656aaba61cbfb', '5068696c6970476c617373', + 'PCTv201-3'), + ('0000000000000000', 'd7bcc5dbb4d6e56a', 'ffffffffffffffff', + 'PCTv201-4'), + ('ffffffffffffffff', '7259018ec557b357', 'ffffffffffffffff', + 'PCTv201-5'), + ('0001020304050607', '93d20a497f2ccb62', 'ffffffffffffffff', + 'PCTv201-6'), + ('0011223344556677', 'cb15a7f819c0014d', 'ffffffffffffffff', + 'PCTv201-7'), + ('0000000000000000', '63ac98cdf3843a7a', 'ffffffffffffffff5065746572477265656e6177617953e5ffe553', + 'PCTv201-8'), + ('ffffffffffffffff', '3fb49e2fa12371dd', 'ffffffffffffffff5065746572477265656e6177617953e5ffe553', + 'PCTv201-9'), + ('0001020304050607', '46414781ab387d5f', 'ffffffffffffffff5065746572477265656e6177617953e5ffe553', + 'PCTv201-10'), + ('0011223344556677', 'be09dc81feaca271', 'ffffffffffffffff5065746572477265656e6177617953e5ffe553', + 'PCTv201-11'), + ('0000000000000000', 'e64221e608be30ab', '53e5ffe553', + 'PCTv201-12'), + ('ffffffffffffffff', '862bc60fdcd4d9a9', '53e5ffe553', + 'PCTv201-13'), + ('0001020304050607', '6a34da50fa5e47de', '53e5ffe553', + 'PCTv201-14'), + ('0011223344556677', '584644c34503122c', '53e5ffe553', + 'PCTv201-15'), +] + +class BufferOverflowTest(unittest.TestCase): + # Test a buffer overflow found in older versions of PyCrypto + + def setUp(self): + global ARC2 + from Crypto.Cipher import ARC2 + + def runTest(self): + """ARC2 with keylength > 128""" + key = "x" * 16384 + mode = ARC2.MODE_ECB + self.assertRaises(ValueError, ARC2.new, key, mode) + +def get_tests(config={}): + from Crypto.Cipher import ARC2 + from .common import make_block_tests + + tests = make_block_tests(ARC2, "ARC2", test_data) + tests.append(BufferOverflowTest()) + + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: 
unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Cipher/test_ARC4.py b/Lib/site-packages/Crypto/SelfTest/Cipher/test_ARC4.py new file mode 100644 index 0000000..b94116c --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Cipher/test_ARC4.py @@ -0,0 +1,81 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/ARC4.py: Self-test for the Alleged-RC4 cipher +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Cipher.ARC4""" + +__revision__ = "$Id$" + +from Crypto.Util.py3compat import * + +# This is a list of (plaintext, ciphertext, key[, description]) tuples. +test_data = [ + # Test vectors from Eric Rescorla's message with the subject + # "RC4 compatibility testing", sent to the cipherpunks mailing list on + # September 13, 1994. 
+ # http://cypherpunks.venona.com/date/1994/09/msg00420.html + + ('0123456789abcdef', '75b7878099e0c596', '0123456789abcdef', + 'Test vector 0'), + + ('0000000000000000', '7494c2e7104b0879', '0123456789abcdef', + 'Test vector 1'), + + ('0000000000000000', 'de188941a3375d3a', '0000000000000000', + 'Test vector 2'), + + ('00000000000000000000', 'd6a141a7ec3c38dfbd61', 'ef012345', + 'Test vector 3'), + + ('01' * 512, + '7595c3e6114a09780c4ad452338e1ffd9a1be9498f813d76533449b6778dcad8' + + 'c78a8d2ba9ac66085d0e53d59c26c2d1c490c1ebbe0ce66d1b6b1b13b6b919b8' + + '47c25a91447a95e75e4ef16779cde8bf0a95850e32af9689444fd377108f98fd' + + 'cbd4e726567500990bcc7e0ca3c4aaa304a387d20f3b8fbbcd42a1bd311d7a43' + + '03dda5ab078896ae80c18b0af66dff319616eb784e495ad2ce90d7f772a81747' + + 'b65f62093b1e0db9e5ba532fafec47508323e671327df9444432cb7367cec82f' + + '5d44c0d00b67d650a075cd4b70dedd77eb9b10231b6b5b741347396d62897421' + + 'd43df9b42e446e358e9c11a9b2184ecbef0cd8e7a877ef968f1390ec9b3d35a5' + + '585cb009290e2fcde7b5ec66d9084be44055a619d9dd7fc3166f9487f7cb2729' + + '12426445998514c15d53a18c864ce3a2b7555793988126520eacf2e3066e230c' + + '91bee4dd5304f5fd0405b35bd99c73135d3d9bc335ee049ef69b3867bf2d7bd1' + + 'eaa595d8bfc0066ff8d31509eb0c6caa006c807a623ef84c3d33c195d23ee320' + + 'c40de0558157c822d4b8c569d849aed59d4e0fd7f379586b4b7ff684ed6a189f' + + '7486d49b9c4bad9ba24b96abf924372c8a8fffb10d55354900a77a3db5f205e1' + + 'b99fcd8660863a159ad4abe40fa48934163ddde542a6585540fd683cbfd8c00f' + + '12129a284deacc4cdefe58be7137541c047126c8d49e2755ab181ab7e940b0c0', + '0123456789abcdef', + "Test vector 4"), +] + +def get_tests(config={}): + from Crypto.Cipher import ARC4 + from .common import make_stream_tests + return make_stream_tests(ARC4, "ARC4", test_data) + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Cipher/test_Blowfish.py b/Lib/site-packages/Crypto/SelfTest/Cipher/test_Blowfish.py new file mode 100644 index 0000000..c7c1503 --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Cipher/test_Blowfish.py @@ -0,0 +1,113 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/test_Blowfish.py: Self-test for the Blowfish cipher +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Cipher.Blowfish""" + +__revision__ = "$Id$" + +from Crypto.Util.py3compat import * + +# This is a list of (plaintext, ciphertext, key) tuples. 
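+# The later entries exercise Blowfish's variable key length: the plaintext
+# 'fedcba9876543210' is kept fixed while the key grows one byte at a time,
+# from 1 byte ('f0') up to 24 bytes. As an illustrative sketch only (the
+# actual driver is make_block_tests() in .common), the first vector
+# corresponds roughly to:
+#
+#   from binascii import unhexlify
+#   from Crypto.Cipher import Blowfish
+#   cipher = Blowfish.new(unhexlify('0000000000000000'), Blowfish.MODE_ECB)
+#   assert cipher.encrypt(unhexlify('0000000000000000')) == unhexlify('4ef997456198dd78')
+#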
+test_data = [ + # Test vectors from http://www.schneier.com/code/vectors.txt + ('0000000000000000', '4ef997456198dd78', '0000000000000000'), + ('ffffffffffffffff', '51866fd5b85ecb8a', 'ffffffffffffffff'), + ('1000000000000001', '7d856f9a613063f2', '3000000000000000'), + ('1111111111111111', '2466dd878b963c9d', '1111111111111111'), + ('1111111111111111', '61f9c3802281b096', '0123456789abcdef'), + ('0123456789abcdef', '7d0cc630afda1ec7', '1111111111111111'), + ('0000000000000000', '4ef997456198dd78', '0000000000000000'), + ('0123456789abcdef', '0aceab0fc6a0a28d', 'fedcba9876543210'), + ('01a1d6d039776742', '59c68245eb05282b', '7ca110454a1a6e57'), + ('5cd54ca83def57da', 'b1b8cc0b250f09a0', '0131d9619dc1376e'), + ('0248d43806f67172', '1730e5778bea1da4', '07a1133e4a0b2686'), + ('51454b582ddf440a', 'a25e7856cf2651eb', '3849674c2602319e'), + ('42fd443059577fa2', '353882b109ce8f1a', '04b915ba43feb5b6'), + ('059b5e0851cf143a', '48f4d0884c379918', '0113b970fd34f2ce'), + ('0756d8e0774761d2', '432193b78951fc98', '0170f175468fb5e6'), + ('762514b829bf486a', '13f04154d69d1ae5', '43297fad38e373fe'), + ('3bdd119049372802', '2eedda93ffd39c79', '07a7137045da2a16'), + ('26955f6835af609a', 'd887e0393c2da6e3', '04689104c2fd3b2f'), + ('164d5e404f275232', '5f99d04f5b163969', '37d06bb516cb7546'), + ('6b056e18759f5cca', '4a057a3b24d3977b', '1f08260d1ac2465e'), + ('004bd6ef09176062', '452031c1e4fada8e', '584023641aba6176'), + ('480d39006ee762f2', '7555ae39f59b87bd', '025816164629b007'), + ('437540c8698f3cfa', '53c55f9cb49fc019', '49793ebc79b3258f'), + ('072d43a077075292', '7a8e7bfa937e89a3', '4fb05e1515ab73a7'), + ('02fe55778117f12a', 'cf9c5d7a4986adb5', '49e95d6d4ca229bf'), + ('1d9d5c5018f728c2', 'd1abb290658bc778', '018310dc409b26d6'), + ('305532286d6f295a', '55cb3774d13ef201', '1c587f1c13924fef'), + ('0123456789abcdef', 'fa34ec4847b268b2', '0101010101010101'), + ('0123456789abcdef', 'a790795108ea3cae', '1f1f1f1f0e0e0e0e'), + ('0123456789abcdef', 'c39e072d9fac631d', 'e0fee0fef1fef1fe'), + ('ffffffffffffffff', '014933e0cdaff6e4', '0000000000000000'), + ('0000000000000000', 'f21e9a77b71c49bc', 'ffffffffffffffff'), + ('0000000000000000', '245946885754369a', '0123456789abcdef'), + ('ffffffffffffffff', '6b5c5a9c5d9e0a5a', 'fedcba9876543210'), + ('fedcba9876543210', 'f9ad597c49db005e', 'f0'), + ('fedcba9876543210', 'e91d21c1d961a6d6', 'f0e1'), + ('fedcba9876543210', 'e9c2b70a1bc65cf3', 'f0e1d2'), + ('fedcba9876543210', 'be1e639408640f05', 'f0e1d2c3'), + ('fedcba9876543210', 'b39e44481bdb1e6e', 'f0e1d2c3b4'), + ('fedcba9876543210', '9457aa83b1928c0d', 'f0e1d2c3b4a5'), + ('fedcba9876543210', '8bb77032f960629d', 'f0e1d2c3b4a596'), + ('fedcba9876543210', 'e87a244e2cc85e82', 'f0e1d2c3b4a59687'), + ('fedcba9876543210', '15750e7a4f4ec577', 'f0e1d2c3b4a5968778'), + ('fedcba9876543210', '122ba70b3ab64ae0', 'f0e1d2c3b4a596877869'), + ('fedcba9876543210', '3a833c9affc537f6', 'f0e1d2c3b4a5968778695a'), + ('fedcba9876543210', '9409da87a90f6bf2', 'f0e1d2c3b4a5968778695a4b'), + ('fedcba9876543210', '884f80625060b8b4', 'f0e1d2c3b4a5968778695a4b3c'), + ('fedcba9876543210', '1f85031c19e11968', 'f0e1d2c3b4a5968778695a4b3c2d'), + ('fedcba9876543210', '79d9373a714ca34f', 'f0e1d2c3b4a5968778695a4b3c2d1e'), + ('fedcba9876543210', '93142887ee3be15c', + 'f0e1d2c3b4a5968778695a4b3c2d1e0f'), + ('fedcba9876543210', '03429e838ce2d14b', + 'f0e1d2c3b4a5968778695a4b3c2d1e0f00'), + ('fedcba9876543210', 'a4299e27469ff67b', + 'f0e1d2c3b4a5968778695a4b3c2d1e0f0011'), + ('fedcba9876543210', 'afd5aed1c1bc96a8', + 'f0e1d2c3b4a5968778695a4b3c2d1e0f001122'), 
+ ('fedcba9876543210', '10851c0e3858da9f', + 'f0e1d2c3b4a5968778695a4b3c2d1e0f00112233'), + ('fedcba9876543210', 'e6f51ed79b9db21f', + 'f0e1d2c3b4a5968778695a4b3c2d1e0f0011223344'), + ('fedcba9876543210', '64a6e14afd36b46f', + 'f0e1d2c3b4a5968778695a4b3c2d1e0f001122334455'), + ('fedcba9876543210', '80c7d7d45a5479ad', + 'f0e1d2c3b4a5968778695a4b3c2d1e0f00112233445566'), + ('fedcba9876543210', '05044b62fa52d080', + 'f0e1d2c3b4a5968778695a4b3c2d1e0f0011223344556677'), +] + +def get_tests(config={}): + from Crypto.Cipher import Blowfish + from .common import make_block_tests + return make_block_tests(Blowfish, "Blowfish", test_data) + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Cipher/test_CAST.py b/Lib/site-packages/Crypto/SelfTest/Cipher/test_CAST.py new file mode 100644 index 0000000..bf3b112 --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Cipher/test_CAST.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/CAST.py: Self-test for the CAST-128 (CAST5) cipher +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Cipher.CAST""" + +__revision__ = "$Id$" + +from Crypto.Util.py3compat import * + +# This is a list of (plaintext, ciphertext, key) tuples. +test_data = [ + # Test vectors from RFC 2144, B.1 + ('0123456789abcdef', '238b4fe5847e44b2', + '0123456712345678234567893456789a', + '128-bit key'), + + ('0123456789abcdef', 'eb6a711a2c02271b', + '01234567123456782345', + '80-bit key'), + + ('0123456789abcdef', '7ac816d16e9b302e', + '0123456712', + '40-bit key'), +] + +def get_tests(config={}): + from Crypto.Cipher import CAST + from .common import make_block_tests + return make_block_tests(CAST, "CAST", test_data) + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Cipher/test_DES.py b/Lib/site-packages/Crypto/SelfTest/Cipher/test_DES.py new file mode 100644 index 0000000..352d5a4 --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Cipher/test_DES.py @@ -0,0 +1,339 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/DES.py: Self-test for the (Single) DES cipher +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. 
To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Cipher.DES""" + +__revision__ = "$Id$" + +from .common import dict # For compatibility with Python 2.1 and 2.2 +from Crypto.Util.py3compat import * +import unittest + +# This is a list of (plaintext, ciphertext, key, description) tuples. +SP800_17_B1_KEY = '01' * 8 +SP800_17_B2_PT = '00' * 8 +test_data = [ + # Test vectors from Appendix A of NIST SP 800-17 + # "Modes of Operation Validation System (MOVS): Requirements and Procedures" + # http://csrc.nist.gov/publications/nistpubs/800-17/800-17.pdf + + # Appendix A - "Sample Round Outputs for the DES" + ('0000000000000000', '82dcbafbdeab6602', '10316e028c8f3b4a', + "NIST SP800-17 A"), + + # Table B.1 - Variable Plaintext Known Answer Test + ('8000000000000000', '95f8a5e5dd31d900', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #0'), + ('4000000000000000', 'dd7f121ca5015619', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #1'), + ('2000000000000000', '2e8653104f3834ea', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #2'), + ('1000000000000000', '4bd388ff6cd81d4f', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #3'), + ('0800000000000000', '20b9e767b2fb1456', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #4'), + ('0400000000000000', '55579380d77138ef', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #5'), + ('0200000000000000', '6cc5defaaf04512f', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #6'), + ('0100000000000000', '0d9f279ba5d87260', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #7'), + ('0080000000000000', 'd9031b0271bd5a0a', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #8'), + ('0040000000000000', '424250b37c3dd951', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #9'), + ('0020000000000000', 'b8061b7ecd9a21e5', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #10'), + ('0010000000000000', 'f15d0f286b65bd28', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #11'), + ('0008000000000000', 'add0cc8d6e5deba1', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #12'), + ('0004000000000000', 'e6d5f82752ad63d1', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #13'), + ('0002000000000000', 'ecbfe3bd3f591a5e', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #14'), + ('0001000000000000', 'f356834379d165cd', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #15'), + ('0000800000000000', '2b9f982f20037fa9', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #16'), + ('0000400000000000', '889de068a16f0be6', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #17'), + ('0000200000000000', 'e19e275d846a1298', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #18'), + ('0000100000000000', '329a8ed523d71aec', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #19'), + ('0000080000000000', 'e7fce22557d23c97', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #20'), + ('0000040000000000', '12a9f5817ff2d65d', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #21'), + ('0000020000000000', 'a484c3ad38dc9c19', 
SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #22'), + ('0000010000000000', 'fbe00a8a1ef8ad72', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #23'), + ('0000008000000000', '750d079407521363', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #24'), + ('0000004000000000', '64feed9c724c2faf', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #25'), + ('0000002000000000', 'f02b263b328e2b60', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #26'), + ('0000001000000000', '9d64555a9a10b852', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #27'), + ('0000000800000000', 'd106ff0bed5255d7', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #28'), + ('0000000400000000', 'e1652c6b138c64a5', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #29'), + ('0000000200000000', 'e428581186ec8f46', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #30'), + ('0000000100000000', 'aeb5f5ede22d1a36', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #31'), + ('0000000080000000', 'e943d7568aec0c5c', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #32'), + ('0000000040000000', 'df98c8276f54b04b', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #33'), + ('0000000020000000', 'b160e4680f6c696f', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #34'), + ('0000000010000000', 'fa0752b07d9c4ab8', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #35'), + ('0000000008000000', 'ca3a2b036dbc8502', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #36'), + ('0000000004000000', '5e0905517bb59bcf', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #37'), + ('0000000002000000', '814eeb3b91d90726', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #38'), + ('0000000001000000', '4d49db1532919c9f', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #39'), + ('0000000000800000', '25eb5fc3f8cf0621', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #40'), + ('0000000000400000', 'ab6a20c0620d1c6f', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #41'), + ('0000000000200000', '79e90dbc98f92cca', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #42'), + ('0000000000100000', '866ecedd8072bb0e', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #43'), + ('0000000000080000', '8b54536f2f3e64a8', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #44'), + ('0000000000040000', 'ea51d3975595b86b', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #45'), + ('0000000000020000', 'caffc6ac4542de31', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #46'), + ('0000000000010000', '8dd45a2ddf90796c', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #47'), + ('0000000000008000', '1029d55e880ec2d0', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #48'), + ('0000000000004000', '5d86cb23639dbea9', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #49'), + ('0000000000002000', '1d1ca853ae7c0c5f', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #50'), + ('0000000000001000', 'ce332329248f3228', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #51'), + ('0000000000000800', '8405d1abe24fb942', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #52'), + ('0000000000000400', 'e643d78090ca4207', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #53'), + ('0000000000000200', '48221b9937748a23', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #54'), + ('0000000000000100', 'dd7c0bbd61fafd54', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #55'), + ('0000000000000080', '2fbc291a570db5c4', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #56'), + ('0000000000000040', 'e07c30d7e4e26e12', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #57'), + ('0000000000000020', '0953e2258e8e90a1', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #58'), + ('0000000000000010', '5b711bc4ceebf2ee', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #59'), + ('0000000000000008', 'cc083f1e6d9e85f6', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #60'), + ('0000000000000004', 'd2fd8867d50d2dfe', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #61'), + ('0000000000000002', '06e7ea22ce92708f', SP800_17_B1_KEY, + 'NIST SP800-17 
B.1 #62'), + ('0000000000000001', '166b40b44aba4bd6', SP800_17_B1_KEY, + 'NIST SP800-17 B.1 #63'), + + # Table B.2 - Variable Key Known Answer Test + (SP800_17_B2_PT, '95a8d72813daa94d', '8001010101010101', + 'NIST SP800-17 B.2 #0'), + (SP800_17_B2_PT, '0eec1487dd8c26d5', '4001010101010101', + 'NIST SP800-17 B.2 #1'), + (SP800_17_B2_PT, '7ad16ffb79c45926', '2001010101010101', + 'NIST SP800-17 B.2 #2'), + (SP800_17_B2_PT, 'd3746294ca6a6cf3', '1001010101010101', + 'NIST SP800-17 B.2 #3'), + (SP800_17_B2_PT, '809f5f873c1fd761', '0801010101010101', + 'NIST SP800-17 B.2 #4'), + (SP800_17_B2_PT, 'c02faffec989d1fc', '0401010101010101', + 'NIST SP800-17 B.2 #5'), + (SP800_17_B2_PT, '4615aa1d33e72f10', '0201010101010101', + 'NIST SP800-17 B.2 #6'), + (SP800_17_B2_PT, '2055123350c00858', '0180010101010101', + 'NIST SP800-17 B.2 #7'), + (SP800_17_B2_PT, 'df3b99d6577397c8', '0140010101010101', + 'NIST SP800-17 B.2 #8'), + (SP800_17_B2_PT, '31fe17369b5288c9', '0120010101010101', + 'NIST SP800-17 B.2 #9'), + (SP800_17_B2_PT, 'dfdd3cc64dae1642', '0110010101010101', + 'NIST SP800-17 B.2 #10'), + (SP800_17_B2_PT, '178c83ce2b399d94', '0108010101010101', + 'NIST SP800-17 B.2 #11'), + (SP800_17_B2_PT, '50f636324a9b7f80', '0104010101010101', + 'NIST SP800-17 B.2 #12'), + (SP800_17_B2_PT, 'a8468ee3bc18f06d', '0102010101010101', + 'NIST SP800-17 B.2 #13'), + (SP800_17_B2_PT, 'a2dc9e92fd3cde92', '0101800101010101', + 'NIST SP800-17 B.2 #14'), + (SP800_17_B2_PT, 'cac09f797d031287', '0101400101010101', + 'NIST SP800-17 B.2 #15'), + (SP800_17_B2_PT, '90ba680b22aeb525', '0101200101010101', + 'NIST SP800-17 B.2 #16'), + (SP800_17_B2_PT, 'ce7a24f350e280b6', '0101100101010101', + 'NIST SP800-17 B.2 #17'), + (SP800_17_B2_PT, '882bff0aa01a0b87', '0101080101010101', + 'NIST SP800-17 B.2 #18'), + (SP800_17_B2_PT, '25610288924511c2', '0101040101010101', + 'NIST SP800-17 B.2 #19'), + (SP800_17_B2_PT, 'c71516c29c75d170', '0101020101010101', + 'NIST SP800-17 B.2 #20'), + (SP800_17_B2_PT, '5199c29a52c9f059', '0101018001010101', + 'NIST SP800-17 B.2 #21'), + (SP800_17_B2_PT, 'c22f0a294a71f29f', '0101014001010101', + 'NIST SP800-17 B.2 #22'), + (SP800_17_B2_PT, 'ee371483714c02ea', '0101012001010101', + 'NIST SP800-17 B.2 #23'), + (SP800_17_B2_PT, 'a81fbd448f9e522f', '0101011001010101', + 'NIST SP800-17 B.2 #24'), + (SP800_17_B2_PT, '4f644c92e192dfed', '0101010801010101', + 'NIST SP800-17 B.2 #25'), + (SP800_17_B2_PT, '1afa9a66a6df92ae', '0101010401010101', + 'NIST SP800-17 B.2 #26'), + (SP800_17_B2_PT, 'b3c1cc715cb879d8', '0101010201010101', + 'NIST SP800-17 B.2 #27'), + (SP800_17_B2_PT, '19d032e64ab0bd8b', '0101010180010101', + 'NIST SP800-17 B.2 #28'), + (SP800_17_B2_PT, '3cfaa7a7dc8720dc', '0101010140010101', + 'NIST SP800-17 B.2 #29'), + (SP800_17_B2_PT, 'b7265f7f447ac6f3', '0101010120010101', + 'NIST SP800-17 B.2 #30'), + (SP800_17_B2_PT, '9db73b3c0d163f54', '0101010110010101', + 'NIST SP800-17 B.2 #31'), + (SP800_17_B2_PT, '8181b65babf4a975', '0101010108010101', + 'NIST SP800-17 B.2 #32'), + (SP800_17_B2_PT, '93c9b64042eaa240', '0101010104010101', + 'NIST SP800-17 B.2 #33'), + (SP800_17_B2_PT, '5570530829705592', '0101010102010101', + 'NIST SP800-17 B.2 #34'), + (SP800_17_B2_PT, '8638809e878787a0', '0101010101800101', + 'NIST SP800-17 B.2 #35'), + (SP800_17_B2_PT, '41b9a79af79ac208', '0101010101400101', + 'NIST SP800-17 B.2 #36'), + (SP800_17_B2_PT, '7a9be42f2009a892', '0101010101200101', + 'NIST SP800-17 B.2 #37'), + (SP800_17_B2_PT, '29038d56ba6d2745', '0101010101100101', + 'NIST SP800-17 B.2 #38'), + (SP800_17_B2_PT, 
'5495c6abf1e5df51', '0101010101080101', + 'NIST SP800-17 B.2 #39'), + (SP800_17_B2_PT, 'ae13dbd561488933', '0101010101040101', + 'NIST SP800-17 B.2 #40'), + (SP800_17_B2_PT, '024d1ffa8904e389', '0101010101020101', + 'NIST SP800-17 B.2 #41'), + (SP800_17_B2_PT, 'd1399712f99bf02e', '0101010101018001', + 'NIST SP800-17 B.2 #42'), + (SP800_17_B2_PT, '14c1d7c1cffec79e', '0101010101014001', + 'NIST SP800-17 B.2 #43'), + (SP800_17_B2_PT, '1de5279dae3bed6f', '0101010101012001', + 'NIST SP800-17 B.2 #44'), + (SP800_17_B2_PT, 'e941a33f85501303', '0101010101011001', + 'NIST SP800-17 B.2 #45'), + (SP800_17_B2_PT, 'da99dbbc9a03f379', '0101010101010801', + 'NIST SP800-17 B.2 #46'), + (SP800_17_B2_PT, 'b7fc92f91d8e92e9', '0101010101010401', + 'NIST SP800-17 B.2 #47'), + (SP800_17_B2_PT, 'ae8e5caa3ca04e85', '0101010101010201', + 'NIST SP800-17 B.2 #48'), + (SP800_17_B2_PT, '9cc62df43b6eed74', '0101010101010180', + 'NIST SP800-17 B.2 #49'), + (SP800_17_B2_PT, 'd863dbb5c59a91a0', '0101010101010140', + 'NIST SP800-17 B.2 #50'), + (SP800_17_B2_PT, 'a1ab2190545b91d7', '0101010101010120', + 'NIST SP800-17 B.2 #51'), + (SP800_17_B2_PT, '0875041e64c570f7', '0101010101010110', + 'NIST SP800-17 B.2 #52'), + (SP800_17_B2_PT, '5a594528bebef1cc', '0101010101010108', + 'NIST SP800-17 B.2 #53'), + (SP800_17_B2_PT, 'fcdb3291de21f0c0', '0101010101010104', + 'NIST SP800-17 B.2 #54'), + (SP800_17_B2_PT, '869efd7f9f265a09', '0101010101010102', + 'NIST SP800-17 B.2 #55'), +] + +class RonRivestTest(unittest.TestCase): + """ Ronald L. Rivest's DES test, see + http://people.csail.mit.edu/rivest/Destest.txt + ABSTRACT + -------- + + We present a simple way to test the correctness of a DES implementation: + Use the recurrence relation: + + X0 = 9474B8E8C73BCA7D (hexadecimal) + + X(i+1) = IF (i is even) THEN E(Xi,Xi) ELSE D(Xi,Xi) + + to compute a sequence of 64-bit values: X0, X1, X2, ..., X16. Here + E(X,K) denotes the DES encryption of X using key K, and D(X,K) denotes + the DES decryption of X using key K. If you obtain + + X16 = 1B1A2DDB4C642438 + + your implementation does not have any of the 36,568 possible single-fault + errors described herein. + """ + def runTest(self): + from Crypto.Cipher import DES + from binascii import b2a_hex + + X = [] + X[0:] = [b('\x94\x74\xB8\xE8\xC7\x3B\xCA\x7D')] + + for i in range(16): + c = DES.new(X[i],DES.MODE_ECB) + if not (i&1): # (num&1) returns 1 for odd numbers + X[i+1:] = [c.encrypt(X[i])] # even + else: + X[i+1:] = [c.decrypt(X[i])] # odd + + self.assertEqual(b2a_hex(X[16]), + b2a_hex(b('\x1B\x1A\x2D\xDB\x4C\x64\x24\x38'))) + +def get_tests(config={}): + from Crypto.Cipher import DES + from .common import make_block_tests + return make_block_tests(DES, "DES", test_data) + [RonRivestTest()] + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Cipher/test_DES3.py b/Lib/site-packages/Crypto/SelfTest/Cipher/test_DES3.py new file mode 100644 index 0000000..8948695 --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Cipher/test_DES3.py @@ -0,0 +1,333 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/DES3.py: Self-test for the Triple-DES cipher +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. 
To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Cipher.DES3""" + +__revision__ = "$Id$" + +from .common import dict # For compatibility with Python 2.1 and 2.2 +from Crypto.Util.py3compat import * +from binascii import hexlify + +# This is a list of (plaintext, ciphertext, key, description) tuples. +SP800_20_A1_KEY = '01' * 24 +SP800_20_A2_PT = '00' * 8 +test_data = [ + # Test vector from Appendix B of NIST SP 800-67 + # "Recommendation for the Triple Data Encryption Algorithm (TDEA) Block + # Cipher" + # http://csrc.nist.gov/publications/nistpubs/800-67/SP800-67.pdf + ('54686520717566636b2062726f776e20666f78206a756d70', + 'a826fd8ce53b855fcce21c8112256fe668d5c05dd9b6b900', + '0123456789abcdef23456789abcdef01456789abcdef0123', + 'NIST SP800-67 B.1'), + + # Test vectors "The Multi-block Message Test (MMT) for DES and TDES" + # http://csrc.nist.gov/groups/STM/cavp/documents/des/DESMMT.pdf + ('326a494cd33fe756', 'b22b8d66de970692', + '627f460e08104a1043cd265d5840eaf1313edf97df2a8a8c', + 'DESMMT #1', dict(mode='CBC', iv='8e29f75ea77e5475')), + + ('84401f78fe6c10876d8ea23094ea5309', '7b1f7c7e3b1c948ebd04a75ffba7d2f5', + '37ae5ebf46dff2dc0754b94f31cbb3855e7fd36dc870bfae', + 'DESMMT #2', dict(mode='CBC', iv='3d1de3cc132e3b65')), + + # Test vectors from Appendix A of NIST SP 800-20 + # "Modes of Operation Validation System for the Triple Data Encryption + # Algorithm (TMOVS): Requirements and Procedures" + # http://csrc.nist.gov/publications/nistpubs/800-20/800-20.pdf + + # Table A.1 - Variable Plaintext Known Answer Test + ('8000000000000000', '95f8a5e5dd31d900', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #0'), + ('4000000000000000', 'dd7f121ca5015619', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #1'), + ('2000000000000000', '2e8653104f3834ea', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #2'), + ('1000000000000000', '4bd388ff6cd81d4f', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #3'), + ('0800000000000000', '20b9e767b2fb1456', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #4'), + ('0400000000000000', '55579380d77138ef', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #5'), + ('0200000000000000', '6cc5defaaf04512f', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #6'), + ('0100000000000000', '0d9f279ba5d87260', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #7'), + ('0080000000000000', 'd9031b0271bd5a0a', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #8'), + ('0040000000000000', '424250b37c3dd951', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #9'), + ('0020000000000000', 'b8061b7ecd9a21e5', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #10'), + ('0010000000000000', 'f15d0f286b65bd28', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #11'), + ('0008000000000000', 'add0cc8d6e5deba1', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #12'), + ('0004000000000000', 'e6d5f82752ad63d1', 
SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #13'), + ('0002000000000000', 'ecbfe3bd3f591a5e', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #14'), + ('0001000000000000', 'f356834379d165cd', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #15'), + ('0000800000000000', '2b9f982f20037fa9', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #16'), + ('0000400000000000', '889de068a16f0be6', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #17'), + ('0000200000000000', 'e19e275d846a1298', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #18'), + ('0000100000000000', '329a8ed523d71aec', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #19'), + ('0000080000000000', 'e7fce22557d23c97', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #20'), + ('0000040000000000', '12a9f5817ff2d65d', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #21'), + ('0000020000000000', 'a484c3ad38dc9c19', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #22'), + ('0000010000000000', 'fbe00a8a1ef8ad72', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #23'), + ('0000008000000000', '750d079407521363', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #24'), + ('0000004000000000', '64feed9c724c2faf', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #25'), + ('0000002000000000', 'f02b263b328e2b60', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #26'), + ('0000001000000000', '9d64555a9a10b852', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #27'), + ('0000000800000000', 'd106ff0bed5255d7', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #28'), + ('0000000400000000', 'e1652c6b138c64a5', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #29'), + ('0000000200000000', 'e428581186ec8f46', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #30'), + ('0000000100000000', 'aeb5f5ede22d1a36', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #31'), + ('0000000080000000', 'e943d7568aec0c5c', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #32'), + ('0000000040000000', 'df98c8276f54b04b', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #33'), + ('0000000020000000', 'b160e4680f6c696f', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #34'), + ('0000000010000000', 'fa0752b07d9c4ab8', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #35'), + ('0000000008000000', 'ca3a2b036dbc8502', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #36'), + ('0000000004000000', '5e0905517bb59bcf', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #37'), + ('0000000002000000', '814eeb3b91d90726', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #38'), + ('0000000001000000', '4d49db1532919c9f', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #39'), + ('0000000000800000', '25eb5fc3f8cf0621', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #40'), + ('0000000000400000', 'ab6a20c0620d1c6f', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #41'), + ('0000000000200000', '79e90dbc98f92cca', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #42'), + ('0000000000100000', '866ecedd8072bb0e', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #43'), + ('0000000000080000', '8b54536f2f3e64a8', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #44'), + ('0000000000040000', 'ea51d3975595b86b', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #45'), + ('0000000000020000', 'caffc6ac4542de31', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #46'), + ('0000000000010000', '8dd45a2ddf90796c', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #47'), + ('0000000000008000', '1029d55e880ec2d0', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #48'), + ('0000000000004000', '5d86cb23639dbea9', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #49'), + ('0000000000002000', '1d1ca853ae7c0c5f', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #50'), + ('0000000000001000', 'ce332329248f3228', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #51'), + ('0000000000000800', '8405d1abe24fb942', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #52'), + ('0000000000000400', 'e643d78090ca4207', SP800_20_A1_KEY, + 'NIST SP800-20 
A.1 #53'), + ('0000000000000200', '48221b9937748a23', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #54'), + ('0000000000000100', 'dd7c0bbd61fafd54', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #55'), + ('0000000000000080', '2fbc291a570db5c4', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #56'), + ('0000000000000040', 'e07c30d7e4e26e12', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #57'), + ('0000000000000020', '0953e2258e8e90a1', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #58'), + ('0000000000000010', '5b711bc4ceebf2ee', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #59'), + ('0000000000000008', 'cc083f1e6d9e85f6', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #60'), + ('0000000000000004', 'd2fd8867d50d2dfe', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #61'), + ('0000000000000002', '06e7ea22ce92708f', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #62'), + ('0000000000000001', '166b40b44aba4bd6', SP800_20_A1_KEY, + 'NIST SP800-20 A.1 #63'), + + # Table A.2 - Variable Key Known Answer Test + (SP800_20_A2_PT, '95a8d72813daa94d', '8001010101010101'*3, + 'NIST SP800-20 A.2 #0'), + (SP800_20_A2_PT, '0eec1487dd8c26d5', '4001010101010101'*3, + 'NIST SP800-20 A.2 #1'), + (SP800_20_A2_PT, '7ad16ffb79c45926', '2001010101010101'*3, + 'NIST SP800-20 A.2 #2'), + (SP800_20_A2_PT, 'd3746294ca6a6cf3', '1001010101010101'*3, + 'NIST SP800-20 A.2 #3'), + (SP800_20_A2_PT, '809f5f873c1fd761', '0801010101010101'*3, + 'NIST SP800-20 A.2 #4'), + (SP800_20_A2_PT, 'c02faffec989d1fc', '0401010101010101'*3, + 'NIST SP800-20 A.2 #5'), + (SP800_20_A2_PT, '4615aa1d33e72f10', '0201010101010101'*3, + 'NIST SP800-20 A.2 #6'), + (SP800_20_A2_PT, '2055123350c00858', '0180010101010101'*3, + 'NIST SP800-20 A.2 #7'), + (SP800_20_A2_PT, 'df3b99d6577397c8', '0140010101010101'*3, + 'NIST SP800-20 A.2 #8'), + (SP800_20_A2_PT, '31fe17369b5288c9', '0120010101010101'*3, + 'NIST SP800-20 A.2 #9'), + (SP800_20_A2_PT, 'dfdd3cc64dae1642', '0110010101010101'*3, + 'NIST SP800-20 A.2 #10'), + (SP800_20_A2_PT, '178c83ce2b399d94', '0108010101010101'*3, + 'NIST SP800-20 A.2 #11'), + (SP800_20_A2_PT, '50f636324a9b7f80', '0104010101010101'*3, + 'NIST SP800-20 A.2 #12'), + (SP800_20_A2_PT, 'a8468ee3bc18f06d', '0102010101010101'*3, + 'NIST SP800-20 A.2 #13'), + (SP800_20_A2_PT, 'a2dc9e92fd3cde92', '0101800101010101'*3, + 'NIST SP800-20 A.2 #14'), + (SP800_20_A2_PT, 'cac09f797d031287', '0101400101010101'*3, + 'NIST SP800-20 A.2 #15'), + (SP800_20_A2_PT, '90ba680b22aeb525', '0101200101010101'*3, + 'NIST SP800-20 A.2 #16'), + (SP800_20_A2_PT, 'ce7a24f350e280b6', '0101100101010101'*3, + 'NIST SP800-20 A.2 #17'), + (SP800_20_A2_PT, '882bff0aa01a0b87', '0101080101010101'*3, + 'NIST SP800-20 A.2 #18'), + (SP800_20_A2_PT, '25610288924511c2', '0101040101010101'*3, + 'NIST SP800-20 A.2 #19'), + (SP800_20_A2_PT, 'c71516c29c75d170', '0101020101010101'*3, + 'NIST SP800-20 A.2 #20'), + (SP800_20_A2_PT, '5199c29a52c9f059', '0101018001010101'*3, + 'NIST SP800-20 A.2 #21'), + (SP800_20_A2_PT, 'c22f0a294a71f29f', '0101014001010101'*3, + 'NIST SP800-20 A.2 #22'), + (SP800_20_A2_PT, 'ee371483714c02ea', '0101012001010101'*3, + 'NIST SP800-20 A.2 #23'), + (SP800_20_A2_PT, 'a81fbd448f9e522f', '0101011001010101'*3, + 'NIST SP800-20 A.2 #24'), + (SP800_20_A2_PT, '4f644c92e192dfed', '0101010801010101'*3, + 'NIST SP800-20 A.2 #25'), + (SP800_20_A2_PT, '1afa9a66a6df92ae', '0101010401010101'*3, + 'NIST SP800-20 A.2 #26'), + (SP800_20_A2_PT, 'b3c1cc715cb879d8', '0101010201010101'*3, + 'NIST SP800-20 A.2 #27'), + (SP800_20_A2_PT, '19d032e64ab0bd8b', '0101010180010101'*3, + 'NIST SP800-20 A.2 #28'), + (SP800_20_A2_PT, '3cfaa7a7dc8720dc', 
'0101010140010101'*3, + 'NIST SP800-20 A.2 #29'), + (SP800_20_A2_PT, 'b7265f7f447ac6f3', '0101010120010101'*3, + 'NIST SP800-20 A.2 #30'), + (SP800_20_A2_PT, '9db73b3c0d163f54', '0101010110010101'*3, + 'NIST SP800-20 A.2 #31'), + (SP800_20_A2_PT, '8181b65babf4a975', '0101010108010101'*3, + 'NIST SP800-20 A.2 #32'), + (SP800_20_A2_PT, '93c9b64042eaa240', '0101010104010101'*3, + 'NIST SP800-20 A.2 #33'), + (SP800_20_A2_PT, '5570530829705592', '0101010102010101'*3, + 'NIST SP800-20 A.2 #34'), + (SP800_20_A2_PT, '8638809e878787a0', '0101010101800101'*3, + 'NIST SP800-20 A.2 #35'), + (SP800_20_A2_PT, '41b9a79af79ac208', '0101010101400101'*3, + 'NIST SP800-20 A.2 #36'), + (SP800_20_A2_PT, '7a9be42f2009a892', '0101010101200101'*3, + 'NIST SP800-20 A.2 #37'), + (SP800_20_A2_PT, '29038d56ba6d2745', '0101010101100101'*3, + 'NIST SP800-20 A.2 #38'), + (SP800_20_A2_PT, '5495c6abf1e5df51', '0101010101080101'*3, + 'NIST SP800-20 A.2 #39'), + (SP800_20_A2_PT, 'ae13dbd561488933', '0101010101040101'*3, + 'NIST SP800-20 A.2 #40'), + (SP800_20_A2_PT, '024d1ffa8904e389', '0101010101020101'*3, + 'NIST SP800-20 A.2 #41'), + (SP800_20_A2_PT, 'd1399712f99bf02e', '0101010101018001'*3, + 'NIST SP800-20 A.2 #42'), + (SP800_20_A2_PT, '14c1d7c1cffec79e', '0101010101014001'*3, + 'NIST SP800-20 A.2 #43'), + (SP800_20_A2_PT, '1de5279dae3bed6f', '0101010101012001'*3, + 'NIST SP800-20 A.2 #44'), + (SP800_20_A2_PT, 'e941a33f85501303', '0101010101011001'*3, + 'NIST SP800-20 A.2 #45'), + (SP800_20_A2_PT, 'da99dbbc9a03f379', '0101010101010801'*3, + 'NIST SP800-20 A.2 #46'), + (SP800_20_A2_PT, 'b7fc92f91d8e92e9', '0101010101010401'*3, + 'NIST SP800-20 A.2 #47'), + (SP800_20_A2_PT, 'ae8e5caa3ca04e85', '0101010101010201'*3, + 'NIST SP800-20 A.2 #48'), + (SP800_20_A2_PT, '9cc62df43b6eed74', '0101010101010180'*3, + 'NIST SP800-20 A.2 #49'), + (SP800_20_A2_PT, 'd863dbb5c59a91a0', '0101010101010140'*3, + 'NIST SP800-20 A.2 #50'), + (SP800_20_A2_PT, 'a1ab2190545b91d7', '0101010101010120'*3, + 'NIST SP800-20 A.2 #51'), + (SP800_20_A2_PT, '0875041e64c570f7', '0101010101010110'*3, + 'NIST SP800-20 A.2 #52'), + (SP800_20_A2_PT, '5a594528bebef1cc', '0101010101010108'*3, + 'NIST SP800-20 A.2 #53'), + (SP800_20_A2_PT, 'fcdb3291de21f0c0', '0101010101010104'*3, + 'NIST SP800-20 A.2 #54'), + (SP800_20_A2_PT, '869efd7f9f265a09', '0101010101010102'*3, + 'NIST SP800-20 A.2 #55'), + + # "Two-key 3DES". Test vector generated using PyCrypto 2.0.1. + # This test is designed to test the DES3 API, not the correctness of the + # output. + ('21e81b7ade88a259', '5c577d4d9b20c0f8', + '9b397ebf81b1181e282f4bb8adbadc6b', 'Two-key 3DES'), + + # The following test vectors have been generated with gpg v1.4.0. + # The command line used was: + # gpg -c -z 0 --cipher-algo 3DES --passphrase secret_passphrase \ + # --disable-mdc --s2k-mode 0 --output ct pt + # For an explanation, see test_AES.py . 
+ ( 'ac1762037074324fb53ba3596f73656d69746556616c6c6579', # Plaintext, 'YosemiteValley' + '9979238528357b90e2e0be549cb0b2d5999b9a4a447e5c5c7d', # Ciphertext + '7ade65b460f5ea9be35f9e14aa883a2048e3824aa616c0b2', # Key (hash of 'BearsAhead') + 'GPG Test Vector #1', + dict(mode='OPENPGP', iv='cd47e2afb8b7e4b0', encrypted_iv='6a7eef0b58050e8b904a' ) ), +] + +def get_tests(config={}): + from Crypto.Cipher import DES3 + from .common import make_block_tests + return make_block_tests(DES3, "DES3", test_data) + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Cipher/test_XOR.py b/Lib/site-packages/Crypto/SelfTest/Cipher/test_XOR.py new file mode 100644 index 0000000..8b7cb66 --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Cipher/test_XOR.py @@ -0,0 +1,72 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/XOR.py: Self-test for the XOR "cipher" +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Cipher.XOR""" + +import unittest + +__revision__ = "$Id$" + +from Crypto.Util.py3compat import * + +# This is a list of (plaintext, ciphertext, key) tuples. +test_data = [ + # Test vectors written from scratch. (Nobody posts XOR test vectors on the web? How disappointing.) + ('01', '01', + '00', + 'zero key'), + + ('0102040810204080', '0003050911214181', + '01', + '1-byte key'), + + ('0102040810204080', 'cda8c8a2dc8a8c2a', + 'ccaa', + '2-byte key'), + + ('ff'*64, 'fffefdfcfbfaf9f8f7f6f5f4f3f2f1f0efeeedecebeae9e8e7e6e5e4e3e2e1e0'*2, + '000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f', + '32-byte key'), +] + +class TruncationSelfTest(unittest.TestCase): + + def runTest(self): + """33-byte key (should raise ValueError under current implementation)""" + # Crypto.Cipher.XOR previously truncated its inputs at 32 bytes. Now + # it should raise a ValueError if the length is too long. 
+ self.assertRaises(ValueError, XOR.new, "x"*33) + +def get_tests(config={}): + global XOR + from Crypto.Cipher import XOR + from .common import make_stream_tests + return make_stream_tests(XOR, "XOR", test_data) + [TruncationSelfTest()] + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Cipher/test_pkcs1_15.py b/Lib/site-packages/Crypto/SelfTest/Cipher/test_pkcs1_15.py new file mode 100644 index 0000000..b97e045 --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Cipher/test_pkcs1_15.py @@ -0,0 +1,174 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/test_pkcs1_15.py: Self-test for PKCS#1 v1.5 encryption +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +__revision__ = "$Id$" + +import unittest +import sys + +from Crypto.PublicKey import RSA +from Crypto.SelfTest.st_common import list_test_cases, a2b_hex, b2a_hex +from Crypto import Random +from Crypto.Cipher import PKCS1_v1_5 as PKCS +from Crypto.Util.py3compat import * + +def rws(t): + """Remove white spaces, tabs, and new lines from a string""" + for c in ['\n', '\t', ' ']: + t = t.replace(c,'') + return t + +def t2b(t): + """Convert a text string with bytes in hex form to a byte string""" + clean = b(rws(t)) + if len(clean)%2 == 1: + print(clean) + raise ValueError("Even number of characters expected") + return a2b_hex(clean) + +class PKCS1_15_Tests(unittest.TestCase): + + def setUp(self): + self.rng = Random.new().read + self.key1024 = RSA.generate(1024, self.rng) + + # List of tuples with test data for PKCS#1 v1.5. 
+ # Each tuple is made up by: + # Item #0: dictionary with RSA key component, or key to import + # Item #1: plaintext + # Item #2: ciphertext + # Item #3: random data + + _testData = ( + + # + # Generated with openssl 0.9.8o + # + ( + # Private key + '''-----BEGIN RSA PRIVATE KEY----- +MIICXAIBAAKBgQDAiAnvIAOvqVwJTaYzsKnefZftgtXGE2hPJppGsWl78yz9jeXY +W/FxX/gTPURArNhdnhP6n3p2ZaDIBrO2zizbgIXs0IsljTTcr4vnI8fMXzyNUOjA +zP3nzMqZDZK6757XQAobOssMkBFqRWwilT/3DsBhRpl3iMUhF+wvpTSHewIDAQAB +AoGAC4HV/inOrpgTvSab8Wj0riyZgQOZ3U3ZpSlsfR8ra9Ib9Uee3jCYnKscu6Gk +y6zI/cdt8EPJ4PuwAWSNJzbpbVaDvUq25OD+CX8/uRT08yBS4J8TzBitZJTD4lS7 +atdTnKT0Wmwk+u8tDbhvMKwnUHdJLcuIsycts9rwJVapUtkCQQDvDpx2JMun0YKG +uUttjmL8oJ3U0m3ZvMdVwBecA0eebZb1l2J5PvI3EJD97eKe91Nsw8T3lwpoN40k +IocSVDklAkEAzi1HLHE6EzVPOe5+Y0kGvrIYRRhncOb72vCvBZvD6wLZpQgqo6c4 +d3XHFBBQWA6xcvQb5w+VVEJZzw64y25sHwJBAMYReRl6SzL0qA0wIYrYWrOt8JeQ +8mthulcWHXmqTgC6FEXP9Es5GD7/fuKl4wqLKZgIbH4nqvvGay7xXLCXD/ECQH9a +1JYNMtRen5unSAbIOxRcKkWz92F0LKpm9ZW/S9vFHO+mBcClMGoKJHiuQxLBsLbT +NtEZfSJZAeS2sUtn3/0CQDb2M2zNBTF8LlM0nxmh0k9VGm5TVIyBEMcipmvOgqIs +HKukWBcq9f/UOmS0oEhai/6g+Uf7VHJdWaeO5LzuvwU= +-----END RSA PRIVATE KEY-----''', + # Plaintext + '''THIS IS PLAINTEXT\x0A''', + # Ciphertext + '''3f dc fd 3c cd 5c 9b 12 af 65 32 e3 f7 d0 da 36 + 8f 8f d9 e3 13 1c 7f c8 b3 f9 c1 08 e4 eb 79 9c + 91 89 1f 96 3b 94 77 61 99 a4 b1 ee 5d e6 17 c9 + 5d 0a b5 63 52 0a eb 00 45 38 2a fb b0 71 3d 11 + f7 a1 9e a7 69 b3 af 61 c0 bb 04 5b 5d 4b 27 44 + 1f 5b 97 89 ba 6a 08 95 ee 4f a2 eb 56 64 e5 0f + da 7c f9 9a 61 61 06 62 ed a0 bc 5f aa 6c 31 78 + 70 28 1a bb 98 3c e3 6a 60 3c d1 0b 0f 5a f4 75''', + # Random data + '''eb d7 7d 86 a4 35 23 a3 54 7e 02 0b 42 1d + 61 6c af 67 b8 4e 17 56 80 66 36 04 64 34 26 8a + 47 dd 44 b3 1a b2 17 60 f4 91 2e e2 b5 95 64 cc + f9 da c8 70 94 54 86 4c ef 5b 08 7d 18 c4 ab 8d + 04 06 33 8f ca 15 5f 52 60 8a a1 0c f5 08 b5 4c + bb 99 b8 94 25 04 9c e6 01 75 e6 f9 63 7a 65 61 + 13 8a a7 47 77 81 ae 0d b8 2c 4d 50 a5''' + ), + ) + + def testEncrypt1(self): + for test in self._testData: + # Build the key + key = RSA.importKey(test[0]) + # RNG that takes its random numbers from a pool given + # at initialization + class randGen: + def __init__(self, data): + self.data = data + self.idx = 0 + def __call__(self, N): + r = self.data[self.idx:N] + self.idx += N + return r + # The real test + key._randfunc = randGen(t2b(test[3])) + cipher = PKCS.new(key) + ct = cipher.encrypt(b(test[1])) + self.assertEqual(ct, t2b(test[2])) + + def testEncrypt2(self): + # Verify that encryption fail if plaintext is too long + pt = '\x00'*(128-11+1) + cipher = PKCS.new(self.key1024) + self.assertRaises(ValueError, cipher.encrypt, pt) + + def testVerify1(self): + for test in self._testData: + # Build the key + key = RSA.importKey(test[0]) + # The real test + cipher = PKCS.new(key) + pt = cipher.decrypt(t2b(test[2]), "---") + self.assertEqual(pt, b(test[1])) + + def testVerify2(self): + # Verify that decryption fails if ciphertext is not as long as + # RSA modulus + cipher = PKCS.new(self.key1024) + self.assertRaises(ValueError, cipher.decrypt, '\x00'*127, "---") + self.assertRaises(ValueError, cipher.decrypt, '\x00'*129, "---") + + # Verify that decryption fails if there are less then 8 non-zero padding + # bytes + pt = b('\x00\x02' + '\xFF'*7 + '\x00' + '\x45'*118) + ct = self.key1024.encrypt(pt, 0)[0] + ct = b('\x00'*(128-len(ct))) + ct + self.assertEqual("---", cipher.decrypt(ct, "---")) + + def testEncryptVerify1(self): + # Encrypt/Verify messages of length [0..RSAlen-11] + # and 
therefore padding [8..117] + for pt_len in range(0,128-11+1): + pt = self.rng(pt_len) + cipher = PKCS.new(self.key1024) + ct = cipher.encrypt(pt) + pt2 = cipher.decrypt(ct, "---") + self.assertEqual(pt,pt2) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(PKCS1_15_Tests) + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Cipher/test_pkcs1_oaep.py b/Lib/site-packages/Crypto/SelfTest/Cipher/test_pkcs1_oaep.py new file mode 100644 index 0000000..5e249c4 --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Cipher/test_pkcs1_oaep.py @@ -0,0 +1,372 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Cipher/test_pkcs1_oaep.py: Self-test for PKCS#1 OAEP encryption +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + + + +__revision__ = "$Id$" + +import unittest + +from Crypto.SelfTest.st_common import list_test_cases, a2b_hex, b2a_hex + +from Crypto.Util.py3compat import * +from Crypto.PublicKey import RSA +from Crypto.Cipher import PKCS1_OAEP as PKCS +from Crypto.Hash import MD2,MD5,SHA as SHA1,SHA256,RIPEMD +from Crypto import Random + +def rws(t): + """Remove white spaces, tabs, and new lines from a string""" + for c in ['\n', '\t', ' ']: + t = t.replace(c,'') + return t + +def t2b(t): + """Convert a text string with bytes in hex form to a byte string""" + clean = rws(t) + if len(clean)%2 == 1: + raise ValueError("Even number of characters expected") + return a2b_hex(clean) + +class PKCS1_OAEP_Tests(unittest.TestCase): + + def setUp(self): + self.rng = Random.new().read + self.key1024 = RSA.generate(1024, self.rng) + + # List of tuples with test data for PKCS#1 OAEP + # Each tuple is made up by: + # Item #0: dictionary with RSA key component + # Item #1: plaintext + # Item #2: ciphertext + # Item #3: random data (=seed) + # Item #4: hash object + + _testData = ( + + # + # From in oaep-int.txt to be found in + # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip + # + ( + # Private key + { + 'n':'''bb f8 2f 09 06 82 ce 9c 23 38 ac 2b 9d a8 71 f7 + 36 8d 07 ee d4 10 43 a4 40 d6 b6 f0 74 54 f5 1f + b8 df ba af 03 5c 02 ab 61 ea 48 ce eb 6f cd 48 + 76 ed 52 0d 60 e1 ec 46 19 71 9d 8a 5b 8b 80 7f + af b8 e0 a3 df c7 37 72 3e e6 b4 b7 d9 3a 25 84 + ee 6a 64 9d 06 09 53 74 88 34 b2 45 45 98 39 4e + e0 aa b1 2d 7b 61 a5 1f 52 7a 9a 41 f6 c1 68 7f + e2 53 72 98 ca 2a 8f 59 46 f8 e5 fd 09 1d bd cb''', + # Public key + 'e':'11', + # In the test vector, only p and q were given... 
+ # d is computed offline as e^{-1} mod (p-1)(q-1) + 'd':'''a5dafc5341faf289c4b988db30c1cdf83f31251e0 + 668b42784813801579641b29410b3c7998d6bc465745e5c3 + 92669d6870da2c082a939e37fdcb82ec93edac97ff3ad595 + 0accfbc111c76f1a9529444e56aaf68c56c092cd38dc3bef + 5d20a939926ed4f74a13eddfbe1a1cecc4894af9428c2b7b + 8883fe4463a4bc85b1cb3c1''' + } + , + # Plaintext + '''d4 36 e9 95 69 fd 32 a7 c8 a0 5b bc 90 d3 2c 49''', + # Ciphertext + '''12 53 e0 4d c0 a5 39 7b b4 4a 7a b8 7e 9b f2 a0 + 39 a3 3d 1e 99 6f c8 2a 94 cc d3 00 74 c9 5d f7 + 63 72 20 17 06 9e 52 68 da 5d 1c 0b 4f 87 2c f6 + 53 c1 1d f8 23 14 a6 79 68 df ea e2 8d ef 04 bb + 6d 84 b1 c3 1d 65 4a 19 70 e5 78 3b d6 eb 96 a0 + 24 c2 ca 2f 4a 90 fe 9f 2e f5 c9 c1 40 e5 bb 48 + da 95 36 ad 87 00 c8 4f c9 13 0a de a7 4e 55 8d + 51 a7 4d df 85 d8 b5 0d e9 68 38 d6 06 3e 09 55''', + # Random + '''aa fd 12 f6 59 ca e6 34 89 b4 79 e5 07 6d de c2 + f0 6c b5 8f''', + # Hash + SHA1, + ), + + # + # From in oaep-vect.txt to be found in Example 1.1 + # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip + # + ( + # Private key + { + 'n':'''a8 b3 b2 84 af 8e b5 0b 38 70 34 a8 60 f1 46 c4 + 91 9f 31 87 63 cd 6c 55 98 c8 ae 48 11 a1 e0 ab + c4 c7 e0 b0 82 d6 93 a5 e7 fc ed 67 5c f4 66 85 + 12 77 2c 0c bc 64 a7 42 c6 c6 30 f5 33 c8 cc 72 + f6 2a e8 33 c4 0b f2 58 42 e9 84 bb 78 bd bf 97 + c0 10 7d 55 bd b6 62 f5 c4 e0 fa b9 84 5c b5 14 + 8e f7 39 2d d3 aa ff 93 ae 1e 6b 66 7b b3 d4 24 + 76 16 d4 f5 ba 10 d4 cf d2 26 de 88 d3 9f 16 fb''', + 'e':'''01 00 01''', + 'd':'''53 33 9c fd b7 9f c8 46 6a 65 5c 73 16 ac a8 5c + 55 fd 8f 6d d8 98 fd af 11 95 17 ef 4f 52 e8 fd + 8e 25 8d f9 3f ee 18 0f a0 e4 ab 29 69 3c d8 3b + 15 2a 55 3d 4a c4 d1 81 2b 8b 9f a5 af 0e 7f 55 + fe 73 04 df 41 57 09 26 f3 31 1f 15 c4 d6 5a 73 + 2c 48 31 16 ee 3d 3d 2d 0a f3 54 9a d9 bf 7c bf + b7 8a d8 84 f8 4d 5b eb 04 72 4d c7 36 9b 31 de + f3 7d 0c f5 39 e9 cf cd d3 de 65 37 29 ea d5 d1 ''' + } + , + # Plaintext + '''66 28 19 4e 12 07 3d b0 3b a9 4c da 9e f9 53 23 + 97 d5 0d ba 79 b9 87 00 4a fe fe 34''', + # Ciphertext + '''35 4f e6 7b 4a 12 6d 5d 35 fe 36 c7 77 79 1a 3f + 7b a1 3d ef 48 4e 2d 39 08 af f7 22 fa d4 68 fb + 21 69 6d e9 5d 0b e9 11 c2 d3 17 4f 8a fc c2 01 + 03 5f 7b 6d 8e 69 40 2d e5 45 16 18 c2 1a 53 5f + a9 d7 bf c5 b8 dd 9f c2 43 f8 cf 92 7d b3 13 22 + d6 e8 81 ea a9 1a 99 61 70 e6 57 a0 5a 26 64 26 + d9 8c 88 00 3f 84 77 c1 22 70 94 a0 d9 fa 1e 8c + 40 24 30 9c e1 ec cc b5 21 00 35 d4 7a c7 2e 8a''', + # Random + '''18 b7 76 ea 21 06 9d 69 77 6a 33 e9 6b ad 48 e1 + dd a0 a5 ef''', + SHA1 + ), + + # + # From in oaep-vect.txt to be found in Example 2.1 + # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip + # + ( + # Private key + { + 'n':'''01 94 7c 7f ce 90 42 5f 47 27 9e 70 85 1f 25 d5 + e6 23 16 fe 8a 1d f1 93 71 e3 e6 28 e2 60 54 3e + 49 01 ef 60 81 f6 8c 0b 81 41 19 0d 2a e8 da ba + 7d 12 50 ec 6d b6 36 e9 44 ec 37 22 87 7c 7c 1d + 0a 67 f1 4b 16 94 c5 f0 37 94 51 a4 3e 49 a3 2d + de 83 67 0b 73 da 91 a1 c9 9b c2 3b 43 6a 60 05 + 5c 61 0f 0b af 99 c1 a0 79 56 5b 95 a3 f1 52 66 + 32 d1 d4 da 60 f2 0e da 25 e6 53 c4 f0 02 76 6f + 45''', + 'e':'''01 00 01''', + 'd':'''08 23 f2 0f ad b5 da 89 08 8a 9d 00 89 3e 21 fa + 4a 1b 11 fb c9 3c 64 a3 be 0b aa ea 97 fb 3b 93 + c3 ff 71 37 04 c1 9c 96 3c 1d 10 7a ae 99 05 47 + 39 f7 9e 02 e1 86 de 86 f8 7a 6d de fe a6 d8 cc + d1 d3 c8 1a 47 bf a7 25 5b e2 06 01 a4 a4 b2 f0 + 8a 16 7b 5e 27 9d 71 5b 1b 45 5b dd 7e ab 24 59 + 41 d9 76 8b 9a ce fb 3c cd a5 95 2d a3 ce e7 25 + 25 b4 50 16 63 a8 ee 15 c9 e9 92 d9 
24 62 fe 39''' + }, + # Plaintext + '''8f f0 0c aa 60 5c 70 28 30 63 4d 9a 6c 3d 42 c6 + 52 b5 8c f1 d9 2f ec 57 0b ee e7''', + # Ciphertext + '''01 81 af 89 22 b9 fc b4 d7 9d 92 eb e1 98 15 99 + 2f c0 c1 43 9d 8b cd 49 13 98 a0 f4 ad 3a 32 9a + 5b d9 38 55 60 db 53 26 83 c8 b7 da 04 e4 b1 2a + ed 6a ac df 47 1c 34 c9 cd a8 91 ad dc c2 df 34 + 56 65 3a a6 38 2e 9a e5 9b 54 45 52 57 eb 09 9d + 56 2b be 10 45 3f 2b 6d 13 c5 9c 02 e1 0f 1f 8a + bb 5d a0 d0 57 09 32 da cf 2d 09 01 db 72 9d 0f + ef cc 05 4e 70 96 8e a5 40 c8 1b 04 bc ae fe 72 + 0e''', + # Random + '''8c 40 7b 5e c2 89 9e 50 99 c5 3e 8c e7 93 bf 94 + e7 1b 17 82''', + SHA1 + ), + + # + # From in oaep-vect.txt to be found in Example 10.1 + # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip + # + ( + # Private key + { + 'n':'''ae 45 ed 56 01 ce c6 b8 cc 05 f8 03 93 5c 67 4d + db e0 d7 5c 4c 09 fd 79 51 fc 6b 0c ae c3 13 a8 + df 39 97 0c 51 8b ff ba 5e d6 8f 3f 0d 7f 22 a4 + 02 9d 41 3f 1a e0 7e 4e be 9e 41 77 ce 23 e7 f5 + 40 4b 56 9e 4e e1 bd cf 3c 1f b0 3e f1 13 80 2d + 4f 85 5e b9 b5 13 4b 5a 7c 80 85 ad ca e6 fa 2f + a1 41 7e c3 76 3b e1 71 b0 c6 2b 76 0e de 23 c1 + 2a d9 2b 98 08 84 c6 41 f5 a8 fa c2 6b da d4 a0 + 33 81 a2 2f e1 b7 54 88 50 94 c8 25 06 d4 01 9a + 53 5a 28 6a fe b2 71 bb 9b a5 92 de 18 dc f6 00 + c2 ae ea e5 6e 02 f7 cf 79 fc 14 cf 3b dc 7c d8 + 4f eb bb f9 50 ca 90 30 4b 22 19 a7 aa 06 3a ef + a2 c3 c1 98 0e 56 0c d6 4a fe 77 95 85 b6 10 76 + 57 b9 57 85 7e fd e6 01 09 88 ab 7d e4 17 fc 88 + d8 f3 84 c4 e6 e7 2c 3f 94 3e 0c 31 c0 c4 a5 cc + 36 f8 79 d8 a3 ac 9d 7d 59 86 0e aa da 6b 83 bb''', + 'e':'''01 00 01''', + 'd':'''05 6b 04 21 6f e5 f3 54 ac 77 25 0a 4b 6b 0c 85 + 25 a8 5c 59 b0 bd 80 c5 64 50 a2 2d 5f 43 8e 59 + 6a 33 3a a8 75 e2 91 dd 43 f4 8c b8 8b 9d 5f c0 + d4 99 f9 fc d1 c3 97 f9 af c0 70 cd 9e 39 8c 8d + 19 e6 1d b7 c7 41 0a 6b 26 75 df bf 5d 34 5b 80 + 4d 20 1a dd 50 2d 5c e2 df cb 09 1c e9 99 7b be + be 57 30 6f 38 3e 4d 58 81 03 f0 36 f7 e8 5d 19 + 34 d1 52 a3 23 e4 a8 db 45 1d 6f 4a 5b 1b 0f 10 + 2c c1 50 e0 2f ee e2 b8 8d ea 4a d4 c1 ba cc b2 + 4d 84 07 2d 14 e1 d2 4a 67 71 f7 40 8e e3 05 64 + fb 86 d4 39 3a 34 bc f0 b7 88 50 1d 19 33 03 f1 + 3a 22 84 b0 01 f0 f6 49 ea f7 93 28 d4 ac 5c 43 + 0a b4 41 49 20 a9 46 0e d1 b7 bc 40 ec 65 3e 87 + 6d 09 ab c5 09 ae 45 b5 25 19 01 16 a0 c2 61 01 + 84 82 98 50 9c 1c 3b f3 a4 83 e7 27 40 54 e1 5e + 97 07 50 36 e9 89 f6 09 32 80 7b 52 57 75 1e 79''' + }, + # Plaintext + '''8b ba 6b f8 2a 6c 0f 86 d5 f1 75 6e 97 95 68 70 + b0 89 53 b0 6b 4e b2 05 bc 16 94 ee''', + # Ciphertext + '''53 ea 5d c0 8c d2 60 fb 3b 85 85 67 28 7f a9 15 + 52 c3 0b 2f eb fb a2 13 f0 ae 87 70 2d 06 8d 19 + ba b0 7f e5 74 52 3d fb 42 13 9d 68 c3 c5 af ee + e0 bf e4 cb 79 69 cb f3 82 b8 04 d6 e6 13 96 14 + 4e 2d 0e 60 74 1f 89 93 c3 01 4b 58 b9 b1 95 7a + 8b ab cd 23 af 85 4f 4c 35 6f b1 66 2a a7 2b fc + c7 e5 86 55 9d c4 28 0d 16 0c 12 67 85 a7 23 eb + ee be ff 71 f1 15 94 44 0a ae f8 7d 10 79 3a 87 + 74 a2 39 d4 a0 4c 87 fe 14 67 b9 da f8 52 08 ec + 6c 72 55 79 4a 96 cc 29 14 2f 9a 8b d4 18 e3 c1 + fd 67 34 4b 0c d0 82 9d f3 b2 be c6 02 53 19 62 + 93 c6 b3 4d 3f 75 d3 2f 21 3d d4 5c 62 73 d5 05 + ad f4 cc ed 10 57 cb 75 8f c2 6a ee fa 44 12 55 + ed 4e 64 c1 99 ee 07 5e 7f 16 64 61 82 fd b4 64 + 73 9b 68 ab 5d af f0 e6 3e 95 52 01 68 24 f0 54 + bf 4d 3c 8c 90 a9 7b b6 b6 55 32 84 eb 42 9f cc''', + # Random + '''47 e1 ab 71 19 fe e5 6c 95 ee 5e aa d8 6f 40 d0 + aa 63 bd 33''', + SHA1 + ), + ) + + def testEncrypt1(self): + # Verify encryption using all test vectors + 
for test in self._testData: + # Build the key + comps = [ int(rws(test[0][x]),16) for x in ('n','e') ] + key = RSA.construct(comps) + # RNG that takes its random numbers from a pool given + # at initialization + class randGen: + def __init__(self, data): + self.data = data + self.idx = 0 + def __call__(self, N): + r = self.data[self.idx:N] + self.idx += N + return r + # The real test + key._randfunc = randGen(t2b(test[3])) + cipher = PKCS.new(key, test[4]) + ct = cipher.encrypt(t2b(test[1])) + self.assertEqual(ct, t2b(test[2])) + + def testEncrypt2(self): + # Verify that encryption fails if plaintext is too long + pt = '\x00'*(128-2*20-2+1) + cipher = PKCS.new(self.key1024) + self.assertRaises(ValueError, cipher.encrypt, pt) + + def testDecrypt1(self): + # Verify decryption using all test vectors + for test in self._testData: + # Build the key + comps = [ int(rws(test[0][x]),16) for x in ('n','e','d') ] + key = RSA.construct(comps) + # The real test + cipher = PKCS.new(key, test[4]) + pt = cipher.decrypt(t2b(test[2])) + self.assertEqual(pt, t2b(test[1])) + + def testDecrypt2(self): + # Simplest possible negative tests + for ct_size in (127,128,129): + cipher = PKCS.new(self.key1024) + self.assertRaises(ValueError, cipher.decrypt, bchr(0x00)*ct_size) + + def testEncryptDecrypt1(self): + # Encrypt/Decrypt messages of length [0..128-2*20-2] + for pt_len in range(0,128-2*20-2): + pt = self.rng(pt_len) + ct = PKCS.encrypt(pt, self.key1024) + pt2 = PKCS.decrypt(ct, self.key1024) + self.assertEqual(pt,pt2) + + def testEncryptDecrypt1(self): + # Helper function to monitor what's requested from RNG + global asked + def localRng(N): + global asked + asked += N + return self.rng(N) + # Verify that OAEP is friendly to all hashes + for hashmod in (MD2,MD5,SHA1,SHA256,RIPEMD): + # Verify that encrypt() asks for as many random bytes + # as the hash output size + asked = 0 + pt = self.rng(40) + self.key1024._randfunc = localRng + cipher = PKCS.new(self.key1024, hashmod) + ct = cipher.encrypt(pt) + self.assertEqual(cipher.decrypt(ct), pt) + self.assertTrue(asked > hashmod.digest_size) + + def testEncryptDecrypt2(self): + # Verify that OAEP supports labels + pt = self.rng(35) + xlabel = self.rng(22) + cipher = PKCS.new(self.key1024, label=xlabel) + ct = cipher.encrypt(pt) + self.assertEqual(cipher.decrypt(ct), pt) + + def testEncryptDecrypt3(self): + # Verify that encrypt() uses the custom MGF + global mgfcalls + # Helper function to monitor what's requested from MGF + def newMGF(seed,maskLen): + global mgfcalls + mgfcalls += 1 + return bchr(0x00)*maskLen + mgfcalls = 0 + pt = self.rng(32) + cipher = PKCS.new(self.key1024, mgfunc=newMGF) + ct = cipher.encrypt(pt) + self.assertEqual(mgfcalls, 2) + self.assertEqual(cipher.decrypt(ct), pt) + +def get_tests(config={}): + tests = [] + tests += list_test_cases(PKCS1_OAEP_Tests) + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Hash/__init__.py b/Lib/site-packages/Crypto/SelfTest/Hash/__init__.py new file mode 100644 index 0000000..bb19f9b --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Hash/__init__.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/__init__.py: Self-test for hash modules +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. 
To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test for hash modules""" + +__revision__ = "$Id$" + +def get_tests(config={}): + tests = [] + from Crypto.SelfTest.Hash import test_HMAC; tests += test_HMAC.get_tests(config=config) + from Crypto.SelfTest.Hash import test_MD2; tests += test_MD2.get_tests(config=config) + from Crypto.SelfTest.Hash import test_MD4; tests += test_MD4.get_tests(config=config) + from Crypto.SelfTest.Hash import test_MD5; tests += test_MD5.get_tests(config=config) + from Crypto.SelfTest.Hash import test_RIPEMD; tests += test_RIPEMD.get_tests(config=config) + from Crypto.SelfTest.Hash import test_SHA; tests += test_SHA.get_tests(config=config) + from Crypto.SelfTest.Hash import test_SHA256; tests += test_SHA256.get_tests(config=config) + try: + from Crypto.SelfTest.Hash import test_SHA224; tests += test_SHA224.get_tests(config=config) + from Crypto.SelfTest.Hash import test_SHA384; tests += test_SHA384.get_tests(config=config) + from Crypto.SelfTest.Hash import test_SHA512; tests += test_SHA512.get_tests(config=config) + except ImportError: + import sys + sys.stderr.write("SelfTest: warning: not testing SHA224/SHA384/SHA512 modules (not available)\n") + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Hash/common.py b/Lib/site-packages/Crypto/SelfTest/Hash/common.py new file mode 100644 index 0000000..d806c64 --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Hash/common.py @@ -0,0 +1,197 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/common.py: Common code for Crypto.SelfTest.Hash +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-testing for PyCrypto hash modules""" + +__revision__ = "$Id$" + +import sys +import unittest +import binascii +from Crypto.Util.py3compat import * + +# For compatibility with Python 2.1 and Python 2.2 +if sys.hexversion < 0x02030000: + # Python 2.1 doesn't have a dict() function + # Python 2.2 dict() function raises TypeError if you do dict(MD5='blah') + def dict(**kwargs): + return kwargs.copy() +else: + dict = dict + + +class HashDigestSizeSelfTest(unittest.TestCase): + + def __init__(self, hashmod, description, expected): + unittest.TestCase.__init__(self) + self.hashmod = hashmod + self.expected = expected + self.description = description + + def shortDescription(self): + return self.description + + def runTest(self): + self.assertTrue(hasattr(self.hashmod, "digest_size")) + self.assertEqual(self.hashmod.digest_size, self.expected) + h = self.hashmod.new() + self.assertTrue(hasattr(h, "digest_size")) + self.assertEqual(h.digest_size, self.expected) + + +class HashSelfTest(unittest.TestCase): + + def __init__(self, hashmod, description, expected, input): + unittest.TestCase.__init__(self) + self.hashmod = hashmod + self.expected = expected + self.input = input + self.description = description + + def shortDescription(self): + return self.description + + def runTest(self): + h = self.hashmod.new() + h.update(self.input) + + out1 = binascii.b2a_hex(h.digest()) + out2 = h.hexdigest() + + h = self.hashmod.new(self.input) + + out3 = h.hexdigest() + out4 = binascii.b2a_hex(h.digest()) + + # PY3K: hexdigest() should return str(), and digest() bytes + self.assertEqual(self.expected, out1) # h = .new(); h.update(data); h.digest() + if sys.version_info[0] == 2: + self.assertEqual(self.expected, out2) # h = .new(); h.update(data); h.hexdigest() + self.assertEqual(self.expected, out3) # h = .new(data); h.hexdigest() + else: + self.assertEqual(self.expected.decode(), out2) # h = .new(); h.update(data); h.hexdigest() + self.assertEqual(self.expected.decode(), out3) # h = .new(data); h.hexdigest() + self.assertEqual(self.expected, out4) # h = .new(data); h.digest() + + # Verify that new() object method produces a fresh hash object + h2 = h.new() + h2.update(self.input) + out5 = binascii.b2a_hex(h2.digest()) + self.assertEqual(self.expected, out5) + +class HashTestOID(unittest.TestCase): + def __init__(self, hashmod, oid): + unittest.TestCase.__init__(self) + self.hashmod = hashmod + self.oid = oid + + def runTest(self): + h = self.hashmod.new() + if self.oid==None: + try: + raised = 0 + a = h.oid + except AttributeError: + raised = 1 + self.assertEqual(raised,1) + else: + self.assertEqual(h.oid, self.oid) + +class MACSelfTest(unittest.TestCase): + + def __init__(self, hashmod, description, expected_dict, input, key, hashmods): + unittest.TestCase.__init__(self) + self.hashmod = hashmod + self.expected_dict = expected_dict + self.input = input + self.key = key + self.hashmods = hashmods + self.description = description + + def shortDescription(self): + return self.description + + def runTest(self): + for hashname in list(self.expected_dict.keys()): + hashmod = self.hashmods[hashname] + key = binascii.a2b_hex(b(self.key)) + data = binascii.a2b_hex(b(self.input)) + + # Strip whitespace from the expected string (which should be in lowercase-hex) + expected = b("".join(self.expected_dict[hashname].split())) + + h = self.hashmod.new(key, digestmod=hashmod) + h.update(data) + out1 = binascii.b2a_hex(h.digest()) + out2 = 
h.hexdigest() + + h = self.hashmod.new(key, data, hashmod) + + out3 = h.hexdigest() + out4 = binascii.b2a_hex(h.digest()) + + # Test .copy() + h2 = h.copy() + h.update(b("blah blah blah")) # Corrupt the original hash object + out5 = binascii.b2a_hex(h2.digest()) # The copied hash object should return the correct result + + # PY3K: hexdigest() should return str(), and digest() bytes + self.assertEqual(expected, out1) + if sys.version_info[0] == 2: + self.assertEqual(expected, out2) + self.assertEqual(expected, out3) + else: + self.assertEqual(expected.decode(), out2) + self.assertEqual(expected.decode(), out3) + self.assertEqual(expected, out4) + self.assertEqual(expected, out5) + +def make_hash_tests(module, module_name, test_data, digest_size, oid=None): + tests = [] + for i in range(len(test_data)): + row = test_data[i] + (expected, input) = list(map(b,row[0:2])) + if len(row) < 3: + description = repr(input) + else: + description = row[2].encode('latin-1') + name = "%s #%d: %s" % (module_name, i+1, description) + tests.append(HashSelfTest(module, name, expected, input)) + if oid is not None: + oid = b(oid) + name = "%s #%d: digest_size" % (module_name, i+1) + tests.append(HashDigestSizeSelfTest(module, name, digest_size)) + tests.append(HashTestOID(module, oid)) + return tests + +def make_mac_tests(module, module_name, test_data, hashmods): + tests = [] + for i in range(len(test_data)): + row = test_data[i] + (key, data, results, description) = row + name = "%s #%d: %s" % (module_name, i+1, description) + tests.append(MACSelfTest(module, name, results, data, key, hashmods)) + return tests + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Hash/test_HMAC.py b/Lib/site-packages/Crypto/SelfTest/Hash/test_HMAC.py new file mode 100644 index 0000000..48ef706 --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Hash/test_HMAC.py @@ -0,0 +1,223 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/HMAC.py: Self-test for the HMAC module +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Hash.HMAC""" + +__revision__ = "$Id$" + +from .common import dict # For compatibility with Python 2.1 and 2.2 +from Crypto.Util.py3compat import * + +# This is a list of (key, data, results, description) tuples. 
+test_data = [ + ## Test vectors from RFC 2202 ## + # Test that the default hashmod is MD5 + ('0b' * 16, + '4869205468657265', + dict(default='9294727a3638bb1c13f48ef8158bfc9d'), + 'default-is-MD5'), + + # Test case 1 (MD5) + ('0b' * 16, + '4869205468657265', + dict(MD5='9294727a3638bb1c13f48ef8158bfc9d'), + 'RFC 2202 #1-MD5 (HMAC-MD5)'), + + # Test case 1 (SHA1) + ('0b' * 20, + '4869205468657265', + dict(SHA1='b617318655057264e28bc0b6fb378c8ef146be00'), + 'RFC 2202 #1-SHA1 (HMAC-SHA1)'), + + # Test case 2 + ('4a656665', + '7768617420646f2079612077616e7420666f72206e6f7468696e673f', + dict(MD5='750c783e6ab0b503eaa86e310a5db738', + SHA1='effcdf6ae5eb2fa2d27416d5f184df9c259a7c79'), + 'RFC 2202 #2 (HMAC-MD5/SHA1)'), + + # Test case 3 (MD5) + ('aa' * 16, + 'dd' * 50, + dict(MD5='56be34521d144c88dbb8c733f0e8b3f6'), + 'RFC 2202 #3-MD5 (HMAC-MD5)'), + + # Test case 3 (SHA1) + ('aa' * 20, + 'dd' * 50, + dict(SHA1='125d7342b9ac11cd91a39af48aa17b4f63f175d3'), + 'RFC 2202 #3-SHA1 (HMAC-SHA1)'), + + # Test case 4 + ('0102030405060708090a0b0c0d0e0f10111213141516171819', + 'cd' * 50, + dict(MD5='697eaf0aca3a3aea3a75164746ffaa79', + SHA1='4c9007f4026250c6bc8414f9bf50c86c2d7235da'), + 'RFC 2202 #4 (HMAC-MD5/SHA1)'), + + # Test case 5 (MD5) + ('0c' * 16, + '546573742057697468205472756e636174696f6e', + dict(MD5='56461ef2342edc00f9bab995690efd4c'), + 'RFC 2202 #5-MD5 (HMAC-MD5)'), + + # Test case 5 (SHA1) + # NB: We do not implement hash truncation, so we only test the full hash here. + ('0c' * 20, + '546573742057697468205472756e636174696f6e', + dict(SHA1='4c1a03424b55e07fe7f27be1d58bb9324a9a5a04'), + 'RFC 2202 #5-SHA1 (HMAC-SHA1)'), + + # Test case 6 + ('aa' * 80, + '54657374205573696e67204c6172676572205468616e20426c6f636b2d53697a' + + '65204b6579202d2048617368204b6579204669727374', + dict(MD5='6b1ab7fe4bd7bf8f0b62e6ce61b9d0cd', + SHA1='aa4ae5e15272d00e95705637ce8a3b55ed402112'), + 'RFC 2202 #6 (HMAC-MD5/SHA1)'), + + # Test case 7 + ('aa' * 80, + '54657374205573696e67204c6172676572205468616e20426c6f636b2d53697a' + + '65204b657920616e64204c6172676572205468616e204f6e6520426c6f636b2d' + + '53697a652044617461', + dict(MD5='6f630fad67cda0ee1fb1f562db3aa53e', + SHA1='e8e99d0f45237d786d6bbaa7965c7808bbff1a91'), + 'RFC 2202 #7 (HMAC-MD5/SHA1)'), + + ## Test vectors from RFC 4231 ## + # 4.2. Test Case 1 + ('0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b0b', + '4869205468657265', + dict(SHA256=''' + b0344c61d8db38535ca8afceaf0bf12b + 881dc200c9833da726e9376c2e32cff7 + '''), + 'RFC 4231 #1 (HMAC-SHA256)'), + + # 4.3. Test Case 2 - Test with a key shorter than the length of the HMAC + # output. + ('4a656665', + '7768617420646f2079612077616e7420666f72206e6f7468696e673f', + dict(SHA256=''' + 5bdcc146bf60754e6a042426089575c7 + 5a003f089d2739839dec58b964ec3843 + '''), + 'RFC 4231 #2 (HMAC-SHA256)'), + + # 4.4. Test Case 3 - Test with a combined length of key and data that is + # larger than 64 bytes (= block-size of SHA-224 and SHA-256). + ('aa' * 20, + 'dd' * 50, + dict(SHA256=''' + 773ea91e36800e46854db8ebd09181a7 + 2959098b3ef8c122d9635514ced565fe + '''), + 'RFC 4231 #3 (HMAC-SHA256)'), + + # 4.5. Test Case 4 - Test with a combined length of key and data that is + # larger than 64 bytes (= block-size of SHA-224 and SHA-256). + ('0102030405060708090a0b0c0d0e0f10111213141516171819', + 'cd' * 50, + dict(SHA256=''' + 82558a389a443c0ea4cc819899f2083a + 85f0faa3e578f8077a2e3ff46729665b + '''), + 'RFC 4231 #4 (HMAC-SHA256)'), + + # 4.6. Test Case 5 - Test with a truncation of output to 128 bits. 
+ # + # Not included because we do not implement hash truncation. + # + + # 4.7. Test Case 6 - Test with a key larger than 128 bytes (= block-size of + # SHA-384 and SHA-512). + ('aa' * 131, + '54657374205573696e67204c6172676572205468616e20426c6f636b2d53697a' + + '65204b6579202d2048617368204b6579204669727374', + dict(SHA256=''' + 60e431591ee0b67f0d8a26aacbf5b77f + 8e0bc6213728c5140546040f0ee37f54 + '''), + 'RFC 4231 #6 (HMAC-SHA256)'), + + # 4.8. Test Case 7 - Test with a key and data that is larger than 128 bytes + # (= block-size of SHA-384 and SHA-512). + ('aa' * 131, + '5468697320697320612074657374207573696e672061206c6172676572207468' + + '616e20626c6f636b2d73697a65206b657920616e642061206c61726765722074' + + '68616e20626c6f636b2d73697a6520646174612e20546865206b6579206e6565' + + '647320746f20626520686173686564206265666f7265206265696e6720757365' + + '642062792074686520484d414320616c676f726974686d2e', + dict(SHA256=''' + 9b09ffa71b942fcb27635fbcd5b0e944 + bfdc63644f0713938a7f51535c3a35e2 + '''), + 'RFC 4231 #7 (HMAC-SHA256)'), +] + +hashlib_test_data = [ + # Test case 8 (SHA224) + ('4a656665', + '7768617420646f2079612077616e74' + + '20666f72206e6f7468696e673f', + dict(SHA224='a30e01098bc6dbbf45690f3a7e9e6d0f8bbea2a39e6148008fd05e44'), + 'RFC 4634 8.4 SHA224 (HMAC-SHA224)'), + + # Test case 9 (SHA384) + ('4a656665', + '7768617420646f2079612077616e74' + + '20666f72206e6f7468696e673f', + dict(SHA384='af45d2e376484031617f78d2b58a6b1b9c7ef464f5a01b47e42ec3736322445e8e2240ca5e69e2c78b3239ecfab21649'), + 'RFC 4634 8.4 SHA384 (HMAC-SHA384)'), + + # Test case 10 (SHA512) + ('4a656665', + '7768617420646f2079612077616e74' + + '20666f72206e6f7468696e673f', + dict(SHA512='164b7a7bfcf819e2e395fbe73b56e0a387bd64222e831fd610270cd7ea2505549758bf75c05a994a6d034f65f8f0e6fdcaeab1a34d4a6b4b636e070a38bce737'), + 'RFC 4634 8.4 SHA512 (HMAC-SHA512)'), + +] + +def get_tests(config={}): + global test_data + from Crypto.Hash import HMAC, MD5, SHA as SHA1, SHA256 + from .common import make_mac_tests + hashmods = dict(MD5=MD5, SHA1=SHA1, SHA256=SHA256, default=None) + try: + from Crypto.Hash import SHA224, SHA384, SHA512 + hashmods.update(dict(SHA224=SHA224, SHA384=SHA384, SHA512=SHA512)) + test_data += hashlib_test_data + except ImportError: + import sys + sys.stderr.write("SelfTest: warning: not testing HMAC-SHA224/384/512 (not available)\n") + return make_mac_tests(HMAC, "HMAC", test_data, hashmods) + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Hash/test_MD2.py b/Lib/site-packages/Crypto/SelfTest/Hash/test_MD2.py new file mode 100644 index 0000000..4117b2a --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Hash/test_MD2.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/MD2.py: Self-test for the MD2 hash function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Hash.MD2""" + +__revision__ = "$Id$" + +from Crypto.Util.py3compat import * + +# This is a list of (expected_result, input[, description]) tuples. +test_data = [ + # Test vectors from RFC 1319 + ('8350e5a3e24c153df2275c9f80692773', '', "'' (empty string)"), + ('32ec01ec4a6dac72c0ab96fb34c0b5d1', 'a'), + ('da853b0d3f88d99b30283a69e6ded6bb', 'abc'), + ('ab4f496bfb2a530b219ff33031fe06b0', 'message digest'), + + ('4e8ddff3650292ab5a4108c3aa47940b', 'abcdefghijklmnopqrstuvwxyz', + 'a-z'), + + ('da33def2a42df13975352846c30338cd', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789', + 'A-Z, a-z, 0-9'), + + ('d5976f79d83d3a0dc9806c3c66f3efd8', + '1234567890123456789012345678901234567890123456' + + '7890123456789012345678901234567890', + "'1234567890' * 8"), +] + +def get_tests(config={}): + from Crypto.Hash import MD2 + from .common import make_hash_tests + return make_hash_tests(MD2, "MD2", test_data, + digest_size=16, + oid="\x06\x08\x2a\x86\x48\x86\xf7\x0d\x02\x02") + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Hash/test_MD4.py b/Lib/site-packages/Crypto/SelfTest/Hash/test_MD4.py new file mode 100644 index 0000000..2196093 --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Hash/test_MD4.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/MD4.py: Self-test for the MD4 hash function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Hash.MD4""" + +__revision__ = "$Id$" + +from Crypto.Util.py3compat import * + +# This is a list of (expected_result, input[, description]) tuples. 
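+# Illustrative sketch (not part of the test suite): the oid= string passed to
+# make_hash_tests() below is the DER-encoded OBJECT IDENTIFIER of the hash
+# algorithm (for MD2: 1.2.840.113549.2.2), which HashTestOID in common.py
+# compares against the module's .oid attribute.  The decoder below is an
+# assumed helper for illustration only; it handles the single-byte-length,
+# short OIDs used in these files.
+def _example_decode_oid(der):
+    body = [ord(c) for c in der[2:]]        # skip the 0x06 tag and the length byte
+    arcs = [body[0] // 40, body[0] % 40]    # first octet packs the first two arcs
+    value = 0
+    for octet in body[1:]:                  # remaining arcs are base-128, MSB = "more"
+        value = (value << 7) | (octet & 0x7f)
+        if not octet & 0x80:
+            arcs.append(value)
+            value = 0
+    return ".".join(map(str, arcs))
+# _example_decode_oid("\x06\x08\x2a\x86\x48\x86\xf7\x0d\x02\x02") == "1.2.840.113549.2.2"
+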
+test_data = [ + # Test vectors from RFC 1320 + ('31d6cfe0d16ae931b73c59d7e0c089c0', '', "'' (empty string)"), + ('bde52cb31de33e46245e05fbdbd6fb24', 'a'), + ('a448017aaf21d8525fc10ae87aa6729d', 'abc'), + ('d9130a8164549fe818874806e1c7014b', 'message digest'), + + ('d79e1c308aa5bbcdeea8ed63df412da9', 'abcdefghijklmnopqrstuvwxyz', + 'a-z'), + + ('043f8582f241db351ce627e153e7f0e4', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789', + 'A-Z, a-z, 0-9'), + + ('e33b4ddc9c38f2199c3e7b164fcc0536', + '1234567890123456789012345678901234567890123456' + + '7890123456789012345678901234567890', + "'1234567890' * 8"), +] + +def get_tests(config={}): + from Crypto.Hash import MD4 + from .common import make_hash_tests + return make_hash_tests(MD4, "MD4", test_data, + digest_size=16, + oid="\x06\x08\x2a\x86\x48\x86\xf7\x0d\x02\x04") + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Hash/test_MD5.py b/Lib/site-packages/Crypto/SelfTest/Hash/test_MD5.py new file mode 100644 index 0000000..2e5ccff --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Hash/test_MD5.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/MD5.py: Self-test for the MD5 hash function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Hash.MD5""" + +__revision__ = "$Id$" + +from Crypto.Util.py3compat import * + +# This is a list of (expected_result, input[, description]) tuples. 
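+# Illustrative sketch (not part of the test suite): each (expected_result,
+# input[, description]) tuple in the list below is exercised by HashSelfTest
+# in common.py roughly as shown here.  The function name is an assumption
+# for illustration only.
+def _example_check_md5_abc():
+    from Crypto.Hash import MD5
+    h = MD5.new()
+    h.update(b('abc'))
+    return h.hexdigest() == '900150983cd24fb0d6963f7d28e17f72'
+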
+test_data = [ + # Test vectors from RFC 1321 + ('d41d8cd98f00b204e9800998ecf8427e', '', "'' (empty string)"), + ('0cc175b9c0f1b6a831c399e269772661', 'a'), + ('900150983cd24fb0d6963f7d28e17f72', 'abc'), + ('f96b697d7cb7938d525a2f31aaf161d0', 'message digest'), + + ('c3fcd3d76192e4007dfb496cca67e13b', 'abcdefghijklmnopqrstuvwxyz', + 'a-z'), + + ('d174ab98d277d9f5a5611c2c9f419d9f', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789', + 'A-Z, a-z, 0-9'), + + ('57edf4a22be3c955ac49da2e2107b67a', + '1234567890123456789012345678901234567890123456' + + '7890123456789012345678901234567890', + "'1234567890' * 8"), +] + +def get_tests(config={}): + from Crypto.Hash import MD5 + from .common import make_hash_tests + return make_hash_tests(MD5, "MD5", test_data, + digest_size=16, + oid="\x06\x08\x2a\x86\x48\x86\xf7\x0d\x02\x05") + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Hash/test_RIPEMD.py b/Lib/site-packages/Crypto/SelfTest/Hash/test_RIPEMD.py new file mode 100644 index 0000000..87ed1a5 --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Hash/test_RIPEMD.py @@ -0,0 +1,73 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/test_RIPEMD.py: Self-test for the RIPEMD-160 hash function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +#"""Self-test suite for Crypto.Hash.RIPEMD""" + +__revision__ = "$Id$" + +from Crypto.Util.py3compat import * + +# This is a list of (expected_result, input[, description]) tuples. 
+test_data = [ + # Test vectors downloaded 2008-09-12 from + # http://homes.esat.kuleuven.be/~bosselae/ripemd160.html + ('9c1185a5c5e9fc54612808977ee8f548b2258d31', '', "'' (empty string)"), + ('0bdc9d2d256b3ee9daae347be6f4dc835a467ffe', 'a'), + ('8eb208f7e05d987a9b044a8e98c6b087f15a0bfc', 'abc'), + ('5d0689ef49d2fae572b881b123a85ffa21595f36', 'message digest'), + + ('f71c27109c692c1b56bbdceb5b9d2865b3708dbc', + 'abcdefghijklmnopqrstuvwxyz', + 'a-z'), + + ('12a053384a9c0c88e405a06c27dcf49ada62eb2b', + 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq', + 'abcdbcd...pnopq'), + + ('b0e20b6e3116640286ed3a87a5713079b21f5189', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789', + 'A-Z, a-z, 0-9'), + + ('9b752e45573d4b39f4dbd3323cab82bf63326bfb', + '1234567890' * 8, + "'1234567890' * 8"), + + ('52783243c1697bdbe16d37f97f68f08325dc1528', + 'a' * 10**6, + '"a" * 10**6'), +] + +def get_tests(config={}): + from Crypto.Hash import RIPEMD + from .common import make_hash_tests + return make_hash_tests(RIPEMD, "RIPEMD", test_data, + digest_size=20, + oid="\x06\x05\x2b\x24\x03\02\x01") + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA.py b/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA.py new file mode 100644 index 0000000..e913e32 --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA.py @@ -0,0 +1,64 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/SHA.py: Self-test for the SHA-1 hash function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Hash.SHA""" + +__revision__ = "$Id$" + +from Crypto.Util.py3compat import * + +# Test vectors from various sources +# This is a list of (expected_result, input[, description]) tuples. 
+test_data = [ + # FIPS PUB 180-2, A.1 - "One-Block Message" + ('a9993e364706816aba3e25717850c26c9cd0d89d', 'abc'), + + # FIPS PUB 180-2, A.2 - "Multi-Block Message" + ('84983e441c3bd26ebaae4aa1f95129e5e54670f1', + 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq'), + + # FIPS PUB 180-2, A.3 - "Long Message" +# ('34aa973cd4c4daa4f61eeb2bdbad27316534016f', +# 'a' * 10**6, +# '"a" * 10**6'), + + # RFC 3174: Section 7.3, "TEST4" (multiple of 512 bits) + ('dea356a2cddd90c7a7ecedc5ebb563934f460452', + '01234567' * 80, + '"01234567" * 80'), +] + +def get_tests(config={}): + from Crypto.Hash import SHA + from .common import make_hash_tests + return make_hash_tests(SHA, "SHA", test_data, + digest_size=20, + oid="\x06\x05\x2B\x0E\x03\x02\x1A") + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA224.py b/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA224.py new file mode 100644 index 0000000..6cc08ee --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA224.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/test_SHA224.py: Self-test for the SHA-224 hash function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Hash.SHA224""" + +__revision__ = "$Id$" + +# Test vectors from various sources +# This is a list of (expected_result, input[, description]) tuples. 
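+# Illustrative sketch (not part of the test suite): where the platform hashlib
+# module provides sha224, it should agree with Crypto.Hash.SHA224 on the
+# vectors below.  This cross-check is an assumed example, not something the
+# self-test framework itself performs.
+def _example_crosscheck_sha224_abc():
+    import hashlib
+    from Crypto.Hash import SHA224
+    from Crypto.Util.py3compat import b
+    return SHA224.new(b('abc')).hexdigest() == hashlib.sha224(b('abc')).hexdigest()
+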
+test_data = [ + + # RFC 3874: Section 3.1, "Test Vector #1 + ('23097d223405d8228642a477bda255b32aadbce4bda0b3f7e36c9da7', 'abc'), + + # RFC 3874: Section 3.2, "Test Vector #2 + ('75388b16512776cc5dba5da1fd890150b0c6455cb4f58b1952522525', 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq'), + + # RFC 3874: Section 3.3, "Test Vector #3 + ('20794655980c91d8bbb4c1ea97618a4bf03f42581948b2ee4ee7ad67', 'a' * 10**6, "'a' * 10**6"), + + # Examples from http://de.wikipedia.org/wiki/Secure_Hash_Algorithm + ('d14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f', ''), + + ('49b08defa65e644cbf8a2dd9270bdededabc741997d1dadd42026d7b', + 'Franz jagt im komplett verwahrlosten Taxi quer durch Bayern'), + + ('58911e7fccf2971a7d07f93162d8bd13568e71aa8fc86fc1fe9043d1', + 'Frank jagt im komplett verwahrlosten Taxi quer durch Bayern'), + +] + +def get_tests(config={}): + from Crypto.Hash import SHA224 + from .common import make_hash_tests + return make_hash_tests(SHA224, "SHA224", test_data, + digest_size=28, + oid='\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x04') + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA256.py b/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA256.py new file mode 100644 index 0000000..4f47cce --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA256.py @@ -0,0 +1,96 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/test_SHA256.py: Self-test for the SHA-256 hash function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Hash.SHA256""" + +__revision__ = "$Id$" + +import unittest +from Crypto.Util.py3compat import * + +class LargeSHA256Test(unittest.TestCase): + def runTest(self): + """SHA256: 512/520 MiB test""" + from Crypto.Hash import SHA256 + zeros = bchr(0x00) * (1024*1024) + + h = SHA256.new(zeros) + for i in range(511): + h.update(zeros) + + # This test vector is from PyCrypto's old testdata.py file. + self.assertEqual('9acca8e8c22201155389f65abbf6bc9723edc7384ead80503839f49dcc56d767', h.hexdigest()) # 512 MiB + + for i in range(8): + h.update(zeros) + + # This test vector is from PyCrypto's old testdata.py file. + self.assertEqual('abf51ad954b246009dfe5a50ecd582fd5b8f1b8b27f30393853c3ef721e7fa6e', h.hexdigest()) # 520 MiB + +def get_tests(config={}): + # Test vectors from FIPS PUB 180-2 + # This is a list of (expected_result, input[, description]) tuples. 
+ test_data = [ + # FIPS PUB 180-2, B.1 - "One-Block Message" + ('ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad', + 'abc'), + + # FIPS PUB 180-2, B.2 - "Multi-Block Message" + ('248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1', + 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq'), + + # FIPS PUB 180-2, B.3 - "Long Message" + ('cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0', + 'a' * 10**6, + '"a" * 10**6'), + + # Test for an old PyCrypto bug. + ('f7fd017a3c721ce7ff03f3552c0813adcc48b7f33f07e5e2ba71e23ea393d103', + 'This message is precisely 55 bytes long, to test a bug.', + 'Length = 55 (mod 64)'), + + # Example from http://de.wikipedia.org/wiki/Secure_Hash_Algorithm + ('e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855', ''), + + ('d32b568cd1b96d459e7291ebf4b25d007f275c9f13149beeb782fac0716613f8', + 'Franz jagt im komplett verwahrlosten Taxi quer durch Bayern'), + ] + + from Crypto.Hash import SHA256 + from .common import make_hash_tests + tests = make_hash_tests(SHA256, "SHA256", test_data, + digest_size=32, + oid="\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x01") + + if config.get('slow_tests'): + tests += [LargeSHA256Test()] + + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA384.py b/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA384.py new file mode 100644 index 0000000..862d6d5 --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA384.py @@ -0,0 +1,63 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/test_SHA.py: Self-test for the SHA-384 hash function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Hash.SHA384""" + +__revision__ = "$Id$" + +# Test vectors from various sources +# This is a list of (expected_result, input[, description]) tuples. 
+test_data = [ + + # RFC 4634: Section Page 8.4, "Test 1" + ('cb00753f45a35e8bb5a03d699ac65007272c32ab0eded1631a8b605a43ff5bed8086072ba1e7cc2358baeca134c825a7', 'abc'), + + # RFC 4634: Section Page 8.4, "Test 2.2" + ('09330c33f71147e83d192fc782cd1b4753111b173b3b05d22fa08086e3b0f712fcc7c71a557e2db966c3e9fa91746039', 'abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu'), + + # RFC 4634: Section Page 8.4, "Test 3" + ('9d0e1809716474cb086e834e310a4a1ced149e9c00f248527972cec5704c2a5b07b8b3dc38ecc4ebae97ddd87f3d8985', 'a' * 10**6, "'a' * 10**6"), + + # Taken from http://de.wikipedia.org/wiki/Secure_Hash_Algorithm + ('38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b', ''), + + # Example from http://de.wikipedia.org/wiki/Secure_Hash_Algorithm + ('71e8383a4cea32d6fd6877495db2ee353542f46fa44bc23100bca48f3366b84e809f0708e81041f427c6d5219a286677', + 'Franz jagt im komplett verwahrlosten Taxi quer durch Bayern'), + +] + +def get_tests(config={}): + from Crypto.Hash import SHA384 + from .common import make_hash_tests + return make_hash_tests(SHA384, "SHA384", test_data, + digest_size=48, + oid='\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x02') + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA512.py b/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA512.py new file mode 100644 index 0000000..473cfa3 --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Hash/test_SHA512.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Hash/test_SHA512.py: Self-test for the SHA-512 hash function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Hash.SHA512""" + +__revision__ = "$Id$" + +# Test vectors from various sources +# This is a list of (expected_result, input[, description]) tuples. 
+test_data = [ + + # RFC 4634: Section Page 8.4, "Test 1" + ('ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f', 'abc'), + + # RFC 4634: Section Page 8.4, "Test 2.1" + ('8e959b75dae313da8cf4f72814fc143f8f7779c6eb9f7fa17299aeadb6889018501d289e4900f7e4331b99dec4b5433ac7d329eeb6dd26545e96e55b874be909', 'abcdefghbcdefghicdefghijdefghijkefghijklfghijklmghijklmnhijklmnoijklmnopjklmnopqklmnopqrlmnopqrsmnopqrstnopqrstu'), + + # RFC 4634: Section Page 8.4, "Test 3" + ('e718483d0ce769644e2e42c7bc15b4638e1f98b13b2044285632a803afa973ebde0ff244877ea60a4cb0432ce577c31beb009c5c2c49aa2e4eadb217ad8cc09b', 'a' * 10**6, "'a' * 10**6"), + + # Taken from http://de.wikipedia.org/wiki/Secure_Hash_Algorithm + ('cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e', ''), + + ('af9ed2de700433b803240a552b41b5a472a6ef3fe1431a722b2063c75e9f07451f67a28e37d09cde769424c96aea6f8971389db9e1993d6c565c3c71b855723c', 'Franz jagt im komplett verwahrlosten Taxi quer durch Bayern'), +] + +def get_tests(config={}): + from Crypto.Hash import SHA512 + from .common import make_hash_tests + return make_hash_tests(SHA512, "SHA512", test_data, + digest_size=64, + oid="\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x03") + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Protocol/__init__.py b/Lib/site-packages/Crypto/SelfTest/Protocol/__init__.py new file mode 100644 index 0000000..a62c670 --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Protocol/__init__.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Protocol/__init__.py: Self-tests for Crypto.Protocol +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-test for Crypto.Protocol""" + +__revision__ = "$Id$" + +def get_tests(config={}): + tests = [] + from Crypto.SelfTest.Protocol import test_chaffing; tests += test_chaffing.get_tests(config=config) + from Crypto.SelfTest.Protocol import test_rfc1751; tests += test_rfc1751.get_tests(config=config) + from Crypto.SelfTest.Protocol import test_AllOrNothing; tests += test_AllOrNothing.get_tests(config=config) + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Protocol/test_AllOrNothing.py b/Lib/site-packages/Crypto/SelfTest/Protocol/test_AllOrNothing.py new file mode 100644 index 0000000..a211eab --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Protocol/test_AllOrNothing.py @@ -0,0 +1,76 @@ +# +# Test script for Crypto.Protocol.AllOrNothing +# +# Part of the Python Cryptography Toolkit +# +# Written by Andrew Kuchling and others +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +__revision__ = "$Id$" + +import unittest +from Crypto.Protocol import AllOrNothing +from Crypto.Util.py3compat import * + +text = b("""\ +When in the Course of human events, it becomes necessary for one people to +dissolve the political bands which have connected them with another, and to +assume among the powers of the earth, the separate and equal station to which +the Laws of Nature and of Nature's God entitle them, a decent respect to the +opinions of mankind requires that they should declare the causes which impel +them to the separation. + +We hold these truths to be self-evident, that all men are created equal, that +they are endowed by their Creator with certain unalienable Rights, that among +these are Life, Liberty, and the pursuit of Happiness. That to secure these +rights, Governments are instituted among Men, deriving their just powers from +the consent of the governed. That whenever any Form of Government becomes +destructive of these ends, it is the Right of the People to alter or to +abolish it, and to institute new Government, laying its foundation on such +principles and organizing its powers in such form, as to them shall seem most +likely to effect their Safety and Happiness. +""") + +class AllOrNothingTest (unittest.TestCase): + + def runTest(self): + "Simple test of AllOrNothing" + + from Crypto.Cipher import AES + import base64 + + # The current AllOrNothing will fail + # every so often. 
Repeat the test + # several times to force this. + for i in range(50): + x = AllOrNothing.AllOrNothing(AES) + + msgblocks = x.digest(text) + + # get a new undigest-only object so there's no leakage + y = AllOrNothing.AllOrNothing(AES) + text2 = y.undigest(msgblocks) + self.assertEqual(text, text2) + +def get_tests(config={}): + return [AllOrNothingTest()] + +if __name__ == "__main__": + unittest.main() diff --git a/Lib/site-packages/Crypto/SelfTest/Protocol/test_KDF.py b/Lib/site-packages/Crypto/SelfTest/Protocol/test_KDF.py new file mode 100644 index 0000000..ac45e10 --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Protocol/test_KDF.py @@ -0,0 +1,98 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Protocol/test_KDF.py: Self-test for key derivation functions +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +__revision__ = "$Id$" + +import unittest +from binascii import unhexlify + +from Crypto.SelfTest.st_common import list_test_cases +from Crypto.Hash import SHA as SHA1,HMAC + +from Crypto.Protocol.KDF import * + +def t2b(t): return unhexlify(b(t)) + +class PBKDF1_Tests(unittest.TestCase): + + # List of tuples with test data. + # Each tuple is made up by: + # Item #0: a pass phrase + # Item #1: salt (8 bytes encoded in hex) + # Item #2: output key length + # Item #3: iterations to use + # Item #4: expected result (encoded in hex) + _testData = ( + # From http://www.di-mgt.com.au/cryptoKDFs.html#examplespbkdf + ("password","78578E5A5D63CB06",16,1000,"DC19847E05C64D2FAF10EBFB4A3D2A20"), + ) + + def test1(self): + v = self._testData[0] + res = PBKDF1(v[0], t2b(v[1]), v[2], v[3], SHA1) + self.assertEqual(res, t2b(v[4])) + +class PBKDF2_Tests(unittest.TestCase): + + # List of tuples with test data. 
+ # Each tuple is made up by: + # Item #0: a pass phrase + # Item #1: salt (encoded in hex) + # Item #2: output key length + # Item #3: iterations to use + # Item #4: expected result (encoded in hex) + _testData = ( + # From http://www.di-mgt.com.au/cryptoKDFs.html#examplespbkdf + ("password","78578E5A5D63CB06",24,2048,"BFDE6BE94DF7E11DD409BCE20A0255EC327CB936FFE93643"), + # From RFC 6050 + ("password","73616c74", 20, 1, "0c60c80f961f0e71f3a9b524af6012062fe037a6"), + ("password","73616c74", 20, 2, "ea6c014dc72d6f8ccd1ed92ace1d41f0d8de8957"), + ("password","73616c74", 20, 4096, "4b007901b765489abead49d926f721d065a429c1"), + ("passwordPASSWORDpassword","73616c7453414c5473616c7453414c5473616c7453414c5473616c7453414c5473616c74", + 25, 4096, "3d2eec4fe41c849b80c8d83662c0e44a8b291a964cf2f07038"), + ( 'pass\x00word',"7361006c74",16,4096, "56fa6aa75548099dcc37d7f03425e0c3"), + ) + + def test1(self): + # Test only for HMAC-SHA1 as PRF + + def prf(p,s): + return HMAC.new(p,s,SHA1).digest() + + for i in range(len(self._testData)): + v = self._testData[i] + res = PBKDF2(v[0], t2b(v[1]), v[2], v[3]) + res2 = PBKDF2(v[0], t2b(v[1]), v[2], v[3], prf) + self.assertEqual(res, t2b(v[4])) + self.assertEqual(res, res2) + +def get_tests(config={}): + tests = [] + tests += list_test_cases(PBKDF1_Tests) + tests += list_test_cases(PBKDF2_Tests) + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 diff --git a/Lib/site-packages/Crypto/SelfTest/Protocol/test_chaffing.py b/Lib/site-packages/Crypto/SelfTest/Protocol/test_chaffing.py new file mode 100644 index 0000000..5fa0120 --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Protocol/test_chaffing.py @@ -0,0 +1,74 @@ +# +# Test script for Crypto.Protocol.Chaffing +# +# Part of the Python Cryptography Toolkit +# +# Written by Andrew Kuchling and others +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +__revision__ = "$Id$" + +import unittest +from Crypto.Protocol import Chaffing + +text = """\ +When in the Course of human events, it becomes necessary for one people to +dissolve the political bands which have connected them with another, and to +assume among the powers of the earth, the separate and equal station to which +the Laws of Nature and of Nature's God entitle them, a decent respect to the +opinions of mankind requires that they should declare the causes which impel +them to the separation. 
+ +We hold these truths to be self-evident, that all men are created equal, that +they are endowed by their Creator with certain unalienable Rights, that among +these are Life, Liberty, and the pursuit of Happiness. That to secure these +rights, Governments are instituted among Men, deriving their just powers from +the consent of the governed. That whenever any Form of Government becomes +destructive of these ends, it is the Right of the People to alter or to +abolish it, and to institute new Government, laying its foundation on such +principles and organizing its powers in such form, as to them shall seem most +likely to effect their Safety and Happiness. +""" + +class ChaffingTest (unittest.TestCase): + + def runTest(self): + "Simple tests of chaffing and winnowing" + # Test constructors + Chaffing.Chaff() + Chaffing.Chaff(0.5, 1) + self.assertRaises(ValueError, Chaffing.Chaff, factor=-1) + self.assertRaises(ValueError, Chaffing.Chaff, blocksper=-1) + + data = [(1, 'data1', 'data1'), (2, 'data2', 'data2')] + c = Chaffing.Chaff(1.0, 1) + c.chaff(data) + chaff = c.chaff(data) + self.assertEqual(len(chaff), 4) + + c = Chaffing.Chaff(0.0, 1) + chaff = c.chaff(data) + self.assertEqual(len(chaff), 2) + +def get_tests(config={}): + return [ChaffingTest()] + +if __name__ == "__main__": + unittest.main() diff --git a/Lib/site-packages/Crypto/SelfTest/Protocol/test_rfc1751.py b/Lib/site-packages/Crypto/SelfTest/Protocol/test_rfc1751.py new file mode 100644 index 0000000..0878cc5 --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Protocol/test_rfc1751.py @@ -0,0 +1,62 @@ +# +# Test script for Crypto.Util.RFC1751. +# +# Part of the Python Cryptography Toolkit +# +# Written by Andrew Kuchling and others +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +__revision__ = "$Id$" + +import binascii +import unittest +from Crypto.Util import RFC1751 +from Crypto.Util.py3compat import * + +test_data = [('EB33F77EE73D4053', 'TIDE ITCH SLOW REIN RULE MOT'), + ('CCAC2AED591056BE4F90FD441C534766', + 'RASH BUSH MILK LOOK BAD BRIM AVID GAFF BAIT ROT POD LOVE'), + ('EFF81F9BFBC65350920CDD7416DE8009', + 'TROD MUTE TAIL WARM CHAR KONG HAAG CITY BORE O TEAL AWL') + ] + +class RFC1751Test_k2e (unittest.TestCase): + + def runTest (self): + "Check converting keys to English" + for key, words in test_data: + key=binascii.a2b_hex(b(key)) + self.assertEqual(RFC1751.key_to_english(key), words) + +class RFC1751Test_e2k (unittest.TestCase): + + def runTest (self): + "Check converting English strings to keys" + for key, words in test_data: + key=binascii.a2b_hex(b(key)) + self.assertEqual(RFC1751.english_to_key(words), key) + +# class RFC1751Test + +def get_tests(config={}): + return [RFC1751Test_k2e(), RFC1751Test_e2k()] + +if __name__ == "__main__": + unittest.main() diff --git a/Lib/site-packages/Crypto/SelfTest/PublicKey/__init__.py b/Lib/site-packages/Crypto/SelfTest/PublicKey/__init__.py new file mode 100644 index 0000000..61ba53f --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/PublicKey/__init__.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/PublicKey/__init__.py: Self-test for public key crypto +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-test for public-key crypto""" + +__revision__ = "$Id$" + +import os + +def get_tests(config={}): + tests = [] + from Crypto.SelfTest.PublicKey import test_DSA; tests += test_DSA.get_tests(config=config) + from Crypto.SelfTest.PublicKey import test_RSA; tests += test_RSA.get_tests(config=config) + from Crypto.SelfTest.PublicKey import test_importKey; tests += test_importKey.get_tests(config=config) + from Crypto.SelfTest.PublicKey import test_ElGamal; tests += test_ElGamal.get_tests(config=config) + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/PublicKey/test_DSA.py b/Lib/site-packages/Crypto/SelfTest/PublicKey/test_DSA.py new file mode 100644 index 0000000..b05f69a --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/PublicKey/test_DSA.py @@ -0,0 +1,244 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/PublicKey/test_DSA.py: Self-test for the DSA primitive +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.PublicKey.DSA""" + +__revision__ = "$Id$" + +import sys +import os +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * +from Crypto.Util.py3compat import * + +import unittest +from Crypto.SelfTest.st_common import list_test_cases, a2b_hex, b2a_hex + +def _sws(s): + """Remove whitespace from a text or byte string""" + if isinstance(s,str): + return "".join(s.split()) + else: + return b("").join(s.split()) + +class DSATest(unittest.TestCase): + # Test vector from "Appendix 5. Example of the DSA" of + # "Digital Signature Standard (DSS)", + # U.S. Department of Commerce/National Institute of Standards and Technology + # FIPS 186-2 (+Change Notice), 2000 January 27. 
+ # http://csrc.nist.gov/publications/fips/fips186-2/fips186-2-change1.pdf + + y = _sws("""19131871 d75b1612 a819f29d 78d1b0d7 346f7aa7 7bb62a85 + 9bfd6c56 75da9d21 2d3a36ef 1672ef66 0b8c7c25 5cc0ec74 + 858fba33 f44c0669 9630a76b 030ee333""") + + g = _sws("""626d0278 39ea0a13 413163a5 5b4cb500 299d5522 956cefcb + 3bff10f3 99ce2c2e 71cb9de5 fa24babf 58e5b795 21925c9c + c42e9f6f 464b088c c572af53 e6d78802""") + + p = _sws("""8df2a494 492276aa 3d25759b b06869cb eac0d83a fb8d0cf7 + cbb8324f 0d7882e5 d0762fc5 b7210eaf c2e9adac 32ab7aac + 49693dfb f83724c2 ec0736ee 31c80291""") + + q = _sws("""c773218c 737ec8ee 993b4f2d ed30f48e dace915f""") + + x = _sws("""2070b322 3dba372f de1c0ffc 7b2e3b49 8b260614""") + + k = _sws("""358dad57 1462710f 50e254cf 1a376b2b deaadfbf""") + k_inverse = _sws("""0d516729 8202e49b 4116ac10 4fc3f415 ae52f917""") + m = b2a_hex(b("abc")) + m_hash = _sws("""a9993e36 4706816a ba3e2571 7850c26c 9cd0d89d""") + r = _sws("""8bac1ab6 6410435c b7181f95 b16ab97c 92b341c0""") + s = _sws("""41e2345f 1f56df24 58f426d1 55b4ba2d b6dcd8c8""") + + def setUp(self): + global DSA, Random, bytes_to_long, size + from Crypto.PublicKey import DSA + from Crypto import Random + from Crypto.Util.number import bytes_to_long, inverse, size + + self.dsa = DSA + + def test_generate_1arg(self): + """DSA (default implementation) generated key (1 argument)""" + dsaObj = self.dsa.generate(1024) + self._check_private_key(dsaObj) + pub = dsaObj.publickey() + self._check_public_key(pub) + + def test_generate_2arg(self): + """DSA (default implementation) generated key (2 arguments)""" + dsaObj = self.dsa.generate(1024, Random.new().read) + self._check_private_key(dsaObj) + pub = dsaObj.publickey() + self._check_public_key(pub) + + def test_construct_4tuple(self): + """DSA (default implementation) constructed key (4-tuple)""" + (y, g, p, q) = [bytes_to_long(a2b_hex(param)) for param in (self.y, self.g, self.p, self.q)] + dsaObj = self.dsa.construct((y, g, p, q)) + self._test_verification(dsaObj) + + def test_construct_5tuple(self): + """DSA (default implementation) constructed key (5-tuple)""" + (y, g, p, q, x) = [bytes_to_long(a2b_hex(param)) for param in (self.y, self.g, self.p, self.q, self.x)] + dsaObj = self.dsa.construct((y, g, p, q, x)) + self._test_signing(dsaObj) + self._test_verification(dsaObj) + + def _check_private_key(self, dsaObj): + # Check capabilities + self.assertEqual(1, dsaObj.has_private()) + self.assertEqual(1, dsaObj.can_sign()) + self.assertEqual(0, dsaObj.can_encrypt()) + self.assertEqual(0, dsaObj.can_blind()) + + # Check dsaObj.[ygpqx] -> dsaObj.key.[ygpqx] mapping + self.assertEqual(dsaObj.y, dsaObj.key.y) + self.assertEqual(dsaObj.g, dsaObj.key.g) + self.assertEqual(dsaObj.p, dsaObj.key.p) + self.assertEqual(dsaObj.q, dsaObj.key.q) + self.assertEqual(dsaObj.x, dsaObj.key.x) + + # Sanity check key data + self.assertEqual(1, dsaObj.p > dsaObj.q) # p > q + self.assertEqual(160, size(dsaObj.q)) # size(q) == 160 bits + self.assertEqual(0, (dsaObj.p - 1) % dsaObj.q) # q is a divisor of p-1 + self.assertEqual(dsaObj.y, pow(dsaObj.g, dsaObj.x, dsaObj.p)) # y == g**x mod p + self.assertEqual(1, 0 < dsaObj.x < dsaObj.q) # 0 < x < q + + def _check_public_key(self, dsaObj): + k = a2b_hex(self.k) + m_hash = a2b_hex(self.m_hash) + + # Check capabilities + self.assertEqual(0, dsaObj.has_private()) + self.assertEqual(1, dsaObj.can_sign()) + self.assertEqual(0, dsaObj.can_encrypt()) + self.assertEqual(0, dsaObj.can_blind()) + + # Check dsaObj.[ygpq] -> dsaObj.key.[ygpq] mapping + 
self.assertEqual(dsaObj.y, dsaObj.key.y) + self.assertEqual(dsaObj.g, dsaObj.key.g) + self.assertEqual(dsaObj.p, dsaObj.key.p) + self.assertEqual(dsaObj.q, dsaObj.key.q) + + # Check that private parameters are all missing + self.assertEqual(0, hasattr(dsaObj, 'x')) + self.assertEqual(0, hasattr(dsaObj.key, 'x')) + + # Sanity check key data + self.assertEqual(1, dsaObj.p > dsaObj.q) # p > q + self.assertEqual(160, size(dsaObj.q)) # size(q) == 160 bits + self.assertEqual(0, (dsaObj.p - 1) % dsaObj.q) # q is a divisor of p-1 + + # Public-only key objects should raise an error when .sign() is called + self.assertRaises(TypeError, dsaObj.sign, m_hash, k) + + # Check __eq__ and __ne__ + self.assertEqual(dsaObj.publickey() == dsaObj.publickey(),True) # assert_ + self.assertEqual(dsaObj.publickey() != dsaObj.publickey(),False) # failIf + + def _test_signing(self, dsaObj): + k = a2b_hex(self.k) + m_hash = a2b_hex(self.m_hash) + r = bytes_to_long(a2b_hex(self.r)) + s = bytes_to_long(a2b_hex(self.s)) + (r_out, s_out) = dsaObj.sign(m_hash, k) + self.assertEqual((r, s), (r_out, s_out)) + + def _test_verification(self, dsaObj): + m_hash = a2b_hex(self.m_hash) + r = bytes_to_long(a2b_hex(self.r)) + s = bytes_to_long(a2b_hex(self.s)) + self.assertEqual(1, dsaObj.verify(m_hash, (r, s))) + self.assertEqual(0, dsaObj.verify(m_hash + b("\0"), (r, s))) + +class DSAFastMathTest(DSATest): + def setUp(self): + DSATest.setUp(self) + self.dsa = DSA.DSAImplementation(use_fast_math=True) + + def test_generate_1arg(self): + """DSA (_fastmath implementation) generated key (1 argument)""" + DSATest.test_generate_1arg(self) + + def test_generate_2arg(self): + """DSA (_fastmath implementation) generated key (2 arguments)""" + DSATest.test_generate_2arg(self) + + def test_construct_4tuple(self): + """DSA (_fastmath implementation) constructed key (4-tuple)""" + DSATest.test_construct_4tuple(self) + + def test_construct_5tuple(self): + """DSA (_fastmath implementation) constructed key (5-tuple)""" + DSATest.test_construct_5tuple(self) + +class DSASlowMathTest(DSATest): + def setUp(self): + DSATest.setUp(self) + self.dsa = DSA.DSAImplementation(use_fast_math=False) + + def test_generate_1arg(self): + """DSA (_slowmath implementation) generated key (1 argument)""" + DSATest.test_generate_1arg(self) + + def test_generate_2arg(self): + """DSA (_slowmath implementation) generated key (2 arguments)""" + DSATest.test_generate_2arg(self) + + def test_construct_4tuple(self): + """DSA (_slowmath implementation) constructed key (4-tuple)""" + DSATest.test_construct_4tuple(self) + + def test_construct_5tuple(self): + """DSA (_slowmath implementation) constructed key (5-tuple)""" + DSATest.test_construct_5tuple(self) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(DSATest) + try: + from Crypto.PublicKey import _fastmath + tests += list_test_cases(DSAFastMathTest) + except ImportError: + from distutils.sysconfig import get_config_var + import inspect + _fm_path = os.path.normpath(os.path.dirname(os.path.abspath( + inspect.getfile(inspect.currentframe()))) + +"/../../PublicKey/_fastmath"+get_config_var("SO")) + if os.path.exists(_fm_path): + raise ImportError("While the _fastmath module exists, importing "+ + "it failed. This may point to the gmp or mpir shared library "+ + "not being in the path. 
_fastmath was found at "+_fm_path) + tests += list_test_cases(DSASlowMathTest) + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/PublicKey/test_ElGamal.py b/Lib/site-packages/Crypto/SelfTest/PublicKey/test_ElGamal.py new file mode 100644 index 0000000..2e5c73e --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/PublicKey/test_ElGamal.py @@ -0,0 +1,210 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/PublicKey/test_ElGamal.py: Self-test for the ElGamal primitive +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.PublicKey.ElGamal""" + +__revision__ = "$Id$" + +import unittest +from Crypto.SelfTest.st_common import list_test_cases, a2b_hex, b2a_hex +from Crypto import Random +from Crypto.PublicKey import ElGamal +from Crypto.Util.number import * +from Crypto.Util.py3compat import * + +class ElGamalTest(unittest.TestCase): + + # + # Test vectors + # + # There seem to be no real ElGamal test vectors available in the + # public domain. The following test vectors have been generated + # with libgcrypt 1.5.0. 
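# Illustrative sketch (not in the patched file): the libgcrypt-generated vectors
# below should be consistent with the textbook ElGamal relations
#     y = g**x mod p,   ct1 = g**k mod p,   ct2 = m * y**k mod p.
# check_elgamal_vector is a hypothetical helper; the tests themselves exercise
# ElGamal.construct(), encrypt(), decrypt(), sign() and verify().
def check_elgamal_vector(tv):
    p, g, y, x, k = (int(tv[c], 16) for c in ('p', 'g', 'y', 'x', 'k'))
    m, ct1, ct2 = (int(tv[c], 16) for c in ('pt', 'ct1', 'ct2'))
    assert pow(g, x, p) == y                  # public key
    assert pow(g, k, p) == ct1                # first ciphertext half
    assert (m * pow(y, k, p)) % p == ct2      # second ciphertext half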
+ # + # Encryption + tve=[ + { + # 256 bits + 'p' :'BA4CAEAAED8CBE952AFD2126C63EB3B345D65C2A0A73D2A3AD4138B6D09BD933', + 'g' :'05', + 'y' :'60D063600ECED7C7C55146020E7A31C4476E9793BEAED420FEC9E77604CAE4EF', + 'x' :'1D391BA2EE3C37FE1BA175A69B2C73A11238AD77675932', + 'k' :'F5893C5BAB4131264066F57AB3D8AD89E391A0B68A68A1', + 'pt' :'48656C6C6F207468657265', + 'ct1':'32BFD5F487966CEA9E9356715788C491EC515E4ED48B58F0F00971E93AAA5EC7', + 'ct2':'7BE8FBFF317C93E82FCEF9BD515284BA506603FEA25D01C0CB874A31F315EE68' + }, + + { + # 512 bits + 'p' :'F1B18AE9F7B4E08FDA9A04832F4E919D89462FD31BF12F92791A93519F75076D6CE3942689CDFF2F344CAFF0F82D01864F69F3AECF566C774CBACF728B81A227', + 'g' :'07', + 'y' :'688628C676E4F05D630E1BE39D0066178CA7AA83836B645DE5ADD359B4825A12B02EF4252E4E6FA9BEC1DB0BE90F6D7C8629CABB6E531F472B2664868156E20C', + 'x' :'14E60B1BDFD33436C0DA8A22FDC14A2CCDBBED0627CE68', + 'k' :'38DBF14E1F319BDA9BAB33EEEADCAF6B2EA5250577ACE7', + 'pt' :'48656C6C6F207468657265', + 'ct1':'290F8530C2CC312EC46178724F196F308AD4C523CEABB001FACB0506BFED676083FE0F27AC688B5C749AB3CB8A80CD6F7094DBA421FB19442F5A413E06A9772B', + 'ct2':'1D69AAAD1DC50493FB1B8E8721D621D683F3BF1321BE21BC4A43E11B40C9D4D9C80DE3AAC2AB60D31782B16B61112E68220889D53C4C3136EE6F6CE61F8A23A0' + } + ] + + # Signature + tvs=[ + { + # 256 bits + 'p' :'D2F3C41EA66530838A704A48FFAC9334F4701ECE3A97CEE4C69DD01AE7129DD7', + 'g' :'05', + 'y' :'C3F9417DC0DAFEA6A05C1D2333B7A95E63B3F4F28CC962254B3256984D1012E7', + 'x' :'165E4A39BE44D5A2D8B1332D416BC559616F536BC735BB', + 'k' :'C7F0C794A7EAD726E25A47FF8928013680E73C51DD3D7D99BFDA8F492585928F', + 'h' :'48656C6C6F207468657265', + 'sig1':'35CA98133779E2073EF31165AFCDEB764DD54E96ADE851715495F9C635E1E7C2', + 'sig2':'0135B88B1151279FE5D8078D4FC685EE81177EE9802AB123A73925FC1CB059A7', + }, + { + # 512 bits + 'p' :'E24CF3A4B8A6AF749DCA6D714282FE4AABEEE44A53BB6ED15FBE32B5D3C3EF9CC4124A2ECA331F3C1C1B667ACA3766825217E7B5F9856648D95F05330C6A19CF', + 'g' :'0B', + 'y' :'2AD3A1049CA5D4ED207B2431C79A8719BB4073D4A94E450EA6CEE8A760EB07ADB67C0D52C275EE85D7B52789061EE45F2F37D9B2AE522A51C28329766BFE68AC', + 'x' :'16CBB4F46D9ECCF24FF9F7E63CAA3BD8936341555062AB', + 'k' :'8A3D89A4E429FD2476D7D717251FB79BF900FFE77444E6BB8299DC3F84D0DD57ABAB50732AE158EA52F5B9E7D8813E81FD9F79470AE22F8F1CF9AEC820A78C69', + 'h' :'48656C6C6F207468657265', + 'sig1':'BE001AABAFFF976EC9016198FBFEA14CBEF96B000CCC0063D3324016F9E91FE80D8F9325812ED24DDB2B4D4CF4430B169880B3CE88313B53255BD4EC0378586F', + 'sig2':'5E266F3F837BA204E3BBB6DBECC0611429D96F8C7CE8F4EFDF9D4CB681C2A954468A357BF4242CEC7418B51DFC081BCD21299EF5B5A0DDEF3A139A1817503DDE', + } + ] + + def test_generate_128(self): + self._test_random_key(128) + + def test_generate_512(self): + self._test_random_key(512) + + def test_encryption(self): + for tv in self.tve: + for as_longs in (0,1): + d = self.convert_tv(tv, as_longs) + key = ElGamal.construct(d['key']) + ct = key.encrypt(d['pt'], d['k']) + self.assertEqual(ct[0], d['ct1']) + self.assertEqual(ct[1], d['ct2']) + + def test_decryption(self): + for tv in self.tve: + for as_longs in (0,1): + d = self.convert_tv(tv, as_longs) + key = ElGamal.construct(d['key']) + pt = key.decrypt((d['ct1'], d['ct2'])) + self.assertEqual(pt, d['pt']) + + def test_signing(self): + for tv in self.tvs: + for as_longs in (0,1): + d = self.convert_tv(tv, as_longs) + key = ElGamal.construct(d['key']) + sig1, sig2 = key.sign(d['h'], d['k']) + self.assertEqual(sig1, d['sig1']) + self.assertEqual(sig2, d['sig2']) + + def test_verification(self): + for tv in self.tvs: + for as_longs in (0,1): 
+ d = self.convert_tv(tv, as_longs) + key = ElGamal.construct(d['key']) + # Positive test + res = key.verify( d['h'], (d['sig1'],d['sig2']) ) + self.assertTrue(res) + # Negative test + res = key.verify( d['h'], (d['sig1']+1,d['sig2']) ) + self.assertFalse(res) + + def convert_tv(self, tv, as_longs=0): + """Convert a test vector from textual form (hexadecimal ascii + to either integers or byte strings.""" + key_comps = 'p','g','y','x' + tv2 = {} + for c in list(tv.keys()): + tv2[c] = a2b_hex(tv[c]) + if as_longs or c in key_comps or c in ('sig1','sig2'): + tv2[c] = bytes_to_long(tv2[c]) + tv2['key']=[] + for c in key_comps: + tv2['key'] += [tv2[c]] + del tv2[c] + return tv2 + + def _test_random_key(self, bits): + elgObj = ElGamal.generate(bits, Random.new().read) + self._check_private_key(elgObj) + self._exercise_primitive(elgObj) + pub = elgObj.publickey() + self._check_public_key(pub) + self._exercise_public_primitive(elgObj) + + def _check_private_key(self, elgObj): + + # Check capabilities + self.assertTrue(elgObj.has_private()) + self.assertTrue(elgObj.can_sign()) + self.assertTrue(elgObj.can_encrypt()) + + # Sanity check key data + self.assertTrue(1 +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.PublicKey.RSA""" + +__revision__ = "$Id$" + +import sys +import os +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * +from Crypto.Util.py3compat import * + +import unittest +from Crypto.SelfTest.st_common import list_test_cases, a2b_hex, b2a_hex + +class RSATest(unittest.TestCase): + # Test vectors from "RSA-OAEP and RSA-PSS test vectors (.zip file)" + # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip + # See RSADSI's PKCS#1 page at + # http://www.rsa.com/rsalabs/node.asp?id=2125 + + # from oaep-int.txt + + # TODO: PyCrypto treats the message as starting *after* the leading "00" + # TODO: That behaviour should probably be changed in the future. 
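# Illustrative sketch (not in the patched file): dropping the leading 00 octet of
# the encoded message does not change its integer value, so the oaep-int.txt
# vectors below should still satisfy the raw relation  c = m**e mod n.
# a2i and check_raw_rsa are hypothetical helpers; the tests themselves use the
# whitespace-tolerant a2b_hex from Crypto.SelfTest.st_common.
def a2i(hex_with_whitespace):
    return int("".join(hex_with_whitespace.split()), 16)

def check_raw_rsa(plaintext_hex, ciphertext_hex, modulus_hex, e):
    m, c, n = (a2i(h) for h in (plaintext_hex, ciphertext_hex, modulus_hex))
    return pow(m, e, n) == c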
+ plaintext = """ + eb 7a 19 ac e9 e3 00 63 50 e3 29 50 4b 45 e2 + ca 82 31 0b 26 dc d8 7d 5c 68 f1 ee a8 f5 52 67 + c3 1b 2e 8b b4 25 1f 84 d7 e0 b2 c0 46 26 f5 af + f9 3e dc fb 25 c9 c2 b3 ff 8a e1 0e 83 9a 2d db + 4c dc fe 4f f4 77 28 b4 a1 b7 c1 36 2b aa d2 9a + b4 8d 28 69 d5 02 41 21 43 58 11 59 1b e3 92 f9 + 82 fb 3e 87 d0 95 ae b4 04 48 db 97 2f 3a c1 4f + 7b c2 75 19 52 81 ce 32 d2 f1 b7 6d 4d 35 3e 2d + """ + + ciphertext = """ + 12 53 e0 4d c0 a5 39 7b b4 4a 7a b8 7e 9b f2 a0 + 39 a3 3d 1e 99 6f c8 2a 94 cc d3 00 74 c9 5d f7 + 63 72 20 17 06 9e 52 68 da 5d 1c 0b 4f 87 2c f6 + 53 c1 1d f8 23 14 a6 79 68 df ea e2 8d ef 04 bb + 6d 84 b1 c3 1d 65 4a 19 70 e5 78 3b d6 eb 96 a0 + 24 c2 ca 2f 4a 90 fe 9f 2e f5 c9 c1 40 e5 bb 48 + da 95 36 ad 87 00 c8 4f c9 13 0a de a7 4e 55 8d + 51 a7 4d df 85 d8 b5 0d e9 68 38 d6 06 3e 09 55 + """ + + modulus = """ + bb f8 2f 09 06 82 ce 9c 23 38 ac 2b 9d a8 71 f7 + 36 8d 07 ee d4 10 43 a4 40 d6 b6 f0 74 54 f5 1f + b8 df ba af 03 5c 02 ab 61 ea 48 ce eb 6f cd 48 + 76 ed 52 0d 60 e1 ec 46 19 71 9d 8a 5b 8b 80 7f + af b8 e0 a3 df c7 37 72 3e e6 b4 b7 d9 3a 25 84 + ee 6a 64 9d 06 09 53 74 88 34 b2 45 45 98 39 4e + e0 aa b1 2d 7b 61 a5 1f 52 7a 9a 41 f6 c1 68 7f + e2 53 72 98 ca 2a 8f 59 46 f8 e5 fd 09 1d bd cb + """ + + e = 0x11 # public exponent + + prime_factor = """ + c9 7f b1 f0 27 f4 53 f6 34 12 33 ea aa d1 d9 35 + 3f 6c 42 d0 88 66 b1 d0 5a 0f 20 35 02 8b 9d 86 + 98 40 b4 16 66 b4 2e 92 ea 0d a3 b4 32 04 b5 cf + ce 33 52 52 4d 04 16 a5 a4 41 e7 00 af 46 15 03 + """ + + def setUp(self): + global RSA, Random, bytes_to_long + from Crypto.PublicKey import RSA + from Crypto import Random + from Crypto.Util.number import bytes_to_long, inverse + self.n = bytes_to_long(a2b_hex(self.modulus)) + self.p = bytes_to_long(a2b_hex(self.prime_factor)) + + # Compute q, d, and u from n, e, and p + self.q = divmod(self.n, self.p)[0] + self.d = inverse(self.e, (self.p-1)*(self.q-1)) + self.u = inverse(self.p, self.q) # u = e**-1 (mod q) + + self.rsa = RSA + + def test_generate_1arg(self): + """RSA (default implementation) generated key (1 argument)""" + rsaObj = self.rsa.generate(1024) + self._check_private_key(rsaObj) + self._exercise_primitive(rsaObj) + pub = rsaObj.publickey() + self._check_public_key(pub) + self._exercise_public_primitive(rsaObj) + + def test_generate_2arg(self): + """RSA (default implementation) generated key (2 arguments)""" + rsaObj = self.rsa.generate(1024, Random.new().read) + self._check_private_key(rsaObj) + self._exercise_primitive(rsaObj) + pub = rsaObj.publickey() + self._check_public_key(pub) + self._exercise_public_primitive(rsaObj) + + def test_generate_3args(self): + rsaObj = self.rsa.generate(1024, Random.new().read,e=65537) + self._check_private_key(rsaObj) + self._exercise_primitive(rsaObj) + pub = rsaObj.publickey() + self._check_public_key(pub) + self._exercise_public_primitive(rsaObj) + self.assertEqual(65537,rsaObj.e) + + def test_construct_2tuple(self): + """RSA (default implementation) constructed key (2-tuple)""" + pub = self.rsa.construct((self.n, self.e)) + self._check_public_key(pub) + self._check_encryption(pub) + self._check_verification(pub) + + def test_construct_3tuple(self): + """RSA (default implementation) constructed key (3-tuple)""" + rsaObj = self.rsa.construct((self.n, self.e, self.d)) + self._check_encryption(rsaObj) + self._check_decryption(rsaObj) + self._check_signing(rsaObj) + self._check_verification(rsaObj) + + def test_construct_4tuple(self): + """RSA (default implementation) constructed key 
(4-tuple)""" + rsaObj = self.rsa.construct((self.n, self.e, self.d, self.p)) + self._check_encryption(rsaObj) + self._check_decryption(rsaObj) + self._check_signing(rsaObj) + self._check_verification(rsaObj) + + def test_construct_5tuple(self): + """RSA (default implementation) constructed key (5-tuple)""" + rsaObj = self.rsa.construct((self.n, self.e, self.d, self.p, self.q)) + self._check_private_key(rsaObj) + self._check_encryption(rsaObj) + self._check_decryption(rsaObj) + self._check_signing(rsaObj) + self._check_verification(rsaObj) + + def test_construct_6tuple(self): + """RSA (default implementation) constructed key (6-tuple)""" + rsaObj = self.rsa.construct((self.n, self.e, self.d, self.p, self.q, self.u)) + self._check_private_key(rsaObj) + self._check_encryption(rsaObj) + self._check_decryption(rsaObj) + self._check_signing(rsaObj) + self._check_verification(rsaObj) + + def test_factoring(self): + rsaObj = self.rsa.construct([self.n, self.e, self.d]) + self.assertTrue(rsaObj.p==self.p or rsaObj.p==self.q) + self.assertTrue(rsaObj.q==self.p or rsaObj.q==self.q) + self.assertTrue(rsaObj.q*rsaObj.p == self.n) + + self.assertRaises(ValueError, self.rsa.construct, [self.n, self.e, self.n-1]) + + def _check_private_key(self, rsaObj): + # Check capabilities + self.assertEqual(1, rsaObj.has_private()) + self.assertEqual(1, rsaObj.can_sign()) + self.assertEqual(1, rsaObj.can_encrypt()) + self.assertEqual(1, rsaObj.can_blind()) + + # Check rsaObj.[nedpqu] -> rsaObj.key.[nedpqu] mapping + self.assertEqual(rsaObj.n, rsaObj.key.n) + self.assertEqual(rsaObj.e, rsaObj.key.e) + self.assertEqual(rsaObj.d, rsaObj.key.d) + self.assertEqual(rsaObj.p, rsaObj.key.p) + self.assertEqual(rsaObj.q, rsaObj.key.q) + self.assertEqual(rsaObj.u, rsaObj.key.u) + + # Sanity check key data + self.assertEqual(rsaObj.n, rsaObj.p * rsaObj.q) # n = pq + self.assertEqual(1, rsaObj.d * rsaObj.e % ((rsaObj.p-1) * (rsaObj.q-1))) # ed = 1 (mod (p-1)(q-1)) + self.assertEqual(1, rsaObj.p * rsaObj.u % rsaObj.q) # pu = 1 (mod q) + self.assertEqual(1, rsaObj.p > 1) # p > 1 + self.assertEqual(1, rsaObj.q > 1) # q > 1 + self.assertEqual(1, rsaObj.e > 1) # e > 1 + self.assertEqual(1, rsaObj.d > 1) # d > 1 + + def _check_public_key(self, rsaObj): + ciphertext = a2b_hex(self.ciphertext) + + # Check capabilities + self.assertEqual(0, rsaObj.has_private()) + self.assertEqual(1, rsaObj.can_sign()) + self.assertEqual(1, rsaObj.can_encrypt()) + self.assertEqual(1, rsaObj.can_blind()) + + # Check rsaObj.[ne] -> rsaObj.key.[ne] mapping + self.assertEqual(rsaObj.n, rsaObj.key.n) + self.assertEqual(rsaObj.e, rsaObj.key.e) + + # Check that private parameters are all missing + self.assertEqual(0, hasattr(rsaObj, 'd')) + self.assertEqual(0, hasattr(rsaObj, 'p')) + self.assertEqual(0, hasattr(rsaObj, 'q')) + self.assertEqual(0, hasattr(rsaObj, 'u')) + self.assertEqual(0, hasattr(rsaObj.key, 'd')) + self.assertEqual(0, hasattr(rsaObj.key, 'p')) + self.assertEqual(0, hasattr(rsaObj.key, 'q')) + self.assertEqual(0, hasattr(rsaObj.key, 'u')) + + # Sanity check key data + self.assertEqual(1, rsaObj.e > 1) # e > 1 + + # Public keys should not be able to sign or decrypt + self.assertRaises(TypeError, rsaObj.sign, ciphertext, b("")) + self.assertRaises(TypeError, rsaObj.decrypt, ciphertext) + + # Check __eq__ and __ne__ + self.assertEqual(rsaObj.publickey() == rsaObj.publickey(),True) # assert_ + self.assertEqual(rsaObj.publickey() != rsaObj.publickey(),False) # failIf + + def _exercise_primitive(self, rsaObj): + # Since we're using a 
randomly-generated key, we can't check the test + # vector, but we can make sure encryption and decryption are inverse + # operations. + ciphertext = a2b_hex(self.ciphertext) + + # Test decryption + plaintext = rsaObj.decrypt((ciphertext,)) + + # Test encryption (2 arguments) + (new_ciphertext2,) = rsaObj.encrypt(plaintext, b("")) + self.assertEqual(b2a_hex(ciphertext), b2a_hex(new_ciphertext2)) + + # Test blinded decryption + blinding_factor = Random.new().read(len(ciphertext)-1) + blinded_ctext = rsaObj.blind(ciphertext, blinding_factor) + blinded_ptext = rsaObj.decrypt((blinded_ctext,)) + unblinded_plaintext = rsaObj.unblind(blinded_ptext, blinding_factor) + self.assertEqual(b2a_hex(plaintext), b2a_hex(unblinded_plaintext)) + + # Test signing (2 arguments) + signature2 = rsaObj.sign(ciphertext, b("")) + self.assertEqual((bytes_to_long(plaintext),), signature2) + + # Test verification + self.assertEqual(1, rsaObj.verify(ciphertext, (bytes_to_long(plaintext),))) + + def _exercise_public_primitive(self, rsaObj): + plaintext = a2b_hex(self.plaintext) + + # Test encryption (2 arguments) + (new_ciphertext2,) = rsaObj.encrypt(plaintext, b("")) + + # Exercise verification + rsaObj.verify(new_ciphertext2, (bytes_to_long(plaintext),)) + + def _check_encryption(self, rsaObj): + plaintext = a2b_hex(self.plaintext) + ciphertext = a2b_hex(self.ciphertext) + + # Test encryption (2 arguments) + (new_ciphertext2,) = rsaObj.encrypt(plaintext, b("")) + self.assertEqual(b2a_hex(ciphertext), b2a_hex(new_ciphertext2)) + + def _check_decryption(self, rsaObj): + plaintext = a2b_hex(self.plaintext) + ciphertext = a2b_hex(self.ciphertext) + + # Test plain decryption + new_plaintext = rsaObj.decrypt((ciphertext,)) + self.assertEqual(b2a_hex(plaintext), b2a_hex(new_plaintext)) + + # Test blinded decryption + blinding_factor = Random.new().read(len(ciphertext)-1) + blinded_ctext = rsaObj.blind(ciphertext, blinding_factor) + blinded_ptext = rsaObj.decrypt((blinded_ctext,)) + unblinded_plaintext = rsaObj.unblind(blinded_ptext, blinding_factor) + self.assertEqual(b2a_hex(plaintext), b2a_hex(unblinded_plaintext)) + + def _check_verification(self, rsaObj): + signature = bytes_to_long(a2b_hex(self.plaintext)) + message = a2b_hex(self.ciphertext) + + # Test verification + t = (signature,) # rsaObj.verify expects a tuple + self.assertEqual(1, rsaObj.verify(message, t)) + + # Test verification with overlong tuple (this is a + # backward-compatibility hack to support some harmless misuse of the + # API) + t2 = (signature, '') + self.assertEqual(1, rsaObj.verify(message, t2)) # extra garbage at end of tuple + + def _check_signing(self, rsaObj): + signature = bytes_to_long(a2b_hex(self.plaintext)) + message = a2b_hex(self.ciphertext) + + # Test signing (2 argument) + self.assertEqual((signature,), rsaObj.sign(message, b(""))) + +class RSAFastMathTest(RSATest): + def setUp(self): + RSATest.setUp(self) + self.rsa = RSA.RSAImplementation(use_fast_math=True) + + def test_generate_1arg(self): + """RSA (_fastmath implementation) generated key (1 argument)""" + RSATest.test_generate_1arg(self) + + def test_generate_2arg(self): + """RSA (_fastmath implementation) generated key (2 arguments)""" + RSATest.test_generate_2arg(self) + + def test_construct_2tuple(self): + """RSA (_fastmath implementation) constructed key (2-tuple)""" + RSATest.test_construct_2tuple(self) + + def test_construct_3tuple(self): + """RSA (_fastmath implementation) constructed key (3-tuple)""" + RSATest.test_construct_3tuple(self) + + def 
test_construct_4tuple(self): + """RSA (_fastmath implementation) constructed key (4-tuple)""" + RSATest.test_construct_4tuple(self) + + def test_construct_5tuple(self): + """RSA (_fastmath implementation) constructed key (5-tuple)""" + RSATest.test_construct_5tuple(self) + + def test_construct_6tuple(self): + """RSA (_fastmath implementation) constructed key (6-tuple)""" + RSATest.test_construct_6tuple(self) + + def test_factoring(self): + RSATest.test_factoring(self) + +class RSASlowMathTest(RSATest): + def setUp(self): + RSATest.setUp(self) + self.rsa = RSA.RSAImplementation(use_fast_math=False) + + def test_generate_1arg(self): + """RSA (_slowmath implementation) generated key (1 argument)""" + RSATest.test_generate_1arg(self) + + def test_generate_2arg(self): + """RSA (_slowmath implementation) generated key (2 arguments)""" + RSATest.test_generate_2arg(self) + + def test_construct_2tuple(self): + """RSA (_slowmath implementation) constructed key (2-tuple)""" + RSATest.test_construct_2tuple(self) + + def test_construct_3tuple(self): + """RSA (_slowmath implementation) constructed key (3-tuple)""" + RSATest.test_construct_3tuple(self) + + def test_construct_4tuple(self): + """RSA (_slowmath implementation) constructed key (4-tuple)""" + RSATest.test_construct_4tuple(self) + + def test_construct_5tuple(self): + """RSA (_slowmath implementation) constructed key (5-tuple)""" + RSATest.test_construct_5tuple(self) + + def test_construct_6tuple(self): + """RSA (_slowmath implementation) constructed key (6-tuple)""" + RSATest.test_construct_6tuple(self) + + def test_factoring(self): + RSATest.test_factoring(self) + +def get_tests(config={}): + tests = [] + tests += list_test_cases(RSATest) + try: + from Crypto.PublicKey import _fastmath + tests += list_test_cases(RSAFastMathTest) + except ImportError: + from distutils.sysconfig import get_config_var + import inspect + _fm_path = os.path.normpath(os.path.dirname(os.path.abspath( + inspect.getfile(inspect.currentframe()))) + +"/../../PublicKey/_fastmath"+get_config_var("SO")) + if os.path.exists(_fm_path): + raise ImportError("While the _fastmath module exists, importing "+ + "it failed. This may point to the gmp or mpir shared library "+ + "not being in the path. _fastmath was found at "+_fm_path) + if config.get('slow_tests',1): + tests += list_test_cases(RSASlowMathTest) + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/PublicKey/test_importKey.py b/Lib/site-packages/Crypto/SelfTest/PublicKey/test_importKey.py new file mode 100644 index 0000000..575d031 --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/PublicKey/test_importKey.py @@ -0,0 +1,345 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/PublicKey/test_importKey.py: Self-test for importing RSA keys +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + + + +__revision__ = "$Id$" + +import unittest + +from Crypto.PublicKey import RSA +from Crypto.SelfTest.st_common import * +from Crypto.Util.py3compat import * +from Crypto.Util.number import inverse +from Crypto.Util import asn1 + +def der2pem(der, text='PUBLIC'): + import binascii + chunks = [ binascii.b2a_base64(der[i:i+48]) for i in range(0, len(der), 48) ] + pem = b('-----BEGIN %s KEY-----\n' % text) + pem += b('').join(chunks) + pem += b('-----END %s KEY-----' % text) + return pem + +class ImportKeyTests(unittest.TestCase): + # 512-bit RSA key generated with openssl + rsaKeyPEM = '''-----BEGIN RSA PRIVATE KEY----- +MIIBOwIBAAJBAL8eJ5AKoIsjURpcEoGubZMxLD7+kT+TLr7UkvEtFrRhDDKMtuII +q19FrL4pUIMymPMSLBn3hJLe30Dw48GQM4UCAwEAAQJACUSDEp8RTe32ftq8IwG8 +Wojl5mAd1wFiIOrZ/Uv8b963WJOJiuQcVN29vxU5+My9GPZ7RA3hrDBEAoHUDPrI +OQIhAPIPLz4dphiD9imAkivY31Rc5AfHJiQRA7XixTcjEkojAiEAyh/pJHks/Mlr ++rdPNEpotBjfV4M4BkgGAA/ipcmaAjcCIQCHvhwwKVBLzzTscT2HeUdEeBMoiXXK +JACAr3sJQJGxIQIgarRp+m1WSKV1MciwMaTOnbU7wxFs9DP1pva76lYBzgUCIQC9 +n0CnZCJ6IZYqSt0H5N7+Q+2Ro64nuwV/OSQfM6sBwQ== +-----END RSA PRIVATE KEY-----''' + + # As above, but this is actually an unencrypted PKCS#8 key + rsaKeyPEM8 = '''-----BEGIN PRIVATE KEY----- +MIIBVQIBADANBgkqhkiG9w0BAQEFAASCAT8wggE7AgEAAkEAvx4nkAqgiyNRGlwS +ga5tkzEsPv6RP5MuvtSS8S0WtGEMMoy24girX0WsvilQgzKY8xIsGfeEkt7fQPDj +wZAzhQIDAQABAkAJRIMSnxFN7fZ+2rwjAbxaiOXmYB3XAWIg6tn9S/xv3rdYk4mK +5BxU3b2/FTn4zL0Y9ntEDeGsMEQCgdQM+sg5AiEA8g8vPh2mGIP2KYCSK9jfVFzk +B8cmJBEDteLFNyMSSiMCIQDKH+kkeSz8yWv6t080Smi0GN9XgzgGSAYAD+KlyZoC +NwIhAIe+HDApUEvPNOxxPYd5R0R4EyiJdcokAICvewlAkbEhAiBqtGn6bVZIpXUx +yLAxpM6dtTvDEWz0M/Wm9rvqVgHOBQIhAL2fQKdkInohlipK3Qfk3v5D7ZGjrie7 +BX85JB8zqwHB +-----END PRIVATE KEY-----''' + + # The same RSA private key as in rsaKeyPEM, but now encrypted + rsaKeyEncryptedPEM=( + + # With DES and passphrase 'test' + ('test', '''-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: DES-CBC,AF8F9A40BD2FA2FC + +Ckl9ex1kaVEWhYC2QBmfaF+YPiR4NFkRXA7nj3dcnuFEzBnY5XULupqQpQI3qbfA +u8GYS7+b3toWWiHZivHbAAUBPDIZG9hKDyB9Sq2VMARGsX1yW1zhNvZLIiVJzUHs +C6NxQ1IJWOXzTew/xM2I26kPwHIvadq+/VaT8gLQdjdH0jOiVNaevjWnLgrn1mLP +BCNRMdcexozWtAFNNqSzfW58MJL2OdMi21ED184EFytIc1BlB+FZiGZduwKGuaKy +9bMbdb/1PSvsSzPsqW7KSSrTw6MgJAFJg6lzIYvR5F4poTVBxwBX3+EyEmShiaNY +IRX3TgQI0IjrVuLmvlZKbGWP18FXj7I7k9tSsNOOzllTTdq3ny5vgM3A+ynfAaxp +dysKznQ6P+IoqML1WxAID4aGRMWka+uArOJ148Rbj9s= +-----END RSA PRIVATE KEY-----''', + "\xAF\x8F\x9A\x40\xBD\x2F\xA2\xFC"), + + # With Triple-DES and passphrase 'rocking' + ('rocking', '''-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: DES-EDE3-CBC,C05D6C07F7FC02F6 + +w4lwQrXaVoTTJ0GgwY566htTA2/t1YlimhxkxYt9AEeCcidS5M0Wq9ClPiPz9O7F +m6K5QpM1rxo1RUE/ZyI85gglRNPdNwkeTOqit+kum7nN73AToX17+irVmOA4Z9E+ +4O07t91GxGMcjUSIFk0ucwEU4jgxRvYscbvOMvNbuZszGdVNzBTVddnShKCsy9i7 +nJbPlXeEKYi/OkRgO4PtfqqWQu5GIEFVUf9ev1QV7AvC+kyWTR1wWYnHX265jU5c +sopxQQtP8XEHIJEdd5/p1oieRcWTCNyY8EkslxDSsrf0OtZp6mZH9N+KU47cgQtt +9qGORmlWnsIoFFKcDohbtOaWBTKhkj5h6OkLjFjfU/sBeV1c+7wDT3dAy5tawXjG +YSxC7qDQIT/RECvV3+oQKEcmpEujn45wAnkTi12BH30= +-----END RSA PRIVATE KEY-----''', + "\xC0\x5D\x6C\x07\xF7\xFC\x02\xF6"), + ) + + rsaPublicKeyPEM = '''-----BEGIN PUBLIC KEY----- 
+MFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBAL8eJ5AKoIsjURpcEoGubZMxLD7+kT+T +Lr7UkvEtFrRhDDKMtuIIq19FrL4pUIMymPMSLBn3hJLe30Dw48GQM4UCAwEAAQ== +-----END PUBLIC KEY-----''' + + # Obtained using 'ssh-keygen -i -m PKCS8 -f rsaPublicKeyPEM' + rsaPublicKeyOpenSSH = '''ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAQQC/HieQCqCLI1EaXBKBrm2TMSw+/pE/ky6+1JLxLRa0YQwyjLbiCKtfRay+KVCDMpjzEiwZ94SS3t9A8OPBkDOF comment\n''' + + # The private key, in PKCS#1 format encoded with DER + rsaKeyDER = a2b_hex( + '''3082013b020100024100bf1e27900aa08b23511a5c1281ae6d93312c3efe + 913f932ebed492f12d16b4610c328cb6e208ab5f45acbe2950833298f312 + 2c19f78492dedf40f0e3c190338502030100010240094483129f114dedf6 + 7edabc2301bc5a88e5e6601dd7016220ead9fd4bfc6fdeb75893898ae41c + 54ddbdbf1539f8ccbd18f67b440de1ac30440281d40cfac839022100f20f + 2f3e1da61883f62980922bd8df545ce407c726241103b5e2c53723124a23 + 022100ca1fe924792cfcc96bfab74f344a68b418df578338064806000fe2 + a5c99a023702210087be1c3029504bcf34ec713d877947447813288975ca + 240080af7b094091b12102206ab469fa6d5648a57531c8b031a4ce9db53b + c3116cf433f5a6f6bbea5601ce05022100bd9f40a764227a21962a4add07 + e4defe43ed91a3ae27bb057f39241f33ab01c1 + '''.replace(" ","")) + + # The private key, in unencrypted PKCS#8 format encoded with DER + rsaKeyDER8 = a2b_hex( + '''30820155020100300d06092a864886f70d01010105000482013f3082013 + b020100024100bf1e27900aa08b23511a5c1281ae6d93312c3efe913f932 + ebed492f12d16b4610c328cb6e208ab5f45acbe2950833298f3122c19f78 + 492dedf40f0e3c190338502030100010240094483129f114dedf67edabc2 + 301bc5a88e5e6601dd7016220ead9fd4bfc6fdeb75893898ae41c54ddbdb + f1539f8ccbd18f67b440de1ac30440281d40cfac839022100f20f2f3e1da + 61883f62980922bd8df545ce407c726241103b5e2c53723124a23022100c + a1fe924792cfcc96bfab74f344a68b418df578338064806000fe2a5c99a0 + 23702210087be1c3029504bcf34ec713d877947447813288975ca240080a + f7b094091b12102206ab469fa6d5648a57531c8b031a4ce9db53bc3116cf + 433f5a6f6bbea5601ce05022100bd9f40a764227a21962a4add07e4defe4 + 3ed91a3ae27bb057f39241f33ab01c1 + '''.replace(" ","")) + + rsaPublicKeyDER = a2b_hex( + '''305c300d06092a864886f70d0101010500034b003048024100bf1e27900a + a08b23511a5c1281ae6d93312c3efe913f932ebed492f12d16b4610c328c + b6e208ab5f45acbe2950833298f3122c19f78492dedf40f0e3c190338502 + 03010001 + '''.replace(" ","")) + + n = int('BF 1E 27 90 0A A0 8B 23 51 1A 5C 12 81 AE 6D 93 31 2C 3E FE 91 3F 93 2E BE D4 92 F1 2D 16 B4 61 0C 32 8C B6 E2 08 AB 5F 45 AC BE 29 50 83 32 98 F3 12 2C 19 F7 84 92 DE DF 40 F0 E3 C1 90 33 85'.replace(" ",""),16) + e = 65537 + d = int('09 44 83 12 9F 11 4D ED F6 7E DA BC 23 01 BC 5A 88 E5 E6 60 1D D7 01 62 20 EA D9 FD 4B FC 6F DE B7 58 93 89 8A E4 1C 54 DD BD BF 15 39 F8 CC BD 18 F6 7B 44 0D E1 AC 30 44 02 81 D4 0C FA C8 39'.replace(" ",""),16) + p = int('00 F2 0F 2F 3E 1D A6 18 83 F6 29 80 92 2B D8 DF 54 5C E4 07 C7 26 24 11 03 B5 E2 C5 37 23 12 4A 23'.replace(" ",""),16) + q = int('00 CA 1F E9 24 79 2C FC C9 6B FA B7 4F 34 4A 68 B4 18 DF 57 83 38 06 48 06 00 0F E2 A5 C9 9A 02 37'.replace(" ",""),16) + + # This is q^{-1} mod p). fastmath and slowmath use pInv (p^{-1} + # mod q) instead! 
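# Illustrative sketch (not in the patched file): the two CRT coefficients the
# comment above distinguishes.  PKCS#1 RSAPrivateKey carries qInv = q**-1 mod p,
# while PyCrypto's key.u is pInv = p**-1 mod q; both come from an ordinary modular
# inverse (modinv here is a hypothetical stand-in for Crypto.Util.number.inverse).
def _egcd(a, b):
    if a == 0:
        return b, 0, 1
    g, x, y = _egcd(b % a, a)
    return g, y - (b // a) * x, x

def modinv(a, m):
    g, x, _ = _egcd(a % m, m)
    assert g == 1, "inverse exists only when gcd(a, m) == 1"
    return x % m

# qInv = modinv(q, p)  satisfies  (qInv * q) % p == 1
# pInv = modinv(p, q)  satisfies  (pInv * p) % q == 1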
+ qInv = int('00 BD 9F 40 A7 64 22 7A 21 96 2A 4A DD 07 E4 DE FE 43 ED 91 A3 AE 27 BB 05 7F 39 24 1F 33 AB 01 C1'.replace(" ",""),16) + pInv = inverse(p,q) + + def testImportKey1(self): + """Verify import of RSAPrivateKey DER SEQUENCE""" + key = self.rsa.importKey(self.rsaKeyDER) + self.assertTrue(key.has_private()) + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + self.assertEqual(key.d, self.d) + self.assertEqual(key.p, self.p) + self.assertEqual(key.q, self.q) + + def testImportKey2(self): + """Verify import of SubjectPublicKeyInfo DER SEQUENCE""" + key = self.rsa.importKey(self.rsaPublicKeyDER) + self.assertFalse(key.has_private()) + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + + def testImportKey3unicode(self): + """Verify import of RSAPrivateKey DER SEQUENCE, encoded with PEM as unicode""" + key = RSA.importKey(self.rsaKeyPEM) + self.assertEqual(key.has_private(),True) # assert_ + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + self.assertEqual(key.d, self.d) + self.assertEqual(key.p, self.p) + self.assertEqual(key.q, self.q) + + def testImportKey3bytes(self): + """Verify import of RSAPrivateKey DER SEQUENCE, encoded with PEM as byte string""" + key = RSA.importKey(b(self.rsaKeyPEM)) + self.assertEqual(key.has_private(),True) # assert_ + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + self.assertEqual(key.d, self.d) + self.assertEqual(key.p, self.p) + self.assertEqual(key.q, self.q) + + def testImportKey4unicode(self): + """Verify import of RSAPrivateKey DER SEQUENCE, encoded with PEM as unicode""" + key = RSA.importKey(self.rsaPublicKeyPEM) + self.assertEqual(key.has_private(),False) # failIf + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + + def testImportKey4bytes(self): + """Verify import of SubjectPublicKeyInfo DER SEQUENCE, encoded with PEM as byte string""" + key = RSA.importKey(b(self.rsaPublicKeyPEM)) + self.assertEqual(key.has_private(),False) # failIf + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + + def testImportKey5(self): + """Verifies that the imported key is still a valid RSA pair""" + key = RSA.importKey(self.rsaKeyPEM) + idem = key.encrypt(key.decrypt(b("Test")),0) + self.assertEqual(idem[0],b("Test")) + + def testImportKey6(self): + """Verifies that the imported key is still a valid RSA pair""" + key = RSA.importKey(self.rsaKeyDER) + idem = key.encrypt(key.decrypt(b("Test")),0) + self.assertEqual(idem[0],b("Test")) + + def testImportKey7(self): + """Verify import of OpenSSH public key""" + key = self.rsa.importKey(self.rsaPublicKeyOpenSSH) + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + + def testImportKey8(self): + """Verify import of encrypted PrivateKeyInfo DER SEQUENCE""" + for t in self.rsaKeyEncryptedPEM: + key = self.rsa.importKey(t[1], t[0]) + self.assertTrue(key.has_private()) + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + self.assertEqual(key.d, self.d) + self.assertEqual(key.p, self.p) + self.assertEqual(key.q, self.q) + + def testImportKey9(self): + """Verify import of unencrypted PrivateKeyInfo DER SEQUENCE""" + key = self.rsa.importKey(self.rsaKeyDER8) + self.assertTrue(key.has_private()) + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + self.assertEqual(key.d, self.d) + self.assertEqual(key.p, self.p) + self.assertEqual(key.q, self.q) + + def testImportKey10(self): + """Verify import of unencrypted PrivateKeyInfo DER SEQUENCE, encoded with PEM""" + key = 
self.rsa.importKey(self.rsaKeyPEM8) + self.assertTrue(key.has_private()) + self.assertEqual(key.n, self.n) + self.assertEqual(key.e, self.e) + self.assertEqual(key.d, self.d) + self.assertEqual(key.p, self.p) + self.assertEqual(key.q, self.q) + + def testImportKey11(self): + """Verify import of RSAPublicKey DER SEQUENCE""" + der = asn1.DerSequence([17, 3]).encode() + key = self.rsa.importKey(der) + self.assertEqual(key.n, 17) + self.assertEqual(key.e, 3) + + def testImportKey12(self): + """Verify import of RSAPublicKey DER SEQUENCE, encoded with PEM""" + der = asn1.DerSequence([17, 3]).encode() + pem = der2pem(der) + key = self.rsa.importKey(pem) + self.assertEqual(key.n, 17) + self.assertEqual(key.e, 3) + + ### + def testExportKey1(self): + key = self.rsa.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) + derKey = key.exportKey("DER") + self.assertEqual(derKey, self.rsaKeyDER) + + def testExportKey2(self): + key = self.rsa.construct([self.n, self.e]) + derKey = key.exportKey("DER") + self.assertEqual(derKey, self.rsaPublicKeyDER) + + def testExportKey3(self): + key = self.rsa.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) + pemKey = key.exportKey("PEM") + self.assertEqual(pemKey, b(self.rsaKeyPEM)) + + def testExportKey4(self): + key = self.rsa.construct([self.n, self.e]) + pemKey = key.exportKey("PEM") + self.assertEqual(pemKey, b(self.rsaPublicKeyPEM)) + + def testExportKey5(self): + key = self.rsa.construct([self.n, self.e]) + openssh_1 = key.exportKey("OpenSSH").split() + openssh_2 = self.rsaPublicKeyOpenSSH.split() + self.assertEqual(openssh_1[0], openssh_2[0]) + self.assertEqual(openssh_1[1], openssh_2[1]) + + def testExportKey7(self): + key = self.rsa.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) + # Tuple with index #1 is encrypted with 3DES + t = list(map(b,self.rsaKeyEncryptedPEM[1])) + # Force the salt being used when exporting + key._randfunc = lambda N: (t[2]*divmod(N+len(t[2]),len(t[2]))[0])[:N] + pemKey = key.exportKey("PEM", t[0]) + self.assertEqual(pemKey, t[1]) + + def testExportKey8(self): + key = self.rsa.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) + derKey = key.exportKey("DER", pkcs=8) + self.assertEqual(derKey, self.rsaKeyDER8) + + def testExportKey9(self): + key = self.rsa.construct([self.n, self.e, self.d, self.p, self.q, self.pInv]) + pemKey = key.exportKey("PEM", pkcs=8) + self.assertEqual(pemKey, b(self.rsaKeyPEM8)) + +class ImportKeyTestsSlow(ImportKeyTests): + def setUp(self): + self.rsa = RSA.RSAImplementation(use_fast_math=0) + +class ImportKeyTestsFast(ImportKeyTests): + def setUp(self): + self.rsa = RSA.RSAImplementation(use_fast_math=1) + +if __name__ == '__main__': + unittest.main() + +def get_tests(config={}): + tests = [] + try: + from Crypto.PublicKey import _fastmath + tests += list_test_cases(ImportKeyTestsFast) + except ImportError: + pass + tests += list_test_cases(ImportKeyTestsSlow) + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Random/Fortuna/__init__.py b/Lib/site-packages/Crypto/SelfTest/Random/Fortuna/__init__.py new file mode 100644 index 0000000..81a0e13 --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Random/Fortuna/__init__.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Random/Fortuna/__init__.py: Self-test for Fortuna modules +# +# Written in 2008 by Dwayne C.
Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test for the Crypto.Random.Fortuna package""" + +__revision__ = "$Id$" + +import os + +def get_tests(config={}): + tests = [] + from Crypto.SelfTest.Random.Fortuna import test_FortunaAccumulator; tests += test_FortunaAccumulator.get_tests(config=config) + from Crypto.SelfTest.Random.Fortuna import test_FortunaGenerator; tests += test_FortunaGenerator.get_tests(config=config) + from Crypto.SelfTest.Random.Fortuna import test_SHAd256; tests += test_SHAd256.get_tests(config=config) + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Random/Fortuna/test_FortunaAccumulator.py b/Lib/site-packages/Crypto/SelfTest/Random/Fortuna/test_FortunaAccumulator.py new file mode 100644 index 0000000..c120086 --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Random/Fortuna/test_FortunaAccumulator.py @@ -0,0 +1,189 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Random/Fortuna/test_FortunaAccumulator.py: Self-test for the FortunaAccumulator module +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-tests for Crypto.Random.Fortuna.FortunaAccumulator""" + +__revision__ = "$Id$" + +import sys +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * +from Crypto.Util.py3compat import * + +import unittest +from binascii import b2a_hex + +class FortunaAccumulatorTests(unittest.TestCase): + def setUp(self): + global FortunaAccumulator + from Crypto.Random.Fortuna import FortunaAccumulator + + def test_FortunaPool(self): + """FortunaAccumulator.FortunaPool""" + pool = FortunaAccumulator.FortunaPool() + self.assertEqual(0, pool.length) + self.assertEqual("5df6e0e2761359d30a8275058e299fcc0381534545f55cf43e41983f5d4c9456", pool.hexdigest()) + + pool.append(b('abc')) + + self.assertEqual(3, pool.length) + self.assertEqual("4f8b42c22dd3729b519ba6f68d2da7cc5b2d606d05daed5ad5128cc03e6c6358", pool.hexdigest()) + + pool.append(b("dbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq")) + + self.assertEqual(56, pool.length) + self.assertEqual(b('0cffe17f68954dac3a84fb1458bd5ec99209449749b2b308b7cb55812f9563af'), b2a_hex(pool.digest())) + + pool.reset() + + self.assertEqual(0, pool.length) + + pool.append(b('a') * 10**6) + + self.assertEqual(10**6, pool.length) + self.assertEqual(b('80d1189477563e1b5206b2749f1afe4807e5705e8bd77887a60187a712156688'), b2a_hex(pool.digest())) + + def test_which_pools(self): + """FortunaAccumulator.which_pools""" + + # which_pools(0) should fail + self.assertRaises(AssertionError, FortunaAccumulator.which_pools, 0) + + self.assertEqual(FortunaAccumulator.which_pools(1), [0]) + self.assertEqual(FortunaAccumulator.which_pools(2), [0, 1]) + self.assertEqual(FortunaAccumulator.which_pools(3), [0]) + self.assertEqual(FortunaAccumulator.which_pools(4), [0, 1, 2]) + self.assertEqual(FortunaAccumulator.which_pools(5), [0]) + self.assertEqual(FortunaAccumulator.which_pools(6), [0, 1]) + self.assertEqual(FortunaAccumulator.which_pools(7), [0]) + self.assertEqual(FortunaAccumulator.which_pools(8), [0, 1, 2, 3]) + for i in range(1, 32): + self.assertEqual(FortunaAccumulator.which_pools(2**i-1), [0]) + self.assertEqual(FortunaAccumulator.which_pools(2**i), list(range(i+1))) + self.assertEqual(FortunaAccumulator.which_pools(2**i+1), [0]) + self.assertEqual(FortunaAccumulator.which_pools(2**31), list(range(32))) + self.assertEqual(FortunaAccumulator.which_pools(2**32), list(range(32))) + self.assertEqual(FortunaAccumulator.which_pools(2**33), list(range(32))) + self.assertEqual(FortunaAccumulator.which_pools(2**34), list(range(32))) + self.assertEqual(FortunaAccumulator.which_pools(2**35), list(range(32))) + self.assertEqual(FortunaAccumulator.which_pools(2**36), list(range(32))) + self.assertEqual(FortunaAccumulator.which_pools(2**64), list(range(32))) + self.assertEqual(FortunaAccumulator.which_pools(2**128), list(range(32))) + + def test_accumulator(self): + """FortunaAccumulator.FortunaAccumulator""" + fa = FortunaAccumulator.FortunaAccumulator() + + # This should fail, because we haven't seeded the PRNG yet + self.assertRaises(AssertionError, fa.random_data, 1) + + # Spread some test data across the pools (source number 42) + # This would be horribly insecure in a real system. 
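# Illustrative sketch (not in the patched file): the rule encoded by the
# which_pools() assertions above -- on reseed number n, pool i is drained exactly
# when 2**i divides n, with at most 32 pools.  which_pools_rule is a standalone
# illustration, not FortunaAccumulator's implementation.
def which_pools_rule(n):
    assert n >= 1                 # which_pools(0) is rejected above
    pools = []
    for i in range(32):
        if n % (2 ** i) != 0:
            break
        pools.append(i)
    return pools

# which_pools_rule(8) == [0, 1, 2, 3];  which_pools_rule(2**32) == list(range(32))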
+ for p in range(32): + fa.add_random_event(42, p, b("X") * 32) + self.assertEqual(32+2, fa.pools[p].length) + + # This should still fail, because we haven't seeded the PRNG with 64 bytes yet + self.assertRaises(AssertionError, fa.random_data, 1) + + # Add more data + for p in range(32): + fa.add_random_event(42, p, b("X") * 32) + self.assertEqual((32+2)*2, fa.pools[p].length) + + # The underlying RandomGenerator should get seeded with Pool 0 + # s = SHAd256(chr(42) + chr(32) + "X"*32 + chr(42) + chr(32) + "X"*32) + # = SHA256(h'edd546f057b389155a31c32e3975e736c1dec030ddebb137014ecbfb32ed8c6f') + # = h'aef42a5dcbddab67e8efa118e1b47fde5d697f89beb971b99e6e8e5e89fbf064' + # The counter and the key before reseeding is: + # C_0 = 0 + # K_0 = "\x00" * 32 + # The counter after reseeding is 1, and the new key after reseeding is + # C_1 = 1 + # K_1 = SHAd256(K_0 || s) + # = SHA256(h'0eae3e401389fab86640327ac919ecfcb067359d95469e18995ca889abc119a6') + # = h'aafe9d0409fbaaafeb0a1f2ef2014a20953349d3c1c6e6e3b962953bea6184dd' + # The first block of random data, therefore, is + # r_1 = AES-256(K_1, 1) + # = AES-256(K_1, h'01000000000000000000000000000000') + # = h'b7b86bd9a27d96d7bb4add1b6b10d157' + # The second block of random data is + # r_2 = AES-256(K_1, 2) + # = AES-256(K_1, h'02000000000000000000000000000000') + # = h'2350b1c61253db2f8da233be726dc15f' + # The third and fourth blocks of random data (which become the new key) are + # r_3 = AES-256(K_1, 3) + # = AES-256(K_1, h'03000000000000000000000000000000') + # = h'f23ad749f33066ff53d307914fbf5b21' + # r_4 = AES-256(K_1, 4) + # = AES-256(K_1, h'04000000000000000000000000000000') + # = h'da9667c7e86ba247655c9490e9d94a7c' + # K_2 = r_3 || r_4 + # = h'f23ad749f33066ff53d307914fbf5b21da9667c7e86ba247655c9490e9d94a7c' + # The final counter value is 5. + self.assertEqual("aef42a5dcbddab67e8efa118e1b47fde5d697f89beb971b99e6e8e5e89fbf064", + fa.pools[0].hexdigest()) + self.assertEqual(None, fa.generator.key) + self.assertEqual(0, fa.generator.counter.next_value()) + + result = fa.random_data(32) + + self.assertEqual(b("b7b86bd9a27d96d7bb4add1b6b10d157" "2350b1c61253db2f8da233be726dc15f"), b2a_hex(result)) + self.assertEqual(b("f23ad749f33066ff53d307914fbf5b21da9667c7e86ba247655c9490e9d94a7c"), b2a_hex(fa.generator.key)) + self.assertEqual(5, fa.generator.counter.next_value()) + + def test_accumulator_pool_length(self): + """FortunaAccumulator.FortunaAccumulator minimum pool length""" + fa = FortunaAccumulator.FortunaAccumulator() + + # This test case is hard-coded to assume that FortunaAccumulator.min_pool_size is 64. 
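# Illustrative sketch (not in the patched file): the keystream step spelled out in
# the long comment above -- every 16-byte block is AES-256 applied to the
# little-endian block counter.  fortuna_block is a hypothetical helper written
# against the bundled Crypto.Cipher.AES; the real generator lives in
# Crypto.Random.Fortuna.FortunaGenerator.
import struct
from Crypto.Cipher import AES

def fortuna_block(key, counter):
    # counter == 1 encodes as h'01000000000000000000000000000000', as in the comment above
    ctr_bytes = struct.pack("<QQ", counter & 0xFFFFFFFFFFFFFFFF, counter >> 64)
    return AES.new(key, AES.MODE_ECB).encrypt(ctr_bytes)

# r_1 = fortuna_block(K_1, 1), r_2 = fortuna_block(K_1, 2), and so on.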
+ self.assertEqual(fa.min_pool_size, 64) + + # The PRNG should not allow us to get random data from it yet + self.assertRaises(AssertionError, fa.random_data, 1) + + # Add 60 bytes, 4 at a time (2 header + 2 payload) to each of the 32 pools + for i in range(15): + for p in range(32): + # Add the bytes to the pool + fa.add_random_event(2, p, b("XX")) + + # The PRNG should not allow us to get random data from it yet + self.assertRaises(AssertionError, fa.random_data, 1) + + # Add 4 more bytes to pool 0 + fa.add_random_event(2, 0, b("XX")) + + # We should now be able to get data from the accumulator + fa.random_data(1) + +def get_tests(config={}): + from Crypto.SelfTest.st_common import list_test_cases + return list_test_cases(FortunaAccumulatorTests) + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Random/Fortuna/test_FortunaGenerator.py b/Lib/site-packages/Crypto/SelfTest/Random/Fortuna/test_FortunaGenerator.py new file mode 100644 index 0000000..d41bb02 --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Random/Fortuna/test_FortunaGenerator.py @@ -0,0 +1,83 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Random/Fortuna/test_FortunaGenerator.py: Self-test for the FortunaGenerator module +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-tests for Crypto.Random.Fortuna.FortunaGenerator""" + +__revision__ = "$Id$" + +import sys +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * +from Crypto.Util.py3compat import * + +import unittest +from binascii import b2a_hex + +class FortunaGeneratorTests(unittest.TestCase): + def setUp(self): + global FortunaGenerator + from Crypto.Random.Fortuna import FortunaGenerator + + def test_generator(self): + """FortunaGenerator.AESGenerator""" + fg = FortunaGenerator.AESGenerator() + + # We shouldn't be able to read data until we've seeded the generator + self.assertRaises(Exception, fg.pseudo_random_data, 1) + self.assertEqual(0, fg.counter.next_value()) + + # Seed the generator, which should set the key and increment the counter. 
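# Illustrative sketch (not in the patched file): "SHAd256" is SHA-256 applied
# twice, and the expectations in this test suggest the reseed rule
#     new_key = SHAd256(old_key || seed),  counter += 1
# starting from an empty key.  hashlib stands in for the bundled SHAd256 module;
# whether this reproduces the exact digests asserted below depends on the
# generator's internal reseed rule, so treat it as an illustration only.
import hashlib

def shad256(data):
    return hashlib.sha256(hashlib.sha256(data).digest()).digest()

def reseed(old_key, seed):
    return shad256(old_key + seed)

# For reference, shad256(b"") hex-encodes to
# 5df6e0e2761359d30a8275058e299fcc0381534545f55cf43e41983f5d4c9456,
# the empty-pool digest asserted in the FortunaPool test earlier in this patch.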
+ fg.reseed(b("Hello")) + self.assertEqual(b("0ea6919d4361551364242a4ba890f8f073676e82cf1a52bb880f7e496648b565"), b2a_hex(fg.key)) + self.assertEqual(1, fg.counter.next_value()) + + # Read 2 full blocks from the generator + self.assertEqual(b("7cbe2c17684ac223d08969ee8b565616") + # counter=1 + b("717661c0d2f4758bd6ba140bf3791abd"), # counter=2 + b2a_hex(fg.pseudo_random_data(32))) + + # Meanwhile, the generator will have re-keyed itself and incremented its counter + self.assertEqual(b("33a1bb21987859caf2bbfc5615bef56d") + # counter=3 + b("e6b71ff9f37112d0c193a135160862b7"), # counter=4 + b2a_hex(fg.key)) + self.assertEqual(5, fg.counter.next_value()) + + # Read another 2 blocks from the generator + self.assertEqual(b("fd6648ba3086e919cee34904ef09a7ff") + # counter=5 + b("021f77580558b8c3e9248275f23042bf"), # counter=6 + b2a_hex(fg.pseudo_random_data(32))) + + + # Try to read more than 2**20 bytes using the internal function. This should fail. + self.assertRaises(AssertionError, fg._pseudo_random_data, 2**20+1) + +def get_tests(config={}): + from Crypto.SelfTest.st_common import list_test_cases + return list_test_cases(FortunaGeneratorTests) + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Random/Fortuna/test_SHAd256.py b/Lib/site-packages/Crypto/SelfTest/Random/Fortuna/test_SHAd256.py new file mode 100644 index 0000000..f94db8a --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Random/Fortuna/test_SHAd256.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Random/Fortuna/test_SHAd256.py: Self-test for the SHAd256 hash function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Random.Fortuna.SHAd256""" + +__revision__ = "$Id$" +from Crypto.Util.py3compat import * + +# This is a list of (expected_result, input[, description]) tuples. +test_data = [ + # I could not find any test vectors for SHAd256, so I made these vectors by + # feeding some sample data into several plain SHA256 implementations + # (including OpenSSL, the "sha256sum" tool, and this implementation). + # This is a subset of the resulting test vectors. 
The complete list can be + # found at: http://www.dlitz.net/crypto/shad256-test-vectors/ + ('5df6e0e2761359d30a8275058e299fcc0381534545f55cf43e41983f5d4c9456', + '', "'' (empty string)"), + ('4f8b42c22dd3729b519ba6f68d2da7cc5b2d606d05daed5ad5128cc03e6c6358', + 'abc'), + ('0cffe17f68954dac3a84fb1458bd5ec99209449749b2b308b7cb55812f9563af', + 'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq') +] + +def get_tests(config={}): + from Crypto.Random.Fortuna import SHAd256 + from Crypto.SelfTest.Hash.common import make_hash_tests + return make_hash_tests(SHAd256, "SHAd256", test_data, 32) + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Random/OSRNG/__init__.py b/Lib/site-packages/Crypto/SelfTest/Random/OSRNG/__init__.py new file mode 100644 index 0000000..44b3fa1 --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Random/OSRNG/__init__.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Random/OSRNG/__init__.py: Self-test for OSRNG modules +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test for Crypto.Random.OSRNG package""" + +__revision__ = "$Id$" + +import os + +def get_tests(config={}): + tests = [] + if os.name == 'nt': + from Crypto.SelfTest.Random.OSRNG import test_nt; tests += test_nt.get_tests(config=config) + from Crypto.SelfTest.Random.OSRNG import test_winrandom; tests += test_winrandom.get_tests(config=config) + elif os.name == 'posix': + from Crypto.SelfTest.Random.OSRNG import test_posix; tests += test_posix.get_tests(config=config) + if hasattr(os, 'urandom'): + from Crypto.SelfTest.Random.OSRNG import test_fallback; tests += test_fallback.get_tests(config=config) + from Crypto.SelfTest.Random.OSRNG import test_generic; tests += test_generic.get_tests(config=config) + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Random/OSRNG/test_fallback.py b/Lib/site-packages/Crypto/SelfTest/Random/OSRNG/test_fallback.py new file mode 100644 index 0000000..41909b0 --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Random/OSRNG/test_fallback.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Util/test_fallback.py: Self-test for the OSRNG.fallback.new() function +# +# Written in 2008 by Dwayne C. 
Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Random.OSRNG.fallback""" + +__revision__ = "$Id$" + +import unittest + +class SimpleTest(unittest.TestCase): + def runTest(self): + """Crypto.Random.OSRNG.fallback.new()""" + # Import the OSRNG.nt module and try to use it + import Crypto.Random.OSRNG.fallback + randobj = Crypto.Random.OSRNG.fallback.new() + x = randobj.read(16) + y = randobj.read(16) + self.assertNotEqual(x, y) + +def get_tests(config={}): + return [SimpleTest()] + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Random/OSRNG/test_generic.py b/Lib/site-packages/Crypto/SelfTest/Random/OSRNG/test_generic.py new file mode 100644 index 0000000..2a40974 --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Random/OSRNG/test_generic.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Util/test_generic.py: Self-test for the OSRNG.new() function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+# =================================================================== + +"""Self-test suite for Crypto.Random.OSRNG""" + +__revision__ = "$Id$" + +import unittest + +class SimpleTest(unittest.TestCase): + def runTest(self): + """Crypto.Random.OSRNG.new()""" + # Import the OSRNG module and try to use it + import Crypto.Random.OSRNG + randobj = Crypto.Random.OSRNG.new() + x = randobj.read(16) + y = randobj.read(16) + self.assertNotEqual(x, y) + +def get_tests(config={}): + return [SimpleTest()] + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Random/OSRNG/test_nt.py b/Lib/site-packages/Crypto/SelfTest/Random/OSRNG/test_nt.py new file mode 100644 index 0000000..a7a8338 --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Random/OSRNG/test_nt.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Util/test_generic.py: Self-test for the OSRNG.nt.new() function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Random.OSRNG.nt""" + +__revision__ = "$Id$" + +import unittest + +class SimpleTest(unittest.TestCase): + def runTest(self): + """Crypto.Random.OSRNG.nt.new()""" + # Import the OSRNG.nt module and try to use it + import Crypto.Random.OSRNG.nt + randobj = Crypto.Random.OSRNG.nt.new() + x = randobj.read(16) + y = randobj.read(16) + self.assertNotEqual(x, y) + +def get_tests(config={}): + return [SimpleTest()] + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Random/OSRNG/test_posix.py b/Lib/site-packages/Crypto/SelfTest/Random/OSRNG/test_posix.py new file mode 100644 index 0000000..2224afe --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Random/OSRNG/test_posix.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Util/test_posix.py: Self-test for the OSRNG.posix.new() function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for Crypto.Random.OSRNG.posix""" + +__revision__ = "$Id$" + +import unittest + +class SimpleTest(unittest.TestCase): + def runTest(self): + """Crypto.Random.OSRNG.posix.new()""" + # Import the OSRNG.nt module and try to use it + import Crypto.Random.OSRNG.posix + randobj = Crypto.Random.OSRNG.posix.new() + x = randobj.read(16) + y = randobj.read(16) + self.assertNotEqual(x, y) + +def get_tests(config={}): + return [SimpleTest()] + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Random/OSRNG/test_winrandom.py b/Lib/site-packages/Crypto/SelfTest/Random/OSRNG/test_winrandom.py new file mode 100644 index 0000000..3010eb7 --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Random/OSRNG/test_winrandom.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Util/test_winrandom.py: Self-test for the winrandom module +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
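A matching sketch for the Windows-only path that the test below exercises. winrandom is the low-level module shipped by this win32 build, and (as the test shows) it exposes get_bytes() rather than the file-like read():

    from Crypto.Random.OSRNG import winrandom   # importable only on Windows builds

    rng = winrandom.new()
    x = rng.get_bytes(16)
    y = rng.get_bytes(16)
    assert x != y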
+# =================================================================== + +"""Self-test suite for Crypto.Random.OSRNG.winrandom""" + +__revision__ = "$Id$" + +import unittest + +class SimpleTest(unittest.TestCase): + def runTest(self): + """Crypto.Random.OSRNG.winrandom""" + # Import the winrandom module and try to use it + from Crypto.Random.OSRNG import winrandom + randobj = winrandom.new() + x = randobj.get_bytes(16) + y = randobj.get_bytes(16) + self.assertNotEqual(x, y) + +def get_tests(config={}): + return [SimpleTest()] + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Random/__init__.py b/Lib/site-packages/Crypto/SelfTest/Random/__init__.py new file mode 100644 index 0000000..f972bf0 --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Random/__init__.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Random/__init__.py: Self-test for random number generation modules +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test for random number generators""" + +__revision__ = "$Id$" + +def get_tests(config={}): + tests = [] + from Crypto.SelfTest.Random import Fortuna; tests += Fortuna.get_tests(config=config) + from Crypto.SelfTest.Random import OSRNG; tests += OSRNG.get_tests(config=config) + from Crypto.SelfTest.Random import test_random; tests += test_random.get_tests(config=config) + from Crypto.SelfTest.Random import test_rpoolcompat; tests += test_rpoolcompat.get_tests(config=config) + from Crypto.SelfTest.Random import test__UserFriendlyRNG; tests += test__UserFriendlyRNG.get_tests(config=config) + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Random/test__UserFriendlyRNG.py b/Lib/site-packages/Crypto/SelfTest/Random/test__UserFriendlyRNG.py new file mode 100644 index 0000000..da3d835 --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Random/test__UserFriendlyRNG.py @@ -0,0 +1,171 @@ +# -*- coding: utf-8 -*- +# Self-tests for the user-friendly Crypto.Random interface +# +# Written in 2013 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. 
To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test suite for generic Crypto.Random stuff """ + + + +__revision__ = "$Id$" + +import binascii +import pprint +import unittest +import os +import time +import sys +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * +from Crypto.Util.py3compat import * + +try: + import multiprocessing +except ImportError: + multiprocessing = None + +import Crypto.Random._UserFriendlyRNG +import Crypto.Random.random + +class RNGForkTest(unittest.TestCase): + + def _get_reseed_count(self): + """ + Get `FortunaAccumulator.reseed_count`, the global count of the + number of times that the PRNG has been reseeded. + """ + rng_singleton = Crypto.Random._UserFriendlyRNG._get_singleton() + rng_singleton._lock.acquire() + try: + return rng_singleton._fa.reseed_count + finally: + rng_singleton._lock.release() + + def runTest(self): + # Regression test for CVE-2013-1445. We had a bug where, under the + # right conditions, two processes might see the same random sequence. + + if sys.platform.startswith('win'): # windows can't fork + assert not hasattr(os, 'fork') # ... right? + return + + # Wait 150 ms so that we don't trigger the rate-limit prematurely. + time.sleep(0.15) + + reseed_count_before = self._get_reseed_count() + + # One or both of these calls together should trigger a reseed right here. + Crypto.Random._UserFriendlyRNG._get_singleton().reinit() + Crypto.Random.get_random_bytes(1) + + reseed_count_after = self._get_reseed_count() + self.assertNotEqual(reseed_count_before, reseed_count_after) # sanity check: test should reseed parent before forking + + rfiles = [] + for i in range(10): + rfd, wfd = os.pipe() + if os.fork() == 0: + # child + os.close(rfd) + f = os.fdopen(wfd, "wb") + + Crypto.Random.atfork() + + data = Crypto.Random.get_random_bytes(16) + + f.write(data) + f.close() + os._exit(0) + # parent + os.close(wfd) + rfiles.append(os.fdopen(rfd, "rb")) + + results = [] + results_dict = {} + for f in rfiles: + data = binascii.hexlify(f.read()) + results.append(data) + results_dict[data] = 1 + f.close() + + if len(results) != len(list(results_dict.keys())): + raise AssertionError("RNG output duplicated across fork():\n%s" % + (pprint.pformat(results))) + + +# For RNGMultiprocessingForkTest +def _task_main(q): + a = Crypto.Random.get_random_bytes(16) + time.sleep(0.1) # wait 100 ms + b = Crypto.Random.get_random_bytes(16) + q.put(binascii.b2a_hex(a)) + q.put(binascii.b2a_hex(b)) + q.put(None) # Wait for acknowledgment + + +class RNGMultiprocessingForkTest(unittest.TestCase): + + def runTest(self): + # Another regression test for CVE-2013-1445. 
This is basically the + # same as RNGForkTest, but less compatible with old versions of Python, + # and a little easier to read. + + n_procs = 5 + manager = multiprocessing.Manager() + queues = [manager.Queue(1) for i in range(n_procs)] + + # Reseed the pool + time.sleep(0.15) + Crypto.Random._UserFriendlyRNG._get_singleton().reinit() + Crypto.Random.get_random_bytes(1) + + # Start the child processes + pool = multiprocessing.Pool(processes=n_procs, initializer=Crypto.Random.atfork) + map_result = pool.map_async(_task_main, queues) + + # Get the results, ensuring that no pool processes are reused. + aa = [queues[i].get(30) for i in range(n_procs)] + bb = [queues[i].get(30) for i in range(n_procs)] + res = list(zip(aa, bb)) + + # Shut down the pool + map_result.get(30) + pool.close() + pool.join() + + # Check that the results are unique + if len(set(aa)) != len(aa) or len(set(res)) != len(res): + raise AssertionError("RNG output duplicated across fork():\n%s" % + (pprint.pformat(res),)) + + +def get_tests(config={}): + tests = [] + tests += [RNGForkTest()] + if multiprocessing is not None: + tests += [RNGMultiprocessingForkTest()] + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Random/test_random.py b/Lib/site-packages/Crypto/SelfTest/Random/test_random.py new file mode 100644 index 0000000..b544514 --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Random/test_random.py @@ -0,0 +1,171 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Util/test_generic.py: Self-test for the Crypto.Random.new() function +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
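The two regression tests above both encode the rule an application must follow after creating a new process: call Crypto.Random.atfork() in the child before requesting random bytes, so parent and child do not share PRNG state (CVE-2013-1445). A condensed, POSIX-only sketch using the same calls as RNGForkTest:

    import binascii
    import os

    import Crypto.Random

    pid = os.fork()
    if pid == 0:                                    # child process
        Crypto.Random.atfork()                      # reseed the inherited RNG state
        data = Crypto.Random.get_random_bytes(16)
        print(binascii.hexlify(data))
        os._exit(0)
    os.waitpid(pid, 0)                              # parent just waits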
+# =================================================================== + +"""Self-test suite for Crypto.Random.new()""" + +__revision__ = "$Id$" + +import unittest +import sys +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * +from Crypto.Util.py3compat import * + +class SimpleTest(unittest.TestCase): + def runTest(self): + """Crypto.Random.new()""" + # Import the Random module and try to use it + from Crypto import Random + randobj = Random.new() + x = randobj.read(16) + y = randobj.read(16) + self.assertNotEqual(x, y) + z = Random.get_random_bytes(16) + self.assertNotEqual(x, z) + self.assertNotEqual(y, z) + # Test the Random.random module, which + # implements a subset of Python's random API + # Not implemented: + # seed(), getstate(), setstate(), jumpahead() + # random(), uniform(), triangular(), betavariate() + # expovariate(), gammavariate(), gauss(), + # longnormvariate(), normalvariate(), + # vonmisesvariate(), paretovariate() + # weibullvariate() + # WichmannHill(), whseed(), SystemRandom() + from Crypto.Random import random + x = random.getrandbits(16*8) + y = random.getrandbits(16*8) + self.assertNotEqual(x, y) + # Test randrange + if x>y: + start = y + stop = x + else: + start = x + stop = y + for step in range(1,10): + x = random.randrange(start,stop,step) + y = random.randrange(start,stop,step) + self.assertNotEqual(x, y) + self.assertEqual(start <= x < stop, True) + self.assertEqual(start <= y < stop, True) + self.assertEqual((x - start) % step, 0) + self.assertEqual((y - start) % step, 0) + for i in range(10): + self.assertEqual(random.randrange(1,2), 1) + self.assertRaises(ValueError, random.randrange, start, start) + self.assertRaises(ValueError, random.randrange, stop, start, step) + self.assertRaises(TypeError, random.randrange, start, stop, step, step) + self.assertRaises(TypeError, random.randrange, start, stop, "1") + self.assertRaises(TypeError, random.randrange, "1", stop, step) + self.assertRaises(TypeError, random.randrange, 1, "2", step) + self.assertRaises(ValueError, random.randrange, start, stop, 0) + # Test randint + x = random.randint(start,stop) + y = random.randint(start,stop) + self.assertNotEqual(x, y) + self.assertEqual(start <= x <= stop, True) + self.assertEqual(start <= y <= stop, True) + for i in range(10): + self.assertEqual(random.randint(1,1), 1) + self.assertRaises(ValueError, random.randint, stop, start) + self.assertRaises(TypeError, random.randint, start, stop, step) + self.assertRaises(TypeError, random.randint, "1", stop) + self.assertRaises(TypeError, random.randint, 1, "2") + # Test choice + seq = list(range(10000)) + x = random.choice(seq) + y = random.choice(seq) + self.assertNotEqual(x, y) + self.assertEqual(x in seq, True) + self.assertEqual(y in seq, True) + for i in range(10): + self.assertEqual(random.choice((1,2,3)) in (1,2,3), True) + self.assertEqual(random.choice([1,2,3]) in [1,2,3], True) + if sys.version_info[0] is 3: + self.assertEqual(random.choice(bytearray(b('123'))) in bytearray(b('123')), True) + self.assertEqual(1, random.choice([1])) + self.assertRaises(IndexError, random.choice, []) + self.assertRaises(TypeError, random.choice, 1) + # Test shuffle. Lacks random parameter to specify function. 
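The assertions above, together with the shuffle and sample checks that continue below, cover the drop-in subset of the standard random API provided by Crypto.Random.random. Condensed into one sketch, using only the functions this test exercises:

    from Crypto.Random import random        # cryptographically strong replacements

    n    = random.getrandbits(128)          # 128-bit integer
    r    = random.randrange(0, 100, 5)      # 0, 5, ..., 95
    pick = random.choice(['a', 'b', 'c'])
    deck = list(range(10))
    random.shuffle(deck)                    # in-place permutation
    hand = random.sample(deck, 3)           # three distinct elements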
+ # Make copies of seq + seq = list(range(500)) + x = list(seq) + y = list(seq) + random.shuffle(x) + random.shuffle(y) + self.assertNotEqual(x, y) + self.assertEqual(len(seq), len(x)) + self.assertEqual(len(seq), len(y)) + for i in range(len(seq)): + self.assertEqual(x[i] in seq, True) + self.assertEqual(y[i] in seq, True) + self.assertEqual(seq[i] in x, True) + self.assertEqual(seq[i] in y, True) + z = [1] + random.shuffle(z) + self.assertEqual(z, [1]) + if sys.version_info[0] == 3: + z = bytearray(b('12')) + random.shuffle(z) + self.assertEqual(b('1') in z, True) + self.assertRaises(TypeError, random.shuffle, b('12')) + self.assertRaises(TypeError, random.shuffle, 1) + self.assertRaises(TypeError, random.shuffle, "1") + self.assertRaises(TypeError, random.shuffle, (1,2)) + # 2to3 wraps a list() around it, alas - but I want to shoot + # myself in the foot here! :D + # if sys.version_info[0] == 3: + # self.assertRaises(TypeError, random.shuffle, range(3)) + # Test sample + x = random.sample(seq, 20) + y = random.sample(seq, 20) + self.assertNotEqual(x, y) + for i in range(20): + self.assertEqual(x[i] in seq, True) + self.assertEqual(y[i] in seq, True) + z = random.sample([1], 1) + self.assertEqual(z, [1]) + z = random.sample((1,2,3), 1) + self.assertEqual(z[0] in (1,2,3), True) + z = random.sample("123", 1) + self.assertEqual(z[0] in "123", True) + z = random.sample(list(range(3)), 1) + self.assertEqual(z[0] in range(3), True) + if sys.version_info[0] == 3: + z = random.sample(b("123"), 1) + self.assertEqual(z[0] in b("123"), True) + z = random.sample(bytearray(b("123")), 1) + self.assertEqual(z[0] in bytearray(b("123")), True) + self.assertRaises(TypeError, random.sample, 1) + +def get_tests(config={}): + return [SimpleTest()] + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Random/test_rpoolcompat.py b/Lib/site-packages/Crypto/SelfTest/Random/test_rpoolcompat.py new file mode 100644 index 0000000..be538da --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Random/test_rpoolcompat.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Util/test_winrandom.py: Self-test for the winrandom module +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
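test_rpoolcompat.py, whose body follows, checks the deprecated Crypto.Util.randpool.RandomPool wrapper kept for older callers. Roughly, legacy code looks like the sketch below; new code should use Crypto.Random.new().read() instead:

    from Crypto.Util.randpool import RandomPool   # emits RandomPool_DeprecationWarning

    pool = RandomPool()
    data = pool.get_bytes(16)        # still returns fresh random bytes
    pool.stir('extra seed')          # kept mainly for API compatibility
    pool.add_event('extra seed')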
+# =================================================================== + +"""Self-test for the Crypto.Util.randpool.RandomPool wrapper class""" + +__revision__ = "$Id$" + +import sys +import unittest + +class SimpleTest(unittest.TestCase): + def runTest(self): + """Crypto.Util.randpool.RandomPool""" + # Import the winrandom module and try to use it + from Crypto.Util.randpool import RandomPool + sys.stderr.write("SelfTest: You can ignore the RandomPool_DeprecationWarning that follows.\n") + rpool = RandomPool() + x = rpool.get_bytes(16) + y = rpool.get_bytes(16) + self.assertNotEqual(x, y) + self.assertNotEqual(rpool.entropy, 0) + + rpool.randomize() + rpool.stir('foo') + rpool.add_event('foo') + +def get_tests(config={}): + return [SimpleTest()] + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Signature/__init__.py b/Lib/site-packages/Crypto/SelfTest/Signature/__init__.py new file mode 100644 index 0000000..96c8531 --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Signature/__init__.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Signature/__init__.py: Self-test for signature modules +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test for signature modules""" + +__revision__ = "$Id$" + +import os + +def get_tests(config={}): + tests = [] + from . import test_pkcs1_15; tests += test_pkcs1_15.get_tests(config=config) + from . import test_pkcs1_pss; tests += test_pkcs1_pss.get_tests(config=config) + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Signature/test_pkcs1_15.py b/Lib/site-packages/Crypto/SelfTest/Signature/test_pkcs1_15.py new file mode 100644 index 0000000..b2f8a1b --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Signature/test_pkcs1_15.py @@ -0,0 +1,219 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Signature/test_pkcs1_15.py: Self-test for PKCS#1 v1.5 signatures +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. 
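Before the test vectors below, a short sketch of the sign/verify round trip these PKCS#1 v1.5 cases check; it uses only calls that appear later in this file (RSA.generate's second argument is a byte-returning RNG callable):

    from Crypto import Random
    from Crypto.Hash import SHA
    from Crypto.PublicKey import RSA
    from Crypto.Signature import PKCS1_v1_5

    key = RSA.generate(1024, Random.new().read)
    h = SHA.new()
    h.update(b'message to sign')
    signature = PKCS1_v1_5.new(key).sign(h)
    assert PKCS1_v1_5.new(key.publickey()).verify(h, signature)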
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +__revision__ = "$Id$" + +import unittest + +from Crypto.PublicKey import RSA +from Crypto.SelfTest.st_common import list_test_cases, a2b_hex, b2a_hex +from Crypto.Hash import * +from Crypto import Random +from Crypto.Signature import PKCS1_v1_5 as PKCS +from Crypto.Util.py3compat import * + +def isStr(s): + t = '' + try: + t += s + except TypeError: + return 0 + return 1 + +def rws(t): + """Remove white spaces, tabs, and new lines from a string""" + for c in ['\n', '\t', ' ']: + t = t.replace(c,'') + return t + +def t2b(t): + """Convert a text string with bytes in hex form to a byte string""" + clean = b(rws(t)) + if len(clean)%2 == 1: + raise ValueError("Even number of characters expected") + return a2b_hex(clean) + +class PKCS1_15_Tests(unittest.TestCase): + + # List of tuples with test data for PKCS#1 v1.5. + # Each tuple is made up by: + # Item #0: dictionary with RSA key component, or key to import + # Item #1: data to hash and sign + # Item #2: signature of the data #1, done with the key #0, after + # hashing it with #3 + # Item #3: hash object generator + + _testData = ( + + # + # Taken from ftp://ftp.rsa.com/pub/pkcs/ascii/examples.asc + # "Some Examples of the PKCS Standards", 1999 + # + ( + + # Private key, from 2.1 + { + 'n':'''0a 66 79 1d c6 98 81 68 de 7a b7 74 19 bb 7f b0 c0 01 c6 + 27 10 27 00 75 14 29 42 e1 9a 8d 8c 51 d0 53 b3 e3 78 2a 1d + e5 dc 5a f4 eb e9 94 68 17 01 14 a1 df e6 7c dc 9a 9a f5 5d + 65 56 20 bb ab''', + 'e':'''01 00 + 01''', + 'd':'''01 23 c5 b6 1b a3 6e db 1d 36 79 90 41 99 a8 9e a8 0c 09 + b9 12 2e 14 00 c0 9a dc f7 78 46 76 d0 1d 23 35 6a 7d 44 d6 + bd 8b d5 0e 94 bf c7 23 fa 87 d8 86 2b 75 17 76 91 c1 1d 75 + 76 92 df 88 81''' + }, + # Data to sign, from 3.1 + '''30 81 a4 02 01 00 30 42 31 0b 30 09 06 + 03 55 04 06 13 02 55 53 31 1d 30 1b 06 03 55 04 0a 13 14 + 45 78 61 6d 70 6c 65 20 4f 72 67 61 6e 69 7a 61 74 69 6f + 6e 31 14 30 12 06 03 55 04 03 13 0b 54 65 73 74 20 55 73 + 65 72 20 31 30 5b 30 0d 06 09 2a 86 48 86 f7 0d 01 01 01 + 05 00 03 4a 00 30 47 02 40 + 0a 66 79 1d c6 98 81 68 de 7a b7 74 19 bb 7f b0 + c0 01 c6 27 10 27 00 75 14 29 42 e1 9a 8d 8c 51 + d0 53 b3 e3 78 2a 1d e5 dc 5a f4 eb e9 94 68 17 + 01 14 a1 df e6 7c dc 9a 9a f5 5d 65 56 20 bb ab + 02 03 01 00 01''', + # Signature, from 3.2 (at the very end) + '''06 db 36 cb 18 d3 47 5b 9c 01 db 3c 78 95 28 08 + 02 79 bb ae ff 2b 7d 55 8e d6 61 59 87 c8 51 86 + 3f 8a 6c 2c ff bc 89 c3 f7 5a 18 d9 6b 12 7c 71 + 7d 54 d0 d8 04 8d a8 a0 54 46 26 d1 7a 2a 8f be''', + MD2 + ), + + # + # RSA keypair generated with openssl + # + ( + """-----BEGIN RSA PRIVATE KEY----- + MIIBOwIBAAJBAL8eJ5AKoIsjURpcEoGubZMxLD7+kT+TLr7UkvEtFrRhDDKMtuII + q19FrL4pUIMymPMSLBn3hJLe30Dw48GQM4UCAwEAAQJACUSDEp8RTe32ftq8IwG8 + Wojl5mAd1wFiIOrZ/Uv8b963WJOJiuQcVN29vxU5+My9GPZ7RA3hrDBEAoHUDPrI + OQIhAPIPLz4dphiD9imAkivY31Rc5AfHJiQRA7XixTcjEkojAiEAyh/pJHks/Mlr + +rdPNEpotBjfV4M4BkgGAA/ipcmaAjcCIQCHvhwwKVBLzzTscT2HeUdEeBMoiXXK + 
JACAr3sJQJGxIQIgarRp+m1WSKV1MciwMaTOnbU7wxFs9DP1pva76lYBzgUCIQC9 + n0CnZCJ6IZYqSt0H5N7+Q+2Ro64nuwV/OSQfM6sBwQ== + -----END RSA PRIVATE KEY-----""", + "This is a test\x0a", + # + # PKCS#1 signature computed with openssl + # + '''4a700a16432a291a3194646952687d5316458b8b86fb0a25aa30e0dcecdb + 442676759ac63d56ec1499c3ae4c0013c2053cabd5b5804848994541ac16 + fa243a4d''', + SHA + ), + + # + # Test vector from http://www.di-mgt.com.au/rsa_alg.html#signpkcs1 + # + ( + { + 'n':'''E08973398DD8F5F5E88776397F4EB005BB5383DE0FB7ABDC7DC775290D052E6D + 12DFA68626D4D26FAA5829FC97ECFA82510F3080BEB1509E4644F12CBBD832CF + C6686F07D9B060ACBEEE34096A13F5F7050593DF5EBA3556D961FF197FC981E6 + F86CEA874070EFAC6D2C749F2DFA553AB9997702A648528C4EF357385774575F''', + 'e':'''010001''', + 'd':'''00A403C327477634346CA686B57949014B2E8AD2C862B2C7D748096A8B91F736 + F275D6E8CD15906027314735644D95CD6763CEB49F56AC2F376E1CEE0EBF282D + F439906F34D86E085BD5656AD841F313D72D395EFE33CBFF29E4030B3D05A28F + B7F18EA27637B07957D32F2BDE8706227D04665EC91BAF8B1AC3EC9144AB7F21''' + }, + "abc", + '''60AD5A78FB4A4030EC542C8974CD15F55384E836554CEDD9A322D5F4135C6267 + A9D20970C54E6651070B0144D43844C899320DD8FA7819F7EBC6A7715287332E + C8675C136183B3F8A1F81EF969418267130A756FDBB2C71D9A667446E34E0EAD + 9CF31BFB66F816F319D0B7E430A5F2891553986E003720261C7E9022C0D9F11F''', + SHA + ) + + ) + + def testSign1(self): + for i in range(len(self._testData)): + row = self._testData[i] + # Build the key + if isStr(row[0]): + key = RSA.importKey(row[0]) + else: + comps = [ int(rws(row[0][x]),16) for x in ('n','e','d') ] + key = RSA.construct(comps) + h = row[3].new() + # Data to sign can either be in hex form or not + try: + h.update(t2b(row[1])) + except: + h.update(b(row[1])) + # The real test + signer = PKCS.new(key) + self.assertTrue(signer.can_sign()) + s = signer.sign(h) + self.assertEqual(s, t2b(row[2])) + + def testVerify1(self): + for i in range(len(self._testData)): + row = self._testData[i] + # Build the key + if isStr(row[0]): + key = RSA.importKey(row[0]).publickey() + else: + comps = [ int(rws(row[0][x]),16) for x in ('n','e') ] + key = RSA.construct(comps) + h = row[3].new() + # Data to sign can either be in hex form or not + try: + h.update(t2b(row[1])) + except: + h.update(b(row[1])) + # The real test + verifier = PKCS.new(key) + self.assertFalse(verifier.can_sign()) + result = verifier.verify(h, t2b(row[2])) + self.assertTrue(result) + + def testSignVerify(self): + rng = Random.new().read + key = RSA.generate(1024, rng) + + for hashmod in (MD2,MD5,SHA,SHA224,SHA256,SHA384,SHA512,RIPEMD): + h = hashmod.new() + h.update(b('blah blah blah')) + + signer = PKCS.new(key) + s = signer.sign(h) + result = signer.verify(h, s) + self.assertTrue(result) + + +def get_tests(config={}): + tests = [] + tests += list_test_cases(PKCS1_15_Tests) + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Signature/test_pkcs1_pss.py b/Lib/site-packages/Crypto/SelfTest/Signature/test_pkcs1_pss.py new file mode 100644 index 0000000..b635a9a --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Signature/test_pkcs1_pss.py @@ -0,0 +1,446 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Signature/test_pkcs1_pss.py: Self-test for PKCS#1 PSS signatures +# +# =================================================================== +# The contents of this file are dedicated to the public domain. 
To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + + + +__revision__ = "$Id$" + +import unittest + +from Crypto.PublicKey import RSA +from Crypto import Random +from Crypto.SelfTest.st_common import list_test_cases, a2b_hex, b2a_hex +from Crypto.Hash import * +from Crypto.Signature import PKCS1_PSS as PKCS +from Crypto.Util.py3compat import * + +def isStr(s): + t = '' + try: + t += s + except TypeError: + return 0 + return 1 + +def rws(t): + """Remove white spaces, tabs, and new lines from a string""" + for c in ['\t', '\n', ' ']: + t = t.replace(c,'') + return t + +def t2b(t): + """Convert a text string with bytes in hex form to a byte string""" + clean = b(rws(t)) + if len(clean)%2 == 1: + raise ValueError("Even number of characters expected") + return a2b_hex(clean) + +# Helper class to count how many bytes have been requested +# from the key's private RNG, w/o counting those used for blinding +class MyKey: + def __init__(self, key): + self._key = key + self.n = key.n + self.asked = 0 + def _randfunc(self, N): + self.asked += N + return self._key._randfunc(N) + def sign(self, m): + return self._key.sign(m) + def has_private(self): + return self._key.has_private() + def decrypt(self, m): + return self._key.decrypt(m) + def verify(self, m, p): + return self._key.verify(m, p) + def encrypt(self, m, p): + return self._key.encrypt(m, p) + +class PKCS1_PSS_Tests(unittest.TestCase): + + # List of tuples with test data for PKCS#1 PSS + # Each tuple is made up by: + # Item #0: dictionary with RSA key component, or key to import + # Item #1: data to hash and sign + # Item #2: signature of the data #1, done with the key #0, + # and salt #3 after hashing it with #4 + # Item #3: salt + # Item #4: hash object generator + + _testData = ( + + # + # From in pss-vect.txt to be found in + # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip + # + ( + # Private key + { + 'n':'''a2 ba 40 ee 07 e3 b2 bd 2f 02 ce 22 7f 36 a1 95 + 02 44 86 e4 9c 19 cb 41 bb bd fb ba 98 b2 2b 0e + 57 7c 2e ea ff a2 0d 88 3a 76 e6 5e 39 4c 69 d4 + b3 c0 5a 1e 8f ad da 27 ed b2 a4 2b c0 00 fe 88 + 8b 9b 32 c2 2d 15 ad d0 cd 76 b3 e7 93 6e 19 95 + 5b 22 0d d1 7d 4e a9 04 b1 ec 10 2b 2e 4d e7 75 + 12 22 aa 99 15 10 24 c7 cb 41 cc 5e a2 1d 00 ee + b4 1f 7c 80 08 34 d2 c6 e0 6b ce 3b ce 7e a9 a5''', + 'e':'''01 00 01''', + # In the test vector, only p and q were given... 
+ # d is computed offline as e^{-1} mod (p-1)(q-1) + 'd':'''50e2c3e38d886110288dfc68a9533e7e12e27d2aa56 + d2cdb3fb6efa990bcff29e1d2987fb711962860e7391b1ce01 + ebadb9e812d2fbdfaf25df4ae26110a6d7a26f0b810f54875e + 17dd5c9fb6d641761245b81e79f8c88f0e55a6dcd5f133abd3 + 5f8f4ec80adf1bf86277a582894cb6ebcd2162f1c7534f1f49 + 47b129151b71''' + }, + + # Data to sign + '''85 9e ef 2f d7 8a ca 00 30 8b dc 47 11 93 bf 55 + bf 9d 78 db 8f 8a 67 2b 48 46 34 f3 c9 c2 6e 64 + 78 ae 10 26 0f e0 dd 8c 08 2e 53 a5 29 3a f2 17 + 3c d5 0c 6d 5d 35 4f eb f7 8b 26 02 1c 25 c0 27 + 12 e7 8c d4 69 4c 9f 46 97 77 e4 51 e7 f8 e9 e0 + 4c d3 73 9c 6b bf ed ae 48 7f b5 56 44 e9 ca 74 + ff 77 a5 3c b7 29 80 2f 6e d4 a5 ff a8 ba 15 98 + 90 fc''', + # Signature + '''8d aa 62 7d 3d e7 59 5d 63 05 6c 7e c6 59 e5 44 + 06 f1 06 10 12 8b aa e8 21 c8 b2 a0 f3 93 6d 54 + dc 3b dc e4 66 89 f6 b7 95 1b b1 8e 84 05 42 76 + 97 18 d5 71 5d 21 0d 85 ef bb 59 61 92 03 2c 42 + be 4c 29 97 2c 85 62 75 eb 6d 5a 45 f0 5f 51 87 + 6f c6 74 3d ed dd 28 ca ec 9b b3 0e a9 9e 02 c3 + 48 82 69 60 4f e4 97 f7 4c cd 7c 7f ca 16 71 89 + 71 23 cb d3 0d ef 5d 54 a2 b5 53 6a d9 0a 74 7e''', + # Salt + '''e3 b5 d5 d0 02 c1 bc e5 0c 2b 65 ef 88 a1 88 d8 + 3b ce 7e 61''', + # Hash algorithm + SHA + ), + + # + # Example 1.1 to be found in + # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip + # + ( + # Private key + { + 'n':'''a5 6e 4a 0e 70 10 17 58 9a 51 87 dc 7e a8 41 d1 + 56 f2 ec 0e 36 ad 52 a4 4d fe b1 e6 1f 7a d9 91 + d8 c5 10 56 ff ed b1 62 b4 c0 f2 83 a1 2a 88 a3 + 94 df f5 26 ab 72 91 cb b3 07 ce ab fc e0 b1 df + d5 cd 95 08 09 6d 5b 2b 8b 6d f5 d6 71 ef 63 77 + c0 92 1c b2 3c 27 0a 70 e2 59 8e 6f f8 9d 19 f1 + 05 ac c2 d3 f0 cb 35 f2 92 80 e1 38 6b 6f 64 c4 + ef 22 e1 e1 f2 0d 0c e8 cf fb 22 49 bd 9a 21 37''', + 'e':'''01 00 01''', + 'd':'''33 a5 04 2a 90 b2 7d 4f 54 51 ca 9b bb d0 b4 47 + 71 a1 01 af 88 43 40 ae f9 88 5f 2a 4b be 92 e8 + 94 a7 24 ac 3c 56 8c 8f 97 85 3a d0 7c 02 66 c8 + c6 a3 ca 09 29 f1 e8 f1 12 31 88 44 29 fc 4d 9a + e5 5f ee 89 6a 10 ce 70 7c 3e d7 e7 34 e4 47 27 + a3 95 74 50 1a 53 26 83 10 9c 2a ba ca ba 28 3c + 31 b4 bd 2f 53 c3 ee 37 e3 52 ce e3 4f 9e 50 3b + d8 0c 06 22 ad 79 c6 dc ee 88 35 47 c6 a3 b3 25''' + }, + # Message + '''cd c8 7d a2 23 d7 86 df 3b 45 e0 bb bc 72 13 26 + d1 ee 2a f8 06 cc 31 54 75 cc 6f 0d 9c 66 e1 b6 + 23 71 d4 5c e2 39 2e 1a c9 28 44 c3 10 10 2f 15 + 6a 0d 8d 52 c1 f4 c4 0b a3 aa 65 09 57 86 cb 76 + 97 57 a6 56 3b a9 58 fe d0 bc c9 84 e8 b5 17 a3 + d5 f5 15 b2 3b 8a 41 e7 4a a8 67 69 3f 90 df b0 + 61 a6 e8 6d fa ae e6 44 72 c0 0e 5f 20 94 57 29 + cb eb e7 7f 06 ce 78 e0 8f 40 98 fb a4 1f 9d 61 + 93 c0 31 7e 8b 60 d4 b6 08 4a cb 42 d2 9e 38 08 + a3 bc 37 2d 85 e3 31 17 0f cb f7 cc 72 d0 b7 1c + 29 66 48 b3 a4 d1 0f 41 62 95 d0 80 7a a6 25 ca + b2 74 4f d9 ea 8f d2 23 c4 25 37 02 98 28 bd 16 + be 02 54 6f 13 0f d2 e3 3b 93 6d 26 76 e0 8a ed + 1b 73 31 8b 75 0a 01 67 d0''', + # Signature + '''90 74 30 8f b5 98 e9 70 1b 22 94 38 8e 52 f9 71 + fa ac 2b 60 a5 14 5a f1 85 df 52 87 b5 ed 28 87 + e5 7c e7 fd 44 dc 86 34 e4 07 c8 e0 e4 36 0b c2 + 26 f3 ec 22 7f 9d 9e 54 63 8e 8d 31 f5 05 12 15 + df 6e bb 9c 2f 95 79 aa 77 59 8a 38 f9 14 b5 b9 + c1 bd 83 c4 e2 f9 f3 82 a0 d0 aa 35 42 ff ee 65 + 98 4a 60 1b c6 9e b2 8d eb 27 dc a1 2c 82 c2 d4 + c3 f6 6c d5 00 f1 ff 2b 99 4d 8a 4e 30 cb b3 3c''', + # Salt + '''de e9 59 c7 e0 64 11 36 14 20 ff 80 18 5e d5 7f + 3e 67 76 af''', + # Hash + SHA + ), + + # + # Example 1.2 to be found in + # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip + 
# + ( + # Private key + { + 'n':'''a5 6e 4a 0e 70 10 17 58 9a 51 87 dc 7e a8 41 d1 + 56 f2 ec 0e 36 ad 52 a4 4d fe b1 e6 1f 7a d9 91 + d8 c5 10 56 ff ed b1 62 b4 c0 f2 83 a1 2a 88 a3 + 94 df f5 26 ab 72 91 cb b3 07 ce ab fc e0 b1 df + d5 cd 95 08 09 6d 5b 2b 8b 6d f5 d6 71 ef 63 77 + c0 92 1c b2 3c 27 0a 70 e2 59 8e 6f f8 9d 19 f1 + 05 ac c2 d3 f0 cb 35 f2 92 80 e1 38 6b 6f 64 c4 + ef 22 e1 e1 f2 0d 0c e8 cf fb 22 49 bd 9a 21 37''', + 'e':'''01 00 01''', + 'd':'''33 a5 04 2a 90 b2 7d 4f 54 51 ca 9b bb d0 b4 47 + 71 a1 01 af 88 43 40 ae f9 88 5f 2a 4b be 92 e8 + 94 a7 24 ac 3c 56 8c 8f 97 85 3a d0 7c 02 66 c8 + c6 a3 ca 09 29 f1 e8 f1 12 31 88 44 29 fc 4d 9a + e5 5f ee 89 6a 10 ce 70 7c 3e d7 e7 34 e4 47 27 + a3 95 74 50 1a 53 26 83 10 9c 2a ba ca ba 28 3c + 31 b4 bd 2f 53 c3 ee 37 e3 52 ce e3 4f 9e 50 3b + d8 0c 06 22 ad 79 c6 dc ee 88 35 47 c6 a3 b3 25''' + }, + # Message + '''85 13 84 cd fe 81 9c 22 ed 6c 4c cb 30 da eb 5c + f0 59 bc 8e 11 66 b7 e3 53 0c 4c 23 3e 2b 5f 8f + 71 a1 cc a5 82 d4 3e cc 72 b1 bc a1 6d fc 70 13 + 22 6b 9e''', + # Signature + '''3e f7 f4 6e 83 1b f9 2b 32 27 41 42 a5 85 ff ce + fb dc a7 b3 2a e9 0d 10 fb 0f 0c 72 99 84 f0 4e + f2 9a 9d f0 78 07 75 ce 43 73 9b 97 83 83 90 db + 0a 55 05 e6 3d e9 27 02 8d 9d 29 b2 19 ca 2c 45 + 17 83 25 58 a5 5d 69 4a 6d 25 b9 da b6 60 03 c4 + cc cd 90 78 02 19 3b e5 17 0d 26 14 7d 37 b9 35 + 90 24 1b e5 1c 25 05 5f 47 ef 62 75 2c fb e2 14 + 18 fa fe 98 c2 2c 4d 4d 47 72 4f db 56 69 e8 43''', + # Salt + '''ef 28 69 fa 40 c3 46 cb 18 3d ab 3d 7b ff c9 8f + d5 6d f4 2d''', + # Hash + SHA + ), + + # + # Example 2.1 to be found in + # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip + # + ( + # Private key + { + 'n':'''01 d4 0c 1b cf 97 a6 8a e7 cd bd 8a 7b f3 e3 4f + a1 9d cc a4 ef 75 a4 74 54 37 5f 94 51 4d 88 fe + d0 06 fb 82 9f 84 19 ff 87 d6 31 5d a6 8a 1f f3 + a0 93 8e 9a bb 34 64 01 1c 30 3a d9 91 99 cf 0c + 7c 7a 8b 47 7d ce 82 9e 88 44 f6 25 b1 15 e5 e9 + c4 a5 9c f8 f8 11 3b 68 34 33 6a 2f d2 68 9b 47 + 2c bb 5e 5c ab e6 74 35 0c 59 b6 c1 7e 17 68 74 + fb 42 f8 fc 3d 17 6a 01 7e dc 61 fd 32 6c 4b 33 + c9''', + 'e':'''01 00 01''', + 'd':'''02 7d 14 7e 46 73 05 73 77 fd 1e a2 01 56 57 72 + 17 6a 7d c3 83 58 d3 76 04 56 85 a2 e7 87 c2 3c + 15 57 6b c1 6b 9f 44 44 02 d6 bf c5 d9 8a 3e 88 + ea 13 ef 67 c3 53 ec a0 c0 dd ba 92 55 bd 7b 8b + b5 0a 64 4a fd fd 1d d5 16 95 b2 52 d2 2e 73 18 + d1 b6 68 7a 1c 10 ff 75 54 5f 3d b0 fe 60 2d 5f + 2b 7f 29 4e 36 01 ea b7 b9 d1 ce cd 76 7f 64 69 + 2e 3e 53 6c a2 84 6c b0 c2 dd 48 6a 39 fa 75 b1''' + }, + # Message + '''da ba 03 20 66 26 3f ae db 65 98 48 11 52 78 a5 + 2c 44 fa a3 a7 6f 37 51 5e d3 36 32 10 72 c4 0a + 9d 9b 53 bc 05 01 40 78 ad f5 20 87 51 46 aa e7 + 0f f0 60 22 6d cb 7b 1f 1f c2 7e 93 60''', + # Signature + '''01 4c 5b a5 33 83 28 cc c6 e7 a9 0b f1 c0 ab 3f + d6 06 ff 47 96 d3 c1 2e 4b 63 9e d9 13 6a 5f ec + 6c 16 d8 88 4b dd 99 cf dc 52 14 56 b0 74 2b 73 + 68 68 cf 90 de 09 9a db 8d 5f fd 1d ef f3 9b a4 + 00 7a b7 46 ce fd b2 2d 7d f0 e2 25 f5 46 27 dc + 65 46 61 31 72 1b 90 af 44 53 63 a8 35 8b 9f 60 + 76 42 f7 8f ab 0a b0 f4 3b 71 68 d6 4b ae 70 d8 + 82 78 48 d8 ef 1e 42 1c 57 54 dd f4 2c 25 89 b5 + b3''', + # Salt + '''57 bf 16 0b cb 02 bb 1d c7 28 0c f0 45 85 30 b7 + d2 83 2f f7''', + SHA + ), + + # + # Example 8.1 to be found in + # ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1-vec.zip + # + ( + # Private key + { + 'n':'''49 53 70 a1 fb 18 54 3c 16 d3 63 1e 31 63 25 5d + f6 2b e6 ee e8 90 d5 f2 55 09 e4 f7 78 a8 ea 6f + bb bc df 85 df f6 4e 0d 97 20 
03 ab 36 81 fb ba + 6d d4 1f d5 41 82 9b 2e 58 2d e9 f2 a4 a4 e0 a2 + d0 90 0b ef 47 53 db 3c ee 0e e0 6c 7d fa e8 b1 + d5 3b 59 53 21 8f 9c ce ea 69 5b 08 66 8e de aa + dc ed 94 63 b1 d7 90 d5 eb f2 7e 91 15 b4 6c ad + 4d 9a 2b 8e fa b0 56 1b 08 10 34 47 39 ad a0 73 + 3f''', + 'e':'''01 00 01''', + 'd':'''6c 66 ff e9 89 80 c3 8f cd ea b5 15 98 98 83 61 + 65 f4 b4 b8 17 c4 f6 a8 d4 86 ee 4e a9 13 0f e9 + b9 09 2b d1 36 d1 84 f9 5f 50 4a 60 7e ac 56 58 + 46 d2 fd d6 59 7a 89 67 c7 39 6e f9 5a 6e ee bb + 45 78 a6 43 96 6d ca 4d 8e e3 de 84 2d e6 32 79 + c6 18 15 9c 1a b5 4a 89 43 7b 6a 61 20 e4 93 0a + fb 52 a4 ba 6c ed 8a 49 47 ac 64 b3 0a 34 97 cb + e7 01 c2 d6 26 6d 51 72 19 ad 0e c6 d3 47 db e9''' + }, + # Message + '''81 33 2f 4b e6 29 48 41 5e a1 d8 99 79 2e ea cf + 6c 6e 1d b1 da 8b e1 3b 5c ea 41 db 2f ed 46 70 + 92 e1 ff 39 89 14 c7 14 25 97 75 f5 95 f8 54 7f + 73 56 92 a5 75 e6 92 3a f7 8f 22 c6 99 7d db 90 + fb 6f 72 d7 bb 0d d5 74 4a 31 de cd 3d c3 68 58 + 49 83 6e d3 4a ec 59 63 04 ad 11 84 3c 4f 88 48 + 9f 20 97 35 f5 fb 7f da f7 ce c8 ad dc 58 18 16 + 8f 88 0a cb f4 90 d5 10 05 b7 a8 e8 4e 43 e5 42 + 87 97 75 71 dd 99 ee a4 b1 61 eb 2d f1 f5 10 8f + 12 a4 14 2a 83 32 2e db 05 a7 54 87 a3 43 5c 9a + 78 ce 53 ed 93 bc 55 08 57 d7 a9 fb''', + # Signature + '''02 62 ac 25 4b fa 77 f3 c1 ac a2 2c 51 79 f8 f0 + 40 42 2b 3c 5b af d4 0a 8f 21 cf 0f a5 a6 67 cc + d5 99 3d 42 db af b4 09 c5 20 e2 5f ce 2b 1e e1 + e7 16 57 7f 1e fa 17 f3 da 28 05 2f 40 f0 41 9b + 23 10 6d 78 45 aa f0 11 25 b6 98 e7 a4 df e9 2d + 39 67 bb 00 c4 d0 d3 5b a3 55 2a b9 a8 b3 ee f0 + 7c 7f ec db c5 42 4a c4 db 1e 20 cb 37 d0 b2 74 + 47 69 94 0e a9 07 e1 7f bb ca 67 3b 20 52 23 80 + c5''', + # Salt + '''1d 65 49 1d 79 c8 64 b3 73 00 9b e6 f6 f2 46 7b + ac 4c 78 fa''', + SHA + ) + ) + + def testSign1(self): + for i in range(len(self._testData)): + # Build the key + comps = [ int(rws(self._testData[i][0][x]),16) for x in ('n','e','d') ] + key = MyKey(RSA.construct(comps)) + # Hash function + h = self._testData[i][4].new() + # Data to sign + h.update(t2b(self._testData[i][1])) + # Salt + test_salt = t2b(self._testData[i][3]) + key._randfunc = lambda N: test_salt + # The real test + signer = PKCS.new(key) + self.assertTrue(signer.can_sign()) + s = signer.sign(h) + self.assertEqual(s, t2b(self._testData[i][2])) + + def testVerify1(self): + for i in range(len(self._testData)): + # Build the key + comps = [ int(rws(self._testData[i][0][x]),16) for x in ('n','e') ] + key = MyKey(RSA.construct(comps)) + # Hash function + h = self._testData[i][4].new() + # Data to sign + h.update(t2b(self._testData[i][1])) + # Salt + test_salt = t2b(self._testData[i][3]) + # The real test + key._randfunc = lambda N: test_salt + verifier = PKCS.new(key) + self.assertFalse(verifier.can_sign()) + result = verifier.verify(h, t2b(self._testData[i][2])) + self.assertTrue(result) + + def testSignVerify(self): + h = SHA.new() + h.update(b('blah blah blah')) + + rng = Random.new().read + key = MyKey(RSA.generate(1024,rng)) + + # Helper function to monitor what's request from MGF + global mgfcalls + def newMGF(seed,maskLen): + global mgfcalls + mgfcalls += 1 + return bchr(0x00)*maskLen + + # Verify that PSS is friendly to all ciphers + for hashmod in (MD2,MD5,SHA,SHA224,SHA256,SHA384,RIPEMD): + h = hashmod.new() + h.update(b('blah blah blah')) + + # Verify that sign() asks for as many random bytes + # as the hash output size + key.asked = 0 + signer = PKCS.new(key) + s = signer.sign(h) + self.assertTrue(signer.verify(h, s)) + 
self.assertEqual(key.asked, h.digest_size) + + h = SHA.new() + h.update(b('blah blah blah')) + + # Verify that sign() uses a different salt length + for sLen in (0,3,21): + key.asked = 0 + signer = PKCS.new(key, saltLen=sLen) + s = signer.sign(h) + self.assertEqual(key.asked, sLen) + self.assertTrue(signer.verify(h, s)) + + # Verify that sign() uses the custom MGF + mgfcalls = 0 + signer = PKCS.new(key, newMGF) + s = signer.sign(h) + self.assertEqual(mgfcalls, 1) + self.assertTrue(signer.verify(h, s)) + + # Verify that sign() does not call the RNG + # when salt length is 0, even when a new MGF is provided + key.asked = 0 + mgfcalls = 0 + signer = PKCS.new(key, newMGF, 0) + s = signer.sign(h) + self.assertEqual(key.asked,0) + self.assertEqual(mgfcalls, 1) + self.assertTrue(signer.verify(h, s)) + +def get_tests(config={}): + tests = [] + tests += list_test_cases(PKCS1_PSS_Tests) + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 diff --git a/Lib/site-packages/Crypto/SelfTest/Util/__init__.py b/Lib/site-packages/Crypto/SelfTest/Util/__init__.py new file mode 100644 index 0000000..abd640a --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Util/__init__.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Util/__init__.py: Self-test for utility modules +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-test for utility modules""" + +__revision__ = "$Id$" + +import os + +def get_tests(config={}): + tests = [] + if os.name == 'nt': + from Crypto.SelfTest.Util import test_winrandom; tests += test_winrandom.get_tests(config=config) + from Crypto.SelfTest.Util import test_number; tests += test_number.get_tests(config=config) + from Crypto.SelfTest.Util import test_Counter; tests += test_Counter.get_tests(config=config) + return tests + +if __name__ == '__main__': + import unittest + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Util/test_Counter.py b/Lib/site-packages/Crypto/SelfTest/Util/test_Counter.py new file mode 100644 index 0000000..1b38f6a --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Util/test_Counter.py @@ -0,0 +1,165 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Util/test_Counter: Self-test for the Crypto.Util.Counter module +# +# Written in 2009 by Dwayne C. 
Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-tests for Crypto.Util.Counter""" + +__revision__ = "$Id$" + +import sys +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * +from Crypto.Util.py3compat import * + +import unittest + +class CounterTests(unittest.TestCase): + def setUp(self): + global Counter + from Crypto.Util import Counter + + def test_BE_shortcut(self): + """Big endian, shortcut enabled""" + c = Counter.new(128) + self.assertEqual(c.__PCT_CTR_SHORTCUT__,True) # assert_ + c = Counter.new(128, little_endian=False) + self.assertEqual(c.__PCT_CTR_SHORTCUT__,True) # assert_ + c = Counter.new(128, disable_shortcut=False) + self.assertEqual(c.__PCT_CTR_SHORTCUT__,True) # assert_ + c = Counter.new(128, little_endian=False, disable_shortcut=False) + self.assertEqual(c.__PCT_CTR_SHORTCUT__,True) # assert_ + + def test_LE_shortcut(self): + """Little endian, shortcut enabled""" + c = Counter.new(128, little_endian=True) + self.assertEqual(c.__PCT_CTR_SHORTCUT__,True) # assert_ + c = Counter.new(128, little_endian=True, disable_shortcut=False) + self.assertEqual(c.__PCT_CTR_SHORTCUT__,True) # assert_ + + def test_BE_no_shortcut(self): + """Big endian, shortcut disabled""" + c = Counter.new(128, disable_shortcut=True) + self.assertRaises(AttributeError, getattr, c, '__PCT_CTR_SHORTCUT__') + c = Counter.new(128, little_endian=False, disable_shortcut=True) + self.assertRaises(AttributeError, getattr, c, '__PCT_CTR_SHORTCUT__') + + def test_LE_no_shortcut(self): + """Little endian, shortcut disabled""" + c = Counter.new(128, little_endian=True, disable_shortcut=True) + self.assertRaises(AttributeError, getattr, c, '__PCT_CTR_SHORTCUT__') + + def test_BE_defaults(self): + """128-bit, Big endian, defaults""" + c = Counter.new(128) + self.assertEqual(1, c.next_value()) + self.assertEqual(b("\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01"), c()) + self.assertEqual(2, c.next_value()) + self.assertEqual(b("\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02"), c()) + for i in range(3, 256): + self.assertEqual(i, c.next_value()) + self.assertEqual(b("\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00")+bchr(i), c()) + self.assertEqual(256, c.next_value()) + self.assertEqual(b("\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00"), c()) + + def test_LE_defaults(self): + """128-bit, Little endian, defaults""" + c = Counter.new(128, little_endian=True) + self.assertEqual(1, c.next_value()) + 
self.assertEqual(b("\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"), c()) + self.assertEqual(2, c.next_value()) + self.assertEqual(b("\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"), c()) + for i in range(3, 256): + self.assertEqual(i, c.next_value()) + self.assertEqual(bchr(i)+b("\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"), c()) + self.assertEqual(256, c.next_value()) + self.assertEqual(b("\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"), c()) + + def test_BE8_wraparound(self): + """8-bit, Big endian, wraparound""" + c = Counter.new(8) + for i in range(1, 256): + self.assertEqual(i, c.next_value()) + self.assertEqual(bchr(i), c()) + self.assertRaises(OverflowError, c.next_value) + self.assertRaises(OverflowError, c) + self.assertRaises(OverflowError, c.next_value) + self.assertRaises(OverflowError, c) + + def test_LE8_wraparound(self): + """8-bit, Little endian, wraparound""" + c = Counter.new(8, little_endian=True) + for i in range(1, 256): + self.assertEqual(i, c.next_value()) + self.assertEqual(bchr(i), c()) + self.assertRaises(OverflowError, c.next_value) + self.assertRaises(OverflowError, c) + self.assertRaises(OverflowError, c.next_value) + self.assertRaises(OverflowError, c) + + def test_BE8_wraparound_allowed(self): + """8-bit, Big endian, wraparound with allow_wraparound=True""" + c = Counter.new(8, allow_wraparound=True) + for i in range(1, 256): + self.assertEqual(i, c.next_value()) + self.assertEqual(bchr(i), c()) + self.assertEqual(0, c.next_value()) + self.assertEqual(b("\x00"), c()) + self.assertEqual(1, c.next_value()) + + def test_LE8_wraparound_allowed(self): + """8-bit, Little endian, wraparound with allow_wraparound=True""" + c = Counter.new(8, little_endian=True, allow_wraparound=True) + for i in range(1, 256): + self.assertEqual(i, c.next_value()) + self.assertEqual(bchr(i), c()) + self.assertEqual(0, c.next_value()) + self.assertEqual(b("\x00"), c()) + self.assertEqual(1, c.next_value()) + + def test_BE8_carry(self): + """8-bit, Big endian, carry attribute""" + c = Counter.new(8) + for i in range(1, 256): + self.assertEqual(0, c.carry) + self.assertEqual(i, c.next_value()) + self.assertEqual(bchr(i), c()) + self.assertEqual(1, c.carry) + + def test_LE8_carry(self): + """8-bit, Little endian, carry attribute""" + c = Counter.new(8, little_endian=True) + for i in range(1, 256): + self.assertEqual(0, c.carry) + self.assertEqual(i, c.next_value()) + self.assertEqual(bchr(i), c()) + self.assertEqual(1, c.carry) + +def get_tests(config={}): + from Crypto.SelfTest.st_common import list_test_cases + return list_test_cases(CounterTests) + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Util/test_asn1.py b/Lib/site-packages/Crypto/SelfTest/Util/test_asn1.py new file mode 100644 index 0000000..6d63a2a --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Util/test_asn1.py @@ -0,0 +1,293 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Util/test_asn.py: Self-test for the Crypto.Util.asn1 module +# +# =================================================================== +# The contents of this file are dedicated to the public domain. 
To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-tests for Crypto.Util.asn1""" + +__revision__ = "$Id$" + +import unittest +import sys + +from Crypto.Util.py3compat import * +from Crypto.Util.asn1 import DerSequence, DerObject + +class DerObjectTests(unittest.TestCase): + + def testObjEncode1(self): + # No payload + der = DerObject(b('\x33')) + self.assertEqual(der.encode(), b('\x33\x00')) + # Small payload + der.payload = b('\x45') + self.assertEqual(der.encode(), b('\x33\x01\x45')) + # Invariant + self.assertEqual(der.encode(), b('\x33\x01\x45')) + # Initialize with numerical tag + der = DerObject(b(0x33)) + der.payload = b('\x45') + self.assertEqual(der.encode(), b('\x33\x01\x45')) + + def testObjEncode2(self): + # Known types + der = DerObject('SEQUENCE') + self.assertEqual(der.encode(), b('\x30\x00')) + der = DerObject('BIT STRING') + self.assertEqual(der.encode(), b('\x03\x00')) + + def testObjEncode3(self): + # Long payload + der = DerObject(b('\x34')) + der.payload = b("0")*128 + self.assertEqual(der.encode(), b('\x34\x81\x80' + "0"*128)) + + def testObjDecode1(self): + # Decode short payload + der = DerObject() + der.decode(b('\x20\x02\x01\x02')) + self.assertEqual(der.payload, b("\x01\x02")) + self.assertEqual(der.typeTag, 0x20) + + def testObjDecode2(self): + # Decode short payload + der = DerObject() + der.decode(b('\x22\x81\x80' + "1"*128)) + self.assertEqual(der.payload, b("1")*128) + self.assertEqual(der.typeTag, 0x22) + +class DerSequenceTests(unittest.TestCase): + + def testEncode1(self): + # Empty sequence + der = DerSequence() + self.assertEqual(der.encode(), b('0\x00')) + self.assertFalse(der.hasOnlyInts()) + # One single-byte integer (zero) + der.append(0) + self.assertEqual(der.encode(), b('0\x03\x02\x01\x00')) + self.assertTrue(der.hasOnlyInts()) + # Invariant + self.assertEqual(der.encode(), b('0\x03\x02\x01\x00')) + + def testEncode2(self): + # One single-byte integer (non-zero) + der = DerSequence() + der.append(127) + self.assertEqual(der.encode(), b('0\x03\x02\x01\x7f')) + # Indexing + der[0] = 1 + self.assertEqual(len(der),1) + self.assertEqual(der[0],1) + self.assertEqual(der[-1],1) + self.assertEqual(der.encode(), b('0\x03\x02\x01\x01')) + # + der[:] = [1] + self.assertEqual(len(der),1) + self.assertEqual(der[0],1) + self.assertEqual(der.encode(), b('0\x03\x02\x01\x01')) + + def testEncode3(self): + # One multi-byte integer (non-zero) + der = DerSequence() + der.append(0x180) + self.assertEqual(der.encode(), b('0\x04\x02\x02\x01\x80')) + + def testEncode4(self): + # One very long integer + der = DerSequence() + der.append(2**2048) + self.assertEqual(der.encode(), b('0\x82\x01\x05')+ + b('\x02\x82\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + 
b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00')) + + def testEncode5(self): + # One single-byte integer (looks negative) + der = DerSequence() + der.append(0xFF) + self.assertEqual(der.encode(), b('0\x04\x02\x02\x00\xff')) + + def testEncode6(self): + # Two integers + der = DerSequence() + der.append(0x180) + der.append(0xFF) + self.assertEqual(der.encode(), b('0\x08\x02\x02\x01\x80\x02\x02\x00\xff')) + self.assertTrue(der.hasOnlyInts()) + # + der.append(0x01) + der[1:] = [9,8] + self.assertEqual(len(der),3) + self.assertEqual(der[1:],[9,8]) + self.assertEqual(der[1:-1],[9]) + self.assertEqual(der.encode(), b('0\x0A\x02\x02\x01\x80\x02\x01\x09\x02\x01\x08')) + + def testEncode6(self): + # One integer and another type (no matter what it is) + der = DerSequence() + der.append(0x180) + der.append(b('\x00\x02\x00\x00')) + self.assertEqual(der.encode(), b('0\x08\x02\x02\x01\x80\x00\x02\x00\x00')) + self.assertFalse(der.hasOnlyInts()) + + #### + + def testDecode1(self): + # Empty sequence + der = DerSequence() + der.decode(b('0\x00')) + self.assertEqual(len(der),0) + # One single-byte integer (zero) + der.decode(b('0\x03\x02\x01\x00')) + self.assertEqual(len(der),1) + self.assertEqual(der[0],0) + # Invariant + der.decode(b('0\x03\x02\x01\x00')) + self.assertEqual(len(der),1) + self.assertEqual(der[0],0) + + def testDecode2(self): + # One single-byte integer (non-zero) + der = DerSequence() + der.decode(b('0\x03\x02\x01\x7f')) + self.assertEqual(len(der),1) + self.assertEqual(der[0],127) + + def testDecode3(self): + # One multi-byte integer (non-zero) + der = DerSequence() + der.decode(b('0\x04\x02\x02\x01\x80')) + self.assertEqual(len(der),1) + self.assertEqual(der[0],0x180) + + def testDecode4(self): + # One very long integer + der = DerSequence() + der.decode(b('0\x82\x01\x05')+ + b('\x02\x82\x01\x01\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ 
+ b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00')+ + b('\x00\x00\x00\x00\x00\x00\x00\x00\x00')) + self.assertEqual(len(der),1) + self.assertEqual(der[0],2**2048) + + def testDecode5(self): + # One single-byte integer (looks negative) + der = DerSequence() + der.decode(b('0\x04\x02\x02\x00\xff')) + self.assertEqual(len(der),1) + self.assertEqual(der[0],0xFF) + + def testDecode6(self): + # Two integers + der = DerSequence() + der.decode(b('0\x08\x02\x02\x01\x80\x02\x02\x00\xff')) + self.assertEqual(len(der),2) + self.assertEqual(der[0],0x180) + self.assertEqual(der[1],0xFF) + + def testDecode7(self): + # One integer and 2 other types + der = DerSequence() + der.decode(b('0\x0A\x02\x02\x01\x80\x24\x02\xb6\x63\x12\x00')) + self.assertEqual(len(der),3) + self.assertEqual(der[0],0x180) + self.assertEqual(der[1],b('\x24\x02\xb6\x63')) + self.assertEqual(der[2],b('\x12\x00')) + + def testDecode8(self): + # Only 2 other types + der = DerSequence() + der.decode(b('0\x06\x24\x02\xb6\x63\x12\x00')) + self.assertEqual(len(der),2) + self.assertEqual(der[0],b('\x24\x02\xb6\x63')) + self.assertEqual(der[1],b('\x12\x00')) + + def testErrDecode1(self): + # Not a sequence + der = DerSequence() + self.assertRaises(ValueError, der.decode, b('')) + self.assertRaises(ValueError, der.decode, b('\x00')) + self.assertRaises(ValueError, der.decode, b('\x30')) + + def testErrDecode2(self): + # Wrong payload type + der = DerSequence() + self.assertRaises(ValueError, der.decode, b('\x30\x00\x00'), True) + + def testErrDecode3(self): + # Wrong length format + der = DerSequence() + self.assertRaises(ValueError, der.decode, b('\x30\x04\x02\x01\x01\x00')) + self.assertRaises(ValueError, der.decode, b('\x30\x81\x03\x02\x01\x01')) + self.assertRaises(ValueError, der.decode, b('\x30\x04\x02\x81\x01\x01')) + + def testErrDecode4(self): + # Wrong integer format + der = DerSequence() + # Multi-byte encoding for zero + #self.assertRaises(ValueError, der.decode, '\x30\x04\x02\x02\x00\x00') + # Negative integer + self.assertRaises(ValueError, der.decode, b('\x30\x04\x02\x01\xFF')) + +def get_tests(config={}): + from Crypto.SelfTest.st_common import list_test_cases + listTests = [] + listTests += list_test_cases(DerObjectTests) + listTests += list_test_cases(DerSequenceTests) + return listTests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Util/test_number.py b/Lib/site-packages/Crypto/SelfTest/Util/test_number.py new file mode 100644 index 0000000..f088a73 --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Util/test_number.py @@ -0,0 +1,295 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Util/test_number.py: Self-test for parts of the Crypto.Util.number module +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. 
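The tests above drive Crypto.Util.asn1.DerSequence through encode/decode round trips; a compact sketch of the same API, reusing the byte string expected by testEncode6/testDecode6 (0x180 and 0xFF as the two INTEGER elements):

    from Crypto.Util.asn1 import DerSequence

    seq = DerSequence()
    seq.append(0x180)                 # multi-byte INTEGER
    seq.append(0xFF)                  # gets a leading zero octet so it is not read as negative
    encoded = seq.encode()
    assert encoded == b'0\x08\x02\x02\x01\x80\x02\x02\x00\xff'

    decoded = DerSequence()
    decoded.decode(encoded)
    assert len(decoded) == 2
    assert decoded[0] == 0x180 and decoded[1] == 0xFF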
To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self-tests for (some of) Crypto.Util.number""" + +__revision__ = "$Id$" + +import sys +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * + +import unittest + +# NB: In some places, we compare tuples instead of just output values so that +# if any inputs cause a test failure, we'll be able to tell which ones. + +class MiscTests(unittest.TestCase): + def setUp(self): + global number, math + from Crypto.Util import number + import math + + def test_ceil_shift(self): + """Util.number.ceil_shift""" + self.assertRaises(AssertionError, number.ceil_shift, -1, 1) + self.assertRaises(AssertionError, number.ceil_shift, 1, -1) + + # b = 0 + self.assertEqual(0, number.ceil_shift(0, 0)) + self.assertEqual(1, number.ceil_shift(1, 0)) + self.assertEqual(2, number.ceil_shift(2, 0)) + self.assertEqual(3, number.ceil_shift(3, 0)) + + # b = 1 + self.assertEqual(0, number.ceil_shift(0, 1)) + self.assertEqual(1, number.ceil_shift(1, 1)) + self.assertEqual(1, number.ceil_shift(2, 1)) + self.assertEqual(2, number.ceil_shift(3, 1)) + + # b = 2 + self.assertEqual(0, number.ceil_shift(0, 2)) + self.assertEqual(1, number.ceil_shift(1, 2)) + self.assertEqual(1, number.ceil_shift(2, 2)) + self.assertEqual(1, number.ceil_shift(3, 2)) + self.assertEqual(1, number.ceil_shift(4, 2)) + self.assertEqual(2, number.ceil_shift(5, 2)) + self.assertEqual(2, number.ceil_shift(6, 2)) + self.assertEqual(2, number.ceil_shift(7, 2)) + self.assertEqual(2, number.ceil_shift(8, 2)) + self.assertEqual(3, number.ceil_shift(9, 2)) + + for b in range(3, 1+129, 3): # 3, 6, ... 
, 129 + self.assertEqual(0, number.ceil_shift(0, b)) + + n = 1 + while n <= 2**(b+2): + (q, r) = divmod(n-1, 2**b) + expected = q + int(not not r) + self.assertEqual((n-1, b, expected), + (n-1, b, number.ceil_shift(n-1, b))) + + (q, r) = divmod(n, 2**b) + expected = q + int(not not r) + self.assertEqual((n, b, expected), + (n, b, number.ceil_shift(n, b))) + + (q, r) = divmod(n+1, 2**b) + expected = q + int(not not r) + self.assertEqual((n+1, b, expected), + (n+1, b, number.ceil_shift(n+1, b))) + + n *= 2 + + def test_ceil_div(self): + """Util.number.ceil_div""" + self.assertRaises(TypeError, number.ceil_div, "1", 1) + self.assertRaises(ZeroDivisionError, number.ceil_div, 1, 0) + self.assertRaises(ZeroDivisionError, number.ceil_div, -1, 0) + + # b = -1 + self.assertEqual(0, number.ceil_div(0, -1)) + self.assertEqual(-1, number.ceil_div(1, -1)) + self.assertEqual(-2, number.ceil_div(2, -1)) + self.assertEqual(-3, number.ceil_div(3, -1)) + + # b = 1 + self.assertEqual(0, number.ceil_div(0, 1)) + self.assertEqual(1, number.ceil_div(1, 1)) + self.assertEqual(2, number.ceil_div(2, 1)) + self.assertEqual(3, number.ceil_div(3, 1)) + + # b = 2 + self.assertEqual(0, number.ceil_div(0, 2)) + self.assertEqual(1, number.ceil_div(1, 2)) + self.assertEqual(1, number.ceil_div(2, 2)) + self.assertEqual(2, number.ceil_div(3, 2)) + self.assertEqual(2, number.ceil_div(4, 2)) + self.assertEqual(3, number.ceil_div(5, 2)) + + # b = 3 + self.assertEqual(0, number.ceil_div(0, 3)) + self.assertEqual(1, number.ceil_div(1, 3)) + self.assertEqual(1, number.ceil_div(2, 3)) + self.assertEqual(1, number.ceil_div(3, 3)) + self.assertEqual(2, number.ceil_div(4, 3)) + self.assertEqual(2, number.ceil_div(5, 3)) + self.assertEqual(2, number.ceil_div(6, 3)) + self.assertEqual(3, number.ceil_div(7, 3)) + + # b = 4 + self.assertEqual(0, number.ceil_div(0, 4)) + self.assertEqual(1, number.ceil_div(1, 4)) + self.assertEqual(1, number.ceil_div(2, 4)) + self.assertEqual(1, number.ceil_div(3, 4)) + self.assertEqual(1, number.ceil_div(4, 4)) + self.assertEqual(2, number.ceil_div(5, 4)) + self.assertEqual(2, number.ceil_div(6, 4)) + self.assertEqual(2, number.ceil_div(7, 4)) + self.assertEqual(2, number.ceil_div(8, 4)) + self.assertEqual(3, number.ceil_div(9, 4)) + + # b = -4 + self.assertEqual(3, number.ceil_div(-9, -4)) + self.assertEqual(2, number.ceil_div(-8, -4)) + self.assertEqual(2, number.ceil_div(-7, -4)) + self.assertEqual(2, number.ceil_div(-6, -4)) + self.assertEqual(2, number.ceil_div(-5, -4)) + self.assertEqual(1, number.ceil_div(-4, -4)) + self.assertEqual(1, number.ceil_div(-3, -4)) + self.assertEqual(1, number.ceil_div(-2, -4)) + self.assertEqual(1, number.ceil_div(-1, -4)) + self.assertEqual(0, number.ceil_div(0, -4)) + self.assertEqual(0, number.ceil_div(1, -4)) + self.assertEqual(0, number.ceil_div(2, -4)) + self.assertEqual(0, number.ceil_div(3, -4)) + self.assertEqual(-1, number.ceil_div(4, -4)) + self.assertEqual(-1, number.ceil_div(5, -4)) + self.assertEqual(-1, number.ceil_div(6, -4)) + self.assertEqual(-1, number.ceil_div(7, -4)) + self.assertEqual(-2, number.ceil_div(8, -4)) + self.assertEqual(-2, number.ceil_div(9, -4)) + + def test_exact_log2(self): + """Util.number.exact_log2""" + self.assertRaises(TypeError, number.exact_log2, "0") + self.assertRaises(ValueError, number.exact_log2, -1) + self.assertRaises(ValueError, number.exact_log2, 0) + self.assertEqual(0, number.exact_log2(1)) + self.assertEqual(1, number.exact_log2(2)) + self.assertRaises(ValueError, number.exact_log2, 3) + self.assertEqual(2, 
number.exact_log2(4)) + self.assertRaises(ValueError, number.exact_log2, 5) + self.assertRaises(ValueError, number.exact_log2, 6) + self.assertRaises(ValueError, number.exact_log2, 7) + e = 3 + n = 8 + while e < 16: + if n == 2**e: + self.assertEqual(e, number.exact_log2(n), "expected=2**%d, n=%d" % (e, n)) + e += 1 + else: + self.assertRaises(ValueError, number.exact_log2, n) + n += 1 + + for e in range(16, 1+64, 2): + self.assertRaises(ValueError, number.exact_log2, 2**e-1) + self.assertEqual(e, number.exact_log2(2**e)) + self.assertRaises(ValueError, number.exact_log2, 2**e+1) + + def test_exact_div(self): + """Util.number.exact_div""" + + # Positive numbers + self.assertEqual(1, number.exact_div(1, 1)) + self.assertRaises(ValueError, number.exact_div, 1, 2) + self.assertEqual(1, number.exact_div(2, 2)) + self.assertRaises(ValueError, number.exact_div, 3, 2) + self.assertEqual(2, number.exact_div(4, 2)) + + # Negative numbers + self.assertEqual(-1, number.exact_div(-1, 1)) + self.assertEqual(-1, number.exact_div(1, -1)) + self.assertRaises(ValueError, number.exact_div, -1, 2) + self.assertEqual(1, number.exact_div(-2, -2)) + self.assertEqual(-2, number.exact_div(-4, 2)) + + # Zero dividend + self.assertEqual(0, number.exact_div(0, 1)) + self.assertEqual(0, number.exact_div(0, 2)) + + # Zero divisor (allow_divzero == False) + self.assertRaises(ZeroDivisionError, number.exact_div, 0, 0) + self.assertRaises(ZeroDivisionError, number.exact_div, 1, 0) + + # Zero divisor (allow_divzero == True) + self.assertEqual(0, number.exact_div(0, 0, allow_divzero=True)) + self.assertRaises(ValueError, number.exact_div, 1, 0, allow_divzero=True) + + def test_floor_div(self): + """Util.number.floor_div""" + self.assertRaises(TypeError, number.floor_div, "1", 1) + for a in range(-10, 10): + for b in range(-10, 10): + if b == 0: + self.assertRaises(ZeroDivisionError, number.floor_div, a, b) + else: + self.assertEqual((a, b, int(math.floor(float(a) / b))), + (a, b, number.floor_div(a, b))) + + def test_getStrongPrime(self): + """Util.number.getStrongPrime""" + self.assertRaises(ValueError, number.getStrongPrime, 256) + self.assertRaises(ValueError, number.getStrongPrime, 513) + bits = 512 + x = number.getStrongPrime(bits) + self.assertNotEqual(x % 2, 0) + self.assertEqual(x > (1 << bits-1)-1, 1) + self.assertEqual(x < (1 << bits), 1) + e = 2**16+1 + x = number.getStrongPrime(bits, e) + self.assertEqual(number.GCD(x-1, e), 1) + self.assertNotEqual(x % 2, 0) + self.assertEqual(x > (1 << bits-1)-1, 1) + self.assertEqual(x < (1 << bits), 1) + e = 2**16+2 + x = number.getStrongPrime(bits, e) + self.assertEqual(number.GCD((x-1)>>1, e), 1) + self.assertNotEqual(x % 2, 0) + self.assertEqual(x > (1 << bits-1)-1, 1) + self.assertEqual(x < (1 << bits), 1) + + def test_isPrime(self): + """Util.number.isPrime""" + self.assertEqual(number.isPrime(-3), False) # Regression test: negative numbers should not be prime + self.assertEqual(number.isPrime(-2), False) # Regression test: negative numbers should not be prime + self.assertEqual(number.isPrime(1), False) # Regression test: isPrime(1) caused some versions of PyCrypto to crash. 
+ self.assertEqual(number.isPrime(2), True) + self.assertEqual(number.isPrime(3), True) + self.assertEqual(number.isPrime(4), False) + self.assertEqual(number.isPrime(2**1279-1), True) + self.assertEqual(number.isPrime(-(2**1279-1)), False) # Regression test: negative numbers should not be prime + # test some known gmp pseudo-primes taken from + # http://www.trnicely.net/misc/mpzspsp.html + for composite in (43 * 127 * 211, 61 * 151 * 211, 15259 * 30517, + 346141 * 692281, 1007119 * 2014237, 3589477 * 7178953, + 4859419 * 9718837, 2730439 * 5460877, + 245127919 * 490255837, 963939391 * 1927878781, + 4186358431 * 8372716861, 1576820467 * 3153640933): + self.assertEqual(number.isPrime(int(composite)), False) + + def test_size(self): + self.assertEqual(number.size(2),2) + self.assertEqual(number.size(3),2) + self.assertEqual(number.size(0xa2),8) + self.assertEqual(number.size(0xa2ba40),8*3) + self.assertEqual(number.size(0xa2ba40ee07e3b2bd2f02ce227f36a195024486e49c19cb41bbbdfbba98b22b0e577c2eeaffa20d883a76e65e394c69d4b3c05a1e8fadda27edb2a42bc000fe888b9b32c22d15add0cd76b3e7936e19955b220dd17d4ea904b1ec102b2e4de7751222aa99151024c7cb41cc5ea21d00eeb41f7c800834d2c6e06bce3bce7ea9a5), 1024) + + def test_negative_number_roundtrip_mpzToLongObj_longObjToMPZ(self): + """Test that mpzToLongObj and longObjToMPZ (internal functions) roundtrip negative numbers correctly.""" + n = -100000000000000000000000000000000000 + e = 2 + k = number._fastmath.rsa_construct(n, e) + self.assertEqual(n, k.n) + self.assertEqual(e, k.e) + +def get_tests(config={}): + from Crypto.SelfTest.st_common import list_test_cases + return list_test_cases(MiscTests) + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/Util/test_winrandom.py b/Lib/site-packages/Crypto/SelfTest/Util/test_winrandom.py new file mode 100644 index 0000000..3fc5145 --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/Util/test_winrandom.py @@ -0,0 +1,48 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/Util/test_winrandom.py: Self-test for the winrandom module +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
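Taken together, the assertions above fix the behaviour of the small integer helpers in Crypto.Util.number; the following sketch collects a few representative checks (all values are the same ones the tests use):

    from Crypto.Util import number

    assert number.ceil_div(7, 3) == 3          # ceiling division
    assert number.ceil_shift(9, 2) == 3        # ceil(9 / 2**2)
    assert number.exact_log2(4) == 2           # raises ValueError for non-powers of two
    assert number.size(0xa2ba40) == 24         # size in bits
    assert number.isPrime(3) and not number.isPrime(4)
    assert not number.isPrime(1)               # 1 is not prime (regression-tested above)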
+# =================================================================== + +"""Self-test suite for Crypto.Util.winrandom""" + +__revision__ = "$Id$" + +import unittest + +class WinRandomImportTest(unittest.TestCase): + def runTest(self): + """winrandom: simple test""" + # Import the winrandom module and try to use it + from Crypto.Util import winrandom + randobj = winrandom.new() + x = randobj.get_bytes(16) + y = randobj.get_bytes(16) + self.assertNotEqual(x, y) + +def get_tests(config={}): + return [WinRandomImportTest()] + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/__init__.py b/Lib/site-packages/Crypto/SelfTest/__init__.py new file mode 100644 index 0000000..297a0f9 --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/__init__.py @@ -0,0 +1,92 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/__init__.py: Self-test for PyCrypto +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Self tests + +These tests should perform quickly and can ideally be used every time an +application runs. +""" + +__revision__ = "$Id$" + +import sys +import unittest +from io import StringIO + +class SelfTestError(Exception): + def __init__(self, message, result): + Exception.__init__(self, message, result) + self.message = message + self.result = result + +def run(module=None, verbosity=0, stream=None, tests=None, config=None, **kwargs): + """Execute self-tests. + + This raises SelfTestError if any test is unsuccessful. + + You may optionally pass in a sub-module of SelfTest if you only want to + perform some of the tests. 
For example, the following would test only the + hash modules: + + Crypto.SelfTest.run(Crypto.SelfTest.Hash) + + """ + if config is None: + config = {} + suite = unittest.TestSuite() + if module is None: + if tests is None: + tests = get_tests(config=config) + suite.addTests(tests) + else: + if tests is None: + suite.addTests(module.get_tests(config=config)) + else: + raise ValueError("'module' and 'tests' arguments are mutually exclusive") + if stream is None: + kwargs['stream'] = StringIO() + runner = unittest.TextTestRunner(verbosity=verbosity, **kwargs) + result = runner.run(suite) + if not result.wasSuccessful(): + if stream is None: + sys.stderr.write(stream.getvalue()) + raise SelfTestError("Self-test failed", result) + return result + +def get_tests(config={}): + tests = [] + from Crypto.SelfTest import Cipher; tests += Cipher.get_tests(config=config) + from Crypto.SelfTest import Hash; tests += Hash.get_tests(config=config) + from Crypto.SelfTest import Protocol; tests += Protocol.get_tests(config=config) + from Crypto.SelfTest import PublicKey; tests += PublicKey.get_tests(config=config) + from Crypto.SelfTest import Random; tests += Random.get_tests(config=config) + from Crypto.SelfTest import Util; tests += Util.get_tests(config=config) + from Crypto.SelfTest import Signature; tests += Signature.get_tests(config=config) + return tests + +if __name__ == '__main__': + suite = lambda: unittest.TestSuite(get_tests()) + unittest.main(defaultTest='suite') + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/SelfTest/st_common.py b/Lib/site-packages/Crypto/SelfTest/st_common.py new file mode 100644 index 0000000..c56eac5 --- /dev/null +++ b/Lib/site-packages/Crypto/SelfTest/st_common.py @@ -0,0 +1,62 @@ +# -*- coding: utf-8 -*- +# +# SelfTest/st_common.py: Common functions for SelfTest modules +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Common functions for SelfTest modules""" + +__revision__ = "$Id$" + +import unittest +import binascii +import sys +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * +from Crypto.Util.py3compat import * + +class _list_testloader(unittest.TestLoader): + suiteClass = list + +def list_test_cases(class_): + """Return a list of TestCase instances given a TestCase class + + This is useful when you have defined test* methods on your TestCase class. 
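run() above assembles the per-package get_tests() results into one unittest suite, captures the runner output, and raises SelfTestError if anything fails; list_test_cases() is the small helper the individual test modules use to turn a TestCase class into a list of instances. A short usage sketch of the public entry point, restricted to one sub-package as the docstring suggests:

    import Crypto.SelfTest
    from Crypto.SelfTest import Hash

    # Run only the hash self-tests; on failure the captured unittest output is
    # written to stderr and SelfTestError is raised.
    Crypto.SelfTest.run(module=Hash, verbosity=2)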
+ """ + return _list_testloader().loadTestsFromTestCase(class_) + +def strip_whitespace(s): + """Remove whitespace from a text or byte string""" + if isinstance(s,str): + return b("".join(s.split())) + else: + return b("").join(s.split()) + +def a2b_hex(s): + """Convert hexadecimal to binary, ignoring whitespace""" + return binascii.a2b_hex(strip_whitespace(s)) + +def b2a_hex(s): + """Convert binary to hexadecimal""" + # For completeness + return binascii.b2a_hex(s) + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/Signature/PKCS1_PSS.py b/Lib/site-packages/Crypto/Signature/PKCS1_PSS.py new file mode 100644 index 0000000..b8670dc --- /dev/null +++ b/Lib/site-packages/Crypto/Signature/PKCS1_PSS.py @@ -0,0 +1,355 @@ +# -*- coding: utf-8 -*- +# +# Signature/PKCS1_PSS.py : PKCS#1 PPS +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""RSA digital signature protocol with appendix according to PKCS#1 PSS. + +See RFC3447__ or the `original RSA Labs specification`__. + +This scheme is more properly called ``RSASSA-PSS``. + +For example, a sender may authenticate a message using SHA-1 and PSS like +this: + + >>> from Crypto.Signature import PKCS1_PSS + >>> from Crypto.Hash import SHA + >>> from Crypto.PublicKey import RSA + >>> from Crypto import Random + >>> + >>> message = 'To be signed' + >>> key = RSA.importKey(open('privkey.der').read()) + >>> h = SHA.new() + >>> h.update(message) + >>> signer = PKCS1_PSS.new(key) + >>> signature = PKCS1_PSS.sign(key) + +At the receiver side, verification can be done like using the public part of +the RSA key: + + >>> key = RSA.importKey(open('pubkey.der').read()) + >>> h = SHA.new() + >>> h.update(message) + >>> verifier = PKCS1_PSS.new(key) + >>> if verifier.verify(h, signature): + >>> print "The signature is authentic." + >>> else: + >>> print "The signature is not authentic." + +:undocumented: __revision__, __package__ + +.. __: http://www.ietf.org/rfc/rfc3447.txt +.. 
__: http://www.rsa.com/rsalabs/node.asp?id=2125 +""" + +# Allow nested scopes in Python 2.1 +# See http://oreilly.com/pub/a/python/2001/04/19/pythonnews.html + + +__revision__ = "$Id$" +__all__ = [ 'new', 'PSS_SigScheme' ] + +from Crypto.Util.py3compat import * +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * +import Crypto.Util.number +from Crypto.Util.number import ceil_shift, ceil_div, long_to_bytes +from Crypto.Util.strxor import strxor + +class PSS_SigScheme: + """This signature scheme can perform PKCS#1 PSS RSA signature or verification.""" + + def __init__(self, key, mgfunc, saltLen): + """Initialize this PKCS#1 PSS signature scheme object. + + :Parameters: + key : an RSA key object + If a private half is given, both signature and verification are possible. + If a public half is given, only verification is possible. + mgfunc : callable + A mask generation function that accepts two parameters: a string to + use as seed, and the lenth of the mask to generate, in bytes. + saltLen : int + Length of the salt, in bytes. + """ + self._key = key + self._saltLen = saltLen + self._mgfunc = mgfunc + + def can_sign(self): + """Return True if this cipher object can be used for signing messages.""" + return self._key.has_private() + + def sign(self, mhash): + """Produce the PKCS#1 PSS signature of a message. + + This function is named ``RSASSA-PSS-SIGN``, and is specified in + section 8.1.1 of RFC3447. + + :Parameters: + mhash : hash object + The hash that was carried out over the message. This is an object + belonging to the `Crypto.Hash` module. + + :Return: The PSS signature encoded as a string. + :Raise ValueError: + If the RSA key length is not sufficiently long to deal with the given + hash algorithm. + :Raise TypeError: + If the RSA key has no private half. + + :attention: Modify the salt length and the mask generation function only + if you know what you are doing. + The receiver must use the same parameters too. + """ + # TODO: Verify the key is RSA + + randfunc = self._key._randfunc + + # Set defaults for salt length and mask generation function + if self._saltLen == None: + sLen = mhash.digest_size + else: + sLen = self._saltLen + if self._mgfunc: + mgf = self._mgfunc + else: + mgf = lambda x,y: MGF1(x,y,mhash) + + modBits = Crypto.Util.number.size(self._key.n) + + # See 8.1.1 in RFC3447 + k = ceil_div(modBits,8) # Convert from bits to bytes + # Step 1 + em = EMSA_PSS_ENCODE(mhash, modBits-1, randfunc, mgf, sLen) + # Step 2a (OS2IP) and 2b (RSASP1) + m = self._key.decrypt(em) + # Step 2c (I2OSP) + S = bchr(0x00)*(k-len(m)) + m + return S + + def verify(self, mhash, S): + """Verify that a certain PKCS#1 PSS signature is authentic. + + This function checks if the party holding the private half of the given + RSA key has really signed the message. + + This function is called ``RSASSA-PSS-VERIFY``, and is specified in section + 8.1.2 of RFC3447. + + :Parameters: + mhash : hash object + The hash that was carried out over the message. This is an object + belonging to the `Crypto.Hash` module. + S : string + The signature that needs to be validated. + + :Return: True if verification is correct. False otherwise. 
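One slip in the module docstring above: it computes the signature with PKCS1_PSS.sign(key), whereas the call defined by this class is sign() on the scheme object returned by new(). A self-contained round trip under that reading (the in-memory 1024-bit key is only for illustration; the docstring loads one from privkey.der):

    from Crypto.Signature import PKCS1_PSS
    from Crypto.Hash import SHA
    from Crypto.PublicKey import RSA
    from Crypto import Random

    key = RSA.generate(1024, Random.new().read)    # illustrative key
    h = SHA.new()
    h.update(b'To be signed')

    signature = PKCS1_PSS.new(key).sign(h)         # RSASSA-PSS-SIGN
    verifier = PKCS1_PSS.new(key.publickey())
    assert verifier.verify(h, signature)           # RSASSA-PSS-VERIFY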
+ """ + # TODO: Verify the key is RSA + + # Set defaults for salt length and mask generation function + if self._saltLen == None: + sLen = mhash.digest_size + else: + sLen = self._saltLen + if self._mgfunc: + mgf = self._mgfunc + else: + mgf = lambda x,y: MGF1(x,y,mhash) + + modBits = Crypto.Util.number.size(self._key.n) + + # See 8.1.2 in RFC3447 + k = ceil_div(modBits,8) # Convert from bits to bytes + # Step 1 + if len(S) != k: + return False + # Step 2a (O2SIP), 2b (RSAVP1), and partially 2c (I2OSP) + # Note that signature must be smaller than the module + # but RSA.py won't complain about it. + # TODO: Fix RSA object; don't do it here. + em = self._key.encrypt(S, 0)[0] + # Step 2c + emLen = ceil_div(modBits-1,8) + em = bchr(0x00)*(emLen-len(em)) + em + # Step 3 + try: + result = EMSA_PSS_VERIFY(mhash, em, modBits-1, mgf, sLen) + except ValueError: + return False + # Step 4 + return result + +def MGF1(mgfSeed, maskLen, hash): + """Mask Generation Function, described in B.2.1""" + T = b("") + for counter in range(ceil_div(maskLen, hash.digest_size)): + c = long_to_bytes(counter, 4) + T = T + hash.new(mgfSeed + c).digest() + assert(len(T)>=maskLen) + return T[:maskLen] + +def EMSA_PSS_ENCODE(mhash, emBits, randFunc, mgf, sLen): + """ + Implement the ``EMSA-PSS-ENCODE`` function, as defined + in PKCS#1 v2.1 (RFC3447, 9.1.1). + + The original ``EMSA-PSS-ENCODE`` actually accepts the message ``M`` as input, + and hash it internally. Here, we expect that the message has already + been hashed instead. + + :Parameters: + mhash : hash object + The hash object that holds the digest of the message being signed. + emBits : int + Maximum length of the final encoding, in bits. + randFunc : callable + An RNG function that accepts as only parameter an int, and returns + a string of random bytes, to be used as salt. + mgf : callable + A mask generation function that accepts two parameters: a string to + use as seed, and the lenth of the mask to generate, in bytes. + sLen : int + Length of the salt, in bytes. + + :Return: An ``emLen`` byte long string that encodes the hash + (with ``emLen = \ceil(emBits/8)``). + + :Raise ValueError: + When digest or salt length are too big. + """ + + emLen = ceil_div(emBits,8) + + # Bitmask of digits that fill up + lmask = 0 + for i in range(8*emLen-emBits): + lmask = lmask>>1 | 0x80 + + # Step 1 and 2 have been already done + # Step 3 + if emLen < mhash.digest_size+sLen+2: + raise ValueError("Digest or salt length are too long for given key size.") + # Step 4 + salt = b("") + if randFunc and sLen>0: + salt = randFunc(sLen) + # Step 5 and 6 + h = mhash.new(bchr(0x00)*8 + mhash.digest() + salt) + # Step 7 and 8 + db = bchr(0x00)*(emLen-sLen-mhash.digest_size-2) + bchr(0x01) + salt + # Step 9 + dbMask = mgf(h.digest(), emLen-mhash.digest_size-1) + # Step 10 + maskedDB = strxor(db,dbMask) + # Step 11 + maskedDB = bchr(bord(maskedDB[0]) & ~lmask) + maskedDB[1:] + # Step 12 + em = maskedDB + h.digest() + bchr(0xBC) + return em + +def EMSA_PSS_VERIFY(mhash, em, emBits, mgf, sLen): + """ + Implement the ``EMSA-PSS-VERIFY`` function, as defined + in PKCS#1 v2.1 (RFC3447, 9.1.2). + + ``EMSA-PSS-VERIFY`` actually accepts the message ``M`` as input, + and hash it internally. Here, we expect that the message has already + been hashed instead. + + :Parameters: + mhash : hash object + The hash object that holds the digest of the message to be verified. + em : string + The signature to verify, therefore proving that the sender really signed + the message that was received. 
+ emBits : int + Length of the final encoding (em), in bits. + mgf : callable + A mask generation function that accepts two parameters: a string to + use as seed, and the lenth of the mask to generate, in bytes. + sLen : int + Length of the salt, in bytes. + + :Return: 0 if the encoding is consistent, 1 if it is inconsistent. + + :Raise ValueError: + When digest or salt length are too big. + """ + + emLen = ceil_div(emBits,8) + + # Bitmask of digits that fill up + lmask = 0 + for i in range(8*emLen-emBits): + lmask = lmask>>1 | 0x80 + + # Step 1 and 2 have been already done + # Step 3 + if emLen < mhash.digest_size+sLen+2: + return False + # Step 4 + if ord(em[-1:])!=0xBC: + return False + # Step 5 + maskedDB = em[:emLen-mhash.digest_size-1] + h = em[emLen-mhash.digest_size-1:-1] + # Step 6 + if lmask & bord(em[0]): + return False + # Step 7 + dbMask = mgf(h, emLen-mhash.digest_size-1) + # Step 8 + db = strxor(maskedDB, dbMask) + # Step 9 + db = bchr(bord(db[0]) & ~lmask) + db[1:] + # Step 10 + if not db.startswith(bchr(0x00)*(emLen-mhash.digest_size-sLen-2) + bchr(0x01)): + return False + # Step 11 + salt = b("") + if sLen: salt = db[-sLen:] + # Step 12 and 13 + hp = mhash.new(bchr(0x00)*8 + mhash.digest() + salt).digest() + # Step 14 + if h!=hp: + return False + return True + +def new(key, mgfunc=None, saltLen=None): + """Return a signature scheme object `PSS_SigScheme` that + can be used to perform PKCS#1 PSS signature or verification. + + :Parameters: + key : RSA key object + The key to use to sign or verify the message. This is a `Crypto.PublicKey.RSA` object. + Signing is only possible if *key* is a private RSA key. + mgfunc : callable + A mask generation function that accepts two parameters: a string to + use as seed, and the lenth of the mask to generate, in bytes. + If not specified, the standard MGF1 is used. + saltLen : int + Length of the salt, in bytes. If not specified, it matches the output + size of the hash function. + + """ + return PSS_SigScheme(key, mgfunc, saltLen) + diff --git a/Lib/site-packages/Crypto/Signature/PKCS1_v1_5.py b/Lib/site-packages/Crypto/Signature/PKCS1_v1_5.py new file mode 100644 index 0000000..73ac251 --- /dev/null +++ b/Lib/site-packages/Crypto/Signature/PKCS1_v1_5.py @@ -0,0 +1,236 @@ +# -*- coding: utf-8 -*- +# +# Signature/PKCS1-v1_5.py : PKCS#1 v1.5 +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +""" +RSA digital signature protocol according to PKCS#1 v1.5 + +See RFC3447__ or the `original RSA Labs specification`__. + +This scheme is more properly called ``RSASSA-PKCS1-v1_5``. 
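EMSA_PSS_ENCODE and EMSA_PSS_VERIFY above are the RFC3447 section 9.1 encoding primitives that sign() and verify() wrap around the raw RSA operation, so encoding a digest and then verifying the result must always be consistent. A sketch of that property using the module-level functions (emBits=1023 is an arbitrary stand-in for modBits-1, and the MGF1 wrapper mirrors the default that the scheme object builds):

    from Crypto.Hash import SHA
    from Crypto import Random
    from Crypto.Signature.PKCS1_PSS import EMSA_PSS_ENCODE, EMSA_PSS_VERIFY, MGF1

    mhash = SHA.new(b'message')
    emBits = 1023                                   # stand-in for modBits - 1
    mgf = lambda seed, length: MGF1(seed, length, SHA)
    salt_len = mhash.digest_size

    em = EMSA_PSS_ENCODE(mhash, emBits, Random.new().read, mgf, salt_len)
    assert EMSA_PSS_VERIFY(mhash, em, emBits, mgf, salt_len)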
+ +For example, a sender may authenticate a message using SHA-1 like +this: + + >>> from Crypto.Signature import PKCS1_v1_5 + >>> from Crypto.Hash import SHA + >>> from Crypto.PublicKey import RSA + >>> + >>> message = 'To be signed' + >>> key = RSA.importKey(open('privkey.der').read()) + >>> h = SHA.new(message) + >>> signer = PKCS1_v1_5.new(key) + >>> signature = signer.sign(h) + +At the receiver side, verification can be done using the public part of +the RSA key: + + >>> key = RSA.importKey(open('pubkey.der').read()) + >>> h = SHA.new(message) + >>> verifier = PKCS1_v1_5.new(key) + >>> if verifier.verify(h, signature): + >>> print "The signature is authentic." + >>> else: + >>> print "The signature is not authentic." + +:undocumented: __revision__, __package__ + +.. __: http://www.ietf.org/rfc/rfc3447.txt +.. __: http://www.rsa.com/rsalabs/node.asp?id=2125 +""" + +__revision__ = "$Id$" +__all__ = [ 'new', 'PKCS115_SigScheme' ] + +import Crypto.Util.number +from Crypto.Util.number import ceil_div +from Crypto.Util.asn1 import DerSequence, DerNull, DerOctetString +from Crypto.Util.py3compat import * + +class PKCS115_SigScheme: + """This signature scheme can perform PKCS#1 v1.5 RSA signature or verification.""" + + def __init__(self, key): + """Initialize this PKCS#1 v1.5 signature scheme object. + + :Parameters: + key : an RSA key object + If a private half is given, both signature and verification are possible. + If a public half is given, only verification is possible. + """ + self._key = key + + def can_sign(self): + """Return True if this cipher object can be used for signing messages.""" + return self._key.has_private() + + def sign(self, mhash): + """Produce the PKCS#1 v1.5 signature of a message. + + This function is named ``RSASSA-PKCS1-V1_5-SIGN``, and is specified in + section 8.2.1 of RFC3447. + + :Parameters: + mhash : hash object + The hash that was carried out over the message. This is an object + belonging to the `Crypto.Hash` module. + + :Return: The signature encoded as a string. + :Raise ValueError: + If the RSA key length is not sufficiently long to deal with the given + hash algorithm. + :Raise TypeError: + If the RSA key has no private half. + """ + # TODO: Verify the key is RSA + + # See 8.2.1 in RFC3447 + modBits = Crypto.Util.number.size(self._key.n) + k = ceil_div(modBits,8) # Convert from bits to bytes + + # Step 1 + em = EMSA_PKCS1_V1_5_ENCODE(mhash, k) + # Step 2a (OS2IP) and 2b (RSASP1) + m = self._key.decrypt(em) + # Step 2c (I2OSP) + S = bchr(0x00)*(k-len(m)) + m + return S + + def verify(self, mhash, S): + """Verify that a certain PKCS#1 v1.5 signature is authentic. + + This function checks if the party holding the private half of the key + really signed the message. + + This function is named ``RSASSA-PKCS1-V1_5-VERIFY``, and is specified in + section 8.2.2 of RFC3447. + + :Parameters: + mhash : hash object + The hash that was carried out over the message. This is an object + belonging to the `Crypto.Hash` module. + S : string + The signature that needs to be validated. + + :Return: True if verification is correct. False otherwise. + """ + # TODO: Verify the key is RSA + + # See 8.2.2 in RFC3447 + modBits = Crypto.Util.number.size(self._key.n) + k = ceil_div(modBits,8) # Convert from bits to bytes + + # Step 1 + if len(S) != k: + return 0 + # Step 2a (O2SIP) and 2b (RSAVP1) + # Note that signature must be smaller than the module + # but RSA.py won't complain about it. + # TODO: Fix RSA object; don't do it here. 
+ m = self._key.encrypt(S, 0)[0] + # Step 2c (I2OSP) + em1 = bchr(0x00)*(k-len(m)) + m + # Step 3 + try: + em2 = EMSA_PKCS1_V1_5_ENCODE(mhash, k) + except ValueError: + return 0 + # Step 4 + # By comparing the full encodings (as opposed to checking each + # of its components one at a time) we avoid attacks to the padding + # scheme like Bleichenbacher's (see http://www.mail-archive.com/cryptography@metzdowd.com/msg06537). + # + return em1==em2 + +def EMSA_PKCS1_V1_5_ENCODE(hash, emLen): + """ + Implement the ``EMSA-PKCS1-V1_5-ENCODE`` function, as defined + in PKCS#1 v2.1 (RFC3447, 9.2). + + ``EMSA-PKCS1-V1_5-ENCODE`` actually accepts the message ``M`` as input, + and hash it internally. Here, we expect that the message has already + been hashed instead. + + :Parameters: + hash : hash object + The hash object that holds the digest of the message being signed. + emLen : int + The length the final encoding must have, in bytes. + + :attention: the early standard (RFC2313) stated that ``DigestInfo`` + had to be BER-encoded. This means that old signatures + might have length tags in indefinite form, which + is not supported in DER. Such encoding cannot be + reproduced by this function. + + :attention: the same standard defined ``DigestAlgorithm`` to be + of ``AlgorithmIdentifier`` type, where the PARAMETERS + item is optional. Encodings for ``MD2/4/5`` without + ``PARAMETERS`` cannot be reproduced by this function. + + :Return: An ``emLen`` byte long string that encodes the hash. + """ + + # First, build the ASN.1 DER object DigestInfo: + # + # DigestInfo ::= SEQUENCE { + # digestAlgorithm AlgorithmIdentifier, + # digest OCTET STRING + # } + # + # where digestAlgorithm identifies the hash function and shall be an + # algorithm ID with an OID in the set PKCS1-v1-5DigestAlgorithms. + # + # PKCS1-v1-5DigestAlgorithms ALGORITHM-IDENTIFIER ::= { + # { OID id-md2 PARAMETERS NULL }| + # { OID id-md5 PARAMETERS NULL }| + # { OID id-sha1 PARAMETERS NULL }| + # { OID id-sha256 PARAMETERS NULL }| + # { OID id-sha384 PARAMETERS NULL }| + # { OID id-sha512 PARAMETERS NULL } + # } + # + digestAlgo = DerSequence([hash.oid, DerNull().encode()]) + digest = DerOctetString(hash.digest()) + digestInfo = DerSequence([ + digestAlgo.encode(), + digest.encode() + ]).encode() + + # We need at least 11 bytes for the remaining data: 3 fixed bytes and + # at least 8 bytes of padding). + if emLen +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== +"""Fast counter functions for CTR cipher modes. + +CTR is a chaining mode for symmetric block encryption or decryption. 
+Messages are divideded into blocks, and the cipher operation takes +place on each block using the secret key and a unique *counter block*. + +The most straightforward way to fulfil the uniqueness property is +to start with an initial, random *counter block* value, and increment it as +the next block is processed. + +The block ciphers from `Crypto.Cipher` (when configured in *MODE_CTR* mode) +invoke a callable object (the *counter* parameter) to get the next *counter block*. +Unfortunately, the Python calling protocol leads to major performance degradations. + +The counter functions instantiated by this module will be invoked directly +by the ciphers in `Crypto.Cipher`. The fact that the Python layer is bypassed +lead to more efficient (and faster) execution of CTR cipher modes. + +An example of usage is the following: + + >>> from Crypto.Cipher import AES + >>> from Crypto.Util import Counter + >>> + >>> pt = b'\x00'*1000000 + >>> ctr = Counter.new(128) + >>> cipher = AES.new(b'\x00'*16, AES.MODE_CTR, counter=ctr) + >>> ct = cipher.encrypt(pt) + +:undocumented: __package__ +""" +import sys +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * +from Crypto.Util.py3compat import * + +from Crypto.Util import _counter +import struct + +# Factory function +def new(nbits, prefix=b(""), suffix=b(""), initial_value=1, overflow=0, little_endian=False, allow_wraparound=False, disable_shortcut=False): + """Create a stateful counter block function suitable for CTR encryption modes. + + Each call to the function returns the next counter block. + Each counter block is made up by three parts:: + + prefix || counter value || postfix + + The counter value is incremented by one at each call. + + :Parameters: + nbits : integer + Length of the desired counter, in bits. It must be a multiple of 8. + prefix : byte string + The constant prefix of the counter block. By default, no prefix is + used. + suffix : byte string + The constant postfix of the counter block. By default, no suffix is + used. + initial_value : integer + The initial value of the counter. Default value is 1. + little_endian : boolean + If True, the counter number will be encoded in little endian format. + If False (default), in big endian format. + allow_wraparound : boolean + If True, the function will raise an *OverflowError* exception as soon + as the counter wraps around. If False (default), the counter will + simply restart from zero. + disable_shortcut : boolean + If True, do not make ciphers from `Crypto.Cipher` bypass the Python + layer when invoking the counter block function. + If False (default), bypass the Python layer. + :Returns: + The counter block function. + """ + + # Sanity-check the message size + (nbytes, remainder) = divmod(nbits, 8) + if remainder != 0: + # In the future, we might support arbitrary bit lengths, but for now we don't. 
+ raise ValueError("nbits must be a multiple of 8; got %d" % (nbits,)) + if nbytes < 1: + raise ValueError("nbits too small") + elif nbytes > 0xffff: + raise ValueError("nbits too large") + + initval = _encode(initial_value, nbytes, little_endian) + + if little_endian: + return _counter._newLE(bstr(prefix), bstr(suffix), initval, allow_wraparound=allow_wraparound, disable_shortcut=disable_shortcut) + else: + return _counter._newBE(bstr(prefix), bstr(suffix), initval, allow_wraparound=allow_wraparound, disable_shortcut=disable_shortcut) + +def _encode(n, nbytes, little_endian=False): + retval = [] + n = int(n) + for i in range(nbytes): + if little_endian: + retval.append(bchr(n & 0xff)) + else: + retval.insert(0, bchr(n & 0xff)) + n >>= 8 + return b("").join(retval) + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/Util/RFC1751.py b/Lib/site-packages/Crypto/Util/RFC1751.py new file mode 100644 index 0000000..5ad69ff --- /dev/null +++ b/Lib/site-packages/Crypto/Util/RFC1751.py @@ -0,0 +1,365 @@ +# rfc1751.py : Converts between 128-bit strings and a human-readable +# sequence of words, as defined in RFC1751: "A Convention for +# Human-Readable 128-bit Keys", by Daniel L. McDonald. +# +# Part of the Python Cryptography Toolkit +# +# Written by Andrew M. Kuchling and others +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +__revision__ = "$Id$" + + +import binascii +from Crypto.Util.py3compat import * +from functools import reduce + +binary={0:'0000', 1:'0001', 2:'0010', 3:'0011', 4:'0100', 5:'0101', + 6:'0110', 7:'0111', 8:'1000', 9:'1001', 10:'1010', 11:'1011', + 12:'1100', 13:'1101', 14:'1110', 15:'1111'} + +def _key2bin(s): + "Convert a key into a string of binary digits" + kl=[bord(x) for x in s] + kl=[binary[x>>4]+binary[x&15] for x in kl] + return ''.join(kl) + +def _extract(key, start, length): + """Extract a bitstring(2.x)/bytestring(2.x) from a string of binary digits, and return its + numeric value.""" + k=key[start:start+length] + return reduce(lambda x,y: x*2+ord(y)-48, k, 0) + +def key_to_english (key): + """key_to_english(key:string(2.x)/bytes(3.x)) : string + Transform an arbitrary key into a string containing English words. + The key length must be a multiple of 8. 
+ """ + english='' + for index in range(0, len(key), 8): # Loop over 8-byte subkeys + subkey=key[index:index+8] + # Compute the parity of the key + skbin=_key2bin(subkey) ; p=0 + for i in range(0, 64, 2): p=p+_extract(skbin, i, 2) + # Append parity bits to the subkey + skbin=_key2bin(subkey+bchr((p<<6) & 255)) + for i in range(0, 64, 11): + english=english+wordlist[_extract(skbin, i, 11)]+' ' + + return english[:-1] # Remove the trailing space + +def english_to_key (s): + """english_to_key(string):string(2.x)/bytes(2.x) + Transform a string into a corresponding key. + The string must contain words separated by whitespace; the number + of words must be a multiple of 6. + """ + + L=s.upper().split() ; key=b('') + for index in range(0, len(L), 6): + sublist=L[index:index+6] ; char=9*[0] ; bits=0 + for i in sublist: + index = wordlist.index(i) + shift = (8-(bits+11)%8) %8 + y = index << shift + cl, cc, cr = (y>>16), (y>>8)&0xff, y & 0xff + if (shift>5): + char[bits>>3] = char[bits>>3] | cl + char[(bits>>3)+1] = char[(bits>>3)+1] | cc + char[(bits>>3)+2] = char[(bits>>3)+2] | cr + elif shift>-3: + char[bits>>3] = char[bits>>3] | cc + char[(bits>>3)+1] = char[(bits>>3)+1] | cr + else: char[bits>>3] = char[bits>>3] | cr + bits=bits+11 + subkey=reduce(lambda x,y:x+bchr(y), char, b('')) + + # Check the parity of the resulting key + skbin=_key2bin(subkey) + p=0 + for i in range(0, 64, 2): p=p+_extract(skbin, i, 2) + if (p&3) != _extract(skbin, 64, 2): + raise ValueError("Parity error in resulting key") + key=key+subkey[0:8] + return key + +wordlist=[ "A", "ABE", "ACE", "ACT", "AD", "ADA", "ADD", + "AGO", "AID", "AIM", "AIR", "ALL", "ALP", "AM", "AMY", "AN", "ANA", + "AND", "ANN", "ANT", "ANY", "APE", "APS", "APT", "ARC", "ARE", "ARK", + "ARM", "ART", "AS", "ASH", "ASK", "AT", "ATE", "AUG", "AUK", "AVE", + "AWE", "AWK", "AWL", "AWN", "AX", "AYE", "BAD", "BAG", "BAH", "BAM", + "BAN", "BAR", "BAT", "BAY", "BE", "BED", "BEE", "BEG", "BEN", "BET", + "BEY", "BIB", "BID", "BIG", "BIN", "BIT", "BOB", "BOG", "BON", "BOO", + "BOP", "BOW", "BOY", "BUB", "BUD", "BUG", "BUM", "BUN", "BUS", "BUT", + "BUY", "BY", "BYE", "CAB", "CAL", "CAM", "CAN", "CAP", "CAR", "CAT", + "CAW", "COD", "COG", "COL", "CON", "COO", "COP", "COT", "COW", "COY", + "CRY", "CUB", "CUE", "CUP", "CUR", "CUT", "DAB", "DAD", "DAM", "DAN", + "DAR", "DAY", "DEE", "DEL", "DEN", "DES", "DEW", "DID", "DIE", "DIG", + "DIN", "DIP", "DO", "DOE", "DOG", "DON", "DOT", "DOW", "DRY", "DUB", + "DUD", "DUE", "DUG", "DUN", "EAR", "EAT", "ED", "EEL", "EGG", "EGO", + "ELI", "ELK", "ELM", "ELY", "EM", "END", "EST", "ETC", "EVA", "EVE", + "EWE", "EYE", "FAD", "FAN", "FAR", "FAT", "FAY", "FED", "FEE", "FEW", + "FIB", "FIG", "FIN", "FIR", "FIT", "FLO", "FLY", "FOE", "FOG", "FOR", + "FRY", "FUM", "FUN", "FUR", "GAB", "GAD", "GAG", "GAL", "GAM", "GAP", + "GAS", "GAY", "GEE", "GEL", "GEM", "GET", "GIG", "GIL", "GIN", "GO", + "GOT", "GUM", "GUN", "GUS", "GUT", "GUY", "GYM", "GYP", "HA", "HAD", + "HAL", "HAM", "HAN", "HAP", "HAS", "HAT", "HAW", "HAY", "HE", "HEM", + "HEN", "HER", "HEW", "HEY", "HI", "HID", "HIM", "HIP", "HIS", "HIT", + "HO", "HOB", "HOC", "HOE", "HOG", "HOP", "HOT", "HOW", "HUB", "HUE", + "HUG", "HUH", "HUM", "HUT", "I", "ICY", "IDA", "IF", "IKE", "ILL", + "INK", "INN", "IO", "ION", "IQ", "IRA", "IRE", "IRK", "IS", "IT", + "ITS", "IVY", "JAB", "JAG", "JAM", "JAN", "JAR", "JAW", "JAY", "JET", + "JIG", "JIM", "JO", "JOB", "JOE", "JOG", "JOT", "JOY", "JUG", "JUT", + "KAY", "KEG", "KEN", "KEY", "KID", "KIM", "KIN", "KIT", "LA", "LAB", + "LAC", "LAD", 
"LAG", "LAM", "LAP", "LAW", "LAY", "LEA", "LED", "LEE", + "LEG", "LEN", "LEO", "LET", "LEW", "LID", "LIE", "LIN", "LIP", "LIT", + "LO", "LOB", "LOG", "LOP", "LOS", "LOT", "LOU", "LOW", "LOY", "LUG", + "LYE", "MA", "MAC", "MAD", "MAE", "MAN", "MAO", "MAP", "MAT", "MAW", + "MAY", "ME", "MEG", "MEL", "MEN", "MET", "MEW", "MID", "MIN", "MIT", + "MOB", "MOD", "MOE", "MOO", "MOP", "MOS", "MOT", "MOW", "MUD", "MUG", + "MUM", "MY", "NAB", "NAG", "NAN", "NAP", "NAT", "NAY", "NE", "NED", + "NEE", "NET", "NEW", "NIB", "NIL", "NIP", "NIT", "NO", "NOB", "NOD", + "NON", "NOR", "NOT", "NOV", "NOW", "NU", "NUN", "NUT", "O", "OAF", + "OAK", "OAR", "OAT", "ODD", "ODE", "OF", "OFF", "OFT", "OH", "OIL", + "OK", "OLD", "ON", "ONE", "OR", "ORB", "ORE", "ORR", "OS", "OTT", + "OUR", "OUT", "OVA", "OW", "OWE", "OWL", "OWN", "OX", "PA", "PAD", + "PAL", "PAM", "PAN", "PAP", "PAR", "PAT", "PAW", "PAY", "PEA", "PEG", + "PEN", "PEP", "PER", "PET", "PEW", "PHI", "PI", "PIE", "PIN", "PIT", + "PLY", "PO", "POD", "POE", "POP", "POT", "POW", "PRO", "PRY", "PUB", + "PUG", "PUN", "PUP", "PUT", "QUO", "RAG", "RAM", "RAN", "RAP", "RAT", + "RAW", "RAY", "REB", "RED", "REP", "RET", "RIB", "RID", "RIG", "RIM", + "RIO", "RIP", "ROB", "ROD", "ROE", "RON", "ROT", "ROW", "ROY", "RUB", + "RUE", "RUG", "RUM", "RUN", "RYE", "SAC", "SAD", "SAG", "SAL", "SAM", + "SAN", "SAP", "SAT", "SAW", "SAY", "SEA", "SEC", "SEE", "SEN", "SET", + "SEW", "SHE", "SHY", "SIN", "SIP", "SIR", "SIS", "SIT", "SKI", "SKY", + "SLY", "SO", "SOB", "SOD", "SON", "SOP", "SOW", "SOY", "SPA", "SPY", + "SUB", "SUD", "SUE", "SUM", "SUN", "SUP", "TAB", "TAD", "TAG", "TAN", + "TAP", "TAR", "TEA", "TED", "TEE", "TEN", "THE", "THY", "TIC", "TIE", + "TIM", "TIN", "TIP", "TO", "TOE", "TOG", "TOM", "TON", "TOO", "TOP", + "TOW", "TOY", "TRY", "TUB", "TUG", "TUM", "TUN", "TWO", "UN", "UP", + "US", "USE", "VAN", "VAT", "VET", "VIE", "WAD", "WAG", "WAR", "WAS", + "WAY", "WE", "WEB", "WED", "WEE", "WET", "WHO", "WHY", "WIN", "WIT", + "WOK", "WON", "WOO", "WOW", "WRY", "WU", "YAM", "YAP", "YAW", "YE", + "YEA", "YES", "YET", "YOU", "ABED", "ABEL", "ABET", "ABLE", "ABUT", + "ACHE", "ACID", "ACME", "ACRE", "ACTA", "ACTS", "ADAM", "ADDS", + "ADEN", "AFAR", "AFRO", "AGEE", "AHEM", "AHOY", "AIDA", "AIDE", + "AIDS", "AIRY", "AJAR", "AKIN", "ALAN", "ALEC", "ALGA", "ALIA", + "ALLY", "ALMA", "ALOE", "ALSO", "ALTO", "ALUM", "ALVA", "AMEN", + "AMES", "AMID", "AMMO", "AMOK", "AMOS", "AMRA", "ANDY", "ANEW", + "ANNA", "ANNE", "ANTE", "ANTI", "AQUA", "ARAB", "ARCH", "AREA", + "ARGO", "ARID", "ARMY", "ARTS", "ARTY", "ASIA", "ASKS", "ATOM", + "AUNT", "AURA", "AUTO", "AVER", "AVID", "AVIS", "AVON", "AVOW", + "AWAY", "AWRY", "BABE", "BABY", "BACH", "BACK", "BADE", "BAIL", + "BAIT", "BAKE", "BALD", "BALE", "BALI", "BALK", "BALL", "BALM", + "BAND", "BANE", "BANG", "BANK", "BARB", "BARD", "BARE", "BARK", + "BARN", "BARR", "BASE", "BASH", "BASK", "BASS", "BATE", "BATH", + "BAWD", "BAWL", "BEAD", "BEAK", "BEAM", "BEAN", "BEAR", "BEAT", + "BEAU", "BECK", "BEEF", "BEEN", "BEER", + "BEET", "BELA", "BELL", "BELT", "BEND", "BENT", "BERG", "BERN", + "BERT", "BESS", "BEST", "BETA", "BETH", "BHOY", "BIAS", "BIDE", + "BIEN", "BILE", "BILK", "BILL", "BIND", "BING", "BIRD", "BITE", + "BITS", "BLAB", "BLAT", "BLED", "BLEW", "BLOB", "BLOC", "BLOT", + "BLOW", "BLUE", "BLUM", "BLUR", "BOAR", "BOAT", "BOCA", "BOCK", + "BODE", "BODY", "BOGY", "BOHR", "BOIL", "BOLD", "BOLO", "BOLT", + "BOMB", "BONA", "BOND", "BONE", "BONG", "BONN", "BONY", "BOOK", + "BOOM", "BOON", "BOOT", "BORE", "BORG", "BORN", "BOSE", "BOSS", + "BOTH", 
"BOUT", "BOWL", "BOYD", "BRAD", "BRAE", "BRAG", "BRAN", + "BRAY", "BRED", "BREW", "BRIG", "BRIM", "BROW", "BUCK", "BUDD", + "BUFF", "BULB", "BULK", "BULL", "BUNK", "BUNT", "BUOY", "BURG", + "BURL", "BURN", "BURR", "BURT", "BURY", "BUSH", "BUSS", "BUST", + "BUSY", "BYTE", "CADY", "CAFE", "CAGE", "CAIN", "CAKE", "CALF", + "CALL", "CALM", "CAME", "CANE", "CANT", "CARD", "CARE", "CARL", + "CARR", "CART", "CASE", "CASH", "CASK", "CAST", "CAVE", "CEIL", + "CELL", "CENT", "CERN", "CHAD", "CHAR", "CHAT", "CHAW", "CHEF", + "CHEN", "CHEW", "CHIC", "CHIN", "CHOU", "CHOW", "CHUB", "CHUG", + "CHUM", "CITE", "CITY", "CLAD", "CLAM", "CLAN", "CLAW", "CLAY", + "CLOD", "CLOG", "CLOT", "CLUB", "CLUE", "COAL", "COAT", "COCA", + "COCK", "COCO", "CODA", "CODE", "CODY", "COED", "COIL", "COIN", + "COKE", "COLA", "COLD", "COLT", "COMA", "COMB", "COME", "COOK", + "COOL", "COON", "COOT", "CORD", "CORE", "CORK", "CORN", "COST", + "COVE", "COWL", "CRAB", "CRAG", "CRAM", "CRAY", "CREW", "CRIB", + "CROW", "CRUD", "CUBA", "CUBE", "CUFF", "CULL", "CULT", "CUNY", + "CURB", "CURD", "CURE", "CURL", "CURT", "CUTS", "DADE", "DALE", + "DAME", "DANA", "DANE", "DANG", "DANK", "DARE", "DARK", "DARN", + "DART", "DASH", "DATA", "DATE", "DAVE", "DAVY", "DAWN", "DAYS", + "DEAD", "DEAF", "DEAL", "DEAN", "DEAR", "DEBT", "DECK", "DEED", + "DEEM", "DEER", "DEFT", "DEFY", "DELL", "DENT", "DENY", "DESK", + "DIAL", "DICE", "DIED", "DIET", "DIME", "DINE", "DING", "DINT", + "DIRE", "DIRT", "DISC", "DISH", "DISK", "DIVE", "DOCK", "DOES", + "DOLE", "DOLL", "DOLT", "DOME", "DONE", "DOOM", "DOOR", "DORA", + "DOSE", "DOTE", "DOUG", "DOUR", "DOVE", "DOWN", "DRAB", "DRAG", + "DRAM", "DRAW", "DREW", "DRUB", "DRUG", "DRUM", "DUAL", "DUCK", + "DUCT", "DUEL", "DUET", "DUKE", "DULL", "DUMB", "DUNE", "DUNK", + "DUSK", "DUST", "DUTY", "EACH", "EARL", "EARN", "EASE", "EAST", + "EASY", "EBEN", "ECHO", "EDDY", "EDEN", "EDGE", "EDGY", "EDIT", + "EDNA", "EGAN", "ELAN", "ELBA", "ELLA", "ELSE", "EMIL", "EMIT", + "EMMA", "ENDS", "ERIC", "EROS", "EVEN", "EVER", "EVIL", "EYED", + "FACE", "FACT", "FADE", "FAIL", "FAIN", "FAIR", "FAKE", "FALL", + "FAME", "FANG", "FARM", "FAST", "FATE", "FAWN", "FEAR", "FEAT", + "FEED", "FEEL", "FEET", "FELL", "FELT", "FEND", "FERN", "FEST", + "FEUD", "FIEF", "FIGS", "FILE", "FILL", "FILM", "FIND", "FINE", + "FINK", "FIRE", "FIRM", "FISH", "FISK", "FIST", "FITS", "FIVE", + "FLAG", "FLAK", "FLAM", "FLAT", "FLAW", "FLEA", "FLED", "FLEW", + "FLIT", "FLOC", "FLOG", "FLOW", "FLUB", "FLUE", "FOAL", "FOAM", + "FOGY", "FOIL", "FOLD", "FOLK", "FOND", "FONT", "FOOD", "FOOL", + "FOOT", "FORD", "FORE", "FORK", "FORM", "FORT", "FOSS", "FOUL", + "FOUR", "FOWL", "FRAU", "FRAY", "FRED", "FREE", "FRET", "FREY", + "FROG", "FROM", "FUEL", "FULL", "FUME", "FUND", "FUNK", "FURY", + "FUSE", "FUSS", "GAFF", "GAGE", "GAIL", "GAIN", "GAIT", "GALA", + "GALE", "GALL", "GALT", "GAME", "GANG", "GARB", "GARY", "GASH", + "GATE", "GAUL", "GAUR", "GAVE", "GAWK", "GEAR", "GELD", "GENE", + "GENT", "GERM", "GETS", "GIBE", "GIFT", "GILD", "GILL", "GILT", + "GINA", "GIRD", "GIRL", "GIST", "GIVE", "GLAD", "GLEE", "GLEN", + "GLIB", "GLOB", "GLOM", "GLOW", "GLUE", "GLUM", "GLUT", "GOAD", + "GOAL", "GOAT", "GOER", "GOES", "GOLD", "GOLF", "GONE", "GONG", + "GOOD", "GOOF", "GORE", "GORY", "GOSH", "GOUT", "GOWN", "GRAB", + "GRAD", "GRAY", "GREG", "GREW", "GREY", "GRID", "GRIM", "GRIN", + "GRIT", "GROW", "GRUB", "GULF", "GULL", "GUNK", "GURU", "GUSH", + "GUST", "GWEN", "GWYN", "HAAG", "HAAS", "HACK", "HAIL", "HAIR", + "HALE", "HALF", "HALL", "HALO", "HALT", "HAND", "HANG", "HANK", 
+ "HANS", "HARD", "HARK", "HARM", "HART", "HASH", "HAST", "HATE", + "HATH", "HAUL", "HAVE", "HAWK", "HAYS", "HEAD", "HEAL", "HEAR", + "HEAT", "HEBE", "HECK", "HEED", "HEEL", "HEFT", "HELD", "HELL", + "HELM", "HERB", "HERD", "HERE", "HERO", "HERS", "HESS", "HEWN", + "HICK", "HIDE", "HIGH", "HIKE", "HILL", "HILT", "HIND", "HINT", + "HIRE", "HISS", "HIVE", "HOBO", "HOCK", "HOFF", "HOLD", "HOLE", + "HOLM", "HOLT", "HOME", "HONE", "HONK", "HOOD", "HOOF", "HOOK", + "HOOT", "HORN", "HOSE", "HOST", "HOUR", "HOVE", "HOWE", "HOWL", + "HOYT", "HUCK", "HUED", "HUFF", "HUGE", "HUGH", "HUGO", "HULK", + "HULL", "HUNK", "HUNT", "HURD", "HURL", "HURT", "HUSH", "HYDE", + "HYMN", "IBIS", "ICON", "IDEA", "IDLE", "IFFY", "INCA", "INCH", + "INTO", "IONS", "IOTA", "IOWA", "IRIS", "IRMA", "IRON", "ISLE", + "ITCH", "ITEM", "IVAN", "JACK", "JADE", "JAIL", "JAKE", "JANE", + "JAVA", "JEAN", "JEFF", "JERK", "JESS", "JEST", "JIBE", "JILL", + "JILT", "JIVE", "JOAN", "JOBS", "JOCK", "JOEL", "JOEY", "JOHN", + "JOIN", "JOKE", "JOLT", "JOVE", "JUDD", "JUDE", "JUDO", "JUDY", + "JUJU", "JUKE", "JULY", "JUNE", "JUNK", "JUNO", "JURY", "JUST", + "JUTE", "KAHN", "KALE", "KANE", "KANT", "KARL", "KATE", "KEEL", + "KEEN", "KENO", "KENT", "KERN", "KERR", "KEYS", "KICK", "KILL", + "KIND", "KING", "KIRK", "KISS", "KITE", "KLAN", "KNEE", "KNEW", + "KNIT", "KNOB", "KNOT", "KNOW", "KOCH", "KONG", "KUDO", "KURD", + "KURT", "KYLE", "LACE", "LACK", "LACY", "LADY", "LAID", "LAIN", + "LAIR", "LAKE", "LAMB", "LAME", "LAND", "LANE", "LANG", "LARD", + "LARK", "LASS", "LAST", "LATE", "LAUD", "LAVA", "LAWN", "LAWS", + "LAYS", "LEAD", "LEAF", "LEAK", "LEAN", "LEAR", "LEEK", "LEER", + "LEFT", "LEND", "LENS", "LENT", "LEON", "LESK", "LESS", "LEST", + "LETS", "LIAR", "LICE", "LICK", "LIED", "LIEN", "LIES", "LIEU", + "LIFE", "LIFT", "LIKE", "LILA", "LILT", "LILY", "LIMA", "LIMB", + "LIME", "LIND", "LINE", "LINK", "LINT", "LION", "LISA", "LIST", + "LIVE", "LOAD", "LOAF", "LOAM", "LOAN", "LOCK", "LOFT", "LOGE", + "LOIS", "LOLA", "LONE", "LONG", "LOOK", "LOON", "LOOT", "LORD", + "LORE", "LOSE", "LOSS", "LOST", "LOUD", "LOVE", "LOWE", "LUCK", + "LUCY", "LUGE", "LUKE", "LULU", "LUND", "LUNG", "LURA", "LURE", + "LURK", "LUSH", "LUST", "LYLE", "LYNN", "LYON", "LYRA", "MACE", + "MADE", "MAGI", "MAID", "MAIL", "MAIN", "MAKE", "MALE", "MALI", + "MALL", "MALT", "MANA", "MANN", "MANY", "MARC", "MARE", "MARK", + "MARS", "MART", "MARY", "MASH", "MASK", "MASS", "MAST", "MATE", + "MATH", "MAUL", "MAYO", "MEAD", "MEAL", "MEAN", "MEAT", "MEEK", + "MEET", "MELD", "MELT", "MEMO", "MEND", "MENU", "MERT", "MESH", + "MESS", "MICE", "MIKE", "MILD", "MILE", "MILK", "MILL", "MILT", + "MIMI", "MIND", "MINE", "MINI", "MINK", "MINT", "MIRE", "MISS", + "MIST", "MITE", "MITT", "MOAN", "MOAT", "MOCK", "MODE", "MOLD", + "MOLE", "MOLL", "MOLT", "MONA", "MONK", "MONT", "MOOD", "MOON", + "MOOR", "MOOT", "MORE", "MORN", "MORT", "MOSS", "MOST", "MOTH", + "MOVE", "MUCH", "MUCK", "MUDD", "MUFF", "MULE", "MULL", "MURK", + "MUSH", "MUST", "MUTE", "MUTT", "MYRA", "MYTH", "NAGY", "NAIL", + "NAIR", "NAME", "NARY", "NASH", "NAVE", "NAVY", "NEAL", "NEAR", + "NEAT", "NECK", "NEED", "NEIL", "NELL", "NEON", "NERO", "NESS", + "NEST", "NEWS", "NEWT", "NIBS", "NICE", "NICK", "NILE", "NINA", + "NINE", "NOAH", "NODE", "NOEL", "NOLL", "NONE", "NOOK", "NOON", + "NORM", "NOSE", "NOTE", "NOUN", "NOVA", "NUDE", "NULL", "NUMB", + "OATH", "OBEY", "OBOE", "ODIN", "OHIO", "OILY", "OINT", "OKAY", + "OLAF", "OLDY", "OLGA", "OLIN", "OMAN", "OMEN", "OMIT", "ONCE", + "ONES", "ONLY", "ONTO", "ONUS", "ORAL", "ORGY", 
"OSLO", "OTIS", + "OTTO", "OUCH", "OUST", "OUTS", "OVAL", "OVEN", "OVER", "OWLY", + "OWNS", "QUAD", "QUIT", "QUOD", "RACE", "RACK", "RACY", "RAFT", + "RAGE", "RAID", "RAIL", "RAIN", "RAKE", "RANK", "RANT", "RARE", + "RASH", "RATE", "RAVE", "RAYS", "READ", "REAL", "REAM", "REAR", + "RECK", "REED", "REEF", "REEK", "REEL", "REID", "REIN", "RENA", + "REND", "RENT", "REST", "RICE", "RICH", "RICK", "RIDE", "RIFT", + "RILL", "RIME", "RING", "RINK", "RISE", "RISK", "RITE", "ROAD", + "ROAM", "ROAR", "ROBE", "ROCK", "RODE", "ROIL", "ROLL", "ROME", + "ROOD", "ROOF", "ROOK", "ROOM", "ROOT", "ROSA", "ROSE", "ROSS", + "ROSY", "ROTH", "ROUT", "ROVE", "ROWE", "ROWS", "RUBE", "RUBY", + "RUDE", "RUDY", "RUIN", "RULE", "RUNG", "RUNS", "RUNT", "RUSE", + "RUSH", "RUSK", "RUSS", "RUST", "RUTH", "SACK", "SAFE", "SAGE", + "SAID", "SAIL", "SALE", "SALK", "SALT", "SAME", "SAND", "SANE", + "SANG", "SANK", "SARA", "SAUL", "SAVE", "SAYS", "SCAN", "SCAR", + "SCAT", "SCOT", "SEAL", "SEAM", "SEAR", "SEAT", "SEED", "SEEK", + "SEEM", "SEEN", "SEES", "SELF", "SELL", "SEND", "SENT", "SETS", + "SEWN", "SHAG", "SHAM", "SHAW", "SHAY", "SHED", "SHIM", "SHIN", + "SHOD", "SHOE", "SHOT", "SHOW", "SHUN", "SHUT", "SICK", "SIDE", + "SIFT", "SIGH", "SIGN", "SILK", "SILL", "SILO", "SILT", "SINE", + "SING", "SINK", "SIRE", "SITE", "SITS", "SITU", "SKAT", "SKEW", + "SKID", "SKIM", "SKIN", "SKIT", "SLAB", "SLAM", "SLAT", "SLAY", + "SLED", "SLEW", "SLID", "SLIM", "SLIT", "SLOB", "SLOG", "SLOT", + "SLOW", "SLUG", "SLUM", "SLUR", "SMOG", "SMUG", "SNAG", "SNOB", + "SNOW", "SNUB", "SNUG", "SOAK", "SOAR", "SOCK", "SODA", "SOFA", + "SOFT", "SOIL", "SOLD", "SOME", "SONG", "SOON", "SOOT", "SORE", + "SORT", "SOUL", "SOUR", "SOWN", "STAB", "STAG", "STAN", "STAR", + "STAY", "STEM", "STEW", "STIR", "STOW", "STUB", "STUN", "SUCH", + "SUDS", "SUIT", "SULK", "SUMS", "SUNG", "SUNK", "SURE", "SURF", + "SWAB", "SWAG", "SWAM", "SWAN", "SWAT", "SWAY", "SWIM", "SWUM", + "TACK", "TACT", "TAIL", "TAKE", "TALE", "TALK", "TALL", "TANK", + "TASK", "TATE", "TAUT", "TEAL", "TEAM", "TEAR", "TECH", "TEEM", + "TEEN", "TEET", "TELL", "TEND", "TENT", "TERM", "TERN", "TESS", + "TEST", "THAN", "THAT", "THEE", "THEM", "THEN", "THEY", "THIN", + "THIS", "THUD", "THUG", "TICK", "TIDE", "TIDY", "TIED", "TIER", + "TILE", "TILL", "TILT", "TIME", "TINA", "TINE", "TINT", "TINY", + "TIRE", "TOAD", "TOGO", "TOIL", "TOLD", "TOLL", "TONE", "TONG", + "TONY", "TOOK", "TOOL", "TOOT", "TORE", "TORN", "TOTE", "TOUR", + "TOUT", "TOWN", "TRAG", "TRAM", "TRAY", "TREE", "TREK", "TRIG", + "TRIM", "TRIO", "TROD", "TROT", "TROY", "TRUE", "TUBA", "TUBE", + "TUCK", "TUFT", "TUNA", "TUNE", "TUNG", "TURF", "TURN", "TUSK", + "TWIG", "TWIN", "TWIT", "ULAN", "UNIT", "URGE", "USED", "USER", + "USES", "UTAH", "VAIL", "VAIN", "VALE", "VARY", "VASE", "VAST", + "VEAL", "VEDA", "VEIL", "VEIN", "VEND", "VENT", "VERB", "VERY", + "VETO", "VICE", "VIEW", "VINE", "VISE", "VOID", "VOLT", "VOTE", + "WACK", "WADE", "WAGE", "WAIL", "WAIT", "WAKE", "WALE", "WALK", + "WALL", "WALT", "WAND", "WANE", "WANG", "WANT", "WARD", "WARM", + "WARN", "WART", "WASH", "WAST", "WATS", "WATT", "WAVE", "WAVY", + "WAYS", "WEAK", "WEAL", "WEAN", "WEAR", "WEED", "WEEK", "WEIR", + "WELD", "WELL", "WELT", "WENT", "WERE", "WERT", "WEST", "WHAM", + "WHAT", "WHEE", "WHEN", "WHET", "WHOA", "WHOM", "WICK", "WIFE", + "WILD", "WILL", "WIND", "WINE", "WING", "WINK", "WINO", "WIRE", + "WISE", "WISH", "WITH", "WOLF", "WONT", "WOOD", "WOOL", "WORD", + "WORE", "WORK", "WORM", "WORN", "WOVE", "WRIT", "WYNN", "YALE", + "YANG", "YANK", "YARD", "YARN", 
"YAWL", "YAWN", "YEAH", "YEAR", + "YELL", "YOGA", "YOKE" ] + +if __name__=='__main__': + data = [('EB33F77EE73D4053', 'TIDE ITCH SLOW REIN RULE MOT'), + ('CCAC2AED591056BE4F90FD441C534766', + 'RASH BUSH MILK LOOK BAD BRIM AVID GAFF BAIT ROT POD LOVE'), + ('EFF81F9BFBC65350920CDD7416DE8009', + 'TROD MUTE TAIL WARM CHAR KONG HAAG CITY BORE O TEAL AWL') + ] + + for key, words in data: + print('Trying key', key) + key=binascii.a2b_hex(key) + w2=key_to_english(key) + if w2!=words: + print('key_to_english fails on key', repr(key), ', producing', str(w2)) + k2=english_to_key(words) + if k2!=key: + print('english_to_key fails on key', repr(key), ', producing', repr(k2)) + + diff --git a/Lib/site-packages/Crypto/Util/__init__.py b/Lib/site-packages/Crypto/Util/__init__.py new file mode 100644 index 0000000..a3bef8a --- /dev/null +++ b/Lib/site-packages/Crypto/Util/__init__.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Miscellaneous modules + +Contains useful modules that don't belong into any of the +other Crypto.* subpackages. + +Crypto.Util.number Number-theoretic functions (primality testing, etc.) +Crypto.Util.randpool Random number generation +Crypto.Util.RFC1751 Converts between 128-bit keys and human-readable + strings of words. +Crypto.Util.asn1 Minimal support for ASN.1 DER encoding + +""" + +__all__ = ['randpool', 'RFC1751', 'number', 'strxor', 'asn1' ] + +__revision__ = "$Id$" + diff --git a/Lib/site-packages/Crypto/Util/_number_new.py b/Lib/site-packages/Crypto/Util/_number_new.py new file mode 100644 index 0000000..8b99177 --- /dev/null +++ b/Lib/site-packages/Crypto/Util/_number_new.py @@ -0,0 +1,119 @@ +# -*- coding: ascii -*- +# +# Util/_number_new.py : utility functions +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +## NOTE: Do not import this module directly. Import these functions from Crypto.Util.number. + +__revision__ = "$Id$" +__all__ = ['ceil_shift', 'ceil_div', 'floor_div', 'exact_log2', 'exact_div'] + +import sys +if sys.version_info[0] == 2 and sys.version_info[1] == 1: + from Crypto.Util.py21compat import * + +def ceil_shift(n, b): + """Return ceil(n / 2**b) without performing any floating-point or division operations. + + This is done by right-shifting n by b bits and incrementing the result by 1 + if any '1' bits were shifted out. + """ + if not isinstance(n, int) or not isinstance(b, int): + raise TypeError("unsupported operand type(s): %r and %r" % (type(n).__name__, type(b).__name__)) + + assert n >= 0 and b >= 0 # I haven't tested or even thought about negative values + mask = (1 << b) - 1 + if n & mask: + return (n >> b) + 1 + else: + return n >> b + +def ceil_div(a, b): + """Return ceil(a / b) without performing any floating-point operations.""" + + if not isinstance(a, int) or not isinstance(b, int): + raise TypeError("unsupported operand type(s): %r and %r" % (type(a).__name__, type(b).__name__)) + + (q, r) = divmod(a, b) + if r: + return q + 1 + else: + return q + +def floor_div(a, b): + if not isinstance(a, int) or not isinstance(b, int): + raise TypeError("unsupported operand type(s): %r and %r" % (type(a).__name__, type(b).__name__)) + + (q, r) = divmod(a, b) + return q + +def exact_log2(num): + """Find and return an integer i >= 0 such that num == 2**i. + + If no such integer exists, this function raises ValueError. + """ + + if not isinstance(num, int): + raise TypeError("unsupported operand type: %r" % (type(num).__name__,)) + + n = int(num) + if n <= 0: + raise ValueError("cannot compute logarithm of non-positive number") + + i = 0 + while n != 0: + if (n & 1) and n != 1: + raise ValueError("No solution could be found") + i += 1 + n >>= 1 + i -= 1 + + assert num == (1 << i) + return i + +def exact_div(p, d, allow_divzero=False): + """Find and return an integer n such that p == n * d + + If no such integer exists, this function raises ValueError. + + Both operands must be integers. + + If the second operand is zero, this function will raise ZeroDivisionError + unless allow_divzero is true (default: False). + """ + + if not isinstance(p, int) or not isinstance(d, int): + raise TypeError("unsupported operand type(s): %r and %r" % (type(p).__name__, type(d).__name__)) + + if d == 0 and allow_divzero: + n = 0 + if p != n * d: + raise ValueError("No solution could be found") + else: + (n, r) = divmod(p, d) + if r != 0: + raise ValueError("No solution could be found") + + assert p == n * d + return n + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/Util/asn1.py b/Lib/site-packages/Crypto/Util/asn1.py new file mode 100644 index 0000000..175aff2 --- /dev/null +++ b/Lib/site-packages/Crypto/Util/asn1.py @@ -0,0 +1,286 @@ +# -*- coding: ascii -*- +# +# Util/asn1.py : Minimal support for ASN.1 DER binary encoding. +# +# =================================================================== +# The contents of this file are dedicated to the public domain. 
To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +from Crypto.Util.number import long_to_bytes, bytes_to_long +import sys +from Crypto.Util.py3compat import * + +__all__ = [ 'DerObject', 'DerInteger', 'DerOctetString', 'DerNull', 'DerSequence', 'DerObjectId' ] + +class DerObject: + """Base class for defining a single DER object. + + Instantiate this class ONLY when you have to decode a DER element. + """ + + # Known TAG types + typeTags = { 'SEQUENCE': 0x30, 'BIT STRING': 0x03, 'INTEGER': 0x02, + 'OCTET STRING': 0x04, 'NULL': 0x05, 'OBJECT IDENTIFIER': 0x06 } + + def __init__(self, ASN1Type=None, payload=b('')): + """Initialize the DER object according to a specific type. + + The ASN.1 type is either specified as the ASN.1 string (e.g. + 'SEQUENCE'), directly with its numerical tag or with no tag + at all (None).""" + if isInt(ASN1Type) or ASN1Type is None: + self.typeTag = ASN1Type + else: + if len(ASN1Type)==1: + self.typeTag = ord(ASN1Type) + else: + self.typeTag = self.typeTags.get(ASN1Type) + self.payload = payload + + def isType(self, ASN1Type): + return self.typeTags[ASN1Type]==self.typeTag + + def _lengthOctets(self, payloadLen): + """Return a byte string that encodes the given payload length (in + bytes) in a format suitable for a DER length tag (L). + """ + if payloadLen>127: + encoding = long_to_bytes(payloadLen) + return bchr(len(encoding)+128) + encoding + return bchr(payloadLen) + + def encode(self): + """Return a complete DER element, fully encoded as a TLV.""" + return bchr(self.typeTag) + self._lengthOctets(len(self.payload)) + self.payload + + def _decodeLen(self, idx, der): + """Given a (part of a) DER element, and an index to the first byte of + a DER length tag (L), return a tuple with the payload size, + and the index of the first byte of the such payload (V). + + Raises a ValueError exception if the DER length is invalid. + Raises an IndexError exception if the DER element is too short. + """ + length = bord(der[idx]) + if length<=127: + return (length,idx+1) + payloadLength = bytes_to_long(der[idx+1:idx+1+(length & 0x7F)]) + if payloadLength<=127: + raise ValueError("Not a DER length tag.") + return (payloadLength, idx+1+(length & 0x7F)) + + def decode(self, derEle, noLeftOvers=0): + """Decode a complete DER element, and re-initializes this + object with it. + + @param derEle A complete DER element. It must start with a DER T + tag. + @param noLeftOvers Indicate whether it is acceptable to complete the + parsing of the DER element and find that not all + bytes in derEle have been used. + @return Index of the first unused byte in the given DER element. + + Raises a ValueError exception in case of parsing errors. + Raises an IndexError exception if the DER element is too short. 
+ """ + try: + self.typeTag = bord(derEle[0]) + if (self.typeTag & 0x1F)==0x1F: + raise ValueError("Unsupported DER tag") + (length,idx) = self._decodeLen(1, derEle) + if noLeftOvers and len(derEle) != (idx+length): + raise ValueError("Not a DER structure") + self.payload = derEle[idx:idx+length] + except IndexError: + raise ValueError("Not a valid DER SEQUENCE.") + return idx+length + +class DerInteger(DerObject): + def __init__(self, value = 0): + """Class to model an INTEGER DER element. + + Limitation: only non-negative values are supported. + """ + DerObject.__init__(self, 'INTEGER') + self.value = value + + def encode(self): + """Return a complete INTEGER DER element, fully encoded as a TLV.""" + self.payload = long_to_bytes(self.value) + if bord(self.payload[0])>127: + self.payload = bchr(0x00) + self.payload + return DerObject.encode(self) + + def decode(self, derEle, noLeftOvers=0): + """Decode a complete INTEGER DER element, and re-initializes this + object with it. + + @param derEle A complete INTEGER DER element. It must start with a DER + INTEGER tag. + @param noLeftOvers Indicate whether it is acceptable to complete the + parsing of the DER element and find that not all + bytes in derEle have been used. + @return Index of the first unused byte in the given DER element. + + Raises a ValueError exception if the DER element is not a + valid non-negative INTEGER. + Raises an IndexError exception if the DER element is too short. + """ + tlvLength = DerObject.decode(self, derEle, noLeftOvers) + if self.typeTag!=self.typeTags['INTEGER']: + raise ValueError ("Not a DER INTEGER.") + if bord(self.payload[0])>127: + raise ValueError ("Negative INTEGER.") + self.value = bytes_to_long(self.payload) + return tlvLength + +class DerSequence(DerObject): + """Class to model a SEQUENCE DER element. + + This object behave like a dynamic Python sequence. + Sub-elements that are INTEGERs, look like Python integers. + Any other sub-element is a binary string encoded as the complete DER + sub-element (TLV). + """ + + def __init__(self, startSeq=None): + """Initialize the SEQUENCE DER object. Always empty + initially.""" + DerObject.__init__(self, 'SEQUENCE') + if startSeq==None: + self._seq = [] + else: + self._seq = startSeq + + ## A few methods to make it behave like a python sequence + + def __delitem__(self, n): + del self._seq[n] + def __getitem__(self, n): + return self._seq[n] + def __setitem__(self, key, value): + self._seq[key] = value + def __setslice__(self,i,j,sequence): + self._seq[i:j] = sequence + def __delslice__(self,i,j): + del self._seq[i:j] + def __getslice__(self, i, j): + return self._seq[max(0, i):max(0, j)] + def __len__(self): + return len(self._seq) + def append(self, item): + return self._seq.append(item) + + def hasInts(self): + """Return the number of items in this sequence that are numbers.""" + return len(list(filter(isInt, self._seq))) + + def hasOnlyInts(self): + """Return True if all items in this sequence are numbers.""" + return self._seq and self.hasInts()==len(self._seq) + + def encode(self): + """Return the DER encoding for the ASN.1 SEQUENCE, containing + the non-negative integers and longs added to this object. + + Limitation: Raises a ValueError exception if it some elements + in the sequence are neither Python integers nor complete DER INTEGERs. 
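        Illustrative example (an editor's sketch, not part of the original
        PyCrypto source): DerSequence and DerInteger as defined above combine
        into a single DER TLV, using the tag values 0x30 (SEQUENCE) and 0x02
        (INTEGER) from typeTags and the b() helper imported above:

            >>> from binascii import hexlify
            >>> seq = DerSequence()
            >>> seq.append(3)
            >>> seq.append(65537)
            >>> hexlify(seq.encode()) == b('30080201030203010001')
            True
            >>> seq2 = DerSequence()
            >>> _ = seq2.decode(seq.encode())
            >>> seq2[0], seq2[1]
            (3, 65537)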
+ """ + self.payload = b('') + for item in self._seq: + try: + self.payload += item + except: + try: + self.payload += DerInteger(item).encode() + except: + raise ValueError("Trying to DER encode an unknown object") + return DerObject.encode(self) + + def decode(self, derEle, noLeftOvers=0): + """Decode a complete SEQUENCE DER element, and re-initializes this + object with it. + + @param derEle A complete SEQUENCE DER element. It must start with a DER + SEQUENCE tag. + @param noLeftOvers Indicate whether it is acceptable to complete the + parsing of the DER element and find that not all + bytes in derEle have been used. + @return Index of the first unused byte in the given DER element. + + DER INTEGERs are decoded into Python integers. Any other DER + element is not decoded. Its validity is not checked. + + Raises a ValueError exception if the DER element is not a + valid DER SEQUENCE. + Raises an IndexError exception if the DER element is too short. + """ + + self._seq = [] + try: + tlvLength = DerObject.decode(self, derEle, noLeftOvers) + if self.typeTag!=self.typeTags['SEQUENCE']: + raise ValueError("Not a DER SEQUENCE.") + # Scan one TLV at once + idx = 0 + while idx= 5 to avoid timing attack vulnerability.", PowmInsecureWarning) + +# New functions +from ._number_new import * + +# Commented out and replaced with faster versions below +## def long2str(n): +## s='' +## while n>0: +## s=chr(n & 255)+s +## n=n>>8 +## return s + +## import types +## def str2long(s): +## if type(s)!=types.StringType: return s # Integers will be left alone +## return reduce(lambda x,y : x*256+ord(y), s, 0L) + +def size (N): + """size(N:long) : int + Returns the size of the number N in bits. + """ + bits = 0 + while N >> bits: + bits += 1 + return bits + +def getRandomNumber(N, randfunc=None): + """Deprecated. Use getRandomInteger or getRandomNBitInteger instead.""" + warnings.warn("Crypto.Util.number.getRandomNumber has confusing semantics"+ + "and has been deprecated. Use getRandomInteger or getRandomNBitInteger instead.", + GetRandomNumber_DeprecationWarning) + return getRandomNBitInteger(N, randfunc) + +def getRandomInteger(N, randfunc=None): + """getRandomInteger(N:int, randfunc:callable):long + Return a random number with at most N bits. + + If randfunc is omitted, then Random.new().read is used. + + This function is for internal use only and may be renamed or removed in + the future. + """ + if randfunc is None: + _import_Random() + randfunc = Random.new().read + + S = randfunc(N>>3) + odd_bits = N % 8 + if odd_bits != 0: + char = ord(randfunc(1)) >> (8-odd_bits) + S = bchr(char) + S + value = bytes_to_long(S) + return value + +def getRandomRange(a, b, randfunc=None): + """getRandomRange(a:int, b:int, randfunc:callable):long + Return a random number n so that a <= n < b. + + If randfunc is omitted, then Random.new().read is used. + + This function is for internal use only and may be renamed or removed in + the future. + """ + range_ = b - a - 1 + bits = size(range_) + value = getRandomInteger(bits, randfunc) + while value > range_: + value = getRandomInteger(bits, randfunc) + return a + value + +def getRandomNBitInteger(N, randfunc=None): + """getRandomInteger(N:int, randfunc:callable):long + Return a random number with exactly N-bits, i.e. a random number + between 2**(N-1) and (2**N)-1. + + If randfunc is omitted, then Random.new().read is used. + + This function is for internal use only and may be renamed or removed in + the future. 
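        A doctest-style illustration (an editor's sketch, not part of the
        original module) of the guarantees stated above; size() is the
        bit-length helper defined earlier in this file:

            >>> size(getRandomNBitInteger(128))
            128
            >>> 0 <= getRandomInteger(16) < 2**16
            True
            >>> 10 <= getRandomRange(10, 20) < 20
            True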
+ """ + value = getRandomInteger (N-1, randfunc) + value |= 2 ** (N-1) # Ensure high bit is set + assert size(value) >= N + return value + +def GCD(x,y): + """GCD(x:long, y:long): long + Return the GCD of x and y. + """ + x = abs(x) ; y = abs(y) + while x > 0: + x, y = y % x, x + return y + +def inverse(u, v): + """inverse(u:long, v:long):long + Return the inverse of u mod v. + """ + u3, v3 = int(u), int(v) + u1, v1 = 1, 0 + while v3 > 0: + q=divmod(u3, v3)[0] + u1, v1 = v1, u1 - v1*q + u3, v3 = v3, u3 - v3*q + while u1<0: + u1 = u1 + v + return u1 + +# Given a number of bits to generate and a random generation function, +# find a prime number of the appropriate size. + +def getPrime(N, randfunc=None): + """getPrime(N:int, randfunc:callable):long + Return a random N-bit prime number. + + If randfunc is omitted, then Random.new().read is used. + """ + if randfunc is None: + _import_Random() + randfunc = Random.new().read + + number=getRandomNBitInteger(N, randfunc) | 1 + while (not isPrime(number, randfunc=randfunc)): + number=number+2 + return number + + +def _rabinMillerTest(n, rounds, randfunc=None): + """_rabinMillerTest(n:long, rounds:int, randfunc:callable):int + Tests if n is prime. + Returns 0 when n is definitly composite. + Returns 1 when n is probably prime. + Returns 2 when n is definitly prime. + + If randfunc is omitted, then Random.new().read is used. + + This function is for internal use only and may be renamed or removed in + the future. + """ + # check special cases (n==2, n even, n < 2) + if n < 3 or (n & 1) == 0: + return n == 2 + # n might be very large so it might be beneficial to precalculate n-1 + n_1 = n - 1 + # determine m and b so that 2**b * m = n - 1 and b maximal + b = 0 + m = n_1 + while (m & 1) == 0: + b += 1 + m >>= 1 + + tested = [] + # we need to do at most n-2 rounds. + for i in range (min (rounds, n-2)): + # randomly choose a < n and make sure it hasn't been tested yet + a = getRandomRange (2, n, randfunc) + while a in tested: + a = getRandomRange (2, n, randfunc) + tested.append (a) + # do the rabin-miller test + z = pow (a, m, n) # (a**m) % n + if z == 1 or z == n_1: + continue + composite = 1 + for r in range (b): + z = (z * z) % n + if z == 1: + return 0 + elif z == n_1: + composite = 0 + break + if composite: + return 0 + return 1 + +def getStrongPrime(N, e=0, false_positive_prob=1e-6, randfunc=None): + """getStrongPrime(N:int, e:int, false_positive_prob:float, randfunc:callable):long + Return a random strong N-bit prime number. + In this context p is a strong prime if p-1 and p+1 have at + least one large prime factor. + N should be a multiple of 128 and > 512. + + If e is provided the returned prime p-1 will be coprime to e + and thus suitable for RSA where e is the public exponent. + + The optional false_positive_prob is the statistical probability + that true is returned even though it is not (pseudo-prime). + It defaults to 1e-6 (less than 1:1000000). + Note that the real probability of a false-positive is far less. This is + just the mathematically provable limit. + + randfunc should take a single int parameter and return that + many random bytes as a string. + If randfunc is omitted, then Random.new().read is used. + """ + # This function was implemented following the + # instructions found in the paper: + # "FAST GENERATION OF RANDOM, STRONG RSA PRIMES" + # by Robert D. 
Silverman + # RSA Laboratories + # May 17, 1997 + # which by the time of writing could be freely downloaded here: + # http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.17.2713&rep=rep1&type=pdf + + # Use the accelerator if available + if _fastmath is not None: + return _fastmath.getStrongPrime(int(N), int(e), false_positive_prob, + randfunc) + + if (N < 512) or ((N % 128) != 0): + raise ValueError ("bits must be multiple of 128 and > 512") + + rabin_miller_rounds = int(math.ceil(-math.log(false_positive_prob)/math.log(4))) + + # calculate range for X + # lower_bound = sqrt(2) * 2^{511 + 128*x} + # upper_bound = 2^{512 + 128*x} - 1 + x = (N - 512) >> 7; + # We need to approximate the sqrt(2) in the lower_bound by an integer + # expression because floating point math overflows with these numbers + lower_bound = divmod(14142135623730950489 * (2 ** (511 + 128*x)), + 10000000000000000000)[0] + upper_bound = (1 << (512 + 128*x)) - 1 + # Randomly choose X in calculated range + X = getRandomRange (lower_bound, upper_bound, randfunc) + + # generate p1 and p2 + p = [0, 0] + for i in (0, 1): + # randomly choose 101-bit y + y = getRandomNBitInteger (101, randfunc) + # initialize the field for sieving + field = [0] * 5 * len (sieve_base) + # sieve the field + for prime in sieve_base: + offset = y % prime + for j in range ((prime - offset) % prime, len (field), prime): + field[j] = 1 + + # look for suitable p[i] starting at y + result = 0 + for j in range(len(field)): + composite = field[j] + # look for next canidate + if composite: + continue + tmp = y + j + result = _rabinMillerTest (tmp, rabin_miller_rounds) + if result > 0: + p[i] = tmp + break + if result == 0: + raise RuntimeError ("Couln't find prime in field. " + "Developer: Increase field_size") + + # Calculate R + # R = (p2^{-1} mod p1) * p2 - (p1^{-1} mod p2) * p1 + tmp1 = inverse (p[1], p[0]) * p[1] # (p2^-1 mod p1)*p2 + tmp2 = inverse (p[0], p[1]) * p[0] # (p1^-1 mod p2)*p1 + R = tmp1 - tmp2 # (p2^-1 mod p1)*p2 - (p1^-1 mod p2)*p1 + + # search for final prime number starting by Y0 + # Y0 = X + (R - X mod p1p2) + increment = p[0] * p[1] + X = X + (R - (X % increment)) + while 1: + is_possible_prime = 1 + # first check candidate against sieve_base + for prime in sieve_base: + if (X % prime) == 0: + is_possible_prime = 0 + break + # if e is given make sure that e and X-1 are coprime + # this is not necessarily a strong prime criterion but useful when + # creating them for RSA where the p-1 and q-1 should be coprime to + # the public exponent e + if e and is_possible_prime: + if e & 1: + if GCD (e, X-1) != 1: + is_possible_prime = 0 + else: + if GCD (e, divmod((X-1),2)[0]) != 1: + is_possible_prime = 0 + + # do some Rabin-Miller-Tests + if is_possible_prime: + result = _rabinMillerTest (X, rabin_miller_rounds) + if result > 0: + break + X += increment + # abort when X has more bits than requested + # TODO: maybe we shouldn't abort but rather start over. + if X >= 1 << N: + raise RuntimeError ("Couln't find prime in field. " + "Developer: Increase field_size") + return X + +def isPrime(N, false_positive_prob=1e-6, randfunc=None): + """isPrime(N:long, false_positive_prob:float, randfunc:callable):bool + Return true if N is prime. + + The optional false_positive_prob is the statistical probability + that true is returned even though it is not (pseudo-prime). + It defaults to 1e-6 (less than 1:1000000). + Note that the real probability of a false-positive is far less. This is + just the mathematically provable limit. 
+ + If randfunc is omitted, then Random.new().read is used. + """ + if _fastmath is not None: + return _fastmath.isPrime(int(N), false_positive_prob, randfunc) + + if N < 3 or N & 1 == 0: + return N == 2 + for p in sieve_base: + if N == p: + return 1 + if N % p == 0: + return 0 + + rounds = int(math.ceil(-math.log(false_positive_prob)/math.log(4))) + return _rabinMillerTest(N, rounds, randfunc) + + +# Improved conversion functions contributed by Barry Warsaw, after +# careful benchmarking + +import struct + +def long_to_bytes(n, blocksize=0): + """long_to_bytes(n:long, blocksize:int) : string + Convert a long integer to a byte string. + + If optional blocksize is given and greater than zero, pad the front of the + byte string with binary zeros so that the length is a multiple of + blocksize. + """ + # after much testing, this algorithm was deemed to be the fastest + s = b('') + n = int(n) + pack = struct.pack + while n > 0: + s = pack('>I', n & 0xffffffff) + s + n = n >> 32 + # strip off leading zeros + for i in range(len(s)): + if s[i] != b('\000')[0]: + break + else: + # only happens when n == 0 + s = b('\000') + i = 0 + s = s[i:] + # add back some pad bytes. this could be done more efficiently w.r.t. the + # de-padding being done above, but sigh... + if blocksize > 0 and len(s) % blocksize: + s = (blocksize - len(s) % blocksize) * b('\000') + s + return s + +def bytes_to_long(s): + """bytes_to_long(string) : long + Convert a byte string to a long integer. + + This is (essentially) the inverse of long_to_bytes(). + """ + acc = 0 + unpack = struct.unpack + length = len(s) + if length % 4: + extra = (4 - length % 4) + s = b('\000') * extra + s + length = length + extra + for i in range(0, length, 4): + acc = (acc << 32) + unpack('>I', s[i:i+4])[0] + return acc + +# For backwards compatibility... +import warnings +def long2str(n, blocksize=0): + warnings.warn("long2str() has been replaced by long_to_bytes()") + return long_to_bytes(n, blocksize) +def str2long(s): + warnings.warn("str2long() has been replaced by bytes_to_long()") + return bytes_to_long(s) + +def _import_Random(): + # This is called in a function instead of at the module level in order to + # avoid problems with recursive imports + global Random, StrongRandom + from Crypto import Random + from Crypto.Random.random import StrongRandom + + + +# The first 10000 primes used for checking primality. +# This should be enough to eliminate most of the odd +# numbers before needing to do a Rabin-Miller test at all. 
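The primality routines above form a pipeline: a candidate is first trial-divided by the small primes in sieve_base (defined just below), and only survivors are handed to _rabinMillerTest, with ceil(-log(false_positive_prob)/log(4)) rounds, i.e. 10 rounds for the default bound of 1e-6. The following minimal usage sketch is added for illustration only (it is not part of the patch) and assumes the package is importable as Crypto.Util.number, as the diff paths indicate:

    from Crypto.Util import number

    p = number.getPrime(256)                       # random 256-bit probable prime
    assert number.isPrime(p)                       # sieve + 10 Rabin-Miller rounds
    assert number.size(number.getRandomNBitInteger(256)) == 256
    assert number.GCD(12, 18) == 6
    assert number.inverse(3, 7) == 5               # since (3 * 5) % 7 == 1
    # round trip through the byte converters defined above
    assert number.bytes_to_long(number.long_to_bytes(p, 32)) == p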
+sieve_base = ( + 2, 3, 5, 7, 11, 13, 17, 19, 23, 29, + 31, 37, 41, 43, 47, 53, 59, 61, 67, 71, + 73, 79, 83, 89, 97, 101, 103, 107, 109, 113, + 127, 131, 137, 139, 149, 151, 157, 163, 167, 173, + 179, 181, 191, 193, 197, 199, 211, 223, 227, 229, + 233, 239, 241, 251, 257, 263, 269, 271, 277, 281, + 283, 293, 307, 311, 313, 317, 331, 337, 347, 349, + 353, 359, 367, 373, 379, 383, 389, 397, 401, 409, + 419, 421, 431, 433, 439, 443, 449, 457, 461, 463, + 467, 479, 487, 491, 499, 503, 509, 521, 523, 541, + 547, 557, 563, 569, 571, 577, 587, 593, 599, 601, + 607, 613, 617, 619, 631, 641, 643, 647, 653, 659, + 661, 673, 677, 683, 691, 701, 709, 719, 727, 733, + 739, 743, 751, 757, 761, 769, 773, 787, 797, 809, + 811, 821, 823, 827, 829, 839, 853, 857, 859, 863, + 877, 881, 883, 887, 907, 911, 919, 929, 937, 941, + 947, 953, 967, 971, 977, 983, 991, 997, 1009, 1013, + 1019, 1021, 1031, 1033, 1039, 1049, 1051, 1061, 1063, 1069, + 1087, 1091, 1093, 1097, 1103, 1109, 1117, 1123, 1129, 1151, + 1153, 1163, 1171, 1181, 1187, 1193, 1201, 1213, 1217, 1223, + 1229, 1231, 1237, 1249, 1259, 1277, 1279, 1283, 1289, 1291, + 1297, 1301, 1303, 1307, 1319, 1321, 1327, 1361, 1367, 1373, + 1381, 1399, 1409, 1423, 1427, 1429, 1433, 1439, 1447, 1451, + 1453, 1459, 1471, 1481, 1483, 1487, 1489, 1493, 1499, 1511, + 1523, 1531, 1543, 1549, 1553, 1559, 1567, 1571, 1579, 1583, + 1597, 1601, 1607, 1609, 1613, 1619, 1621, 1627, 1637, 1657, + 1663, 1667, 1669, 1693, 1697, 1699, 1709, 1721, 1723, 1733, + 1741, 1747, 1753, 1759, 1777, 1783, 1787, 1789, 1801, 1811, + 1823, 1831, 1847, 1861, 1867, 1871, 1873, 1877, 1879, 1889, + 1901, 1907, 1913, 1931, 1933, 1949, 1951, 1973, 1979, 1987, + 1993, 1997, 1999, 2003, 2011, 2017, 2027, 2029, 2039, 2053, + 2063, 2069, 2081, 2083, 2087, 2089, 2099, 2111, 2113, 2129, + 2131, 2137, 2141, 2143, 2153, 2161, 2179, 2203, 2207, 2213, + 2221, 2237, 2239, 2243, 2251, 2267, 2269, 2273, 2281, 2287, + 2293, 2297, 2309, 2311, 2333, 2339, 2341, 2347, 2351, 2357, + 2371, 2377, 2381, 2383, 2389, 2393, 2399, 2411, 2417, 2423, + 2437, 2441, 2447, 2459, 2467, 2473, 2477, 2503, 2521, 2531, + 2539, 2543, 2549, 2551, 2557, 2579, 2591, 2593, 2609, 2617, + 2621, 2633, 2647, 2657, 2659, 2663, 2671, 2677, 2683, 2687, + 2689, 2693, 2699, 2707, 2711, 2713, 2719, 2729, 2731, 2741, + 2749, 2753, 2767, 2777, 2789, 2791, 2797, 2801, 2803, 2819, + 2833, 2837, 2843, 2851, 2857, 2861, 2879, 2887, 2897, 2903, + 2909, 2917, 2927, 2939, 2953, 2957, 2963, 2969, 2971, 2999, + 3001, 3011, 3019, 3023, 3037, 3041, 3049, 3061, 3067, 3079, + 3083, 3089, 3109, 3119, 3121, 3137, 3163, 3167, 3169, 3181, + 3187, 3191, 3203, 3209, 3217, 3221, 3229, 3251, 3253, 3257, + 3259, 3271, 3299, 3301, 3307, 3313, 3319, 3323, 3329, 3331, + 3343, 3347, 3359, 3361, 3371, 3373, 3389, 3391, 3407, 3413, + 3433, 3449, 3457, 3461, 3463, 3467, 3469, 3491, 3499, 3511, + 3517, 3527, 3529, 3533, 3539, 3541, 3547, 3557, 3559, 3571, + 3581, 3583, 3593, 3607, 3613, 3617, 3623, 3631, 3637, 3643, + 3659, 3671, 3673, 3677, 3691, 3697, 3701, 3709, 3719, 3727, + 3733, 3739, 3761, 3767, 3769, 3779, 3793, 3797, 3803, 3821, + 3823, 3833, 3847, 3851, 3853, 3863, 3877, 3881, 3889, 3907, + 3911, 3917, 3919, 3923, 3929, 3931, 3943, 3947, 3967, 3989, + 4001, 4003, 4007, 4013, 4019, 4021, 4027, 4049, 4051, 4057, + 4073, 4079, 4091, 4093, 4099, 4111, 4127, 4129, 4133, 4139, + 4153, 4157, 4159, 4177, 4201, 4211, 4217, 4219, 4229, 4231, + 4241, 4243, 4253, 4259, 4261, 4271, 4273, 4283, 4289, 4297, + 4327, 4337, 4339, 4349, 4357, 4363, 4373, 4391, 4397, 4409, + 4421, 4423, 
4441, 4447, 4451, 4457, 4463, 4481, 4483, 4493, + 4507, 4513, 4517, 4519, 4523, 4547, 4549, 4561, 4567, 4583, + 4591, 4597, 4603, 4621, 4637, 4639, 4643, 4649, 4651, 4657, + 4663, 4673, 4679, 4691, 4703, 4721, 4723, 4729, 4733, 4751, + 4759, 4783, 4787, 4789, 4793, 4799, 4801, 4813, 4817, 4831, + 4861, 4871, 4877, 4889, 4903, 4909, 4919, 4931, 4933, 4937, + 4943, 4951, 4957, 4967, 4969, 4973, 4987, 4993, 4999, 5003, + 5009, 5011, 5021, 5023, 5039, 5051, 5059, 5077, 5081, 5087, + 5099, 5101, 5107, 5113, 5119, 5147, 5153, 5167, 5171, 5179, + 5189, 5197, 5209, 5227, 5231, 5233, 5237, 5261, 5273, 5279, + 5281, 5297, 5303, 5309, 5323, 5333, 5347, 5351, 5381, 5387, + 5393, 5399, 5407, 5413, 5417, 5419, 5431, 5437, 5441, 5443, + 5449, 5471, 5477, 5479, 5483, 5501, 5503, 5507, 5519, 5521, + 5527, 5531, 5557, 5563, 5569, 5573, 5581, 5591, 5623, 5639, + 5641, 5647, 5651, 5653, 5657, 5659, 5669, 5683, 5689, 5693, + 5701, 5711, 5717, 5737, 5741, 5743, 5749, 5779, 5783, 5791, + 5801, 5807, 5813, 5821, 5827, 5839, 5843, 5849, 5851, 5857, + 5861, 5867, 5869, 5879, 5881, 5897, 5903, 5923, 5927, 5939, + 5953, 5981, 5987, 6007, 6011, 6029, 6037, 6043, 6047, 6053, + 6067, 6073, 6079, 6089, 6091, 6101, 6113, 6121, 6131, 6133, + 6143, 6151, 6163, 6173, 6197, 6199, 6203, 6211, 6217, 6221, + 6229, 6247, 6257, 6263, 6269, 6271, 6277, 6287, 6299, 6301, + 6311, 6317, 6323, 6329, 6337, 6343, 6353, 6359, 6361, 6367, + 6373, 6379, 6389, 6397, 6421, 6427, 6449, 6451, 6469, 6473, + 6481, 6491, 6521, 6529, 6547, 6551, 6553, 6563, 6569, 6571, + 6577, 6581, 6599, 6607, 6619, 6637, 6653, 6659, 6661, 6673, + 6679, 6689, 6691, 6701, 6703, 6709, 6719, 6733, 6737, 6761, + 6763, 6779, 6781, 6791, 6793, 6803, 6823, 6827, 6829, 6833, + 6841, 6857, 6863, 6869, 6871, 6883, 6899, 6907, 6911, 6917, + 6947, 6949, 6959, 6961, 6967, 6971, 6977, 6983, 6991, 6997, + 7001, 7013, 7019, 7027, 7039, 7043, 7057, 7069, 7079, 7103, + 7109, 7121, 7127, 7129, 7151, 7159, 7177, 7187, 7193, 7207, + 7211, 7213, 7219, 7229, 7237, 7243, 7247, 7253, 7283, 7297, + 7307, 7309, 7321, 7331, 7333, 7349, 7351, 7369, 7393, 7411, + 7417, 7433, 7451, 7457, 7459, 7477, 7481, 7487, 7489, 7499, + 7507, 7517, 7523, 7529, 7537, 7541, 7547, 7549, 7559, 7561, + 7573, 7577, 7583, 7589, 7591, 7603, 7607, 7621, 7639, 7643, + 7649, 7669, 7673, 7681, 7687, 7691, 7699, 7703, 7717, 7723, + 7727, 7741, 7753, 7757, 7759, 7789, 7793, 7817, 7823, 7829, + 7841, 7853, 7867, 7873, 7877, 7879, 7883, 7901, 7907, 7919, + 7927, 7933, 7937, 7949, 7951, 7963, 7993, 8009, 8011, 8017, + 8039, 8053, 8059, 8069, 8081, 8087, 8089, 8093, 8101, 8111, + 8117, 8123, 8147, 8161, 8167, 8171, 8179, 8191, 8209, 8219, + 8221, 8231, 8233, 8237, 8243, 8263, 8269, 8273, 8287, 8291, + 8293, 8297, 8311, 8317, 8329, 8353, 8363, 8369, 8377, 8387, + 8389, 8419, 8423, 8429, 8431, 8443, 8447, 8461, 8467, 8501, + 8513, 8521, 8527, 8537, 8539, 8543, 8563, 8573, 8581, 8597, + 8599, 8609, 8623, 8627, 8629, 8641, 8647, 8663, 8669, 8677, + 8681, 8689, 8693, 8699, 8707, 8713, 8719, 8731, 8737, 8741, + 8747, 8753, 8761, 8779, 8783, 8803, 8807, 8819, 8821, 8831, + 8837, 8839, 8849, 8861, 8863, 8867, 8887, 8893, 8923, 8929, + 8933, 8941, 8951, 8963, 8969, 8971, 8999, 9001, 9007, 9011, + 9013, 9029, 9041, 9043, 9049, 9059, 9067, 9091, 9103, 9109, + 9127, 9133, 9137, 9151, 9157, 9161, 9173, 9181, 9187, 9199, + 9203, 9209, 9221, 9227, 9239, 9241, 9257, 9277, 9281, 9283, + 9293, 9311, 9319, 9323, 9337, 9341, 9343, 9349, 9371, 9377, + 9391, 9397, 9403, 9413, 9419, 9421, 9431, 9433, 9437, 9439, + 9461, 9463, 9467, 9473, 9479, 
9491, 9497, 9511, 9521, 9533, + 9539, 9547, 9551, 9587, 9601, 9613, 9619, 9623, 9629, 9631, + 9643, 9649, 9661, 9677, 9679, 9689, 9697, 9719, 9721, 9733, + 9739, 9743, 9749, 9767, 9769, 9781, 9787, 9791, 9803, 9811, + 9817, 9829, 9833, 9839, 9851, 9857, 9859, 9871, 9883, 9887, + 9901, 9907, 9923, 9929, 9931, 9941, 9949, 9967, 9973, 10007, + 10009, 10037, 10039, 10061, 10067, 10069, 10079, 10091, 10093, 10099, + 10103, 10111, 10133, 10139, 10141, 10151, 10159, 10163, 10169, 10177, + 10181, 10193, 10211, 10223, 10243, 10247, 10253, 10259, 10267, 10271, + 10273, 10289, 10301, 10303, 10313, 10321, 10331, 10333, 10337, 10343, + 10357, 10369, 10391, 10399, 10427, 10429, 10433, 10453, 10457, 10459, + 10463, 10477, 10487, 10499, 10501, 10513, 10529, 10531, 10559, 10567, + 10589, 10597, 10601, 10607, 10613, 10627, 10631, 10639, 10651, 10657, + 10663, 10667, 10687, 10691, 10709, 10711, 10723, 10729, 10733, 10739, + 10753, 10771, 10781, 10789, 10799, 10831, 10837, 10847, 10853, 10859, + 10861, 10867, 10883, 10889, 10891, 10903, 10909, 10937, 10939, 10949, + 10957, 10973, 10979, 10987, 10993, 11003, 11027, 11047, 11057, 11059, + 11069, 11071, 11083, 11087, 11093, 11113, 11117, 11119, 11131, 11149, + 11159, 11161, 11171, 11173, 11177, 11197, 11213, 11239, 11243, 11251, + 11257, 11261, 11273, 11279, 11287, 11299, 11311, 11317, 11321, 11329, + 11351, 11353, 11369, 11383, 11393, 11399, 11411, 11423, 11437, 11443, + 11447, 11467, 11471, 11483, 11489, 11491, 11497, 11503, 11519, 11527, + 11549, 11551, 11579, 11587, 11593, 11597, 11617, 11621, 11633, 11657, + 11677, 11681, 11689, 11699, 11701, 11717, 11719, 11731, 11743, 11777, + 11779, 11783, 11789, 11801, 11807, 11813, 11821, 11827, 11831, 11833, + 11839, 11863, 11867, 11887, 11897, 11903, 11909, 11923, 11927, 11933, + 11939, 11941, 11953, 11959, 11969, 11971, 11981, 11987, 12007, 12011, + 12037, 12041, 12043, 12049, 12071, 12073, 12097, 12101, 12107, 12109, + 12113, 12119, 12143, 12149, 12157, 12161, 12163, 12197, 12203, 12211, + 12227, 12239, 12241, 12251, 12253, 12263, 12269, 12277, 12281, 12289, + 12301, 12323, 12329, 12343, 12347, 12373, 12377, 12379, 12391, 12401, + 12409, 12413, 12421, 12433, 12437, 12451, 12457, 12473, 12479, 12487, + 12491, 12497, 12503, 12511, 12517, 12527, 12539, 12541, 12547, 12553, + 12569, 12577, 12583, 12589, 12601, 12611, 12613, 12619, 12637, 12641, + 12647, 12653, 12659, 12671, 12689, 12697, 12703, 12713, 12721, 12739, + 12743, 12757, 12763, 12781, 12791, 12799, 12809, 12821, 12823, 12829, + 12841, 12853, 12889, 12893, 12899, 12907, 12911, 12917, 12919, 12923, + 12941, 12953, 12959, 12967, 12973, 12979, 12983, 13001, 13003, 13007, + 13009, 13033, 13037, 13043, 13049, 13063, 13093, 13099, 13103, 13109, + 13121, 13127, 13147, 13151, 13159, 13163, 13171, 13177, 13183, 13187, + 13217, 13219, 13229, 13241, 13249, 13259, 13267, 13291, 13297, 13309, + 13313, 13327, 13331, 13337, 13339, 13367, 13381, 13397, 13399, 13411, + 13417, 13421, 13441, 13451, 13457, 13463, 13469, 13477, 13487, 13499, + 13513, 13523, 13537, 13553, 13567, 13577, 13591, 13597, 13613, 13619, + 13627, 13633, 13649, 13669, 13679, 13681, 13687, 13691, 13693, 13697, + 13709, 13711, 13721, 13723, 13729, 13751, 13757, 13759, 13763, 13781, + 13789, 13799, 13807, 13829, 13831, 13841, 13859, 13873, 13877, 13879, + 13883, 13901, 13903, 13907, 13913, 13921, 13931, 13933, 13963, 13967, + 13997, 13999, 14009, 14011, 14029, 14033, 14051, 14057, 14071, 14081, + 14083, 14087, 14107, 14143, 14149, 14153, 14159, 14173, 14177, 14197, + 14207, 14221, 14243, 14249, 14251, 14281, 
14293, 14303, 14321, 14323, + 14327, 14341, 14347, 14369, 14387, 14389, 14401, 14407, 14411, 14419, + 14423, 14431, 14437, 14447, 14449, 14461, 14479, 14489, 14503, 14519, + 14533, 14537, 14543, 14549, 14551, 14557, 14561, 14563, 14591, 14593, + 14621, 14627, 14629, 14633, 14639, 14653, 14657, 14669, 14683, 14699, + 14713, 14717, 14723, 14731, 14737, 14741, 14747, 14753, 14759, 14767, + 14771, 14779, 14783, 14797, 14813, 14821, 14827, 14831, 14843, 14851, + 14867, 14869, 14879, 14887, 14891, 14897, 14923, 14929, 14939, 14947, + 14951, 14957, 14969, 14983, 15013, 15017, 15031, 15053, 15061, 15073, + 15077, 15083, 15091, 15101, 15107, 15121, 15131, 15137, 15139, 15149, + 15161, 15173, 15187, 15193, 15199, 15217, 15227, 15233, 15241, 15259, + 15263, 15269, 15271, 15277, 15287, 15289, 15299, 15307, 15313, 15319, + 15329, 15331, 15349, 15359, 15361, 15373, 15377, 15383, 15391, 15401, + 15413, 15427, 15439, 15443, 15451, 15461, 15467, 15473, 15493, 15497, + 15511, 15527, 15541, 15551, 15559, 15569, 15581, 15583, 15601, 15607, + 15619, 15629, 15641, 15643, 15647, 15649, 15661, 15667, 15671, 15679, + 15683, 15727, 15731, 15733, 15737, 15739, 15749, 15761, 15767, 15773, + 15787, 15791, 15797, 15803, 15809, 15817, 15823, 15859, 15877, 15881, + 15887, 15889, 15901, 15907, 15913, 15919, 15923, 15937, 15959, 15971, + 15973, 15991, 16001, 16007, 16033, 16057, 16061, 16063, 16067, 16069, + 16073, 16087, 16091, 16097, 16103, 16111, 16127, 16139, 16141, 16183, + 16187, 16189, 16193, 16217, 16223, 16229, 16231, 16249, 16253, 16267, + 16273, 16301, 16319, 16333, 16339, 16349, 16361, 16363, 16369, 16381, + 16411, 16417, 16421, 16427, 16433, 16447, 16451, 16453, 16477, 16481, + 16487, 16493, 16519, 16529, 16547, 16553, 16561, 16567, 16573, 16603, + 16607, 16619, 16631, 16633, 16649, 16651, 16657, 16661, 16673, 16691, + 16693, 16699, 16703, 16729, 16741, 16747, 16759, 16763, 16787, 16811, + 16823, 16829, 16831, 16843, 16871, 16879, 16883, 16889, 16901, 16903, + 16921, 16927, 16931, 16937, 16943, 16963, 16979, 16981, 16987, 16993, + 17011, 17021, 17027, 17029, 17033, 17041, 17047, 17053, 17077, 17093, + 17099, 17107, 17117, 17123, 17137, 17159, 17167, 17183, 17189, 17191, + 17203, 17207, 17209, 17231, 17239, 17257, 17291, 17293, 17299, 17317, + 17321, 17327, 17333, 17341, 17351, 17359, 17377, 17383, 17387, 17389, + 17393, 17401, 17417, 17419, 17431, 17443, 17449, 17467, 17471, 17477, + 17483, 17489, 17491, 17497, 17509, 17519, 17539, 17551, 17569, 17573, + 17579, 17581, 17597, 17599, 17609, 17623, 17627, 17657, 17659, 17669, + 17681, 17683, 17707, 17713, 17729, 17737, 17747, 17749, 17761, 17783, + 17789, 17791, 17807, 17827, 17837, 17839, 17851, 17863, 17881, 17891, + 17903, 17909, 17911, 17921, 17923, 17929, 17939, 17957, 17959, 17971, + 17977, 17981, 17987, 17989, 18013, 18041, 18043, 18047, 18049, 18059, + 18061, 18077, 18089, 18097, 18119, 18121, 18127, 18131, 18133, 18143, + 18149, 18169, 18181, 18191, 18199, 18211, 18217, 18223, 18229, 18233, + 18251, 18253, 18257, 18269, 18287, 18289, 18301, 18307, 18311, 18313, + 18329, 18341, 18353, 18367, 18371, 18379, 18397, 18401, 18413, 18427, + 18433, 18439, 18443, 18451, 18457, 18461, 18481, 18493, 18503, 18517, + 18521, 18523, 18539, 18541, 18553, 18583, 18587, 18593, 18617, 18637, + 18661, 18671, 18679, 18691, 18701, 18713, 18719, 18731, 18743, 18749, + 18757, 18773, 18787, 18793, 18797, 18803, 18839, 18859, 18869, 18899, + 18911, 18913, 18917, 18919, 18947, 18959, 18973, 18979, 19001, 19009, + 19013, 19031, 19037, 19051, 19069, 19073, 19079, 19081, 19087, 
19121, + 19139, 19141, 19157, 19163, 19181, 19183, 19207, 19211, 19213, 19219, + 19231, 19237, 19249, 19259, 19267, 19273, 19289, 19301, 19309, 19319, + 19333, 19373, 19379, 19381, 19387, 19391, 19403, 19417, 19421, 19423, + 19427, 19429, 19433, 19441, 19447, 19457, 19463, 19469, 19471, 19477, + 19483, 19489, 19501, 19507, 19531, 19541, 19543, 19553, 19559, 19571, + 19577, 19583, 19597, 19603, 19609, 19661, 19681, 19687, 19697, 19699, + 19709, 19717, 19727, 19739, 19751, 19753, 19759, 19763, 19777, 19793, + 19801, 19813, 19819, 19841, 19843, 19853, 19861, 19867, 19889, 19891, + 19913, 19919, 19927, 19937, 19949, 19961, 19963, 19973, 19979, 19991, + 19993, 19997, 20011, 20021, 20023, 20029, 20047, 20051, 20063, 20071, + 20089, 20101, 20107, 20113, 20117, 20123, 20129, 20143, 20147, 20149, + 20161, 20173, 20177, 20183, 20201, 20219, 20231, 20233, 20249, 20261, + 20269, 20287, 20297, 20323, 20327, 20333, 20341, 20347, 20353, 20357, + 20359, 20369, 20389, 20393, 20399, 20407, 20411, 20431, 20441, 20443, + 20477, 20479, 20483, 20507, 20509, 20521, 20533, 20543, 20549, 20551, + 20563, 20593, 20599, 20611, 20627, 20639, 20641, 20663, 20681, 20693, + 20707, 20717, 20719, 20731, 20743, 20747, 20749, 20753, 20759, 20771, + 20773, 20789, 20807, 20809, 20849, 20857, 20873, 20879, 20887, 20897, + 20899, 20903, 20921, 20929, 20939, 20947, 20959, 20963, 20981, 20983, + 21001, 21011, 21013, 21017, 21019, 21023, 21031, 21059, 21061, 21067, + 21089, 21101, 21107, 21121, 21139, 21143, 21149, 21157, 21163, 21169, + 21179, 21187, 21191, 21193, 21211, 21221, 21227, 21247, 21269, 21277, + 21283, 21313, 21317, 21319, 21323, 21341, 21347, 21377, 21379, 21383, + 21391, 21397, 21401, 21407, 21419, 21433, 21467, 21481, 21487, 21491, + 21493, 21499, 21503, 21517, 21521, 21523, 21529, 21557, 21559, 21563, + 21569, 21577, 21587, 21589, 21599, 21601, 21611, 21613, 21617, 21647, + 21649, 21661, 21673, 21683, 21701, 21713, 21727, 21737, 21739, 21751, + 21757, 21767, 21773, 21787, 21799, 21803, 21817, 21821, 21839, 21841, + 21851, 21859, 21863, 21871, 21881, 21893, 21911, 21929, 21937, 21943, + 21961, 21977, 21991, 21997, 22003, 22013, 22027, 22031, 22037, 22039, + 22051, 22063, 22067, 22073, 22079, 22091, 22093, 22109, 22111, 22123, + 22129, 22133, 22147, 22153, 22157, 22159, 22171, 22189, 22193, 22229, + 22247, 22259, 22271, 22273, 22277, 22279, 22283, 22291, 22303, 22307, + 22343, 22349, 22367, 22369, 22381, 22391, 22397, 22409, 22433, 22441, + 22447, 22453, 22469, 22481, 22483, 22501, 22511, 22531, 22541, 22543, + 22549, 22567, 22571, 22573, 22613, 22619, 22621, 22637, 22639, 22643, + 22651, 22669, 22679, 22691, 22697, 22699, 22709, 22717, 22721, 22727, + 22739, 22741, 22751, 22769, 22777, 22783, 22787, 22807, 22811, 22817, + 22853, 22859, 22861, 22871, 22877, 22901, 22907, 22921, 22937, 22943, + 22961, 22963, 22973, 22993, 23003, 23011, 23017, 23021, 23027, 23029, + 23039, 23041, 23053, 23057, 23059, 23063, 23071, 23081, 23087, 23099, + 23117, 23131, 23143, 23159, 23167, 23173, 23189, 23197, 23201, 23203, + 23209, 23227, 23251, 23269, 23279, 23291, 23293, 23297, 23311, 23321, + 23327, 23333, 23339, 23357, 23369, 23371, 23399, 23417, 23431, 23447, + 23459, 23473, 23497, 23509, 23531, 23537, 23539, 23549, 23557, 23561, + 23563, 23567, 23581, 23593, 23599, 23603, 23609, 23623, 23627, 23629, + 23633, 23663, 23669, 23671, 23677, 23687, 23689, 23719, 23741, 23743, + 23747, 23753, 23761, 23767, 23773, 23789, 23801, 23813, 23819, 23827, + 23831, 23833, 23857, 23869, 23873, 23879, 23887, 23893, 23899, 23909, + 23911, 23917, 
23929, 23957, 23971, 23977, 23981, 23993, 24001, 24007, + 24019, 24023, 24029, 24043, 24049, 24061, 24071, 24077, 24083, 24091, + 24097, 24103, 24107, 24109, 24113, 24121, 24133, 24137, 24151, 24169, + 24179, 24181, 24197, 24203, 24223, 24229, 24239, 24247, 24251, 24281, + 24317, 24329, 24337, 24359, 24371, 24373, 24379, 24391, 24407, 24413, + 24419, 24421, 24439, 24443, 24469, 24473, 24481, 24499, 24509, 24517, + 24527, 24533, 24547, 24551, 24571, 24593, 24611, 24623, 24631, 24659, + 24671, 24677, 24683, 24691, 24697, 24709, 24733, 24749, 24763, 24767, + 24781, 24793, 24799, 24809, 24821, 24841, 24847, 24851, 24859, 24877, + 24889, 24907, 24917, 24919, 24923, 24943, 24953, 24967, 24971, 24977, + 24979, 24989, 25013, 25031, 25033, 25037, 25057, 25073, 25087, 25097, + 25111, 25117, 25121, 25127, 25147, 25153, 25163, 25169, 25171, 25183, + 25189, 25219, 25229, 25237, 25243, 25247, 25253, 25261, 25301, 25303, + 25307, 25309, 25321, 25339, 25343, 25349, 25357, 25367, 25373, 25391, + 25409, 25411, 25423, 25439, 25447, 25453, 25457, 25463, 25469, 25471, + 25523, 25537, 25541, 25561, 25577, 25579, 25583, 25589, 25601, 25603, + 25609, 25621, 25633, 25639, 25643, 25657, 25667, 25673, 25679, 25693, + 25703, 25717, 25733, 25741, 25747, 25759, 25763, 25771, 25793, 25799, + 25801, 25819, 25841, 25847, 25849, 25867, 25873, 25889, 25903, 25913, + 25919, 25931, 25933, 25939, 25943, 25951, 25969, 25981, 25997, 25999, + 26003, 26017, 26021, 26029, 26041, 26053, 26083, 26099, 26107, 26111, + 26113, 26119, 26141, 26153, 26161, 26171, 26177, 26183, 26189, 26203, + 26209, 26227, 26237, 26249, 26251, 26261, 26263, 26267, 26293, 26297, + 26309, 26317, 26321, 26339, 26347, 26357, 26371, 26387, 26393, 26399, + 26407, 26417, 26423, 26431, 26437, 26449, 26459, 26479, 26489, 26497, + 26501, 26513, 26539, 26557, 26561, 26573, 26591, 26597, 26627, 26633, + 26641, 26647, 26669, 26681, 26683, 26687, 26693, 26699, 26701, 26711, + 26713, 26717, 26723, 26729, 26731, 26737, 26759, 26777, 26783, 26801, + 26813, 26821, 26833, 26839, 26849, 26861, 26863, 26879, 26881, 26891, + 26893, 26903, 26921, 26927, 26947, 26951, 26953, 26959, 26981, 26987, + 26993, 27011, 27017, 27031, 27043, 27059, 27061, 27067, 27073, 27077, + 27091, 27103, 27107, 27109, 27127, 27143, 27179, 27191, 27197, 27211, + 27239, 27241, 27253, 27259, 27271, 27277, 27281, 27283, 27299, 27329, + 27337, 27361, 27367, 27397, 27407, 27409, 27427, 27431, 27437, 27449, + 27457, 27479, 27481, 27487, 27509, 27527, 27529, 27539, 27541, 27551, + 27581, 27583, 27611, 27617, 27631, 27647, 27653, 27673, 27689, 27691, + 27697, 27701, 27733, 27737, 27739, 27743, 27749, 27751, 27763, 27767, + 27773, 27779, 27791, 27793, 27799, 27803, 27809, 27817, 27823, 27827, + 27847, 27851, 27883, 27893, 27901, 27917, 27919, 27941, 27943, 27947, + 27953, 27961, 27967, 27983, 27997, 28001, 28019, 28027, 28031, 28051, + 28057, 28069, 28081, 28087, 28097, 28099, 28109, 28111, 28123, 28151, + 28163, 28181, 28183, 28201, 28211, 28219, 28229, 28277, 28279, 28283, + 28289, 28297, 28307, 28309, 28319, 28349, 28351, 28387, 28393, 28403, + 28409, 28411, 28429, 28433, 28439, 28447, 28463, 28477, 28493, 28499, + 28513, 28517, 28537, 28541, 28547, 28549, 28559, 28571, 28573, 28579, + 28591, 28597, 28603, 28607, 28619, 28621, 28627, 28631, 28643, 28649, + 28657, 28661, 28663, 28669, 28687, 28697, 28703, 28711, 28723, 28729, + 28751, 28753, 28759, 28771, 28789, 28793, 28807, 28813, 28817, 28837, + 28843, 28859, 28867, 28871, 28879, 28901, 28909, 28921, 28927, 28933, + 28949, 28961, 28979, 29009, 29017, 
29021, 29023, 29027, 29033, 29059, + 29063, 29077, 29101, 29123, 29129, 29131, 29137, 29147, 29153, 29167, + 29173, 29179, 29191, 29201, 29207, 29209, 29221, 29231, 29243, 29251, + 29269, 29287, 29297, 29303, 29311, 29327, 29333, 29339, 29347, 29363, + 29383, 29387, 29389, 29399, 29401, 29411, 29423, 29429, 29437, 29443, + 29453, 29473, 29483, 29501, 29527, 29531, 29537, 29567, 29569, 29573, + 29581, 29587, 29599, 29611, 29629, 29633, 29641, 29663, 29669, 29671, + 29683, 29717, 29723, 29741, 29753, 29759, 29761, 29789, 29803, 29819, + 29833, 29837, 29851, 29863, 29867, 29873, 29879, 29881, 29917, 29921, + 29927, 29947, 29959, 29983, 29989, 30011, 30013, 30029, 30047, 30059, + 30071, 30089, 30091, 30097, 30103, 30109, 30113, 30119, 30133, 30137, + 30139, 30161, 30169, 30181, 30187, 30197, 30203, 30211, 30223, 30241, + 30253, 30259, 30269, 30271, 30293, 30307, 30313, 30319, 30323, 30341, + 30347, 30367, 30389, 30391, 30403, 30427, 30431, 30449, 30467, 30469, + 30491, 30493, 30497, 30509, 30517, 30529, 30539, 30553, 30557, 30559, + 30577, 30593, 30631, 30637, 30643, 30649, 30661, 30671, 30677, 30689, + 30697, 30703, 30707, 30713, 30727, 30757, 30763, 30773, 30781, 30803, + 30809, 30817, 30829, 30839, 30841, 30851, 30853, 30859, 30869, 30871, + 30881, 30893, 30911, 30931, 30937, 30941, 30949, 30971, 30977, 30983, + 31013, 31019, 31033, 31039, 31051, 31063, 31069, 31079, 31081, 31091, + 31121, 31123, 31139, 31147, 31151, 31153, 31159, 31177, 31181, 31183, + 31189, 31193, 31219, 31223, 31231, 31237, 31247, 31249, 31253, 31259, + 31267, 31271, 31277, 31307, 31319, 31321, 31327, 31333, 31337, 31357, + 31379, 31387, 31391, 31393, 31397, 31469, 31477, 31481, 31489, 31511, + 31513, 31517, 31531, 31541, 31543, 31547, 31567, 31573, 31583, 31601, + 31607, 31627, 31643, 31649, 31657, 31663, 31667, 31687, 31699, 31721, + 31723, 31727, 31729, 31741, 31751, 31769, 31771, 31793, 31799, 31817, + 31847, 31849, 31859, 31873, 31883, 31891, 31907, 31957, 31963, 31973, + 31981, 31991, 32003, 32009, 32027, 32029, 32051, 32057, 32059, 32063, + 32069, 32077, 32083, 32089, 32099, 32117, 32119, 32141, 32143, 32159, + 32173, 32183, 32189, 32191, 32203, 32213, 32233, 32237, 32251, 32257, + 32261, 32297, 32299, 32303, 32309, 32321, 32323, 32327, 32341, 32353, + 32359, 32363, 32369, 32371, 32377, 32381, 32401, 32411, 32413, 32423, + 32429, 32441, 32443, 32467, 32479, 32491, 32497, 32503, 32507, 32531, + 32533, 32537, 32561, 32563, 32569, 32573, 32579, 32587, 32603, 32609, + 32611, 32621, 32633, 32647, 32653, 32687, 32693, 32707, 32713, 32717, + 32719, 32749, 32771, 32779, 32783, 32789, 32797, 32801, 32803, 32831, + 32833, 32839, 32843, 32869, 32887, 32909, 32911, 32917, 32933, 32939, + 32941, 32957, 32969, 32971, 32983, 32987, 32993, 32999, 33013, 33023, + 33029, 33037, 33049, 33053, 33071, 33073, 33083, 33091, 33107, 33113, + 33119, 33149, 33151, 33161, 33179, 33181, 33191, 33199, 33203, 33211, + 33223, 33247, 33287, 33289, 33301, 33311, 33317, 33329, 33331, 33343, + 33347, 33349, 33353, 33359, 33377, 33391, 33403, 33409, 33413, 33427, + 33457, 33461, 33469, 33479, 33487, 33493, 33503, 33521, 33529, 33533, + 33547, 33563, 33569, 33577, 33581, 33587, 33589, 33599, 33601, 33613, + 33617, 33619, 33623, 33629, 33637, 33641, 33647, 33679, 33703, 33713, + 33721, 33739, 33749, 33751, 33757, 33767, 33769, 33773, 33791, 33797, + 33809, 33811, 33827, 33829, 33851, 33857, 33863, 33871, 33889, 33893, + 33911, 33923, 33931, 33937, 33941, 33961, 33967, 33997, 34019, 34031, + 34033, 34039, 34057, 34061, 34123, 34127, 34129, 34141, 
34147, 34157, + 34159, 34171, 34183, 34211, 34213, 34217, 34231, 34253, 34259, 34261, + 34267, 34273, 34283, 34297, 34301, 34303, 34313, 34319, 34327, 34337, + 34351, 34361, 34367, 34369, 34381, 34403, 34421, 34429, 34439, 34457, + 34469, 34471, 34483, 34487, 34499, 34501, 34511, 34513, 34519, 34537, + 34543, 34549, 34583, 34589, 34591, 34603, 34607, 34613, 34631, 34649, + 34651, 34667, 34673, 34679, 34687, 34693, 34703, 34721, 34729, 34739, + 34747, 34757, 34759, 34763, 34781, 34807, 34819, 34841, 34843, 34847, + 34849, 34871, 34877, 34883, 34897, 34913, 34919, 34939, 34949, 34961, + 34963, 34981, 35023, 35027, 35051, 35053, 35059, 35069, 35081, 35083, + 35089, 35099, 35107, 35111, 35117, 35129, 35141, 35149, 35153, 35159, + 35171, 35201, 35221, 35227, 35251, 35257, 35267, 35279, 35281, 35291, + 35311, 35317, 35323, 35327, 35339, 35353, 35363, 35381, 35393, 35401, + 35407, 35419, 35423, 35437, 35447, 35449, 35461, 35491, 35507, 35509, + 35521, 35527, 35531, 35533, 35537, 35543, 35569, 35573, 35591, 35593, + 35597, 35603, 35617, 35671, 35677, 35729, 35731, 35747, 35753, 35759, + 35771, 35797, 35801, 35803, 35809, 35831, 35837, 35839, 35851, 35863, + 35869, 35879, 35897, 35899, 35911, 35923, 35933, 35951, 35963, 35969, + 35977, 35983, 35993, 35999, 36007, 36011, 36013, 36017, 36037, 36061, + 36067, 36073, 36083, 36097, 36107, 36109, 36131, 36137, 36151, 36161, + 36187, 36191, 36209, 36217, 36229, 36241, 36251, 36263, 36269, 36277, + 36293, 36299, 36307, 36313, 36319, 36341, 36343, 36353, 36373, 36383, + 36389, 36433, 36451, 36457, 36467, 36469, 36473, 36479, 36493, 36497, + 36523, 36527, 36529, 36541, 36551, 36559, 36563, 36571, 36583, 36587, + 36599, 36607, 36629, 36637, 36643, 36653, 36671, 36677, 36683, 36691, + 36697, 36709, 36713, 36721, 36739, 36749, 36761, 36767, 36779, 36781, + 36787, 36791, 36793, 36809, 36821, 36833, 36847, 36857, 36871, 36877, + 36887, 36899, 36901, 36913, 36919, 36923, 36929, 36931, 36943, 36947, + 36973, 36979, 36997, 37003, 37013, 37019, 37021, 37039, 37049, 37057, + 37061, 37087, 37097, 37117, 37123, 37139, 37159, 37171, 37181, 37189, + 37199, 37201, 37217, 37223, 37243, 37253, 37273, 37277, 37307, 37309, + 37313, 37321, 37337, 37339, 37357, 37361, 37363, 37369, 37379, 37397, + 37409, 37423, 37441, 37447, 37463, 37483, 37489, 37493, 37501, 37507, + 37511, 37517, 37529, 37537, 37547, 37549, 37561, 37567, 37571, 37573, + 37579, 37589, 37591, 37607, 37619, 37633, 37643, 37649, 37657, 37663, + 37691, 37693, 37699, 37717, 37747, 37781, 37783, 37799, 37811, 37813, + 37831, 37847, 37853, 37861, 37871, 37879, 37889, 37897, 37907, 37951, + 37957, 37963, 37967, 37987, 37991, 37993, 37997, 38011, 38039, 38047, + 38053, 38069, 38083, 38113, 38119, 38149, 38153, 38167, 38177, 38183, + 38189, 38197, 38201, 38219, 38231, 38237, 38239, 38261, 38273, 38281, + 38287, 38299, 38303, 38317, 38321, 38327, 38329, 38333, 38351, 38371, + 38377, 38393, 38431, 38447, 38449, 38453, 38459, 38461, 38501, 38543, + 38557, 38561, 38567, 38569, 38593, 38603, 38609, 38611, 38629, 38639, + 38651, 38653, 38669, 38671, 38677, 38693, 38699, 38707, 38711, 38713, + 38723, 38729, 38737, 38747, 38749, 38767, 38783, 38791, 38803, 38821, + 38833, 38839, 38851, 38861, 38867, 38873, 38891, 38903, 38917, 38921, + 38923, 38933, 38953, 38959, 38971, 38977, 38993, 39019, 39023, 39041, + 39043, 39047, 39079, 39089, 39097, 39103, 39107, 39113, 39119, 39133, + 39139, 39157, 39161, 39163, 39181, 39191, 39199, 39209, 39217, 39227, + 39229, 39233, 39239, 39241, 39251, 39293, 39301, 39313, 39317, 39323, + 39341, 
39343, 39359, 39367, 39371, 39373, 39383, 39397, 39409, 39419, + 39439, 39443, 39451, 39461, 39499, 39503, 39509, 39511, 39521, 39541, + 39551, 39563, 39569, 39581, 39607, 39619, 39623, 39631, 39659, 39667, + 39671, 39679, 39703, 39709, 39719, 39727, 39733, 39749, 39761, 39769, + 39779, 39791, 39799, 39821, 39827, 39829, 39839, 39841, 39847, 39857, + 39863, 39869, 39877, 39883, 39887, 39901, 39929, 39937, 39953, 39971, + 39979, 39983, 39989, 40009, 40013, 40031, 40037, 40039, 40063, 40087, + 40093, 40099, 40111, 40123, 40127, 40129, 40151, 40153, 40163, 40169, + 40177, 40189, 40193, 40213, 40231, 40237, 40241, 40253, 40277, 40283, + 40289, 40343, 40351, 40357, 40361, 40387, 40423, 40427, 40429, 40433, + 40459, 40471, 40483, 40487, 40493, 40499, 40507, 40519, 40529, 40531, + 40543, 40559, 40577, 40583, 40591, 40597, 40609, 40627, 40637, 40639, + 40693, 40697, 40699, 40709, 40739, 40751, 40759, 40763, 40771, 40787, + 40801, 40813, 40819, 40823, 40829, 40841, 40847, 40849, 40853, 40867, + 40879, 40883, 40897, 40903, 40927, 40933, 40939, 40949, 40961, 40973, + 40993, 41011, 41017, 41023, 41039, 41047, 41051, 41057, 41077, 41081, + 41113, 41117, 41131, 41141, 41143, 41149, 41161, 41177, 41179, 41183, + 41189, 41201, 41203, 41213, 41221, 41227, 41231, 41233, 41243, 41257, + 41263, 41269, 41281, 41299, 41333, 41341, 41351, 41357, 41381, 41387, + 41389, 41399, 41411, 41413, 41443, 41453, 41467, 41479, 41491, 41507, + 41513, 41519, 41521, 41539, 41543, 41549, 41579, 41593, 41597, 41603, + 41609, 41611, 41617, 41621, 41627, 41641, 41647, 41651, 41659, 41669, + 41681, 41687, 41719, 41729, 41737, 41759, 41761, 41771, 41777, 41801, + 41809, 41813, 41843, 41849, 41851, 41863, 41879, 41887, 41893, 41897, + 41903, 41911, 41927, 41941, 41947, 41953, 41957, 41959, 41969, 41981, + 41983, 41999, 42013, 42017, 42019, 42023, 42043, 42061, 42071, 42073, + 42083, 42089, 42101, 42131, 42139, 42157, 42169, 42179, 42181, 42187, + 42193, 42197, 42209, 42221, 42223, 42227, 42239, 42257, 42281, 42283, + 42293, 42299, 42307, 42323, 42331, 42337, 42349, 42359, 42373, 42379, + 42391, 42397, 42403, 42407, 42409, 42433, 42437, 42443, 42451, 42457, + 42461, 42463, 42467, 42473, 42487, 42491, 42499, 42509, 42533, 42557, + 42569, 42571, 42577, 42589, 42611, 42641, 42643, 42649, 42667, 42677, + 42683, 42689, 42697, 42701, 42703, 42709, 42719, 42727, 42737, 42743, + 42751, 42767, 42773, 42787, 42793, 42797, 42821, 42829, 42839, 42841, + 42853, 42859, 42863, 42899, 42901, 42923, 42929, 42937, 42943, 42953, + 42961, 42967, 42979, 42989, 43003, 43013, 43019, 43037, 43049, 43051, + 43063, 43067, 43093, 43103, 43117, 43133, 43151, 43159, 43177, 43189, + 43201, 43207, 43223, 43237, 43261, 43271, 43283, 43291, 43313, 43319, + 43321, 43331, 43391, 43397, 43399, 43403, 43411, 43427, 43441, 43451, + 43457, 43481, 43487, 43499, 43517, 43541, 43543, 43573, 43577, 43579, + 43591, 43597, 43607, 43609, 43613, 43627, 43633, 43649, 43651, 43661, + 43669, 43691, 43711, 43717, 43721, 43753, 43759, 43777, 43781, 43783, + 43787, 43789, 43793, 43801, 43853, 43867, 43889, 43891, 43913, 43933, + 43943, 43951, 43961, 43963, 43969, 43973, 43987, 43991, 43997, 44017, + 44021, 44027, 44029, 44041, 44053, 44059, 44071, 44087, 44089, 44101, + 44111, 44119, 44123, 44129, 44131, 44159, 44171, 44179, 44189, 44201, + 44203, 44207, 44221, 44249, 44257, 44263, 44267, 44269, 44273, 44279, + 44281, 44293, 44351, 44357, 44371, 44381, 44383, 44389, 44417, 44449, + 44453, 44483, 44491, 44497, 44501, 44507, 44519, 44531, 44533, 44537, + 44543, 44549, 44563, 44579, 
44587, 44617, 44621, 44623, 44633, 44641, + 44647, 44651, 44657, 44683, 44687, 44699, 44701, 44711, 44729, 44741, + 44753, 44771, 44773, 44777, 44789, 44797, 44809, 44819, 44839, 44843, + 44851, 44867, 44879, 44887, 44893, 44909, 44917, 44927, 44939, 44953, + 44959, 44963, 44971, 44983, 44987, 45007, 45013, 45053, 45061, 45077, + 45083, 45119, 45121, 45127, 45131, 45137, 45139, 45161, 45179, 45181, + 45191, 45197, 45233, 45247, 45259, 45263, 45281, 45289, 45293, 45307, + 45317, 45319, 45329, 45337, 45341, 45343, 45361, 45377, 45389, 45403, + 45413, 45427, 45433, 45439, 45481, 45491, 45497, 45503, 45523, 45533, + 45541, 45553, 45557, 45569, 45587, 45589, 45599, 45613, 45631, 45641, + 45659, 45667, 45673, 45677, 45691, 45697, 45707, 45737, 45751, 45757, + 45763, 45767, 45779, 45817, 45821, 45823, 45827, 45833, 45841, 45853, + 45863, 45869, 45887, 45893, 45943, 45949, 45953, 45959, 45971, 45979, + 45989, 46021, 46027, 46049, 46051, 46061, 46073, 46091, 46093, 46099, + 46103, 46133, 46141, 46147, 46153, 46171, 46181, 46183, 46187, 46199, + 46219, 46229, 46237, 46261, 46271, 46273, 46279, 46301, 46307, 46309, + 46327, 46337, 46349, 46351, 46381, 46399, 46411, 46439, 46441, 46447, + 46451, 46457, 46471, 46477, 46489, 46499, 46507, 46511, 46523, 46549, + 46559, 46567, 46573, 46589, 46591, 46601, 46619, 46633, 46639, 46643, + 46649, 46663, 46679, 46681, 46687, 46691, 46703, 46723, 46727, 46747, + 46751, 46757, 46769, 46771, 46807, 46811, 46817, 46819, 46829, 46831, + 46853, 46861, 46867, 46877, 46889, 46901, 46919, 46933, 46957, 46993, + 46997, 47017, 47041, 47051, 47057, 47059, 47087, 47093, 47111, 47119, + 47123, 47129, 47137, 47143, 47147, 47149, 47161, 47189, 47207, 47221, + 47237, 47251, 47269, 47279, 47287, 47293, 47297, 47303, 47309, 47317, + 47339, 47351, 47353, 47363, 47381, 47387, 47389, 47407, 47417, 47419, + 47431, 47441, 47459, 47491, 47497, 47501, 47507, 47513, 47521, 47527, + 47533, 47543, 47563, 47569, 47581, 47591, 47599, 47609, 47623, 47629, + 47639, 47653, 47657, 47659, 47681, 47699, 47701, 47711, 47713, 47717, + 47737, 47741, 47743, 47777, 47779, 47791, 47797, 47807, 47809, 47819, + 47837, 47843, 47857, 47869, 47881, 47903, 47911, 47917, 47933, 47939, + 47947, 47951, 47963, 47969, 47977, 47981, 48017, 48023, 48029, 48049, + 48073, 48079, 48091, 48109, 48119, 48121, 48131, 48157, 48163, 48179, + 48187, 48193, 48197, 48221, 48239, 48247, 48259, 48271, 48281, 48299, + 48311, 48313, 48337, 48341, 48353, 48371, 48383, 48397, 48407, 48409, + 48413, 48437, 48449, 48463, 48473, 48479, 48481, 48487, 48491, 48497, + 48523, 48527, 48533, 48539, 48541, 48563, 48571, 48589, 48593, 48611, + 48619, 48623, 48647, 48649, 48661, 48673, 48677, 48679, 48731, 48733, + 48751, 48757, 48761, 48767, 48779, 48781, 48787, 48799, 48809, 48817, + 48821, 48823, 48847, 48857, 48859, 48869, 48871, 48883, 48889, 48907, + 48947, 48953, 48973, 48989, 48991, 49003, 49009, 49019, 49031, 49033, + 49037, 49043, 49057, 49069, 49081, 49103, 49109, 49117, 49121, 49123, + 49139, 49157, 49169, 49171, 49177, 49193, 49199, 49201, 49207, 49211, + 49223, 49253, 49261, 49277, 49279, 49297, 49307, 49331, 49333, 49339, + 49363, 49367, 49369, 49391, 49393, 49409, 49411, 49417, 49429, 49433, + 49451, 49459, 49463, 49477, 49481, 49499, 49523, 49529, 49531, 49537, + 49547, 49549, 49559, 49597, 49603, 49613, 49627, 49633, 49639, 49663, + 49667, 49669, 49681, 49697, 49711, 49727, 49739, 49741, 49747, 49757, + 49783, 49787, 49789, 49801, 49807, 49811, 49823, 49831, 49843, 49853, + 49871, 49877, 49891, 49919, 49921, 49927, 49937, 
49939, 49943, 49957, + 49991, 49993, 49999, 50021, 50023, 50033, 50047, 50051, 50053, 50069, + 50077, 50087, 50093, 50101, 50111, 50119, 50123, 50129, 50131, 50147, + 50153, 50159, 50177, 50207, 50221, 50227, 50231, 50261, 50263, 50273, + 50287, 50291, 50311, 50321, 50329, 50333, 50341, 50359, 50363, 50377, + 50383, 50387, 50411, 50417, 50423, 50441, 50459, 50461, 50497, 50503, + 50513, 50527, 50539, 50543, 50549, 50551, 50581, 50587, 50591, 50593, + 50599, 50627, 50647, 50651, 50671, 50683, 50707, 50723, 50741, 50753, + 50767, 50773, 50777, 50789, 50821, 50833, 50839, 50849, 50857, 50867, + 50873, 50891, 50893, 50909, 50923, 50929, 50951, 50957, 50969, 50971, + 50989, 50993, 51001, 51031, 51043, 51047, 51059, 51061, 51071, 51109, + 51131, 51133, 51137, 51151, 51157, 51169, 51193, 51197, 51199, 51203, + 51217, 51229, 51239, 51241, 51257, 51263, 51283, 51287, 51307, 51329, + 51341, 51343, 51347, 51349, 51361, 51383, 51407, 51413, 51419, 51421, + 51427, 51431, 51437, 51439, 51449, 51461, 51473, 51479, 51481, 51487, + 51503, 51511, 51517, 51521, 51539, 51551, 51563, 51577, 51581, 51593, + 51599, 51607, 51613, 51631, 51637, 51647, 51659, 51673, 51679, 51683, + 51691, 51713, 51719, 51721, 51749, 51767, 51769, 51787, 51797, 51803, + 51817, 51827, 51829, 51839, 51853, 51859, 51869, 51871, 51893, 51899, + 51907, 51913, 51929, 51941, 51949, 51971, 51973, 51977, 51991, 52009, + 52021, 52027, 52051, 52057, 52067, 52069, 52081, 52103, 52121, 52127, + 52147, 52153, 52163, 52177, 52181, 52183, 52189, 52201, 52223, 52237, + 52249, 52253, 52259, 52267, 52289, 52291, 52301, 52313, 52321, 52361, + 52363, 52369, 52379, 52387, 52391, 52433, 52453, 52457, 52489, 52501, + 52511, 52517, 52529, 52541, 52543, 52553, 52561, 52567, 52571, 52579, + 52583, 52609, 52627, 52631, 52639, 52667, 52673, 52691, 52697, 52709, + 52711, 52721, 52727, 52733, 52747, 52757, 52769, 52783, 52807, 52813, + 52817, 52837, 52859, 52861, 52879, 52883, 52889, 52901, 52903, 52919, + 52937, 52951, 52957, 52963, 52967, 52973, 52981, 52999, 53003, 53017, + 53047, 53051, 53069, 53077, 53087, 53089, 53093, 53101, 53113, 53117, + 53129, 53147, 53149, 53161, 53171, 53173, 53189, 53197, 53201, 53231, + 53233, 53239, 53267, 53269, 53279, 53281, 53299, 53309, 53323, 53327, + 53353, 53359, 53377, 53381, 53401, 53407, 53411, 53419, 53437, 53441, + 53453, 53479, 53503, 53507, 53527, 53549, 53551, 53569, 53591, 53593, + 53597, 53609, 53611, 53617, 53623, 53629, 53633, 53639, 53653, 53657, + 53681, 53693, 53699, 53717, 53719, 53731, 53759, 53773, 53777, 53783, + 53791, 53813, 53819, 53831, 53849, 53857, 53861, 53881, 53887, 53891, + 53897, 53899, 53917, 53923, 53927, 53939, 53951, 53959, 53987, 53993, + 54001, 54011, 54013, 54037, 54049, 54059, 54083, 54091, 54101, 54121, + 54133, 54139, 54151, 54163, 54167, 54181, 54193, 54217, 54251, 54269, + 54277, 54287, 54293, 54311, 54319, 54323, 54331, 54347, 54361, 54367, + 54371, 54377, 54401, 54403, 54409, 54413, 54419, 54421, 54437, 54443, + 54449, 54469, 54493, 54497, 54499, 54503, 54517, 54521, 54539, 54541, + 54547, 54559, 54563, 54577, 54581, 54583, 54601, 54617, 54623, 54629, + 54631, 54647, 54667, 54673, 54679, 54709, 54713, 54721, 54727, 54751, + 54767, 54773, 54779, 54787, 54799, 54829, 54833, 54851, 54869, 54877, + 54881, 54907, 54917, 54919, 54941, 54949, 54959, 54973, 54979, 54983, + 55001, 55009, 55021, 55049, 55051, 55057, 55061, 55073, 55079, 55103, + 55109, 55117, 55127, 55147, 55163, 55171, 55201, 55207, 55213, 55217, + 55219, 55229, 55243, 55249, 55259, 55291, 55313, 55331, 55333, 55337, + 
55339, 55343, 55351, 55373, 55381, 55399, 55411, 55439, 55441, 55457, + 55469, 55487, 55501, 55511, 55529, 55541, 55547, 55579, 55589, 55603, + 55609, 55619, 55621, 55631, 55633, 55639, 55661, 55663, 55667, 55673, + 55681, 55691, 55697, 55711, 55717, 55721, 55733, 55763, 55787, 55793, + 55799, 55807, 55813, 55817, 55819, 55823, 55829, 55837, 55843, 55849, + 55871, 55889, 55897, 55901, 55903, 55921, 55927, 55931, 55933, 55949, + 55967, 55987, 55997, 56003, 56009, 56039, 56041, 56053, 56081, 56087, + 56093, 56099, 56101, 56113, 56123, 56131, 56149, 56167, 56171, 56179, + 56197, 56207, 56209, 56237, 56239, 56249, 56263, 56267, 56269, 56299, + 56311, 56333, 56359, 56369, 56377, 56383, 56393, 56401, 56417, 56431, + 56437, 56443, 56453, 56467, 56473, 56477, 56479, 56489, 56501, 56503, + 56509, 56519, 56527, 56531, 56533, 56543, 56569, 56591, 56597, 56599, + 56611, 56629, 56633, 56659, 56663, 56671, 56681, 56687, 56701, 56711, + 56713, 56731, 56737, 56747, 56767, 56773, 56779, 56783, 56807, 56809, + 56813, 56821, 56827, 56843, 56857, 56873, 56891, 56893, 56897, 56909, + 56911, 56921, 56923, 56929, 56941, 56951, 56957, 56963, 56983, 56989, + 56993, 56999, 57037, 57041, 57047, 57059, 57073, 57077, 57089, 57097, + 57107, 57119, 57131, 57139, 57143, 57149, 57163, 57173, 57179, 57191, + 57193, 57203, 57221, 57223, 57241, 57251, 57259, 57269, 57271, 57283, + 57287, 57301, 57329, 57331, 57347, 57349, 57367, 57373, 57383, 57389, + 57397, 57413, 57427, 57457, 57467, 57487, 57493, 57503, 57527, 57529, + 57557, 57559, 57571, 57587, 57593, 57601, 57637, 57641, 57649, 57653, + 57667, 57679, 57689, 57697, 57709, 57713, 57719, 57727, 57731, 57737, + 57751, 57773, 57781, 57787, 57791, 57793, 57803, 57809, 57829, 57839, + 57847, 57853, 57859, 57881, 57899, 57901, 57917, 57923, 57943, 57947, + 57973, 57977, 57991, 58013, 58027, 58031, 58043, 58049, 58057, 58061, + 58067, 58073, 58099, 58109, 58111, 58129, 58147, 58151, 58153, 58169, + 58171, 58189, 58193, 58199, 58207, 58211, 58217, 58229, 58231, 58237, + 58243, 58271, 58309, 58313, 58321, 58337, 58363, 58367, 58369, 58379, + 58391, 58393, 58403, 58411, 58417, 58427, 58439, 58441, 58451, 58453, + 58477, 58481, 58511, 58537, 58543, 58549, 58567, 58573, 58579, 58601, + 58603, 58613, 58631, 58657, 58661, 58679, 58687, 58693, 58699, 58711, + 58727, 58733, 58741, 58757, 58763, 58771, 58787, 58789, 58831, 58889, + 58897, 58901, 58907, 58909, 58913, 58921, 58937, 58943, 58963, 58967, + 58979, 58991, 58997, 59009, 59011, 59021, 59023, 59029, 59051, 59053, + 59063, 59069, 59077, 59083, 59093, 59107, 59113, 59119, 59123, 59141, + 59149, 59159, 59167, 59183, 59197, 59207, 59209, 59219, 59221, 59233, + 59239, 59243, 59263, 59273, 59281, 59333, 59341, 59351, 59357, 59359, + 59369, 59377, 59387, 59393, 59399, 59407, 59417, 59419, 59441, 59443, + 59447, 59453, 59467, 59471, 59473, 59497, 59509, 59513, 59539, 59557, + 59561, 59567, 59581, 59611, 59617, 59621, 59627, 59629, 59651, 59659, + 59663, 59669, 59671, 59693, 59699, 59707, 59723, 59729, 59743, 59747, + 59753, 59771, 59779, 59791, 59797, 59809, 59833, 59863, 59879, 59887, + 59921, 59929, 59951, 59957, 59971, 59981, 59999, 60013, 60017, 60029, + 60037, 60041, 60077, 60083, 60089, 60091, 60101, 60103, 60107, 60127, + 60133, 60139, 60149, 60161, 60167, 60169, 60209, 60217, 60223, 60251, + 60257, 60259, 60271, 60289, 60293, 60317, 60331, 60337, 60343, 60353, + 60373, 60383, 60397, 60413, 60427, 60443, 60449, 60457, 60493, 60497, + 60509, 60521, 60527, 60539, 60589, 60601, 60607, 60611, 60617, 60623, + 60631, 60637, 60647, 
60649, 60659, 60661, 60679, 60689, 60703, 60719, + 60727, 60733, 60737, 60757, 60761, 60763, 60773, 60779, 60793, 60811, + 60821, 60859, 60869, 60887, 60889, 60899, 60901, 60913, 60917, 60919, + 60923, 60937, 60943, 60953, 60961, 61001, 61007, 61027, 61031, 61043, + 61051, 61057, 61091, 61099, 61121, 61129, 61141, 61151, 61153, 61169, + 61211, 61223, 61231, 61253, 61261, 61283, 61291, 61297, 61331, 61333, + 61339, 61343, 61357, 61363, 61379, 61381, 61403, 61409, 61417, 61441, + 61463, 61469, 61471, 61483, 61487, 61493, 61507, 61511, 61519, 61543, + 61547, 61553, 61559, 61561, 61583, 61603, 61609, 61613, 61627, 61631, + 61637, 61643, 61651, 61657, 61667, 61673, 61681, 61687, 61703, 61717, + 61723, 61729, 61751, 61757, 61781, 61813, 61819, 61837, 61843, 61861, + 61871, 61879, 61909, 61927, 61933, 61949, 61961, 61967, 61979, 61981, + 61987, 61991, 62003, 62011, 62017, 62039, 62047, 62053, 62057, 62071, + 62081, 62099, 62119, 62129, 62131, 62137, 62141, 62143, 62171, 62189, + 62191, 62201, 62207, 62213, 62219, 62233, 62273, 62297, 62299, 62303, + 62311, 62323, 62327, 62347, 62351, 62383, 62401, 62417, 62423, 62459, + 62467, 62473, 62477, 62483, 62497, 62501, 62507, 62533, 62539, 62549, + 62563, 62581, 62591, 62597, 62603, 62617, 62627, 62633, 62639, 62653, + 62659, 62683, 62687, 62701, 62723, 62731, 62743, 62753, 62761, 62773, + 62791, 62801, 62819, 62827, 62851, 62861, 62869, 62873, 62897, 62903, + 62921, 62927, 62929, 62939, 62969, 62971, 62981, 62983, 62987, 62989, + 63029, 63031, 63059, 63067, 63073, 63079, 63097, 63103, 63113, 63127, + 63131, 63149, 63179, 63197, 63199, 63211, 63241, 63247, 63277, 63281, + 63299, 63311, 63313, 63317, 63331, 63337, 63347, 63353, 63361, 63367, + 63377, 63389, 63391, 63397, 63409, 63419, 63421, 63439, 63443, 63463, + 63467, 63473, 63487, 63493, 63499, 63521, 63527, 63533, 63541, 63559, + 63577, 63587, 63589, 63599, 63601, 63607, 63611, 63617, 63629, 63647, + 63649, 63659, 63667, 63671, 63689, 63691, 63697, 63703, 63709, 63719, + 63727, 63737, 63743, 63761, 63773, 63781, 63793, 63799, 63803, 63809, + 63823, 63839, 63841, 63853, 63857, 63863, 63901, 63907, 63913, 63929, + 63949, 63977, 63997, 64007, 64013, 64019, 64033, 64037, 64063, 64067, + 64081, 64091, 64109, 64123, 64151, 64153, 64157, 64171, 64187, 64189, + 64217, 64223, 64231, 64237, 64271, 64279, 64283, 64301, 64303, 64319, + 64327, 64333, 64373, 64381, 64399, 64403, 64433, 64439, 64451, 64453, + 64483, 64489, 64499, 64513, 64553, 64567, 64577, 64579, 64591, 64601, + 64609, 64613, 64621, 64627, 64633, 64661, 64663, 64667, 64679, 64693, + 64709, 64717, 64747, 64763, 64781, 64783, 64793, 64811, 64817, 64849, + 64853, 64871, 64877, 64879, 64891, 64901, 64919, 64921, 64927, 64937, + 64951, 64969, 64997, 65003, 65011, 65027, 65029, 65033, 65053, 65063, + 65071, 65089, 65099, 65101, 65111, 65119, 65123, 65129, 65141, 65147, + 65167, 65171, 65173, 65179, 65183, 65203, 65213, 65239, 65257, 65267, + 65269, 65287, 65293, 65309, 65323, 65327, 65353, 65357, 65371, 65381, + 65393, 65407, 65413, 65419, 65423, 65437, 65447, 65449, 65479, 65497, + 65519, 65521, 65537, 65539, 65543, 65551, 65557, 65563, 65579, 65581, + 65587, 65599, 65609, 65617, 65629, 65633, 65647, 65651, 65657, 65677, + 65687, 65699, 65701, 65707, 65713, 65717, 65719, 65729, 65731, 65761, + 65777, 65789, 65809, 65827, 65831, 65837, 65839, 65843, 65851, 65867, + 65881, 65899, 65921, 65927, 65929, 65951, 65957, 65963, 65981, 65983, + 65993, 66029, 66037, 66041, 66047, 66067, 66071, 66083, 66089, 66103, + 66107, 66109, 66137, 66161, 66169, 66173, 
66179, 66191, 66221, 66239, + 66271, 66293, 66301, 66337, 66343, 66347, 66359, 66361, 66373, 66377, + 66383, 66403, 66413, 66431, 66449, 66457, 66463, 66467, 66491, 66499, + 66509, 66523, 66529, 66533, 66541, 66553, 66569, 66571, 66587, 66593, + 66601, 66617, 66629, 66643, 66653, 66683, 66697, 66701, 66713, 66721, + 66733, 66739, 66749, 66751, 66763, 66791, 66797, 66809, 66821, 66841, + 66851, 66853, 66863, 66877, 66883, 66889, 66919, 66923, 66931, 66943, + 66947, 66949, 66959, 66973, 66977, 67003, 67021, 67033, 67043, 67049, + 67057, 67061, 67073, 67079, 67103, 67121, 67129, 67139, 67141, 67153, + 67157, 67169, 67181, 67187, 67189, 67211, 67213, 67217, 67219, 67231, + 67247, 67261, 67271, 67273, 67289, 67307, 67339, 67343, 67349, 67369, + 67391, 67399, 67409, 67411, 67421, 67427, 67429, 67433, 67447, 67453, + 67477, 67481, 67489, 67493, 67499, 67511, 67523, 67531, 67537, 67547, + 67559, 67567, 67577, 67579, 67589, 67601, 67607, 67619, 67631, 67651, + 67679, 67699, 67709, 67723, 67733, 67741, 67751, 67757, 67759, 67763, + 67777, 67783, 67789, 67801, 67807, 67819, 67829, 67843, 67853, 67867, + 67883, 67891, 67901, 67927, 67931, 67933, 67939, 67943, 67957, 67961, + 67967, 67979, 67987, 67993, 68023, 68041, 68053, 68059, 68071, 68087, + 68099, 68111, 68113, 68141, 68147, 68161, 68171, 68207, 68209, 68213, + 68219, 68227, 68239, 68261, 68279, 68281, 68311, 68329, 68351, 68371, + 68389, 68399, 68437, 68443, 68447, 68449, 68473, 68477, 68483, 68489, + 68491, 68501, 68507, 68521, 68531, 68539, 68543, 68567, 68581, 68597, + 68611, 68633, 68639, 68659, 68669, 68683, 68687, 68699, 68711, 68713, + 68729, 68737, 68743, 68749, 68767, 68771, 68777, 68791, 68813, 68819, + 68821, 68863, 68879, 68881, 68891, 68897, 68899, 68903, 68909, 68917, + 68927, 68947, 68963, 68993, 69001, 69011, 69019, 69029, 69031, 69061, + 69067, 69073, 69109, 69119, 69127, 69143, 69149, 69151, 69163, 69191, + 69193, 69197, 69203, 69221, 69233, 69239, 69247, 69257, 69259, 69263, + 69313, 69317, 69337, 69341, 69371, 69379, 69383, 69389, 69401, 69403, + 69427, 69431, 69439, 69457, 69463, 69467, 69473, 69481, 69491, 69493, + 69497, 69499, 69539, 69557, 69593, 69623, 69653, 69661, 69677, 69691, + 69697, 69709, 69737, 69739, 69761, 69763, 69767, 69779, 69809, 69821, + 69827, 69829, 69833, 69847, 69857, 69859, 69877, 69899, 69911, 69929, + 69931, 69941, 69959, 69991, 69997, 70001, 70003, 70009, 70019, 70039, + 70051, 70061, 70067, 70079, 70099, 70111, 70117, 70121, 70123, 70139, + 70141, 70157, 70163, 70177, 70181, 70183, 70199, 70201, 70207, 70223, + 70229, 70237, 70241, 70249, 70271, 70289, 70297, 70309, 70313, 70321, + 70327, 70351, 70373, 70379, 70381, 70393, 70423, 70429, 70439, 70451, + 70457, 70459, 70481, 70487, 70489, 70501, 70507, 70529, 70537, 70549, + 70571, 70573, 70583, 70589, 70607, 70619, 70621, 70627, 70639, 70657, + 70663, 70667, 70687, 70709, 70717, 70729, 70753, 70769, 70783, 70793, + 70823, 70841, 70843, 70849, 70853, 70867, 70877, 70879, 70891, 70901, + 70913, 70919, 70921, 70937, 70949, 70951, 70957, 70969, 70979, 70981, + 70991, 70997, 70999, 71011, 71023, 71039, 71059, 71069, 71081, 71089, + 71119, 71129, 71143, 71147, 71153, 71161, 71167, 71171, 71191, 71209, + 71233, 71237, 71249, 71257, 71261, 71263, 71287, 71293, 71317, 71327, + 71329, 71333, 71339, 71341, 71347, 71353, 71359, 71363, 71387, 71389, + 71399, 71411, 71413, 71419, 71429, 71437, 71443, 71453, 71471, 71473, + 71479, 71483, 71503, 71527, 71537, 71549, 71551, 71563, 71569, 71593, + 71597, 71633, 71647, 71663, 71671, 71693, 71699, 71707, 71711, 
71713, + 71719, 71741, 71761, 71777, 71789, 71807, 71809, 71821, 71837, 71843, + 71849, 71861, 71867, 71879, 71881, 71887, 71899, 71909, 71917, 71933, + 71941, 71947, 71963, 71971, 71983, 71987, 71993, 71999, 72019, 72031, + 72043, 72047, 72053, 72073, 72077, 72089, 72091, 72101, 72103, 72109, + 72139, 72161, 72167, 72169, 72173, 72211, 72221, 72223, 72227, 72229, + 72251, 72253, 72269, 72271, 72277, 72287, 72307, 72313, 72337, 72341, + 72353, 72367, 72379, 72383, 72421, 72431, 72461, 72467, 72469, 72481, + 72493, 72497, 72503, 72533, 72547, 72551, 72559, 72577, 72613, 72617, + 72623, 72643, 72647, 72649, 72661, 72671, 72673, 72679, 72689, 72701, + 72707, 72719, 72727, 72733, 72739, 72763, 72767, 72797, 72817, 72823, + 72859, 72869, 72871, 72883, 72889, 72893, 72901, 72907, 72911, 72923, + 72931, 72937, 72949, 72953, 72959, 72973, 72977, 72997, 73009, 73013, + 73019, 73037, 73039, 73043, 73061, 73063, 73079, 73091, 73121, 73127, + 73133, 73141, 73181, 73189, 73237, 73243, 73259, 73277, 73291, 73303, + 73309, 73327, 73331, 73351, 73361, 73363, 73369, 73379, 73387, 73417, + 73421, 73433, 73453, 73459, 73471, 73477, 73483, 73517, 73523, 73529, + 73547, 73553, 73561, 73571, 73583, 73589, 73597, 73607, 73609, 73613, + 73637, 73643, 73651, 73673, 73679, 73681, 73693, 73699, 73709, 73721, + 73727, 73751, 73757, 73771, 73783, 73819, 73823, 73847, 73849, 73859, + 73867, 73877, 73883, 73897, 73907, 73939, 73943, 73951, 73961, 73973, + 73999, 74017, 74021, 74027, 74047, 74051, 74071, 74077, 74093, 74099, + 74101, 74131, 74143, 74149, 74159, 74161, 74167, 74177, 74189, 74197, + 74201, 74203, 74209, 74219, 74231, 74257, 74279, 74287, 74293, 74297, + 74311, 74317, 74323, 74353, 74357, 74363, 74377, 74381, 74383, 74411, + 74413, 74419, 74441, 74449, 74453, 74471, 74489, 74507, 74509, 74521, + 74527, 74531, 74551, 74561, 74567, 74573, 74587, 74597, 74609, 74611, + 74623, 74653, 74687, 74699, 74707, 74713, 74717, 74719, 74729, 74731, + 74747, 74759, 74761, 74771, 74779, 74797, 74821, 74827, 74831, 74843, + 74857, 74861, 74869, 74873, 74887, 74891, 74897, 74903, 74923, 74929, + 74933, 74941, 74959, 75011, 75013, 75017, 75029, 75037, 75041, 75079, + 75083, 75109, 75133, 75149, 75161, 75167, 75169, 75181, 75193, 75209, + 75211, 75217, 75223, 75227, 75239, 75253, 75269, 75277, 75289, 75307, + 75323, 75329, 75337, 75347, 75353, 75367, 75377, 75389, 75391, 75401, + 75403, 75407, 75431, 75437, 75479, 75503, 75511, 75521, 75527, 75533, + 75539, 75541, 75553, 75557, 75571, 75577, 75583, 75611, 75617, 75619, + 75629, 75641, 75653, 75659, 75679, 75683, 75689, 75703, 75707, 75709, + 75721, 75731, 75743, 75767, 75773, 75781, 75787, 75793, 75797, 75821, + 75833, 75853, 75869, 75883, 75913, 75931, 75937, 75941, 75967, 75979, + 75983, 75989, 75991, 75997, 76001, 76003, 76031, 76039, 76079, 76081, + 76091, 76099, 76103, 76123, 76129, 76147, 76157, 76159, 76163, 76207, + 76213, 76231, 76243, 76249, 76253, 76259, 76261, 76283, 76289, 76303, + 76333, 76343, 76367, 76369, 76379, 76387, 76403, 76421, 76423, 76441, + 76463, 76471, 76481, 76487, 76493, 76507, 76511, 76519, 76537, 76541, + 76543, 76561, 76579, 76597, 76603, 76607, 76631, 76649, 76651, 76667, + 76673, 76679, 76697, 76717, 76733, 76753, 76757, 76771, 76777, 76781, + 76801, 76819, 76829, 76831, 76837, 76847, 76871, 76873, 76883, 76907, + 76913, 76919, 76943, 76949, 76961, 76963, 76991, 77003, 77017, 77023, + 77029, 77041, 77047, 77069, 77081, 77093, 77101, 77137, 77141, 77153, + 77167, 77171, 77191, 77201, 77213, 77237, 77239, 77243, 77249, 77261, + 77263, 77267, 
77269, 77279, 77291, 77317, 77323, 77339, 77347, 77351, + 77359, 77369, 77377, 77383, 77417, 77419, 77431, 77447, 77471, 77477, + 77479, 77489, 77491, 77509, 77513, 77521, 77527, 77543, 77549, 77551, + 77557, 77563, 77569, 77573, 77587, 77591, 77611, 77617, 77621, 77641, + 77647, 77659, 77681, 77687, 77689, 77699, 77711, 77713, 77719, 77723, + 77731, 77743, 77747, 77761, 77773, 77783, 77797, 77801, 77813, 77839, + 77849, 77863, 77867, 77893, 77899, 77929, 77933, 77951, 77969, 77977, + 77983, 77999, 78007, 78017, 78031, 78041, 78049, 78059, 78079, 78101, + 78121, 78137, 78139, 78157, 78163, 78167, 78173, 78179, 78191, 78193, + 78203, 78229, 78233, 78241, 78259, 78277, 78283, 78301, 78307, 78311, + 78317, 78341, 78347, 78367, 78401, 78427, 78437, 78439, 78467, 78479, + 78487, 78497, 78509, 78511, 78517, 78539, 78541, 78553, 78569, 78571, + 78577, 78583, 78593, 78607, 78623, 78643, 78649, 78653, 78691, 78697, + 78707, 78713, 78721, 78737, 78779, 78781, 78787, 78791, 78797, 78803, + 78809, 78823, 78839, 78853, 78857, 78877, 78887, 78889, 78893, 78901, + 78919, 78929, 78941, 78977, 78979, 78989, 79031, 79039, 79043, 79063, + 79087, 79103, 79111, 79133, 79139, 79147, 79151, 79153, 79159, 79181, + 79187, 79193, 79201, 79229, 79231, 79241, 79259, 79273, 79279, 79283, + 79301, 79309, 79319, 79333, 79337, 79349, 79357, 79367, 79379, 79393, + 79397, 79399, 79411, 79423, 79427, 79433, 79451, 79481, 79493, 79531, + 79537, 79549, 79559, 79561, 79579, 79589, 79601, 79609, 79613, 79621, + 79627, 79631, 79633, 79657, 79669, 79687, 79691, 79693, 79697, 79699, + 79757, 79769, 79777, 79801, 79811, 79813, 79817, 79823, 79829, 79841, + 79843, 79847, 79861, 79867, 79873, 79889, 79901, 79903, 79907, 79939, + 79943, 79967, 79973, 79979, 79987, 79997, 79999, 80021, 80039, 80051, + 80071, 80077, 80107, 80111, 80141, 80147, 80149, 80153, 80167, 80173, + 80177, 80191, 80207, 80209, 80221, 80231, 80233, 80239, 80251, 80263, + 80273, 80279, 80287, 80309, 80317, 80329, 80341, 80347, 80363, 80369, + 80387, 80407, 80429, 80447, 80449, 80471, 80473, 80489, 80491, 80513, + 80527, 80537, 80557, 80567, 80599, 80603, 80611, 80621, 80627, 80629, + 80651, 80657, 80669, 80671, 80677, 80681, 80683, 80687, 80701, 80713, + 80737, 80747, 80749, 80761, 80777, 80779, 80783, 80789, 80803, 80809, + 80819, 80831, 80833, 80849, 80863, 80897, 80909, 80911, 80917, 80923, + 80929, 80933, 80953, 80963, 80989, 81001, 81013, 81017, 81019, 81023, + 81031, 81041, 81043, 81047, 81049, 81071, 81077, 81083, 81097, 81101, + 81119, 81131, 81157, 81163, 81173, 81181, 81197, 81199, 81203, 81223, + 81233, 81239, 81281, 81283, 81293, 81299, 81307, 81331, 81343, 81349, + 81353, 81359, 81371, 81373, 81401, 81409, 81421, 81439, 81457, 81463, + 81509, 81517, 81527, 81533, 81547, 81551, 81553, 81559, 81563, 81569, + 81611, 81619, 81629, 81637, 81647, 81649, 81667, 81671, 81677, 81689, + 81701, 81703, 81707, 81727, 81737, 81749, 81761, 81769, 81773, 81799, + 81817, 81839, 81847, 81853, 81869, 81883, 81899, 81901, 81919, 81929, + 81931, 81937, 81943, 81953, 81967, 81971, 81973, 82003, 82007, 82009, + 82013, 82021, 82031, 82037, 82039, 82051, 82067, 82073, 82129, 82139, + 82141, 82153, 82163, 82171, 82183, 82189, 82193, 82207, 82217, 82219, + 82223, 82231, 82237, 82241, 82261, 82267, 82279, 82301, 82307, 82339, + 82349, 82351, 82361, 82373, 82387, 82393, 82421, 82457, 82463, 82469, + 82471, 82483, 82487, 82493, 82499, 82507, 82529, 82531, 82549, 82559, + 82561, 82567, 82571, 82591, 82601, 82609, 82613, 82619, 82633, 82651, + 82657, 82699, 82721, 82723, 82727, 
82729, 82757, 82759, 82763, 82781, + 82787, 82793, 82799, 82811, 82813, 82837, 82847, 82883, 82889, 82891, + 82903, 82913, 82939, 82963, 82981, 82997, 83003, 83009, 83023, 83047, + 83059, 83063, 83071, 83077, 83089, 83093, 83101, 83117, 83137, 83177, + 83203, 83207, 83219, 83221, 83227, 83231, 83233, 83243, 83257, 83267, + 83269, 83273, 83299, 83311, 83339, 83341, 83357, 83383, 83389, 83399, + 83401, 83407, 83417, 83423, 83431, 83437, 83443, 83449, 83459, 83471, + 83477, 83497, 83537, 83557, 83561, 83563, 83579, 83591, 83597, 83609, + 83617, 83621, 83639, 83641, 83653, 83663, 83689, 83701, 83717, 83719, + 83737, 83761, 83773, 83777, 83791, 83813, 83833, 83843, 83857, 83869, + 83873, 83891, 83903, 83911, 83921, 83933, 83939, 83969, 83983, 83987, + 84011, 84017, 84047, 84053, 84059, 84061, 84067, 84089, 84121, 84127, + 84131, 84137, 84143, 84163, 84179, 84181, 84191, 84199, 84211, 84221, + 84223, 84229, 84239, 84247, 84263, 84299, 84307, 84313, 84317, 84319, + 84347, 84349, 84377, 84389, 84391, 84401, 84407, 84421, 84431, 84437, + 84443, 84449, 84457, 84463, 84467, 84481, 84499, 84503, 84509, 84521, + 84523, 84533, 84551, 84559, 84589, 84629, 84631, 84649, 84653, 84659, + 84673, 84691, 84697, 84701, 84713, 84719, 84731, 84737, 84751, 84761, + 84787, 84793, 84809, 84811, 84827, 84857, 84859, 84869, 84871, 84913, + 84919, 84947, 84961, 84967, 84977, 84979, 84991, 85009, 85021, 85027, + 85037, 85049, 85061, 85081, 85087, 85091, 85093, 85103, 85109, 85121, + 85133, 85147, 85159, 85193, 85199, 85201, 85213, 85223, 85229, 85237, + 85243, 85247, 85259, 85297, 85303, 85313, 85331, 85333, 85361, 85363, + 85369, 85381, 85411, 85427, 85429, 85439, 85447, 85451, 85453, 85469, + 85487, 85513, 85517, 85523, 85531, 85549, 85571, 85577, 85597, 85601, + 85607, 85619, 85621, 85627, 85639, 85643, 85661, 85667, 85669, 85691, + 85703, 85711, 85717, 85733, 85751, 85781, 85793, 85817, 85819, 85829, + 85831, 85837, 85843, 85847, 85853, 85889, 85903, 85909, 85931, 85933, + 85991, 85999, 86011, 86017, 86027, 86029, 86069, 86077, 86083, 86111, + 86113, 86117, 86131, 86137, 86143, 86161, 86171, 86179, 86183, 86197, + 86201, 86209, 86239, 86243, 86249, 86257, 86263, 86269, 86287, 86291, + 86293, 86297, 86311, 86323, 86341, 86351, 86353, 86357, 86369, 86371, + 86381, 86389, 86399, 86413, 86423, 86441, 86453, 86461, 86467, 86477, + 86491, 86501, 86509, 86531, 86533, 86539, 86561, 86573, 86579, 86587, + 86599, 86627, 86629, 86677, 86689, 86693, 86711, 86719, 86729, 86743, + 86753, 86767, 86771, 86783, 86813, 86837, 86843, 86851, 86857, 86861, + 86869, 86923, 86927, 86929, 86939, 86951, 86959, 86969, 86981, 86993, + 87011, 87013, 87037, 87041, 87049, 87071, 87083, 87103, 87107, 87119, + 87121, 87133, 87149, 87151, 87179, 87181, 87187, 87211, 87221, 87223, + 87251, 87253, 87257, 87277, 87281, 87293, 87299, 87313, 87317, 87323, + 87337, 87359, 87383, 87403, 87407, 87421, 87427, 87433, 87443, 87473, + 87481, 87491, 87509, 87511, 87517, 87523, 87539, 87541, 87547, 87553, + 87557, 87559, 87583, 87587, 87589, 87613, 87623, 87629, 87631, 87641, + 87643, 87649, 87671, 87679, 87683, 87691, 87697, 87701, 87719, 87721, + 87739, 87743, 87751, 87767, 87793, 87797, 87803, 87811, 87833, 87853, + 87869, 87877, 87881, 87887, 87911, 87917, 87931, 87943, 87959, 87961, + 87973, 87977, 87991, 88001, 88003, 88007, 88019, 88037, 88069, 88079, + 88093, 88117, 88129, 88169, 88177, 88211, 88223, 88237, 88241, 88259, + 88261, 88289, 88301, 88321, 88327, 88337, 88339, 88379, 88397, 88411, + 88423, 88427, 88463, 88469, 88471, 88493, 88499, 88513, 
88523, 88547, + 88589, 88591, 88607, 88609, 88643, 88651, 88657, 88661, 88663, 88667, + 88681, 88721, 88729, 88741, 88747, 88771, 88789, 88793, 88799, 88801, + 88807, 88811, 88813, 88817, 88819, 88843, 88853, 88861, 88867, 88873, + 88883, 88897, 88903, 88919, 88937, 88951, 88969, 88993, 88997, 89003, + 89009, 89017, 89021, 89041, 89051, 89057, 89069, 89071, 89083, 89087, + 89101, 89107, 89113, 89119, 89123, 89137, 89153, 89189, 89203, 89209, + 89213, 89227, 89231, 89237, 89261, 89269, 89273, 89293, 89303, 89317, + 89329, 89363, 89371, 89381, 89387, 89393, 89399, 89413, 89417, 89431, + 89443, 89449, 89459, 89477, 89491, 89501, 89513, 89519, 89521, 89527, + 89533, 89561, 89563, 89567, 89591, 89597, 89599, 89603, 89611, 89627, + 89633, 89653, 89657, 89659, 89669, 89671, 89681, 89689, 89753, 89759, + 89767, 89779, 89783, 89797, 89809, 89819, 89821, 89833, 89839, 89849, + 89867, 89891, 89897, 89899, 89909, 89917, 89923, 89939, 89959, 89963, + 89977, 89983, 89989, 90001, 90007, 90011, 90017, 90019, 90023, 90031, + 90053, 90059, 90067, 90071, 90073, 90089, 90107, 90121, 90127, 90149, + 90163, 90173, 90187, 90191, 90197, 90199, 90203, 90217, 90227, 90239, + 90247, 90263, 90271, 90281, 90289, 90313, 90353, 90359, 90371, 90373, + 90379, 90397, 90401, 90403, 90407, 90437, 90439, 90469, 90473, 90481, + 90499, 90511, 90523, 90527, 90529, 90533, 90547, 90583, 90599, 90617, + 90619, 90631, 90641, 90647, 90659, 90677, 90679, 90697, 90703, 90709, + 90731, 90749, 90787, 90793, 90803, 90821, 90823, 90833, 90841, 90847, + 90863, 90887, 90901, 90907, 90911, 90917, 90931, 90947, 90971, 90977, + 90989, 90997, 91009, 91019, 91033, 91079, 91081, 91097, 91099, 91121, + 91127, 91129, 91139, 91141, 91151, 91153, 91159, 91163, 91183, 91193, + 91199, 91229, 91237, 91243, 91249, 91253, 91283, 91291, 91297, 91303, + 91309, 91331, 91367, 91369, 91373, 91381, 91387, 91393, 91397, 91411, + 91423, 91433, 91453, 91457, 91459, 91463, 91493, 91499, 91513, 91529, + 91541, 91571, 91573, 91577, 91583, 91591, 91621, 91631, 91639, 91673, + 91691, 91703, 91711, 91733, 91753, 91757, 91771, 91781, 91801, 91807, + 91811, 91813, 91823, 91837, 91841, 91867, 91873, 91909, 91921, 91939, + 91943, 91951, 91957, 91961, 91967, 91969, 91997, 92003, 92009, 92033, + 92041, 92051, 92077, 92083, 92107, 92111, 92119, 92143, 92153, 92173, + 92177, 92179, 92189, 92203, 92219, 92221, 92227, 92233, 92237, 92243, + 92251, 92269, 92297, 92311, 92317, 92333, 92347, 92353, 92357, 92363, + 92369, 92377, 92381, 92383, 92387, 92399, 92401, 92413, 92419, 92431, + 92459, 92461, 92467, 92479, 92489, 92503, 92507, 92551, 92557, 92567, + 92569, 92581, 92593, 92623, 92627, 92639, 92641, 92647, 92657, 92669, + 92671, 92681, 92683, 92693, 92699, 92707, 92717, 92723, 92737, 92753, + 92761, 92767, 92779, 92789, 92791, 92801, 92809, 92821, 92831, 92849, + 92857, 92861, 92863, 92867, 92893, 92899, 92921, 92927, 92941, 92951, + 92957, 92959, 92987, 92993, 93001, 93047, 93053, 93059, 93077, 93083, + 93089, 93097, 93103, 93113, 93131, 93133, 93139, 93151, 93169, 93179, + 93187, 93199, 93229, 93239, 93241, 93251, 93253, 93257, 93263, 93281, + 93283, 93287, 93307, 93319, 93323, 93329, 93337, 93371, 93377, 93383, + 93407, 93419, 93427, 93463, 93479, 93481, 93487, 93491, 93493, 93497, + 93503, 93523, 93529, 93553, 93557, 93559, 93563, 93581, 93601, 93607, + 93629, 93637, 93683, 93701, 93703, 93719, 93739, 93761, 93763, 93787, + 93809, 93811, 93827, 93851, 93871, 93887, 93889, 93893, 93901, 93911, + 93913, 93923, 93937, 93941, 93949, 93967, 93971, 93979, 93983, 93997, + 94007, 
94009, 94033, 94049, 94057, 94063, 94079, 94099, 94109, 94111, + 94117, 94121, 94151, 94153, 94169, 94201, 94207, 94219, 94229, 94253, + 94261, 94273, 94291, 94307, 94309, 94321, 94327, 94331, 94343, 94349, + 94351, 94379, 94397, 94399, 94421, 94427, 94433, 94439, 94441, 94447, + 94463, 94477, 94483, 94513, 94529, 94531, 94541, 94543, 94547, 94559, + 94561, 94573, 94583, 94597, 94603, 94613, 94621, 94649, 94651, 94687, + 94693, 94709, 94723, 94727, 94747, 94771, 94777, 94781, 94789, 94793, + 94811, 94819, 94823, 94837, 94841, 94847, 94849, 94873, 94889, 94903, + 94907, 94933, 94949, 94951, 94961, 94993, 94999, 95003, 95009, 95021, + 95027, 95063, 95071, 95083, 95087, 95089, 95093, 95101, 95107, 95111, + 95131, 95143, 95153, 95177, 95189, 95191, 95203, 95213, 95219, 95231, + 95233, 95239, 95257, 95261, 95267, 95273, 95279, 95287, 95311, 95317, + 95327, 95339, 95369, 95383, 95393, 95401, 95413, 95419, 95429, 95441, + 95443, 95461, 95467, 95471, 95479, 95483, 95507, 95527, 95531, 95539, + 95549, 95561, 95569, 95581, 95597, 95603, 95617, 95621, 95629, 95633, + 95651, 95701, 95707, 95713, 95717, 95723, 95731, 95737, 95747, 95773, + 95783, 95789, 95791, 95801, 95803, 95813, 95819, 95857, 95869, 95873, + 95881, 95891, 95911, 95917, 95923, 95929, 95947, 95957, 95959, 95971, + 95987, 95989, 96001, 96013, 96017, 96043, 96053, 96059, 96079, 96097, + 96137, 96149, 96157, 96167, 96179, 96181, 96199, 96211, 96221, 96223, + 96233, 96259, 96263, 96269, 96281, 96289, 96293, 96323, 96329, 96331, + 96337, 96353, 96377, 96401, 96419, 96431, 96443, 96451, 96457, 96461, + 96469, 96479, 96487, 96493, 96497, 96517, 96527, 96553, 96557, 96581, + 96587, 96589, 96601, 96643, 96661, 96667, 96671, 96697, 96703, 96731, + 96737, 96739, 96749, 96757, 96763, 96769, 96779, 96787, 96797, 96799, + 96821, 96823, 96827, 96847, 96851, 96857, 96893, 96907, 96911, 96931, + 96953, 96959, 96973, 96979, 96989, 96997, 97001, 97003, 97007, 97021, + 97039, 97073, 97081, 97103, 97117, 97127, 97151, 97157, 97159, 97169, + 97171, 97177, 97187, 97213, 97231, 97241, 97259, 97283, 97301, 97303, + 97327, 97367, 97369, 97373, 97379, 97381, 97387, 97397, 97423, 97429, + 97441, 97453, 97459, 97463, 97499, 97501, 97511, 97523, 97547, 97549, + 97553, 97561, 97571, 97577, 97579, 97583, 97607, 97609, 97613, 97649, + 97651, 97673, 97687, 97711, 97729, 97771, 97777, 97787, 97789, 97813, + 97829, 97841, 97843, 97847, 97849, 97859, 97861, 97871, 97879, 97883, + 97919, 97927, 97931, 97943, 97961, 97967, 97973, 97987, 98009, 98011, + 98017, 98041, 98047, 98057, 98081, 98101, 98123, 98129, 98143, 98179, + 98207, 98213, 98221, 98227, 98251, 98257, 98269, 98297, 98299, 98317, + 98321, 98323, 98327, 98347, 98369, 98377, 98387, 98389, 98407, 98411, + 98419, 98429, 98443, 98453, 98459, 98467, 98473, 98479, 98491, 98507, + 98519, 98533, 98543, 98561, 98563, 98573, 98597, 98621, 98627, 98639, + 98641, 98663, 98669, 98689, 98711, 98713, 98717, 98729, 98731, 98737, + 98773, 98779, 98801, 98807, 98809, 98837, 98849, 98867, 98869, 98873, + 98887, 98893, 98897, 98899, 98909, 98911, 98927, 98929, 98939, 98947, + 98953, 98963, 98981, 98993, 98999, 99013, 99017, 99023, 99041, 99053, + 99079, 99083, 99089, 99103, 99109, 99119, 99131, 99133, 99137, 99139, + 99149, 99173, 99181, 99191, 99223, 99233, 99241, 99251, 99257, 99259, + 99277, 99289, 99317, 99347, 99349, 99367, 99371, 99377, 99391, 99397, + 99401, 99409, 99431, 99439, 99469, 99487, 99497, 99523, 99527, 99529, + 99551, 99559, 99563, 99571, 99577, 99581, 99607, 99611, 99623, 99643, + 99661, 99667, 99679, 99689, 
99707, 99709, 99713, 99719, 99721, 99733, + 99761, 99767, 99787, 99793, 99809, 99817, 99823, 99829, 99833, 99839, + 99859, 99871, 99877, 99881, 99901, 99907, 99923, 99929, 99961, 99971, + 99989, 99991, 100003, 100019, 100043, 100049, 100057, 100069, 100103, 100109, +100129, 100151, 100153, 100169, 100183, 100189, 100193, 100207, 100213, 100237, +100267, 100271, 100279, 100291, 100297, 100313, 100333, 100343, 100357, 100361, +100363, 100379, 100391, 100393, 100403, 100411, 100417, 100447, 100459, 100469, +100483, 100493, 100501, 100511, 100517, 100519, 100523, 100537, 100547, 100549, +100559, 100591, 100609, 100613, 100621, 100649, 100669, 100673, 100693, 100699, +100703, 100733, 100741, 100747, 100769, 100787, 100799, 100801, 100811, 100823, +100829, 100847, 100853, 100907, 100913, 100927, 100931, 100937, 100943, 100957, +100981, 100987, 100999, 101009, 101021, 101027, 101051, 101063, 101081, 101089, +101107, 101111, 101113, 101117, 101119, 101141, 101149, 101159, 101161, 101173, +101183, 101197, 101203, 101207, 101209, 101221, 101267, 101273, 101279, 101281, +101287, 101293, 101323, 101333, 101341, 101347, 101359, 101363, 101377, 101383, +101399, 101411, 101419, 101429, 101449, 101467, 101477, 101483, 101489, 101501, +101503, 101513, 101527, 101531, 101533, 101537, 101561, 101573, 101581, 101599, +101603, 101611, 101627, 101641, 101653, 101663, 101681, 101693, 101701, 101719, +101723, 101737, 101741, 101747, 101749, 101771, 101789, 101797, 101807, 101833, +101837, 101839, 101863, 101869, 101873, 101879, 101891, 101917, 101921, 101929, +101939, 101957, 101963, 101977, 101987, 101999, 102001, 102013, 102019, 102023, +102031, 102043, 102059, 102061, 102071, 102077, 102079, 102101, 102103, 102107, +102121, 102139, 102149, 102161, 102181, 102191, 102197, 102199, 102203, 102217, +102229, 102233, 102241, 102251, 102253, 102259, 102293, 102299, 102301, 102317, +102329, 102337, 102359, 102367, 102397, 102407, 102409, 102433, 102437, 102451, +102461, 102481, 102497, 102499, 102503, 102523, 102533, 102539, 102547, 102551, +102559, 102563, 102587, 102593, 102607, 102611, 102643, 102647, 102653, 102667, +102673, 102677, 102679, 102701, 102761, 102763, 102769, 102793, 102797, 102811, +102829, 102841, 102859, 102871, 102877, 102881, 102911, 102913, 102929, 102931, +102953, 102967, 102983, 103001, 103007, 103043, 103049, 103067, 103069, 103079, +103087, 103091, 103093, 103099, 103123, 103141, 103171, 103177, 103183, 103217, +103231, 103237, 103289, 103291, 103307, 103319, 103333, 103349, 103357, 103387, +103391, 103393, 103399, 103409, 103421, 103423, 103451, 103457, 103471, 103483, +103511, 103529, 103549, 103553, 103561, 103567, 103573, 103577, 103583, 103591, +103613, 103619, 103643, 103651, 103657, 103669, 103681, 103687, 103699, 103703, +103723, 103769, 103787, 103801, 103811, 103813, 103837, 103841, 103843, 103867, +103889, 103903, 103913, 103919, 103951, 103963, 103967, 103969, 103979, 103981, +103991, 103993, 103997, 104003, 104009, 104021, 104033, 104047, 104053, 104059, +104087, 104089, 104107, 104113, 104119, 104123, 104147, 104149, 104161, 104173, +104179, 104183, 104207, 104231, 104233, 104239, 104243, 104281, 104287, 104297, +104309, 104311, 104323, 104327, 104347, 104369, 104381, 104383, 104393, 104399, +104417, 104459, 104471, 104473, 104479, 104491, 104513, 104527, 104537, 104543, +104549, 104551, 104561, 104579, 104593, 104597, 104623, 104639, 104651, 104659, +104677, 104681, 104683, 104693, 104701, 104707, 104711, 104717, 104723, 104729, +) diff --git 
a/Lib/site-packages/Crypto/Util/py3compat.py b/Lib/site-packages/Crypto/Util/py3compat.py new file mode 100644 index 0000000..3244e7b --- /dev/null +++ b/Lib/site-packages/Crypto/Util/py3compat.py @@ -0,0 +1,107 @@ +# -*- coding: utf-8 -*- +# +# Util/py3compat.py : Compatibility code for handling Py3k / Python 2.x +# +# Written in 2010 by Thorsten Behrens +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Compatibility code for handling string/bytes changes from Python 2.x to Py3k + +In Python 2.x, strings (of type ''str'') contain binary data, including encoded +Unicode text (e.g. UTF-8). The separate type ''unicode'' holds Unicode text. +Unicode literals are specified via the u'...' prefix. Indexing or slicing +either type always produces a string of the same type as the original. +Data read from a file is always of '''str'' type. + +In Python 3.x, strings (type ''str'') may only contain Unicode text. The u'...' +prefix and the ''unicode'' type are now redundant. A new type (called +''bytes'') has to be used for binary data (including any particular +''encoding'' of a string). The b'...' prefix allows one to specify a binary +literal. Indexing or slicing a string produces another string. Slicing a byte +string produces another byte string, but the indexing operation produces an +integer. Data read from a file is of '''str'' type if the file was opened in +text mode, or of ''bytes'' type otherwise. + +Since PyCrypto aims at supporting both Python 2.x and 3.x, the following helper +functions are used to keep the rest of the library as independent as possible +from the actual Python version. + +In general, the code should always deal with binary strings, and use integers +instead of 1-byte character strings. + +b(s) + Take a text string literal (with no prefix or with u'...' prefix) and + make a byte string. +bchr(c) + Take an integer and make a 1-character byte string. +bord(c) + Take the result of indexing on a byte string and make an integer. +tobytes(s) + Take a text string, a byte string, or a sequence of character taken from + a byte string and make a byte string. 
+""" + +__revision__ = "$Id$" + +import sys + +if sys.version_info[0] == 2: + def b(s): + return s + def bchr(s): + return chr(s) + def bstr(s): + return str(s) + def bord(s): + return ord(s) + if sys.version_info[1] == 1: + def tobytes(s): + try: + return s.encode('latin-1') + except: + return ''.join(s) + else: + def tobytes(s): + if isinstance(s, str): + return s.encode("latin-1") + else: + return ''.join(s) +else: + def b(s): + return s.encode("latin-1") # utf-8 would cause some side-effects we don't want + def bchr(s): + return bytes([s]) + def bstr(s): + if isinstance(s,str): + return bytes(s,"latin-1") + else: + return bytes(s) + def bord(s): + return s + def tobytes(s): + if isinstance(s,bytes): + return s + else: + if isinstance(s,str): + return s.encode("latin-1") + else: + return bytes(s) + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/Util/randpool.py b/Lib/site-packages/Crypto/Util/randpool.py new file mode 100644 index 0000000..8b5a0b7 --- /dev/null +++ b/Lib/site-packages/Crypto/Util/randpool.py @@ -0,0 +1,82 @@ +# +# randpool.py : Cryptographically strong random number generation +# +# Part of the Python Cryptography Toolkit +# +# Written by Andrew M. Kuchling, Mark Moraes, and others +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== +# + +__revision__ = "$Id$" + +from Crypto.pct_warnings import RandomPool_DeprecationWarning +import Crypto.Random +import warnings + +class RandomPool: + """Deprecated. Use Random.new() instead. + + See http://www.pycrypto.org/randpool-broken + """ + def __init__(self, numbytes = 160, cipher=None, hash=None, file=None): + warnings.warn("This application uses RandomPool, which is BROKEN in older releases. 
See http://www.pycrypto.org/randpool-broken", + RandomPool_DeprecationWarning) + self.__rng = Crypto.Random.new() + self.bytes = numbytes + self.bits = self.bytes * 8 + self.entropy = self.bits + + def get_bytes(self, N): + return self.__rng.read(N) + + def _updateEntropyEstimate(self, nbits): + self.entropy += nbits + if self.entropy < 0: + self.entropy = 0 + elif self.entropy > self.bits: + self.entropy = self.bits + + def _randomize(self, N=0, devname="/dev/urandom"): + """Dummy _randomize() function""" + self.__rng.flush() + + def randomize(self, N=0): + """Dummy randomize() function""" + self.__rng.flush() + + def stir(self, s=''): + """Dummy stir() function""" + self.__rng.flush() + + def stir_n(self, N=3): + """Dummy stir_n() function""" + self.__rng.flush() + + def add_event(self, s=''): + """Dummy add_event() function""" + self.__rng.flush() + + def getBytes(self, N): + """Dummy getBytes() function""" + return self.get_bytes(N) + + def addEvent(self, event, s=""): + """Dummy addEvent() function""" + return self.add_event() diff --git a/Lib/site-packages/Crypto/Util/winrandom.py b/Lib/site-packages/Crypto/Util/winrandom.py new file mode 100644 index 0000000..0242815 --- /dev/null +++ b/Lib/site-packages/Crypto/Util/winrandom.py @@ -0,0 +1,28 @@ +# +# Util/winrandom.py : Stub for Crypto.Random.OSRNG.winrandom +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +__revision__ = "$Id$" + +from Crypto.Random.OSRNG.winrandom import * + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/Crypto/__init__.py b/Lib/site-packages/Crypto/__init__.py new file mode 100644 index 0000000..c27402e --- /dev/null +++ b/Lib/site-packages/Crypto/__init__.py @@ -0,0 +1,51 @@ +# -*- coding: utf-8 -*- +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +"""Python Cryptography Toolkit + +A collection of cryptographic modules implementing various algorithms +and protocols. + +Subpackages: + +Crypto.Cipher + Secret-key (AES, DES, ARC4) and public-key encryption (RSA PKCS#1) algorithms +Crypto.Hash + Hashing algorithms (MD5, SHA, HMAC) +Crypto.Protocol + Cryptographic protocols (Chaffing, all-or-nothing transform, key derivation + functions). This package does not contain any network protocols. +Crypto.PublicKey + Public-key encryption and signature algorithms (RSA, DSA) +Crypto.Signature + Public-key signature algorithms (RSA PKCS#1) +Crypto.Util + Various useful modules and functions (long-to-string conversion, random number + generation, number theoretic functions) +""" + +__all__ = ['Cipher', 'Hash', 'Protocol', 'PublicKey', 'Util', 'Signature'] + +__version__ = '2.6.1' # See also below and setup.py +__revision__ = "$Id$" + +# New software should look at this instead of at __version__ above. +version_info = (2, 6, 1, 'final', 0) # See also above and setup.py + diff --git a/Lib/site-packages/Crypto/pct_warnings.py b/Lib/site-packages/Crypto/pct_warnings.py new file mode 100644 index 0000000..9b4361e --- /dev/null +++ b/Lib/site-packages/Crypto/pct_warnings.py @@ -0,0 +1,60 @@ +# -*- coding: ascii -*- +# +# pct_warnings.py : PyCrypto warnings file +# +# Written in 2008 by Dwayne C. Litzenberger +# +# =================================================================== +# The contents of this file are dedicated to the public domain. To +# the extent that dedication to the public domain is not available, +# everyone is granted a worldwide, perpetual, royalty-free, +# non-exclusive license to exercise all rights associated with the +# contents of this file for any purpose whatsoever. +# No rights are reserved. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS +# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN +# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. +# =================================================================== + +# +# Base classes. All our warnings inherit from one of these in order to allow +# the user to specifically filter them. 
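# --- Editor's usage sketch (not part of the patch): because every concrete
# warning class defined just below also derives from CryptoWarning, a caller
# can target the whole PyCrypto warning family with one filter, or a single
# narrower class such as RandomPool_DeprecationWarning. Assumes only that the
# vendored Crypto package is importable.
import warnings
from Crypto.pct_warnings import CryptoWarning

# Silence every PyCrypto-specific warning at once without touching warnings
# raised by other libraries.
warnings.simplefilter("ignore", category=CryptoWarning)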
+# + +class CryptoWarning(Warning): + """Base class for PyCrypto warnings""" + +class CryptoDeprecationWarning(DeprecationWarning, CryptoWarning): + """Base PyCrypto DeprecationWarning class""" + +class CryptoRuntimeWarning(RuntimeWarning, CryptoWarning): + """Base PyCrypto RuntimeWarning class""" + +# +# Warnings that we might actually use +# + +class RandomPool_DeprecationWarning(CryptoDeprecationWarning): + """Issued when Crypto.Util.randpool.RandomPool is instantiated.""" + +class ClockRewindWarning(CryptoRuntimeWarning): + """Warning for when the system clock moves backwards.""" + +class GetRandomNumber_DeprecationWarning(CryptoDeprecationWarning): + """Issued when Crypto.Util.number.getRandomNumber is invoked.""" + +class PowmInsecureWarning(CryptoRuntimeWarning): + """Warning for when _fastmath is built without mpz_powm_sec""" + +# By default, we want this warning to be shown every time we compensate for +# clock rewinding. +import warnings as _warnings +_warnings.filterwarnings('always', category=ClockRewindWarning, append=1) + +# vim:set ts=4 sw=4 sts=4 expandtab: diff --git a/Lib/site-packages/OpenSSL/SSL.py b/Lib/site-packages/OpenSSL/SSL.py new file mode 100644 index 0000000..d0cc933 --- /dev/null +++ b/Lib/site-packages/OpenSSL/SSL.py @@ -0,0 +1,1948 @@ +from sys import platform +from functools import wraps, partial +from itertools import count, chain +from weakref import WeakValueDictionary +from errno import errorcode + +from six import text_type as _text_type +from six import binary_type as _binary_type +from six import integer_types as integer_types +from six import int2byte, indexbytes + +from OpenSSL._util import ( + ffi as _ffi, + lib as _lib, + exception_from_error_queue as _exception_from_error_queue, + native as _native, + text_to_bytes_and_warn as _text_to_bytes_and_warn, + path_string as _path_string, + UNSPECIFIED as _UNSPECIFIED, +) + +from OpenSSL.crypto import ( + FILETYPE_PEM, _PassphraseHelper, PKey, X509Name, X509, X509Store) + +try: + _memoryview = memoryview +except NameError: + class _memoryview(object): + pass + +try: + _buffer = buffer +except NameError: + class _buffer(object): + pass + +OPENSSL_VERSION_NUMBER = _lib.OPENSSL_VERSION_NUMBER +SSLEAY_VERSION = _lib.SSLEAY_VERSION +SSLEAY_CFLAGS = _lib.SSLEAY_CFLAGS +SSLEAY_PLATFORM = _lib.SSLEAY_PLATFORM +SSLEAY_DIR = _lib.SSLEAY_DIR +SSLEAY_BUILT_ON = _lib.SSLEAY_BUILT_ON + +SENT_SHUTDOWN = _lib.SSL_SENT_SHUTDOWN +RECEIVED_SHUTDOWN = _lib.SSL_RECEIVED_SHUTDOWN + +SSLv2_METHOD = 1 +SSLv3_METHOD = 2 +SSLv23_METHOD = 3 +TLSv1_METHOD = 4 +TLSv1_1_METHOD = 5 +TLSv1_2_METHOD = 6 + +OP_NO_SSLv2 = _lib.SSL_OP_NO_SSLv2 +OP_NO_SSLv3 = _lib.SSL_OP_NO_SSLv3 +OP_NO_TLSv1 = _lib.SSL_OP_NO_TLSv1 + +OP_NO_TLSv1_1 = getattr(_lib, "SSL_OP_NO_TLSv1_1", 0) +OP_NO_TLSv1_2 = getattr(_lib, "SSL_OP_NO_TLSv1_2", 0) + +try: + MODE_RELEASE_BUFFERS = _lib.SSL_MODE_RELEASE_BUFFERS +except AttributeError: + pass + +OP_SINGLE_DH_USE = _lib.SSL_OP_SINGLE_DH_USE +OP_EPHEMERAL_RSA = _lib.SSL_OP_EPHEMERAL_RSA +OP_MICROSOFT_SESS_ID_BUG = _lib.SSL_OP_MICROSOFT_SESS_ID_BUG +OP_NETSCAPE_CHALLENGE_BUG = _lib.SSL_OP_NETSCAPE_CHALLENGE_BUG +OP_NETSCAPE_REUSE_CIPHER_CHANGE_BUG = _lib.SSL_OP_NETSCAPE_REUSE_CIPHER_CHANGE_BUG +OP_SSLREF2_REUSE_CERT_TYPE_BUG = _lib.SSL_OP_SSLREF2_REUSE_CERT_TYPE_BUG +OP_MICROSOFT_BIG_SSLV3_BUFFER = _lib.SSL_OP_MICROSOFT_BIG_SSLV3_BUFFER +try: + OP_MSIE_SSLV2_RSA_PADDING = _lib.SSL_OP_MSIE_SSLV2_RSA_PADDING +except AttributeError: + pass +OP_SSLEAY_080_CLIENT_DH_BUG = _lib.SSL_OP_SSLEAY_080_CLIENT_DH_BUG 
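# --- Editor's usage sketch (not part of the patch): the OP_* names collected
# around this point are plain integer bitmasks; they are meant to be OR-ed
# together and handed to Context.set_options(), defined later in this module.
# Assumes only that the vendored OpenSSL package is importable.
from OpenSSL import SSL

ctx = SSL.Context(SSL.SSLv23_METHOD)                 # negotiate the best mutually supported version
ctx.set_options(SSL.OP_NO_SSLv2 | SSL.OP_NO_SSLv3)   # but refuse the broken legacy protocols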
+OP_TLS_D5_BUG = _lib.SSL_OP_TLS_D5_BUG +OP_TLS_BLOCK_PADDING_BUG = _lib.SSL_OP_TLS_BLOCK_PADDING_BUG +OP_DONT_INSERT_EMPTY_FRAGMENTS = _lib.SSL_OP_DONT_INSERT_EMPTY_FRAGMENTS +OP_CIPHER_SERVER_PREFERENCE = _lib.SSL_OP_CIPHER_SERVER_PREFERENCE +OP_TLS_ROLLBACK_BUG = _lib.SSL_OP_TLS_ROLLBACK_BUG +OP_PKCS1_CHECK_1 = _lib.SSL_OP_PKCS1_CHECK_1 +OP_PKCS1_CHECK_2 = _lib.SSL_OP_PKCS1_CHECK_2 +OP_NETSCAPE_CA_DN_BUG = _lib.SSL_OP_NETSCAPE_CA_DN_BUG +OP_NETSCAPE_DEMO_CIPHER_CHANGE_BUG= _lib.SSL_OP_NETSCAPE_DEMO_CIPHER_CHANGE_BUG +try: + OP_NO_COMPRESSION = _lib.SSL_OP_NO_COMPRESSION +except AttributeError: + pass + +OP_NO_QUERY_MTU = _lib.SSL_OP_NO_QUERY_MTU +OP_COOKIE_EXCHANGE = _lib.SSL_OP_COOKIE_EXCHANGE +try: + OP_NO_TICKET = _lib.SSL_OP_NO_TICKET +except AttributeError: + pass + +OP_ALL = _lib.SSL_OP_ALL + +VERIFY_PEER = _lib.SSL_VERIFY_PEER +VERIFY_FAIL_IF_NO_PEER_CERT = _lib.SSL_VERIFY_FAIL_IF_NO_PEER_CERT +VERIFY_CLIENT_ONCE = _lib.SSL_VERIFY_CLIENT_ONCE +VERIFY_NONE = _lib.SSL_VERIFY_NONE + +SESS_CACHE_OFF = _lib.SSL_SESS_CACHE_OFF +SESS_CACHE_CLIENT = _lib.SSL_SESS_CACHE_CLIENT +SESS_CACHE_SERVER = _lib.SSL_SESS_CACHE_SERVER +SESS_CACHE_BOTH = _lib.SSL_SESS_CACHE_BOTH +SESS_CACHE_NO_AUTO_CLEAR = _lib.SSL_SESS_CACHE_NO_AUTO_CLEAR +SESS_CACHE_NO_INTERNAL_LOOKUP = _lib.SSL_SESS_CACHE_NO_INTERNAL_LOOKUP +SESS_CACHE_NO_INTERNAL_STORE = _lib.SSL_SESS_CACHE_NO_INTERNAL_STORE +SESS_CACHE_NO_INTERNAL = _lib.SSL_SESS_CACHE_NO_INTERNAL + +SSL_ST_CONNECT = _lib.SSL_ST_CONNECT +SSL_ST_ACCEPT = _lib.SSL_ST_ACCEPT +SSL_ST_MASK = _lib.SSL_ST_MASK +SSL_ST_INIT = _lib.SSL_ST_INIT +SSL_ST_BEFORE = _lib.SSL_ST_BEFORE +SSL_ST_OK = _lib.SSL_ST_OK +SSL_ST_RENEGOTIATE = _lib.SSL_ST_RENEGOTIATE + +SSL_CB_LOOP = _lib.SSL_CB_LOOP +SSL_CB_EXIT = _lib.SSL_CB_EXIT +SSL_CB_READ = _lib.SSL_CB_READ +SSL_CB_WRITE = _lib.SSL_CB_WRITE +SSL_CB_ALERT = _lib.SSL_CB_ALERT +SSL_CB_READ_ALERT = _lib.SSL_CB_READ_ALERT +SSL_CB_WRITE_ALERT = _lib.SSL_CB_WRITE_ALERT +SSL_CB_ACCEPT_LOOP = _lib.SSL_CB_ACCEPT_LOOP +SSL_CB_ACCEPT_EXIT = _lib.SSL_CB_ACCEPT_EXIT +SSL_CB_CONNECT_LOOP = _lib.SSL_CB_CONNECT_LOOP +SSL_CB_CONNECT_EXIT = _lib.SSL_CB_CONNECT_EXIT +SSL_CB_HANDSHAKE_START = _lib.SSL_CB_HANDSHAKE_START +SSL_CB_HANDSHAKE_DONE = _lib.SSL_CB_HANDSHAKE_DONE + +class Error(Exception): + """ + An error occurred in an `OpenSSL.SSL` API. + """ + + + +_raise_current_error = partial(_exception_from_error_queue, Error) + + +class WantReadError(Error): + pass + + + +class WantWriteError(Error): + pass + + + +class WantX509LookupError(Error): + pass + + + +class ZeroReturnError(Error): + pass + + + +class SysCallError(Error): + pass + + +class _CallbackExceptionHelper(object): + """ + A base class for wrapper classes that allow for intelligent exception + handling in OpenSSL callbacks. + + :ivar list _problems: Any exceptions that occurred while executing in a + context where they could not be raised in the normal way. Typically + this is because OpenSSL has called into some Python code and requires a + return value. The exceptions are saved to be raised later when it is + possible to do so. + """ + def __init__(self): + self._problems = [] + + + def raise_if_problem(self): + """ + Raise an exception from the OpenSSL error queue or that was previously + captured whe running a callback. + """ + if self._problems: + try: + _raise_current_error() + except Error: + pass + raise self._problems.pop(0) + + +class _VerifyHelper(_CallbackExceptionHelper): + """ + Wrap a callback such that it can be used as a certificate verification + callback. 
+ """ + def __init__(self, callback): + _CallbackExceptionHelper.__init__(self) + + @wraps(callback) + def wrapper(ok, store_ctx): + cert = X509.__new__(X509) + cert._x509 = _lib.X509_STORE_CTX_get_current_cert(store_ctx) + error_number = _lib.X509_STORE_CTX_get_error(store_ctx) + error_depth = _lib.X509_STORE_CTX_get_error_depth(store_ctx) + + index = _lib.SSL_get_ex_data_X509_STORE_CTX_idx() + ssl = _lib.X509_STORE_CTX_get_ex_data(store_ctx, index) + connection = Connection._reverse_mapping[ssl] + + try: + result = callback(connection, cert, error_number, error_depth, ok) + except Exception as e: + self._problems.append(e) + return 0 + else: + if result: + _lib.X509_STORE_CTX_set_error(store_ctx, _lib.X509_V_OK) + return 1 + else: + return 0 + + self.callback = _ffi.callback( + "int (*)(int, X509_STORE_CTX *)", wrapper) + + +class _NpnAdvertiseHelper(_CallbackExceptionHelper): + """ + Wrap a callback such that it can be used as an NPN advertisement callback. + """ + def __init__(self, callback): + _CallbackExceptionHelper.__init__(self) + + @wraps(callback) + def wrapper(ssl, out, outlen, arg): + try: + conn = Connection._reverse_mapping[ssl] + protos = callback(conn) + + # Join the protocols into a Python bytestring, length-prefixing + # each element. + protostr = b''.join( + chain.from_iterable((int2byte(len(p)), p) for p in protos) + ) + + # Save our callback arguments on the connection object. This is + # done to make sure that they don't get freed before OpenSSL + # uses them. Then, return them appropriately in the output + # parameters. + conn._npn_advertise_callback_args = [ + _ffi.new("unsigned int *", len(protostr)), + _ffi.new("unsigned char[]", protostr), + ] + outlen[0] = conn._npn_advertise_callback_args[0][0] + out[0] = conn._npn_advertise_callback_args[1] + return 0 + except Exception as e: + self._problems.append(e) + return 2 # SSL_TLSEXT_ERR_ALERT_FATAL + + self.callback = _ffi.callback( + "int (*)(SSL *, const unsigned char **, unsigned int *, void *)", + wrapper + ) + + +class _NpnSelectHelper(_CallbackExceptionHelper): + """ + Wrap a callback such that it can be used as an NPN selection callback. + """ + def __init__(self, callback): + _CallbackExceptionHelper.__init__(self) + + @wraps(callback) + def wrapper(ssl, out, outlen, in_, inlen, arg): + try: + conn = Connection._reverse_mapping[ssl] + + # The string passed to us is actually made up of multiple + # length-prefixed bytestrings. We need to split that into a + # list. + instr = _ffi.buffer(in_, inlen)[:] + protolist = [] + while instr: + l = indexbytes(instr, 0) + proto = instr[1:l+1] + protolist.append(proto) + instr = instr[l+1:] + + # Call the callback + outstr = callback(conn, protolist) + + # Save our callback arguments on the connection object. This is + # done to make sure that they don't get freed before OpenSSL + # uses them. Then, return them appropriately in the output + # parameters. + conn._npn_select_callback_args = [ + _ffi.new("unsigned char *", len(outstr)), + _ffi.new("unsigned char[]", outstr), + ] + outlen[0] = conn._npn_select_callback_args[0][0] + out[0] = conn._npn_select_callback_args[1] + return 0 + except Exception as e: + self._problems.append(e) + return 2 # SSL_TLSEXT_ERR_ALERT_FATAL + + self.callback = _ffi.callback( + "int (*)(SSL *, unsigned char **, unsigned char *, " + "const unsigned char *, unsigned int, void *)", + wrapper + ) + + +class _ALPNSelectHelper(_CallbackExceptionHelper): + """ + Wrap a callback such that it can be used as an ALPN selection callback. 
+ """ + def __init__(self, callback): + _CallbackExceptionHelper.__init__(self) + + @wraps(callback) + def wrapper(ssl, out, outlen, in_, inlen, arg): + try: + conn = Connection._reverse_mapping[ssl] + + # The string passed to us is made up of multiple + # length-prefixed bytestrings. We need to split that into a + # list. + instr = _ffi.buffer(in_, inlen)[:] + protolist = [] + while instr: + encoded_len = indexbytes(instr, 0) + proto = instr[1:encoded_len + 1] + protolist.append(proto) + instr = instr[encoded_len + 1:] + + # Call the callback + outstr = callback(conn, protolist) + + if not isinstance(outstr, _binary_type): + raise TypeError("ALPN callback must return a bytestring.") + + # Save our callback arguments on the connection object to make + # sure that they don't get freed before OpenSSL can use them. + # Then, return them in the appropriate output parameters. + conn._alpn_select_callback_args = [ + _ffi.new("unsigned char *", len(outstr)), + _ffi.new("unsigned char[]", outstr), + ] + outlen[0] = conn._alpn_select_callback_args[0][0] + out[0] = conn._alpn_select_callback_args[1] + return 0 + except Exception as e: + self._problems.append(e) + return 2 # SSL_TLSEXT_ERR_ALERT_FATAL + + self.callback = _ffi.callback( + "int (*)(SSL *, unsigned char **, unsigned char *, " + "const unsigned char *, unsigned int, void *)", + wrapper + ) + + +def _asFileDescriptor(obj): + fd = None + if not isinstance(obj, integer_types): + meth = getattr(obj, "fileno", None) + if meth is not None: + obj = meth() + + if isinstance(obj, integer_types): + fd = obj + + if not isinstance(fd, integer_types): + raise TypeError("argument must be an int, or have a fileno() method.") + elif fd < 0: + raise ValueError( + "file descriptor cannot be a negative integer (%i)" % (fd,)) + + return fd + + + +def SSLeay_version(type): + """ + Return a string describing the version of OpenSSL in use. + + :param type: One of the SSLEAY_ constants defined in this module. + """ + return _ffi.string(_lib.SSLeay_version(type)) + + +def _requires_npn(func): + """ + Wraps any function that requires NPN support in OpenSSL, ensuring that + NotImplementedError is raised if NPN is not present. + """ + @wraps(func) + def wrapper(*args, **kwargs): + if not _lib.Cryptography_HAS_NEXTPROTONEG: + raise NotImplementedError("NPN not available.") + + return func(*args, **kwargs) + + return wrapper + + + +def _requires_alpn(func): + """ + Wraps any function that requires ALPN support in OpenSSL, ensuring that + NotImplementedError is raised if ALPN support is not present. + """ + @wraps(func) + def wrapper(*args, **kwargs): + if not _lib.Cryptography_HAS_ALPN: + raise NotImplementedError("ALPN not available.") + + return func(*args, **kwargs) + + return wrapper + + + +class Session(object): + pass + + + +class Context(object): + """ + :py:obj:`OpenSSL.SSL.Context` instances define the parameters for setting up + new SSL connections. + """ + _methods = { + SSLv2_METHOD: "SSLv2_method", + SSLv3_METHOD: "SSLv3_method", + SSLv23_METHOD: "SSLv23_method", + TLSv1_METHOD: "TLSv1_method", + TLSv1_1_METHOD: "TLSv1_1_method", + TLSv1_2_METHOD: "TLSv1_2_method", + } + _methods = dict( + (identifier, getattr(_lib, name)) + for (identifier, name) in _methods.items() + if getattr(_lib, name, None) is not None) + + + def __init__(self, method): + """ + :param method: One of SSLv2_METHOD, SSLv3_METHOD, SSLv23_METHOD, or + TLSv1_METHOD. 
+ """ + if not isinstance(method, integer_types): + raise TypeError("method must be an integer") + + try: + method_func = self._methods[method] + except KeyError: + raise ValueError("No such protocol") + + method_obj = method_func() + if method_obj == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + + context = _lib.SSL_CTX_new(method_obj) + if context == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + context = _ffi.gc(context, _lib.SSL_CTX_free) + + self._context = context + self._passphrase_helper = None + self._passphrase_callback = None + self._passphrase_userdata = None + self._verify_helper = None + self._verify_callback = None + self._info_callback = None + self._tlsext_servername_callback = None + self._app_data = None + self._npn_advertise_helper = None + self._npn_advertise_callback = None + self._npn_select_helper = None + self._npn_select_callback = None + self._alpn_select_helper = None + self._alpn_select_callback = None + + # SSL_CTX_set_app_data(self->ctx, self); + # SSL_CTX_set_mode(self->ctx, SSL_MODE_ENABLE_PARTIAL_WRITE | + # SSL_MODE_ACCEPT_MOVING_WRITE_BUFFER | + # SSL_MODE_AUTO_RETRY); + self.set_mode(_lib.SSL_MODE_ENABLE_PARTIAL_WRITE) + + + def load_verify_locations(self, cafile, capath=None): + """ + Let SSL know where we can find trusted certificates for the certificate + chain + + :param cafile: In which file we can find the certificates (``bytes`` or + ``unicode``). + :param capath: In which directory we can find the certificates + (``bytes`` or ``unicode``). + + :return: None + """ + if cafile is None: + cafile = _ffi.NULL + else: + cafile = _path_string(cafile) + + if capath is None: + capath = _ffi.NULL + else: + capath = _path_string(capath) + + load_result = _lib.SSL_CTX_load_verify_locations(self._context, cafile, capath) + if not load_result: + _raise_current_error() + + + def _wrap_callback(self, callback): + @wraps(callback) + def wrapper(size, verify, userdata): + return callback(size, verify, self._passphrase_userdata) + return _PassphraseHelper( + FILETYPE_PEM, wrapper, more_args=True, truncate=True) + + + def set_passwd_cb(self, callback, userdata=None): + """ + Set the passphrase callback + + :param callback: The Python callback to use + :param userdata: (optional) A Python object which will be given as + argument to the callback + :return: None + """ + if not callable(callback): + raise TypeError("callback must be callable") + + self._passphrase_helper = self._wrap_callback(callback) + self._passphrase_callback = self._passphrase_helper.callback + _lib.SSL_CTX_set_default_passwd_cb( + self._context, self._passphrase_callback) + self._passphrase_userdata = userdata + + + def set_default_verify_paths(self): + """ + Use the platform-specific CA certificate locations + + :return: None + """ + set_result = _lib.SSL_CTX_set_default_verify_paths(self._context) + if not set_result: + # TODO: This is untested. + _raise_current_error() + + + def use_certificate_chain_file(self, certfile): + """ + Load a certificate chain from a file + + :param certfile: The name of the certificate chain file (``bytes`` or + ``unicode``). + + :return: None + """ + certfile = _path_string(certfile) + + result = _lib.SSL_CTX_use_certificate_chain_file(self._context, certfile) + if not result: + _raise_current_error() + + + def use_certificate_file(self, certfile, filetype=FILETYPE_PEM): + """ + Load a certificate from a file + + :param certfile: The name of the certificate file (``bytes`` or + ``unicode``). 
+ :param filetype: (optional) The encoding of the file, default is PEM + + :return: None + """ + certfile = _path_string(certfile) + if not isinstance(filetype, integer_types): + raise TypeError("filetype must be an integer") + + use_result = _lib.SSL_CTX_use_certificate_file(self._context, certfile, filetype) + if not use_result: + _raise_current_error() + + + def use_certificate(self, cert): + """ + Load a certificate from a X509 object + + :param cert: The X509 object + :return: None + """ + if not isinstance(cert, X509): + raise TypeError("cert must be an X509 instance") + + use_result = _lib.SSL_CTX_use_certificate(self._context, cert._x509) + if not use_result: + _raise_current_error() + + + def add_extra_chain_cert(self, certobj): + """ + Add certificate to chain + + :param certobj: The X509 certificate object to add to the chain + :return: None + """ + if not isinstance(certobj, X509): + raise TypeError("certobj must be an X509 instance") + + copy = _lib.X509_dup(certobj._x509) + add_result = _lib.SSL_CTX_add_extra_chain_cert(self._context, copy) + if not add_result: + # TODO: This is untested. + _lib.X509_free(copy) + _raise_current_error() + + + def _raise_passphrase_exception(self): + if self._passphrase_helper is None: + _raise_current_error() + exception = self._passphrase_helper.raise_if_problem(Error) + if exception is not None: + raise exception + + + def use_privatekey_file(self, keyfile, filetype=_UNSPECIFIED): + """ + Load a private key from a file + + :param keyfile: The name of the key file (``bytes`` or ``unicode``) + :param filetype: (optional) The encoding of the file, default is PEM + + :return: None + """ + keyfile = _path_string(keyfile) + + if filetype is _UNSPECIFIED: + filetype = FILETYPE_PEM + elif not isinstance(filetype, integer_types): + raise TypeError("filetype must be an integer") + + use_result = _lib.SSL_CTX_use_PrivateKey_file( + self._context, keyfile, filetype) + if not use_result: + self._raise_passphrase_exception() + + + def use_privatekey(self, pkey): + """ + Load a private key from a PKey object + + :param pkey: The PKey object + :return: None + """ + if not isinstance(pkey, PKey): + raise TypeError("pkey must be a PKey instance") + + use_result = _lib.SSL_CTX_use_PrivateKey(self._context, pkey._pkey) + if not use_result: + self._raise_passphrase_exception() + + + def check_privatekey(self): + """ + Check that the private key and certificate match up + + :return: None (raises an exception if something's wrong) + """ + if not _lib.SSL_CTX_check_private_key(self._context): + _raise_current_error() + + + def load_client_ca(self, cafile): + """ + Load the trusted certificates that will be sent to the client (basically + telling the client "These are the guys I trust"). Does not actually + imply any of the certificates are trusted; that must be configured + separately. + + :param cafile: The name of the certificates file + :return: None + """ + + def set_session_id(self, buf): + """ + Set the session identifier. This is needed if you want to do session + resumption. + + :param buf: A Python object that can be safely converted to a string + :returns: None + """ + + def set_session_cache_mode(self, mode): + """ + Enable/disable session caching and specify the mode used. + + :param mode: One or more of the SESS_CACHE_* flags (combine using + bitwise or) + :returns: The previously set caching mode. 
+ """ + if not isinstance(mode, integer_types): + raise TypeError("mode must be an integer") + + return _lib.SSL_CTX_set_session_cache_mode(self._context, mode) + + + def get_session_cache_mode(self): + """ + :returns: The currently used cache mode. + """ + return _lib.SSL_CTX_get_session_cache_mode(self._context) + + + def set_verify(self, mode, callback): + """ + Set the verify mode and verify callback + + :param mode: The verify mode, this is either VERIFY_NONE or + VERIFY_PEER combined with possible other flags + :param callback: The Python callback to use + :return: None + + See SSL_CTX_set_verify(3SSL) for further details. + """ + if not isinstance(mode, integer_types): + raise TypeError("mode must be an integer") + + if not callable(callback): + raise TypeError("callback must be callable") + + self._verify_helper = _VerifyHelper(callback) + self._verify_callback = self._verify_helper.callback + _lib.SSL_CTX_set_verify(self._context, mode, self._verify_callback) + + + def set_verify_depth(self, depth): + """ + Set the verify depth + + :param depth: An integer specifying the verify depth + :return: None + """ + if not isinstance(depth, integer_types): + raise TypeError("depth must be an integer") + + _lib.SSL_CTX_set_verify_depth(self._context, depth) + + + def get_verify_mode(self): + """ + Get the verify mode + + :return: The verify mode + """ + return _lib.SSL_CTX_get_verify_mode(self._context) + + + def get_verify_depth(self): + """ + Get the verify depth + + :return: The verify depth + """ + return _lib.SSL_CTX_get_verify_depth(self._context) + + + def load_tmp_dh(self, dhfile): + """ + Load parameters for Ephemeral Diffie-Hellman + + :param dhfile: The file to load EDH parameters from (``bytes`` or + ``unicode``). + + :return: None + """ + dhfile = _path_string(dhfile) + + bio = _lib.BIO_new_file(dhfile, b"r") + if bio == _ffi.NULL: + _raise_current_error() + bio = _ffi.gc(bio, _lib.BIO_free) + + dh = _lib.PEM_read_bio_DHparams(bio, _ffi.NULL, _ffi.NULL, _ffi.NULL) + dh = _ffi.gc(dh, _lib.DH_free) + _lib.SSL_CTX_set_tmp_dh(self._context, dh) + + + def set_tmp_ecdh(self, curve): + """ + Select a curve to use for ECDHE key exchange. + + :param curve: A curve object to use as returned by either + :py:meth:`OpenSSL.crypto.get_elliptic_curve` or + :py:meth:`OpenSSL.crypto.get_elliptic_curves`. + + :return: None + """ + _lib.SSL_CTX_set_tmp_ecdh(self._context, curve._to_EC_KEY()) + + + def set_cipher_list(self, cipher_list): + """ + Change the cipher list + + :param cipher_list: A cipher list, see ciphers(1) + :return: None + """ + if isinstance(cipher_list, _text_type): + cipher_list = cipher_list.encode("ascii") + + if not isinstance(cipher_list, bytes): + raise TypeError("cipher_list must be bytes or unicode") + + result = _lib.SSL_CTX_set_cipher_list(self._context, cipher_list) + if not result: + _raise_current_error() + + + def set_client_ca_list(self, certificate_authorities): + """ + Set the list of preferred client certificate signers for this server context. + + This list of certificate authorities will be sent to the client when the + server requests a client certificate. + + :param certificate_authorities: a sequence of X509Names. + :return: None + """ + name_stack = _lib.sk_X509_NAME_new_null() + if name_stack == _ffi.NULL: + # TODO: This is untested. 
+ _raise_current_error() + + try: + for ca_name in certificate_authorities: + if not isinstance(ca_name, X509Name): + raise TypeError( + "client CAs must be X509Name objects, not %s objects" % ( + type(ca_name).__name__,)) + copy = _lib.X509_NAME_dup(ca_name._name) + if copy == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + push_result = _lib.sk_X509_NAME_push(name_stack, copy) + if not push_result: + _lib.X509_NAME_free(copy) + _raise_current_error() + except: + _lib.sk_X509_NAME_free(name_stack) + raise + + _lib.SSL_CTX_set_client_CA_list(self._context, name_stack) + + + def add_client_ca(self, certificate_authority): + """ + Add the CA certificate to the list of preferred signers for this context. + + The list of certificate authorities will be sent to the client when the + server requests a client certificate. + + :param certificate_authority: certificate authority's X509 certificate. + :return: None + """ + if not isinstance(certificate_authority, X509): + raise TypeError("certificate_authority must be an X509 instance") + + add_result = _lib.SSL_CTX_add_client_CA( + self._context, certificate_authority._x509) + if not add_result: + # TODO: This is untested. + _raise_current_error() + + + def set_timeout(self, timeout): + """ + Set session timeout + + :param timeout: The timeout in seconds + :return: The previous session timeout + """ + if not isinstance(timeout, integer_types): + raise TypeError("timeout must be an integer") + + return _lib.SSL_CTX_set_timeout(self._context, timeout) + + + def get_timeout(self): + """ + Get the session timeout + + :return: The session timeout + """ + return _lib.SSL_CTX_get_timeout(self._context) + + + def set_info_callback(self, callback): + """ + Set the info callback + + :param callback: The Python callback to use + :return: None + """ + @wraps(callback) + def wrapper(ssl, where, return_code): + callback(Connection._reverse_mapping[ssl], where, return_code) + self._info_callback = _ffi.callback( + "void (*)(const SSL *, int, int)", wrapper) + _lib.SSL_CTX_set_info_callback(self._context, self._info_callback) + + + def get_app_data(self): + """ + Get the application data (supplied via set_app_data()) + + :return: The application data + """ + return self._app_data + + + def set_app_data(self, data): + """ + Set the application data (will be returned from get_app_data()) + + :param data: Any Python object + :return: None + """ + self._app_data = data + + + def get_cert_store(self): + """ + Get the certificate store for the context. + + :return: A X509Store object or None if it does not have one. + """ + store = _lib.SSL_CTX_get_cert_store(self._context) + if store == _ffi.NULL: + # TODO: This is untested. + return None + + pystore = X509Store.__new__(X509Store) + pystore._store = store + return pystore + + + def set_options(self, options): + """ + Add options. Options set before are not cleared! + + :param options: The options to add. + :return: The new option bitmask. + """ + if not isinstance(options, integer_types): + raise TypeError("options must be an integer") + + return _lib.SSL_CTX_set_options(self._context, options) + + + def set_mode(self, mode): + """ + Add modes via bitmask. Modes set before are not cleared! + + :param mode: The mode to add. + :return: The new mode bitmask. 
+ """ + if not isinstance(mode, integer_types): + raise TypeError("mode must be an integer") + + return _lib.SSL_CTX_set_mode(self._context, mode) + + + def set_tlsext_servername_callback(self, callback): + """ + Specify a callback function to be called when clients specify a server name. + + :param callback: The callback function. It will be invoked with one + argument, the Connection instance. + """ + @wraps(callback) + def wrapper(ssl, alert, arg): + callback(Connection._reverse_mapping[ssl]) + return 0 + + self._tlsext_servername_callback = _ffi.callback( + "int (*)(const SSL *, int *, void *)", wrapper) + _lib.SSL_CTX_set_tlsext_servername_callback( + self._context, self._tlsext_servername_callback) + + + @_requires_npn + def set_npn_advertise_callback(self, callback): + """ + Specify a callback function that will be called when offering `Next + Protocol Negotiation + `_ as a server. + + :param callback: The callback function. It will be invoked with one + argument, the Connection instance. It should return a list of + bytestrings representing the advertised protocols, like + ``[b'http/1.1', b'spdy/2']``. + """ + self._npn_advertise_helper = _NpnAdvertiseHelper(callback) + self._npn_advertise_callback = self._npn_advertise_helper.callback + _lib.SSL_CTX_set_next_protos_advertised_cb( + self._context, self._npn_advertise_callback, _ffi.NULL) + + + @_requires_npn + def set_npn_select_callback(self, callback): + """ + Specify a callback function that will be called when a server offers + Next Protocol Negotiation options. + + :param callback: The callback function. It will be invoked with two + arguments: the Connection, and a list of offered protocols as + bytestrings, e.g. ``[b'http/1.1', b'spdy/2']``. It should return + one of those bytestrings, the chosen protocol. + """ + self._npn_select_helper = _NpnSelectHelper(callback) + self._npn_select_callback = self._npn_select_helper.callback + _lib.SSL_CTX_set_next_proto_select_cb( + self._context, self._npn_select_callback, _ffi.NULL) + + @_requires_alpn + def set_alpn_protos(self, protos): + """ + Specify the clients ALPN protocol list. + + These protocols are offered to the server during protocol negotiation. + + :param protos: A list of the protocols to be offered to the server. + This list should be a Python list of bytestrings representing the + protocols to offer, e.g. ``[b'http/1.1', b'spdy/2']``. + """ + # Take the list of protocols and join them together, prefixing them + # with their lengths. + protostr = b''.join( + chain.from_iterable((int2byte(len(p)), p) for p in protos) + ) + + # Build a C string from the list. We don't need to save this off + # because OpenSSL immediately copies the data out. + input_str = _ffi.new("unsigned char[]", protostr) + input_str_len = _ffi.cast("unsigned", len(protostr)) + _lib.SSL_CTX_set_alpn_protos(self._context, input_str, input_str_len) + + @_requires_alpn + def set_alpn_select_callback(self, callback): + """ + Set the callback to handle ALPN protocol choice. + + :param callback: The callback function. It will be invoked with two + arguments: the Connection, and a list of offered protocols as + bytestrings, e.g ``[b'http/1.1', b'spdy/2']``. It should return + one of those bytestrings, the chosen protocol. 
+ """ + self._alpn_select_helper = _ALPNSelectHelper(callback) + self._alpn_select_callback = self._alpn_select_helper.callback + _lib.SSL_CTX_set_alpn_select_cb( + self._context, self._alpn_select_callback, _ffi.NULL) + +ContextType = Context + + + +class Connection(object): + """ + """ + _reverse_mapping = WeakValueDictionary() + + def __init__(self, context, socket=None): + """ + Create a new Connection object, using the given OpenSSL.SSL.Context + instance and socket. + + :param context: An SSL Context to use for this connection + :param socket: The socket to use for transport layer + """ + if not isinstance(context, Context): + raise TypeError("context must be a Context instance") + + ssl = _lib.SSL_new(context._context) + self._ssl = _ffi.gc(ssl, _lib.SSL_free) + self._context = context + + # References to strings used for Next Protocol Negotiation. OpenSSL's + # header files suggest that these might get copied at some point, but + # doesn't specify when, so we store them here to make sure they don't + # get freed before OpenSSL uses them. + self._npn_advertise_callback_args = None + self._npn_select_callback_args = None + + # References to strings used for Application Layer Protocol + # Negotiation. These strings get copied at some point but it's well + # after the callback returns, so we have to hang them somewhere to + # avoid them getting freed. + self._alpn_select_callback_args = None + + self._reverse_mapping[self._ssl] = self + + if socket is None: + self._socket = None + # Don't set up any gc for these, SSL_free will take care of them. + self._into_ssl = _lib.BIO_new(_lib.BIO_s_mem()) + self._from_ssl = _lib.BIO_new(_lib.BIO_s_mem()) + + if self._into_ssl == _ffi.NULL or self._from_ssl == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + + _lib.SSL_set_bio(self._ssl, self._into_ssl, self._from_ssl) + else: + self._into_ssl = None + self._from_ssl = None + self._socket = socket + set_result = _lib.SSL_set_fd(self._ssl, _asFileDescriptor(self._socket)) + if not set_result: + # TODO: This is untested. + _raise_current_error() + + + def __getattr__(self, name): + """ + Look up attributes on the wrapped socket object if they are not found on + the Connection object. + """ + return getattr(self._socket, name) + + + def _raise_ssl_error(self, ssl, result): + if self._context._verify_helper is not None: + self._context._verify_helper.raise_if_problem() + if self._context._npn_advertise_helper is not None: + self._context._npn_advertise_helper.raise_if_problem() + if self._context._npn_select_helper is not None: + self._context._npn_select_helper.raise_if_problem() + if self._context._alpn_select_helper is not None: + self._context._alpn_select_helper.raise_if_problem() + + error = _lib.SSL_get_error(ssl, result) + if error == _lib.SSL_ERROR_WANT_READ: + raise WantReadError() + elif error == _lib.SSL_ERROR_WANT_WRITE: + raise WantWriteError() + elif error == _lib.SSL_ERROR_ZERO_RETURN: + raise ZeroReturnError() + elif error == _lib.SSL_ERROR_WANT_X509_LOOKUP: + # TODO: This is untested. + raise WantX509LookupError() + elif error == _lib.SSL_ERROR_SYSCALL: + if _lib.ERR_peek_error() == 0: + if result < 0: + if platform == "win32": + errno = _ffi.getwinerror()[0] + else: + errno = _ffi.errno + raise SysCallError(errno, errorcode.get(errno)) + else: + raise SysCallError(-1, "Unexpected EOF") + else: + # TODO: This is untested. 
+ _raise_current_error() + elif error == _lib.SSL_ERROR_NONE: + pass + else: + _raise_current_error() + + + def get_context(self): + """ + Get session context + """ + return self._context + + + def set_context(self, context): + """ + Switch this connection to a new session context + + :param context: A :py:class:`Context` instance giving the new session + context to use. + """ + if not isinstance(context, Context): + raise TypeError("context must be a Context instance") + + _lib.SSL_set_SSL_CTX(self._ssl, context._context) + self._context = context + + + def get_servername(self): + """ + Retrieve the servername extension value if provided in the client hello + message, or None if there wasn't one. + + :return: A byte string giving the server name or :py:data:`None`. + """ + name = _lib.SSL_get_servername(self._ssl, _lib.TLSEXT_NAMETYPE_host_name) + if name == _ffi.NULL: + return None + + return _ffi.string(name) + + + def set_tlsext_host_name(self, name): + """ + Set the value of the servername extension to send in the client hello. + + :param name: A byte string giving the name. + """ + if not isinstance(name, bytes): + raise TypeError("name must be a byte string") + elif b"\0" in name: + raise TypeError("name must not contain NUL byte") + + # XXX I guess this can fail sometimes? + _lib.SSL_set_tlsext_host_name(self._ssl, name) + + + def pending(self): + """ + Get the number of bytes that can be safely read from the connection + + :return: The number of bytes available in the receive buffer. + """ + return _lib.SSL_pending(self._ssl) + + + def send(self, buf, flags=0): + """ + Send data on the connection. NOTE: If you get one of the WantRead, + WantWrite or WantX509Lookup exceptions on this, you have to call the + method again with the SAME buffer. + + :param buf: The string, buffer or memoryview to send + :param flags: (optional) Included for compatibility with the socket + API, the value is ignored + :return: The number of bytes written + """ + # Backward compatibility + buf = _text_to_bytes_and_warn("buf", buf) + + if isinstance(buf, _memoryview): + buf = buf.tobytes() + if isinstance(buf, _buffer): + buf = str(buf) + if not isinstance(buf, bytes): + raise TypeError("data must be a memoryview, buffer or byte string") + + result = _lib.SSL_write(self._ssl, buf, len(buf)) + self._raise_ssl_error(self._ssl, result) + return result + write = send + + + def sendall(self, buf, flags=0): + """ + Send "all" data on the connection. This calls send() repeatedly until + all data is sent. If an error occurs, it's impossible to tell how much + data has been sent. + + :param buf: The string, buffer or memoryview to send + :param flags: (optional) Included for compatibility with the socket + API, the value is ignored + :return: The number of bytes written + """ + buf = _text_to_bytes_and_warn("buf", buf) + + if isinstance(buf, _memoryview): + buf = buf.tobytes() + if isinstance(buf, _buffer): + buf = str(buf) + if not isinstance(buf, bytes): + raise TypeError("buf must be a memoryview, buffer or byte string") + + left_to_send = len(buf) + total_sent = 0 + data = _ffi.new("char[]", buf) + + while left_to_send: + result = _lib.SSL_write(self._ssl, data + total_sent, left_to_send) + self._raise_ssl_error(self._ssl, result) + total_sent += result + left_to_send -= result + + + def recv(self, bufsiz, flags=None): + """ + Receive data on the connection. NOTE: If you get one of the WantRead, + WantWrite or WantX509Lookup exceptions on this, you have to call the + method again with the SAME buffer. 
+ + :param bufsiz: The maximum number of bytes to read + :param flags: (optional) Included for compatibility with the socket + API, the value is ignored + :return: The string read from the Connection + """ + buf = _ffi.new("char[]", bufsiz) + result = _lib.SSL_read(self._ssl, buf, bufsiz) + self._raise_ssl_error(self._ssl, result) + return _ffi.buffer(buf, result)[:] + read = recv + + + def recv_into(self, buffer, nbytes=None, flags=None): + """ + Receive data on the connection and store the data into a buffer rather + than creating a new string. + + :param buffer: The buffer to copy into. + :param nbytes: (optional) The maximum number of bytes to read into the + buffer. If not present, defaults to the size of the buffer. If + larger than the size of the buffer, is reduced to the size of the + buffer. + :param flags: (optional) Included for compatibility with the socket + API, the value is ignored. + :return: The number of bytes read into the buffer. + """ + if nbytes is None: + nbytes = len(buffer) + else: + nbytes = min(nbytes, len(buffer)) + + # We need to create a temporary buffer. This is annoying, it would be + # better if we could pass memoryviews straight into the SSL_read call, + # but right now we can't. Revisit this if CFFI gets that ability. + buf = _ffi.new("char[]", nbytes) + result = _lib.SSL_read(self._ssl, buf, nbytes) + self._raise_ssl_error(self._ssl, result) + + # This strange line is all to avoid a memory copy. The buffer protocol + # should allow us to assign a CFFI buffer to the LHS of this line, but + # on CPython 3.3+ that segfaults. As a workaround, we can temporarily + # wrap it in a memoryview, except on Python 2.6 which doesn't have a + # memoryview type. + try: + buffer[:result] = memoryview(_ffi.buffer(buf, result)) + except NameError: + buffer[:result] = _ffi.buffer(buf, result) + + return result + + + def _handle_bio_errors(self, bio, result): + if _lib.BIO_should_retry(bio): + if _lib.BIO_should_read(bio): + raise WantReadError() + elif _lib.BIO_should_write(bio): + # TODO: This is untested. + raise WantWriteError() + elif _lib.BIO_should_io_special(bio): + # TODO: This is untested. I think io_special means the socket + # BIO has a not-yet connected socket. + raise ValueError("BIO_should_io_special") + else: + # TODO: This is untested. + raise ValueError("unknown bio failure") + else: + # TODO: This is untested. + _raise_current_error() + + + def bio_read(self, bufsiz): + """ + When using non-socket connections this function reads the "dirty" data + that would have traveled away on the network. + + :param bufsiz: The maximum number of bytes to read + :return: The string read. + """ + if self._from_ssl is None: + raise TypeError("Connection sock was not None") + + if not isinstance(bufsiz, integer_types): + raise TypeError("bufsiz must be an integer") + + buf = _ffi.new("char[]", bufsiz) + result = _lib.BIO_read(self._from_ssl, buf, bufsiz) + if result <= 0: + self._handle_bio_errors(self._from_ssl, result) + + return _ffi.buffer(buf, result)[:] + + + def bio_write(self, buf): + """ + When using non-socket connections this function sends "dirty" data that + would have traveled in on the network. + + :param buf: The string to put into the memory BIO. 
+ :return: The number of bytes written + """ + buf = _text_to_bytes_and_warn("buf", buf) + + if self._into_ssl is None: + raise TypeError("Connection sock was not None") + + if not isinstance(buf, bytes): + raise TypeError("buf must be a byte string") + + result = _lib.BIO_write(self._into_ssl, buf, len(buf)) + if result <= 0: + self._handle_bio_errors(self._into_ssl, result) + return result + + + def renegotiate(self): + """ + Renegotiate the session + + :return: True if the renegotiation can be started, false otherwise + """ + + def do_handshake(self): + """ + Perform an SSL handshake (usually called after renegotiate() or one of + set_*_state()). This can raise the same exceptions as send and recv. + + :return: None. + """ + result = _lib.SSL_do_handshake(self._ssl) + self._raise_ssl_error(self._ssl, result) + + + def renegotiate_pending(self): + """ + Check if there's a renegotiation in progress, it will return false once + a renegotiation is finished. + + :return: Whether there's a renegotiation in progress + """ + + def total_renegotiations(self): + """ + Find out the total number of renegotiations. + + :return: The number of renegotiations. + """ + return _lib.SSL_total_renegotiations(self._ssl) + + + def connect(self, addr): + """ + Connect to remote host and set up client-side SSL + + :param addr: A remote address + :return: What the socket's connect method returns + """ + _lib.SSL_set_connect_state(self._ssl) + return self._socket.connect(addr) + + + def connect_ex(self, addr): + """ + Connect to remote host and set up client-side SSL. Note that if the socket's + connect_ex method doesn't return 0, SSL won't be initialized. + + :param addr: A remove address + :return: What the socket's connect_ex method returns + """ + connect_ex = self._socket.connect_ex + self.set_connect_state() + return connect_ex(addr) + + + def accept(self): + """ + Accept incoming connection and set up SSL on it + + :return: A (conn,addr) pair where conn is a Connection and addr is an + address + """ + client, addr = self._socket.accept() + conn = Connection(self._context, client) + conn.set_accept_state() + return (conn, addr) + + + def bio_shutdown(self): + """ + When using non-socket connections this function signals end of + data on the input for this connection. + + :return: None + """ + if self._from_ssl is None: + raise TypeError("Connection sock was not None") + + _lib.BIO_set_mem_eof_return(self._into_ssl, 0) + + + def shutdown(self): + """ + Send closure alert + + :return: True if the shutdown completed successfully (i.e. both sides + have sent closure alerts), false otherwise (i.e. you have to + wait for a ZeroReturnError on a recv() method call + """ + result = _lib.SSL_shutdown(self._ssl) + if result < 0: + self._raise_ssl_error(self._ssl, result) + elif result > 0: + return True + else: + return False + + + def get_cipher_list(self): + """ + Get the session cipher list + + :return: A list of cipher strings + """ + ciphers = [] + for i in count(): + result = _lib.SSL_get_cipher_list(self._ssl, i) + if result == _ffi.NULL: + break + ciphers.append(_native(_ffi.string(result))) + return ciphers + + + def get_client_ca_list(self): + """ + Get CAs whose certificates are suggested for client authentication. + + :return: If this is a server connection, a list of X509Names representing + the acceptable CAs as set by :py:meth:`OpenSSL.SSL.Context.set_client_ca_list` or + :py:meth:`OpenSSL.SSL.Context.add_client_ca`. 
If this is a client connection, + the list of such X509Names sent by the server, or an empty list if that + has not yet happened. + """ + ca_names = _lib.SSL_get_client_CA_list(self._ssl) + if ca_names == _ffi.NULL: + # TODO: This is untested. + return [] + + result = [] + for i in range(_lib.sk_X509_NAME_num(ca_names)): + name = _lib.sk_X509_NAME_value(ca_names, i) + copy = _lib.X509_NAME_dup(name) + if copy == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + + pyname = X509Name.__new__(X509Name) + pyname._name = _ffi.gc(copy, _lib.X509_NAME_free) + result.append(pyname) + return result + + + def makefile(self): + """ + The makefile() method is not implemented, since there is no dup semantics + for SSL connections + + :raise: NotImplementedError + """ + raise NotImplementedError("Cannot make file object of OpenSSL.SSL.Connection") + + + def get_app_data(self): + """ + Get application data + + :return: The application data + """ + return self._app_data + + + def set_app_data(self, data): + """ + Set application data + + :param data - The application data + :return: None + """ + self._app_data = data + + + def get_shutdown(self): + """ + Get shutdown state + + :return: The shutdown state, a bitvector of SENT_SHUTDOWN, RECEIVED_SHUTDOWN. + """ + return _lib.SSL_get_shutdown(self._ssl) + + + def set_shutdown(self, state): + """ + Set shutdown state + + :param state - bitvector of SENT_SHUTDOWN, RECEIVED_SHUTDOWN. + :return: None + """ + if not isinstance(state, integer_types): + raise TypeError("state must be an integer") + + _lib.SSL_set_shutdown(self._ssl, state) + + + def state_string(self): + """ + Get a verbose state description + + :return: A string representing the state + """ + + def server_random(self): + """ + Get a copy of the server hello nonce. + + :return: A string representing the state + """ + if self._ssl.session == _ffi.NULL: + return None + return _ffi.buffer( + self._ssl.s3.server_random, + _lib.SSL3_RANDOM_SIZE)[:] + + + def client_random(self): + """ + Get a copy of the client hello nonce. + + :return: A string representing the state + """ + if self._ssl.session == _ffi.NULL: + return None + return _ffi.buffer( + self._ssl.s3.client_random, + _lib.SSL3_RANDOM_SIZE)[:] + + + def master_key(self): + """ + Get a copy of the master key. + + :return: A string representing the state + """ + if self._ssl.session == _ffi.NULL: + return None + return _ffi.buffer( + self._ssl.session.master_key, + self._ssl.session.master_key_length)[:] + + + def sock_shutdown(self, *args, **kwargs): + """ + See shutdown(2) + + :return: What the socket's shutdown() method returns + """ + return self._socket.shutdown(*args, **kwargs) + + + def get_peer_certificate(self): + """ + Retrieve the other side's certificate (if any) + + :return: The peer's certificate + """ + cert = _lib.SSL_get_peer_certificate(self._ssl) + if cert != _ffi.NULL: + pycert = X509.__new__(X509) + pycert._x509 = _ffi.gc(cert, _lib.X509_free) + return pycert + return None + + + def get_peer_cert_chain(self): + """ + Retrieve the other side's certificate (if any) + + :return: A list of X509 instances giving the peer's certificate chain, + or None if it does not have one. 
+ """ + cert_stack = _lib.SSL_get_peer_cert_chain(self._ssl) + if cert_stack == _ffi.NULL: + return None + + result = [] + for i in range(_lib.sk_X509_num(cert_stack)): + # TODO could incref instead of dup here + cert = _lib.X509_dup(_lib.sk_X509_value(cert_stack, i)) + pycert = X509.__new__(X509) + pycert._x509 = _ffi.gc(cert, _lib.X509_free) + result.append(pycert) + return result + + + def want_read(self): + """ + Checks if more data has to be read from the transport layer to complete an + operation. + + :return: True iff more data has to be read + """ + return _lib.SSL_want_read(self._ssl) + + + def want_write(self): + """ + Checks if there is data to write to the transport layer to complete an + operation. + + :return: True iff there is data to write + """ + return _lib.SSL_want_write(self._ssl) + + + def set_accept_state(self): + """ + Set the connection to work in server mode. The handshake will be handled + automatically by read/write. + + :return: None + """ + _lib.SSL_set_accept_state(self._ssl) + + + def set_connect_state(self): + """ + Set the connection to work in client mode. The handshake will be handled + automatically by read/write. + + :return: None + """ + _lib.SSL_set_connect_state(self._ssl) + + + def get_session(self): + """ + Returns the Session currently used. + + @return: An instance of :py:class:`OpenSSL.SSL.Session` or :py:obj:`None` if + no session exists. + """ + session = _lib.SSL_get1_session(self._ssl) + if session == _ffi.NULL: + return None + + pysession = Session.__new__(Session) + pysession._session = _ffi.gc(session, _lib.SSL_SESSION_free) + return pysession + + + def set_session(self, session): + """ + Set the session to be used when the TLS/SSL connection is established. + + :param session: A Session instance representing the session to use. + :returns: None + """ + if not isinstance(session, Session): + raise TypeError("session must be a Session instance") + + result = _lib.SSL_set_session(self._ssl, session._session) + if not result: + _raise_current_error() + + + def _get_finished_message(self, function): + """ + Helper to implement :py:meth:`get_finished` and + :py:meth:`get_peer_finished`. + + :param function: Either :py:data:`SSL_get_finished`: or + :py:data:`SSL_get_peer_finished`. + + :return: :py:data:`None` if the desired message has not yet been + received, otherwise the contents of the message. + :rtype: :py:class:`bytes` or :py:class:`NoneType` + """ + # The OpenSSL documentation says nothing about what might happen if the + # count argument given is zero. Specifically, it doesn't say whether + # the output buffer may be NULL in that case or not. Inspection of the + # implementation reveals that it calls memcpy() unconditionally. + # Section 7.1.4, paragraph 1 of the C standard suggests that + # memcpy(NULL, source, 0) is not guaranteed to produce defined (let + # alone desirable) behavior (though it probably does on just about + # every implementation...) + # + # Allocate a tiny buffer to pass in (instead of just passing NULL as + # one might expect) for the initial call so as to be safe against this + # potentially undefined behavior. + empty = _ffi.new("char[]", 0) + size = function(self._ssl, empty, 0) + if size == 0: + # No Finished message so far. + return None + + buf = _ffi.new("char[]", size) + function(self._ssl, buf, size) + return _ffi.buffer(buf, size)[:] + + + def get_finished(self): + """ + Obtain the latest `handshake finished` message sent to the peer. 
+ + :return: The contents of the message or :py:obj:`None` if the TLS + handshake has not yet completed. + :rtype: :py:class:`bytes` or :py:class:`NoneType` + """ + return self._get_finished_message(_lib.SSL_get_finished) + + + def get_peer_finished(self): + """ + Obtain the latest `handshake finished` message received from the peer. + + :return: The contents of the message or :py:obj:`None` if the TLS + handshake has not yet completed. + :rtype: :py:class:`bytes` or :py:class:`NoneType` + """ + return self._get_finished_message(_lib.SSL_get_peer_finished) + + + def get_cipher_name(self): + """ + Obtain the name of the currently used cipher. + + :returns: The name of the currently used cipher or :py:obj:`None` + if no connection has been established. + :rtype: :py:class:`unicode` or :py:class:`NoneType` + """ + cipher = _lib.SSL_get_current_cipher(self._ssl) + if cipher == _ffi.NULL: + return None + else: + name = _ffi.string(_lib.SSL_CIPHER_get_name(cipher)) + return name.decode("utf-8") + + + def get_cipher_bits(self): + """ + Obtain the number of secret bits of the currently used cipher. + + :returns: The number of secret bits of the currently used cipher + or :py:obj:`None` if no connection has been established. + :rtype: :py:class:`int` or :py:class:`NoneType` + """ + cipher = _lib.SSL_get_current_cipher(self._ssl) + if cipher == _ffi.NULL: + return None + else: + return _lib.SSL_CIPHER_get_bits(cipher, _ffi.NULL) + + + def get_cipher_version(self): + """ + Obtain the protocol version of the currently used cipher. + + :returns: The protocol name of the currently used cipher + or :py:obj:`None` if no connection has been established. + :rtype: :py:class:`unicode` or :py:class:`NoneType` + """ + cipher = _lib.SSL_get_current_cipher(self._ssl) + if cipher == _ffi.NULL: + return None + else: + version =_ffi.string(_lib.SSL_CIPHER_get_version(cipher)) + return version.decode("utf-8") + + + @_requires_npn + def get_next_proto_negotiated(self): + """ + Get the protocol that was negotiated by NPN. + """ + data = _ffi.new("unsigned char **") + data_len = _ffi.new("unsigned int *") + + _lib.SSL_get0_next_proto_negotiated(self._ssl, data, data_len) + + return _ffi.buffer(data[0], data_len[0])[:] + + @_requires_alpn + def set_alpn_protos(self, protos): + """ + Specify the client's ALPN protocol list. + + These protocols are offered to the server during protocol negotiation. + + :param protos: A list of the protocols to be offered to the server. + This list should be a Python list of bytestrings representing the + protocols to offer, e.g. ``[b'http/1.1', b'spdy/2']``. + """ + # Take the list of protocols and join them together, prefixing them + # with their lengths. + protostr = b''.join( + chain.from_iterable((int2byte(len(p)), p) for p in protos) + ) + + # Build a C string from the list. We don't need to save this off + # because OpenSSL immediately copies the data out. + input_str = _ffi.new("unsigned char[]", protostr) + input_str_len = _ffi.cast("unsigned", len(protostr)) + _lib.SSL_set_alpn_protos(self._ssl, input_str, input_str_len) + + + def get_alpn_proto_negotiated(self): + """ + Get the protocol that was negotiated by ALPN. 
+ """ + if not _lib.Cryptography_HAS_ALPN: + raise NotImplementedError("ALPN not available") + + data = _ffi.new("unsigned char **") + data_len = _ffi.new("unsigned int *") + + _lib.SSL_get0_alpn_selected(self._ssl, data, data_len) + + if not data_len: + return b'' + + return _ffi.buffer(data[0], data_len[0])[:] + + + +ConnectionType = Connection + +# This is similar to the initialization calls at the end of OpenSSL/crypto.py +# but is exercised mostly by the Context initializer. +_lib.SSL_library_init() diff --git a/Lib/site-packages/OpenSSL/__init__.py b/Lib/site-packages/OpenSSL/__init__.py new file mode 100644 index 0000000..db96e1f --- /dev/null +++ b/Lib/site-packages/OpenSSL/__init__.py @@ -0,0 +1,12 @@ +# Copyright (C) AB Strakt +# See LICENSE for details. + +""" +pyOpenSSL - A simple wrapper around the OpenSSL library +""" + +from OpenSSL import rand, crypto, SSL +from OpenSSL.version import __version__ + +__all__ = [ + 'rand', 'crypto', 'SSL', 'tsafe', '__version__'] diff --git a/Lib/site-packages/OpenSSL/_util.py b/Lib/site-packages/OpenSSL/_util.py new file mode 100644 index 0000000..0cc34d8 --- /dev/null +++ b/Lib/site-packages/OpenSSL/_util.py @@ -0,0 +1,127 @@ +from warnings import warn +import sys + +from six import PY3, binary_type, text_type + +from cryptography.hazmat.bindings.openssl.binding import Binding +binding = Binding() +ffi = binding.ffi +lib = binding.lib + + + +def text(charp): + """ + Get a native string type representing of the given CFFI ``char*`` object. + + :param charp: A C-style string represented using CFFI. + + :return: :class:`str` + """ + if not charp: + return "" + return native(ffi.string(charp)) + + + +def exception_from_error_queue(exception_type): + """ + Convert an OpenSSL library failure into a Python exception. + + When a call to the native OpenSSL library fails, this is usually signalled + by the return value, and an error code is stored in an error queue + associated with the current thread. The err library provides functions to + obtain these error codes and textual error messages. + """ + + errors = [] + + while True: + error = lib.ERR_get_error() + if error == 0: + break + errors.append(( + text(lib.ERR_lib_error_string(error)), + text(lib.ERR_func_error_string(error)), + text(lib.ERR_reason_error_string(error)))) + + raise exception_type(errors) + + + +def native(s): + """ + Convert :py:class:`bytes` or :py:class:`unicode` to the native + :py:class:`str` type, using UTF-8 encoding if conversion is necessary. + + :raise UnicodeError: The input string is not UTF-8 decodeable. + + :raise TypeError: The input is neither :py:class:`bytes` nor + :py:class:`unicode`. + """ + if not isinstance(s, (binary_type, text_type)): + raise TypeError("%r is neither bytes nor unicode" % s) + if PY3: + if isinstance(s, binary_type): + return s.decode("utf-8") + else: + if isinstance(s, text_type): + return s.encode("utf-8") + return s + + + +def path_string(s): + """ + Convert a Python string to a :py:class:`bytes` string identifying the same + path and which can be passed into an OpenSSL API accepting a filename. + + :param s: An instance of :py:class:`bytes` or :py:class:`unicode`. + + :return: An instance of :py:class:`bytes`. 
+ """ + if isinstance(s, binary_type): + return s + elif isinstance(s, text_type): + return s.encode(sys.getfilesystemencoding()) + else: + raise TypeError("Path must be represented as bytes or unicode string") + + +if PY3: + def byte_string(s): + return s.encode("charmap") +else: + def byte_string(s): + return s + + +# A marker object to observe whether some optional arguments are passed any +# value or not. +UNSPECIFIED = object() + +_TEXT_WARNING = ( + text_type.__name__ + " for {0} is no longer accepted, use bytes" +) + +def text_to_bytes_and_warn(label, obj): + """ + If ``obj`` is text, emit a warning that it should be bytes instead and try + to convert it to bytes automatically. + + :param str label: The name of the parameter from which ``obj`` was taken + (so a developer can easily find the source of the problem and correct + it). + + :return: If ``obj`` is the text string type, a ``bytes`` object giving the + UTF-8 encoding of that text is returned. Otherwise, ``obj`` itself is + returned. + """ + if isinstance(obj, text_type): + warn( + _TEXT_WARNING.format(label), + category=DeprecationWarning, + stacklevel=3 + ) + return obj.encode('utf-8') + return obj diff --git a/Lib/site-packages/OpenSSL/crypto.py b/Lib/site-packages/OpenSSL/crypto.py new file mode 100644 index 0000000..50ff74f --- /dev/null +++ b/Lib/site-packages/OpenSSL/crypto.py @@ -0,0 +1,2639 @@ +from time import time +from base64 import b16encode +from functools import partial +from operator import __eq__, __ne__, __lt__, __le__, __gt__, __ge__ +from warnings import warn as _warn + +from six import ( + integer_types as _integer_types, + text_type as _text_type, + PY3 as _PY3) + +from OpenSSL._util import ( + ffi as _ffi, + lib as _lib, + exception_from_error_queue as _exception_from_error_queue, + byte_string as _byte_string, + native as _native, + UNSPECIFIED as _UNSPECIFIED, + text_to_bytes_and_warn as _text_to_bytes_and_warn, +) + +FILETYPE_PEM = _lib.SSL_FILETYPE_PEM +FILETYPE_ASN1 = _lib.SSL_FILETYPE_ASN1 + +# TODO This was an API mistake. OpenSSL has no such constant. +FILETYPE_TEXT = 2 ** 16 - 1 + +TYPE_RSA = _lib.EVP_PKEY_RSA +TYPE_DSA = _lib.EVP_PKEY_DSA + + + +class Error(Exception): + """ + An error occurred in an `OpenSSL.crypto` API. + """ + + +_raise_current_error = partial(_exception_from_error_queue, Error) + + + +def _untested_error(where): + """ + An OpenSSL API failed somehow. Additionally, the failure which was + encountered isn't one that's exercised by the test suite so future behavior + of pyOpenSSL is now somewhat less predictable. + """ + raise RuntimeError("Unknown %s failure" % (where,)) + + + +def _new_mem_buf(buffer=None): + """ + Allocate a new OpenSSL memory BIO. + + Arrange for the garbage collector to clean it up automatically. + + :param buffer: None or some bytes to use to put into the BIO so that they + can be read out. + """ + if buffer is None: + bio = _lib.BIO_new(_lib.BIO_s_mem()) + free = _lib.BIO_free + else: + data = _ffi.new("char[]", buffer) + bio = _lib.BIO_new_mem_buf(data, len(buffer)) + # Keep the memory alive as long as the bio is alive! + def free(bio, ref=data): + return _lib.BIO_free(bio) + + if bio == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + + bio = _ffi.gc(bio, free) + return bio + + + +def _bio_to_string(bio): + """ + Copy the contents of an OpenSSL BIO object into a Python byte string. 
+ """ + result_buffer = _ffi.new('char**') + buffer_length = _lib.BIO_get_mem_data(bio, result_buffer) + return _ffi.buffer(result_buffer[0], buffer_length)[:] + + + +def _set_asn1_time(boundary, when): + """ + The the time value of an ASN1 time object. + + @param boundary: An ASN1_GENERALIZEDTIME pointer (or an object safely + castable to that type) which will have its value set. + @param when: A string representation of the desired time value. + + @raise TypeError: If C{when} is not a L{bytes} string. + @raise ValueError: If C{when} does not represent a time in the required + format. + @raise RuntimeError: If the time value cannot be set for some other + (unspecified) reason. + """ + if not isinstance(when, bytes): + raise TypeError("when must be a byte string") + + set_result = _lib.ASN1_GENERALIZEDTIME_set_string( + _ffi.cast('ASN1_GENERALIZEDTIME*', boundary), when) + if set_result == 0: + dummy = _ffi.gc(_lib.ASN1_STRING_new(), _lib.ASN1_STRING_free) + _lib.ASN1_STRING_set(dummy, when, len(when)) + check_result = _lib.ASN1_GENERALIZEDTIME_check( + _ffi.cast('ASN1_GENERALIZEDTIME*', dummy)) + if not check_result: + raise ValueError("Invalid string") + else: + _untested_error() + + + +def _get_asn1_time(timestamp): + """ + Retrieve the time value of an ASN1 time object. + + @param timestamp: An ASN1_GENERALIZEDTIME* (or an object safely castable to + that type) from which the time value will be retrieved. + + @return: The time value from C{timestamp} as a L{bytes} string in a certain + format. Or C{None} if the object contains no time value. + """ + string_timestamp = _ffi.cast('ASN1_STRING*', timestamp) + if _lib.ASN1_STRING_length(string_timestamp) == 0: + return None + elif _lib.ASN1_STRING_type(string_timestamp) == _lib.V_ASN1_GENERALIZEDTIME: + return _ffi.string(_lib.ASN1_STRING_data(string_timestamp)) + else: + generalized_timestamp = _ffi.new("ASN1_GENERALIZEDTIME**") + _lib.ASN1_TIME_to_generalizedtime(timestamp, generalized_timestamp) + if generalized_timestamp[0] == _ffi.NULL: + # This may happen: + # - if timestamp was not an ASN1_TIME + # - if allocating memory for the ASN1_GENERALIZEDTIME failed + # - if a copy of the time data from timestamp cannot be made for + # the newly allocated ASN1_GENERALIZEDTIME + # + # These are difficult to test. cffi enforces the ASN1_TIME type. + # Memory allocation failures are a pain to trigger + # deterministically. 
+ _untested_error("ASN1_TIME_to_generalizedtime") + else: + string_timestamp = _ffi.cast( + "ASN1_STRING*", generalized_timestamp[0]) + string_data = _lib.ASN1_STRING_data(string_timestamp) + string_result = _ffi.string(string_data) + _lib.ASN1_GENERALIZEDTIME_free(generalized_timestamp[0]) + return string_result + + + +class PKey(object): + _only_public = False + _initialized = True + + def __init__(self): + pkey = _lib.EVP_PKEY_new() + self._pkey = _ffi.gc(pkey, _lib.EVP_PKEY_free) + self._initialized = False + + + def generate_key(self, type, bits): + """ + Generate a key of a given type, with a given number of a bits + + :param type: The key type (TYPE_RSA or TYPE_DSA) + :param bits: The number of bits + + :return: None + """ + if not isinstance(type, int): + raise TypeError("type must be an integer") + + if not isinstance(bits, int): + raise TypeError("bits must be an integer") + + # TODO Check error return + exponent = _lib.BN_new() + exponent = _ffi.gc(exponent, _lib.BN_free) + _lib.BN_set_word(exponent, _lib.RSA_F4) + + if type == TYPE_RSA: + if bits <= 0: + raise ValueError("Invalid number of bits") + + rsa = _lib.RSA_new() + + result = _lib.RSA_generate_key_ex(rsa, bits, exponent, _ffi.NULL) + if result == 0: + # TODO: The test for this case is commented out. Different + # builds of OpenSSL appear to have different failure modes that + # make it hard to test. Visual inspection of the OpenSSL + # source reveals that a return value of 0 signals an error. + # Manual testing on a particular build of OpenSSL suggests that + # this is probably the appropriate way to handle those errors. + _raise_current_error() + + result = _lib.EVP_PKEY_assign_RSA(self._pkey, rsa) + if not result: + # TODO: It appears as though this can fail if an engine is in + # use which does not support RSA. + _raise_current_error() + + elif type == TYPE_DSA: + dsa = _lib.DSA_generate_parameters( + bits, _ffi.NULL, 0, _ffi.NULL, _ffi.NULL, _ffi.NULL, _ffi.NULL) + if dsa == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + if not _lib.DSA_generate_key(dsa): + # TODO: This is untested. + _raise_current_error() + if not _lib.EVP_PKEY_assign_DSA(self._pkey, dsa): + # TODO: This is untested. + _raise_current_error() + else: + raise Error("No such key type") + + self._initialized = True + + + def check(self): + """ + Check the consistency of an RSA private key. + + :return: True if key is consistent. + :raise Error: if the key is inconsistent. + :raise TypeError: if the key is of a type which cannot be checked. + Only RSA keys can currently be checked. + """ + if self._only_public: + raise TypeError("public key only") + + if _lib.EVP_PKEY_type(self._pkey.type) != _lib.EVP_PKEY_RSA: + raise TypeError("key type unsupported") + + rsa = _lib.EVP_PKEY_get1_RSA(self._pkey) + rsa = _ffi.gc(rsa, _lib.RSA_free) + result = _lib.RSA_check_key(rsa) + if result: + return True + _raise_current_error() + + + def type(self): + """ + Returns the type of the key + + :return: The type of the key. + """ + return self._pkey.type + + + def bits(self): + """ + Returns the number of bits of the key + + :return: The number of bits of the key. + """ + return _lib.EVP_PKEY_bits(self._pkey) +PKeyType = PKey + + + +class _EllipticCurve(object): + """ + A representation of a supported elliptic curve. + + @cvar _curves: :py:obj:`None` until an attempt is made to load the curves. + Thereafter, a :py:type:`set` containing :py:type:`_EllipticCurve` + instances each of which represents one curve supported by the system. 
+ @type _curves: :py:type:`NoneType` or :py:type:`set` + """ + _curves = None + + if _PY3: + # This only necessary on Python 3. Morever, it is broken on Python 2. + def __ne__(self, other): + """ + Implement cooperation with the right-hand side argument of ``!=``. + + Python 3 seems to have dropped this cooperation in this very narrow + circumstance. + """ + if isinstance(other, _EllipticCurve): + return super(_EllipticCurve, self).__ne__(other) + return NotImplemented + + + @classmethod + def _load_elliptic_curves(cls, lib): + """ + Get the curves supported by OpenSSL. + + :param lib: The OpenSSL library binding object. + + :return: A :py:type:`set` of ``cls`` instances giving the names of the + elliptic curves the underlying library supports. + """ + if lib.Cryptography_HAS_EC: + num_curves = lib.EC_get_builtin_curves(_ffi.NULL, 0) + builtin_curves = _ffi.new('EC_builtin_curve[]', num_curves) + # The return value on this call should be num_curves again. We could + # check it to make sure but if it *isn't* then.. what could we do? + # Abort the whole process, I suppose...? -exarkun + lib.EC_get_builtin_curves(builtin_curves, num_curves) + return set( + cls.from_nid(lib, c.nid) + for c in builtin_curves) + return set() + + + @classmethod + def _get_elliptic_curves(cls, lib): + """ + Get, cache, and return the curves supported by OpenSSL. + + :param lib: The OpenSSL library binding object. + + :return: A :py:type:`set` of ``cls`` instances giving the names of the + elliptic curves the underlying library supports. + """ + if cls._curves is None: + cls._curves = cls._load_elliptic_curves(lib) + return cls._curves + + + @classmethod + def from_nid(cls, lib, nid): + """ + Instantiate a new :py:class:`_EllipticCurve` associated with the given + OpenSSL NID. + + :param lib: The OpenSSL library binding object. + + :param nid: The OpenSSL NID the resulting curve object will represent. + This must be a curve NID (and not, for example, a hash NID) or + subsequent operations will fail in unpredictable ways. + :type nid: :py:class:`int` + + :return: The curve object. + """ + return cls(lib, nid, _ffi.string(lib.OBJ_nid2sn(nid)).decode("ascii")) + + + def __init__(self, lib, nid, name): + """ + :param _lib: The :py:mod:`cryptography` binding instance used to + interface with OpenSSL. + + :param _nid: The OpenSSL NID identifying the curve this object + represents. + :type _nid: :py:class:`int` + + :param name: The OpenSSL short name identifying the curve this object + represents. + :type name: :py:class:`unicode` + """ + self._lib = lib + self._nid = nid + self.name = name + + + def __repr__(self): + return "" % (self.name,) + + + def _to_EC_KEY(self): + """ + Create a new OpenSSL EC_KEY structure initialized to use this curve. + + The structure is automatically garbage collected when the Python object + is garbage collected. + """ + key = self._lib.EC_KEY_new_by_curve_name(self._nid) + return _ffi.gc(key, _lib.EC_KEY_free) + + + +def get_elliptic_curves(): + """ + Return a set of objects representing the elliptic curves supported in the + OpenSSL build in use. + + The curve objects have a :py:class:`unicode` ``name`` attribute by which + they identify themselves. + + The curve objects are useful as values for the argument accepted by + :py:meth:`Context.set_tmp_ecdh` to specify which elliptical curve should be + used for ECDHE key exchange. + """ + return _EllipticCurve._get_elliptic_curves(_lib) + + + +def get_elliptic_curve(name): + """ + Return a single curve object selected by name. 
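+
+    A minimal sketch, assuming an OpenSSL build with elliptic-curve support
+    and an existing :py:class:`OpenSSL.SSL.Context` named ``ctx``::
+
+        curve = get_elliptic_curve(u"prime256v1")
+        ctx.set_tmp_ecdh(curve)
+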
+ + See :py:func:`get_elliptic_curves` for information about curve objects. + + :param name: The OpenSSL short name identifying the curve object to + retrieve. + :type name: :py:class:`unicode` + + If the named curve is not supported then :py:class:`ValueError` is raised. + """ + for curve in get_elliptic_curves(): + if curve.name == name: + return curve + raise ValueError("unknown curve name", name) + + + +class X509Name(object): + def __init__(self, name): + """ + Create a new X509Name, copying the given X509Name instance. + + :param name: An X509Name object to copy + """ + name = _lib.X509_NAME_dup(name._name) + self._name = _ffi.gc(name, _lib.X509_NAME_free) + + + def __setattr__(self, name, value): + if name.startswith('_'): + return super(X509Name, self).__setattr__(name, value) + + # Note: we really do not want str subclasses here, so we do not use + # isinstance. + if type(name) is not str: + raise TypeError("attribute name must be string, not '%.200s'" % ( + type(value).__name__,)) + + nid = _lib.OBJ_txt2nid(_byte_string(name)) + if nid == _lib.NID_undef: + try: + _raise_current_error() + except Error: + pass + raise AttributeError("No such attribute") + + # If there's an old entry for this NID, remove it + for i in range(_lib.X509_NAME_entry_count(self._name)): + ent = _lib.X509_NAME_get_entry(self._name, i) + ent_obj = _lib.X509_NAME_ENTRY_get_object(ent) + ent_nid = _lib.OBJ_obj2nid(ent_obj) + if nid == ent_nid: + ent = _lib.X509_NAME_delete_entry(self._name, i) + _lib.X509_NAME_ENTRY_free(ent) + break + + if isinstance(value, _text_type): + value = value.encode('utf-8') + + add_result = _lib.X509_NAME_add_entry_by_NID( + self._name, nid, _lib.MBSTRING_UTF8, value, -1, -1, 0) + if not add_result: + _raise_current_error() + + + def __getattr__(self, name): + """ + Find attribute. An X509Name object has the following attributes: + countryName (alias C), stateOrProvince (alias ST), locality (alias L), + organization (alias O), organizationalUnit (alias OU), commonName (alias + CN) and more... + """ + nid = _lib.OBJ_txt2nid(_byte_string(name)) + if nid == _lib.NID_undef: + # This is a bit weird. OBJ_txt2nid indicated failure, but it seems + # a lower level function, a2d_ASN1_OBJECT, also feels the need to + # push something onto the error queue. If we don't clean that up + # now, someone else will bump into it later and be quite confused. + # See lp#314814. + try: + _raise_current_error() + except Error: + pass + return super(X509Name, self).__getattr__(name) + + entry_index = _lib.X509_NAME_get_index_by_NID(self._name, nid, -1) + if entry_index == -1: + return None + + entry = _lib.X509_NAME_get_entry(self._name, entry_index) + data = _lib.X509_NAME_ENTRY_get_data(entry) + + result_buffer = _ffi.new("unsigned char**") + data_length = _lib.ASN1_STRING_to_UTF8(result_buffer, data) + if data_length < 0: + # TODO: This is untested. 
+ _raise_current_error() + + try: + result = _ffi.buffer(result_buffer[0], data_length)[:].decode('utf-8') + finally: + # XXX untested + _lib.OPENSSL_free(result_buffer[0]) + return result + + + def _cmp(op): + def f(self, other): + if not isinstance(other, X509Name): + return NotImplemented + result = _lib.X509_NAME_cmp(self._name, other._name) + return op(result, 0) + return f + + __eq__ = _cmp(__eq__) + __ne__ = _cmp(__ne__) + + __lt__ = _cmp(__lt__) + __le__ = _cmp(__le__) + + __gt__ = _cmp(__gt__) + __ge__ = _cmp(__ge__) + + def __repr__(self): + """ + String representation of an X509Name + """ + result_buffer = _ffi.new("char[]", 512); + format_result = _lib.X509_NAME_oneline( + self._name, result_buffer, len(result_buffer)) + + if format_result == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + + return "" % ( + _native(_ffi.string(result_buffer)),) + + + def hash(self): + """ + Return the hash value of this name + + :return: None + """ + return _lib.X509_NAME_hash(self._name) + + + def der(self): + """ + Return the DER encoding of this name + + :return: A :py:class:`bytes` instance giving the DER encoded form of + this name. + """ + result_buffer = _ffi.new('unsigned char**') + encode_result = _lib.i2d_X509_NAME(self._name, result_buffer) + if encode_result < 0: + # TODO: This is untested. + _raise_current_error() + + string_result = _ffi.buffer(result_buffer[0], encode_result)[:] + _lib.OPENSSL_free(result_buffer[0]) + return string_result + + + def get_components(self): + """ + Returns the split-up components of this name. + + :return: List of tuples (name, value). + """ + result = [] + for i in range(_lib.X509_NAME_entry_count(self._name)): + ent = _lib.X509_NAME_get_entry(self._name, i) + + fname = _lib.X509_NAME_ENTRY_get_object(ent) + fval = _lib.X509_NAME_ENTRY_get_data(ent) + + nid = _lib.OBJ_obj2nid(fname) + name = _lib.OBJ_nid2sn(nid) + + result.append(( + _ffi.string(name), + _ffi.string( + _lib.ASN1_STRING_data(fval), + _lib.ASN1_STRING_length(fval)))) + + return result +X509NameType = X509Name + + +class X509Extension(object): + def __init__(self, type_name, critical, value, subject=None, issuer=None): + """ + :param typename: The name of the extension to create. + :type typename: :py:data:`str` + + :param critical: A flag indicating whether this is a critical extension. + + :param value: The value of the extension. + :type value: :py:data:`str` + + :param subject: Optional X509 cert to use as subject. + :type subject: :py:class:`X509` + + :param issuer: Optional X509 cert to use as issuer. + :type issuer: :py:class:`X509` + + :return: The X509Extension object + """ + ctx = _ffi.new("X509V3_CTX*") + + # A context is necessary for any extension which uses the r2i conversion + # method. That is, X509V3_EXT_nconf may segfault if passed a NULL ctx. + # Start off by initializing most of the fields to NULL. + _lib.X509V3_set_ctx(ctx, _ffi.NULL, _ffi.NULL, _ffi.NULL, _ffi.NULL, 0) + + # We have no configuration database - but perhaps we should (some + # extensions may require it). + _lib.X509V3_set_ctx_nodb(ctx) + + # Initialize the subject and issuer, if appropriate. ctx is a local, + # and as far as I can tell none of the X509V3_* APIs invoked here steal + # any references, so no need to mess with reference counts or duplicates. 
+ if issuer is not None: + if not isinstance(issuer, X509): + raise TypeError("issuer must be an X509 instance") + ctx.issuer_cert = issuer._x509 + if subject is not None: + if not isinstance(subject, X509): + raise TypeError("subject must be an X509 instance") + ctx.subject_cert = subject._x509 + + if critical: + # There are other OpenSSL APIs which would let us pass in critical + # separately, but they're harder to use, and since value is already + # a pile of crappy junk smuggling a ton of utterly important + # structured data, what's the point of trying to avoid nasty stuff + # with strings? (However, X509V3_EXT_i2d in particular seems like it + # would be a better API to invoke. I do not know where to get the + # ext_struc it desires for its last parameter, though.) + value = b"critical," + value + + extension = _lib.X509V3_EXT_nconf(_ffi.NULL, ctx, type_name, value) + if extension == _ffi.NULL: + _raise_current_error() + self._extension = _ffi.gc(extension, _lib.X509_EXTENSION_free) + + + @property + def _nid(self): + return _lib.OBJ_obj2nid(self._extension.object) + + _prefixes = { + _lib.GEN_EMAIL: "email", + _lib.GEN_DNS: "DNS", + _lib.GEN_URI: "URI", + } + + def _subjectAltNameString(self): + method = _lib.X509V3_EXT_get(self._extension) + if method == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + payload = self._extension.value.data + length = self._extension.value.length + + payloadptr = _ffi.new("unsigned char**") + payloadptr[0] = payload + + if method.it != _ffi.NULL: + ptr = _lib.ASN1_ITEM_ptr(method.it) + data = _lib.ASN1_item_d2i(_ffi.NULL, payloadptr, length, ptr) + names = _ffi.cast("GENERAL_NAMES*", data) + else: + names = _ffi.cast( + "GENERAL_NAMES*", + method.d2i(_ffi.NULL, payloadptr, length)) + + parts = [] + for i in range(_lib.sk_GENERAL_NAME_num(names)): + name = _lib.sk_GENERAL_NAME_value(names, i) + try: + label = self._prefixes[name.type] + except KeyError: + bio = _new_mem_buf() + _lib.GENERAL_NAME_print(bio, name) + parts.append(_native(_bio_to_string(bio))) + else: + value = _native( + _ffi.buffer(name.d.ia5.data, name.d.ia5.length)[:]) + parts.append(label + ":" + value) + return ", ".join(parts) + + + def __str__(self): + """ + :return: a nice text representation of the extension + """ + if _lib.NID_subject_alt_name == self._nid: + return self._subjectAltNameString() + + bio = _new_mem_buf() + print_result = _lib.X509V3_EXT_print(bio, self._extension, 0, 0) + if not print_result: + # TODO: This is untested. + _raise_current_error() + + return _native(_bio_to_string(bio)) + + + def get_critical(self): + """ + Returns the critical field of the X509Extension + + :return: The critical field. + """ + return _lib.X509_EXTENSION_get_critical(self._extension) + + + def get_short_name(self): + """ + Returns the short version of the type name of the X509Extension + + :return: The short type name. + """ + obj = _lib.X509_EXTENSION_get_object(self._extension) + nid = _lib.OBJ_obj2nid(obj) + return _ffi.string(_lib.OBJ_nid2sn(nid)) + + + def get_data(self): + """ + Returns the data of the X509Extension + + :return: A :py:data:`str` giving the X509Extension's ASN.1 encoded data. 
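+
+        A minimal runnable sketch using the constructor of this class::
+
+            from OpenSSL import crypto
+            ext = crypto.X509Extension(b"basicConstraints", True, b"CA:FALSE")
+            ext.get_short_name()  # b'basicConstraints'
+            ext.get_data()        # the ASN.1 (DER) encoded extension value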
+ """ + octet_result = _lib.X509_EXTENSION_get_data(self._extension) + string_result = _ffi.cast('ASN1_STRING*', octet_result) + char_result = _lib.ASN1_STRING_data(string_result) + result_length = _lib.ASN1_STRING_length(string_result) + return _ffi.buffer(char_result, result_length)[:] + +X509ExtensionType = X509Extension + + +class X509Req(object): + def __init__(self): + req = _lib.X509_REQ_new() + self._req = _ffi.gc(req, _lib.X509_REQ_free) + + + def set_pubkey(self, pkey): + """ + Set the public key of the certificate request + + :param pkey: The public key to use + :return: None + """ + set_result = _lib.X509_REQ_set_pubkey(self._req, pkey._pkey) + if not set_result: + # TODO: This is untested. + _raise_current_error() + + + def get_pubkey(self): + """ + Get the public key from the certificate request + + :return: The public key + """ + pkey = PKey.__new__(PKey) + pkey._pkey = _lib.X509_REQ_get_pubkey(self._req) + if pkey._pkey == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + pkey._pkey = _ffi.gc(pkey._pkey, _lib.EVP_PKEY_free) + pkey._only_public = True + return pkey + + + def set_version(self, version): + """ + Set the version subfield (RFC 2459, section 4.1.2.1) of the certificate + request. + + :param version: The version number + :return: None + """ + set_result = _lib.X509_REQ_set_version(self._req, version) + if not set_result: + _raise_current_error() + + + def get_version(self): + """ + Get the version subfield (RFC 2459, section 4.1.2.1) of the certificate + request. + + :return: an integer giving the value of the version subfield + """ + return _lib.X509_REQ_get_version(self._req) + + + def get_subject(self): + """ + Create an X509Name object for the subject of the certificate request + + :return: An X509Name object + """ + name = X509Name.__new__(X509Name) + name._name = _lib.X509_REQ_get_subject_name(self._req) + if name._name == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + + # The name is owned by the X509Req structure. As long as the X509Name + # Python object is alive, keep the X509Req Python object alive. + name._owner = self + + return name + + + def add_extensions(self, extensions): + """ + Add extensions to the request. + + :param extensions: a sequence of X509Extension objects + :return: None + """ + stack = _lib.sk_X509_EXTENSION_new_null() + if stack == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + + stack = _ffi.gc(stack, _lib.sk_X509_EXTENSION_free) + + for ext in extensions: + if not isinstance(ext, X509Extension): + raise ValueError("One of the elements is not an X509Extension") + + # TODO push can fail (here and elsewhere) + _lib.sk_X509_EXTENSION_push(stack, ext._extension) + + add_result = _lib.X509_REQ_add_extensions(self._req, stack) + if not add_result: + # TODO: This is untested. + _raise_current_error() + + + def get_extensions(self): + """ + Get extensions to the request. + + :return: A :py:class:`list` of :py:class:`X509Extension` objects. 
+ """ + exts = [] + native_exts_obj = _lib.X509_REQ_get_extensions(self._req) + for i in range(_lib.sk_X509_EXTENSION_num(native_exts_obj)): + ext = X509Extension.__new__(X509Extension) + ext._extension = _lib.sk_X509_EXTENSION_value(native_exts_obj, i) + exts.append(ext) + return exts + + + def sign(self, pkey, digest): + """ + Sign the certificate request using the supplied key and digest + + :param pkey: The key to sign with + :param digest: The message digest to use + :return: None + """ + if pkey._only_public: + raise ValueError("Key has only public part") + + if not pkey._initialized: + raise ValueError("Key is uninitialized") + + digest_obj = _lib.EVP_get_digestbyname(_byte_string(digest)) + if digest_obj == _ffi.NULL: + raise ValueError("No such digest method") + + sign_result = _lib.X509_REQ_sign(self._req, pkey._pkey, digest_obj) + if not sign_result: + # TODO: This is untested. + _raise_current_error() + + + def verify(self, pkey): + """ + Verifies a certificate request using the supplied public key + + :param key: a public key + :return: True if the signature is correct. + + :raise OpenSSL.crypto.Error: If the signature is invalid or there is a + problem verifying the signature. + """ + if not isinstance(pkey, PKey): + raise TypeError("pkey must be a PKey instance") + + result = _lib.X509_REQ_verify(self._req, pkey._pkey) + if result <= 0: + _raise_current_error() + + return result + + +X509ReqType = X509Req + + + +class X509(object): + def __init__(self): + # TODO Allocation failure? And why not __new__ instead of __init__? + x509 = _lib.X509_new() + self._x509 = _ffi.gc(x509, _lib.X509_free) + + + def set_version(self, version): + """ + Set version number of the certificate + + :param version: The version number + :type version: :py:class:`int` + + :return: None + """ + if not isinstance(version, int): + raise TypeError("version must be an integer") + + _lib.X509_set_version(self._x509, version) + + + def get_version(self): + """ + Return version number of the certificate + + :return: Version number as a Python integer + """ + return _lib.X509_get_version(self._x509) + + + def get_pubkey(self): + """ + Get the public key of the certificate + + :return: The public key + """ + pkey = PKey.__new__(PKey) + pkey._pkey = _lib.X509_get_pubkey(self._x509) + if pkey._pkey == _ffi.NULL: + _raise_current_error() + pkey._pkey = _ffi.gc(pkey._pkey, _lib.EVP_PKEY_free) + pkey._only_public = True + return pkey + + + def set_pubkey(self, pkey): + """ + Set the public key of the certificate + + :param pkey: The public key + + :return: None + """ + if not isinstance(pkey, PKey): + raise TypeError("pkey must be a PKey instance") + + set_result = _lib.X509_set_pubkey(self._x509, pkey._pkey) + if not set_result: + _raise_current_error() + + + def sign(self, pkey, digest): + """ + Sign the certificate using the supplied key and digest + + :param pkey: The key to sign with + :param digest: The message digest to use + :return: None + """ + if not isinstance(pkey, PKey): + raise TypeError("pkey must be a PKey instance") + + if pkey._only_public: + raise ValueError("Key only has public part") + + if not pkey._initialized: + raise ValueError("Key is uninitialized") + + evp_md = _lib.EVP_get_digestbyname(_byte_string(digest)) + if evp_md == _ffi.NULL: + raise ValueError("No such digest method") + + sign_result = _lib.X509_sign(self._x509, pkey._pkey, evp_md) + if not sign_result: + _raise_current_error() + + + def get_signature_algorithm(self): + """ + Retrieve the signature algorithm used in the 
certificate + + :return: A byte string giving the name of the signature algorithm used in + the certificate. + :raise ValueError: If the signature algorithm is undefined. + """ + alg = self._x509.cert_info.signature.algorithm + nid = _lib.OBJ_obj2nid(alg) + if nid == _lib.NID_undef: + raise ValueError("Undefined signature algorithm") + return _ffi.string(_lib.OBJ_nid2ln(nid)) + + + def digest(self, digest_name): + """ + Return the digest of the X509 object. + + :param digest_name: The name of the digest algorithm to use. + :type digest_name: :py:class:`bytes` + + :return: The digest of the object + """ + digest = _lib.EVP_get_digestbyname(_byte_string(digest_name)) + if digest == _ffi.NULL: + raise ValueError("No such digest method") + + result_buffer = _ffi.new("char[]", _lib.EVP_MAX_MD_SIZE) + result_length = _ffi.new("unsigned int[]", 1) + result_length[0] = len(result_buffer) + + digest_result = _lib.X509_digest( + self._x509, digest, result_buffer, result_length) + + if not digest_result: + # TODO: This is untested. + _raise_current_error() + + return b":".join([ + b16encode(ch).upper() for ch + in _ffi.buffer(result_buffer, result_length[0])]) + + + def subject_name_hash(self): + """ + Return the hash of the X509 subject. + + :return: The hash of the subject. + """ + return _lib.X509_subject_name_hash(self._x509) + + + def set_serial_number(self, serial): + """ + Set serial number of the certificate + + :param serial: The serial number + :type serial: :py:class:`int` + + :return: None + """ + if not isinstance(serial, _integer_types): + raise TypeError("serial must be an integer") + + hex_serial = hex(serial)[2:] + if not isinstance(hex_serial, bytes): + hex_serial = hex_serial.encode('ascii') + + bignum_serial = _ffi.new("BIGNUM**") + + # BN_hex2bn stores the result in &bignum. Unless it doesn't feel like + # it. If bignum is still NULL after this call, then the return value is + # actually the result. I hope. -exarkun + small_serial = _lib.BN_hex2bn(bignum_serial, hex_serial) + + if bignum_serial[0] == _ffi.NULL: + set_result = _lib.ASN1_INTEGER_set( + _lib.X509_get_serialNumber(self._x509), small_serial) + if set_result: + # TODO Not tested + _raise_current_error() + else: + asn1_serial = _lib.BN_to_ASN1_INTEGER(bignum_serial[0], _ffi.NULL) + _lib.BN_free(bignum_serial[0]) + if asn1_serial == _ffi.NULL: + # TODO Not tested + _raise_current_error() + asn1_serial = _ffi.gc(asn1_serial, _lib.ASN1_INTEGER_free) + set_result = _lib.X509_set_serialNumber(self._x509, asn1_serial) + if not set_result: + # TODO Not tested + _raise_current_error() + + + def get_serial_number(self): + """ + Return serial number of the certificate + + :return: Serial number as a Python integer + """ + asn1_serial = _lib.X509_get_serialNumber(self._x509) + bignum_serial = _lib.ASN1_INTEGER_to_BN(asn1_serial, _ffi.NULL) + try: + hex_serial = _lib.BN_bn2hex(bignum_serial) + try: + hexstring_serial = _ffi.string(hex_serial) + serial = int(hexstring_serial, 16) + return serial + finally: + _lib.OPENSSL_free(hex_serial) + finally: + _lib.BN_free(bignum_serial) + + + def gmtime_adj_notAfter(self, amount): + """ + Adjust the time stamp for when the certificate stops being valid + + :param amount: The number of seconds by which to adjust the ending + validity time. 
+ :type amount: :py:class:`int` + + :return: None + """ + if not isinstance(amount, int): + raise TypeError("amount must be an integer") + + notAfter = _lib.X509_get_notAfter(self._x509) + _lib.X509_gmtime_adj(notAfter, amount) + + + def gmtime_adj_notBefore(self, amount): + """ + Change the timestamp for when the certificate starts being valid to the current + time plus an offset. + + :param amount: The number of seconds by which to adjust the starting validity + time. + :return: None + """ + if not isinstance(amount, int): + raise TypeError("amount must be an integer") + + notBefore = _lib.X509_get_notBefore(self._x509) + _lib.X509_gmtime_adj(notBefore, amount) + + + def has_expired(self): + """ + Check whether the certificate has expired. + + :return: True if the certificate has expired, false otherwise + """ + now = int(time()) + notAfter = _lib.X509_get_notAfter(self._x509) + return _lib.ASN1_UTCTIME_cmp_time_t( + _ffi.cast('ASN1_UTCTIME*', notAfter), now) < 0 + + + def _get_boundary_time(self, which): + return _get_asn1_time(which(self._x509)) + + + def get_notBefore(self): + """ + Retrieve the time stamp for when the certificate starts being valid + + :return: A string giving the timestamp, in the format:: + + YYYYMMDDhhmmssZ + YYYYMMDDhhmmss+hhmm + YYYYMMDDhhmmss-hhmm + + or None if there is no value set. + """ + return self._get_boundary_time(_lib.X509_get_notBefore) + + + def _set_boundary_time(self, which, when): + return _set_asn1_time(which(self._x509), when) + + + def set_notBefore(self, when): + """ + Set the time stamp for when the certificate starts being valid + + :param when: A string giving the timestamp, in the format: + + YYYYMMDDhhmmssZ + YYYYMMDDhhmmss+hhmm + YYYYMMDDhhmmss-hhmm + :type when: :py:class:`bytes` + + :return: None + """ + return self._set_boundary_time(_lib.X509_get_notBefore, when) + + + def get_notAfter(self): + """ + Retrieve the time stamp for when the certificate stops being valid + + :return: A string giving the timestamp, in the format:: + + YYYYMMDDhhmmssZ + YYYYMMDDhhmmss+hhmm + YYYYMMDDhhmmss-hhmm + + or None if there is no value set. + """ + return self._get_boundary_time(_lib.X509_get_notAfter) + + + def set_notAfter(self, when): + """ + Set the time stamp for when the certificate stops being valid + + :param when: A string giving the timestamp, in the format: + + YYYYMMDDhhmmssZ + YYYYMMDDhhmmss+hhmm + YYYYMMDDhhmmss-hhmm + :type when: :py:class:`bytes` + + :return: None + """ + return self._set_boundary_time(_lib.X509_get_notAfter, when) + + + def _get_name(self, which): + name = X509Name.__new__(X509Name) + name._name = which(self._x509) + if name._name == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + + # The name is owned by the X509 structure. As long as the X509Name + # Python object is alive, keep the X509 Python object alive. + name._owner = self + + return name + + + def _set_name(self, which, name): + if not isinstance(name, X509Name): + raise TypeError("name must be an X509Name") + set_result = which(self._x509, name._name) + if not set_result: + # TODO: This is untested. 
+ _raise_current_error() + + + def get_issuer(self): + """ + Create an X509Name object for the issuer of the certificate + + :return: An X509Name object + """ + return self._get_name(_lib.X509_get_issuer_name) + + + def set_issuer(self, issuer): + """ + Set the issuer of the certificate + + :param issuer: The issuer name + :type issuer: :py:class:`X509Name` + + :return: None + """ + return self._set_name(_lib.X509_set_issuer_name, issuer) + + + def get_subject(self): + """ + Create an X509Name object for the subject of the certificate + + :return: An X509Name object + """ + return self._get_name(_lib.X509_get_subject_name) + + + def set_subject(self, subject): + """ + Set the subject of the certificate + + :param subject: The subject name + :type subject: :py:class:`X509Name` + :return: None + """ + return self._set_name(_lib.X509_set_subject_name, subject) + + + def get_extension_count(self): + """ + Get the number of extensions on the certificate. + + :return: The number of extensions as an integer. + """ + return _lib.X509_get_ext_count(self._x509) + + + def add_extensions(self, extensions): + """ + Add extensions to the certificate. + + :param extensions: a sequence of X509Extension objects + :return: None + """ + for ext in extensions: + if not isinstance(ext, X509Extension): + raise ValueError("One of the elements is not an X509Extension") + + add_result = _lib.X509_add_ext(self._x509, ext._extension, -1) + if not add_result: + _raise_current_error() + + + def get_extension(self, index): + """ + Get a specific extension of the certificate by index. + + :param index: The index of the extension to retrieve. + :return: The X509Extension object at the specified index. + """ + ext = X509Extension.__new__(X509Extension) + ext._extension = _lib.X509_get_ext(self._x509, index) + if ext._extension == _ffi.NULL: + raise IndexError("extension index out of bounds") + + extension = _lib.X509_EXTENSION_dup(ext._extension) + ext._extension = _ffi.gc(extension, _lib.X509_EXTENSION_free) + return ext + +X509Type = X509 + + + +class X509Store(object): + def __init__(self): + store = _lib.X509_STORE_new() + self._store = _ffi.gc(store, _lib.X509_STORE_free) + + + def add_cert(self, cert): + if not isinstance(cert, X509): + raise TypeError() + + result = _lib.X509_STORE_add_cert(self._store, cert._x509) + if not result: + _raise_current_error() + + +X509StoreType = X509Store + + +class X509StoreContextError(Exception): + """ + An error occurred while verifying a certificate using + `OpenSSL.X509StoreContext.verify_certificate`. + + :ivar certificate: The certificate which caused verificate failure. + :type cert: :class:`X509` + + """ + def __init__(self, message, certificate): + super(X509StoreContextError, self).__init__(message) + self.certificate = certificate + + +class X509StoreContext(object): + """ + An X.509 store context. + + An :py:class:`X509StoreContext` is used to define some of the criteria for + certificate verification. The information encapsulated in this object + includes, but is not limited to, a set of trusted certificates, + verification parameters, and revoked certificates. + + Of these, only the set of trusted certificates is currently exposed. + + :ivar _store_ctx: The underlying X509_STORE_CTX structure used by this + instance. It is dynamically allocated and automatically garbage + collected. + + :ivar _store: See the ``store`` ``__init__`` parameter. + + :ivar _cert: See the ``certificate`` ``__init__`` parameter. 
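+
+    A minimal sketch, where ``ca.pem`` and ``server.pem`` are hypothetical
+    PEM files holding the trusted CA certificate and the certificate to be
+    checked::
+
+        from OpenSSL import crypto
+        with open("ca.pem", "rb") as f:
+            ca = crypto.load_certificate(crypto.FILETYPE_PEM, f.read())
+        with open("server.pem", "rb") as f:
+            leaf = crypto.load_certificate(crypto.FILETYPE_PEM, f.read())
+        store = crypto.X509Store()
+        store.add_cert(ca)
+        # raises X509StoreContextError if the chain cannot be verified
+        crypto.X509StoreContext(store, leaf).verify_certificate()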
+ """ + + def __init__(self, store, certificate): + """ + :param X509Store store: The certificates which will be trusted for the + purposes of any verifications. + + :param X509 certificate: The certificate to be verified. + """ + store_ctx = _lib.X509_STORE_CTX_new() + self._store_ctx = _ffi.gc(store_ctx, _lib.X509_STORE_CTX_free) + self._store = store + self._cert = certificate + # Make the store context available for use after instantiating this + # class by initializing it now. Per testing, subsequent calls to + # :py:meth:`_init` have no adverse affect. + self._init() + + + def _init(self): + """ + Set up the store context for a subsequent verification operation. + """ + ret = _lib.X509_STORE_CTX_init(self._store_ctx, self._store._store, self._cert._x509, _ffi.NULL) + if ret <= 0: + _raise_current_error() + + + def _cleanup(self): + """ + Internally cleans up the store context. + + The store context can then be reused with a new call to + :py:meth:`_init`. + """ + _lib.X509_STORE_CTX_cleanup(self._store_ctx) + + + def _exception_from_context(self): + """ + Convert an OpenSSL native context error failure into a Python + exception. + + When a call to native OpenSSL X509_verify_cert fails, additonal information + about the failure can be obtained from the store context. + """ + errors = [ + _lib.X509_STORE_CTX_get_error(self._store_ctx), + _lib.X509_STORE_CTX_get_error_depth(self._store_ctx), + _native(_ffi.string(_lib.X509_verify_cert_error_string( + _lib.X509_STORE_CTX_get_error(self._store_ctx)))), + ] + # A context error should always be associated with a certificate, so we + # expect this call to never return :class:`None`. + _x509 = _lib.X509_STORE_CTX_get_current_cert(self._store_ctx) + _cert = _lib.X509_dup(_x509) + pycert = X509.__new__(X509) + pycert._x509 = _ffi.gc(_cert, _lib.X509_free) + return X509StoreContextError(errors, pycert) + + + def set_store(self, store): + """ + Set the context's trust store. + + :param X509Store store: The certificates which will be trusted for the + purposes of any *future* verifications. + """ + self._store = store + + + def verify_certificate(self): + """ + Verify a certificate in a context. + + :param store_ctx: The :py:class:`X509StoreContext` to verify. + :raises: Error + """ + # Always re-initialize the store context in case + # :py:meth:`verify_certificate` is called multiple times. 
+ self._init() + ret = _lib.X509_verify_cert(self._store_ctx) + self._cleanup() + if ret <= 0: + raise self._exception_from_context() + + + +def load_certificate(type, buffer): + """ + Load a certificate from a buffer + + :param type: The file type (one of FILETYPE_PEM, FILETYPE_ASN1) + + :param buffer: The buffer the certificate is stored in + :type buffer: :py:class:`bytes` + + :return: The X509 object + """ + if isinstance(buffer, _text_type): + buffer = buffer.encode("ascii") + + bio = _new_mem_buf(buffer) + + if type == FILETYPE_PEM: + x509 = _lib.PEM_read_bio_X509(bio, _ffi.NULL, _ffi.NULL, _ffi.NULL) + elif type == FILETYPE_ASN1: + x509 = _lib.d2i_X509_bio(bio, _ffi.NULL); + else: + raise ValueError( + "type argument must be FILETYPE_PEM or FILETYPE_ASN1") + + if x509 == _ffi.NULL: + _raise_current_error() + + cert = X509.__new__(X509) + cert._x509 = _ffi.gc(x509, _lib.X509_free) + return cert + + +def dump_certificate(type, cert): + """ + Dump a certificate to a buffer + + :param type: The file type (one of FILETYPE_PEM, FILETYPE_ASN1, or + FILETYPE_TEXT) + :param cert: The certificate to dump + :return: The buffer with the dumped certificate in + """ + bio = _new_mem_buf() + + if type == FILETYPE_PEM: + result_code = _lib.PEM_write_bio_X509(bio, cert._x509) + elif type == FILETYPE_ASN1: + result_code = _lib.i2d_X509_bio(bio, cert._x509) + elif type == FILETYPE_TEXT: + result_code = _lib.X509_print_ex(bio, cert._x509, 0, 0) + else: + raise ValueError( + "type argument must be FILETYPE_PEM, FILETYPE_ASN1, or " + "FILETYPE_TEXT") + + return _bio_to_string(bio) + + + +def dump_privatekey(type, pkey, cipher=None, passphrase=None): + """ + Dump a private key to a buffer + + :param type: The file type (one of FILETYPE_PEM, FILETYPE_ASN1, or + FILETYPE_TEXT) + :param pkey: The PKey to dump + :param cipher: (optional) if encrypted PEM format, the cipher to + use + :param passphrase: (optional) if encrypted PEM format, this can be either + the passphrase to use, or a callback for providing the + passphrase. + :return: The buffer with the dumped key in + :rtype: :py:data:`str` + """ + bio = _new_mem_buf() + + if cipher is not None: + if passphrase is None: + raise TypeError( + "if a value is given for cipher " + "one must also be given for passphrase") + cipher_obj = _lib.EVP_get_cipherbyname(_byte_string(cipher)) + if cipher_obj == _ffi.NULL: + raise ValueError("Invalid cipher name") + else: + cipher_obj = _ffi.NULL + + helper = _PassphraseHelper(type, passphrase) + if type == FILETYPE_PEM: + result_code = _lib.PEM_write_bio_PrivateKey( + bio, pkey._pkey, cipher_obj, _ffi.NULL, 0, + helper.callback, helper.callback_args) + helper.raise_if_problem() + elif type == FILETYPE_ASN1: + result_code = _lib.i2d_PrivateKey_bio(bio, pkey._pkey) + elif type == FILETYPE_TEXT: + rsa = _lib.EVP_PKEY_get1_RSA(pkey._pkey) + result_code = _lib.RSA_print(bio, rsa, 0) + # TODO RSA_free(rsa)? + else: + raise ValueError( + "type argument must be FILETYPE_PEM, FILETYPE_ASN1, or " + "FILETYPE_TEXT") + + if result_code == 0: + _raise_current_error() + + return _bio_to_string(bio) + + + +def _X509_REVOKED_dup(original): + copy = _lib.X509_REVOKED_new() + if copy == _ffi.NULL: + # TODO: This is untested. 
+ _raise_current_error() + + if original.serialNumber != _ffi.NULL: + _lib.ASN1_INTEGER_free(copy.serialNumber) + copy.serialNumber = _lib.ASN1_INTEGER_dup(original.serialNumber) + + if original.revocationDate != _ffi.NULL: + _lib.ASN1_TIME_free(copy.revocationDate) + copy.revocationDate = _lib.M_ASN1_TIME_dup(original.revocationDate) + + if original.extensions != _ffi.NULL: + extension_stack = _lib.sk_X509_EXTENSION_new_null() + for i in range(_lib.sk_X509_EXTENSION_num(original.extensions)): + original_ext = _lib.sk_X509_EXTENSION_value(original.extensions, i) + copy_ext = _lib.X509_EXTENSION_dup(original_ext) + _lib.sk_X509_EXTENSION_push(extension_stack, copy_ext) + copy.extensions = extension_stack + + copy.sequence = original.sequence + return copy + + + +class Revoked(object): + # http://www.openssl.org/docs/apps/x509v3_config.html#CRL_distribution_points_ + # which differs from crl_reasons of crypto/x509v3/v3_enum.c that matches + # OCSP_crl_reason_str. We use the latter, just like the command line + # program. + _crl_reasons = [ + b"unspecified", + b"keyCompromise", + b"CACompromise", + b"affiliationChanged", + b"superseded", + b"cessationOfOperation", + b"certificateHold", + # b"removeFromCRL", + ] + + def __init__(self): + revoked = _lib.X509_REVOKED_new() + self._revoked = _ffi.gc(revoked, _lib.X509_REVOKED_free) + + + def set_serial(self, hex_str): + """ + Set the serial number of a revoked Revoked structure + + :param hex_str: The new serial number. + :type hex_str: :py:data:`str` + :return: None + """ + bignum_serial = _ffi.gc(_lib.BN_new(), _lib.BN_free) + bignum_ptr = _ffi.new("BIGNUM**") + bignum_ptr[0] = bignum_serial + bn_result = _lib.BN_hex2bn(bignum_ptr, hex_str) + if not bn_result: + raise ValueError("bad hex string") + + asn1_serial = _ffi.gc( + _lib.BN_to_ASN1_INTEGER(bignum_serial, _ffi.NULL), + _lib.ASN1_INTEGER_free) + _lib.X509_REVOKED_set_serialNumber(self._revoked, asn1_serial) + + + def get_serial(self): + """ + Return the serial number of a Revoked structure + + :return: The serial number as a string + """ + bio = _new_mem_buf() + + result = _lib.i2a_ASN1_INTEGER(bio, self._revoked.serialNumber) + if result < 0: + # TODO: This is untested. + _raise_current_error() + + return _bio_to_string(bio) + + + def _delete_reason(self): + stack = self._revoked.extensions + for i in range(_lib.sk_X509_EXTENSION_num(stack)): + ext = _lib.sk_X509_EXTENSION_value(stack, i) + if _lib.OBJ_obj2nid(ext.object) == _lib.NID_crl_reason: + _lib.X509_EXTENSION_free(ext) + _lib.sk_X509_EXTENSION_delete(stack, i) + break + + + def set_reason(self, reason): + """ + Set the reason of a Revoked object. + + If :py:data:`reason` is :py:data:`None`, delete the reason instead. + + :param reason: The reason string. + :type reason: :py:class:`str` or :py:class:`NoneType` + :return: None + """ + if reason is None: + self._delete_reason() + elif not isinstance(reason, bytes): + raise TypeError("reason must be None or a byte string") + else: + reason = reason.lower().replace(b' ', b'') + reason_code = [r.lower() for r in self._crl_reasons].index(reason) + + new_reason_ext = _lib.ASN1_ENUMERATED_new() + if new_reason_ext == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + new_reason_ext = _ffi.gc(new_reason_ext, _lib.ASN1_ENUMERATED_free) + + set_result = _lib.ASN1_ENUMERATED_set(new_reason_ext, reason_code) + if set_result == _ffi.NULL: + # TODO: This is untested. 
+ _raise_current_error() + + self._delete_reason() + add_result = _lib.X509_REVOKED_add1_ext_i2d( + self._revoked, _lib.NID_crl_reason, new_reason_ext, 0, 0) + + if not add_result: + # TODO: This is untested. + _raise_current_error() + + + def get_reason(self): + """ + Return the reason of a Revoked object. + + :return: The reason as a string + """ + extensions = self._revoked.extensions + for i in range(_lib.sk_X509_EXTENSION_num(extensions)): + ext = _lib.sk_X509_EXTENSION_value(extensions, i) + if _lib.OBJ_obj2nid(ext.object) == _lib.NID_crl_reason: + bio = _new_mem_buf() + + print_result = _lib.X509V3_EXT_print(bio, ext, 0, 0) + if not print_result: + print_result = _lib.M_ASN1_OCTET_STRING_print(bio, ext.value) + if print_result == 0: + # TODO: This is untested. + _raise_current_error() + + return _bio_to_string(bio) + + + def all_reasons(self): + """ + Return a list of all the supported reason strings. + + :return: A list of reason strings. + """ + return self._crl_reasons[:] + + + def set_rev_date(self, when): + """ + Set the revocation timestamp + + :param when: A string giving the timestamp, in the format: + + YYYYMMDDhhmmssZ + YYYYMMDDhhmmss+hhmm + YYYYMMDDhhmmss-hhmm + + :return: None + """ + return _set_asn1_time(self._revoked.revocationDate, when) + + + def get_rev_date(self): + """ + Retrieve the revocation date + + :return: A string giving the timestamp, in the format: + + YYYYMMDDhhmmssZ + YYYYMMDDhhmmss+hhmm + YYYYMMDDhhmmss-hhmm + """ + return _get_asn1_time(self._revoked.revocationDate) + + + +class CRL(object): + def __init__(self): + """ + Create a new empty CRL object. + """ + crl = _lib.X509_CRL_new() + self._crl = _ffi.gc(crl, _lib.X509_CRL_free) + + + def get_revoked(self): + """ + Return revoked portion of the CRL structure (by value not reference). + + :return: A tuple of Revoked objects. + """ + results = [] + revoked_stack = self._crl.crl.revoked + for i in range(_lib.sk_X509_REVOKED_num(revoked_stack)): + revoked = _lib.sk_X509_REVOKED_value(revoked_stack, i) + revoked_copy = _X509_REVOKED_dup(revoked) + pyrev = Revoked.__new__(Revoked) + pyrev._revoked = _ffi.gc(revoked_copy, _lib.X509_REVOKED_free) + results.append(pyrev) + if results: + return tuple(results) + + + def add_revoked(self, revoked): + """ + Add a revoked (by value not reference) to the CRL structure + + :param revoked: The new revoked. + :type revoked: :class:`X509` + + :return: None + """ + copy = _X509_REVOKED_dup(revoked._revoked) + if copy == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + + add_result = _lib.X509_CRL_add0_revoked(self._crl, copy) + if add_result == 0: + # TODO: This is untested. + _raise_current_error() + + + def export(self, cert, key, type=FILETYPE_PEM, days=100, + digest=_UNSPECIFIED): + """ + export a CRL as a string + + :param cert: Used to sign CRL. + :type cert: :class:`X509` + + :param key: Used to sign CRL. + :type key: :class:`PKey` + + :param type: The export format, either :py:data:`FILETYPE_PEM`, + :py:data:`FILETYPE_ASN1`, or :py:data:`FILETYPE_TEXT`. + + :param int days: The number of days until the next update of this CRL. + + :param bytes digest: The name of the message digest to use (eg + ``b"sha1"``). 
+ + :return: :py:data:`bytes` + """ + if not isinstance(cert, X509): + raise TypeError("cert must be an X509 instance") + if not isinstance(key, PKey): + raise TypeError("key must be a PKey instance") + if not isinstance(type, int): + raise TypeError("type must be an integer") + + if digest is _UNSPECIFIED: + _warn( + "The default message digest (md5) is deprecated. " + "Pass the name of a message digest explicitly.", + category=DeprecationWarning, + stacklevel=2, + ) + digest = b"md5" + + digest_obj = _lib.EVP_get_digestbyname(digest) + if digest_obj == _ffi.NULL: + raise ValueError("No such digest method") + + bio = _lib.BIO_new(_lib.BIO_s_mem()) + if bio == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + + # A scratch time object to give different values to different CRL fields + sometime = _lib.ASN1_TIME_new() + if sometime == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + + _lib.X509_gmtime_adj(sometime, 0) + _lib.X509_CRL_set_lastUpdate(self._crl, sometime) + + _lib.X509_gmtime_adj(sometime, days * 24 * 60 * 60) + _lib.X509_CRL_set_nextUpdate(self._crl, sometime) + + _lib.X509_CRL_set_issuer_name(self._crl, _lib.X509_get_subject_name(cert._x509)) + + sign_result = _lib.X509_CRL_sign(self._crl, key._pkey, digest_obj) + if not sign_result: + _raise_current_error() + + if type == FILETYPE_PEM: + ret = _lib.PEM_write_bio_X509_CRL(bio, self._crl) + elif type == FILETYPE_ASN1: + ret = _lib.i2d_X509_CRL_bio(bio, self._crl) + elif type == FILETYPE_TEXT: + ret = _lib.X509_CRL_print(bio, self._crl) + else: + raise ValueError( + "type argument must be FILETYPE_PEM, FILETYPE_ASN1, or FILETYPE_TEXT") + + if not ret: + # TODO: This is untested. + _raise_current_error() + + return _bio_to_string(bio) +CRLType = CRL + + + +class PKCS7(object): + def type_is_signed(self): + """ + Check if this NID_pkcs7_signed object + + :return: True if the PKCS7 is of type signed + """ + if _lib.PKCS7_type_is_signed(self._pkcs7): + return True + return False + + + def type_is_enveloped(self): + """ + Check if this NID_pkcs7_enveloped object + + :returns: True if the PKCS7 is of type enveloped + """ + if _lib.PKCS7_type_is_enveloped(self._pkcs7): + return True + return False + + + def type_is_signedAndEnveloped(self): + """ + Check if this NID_pkcs7_signedAndEnveloped object + + :returns: True if the PKCS7 is of type signedAndEnveloped + """ + if _lib.PKCS7_type_is_signedAndEnveloped(self._pkcs7): + return True + return False + + + def type_is_data(self): + """ + Check if this NID_pkcs7_data object + + :return: True if the PKCS7 is of type data + """ + if _lib.PKCS7_type_is_data(self._pkcs7): + return True + return False + + + def get_type_name(self): + """ + Returns the type name of the PKCS7 structure + + :return: A string with the typename + """ + nid = _lib.OBJ_obj2nid(self._pkcs7.type) + string_type = _lib.OBJ_nid2sn(nid) + return _ffi.string(string_type) + +PKCS7Type = PKCS7 + + + +class PKCS12(object): + def __init__(self): + self._pkey = None + self._cert = None + self._cacerts = None + self._friendlyname = None + + + def get_certificate(self): + """ + Return certificate portion of the PKCS12 structure + + :return: X509 object containing the certificate + """ + return self._cert + + + def set_certificate(self, cert): + """ + Replace the certificate portion of the PKCS12 structure + + :param cert: The new certificate. 
+ :type cert: :py:class:`X509` or :py:data:`None` + :return: None + """ + if not isinstance(cert, X509): + raise TypeError("cert must be an X509 instance") + self._cert = cert + + + def get_privatekey(self): + """ + Return private key portion of the PKCS12 structure + + :returns: PKey object containing the private key + """ + return self._pkey + + + def set_privatekey(self, pkey): + """ + Replace or set the certificate portion of the PKCS12 structure + + :param pkey: The new private key. + :type pkey: :py:class:`PKey` + :return: None + """ + if not isinstance(pkey, PKey): + raise TypeError("pkey must be a PKey instance") + self._pkey = pkey + + + def get_ca_certificates(self): + """ + Return CA certificates within of the PKCS12 object + + :return: A newly created tuple containing the CA certificates in the chain, + if any are present, or None if no CA certificates are present. + """ + if self._cacerts is not None: + return tuple(self._cacerts) + + + def set_ca_certificates(self, cacerts): + """ + Replace or set the CA certificates within the PKCS12 object. + + :param cacerts: The new CA certificates. + :type cacerts: :py:data:`None` or an iterable of :py:class:`X509` + :return: None + """ + if cacerts is None: + self._cacerts = None + else: + cacerts = list(cacerts) + for cert in cacerts: + if not isinstance(cert, X509): + raise TypeError("iterable must only contain X509 instances") + self._cacerts = cacerts + + + def set_friendlyname(self, name): + """ + Replace or set the certificate portion of the PKCS12 structure + + :param name: The new friendly name. + :type name: :py:class:`bytes` + :return: None + """ + if name is None: + self._friendlyname = None + elif not isinstance(name, bytes): + raise TypeError("name must be a byte string or None (not %r)" % (name,)) + self._friendlyname = name + + + def get_friendlyname(self): + """ + Return friendly name portion of the PKCS12 structure + + :returns: String containing the friendlyname + """ + return self._friendlyname + + + def export(self, passphrase=None, iter=2048, maciter=1): + """ + Dump a PKCS12 object as a string. See also "man PKCS12_create". 
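+
+ A small sketch of typical use (``cert`` and ``pkey`` stand for an
+ already-loaded X509/PKey pair; the names are illustrative only)::
+
+     p12 = PKCS12()
+     p12.set_certificate(cert)
+     p12.set_privatekey(pkey)
+     p12.set_friendlyname(b"example")
+     blob = p12.export(passphrase=b"secret", iter=2048, maciter=1)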
+ + :param passphrase: used to encrypt the PKCS12 + :type passphrase: :py:data:`bytes` + + :param iter: How many times to repeat the encryption + :type iter: :py:data:`int` + + :param maciter: How many times to repeat the MAC + :type maciter: :py:data:`int` + + :return: The string containing the PKCS12 + """ + passphrase = _text_to_bytes_and_warn("passphrase", passphrase) + + if self._cacerts is None: + cacerts = _ffi.NULL + else: + cacerts = _lib.sk_X509_new_null() + cacerts = _ffi.gc(cacerts, _lib.sk_X509_free) + for cert in self._cacerts: + _lib.sk_X509_push(cacerts, cert._x509) + + if passphrase is None: + passphrase = _ffi.NULL + + friendlyname = self._friendlyname + if friendlyname is None: + friendlyname = _ffi.NULL + + if self._pkey is None: + pkey = _ffi.NULL + else: + pkey = self._pkey._pkey + + if self._cert is None: + cert = _ffi.NULL + else: + cert = self._cert._x509 + + pkcs12 = _lib.PKCS12_create( + passphrase, friendlyname, pkey, cert, cacerts, + _lib.NID_pbe_WithSHA1And3_Key_TripleDES_CBC, + _lib.NID_pbe_WithSHA1And3_Key_TripleDES_CBC, + iter, maciter, 0) + if pkcs12 == _ffi.NULL: + _raise_current_error() + pkcs12 = _ffi.gc(pkcs12, _lib.PKCS12_free) + + bio = _new_mem_buf() + _lib.i2d_PKCS12_bio(bio, pkcs12) + return _bio_to_string(bio) + +PKCS12Type = PKCS12 + + + +class NetscapeSPKI(object): + def __init__(self): + spki = _lib.NETSCAPE_SPKI_new() + self._spki = _ffi.gc(spki, _lib.NETSCAPE_SPKI_free) + + + def sign(self, pkey, digest): + """ + Sign the certificate request using the supplied key and digest + + :param pkey: The key to sign with + :param digest: The message digest to use + :return: None + """ + if pkey._only_public: + raise ValueError("Key has only public part") + + if not pkey._initialized: + raise ValueError("Key is uninitialized") + + digest_obj = _lib.EVP_get_digestbyname(_byte_string(digest)) + if digest_obj == _ffi.NULL: + raise ValueError("No such digest method") + + sign_result = _lib.NETSCAPE_SPKI_sign(self._spki, pkey._pkey, digest_obj) + if not sign_result: + # TODO: This is untested. + _raise_current_error() + + + def verify(self, key): + """ + Verifies a certificate request using the supplied public key + + :param key: a public key + :return: True if the signature is correct. + :raise OpenSSL.crypto.Error: If the signature is invalid or there is a + problem verifying the signature. + """ + answer = _lib.NETSCAPE_SPKI_verify(self._spki, key._pkey) + if answer <= 0: + _raise_current_error() + return True + + + def b64_encode(self): + """ + Generate a base64 encoded string from an SPKI + + :return: The base64 encoded string + """ + encoded = _lib.NETSCAPE_SPKI_b64_encode(self._spki) + result = _ffi.string(encoded) + _lib.CRYPTO_free(encoded) + return result + + + def get_pubkey(self): + """ + Get the public key of the certificate + + :return: The public key + """ + pkey = PKey.__new__(PKey) + pkey._pkey = _lib.NETSCAPE_SPKI_get_pubkey(self._spki) + if pkey._pkey == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + pkey._pkey = _ffi.gc(pkey._pkey, _lib.EVP_PKEY_free) + pkey._only_public = True + return pkey + + + def set_pubkey(self, pkey): + """ + Set the public key of the certificate + + :param pkey: The public key + :return: None + """ + set_result = _lib.NETSCAPE_SPKI_set_pubkey(self._spki, pkey._pkey) + if not set_result: + # TODO: This is untested. 
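+ # A zero return from NETSCAPE_SPKI_set_pubkey signals failure, so surface
+ # it as an OpenSSL error.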
+ _raise_current_error() +NetscapeSPKIType = NetscapeSPKI + + +class _PassphraseHelper(object): + def __init__(self, type, passphrase, more_args=False, truncate=False): + if type != FILETYPE_PEM and passphrase is not None: + raise ValueError("only FILETYPE_PEM key format supports encryption") + self._passphrase = passphrase + self._more_args = more_args + self._truncate = truncate + self._problems = [] + + + @property + def callback(self): + if self._passphrase is None: + return _ffi.NULL + elif isinstance(self._passphrase, bytes): + return _ffi.NULL + elif callable(self._passphrase): + return _ffi.callback("pem_password_cb", self._read_passphrase) + else: + raise TypeError("Last argument must be string or callable") + + + @property + def callback_args(self): + if self._passphrase is None: + return _ffi.NULL + elif isinstance(self._passphrase, bytes): + return self._passphrase + elif callable(self._passphrase): + return _ffi.NULL + else: + raise TypeError("Last argument must be string or callable") + + + def raise_if_problem(self, exceptionType=Error): + try: + _exception_from_error_queue(exceptionType) + except exceptionType as e: + from_queue = e + if self._problems: + raise self._problems[0] + return from_queue + + + def _read_passphrase(self, buf, size, rwflag, userdata): + try: + if self._more_args: + result = self._passphrase(size, rwflag, userdata) + else: + result = self._passphrase(rwflag) + if not isinstance(result, bytes): + raise ValueError("String expected") + if len(result) > size: + if self._truncate: + result = result[:size] + else: + raise ValueError("passphrase returned by callback is too long") + for i in range(len(result)): + buf[i] = result[i:i + 1] + return len(result) + except Exception as e: + self._problems.append(e) + return 0 + + + +def load_privatekey(type, buffer, passphrase=None): + """ + Load a private key from a buffer + + :param type: The file type (one of FILETYPE_PEM, FILETYPE_ASN1) + :param buffer: The buffer the key is stored in + :param passphrase: (optional) if encrypted PEM format, this can be + either the passphrase to use, or a callback for + providing the passphrase. + + :return: The PKey object + """ + if isinstance(buffer, _text_type): + buffer = buffer.encode("ascii") + + bio = _new_mem_buf(buffer) + + helper = _PassphraseHelper(type, passphrase) + if type == FILETYPE_PEM: + evp_pkey = _lib.PEM_read_bio_PrivateKey( + bio, _ffi.NULL, helper.callback, helper.callback_args) + helper.raise_if_problem() + elif type == FILETYPE_ASN1: + evp_pkey = _lib.d2i_PrivateKey_bio(bio, _ffi.NULL) + else: + raise ValueError("type argument must be FILETYPE_PEM or FILETYPE_ASN1") + + if evp_pkey == _ffi.NULL: + _raise_current_error() + + pkey = PKey.__new__(PKey) + pkey._pkey = _ffi.gc(evp_pkey, _lib.EVP_PKEY_free) + return pkey + + + +def dump_certificate_request(type, req): + """ + Dump a certificate request to a buffer + + :param type: The file type (one of FILETYPE_PEM, FILETYPE_ASN1) + :param req: The certificate request to dump + :return: The buffer with the dumped certificate request in + """ + bio = _new_mem_buf() + + if type == FILETYPE_PEM: + result_code = _lib.PEM_write_bio_X509_REQ(bio, req._req) + elif type == FILETYPE_ASN1: + result_code = _lib.i2d_X509_REQ_bio(bio, req._req) + elif type == FILETYPE_TEXT: + result_code = _lib.X509_REQ_print_ex(bio, req._req, 0, 0) + else: + raise ValueError("type argument must be FILETYPE_PEM, FILETYPE_ASN1, or FILETYPE_TEXT") + + if result_code == 0: + # TODO: This is untested. 
+ _raise_current_error() + + return _bio_to_string(bio) + + + +def load_certificate_request(type, buffer): + """ + Load a certificate request from a buffer + + :param type: The file type (one of FILETYPE_PEM, FILETYPE_ASN1) + :param buffer: The buffer the certificate request is stored in + :return: The X509Req object + """ + if isinstance(buffer, _text_type): + buffer = buffer.encode("ascii") + + bio = _new_mem_buf(buffer) + + if type == FILETYPE_PEM: + req = _lib.PEM_read_bio_X509_REQ(bio, _ffi.NULL, _ffi.NULL, _ffi.NULL) + elif type == FILETYPE_ASN1: + req = _lib.d2i_X509_REQ_bio(bio, _ffi.NULL) + else: + raise ValueError("type argument must be FILETYPE_PEM or FILETYPE_ASN1") + + if req == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + + x509req = X509Req.__new__(X509Req) + x509req._req = _ffi.gc(req, _lib.X509_REQ_free) + return x509req + + + +def sign(pkey, data, digest): + """ + Sign data with a digest + + :param pkey: Pkey to sign with + :param data: data to be signed + :param digest: message digest to use + :return: signature + """ + data = _text_to_bytes_and_warn("data", data) + + digest_obj = _lib.EVP_get_digestbyname(_byte_string(digest)) + if digest_obj == _ffi.NULL: + raise ValueError("No such digest method") + + md_ctx = _ffi.new("EVP_MD_CTX*") + md_ctx = _ffi.gc(md_ctx, _lib.EVP_MD_CTX_cleanup) + + _lib.EVP_SignInit(md_ctx, digest_obj) + _lib.EVP_SignUpdate(md_ctx, data, len(data)) + + signature_buffer = _ffi.new("unsigned char[]", 512) + signature_length = _ffi.new("unsigned int*") + signature_length[0] = len(signature_buffer) + final_result = _lib.EVP_SignFinal( + md_ctx, signature_buffer, signature_length, pkey._pkey) + + if final_result != 1: + # TODO: This is untested. + _raise_current_error() + + return _ffi.buffer(signature_buffer, signature_length[0])[:] + + + +def verify(cert, signature, data, digest): + """ + Verify a signature + + :param cert: signing certificate (X509 object) + :param signature: signature returned by sign function + :param data: data to be verified + :param digest: message digest to use + :return: None if the signature is correct, raise exception otherwise + """ + data = _text_to_bytes_and_warn("data", data) + + digest_obj = _lib.EVP_get_digestbyname(_byte_string(digest)) + if digest_obj == _ffi.NULL: + raise ValueError("No such digest method") + + pkey = _lib.X509_get_pubkey(cert._x509) + if pkey == _ffi.NULL: + # TODO: This is untested. 
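+ # A NULL result here means the certificate carries no usable public key.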
+ _raise_current_error() + pkey = _ffi.gc(pkey, _lib.EVP_PKEY_free) + + md_ctx = _ffi.new("EVP_MD_CTX*") + md_ctx = _ffi.gc(md_ctx, _lib.EVP_MD_CTX_cleanup) + + _lib.EVP_VerifyInit(md_ctx, digest_obj) + _lib.EVP_VerifyUpdate(md_ctx, data, len(data)) + verify_result = _lib.EVP_VerifyFinal(md_ctx, signature, len(signature), pkey) + + if verify_result != 1: + _raise_current_error() + + +def load_crl(type, buffer): + """ + Load a certificate revocation list from a buffer + + :param type: The file type (one of FILETYPE_PEM, FILETYPE_ASN1) + :param buffer: The buffer the CRL is stored in + + :return: The PKey object + """ + if isinstance(buffer, _text_type): + buffer = buffer.encode("ascii") + + bio = _new_mem_buf(buffer) + + if type == FILETYPE_PEM: + crl = _lib.PEM_read_bio_X509_CRL(bio, _ffi.NULL, _ffi.NULL, _ffi.NULL) + elif type == FILETYPE_ASN1: + crl = _lib.d2i_X509_CRL_bio(bio, _ffi.NULL) + else: + raise ValueError("type argument must be FILETYPE_PEM or FILETYPE_ASN1") + + if crl == _ffi.NULL: + _raise_current_error() + + result = CRL.__new__(CRL) + result._crl = crl + return result + + + +def load_pkcs7_data(type, buffer): + """ + Load pkcs7 data from a buffer + + :param type: The file type (one of FILETYPE_PEM or FILETYPE_ASN1) + :param buffer: The buffer with the pkcs7 data. + :return: The PKCS7 object + """ + if isinstance(buffer, _text_type): + buffer = buffer.encode("ascii") + + bio = _new_mem_buf(buffer) + + if type == FILETYPE_PEM: + pkcs7 = _lib.PEM_read_bio_PKCS7(bio, _ffi.NULL, _ffi.NULL, _ffi.NULL) + elif type == FILETYPE_ASN1: + pkcs7 = _lib.d2i_PKCS7_bio(bio, _ffi.NULL) + else: + # TODO: This is untested. + _raise_current_error() + raise ValueError("type argument must be FILETYPE_PEM or FILETYPE_ASN1") + + if pkcs7 == _ffi.NULL: + _raise_current_error() + + pypkcs7 = PKCS7.__new__(PKCS7) + pypkcs7._pkcs7 = _ffi.gc(pkcs7, _lib.PKCS7_free) + return pypkcs7 + + + +def load_pkcs12(buffer, passphrase=None): + """ + Load a PKCS12 object from a buffer + + :param buffer: The buffer the certificate is stored in + :param passphrase: (Optional) The password to decrypt the PKCS12 lump + :returns: The PKCS12 object + """ + passphrase = _text_to_bytes_and_warn("passphrase", passphrase) + + if isinstance(buffer, _text_type): + buffer = buffer.encode("ascii") + + bio = _new_mem_buf(buffer) + + # Use null passphrase if passphrase is None or empty string. With PKCS#12 + # password based encryption no password and a zero length password are two + # different things, but OpenSSL implementation will try both to figure out + # which one works. + if not passphrase: + passphrase = _ffi.NULL + + p12 = _lib.d2i_PKCS12_bio(bio, _ffi.NULL) + if p12 == _ffi.NULL: + _raise_current_error() + p12 = _ffi.gc(p12, _lib.PKCS12_free) + + pkey = _ffi.new("EVP_PKEY**") + cert = _ffi.new("X509**") + cacerts = _ffi.new("Cryptography_STACK_OF_X509**") + + parse_result = _lib.PKCS12_parse(p12, passphrase, pkey, cert, cacerts) + if not parse_result: + _raise_current_error() + + cacerts = _ffi.gc(cacerts[0], _lib.sk_X509_free) + + # openssl 1.0.0 sometimes leaves an X509_check_private_key error in the + # queue for no particular reason. This error isn't interesting to anyone + # outside this function. It's not even interesting to us. Get rid of it. 
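+ # Raising and immediately swallowing it below drains that stale entry so it
+ # cannot be misattributed to a later OpenSSL call.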
+ try: + _raise_current_error() + except Error: + pass + + if pkey[0] == _ffi.NULL: + pykey = None + else: + pykey = PKey.__new__(PKey) + pykey._pkey = _ffi.gc(pkey[0], _lib.EVP_PKEY_free) + + if cert[0] == _ffi.NULL: + pycert = None + friendlyname = None + else: + pycert = X509.__new__(X509) + pycert._x509 = _ffi.gc(cert[0], _lib.X509_free) + + friendlyname_length = _ffi.new("int*") + friendlyname_buffer = _lib.X509_alias_get0(cert[0], friendlyname_length) + friendlyname = _ffi.buffer(friendlyname_buffer, friendlyname_length[0])[:] + if friendlyname_buffer == _ffi.NULL: + friendlyname = None + + pycacerts = [] + for i in range(_lib.sk_X509_num(cacerts)): + pycacert = X509.__new__(X509) + pycacert._x509 = _lib.sk_X509_value(cacerts, i) + pycacerts.append(pycacert) + if not pycacerts: + pycacerts = None + + pkcs12 = PKCS12.__new__(PKCS12) + pkcs12._pkey = pykey + pkcs12._cert = pycert + pkcs12._cacerts = pycacerts + pkcs12._friendlyname = friendlyname + return pkcs12 + + +def _initialize_openssl_threads(get_ident, Lock): + import _ssl + return + + locks = list(Lock() for n in range(_lib.CRYPTO_num_locks())) + + def locking_function(mode, index, filename, line): + if mode & _lib.CRYPTO_LOCK: + locks[index].acquire() + else: + locks[index].release() + + _lib.CRYPTO_set_id_callback( + _ffi.callback("unsigned long (*)(void)", get_ident)) + + _lib.CRYPTO_set_locking_callback( + _ffi.callback( + "void (*)(int, int, const char*, int)", locking_function)) + + +try: + from thread import get_ident + from threading import Lock +except ImportError: + pass +else: + _initialize_openssl_threads(get_ident, Lock) + del get_ident, Lock + +# There are no direct unit tests for this initialization. It is tested +# indirectly since it is necessary for functions like dump_privatekey when +# using encryption. +# +# Thus OpenSSL.test.test_crypto.FunctionTests.test_dump_privatekey_passphrase +# and some other similar tests may fail without this (though they may not if +# the Python runtime has already done some initialization of the underlying +# OpenSSL library (and is linked against the same one that cryptography is +# using)). +_lib.OpenSSL_add_all_algorithms() + +# This is similar but exercised mainly by exception_from_error_queue. It calls +# both ERR_load_crypto_strings() and ERR_load_SSL_strings(). +_lib.SSL_load_error_strings() diff --git a/Lib/site-packages/OpenSSL/rand.py b/Lib/site-packages/OpenSSL/rand.py new file mode 100644 index 0000000..3adf693 --- /dev/null +++ b/Lib/site-packages/OpenSSL/rand.py @@ -0,0 +1,180 @@ +""" +PRNG management routines, thin wrappers. + +See the file RATIONALE for a short explanation of why this module was written. +""" + +from functools import partial + +from six import integer_types as _integer_types + +from OpenSSL._util import ( + ffi as _ffi, + lib as _lib, + exception_from_error_queue as _exception_from_error_queue, + path_string as _path_string) + + +class Error(Exception): + """ + An error occurred in an `OpenSSL.rand` API. + """ + +_raise_current_error = partial(_exception_from_error_queue, Error) + +_unspecified = object() + +_builtin_bytes = bytes + +def bytes(num_bytes): + """ + Get some random bytes as a string. 
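+
+ A quick usage sketch (names are illustrative)::
+
+     from OpenSSL import rand
+     if rand.status():
+         token = rand.bytes(16)   # 16 random bytes as a byte string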
+ + :param num_bytes: The number of bytes to fetch + :return: A string of random bytes + """ + if not isinstance(num_bytes, _integer_types): + raise TypeError("num_bytes must be an integer") + + if num_bytes < 0: + raise ValueError("num_bytes must not be negative") + + result_buffer = _ffi.new("char[]", num_bytes) + result_code = _lib.RAND_bytes(result_buffer, num_bytes) + if result_code == -1: + # TODO: No tests for this code path. Triggering a RAND_bytes failure + # might involve supplying a custom ENGINE? That's hard. + _raise_current_error() + + return _ffi.buffer(result_buffer)[:] + + + +def add(buffer, entropy): + """ + Add data with a given entropy to the PRNG + + :param buffer: Buffer with random data + :param entropy: The entropy (in bytes) measurement of the buffer + :return: None + """ + if not isinstance(buffer, _builtin_bytes): + raise TypeError("buffer must be a byte string") + + if not isinstance(entropy, int): + raise TypeError("entropy must be an integer") + + # TODO Nothing tests this call actually being made, or made properly. + _lib.RAND_add(buffer, len(buffer), entropy) + + + +def seed(buffer): + """ + Alias for rand_add, with entropy equal to length + + :param buffer: Buffer with random data + :return: None + """ + if not isinstance(buffer, _builtin_bytes): + raise TypeError("buffer must be a byte string") + + # TODO Nothing tests this call actually being made, or made properly. + _lib.RAND_seed(buffer, len(buffer)) + + + +def status(): + """ + Retrieve the status of the PRNG + + :return: True if the PRNG is seeded enough, false otherwise + """ + return _lib.RAND_status() + + + +def egd(path, bytes=_unspecified): + """ + Query an entropy gathering daemon (EGD) for random data and add it to the + PRNG. I haven't found any problems when the socket is missing, the function + just returns 0. + + :param path: The path to the EGD socket + :param bytes: (optional) The number of bytes to read, default is 255 + :returns: The number of bytes read (NB: a value of 0 isn't necessarily an + error, check rand.status()) + """ + if not isinstance(path, _builtin_bytes): + raise TypeError("path must be a byte string") + + if bytes is _unspecified: + bytes = 255 + elif not isinstance(bytes, int): + raise TypeError("bytes must be an integer") + + return _lib.RAND_egd_bytes(path, bytes) + + + +def cleanup(): + """ + Erase the memory used by the PRNG. + + :return: None + """ + # TODO Nothing tests this call actually being made, or made properly. + _lib.RAND_cleanup() + + + +def load_file(filename, maxbytes=_unspecified): + """ + Seed the PRNG with data from a file + + :param filename: The file to read data from (``bytes`` or ``unicode``). + :param maxbytes: (optional) The number of bytes to read, default is to read + the entire file + + :return: The number of bytes read + """ + filename = _path_string(filename) + + if maxbytes is _unspecified: + maxbytes = -1 + elif not isinstance(maxbytes, int): + raise TypeError("maxbytes must be an integer") + + return _lib.RAND_load_file(filename, maxbytes) + + + +def write_file(filename): + """ + Save PRNG state to a file + + :param filename: The file to write data to (``bytes`` or ``unicode``). + + :return: The number of bytes written + """ + filename = _path_string(filename) + return _lib.RAND_write_file(filename) + + +# TODO There are no tests for screen at all +def screen(): + """ + Add the current contents of the screen to the PRNG state. Availability: + Windows. 
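+ (If the linked OpenSSL build does not expose ``RAND_screen``, this function
+ is removed from the module at import time; see the ``getattr`` check just
+ below.)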
+ + :return: None + """ + _lib.RAND_screen() + +if getattr(_lib, 'RAND_screen', None) is None: + del screen + + +# TODO There are no tests for the RAND strings being loaded, whatever that +# means. +_lib.ERR_load_RAND_strings() diff --git a/Lib/site-packages/OpenSSL/test/__init__.py b/Lib/site-packages/OpenSSL/test/__init__.py new file mode 100644 index 0000000..9b08060 --- /dev/null +++ b/Lib/site-packages/OpenSSL/test/__init__.py @@ -0,0 +1,6 @@ +# Copyright (C) Jean-Paul Calderone +# See LICENSE for details. + +""" +Package containing unit tests for :py:mod:`OpenSSL`. +""" diff --git a/Lib/site-packages/OpenSSL/test/test_crypto.py b/Lib/site-packages/OpenSSL/test/test_crypto.py new file mode 100644 index 0000000..f6f0751 --- /dev/null +++ b/Lib/site-packages/OpenSSL/test/test_crypto.py @@ -0,0 +1,3671 @@ +# Copyright (c) Jean-Paul Calderone +# See LICENSE file for details. + +""" +Unit tests for :py:mod:`OpenSSL.crypto`. +""" + +from unittest import main +from warnings import catch_warnings, simplefilter + +import base64 +import os +import re +from subprocess import PIPE, Popen +from datetime import datetime, timedelta + +from six import u, b, binary_type, PY3 +from warnings import simplefilter +from warnings import catch_warnings + +from OpenSSL.crypto import TYPE_RSA, TYPE_DSA, Error, PKey, PKeyType +from OpenSSL.crypto import X509, X509Type, X509Name, X509NameType +from OpenSSL.crypto import X509Store, X509StoreType, X509StoreContext, X509StoreContextError +from OpenSSL.crypto import X509Req, X509ReqType +from OpenSSL.crypto import X509Extension, X509ExtensionType +from OpenSSL.crypto import load_certificate, load_privatekey +from OpenSSL.crypto import FILETYPE_PEM, FILETYPE_ASN1, FILETYPE_TEXT +from OpenSSL.crypto import dump_certificate, load_certificate_request +from OpenSSL.crypto import dump_certificate_request, dump_privatekey +from OpenSSL.crypto import PKCS7Type, load_pkcs7_data +from OpenSSL.crypto import PKCS12, PKCS12Type, load_pkcs12 +from OpenSSL.crypto import CRL, Revoked, load_crl +from OpenSSL.crypto import NetscapeSPKI, NetscapeSPKIType +from OpenSSL.crypto import ( + sign, verify, get_elliptic_curve, get_elliptic_curves) +from OpenSSL.test.util import ( + EqualityTestsMixin, TestCase, WARNING_TYPE_EXPECTED +) +from OpenSSL._util import native, lib + +def normalize_certificate_pem(pem): + return dump_certificate(FILETYPE_PEM, load_certificate(FILETYPE_PEM, pem)) + + +def normalize_privatekey_pem(pem): + return dump_privatekey(FILETYPE_PEM, load_privatekey(FILETYPE_PEM, pem)) + + +GOOD_CIPHER = "blowfish" +BAD_CIPHER = "zippers" + +GOOD_DIGEST = "MD5" +BAD_DIGEST = "monkeys" + +root_cert_pem = b("""-----BEGIN CERTIFICATE----- +MIIC7TCCAlagAwIBAgIIPQzE4MbeufQwDQYJKoZIhvcNAQEFBQAwWDELMAkGA1UE +BhMCVVMxCzAJBgNVBAgTAklMMRAwDgYDVQQHEwdDaGljYWdvMRAwDgYDVQQKEwdU +ZXN0aW5nMRgwFgYDVQQDEw9UZXN0aW5nIFJvb3QgQ0EwIhgPMjAwOTAzMjUxMjM2 +NThaGA8yMDE3MDYxMTEyMzY1OFowWDELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAklM +MRAwDgYDVQQHEwdDaGljYWdvMRAwDgYDVQQKEwdUZXN0aW5nMRgwFgYDVQQDEw9U +ZXN0aW5nIFJvb3QgQ0EwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAPmaQumL +urpE527uSEHdL1pqcDRmWzu+98Y6YHzT/J7KWEamyMCNZ6fRW1JCR782UQ8a07fy +2xXsKy4WdKaxyG8CcatwmXvpvRQ44dSANMihHELpANTdyVp6DCysED6wkQFurHlF +1dshEaJw8b/ypDhmbVIo6Ci1xvCJqivbLFnbAgMBAAGjgbswgbgwHQYDVR0OBBYE +FINVdy1eIfFJDAkk51QJEo3IfgSuMIGIBgNVHSMEgYAwfoAUg1V3LV4h8UkMCSTn +VAkSjch+BK6hXKRaMFgxCzAJBgNVBAYTAlVTMQswCQYDVQQIEwJJTDEQMA4GA1UE +BxMHQ2hpY2FnbzEQMA4GA1UEChMHVGVzdGluZzEYMBYGA1UEAxMPVGVzdGluZyBS +b290IENBggg9DMTgxt659DAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBBQUAA4GB 
+AGGCDazMJGoWNBpc03u6+smc95dEead2KlZXBATOdFT1VesY3+nUOqZhEhTGlDMi +hkgaZnzoIq/Uamidegk4hirsCT/R+6vsKAAxNTcBjUeZjlykCJWy5ojShGftXIKY +w/njVbKMXrvc83qmTdGl3TAM0fxQIpqgcglFLveEBgzn +-----END CERTIFICATE----- +""") + +root_key_pem = b("""-----BEGIN RSA PRIVATE KEY----- +MIICXQIBAAKBgQD5mkLpi7q6ROdu7khB3S9aanA0Zls7vvfGOmB80/yeylhGpsjA +jWen0VtSQke/NlEPGtO38tsV7CsuFnSmschvAnGrcJl76b0UOOHUgDTIoRxC6QDU +3claegwsrBA+sJEBbqx5RdXbIRGicPG/8qQ4Zm1SKOgotcbwiaor2yxZ2wIDAQAB +AoGBAPCgMpmLxzwDaUmcFbTJUvlLW1hoxNNYSu2jIZm1k/hRAcE60JYwvBkgz3UB +yMEh0AtLxYe0bFk6EHah11tMUPgscbCq73snJ++8koUw+csk22G65hOs51bVb7Aa +6JBe67oLzdtvgCUFAA2qfrKzWRZzAdhUirQUZgySZk+Xq1pBAkEA/kZG0A6roTSM +BVnx7LnPfsycKUsTumorpXiylZJjTi9XtmzxhrYN6wgZlDOOwOLgSQhszGpxVoMD +u3gByT1b2QJBAPtL3mSKdvwRu/+40zaZLwvSJRxaj0mcE4BJOS6Oqs/hS1xRlrNk +PpQ7WJ4yM6ZOLnXzm2mKyxm50Mv64109FtMCQQDOqS2KkjHaLowTGVxwC0DijMfr +I9Lf8sSQk32J5VWCySWf5gGTfEnpmUa41gKTMJIbqZZLucNuDcOtzUaeWZlZAkA8 +ttXigLnCqR486JDPTi9ZscoZkZ+w7y6e/hH8t6d5Vjt48JVyfjPIaJY+km58LcN3 +6AWSeGAdtRFHVzR7oHjVAkB4hutvxiOeiIVQNBhM6RSI9aBPMI21DoX2JRoxvNW2 +cbvAhow217X9V0dVerEOKxnNYspXRrh36h7k4mQA+sDq +-----END RSA PRIVATE KEY----- +""") + +intermediate_cert_pem = b("""-----BEGIN CERTIFICATE----- +MIICVzCCAcCgAwIBAgIRAMPzhm6//0Y/g2pmnHR2C4cwDQYJKoZIhvcNAQENBQAw +WDELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAklMMRAwDgYDVQQHEwdDaGljYWdvMRAw +DgYDVQQKEwdUZXN0aW5nMRgwFgYDVQQDEw9UZXN0aW5nIFJvb3QgQ0EwHhcNMTQw +ODI4MDIwNDA4WhcNMjQwODI1MDIwNDA4WjBmMRUwEwYDVQQDEwxpbnRlcm1lZGlh +dGUxDDAKBgNVBAoTA29yZzERMA8GA1UECxMIb3JnLXVuaXQxCzAJBgNVBAYTAlVT +MQswCQYDVQQIEwJDQTESMBAGA1UEBxMJU2FuIERpZWdvMIGfMA0GCSqGSIb3DQEB +AQUAA4GNADCBiQKBgQDYcEQw5lfbEQRjr5Yy4yxAHGV0b9Al+Lmu7wLHMkZ/ZMmK +FGIbljbviiD1Nz97Oh2cpB91YwOXOTN2vXHq26S+A5xe8z/QJbBsyghMur88CjdT +21H2qwMa+r5dCQwEhuGIiZ3KbzB/n4DTMYI5zy4IYPv0pjxShZn4aZTCCK2IUwID +AQABoxMwETAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBDQUAA4GBAPIWSkLX +QRMApOjjyC+tMxumT5e2pMqChHmxobQK4NMdrf2VCx+cRT6EmY8sK3/Xl/X8UBQ+ +9n5zXb1ZwhW/sTWgUvmOceJ4/XVs9FkdWOOn1J0XBch9ZIiFe/s5ASIgG7fUdcUF +9mAWS6FK2ca3xIh5kIupCXOFa0dPvlw/YUFT +-----END CERTIFICATE----- +""") + +intermediate_key_pem = b("""-----BEGIN RSA PRIVATE KEY----- +MIICWwIBAAKBgQDYcEQw5lfbEQRjr5Yy4yxAHGV0b9Al+Lmu7wLHMkZ/ZMmKFGIb +ljbviiD1Nz97Oh2cpB91YwOXOTN2vXHq26S+A5xe8z/QJbBsyghMur88CjdT21H2 +qwMa+r5dCQwEhuGIiZ3KbzB/n4DTMYI5zy4IYPv0pjxShZn4aZTCCK2IUwIDAQAB +AoGAfSZVV80pSeOKHTYfbGdNY/jHdU9eFUa/33YWriXU+77EhpIItJjkRRgivIfo +rhFJpBSGmDLblaqepm8emsXMeH4+2QzOYIf0QGGP6E6scjTt1PLqdqKfVJ1a2REN +147cujNcmFJb/5VQHHMpaPTgttEjlzuww4+BCDPsVRABWrkCQQD3loH36nLoQTtf ++kQq0T6Bs9/UWkTAGo0ND81ALj0F8Ie1oeZg6RNT96RxZ3aVuFTESTv6/TbjWywO +wdzlmV1vAkEA38rTJ6PTwaJlw5OttdDzAXGPB9tDmzh9oSi7cHwQQXizYd8MBYx4 +sjHUKD3dCQnb1dxJFhd3BT5HsnkRMbVZXQJAbXduH17ZTzcIOXc9jHDXYiFVZV5D +52vV0WCbLzVCZc3jMrtSUKa8lPN5EWrdU3UchWybyG0MR5mX8S5lrF4SoQJAIyUD +DBKaSqpqONCUUx1BTFS9FYrFjzbL4+c1qHCTTPTblt8kUCrDOZjBrKAqeiTmNSum +/qUot9YUBF8m6BuGsQJATHHmdFy/fG1VLkyBp49CAa8tN3Z5r/CgTznI4DfMTf4C +NbRHn2UmYlwQBa+L5lg9phewNe8aEwpPyPLoV85U8Q== +-----END RSA PRIVATE KEY----- +""") + +server_cert_pem = b("""-----BEGIN CERTIFICATE----- +MIICKDCCAZGgAwIBAgIJAJn/HpR21r/8MA0GCSqGSIb3DQEBBQUAMFgxCzAJBgNV +BAYTAlVTMQswCQYDVQQIEwJJTDEQMA4GA1UEBxMHQ2hpY2FnbzEQMA4GA1UEChMH +VGVzdGluZzEYMBYGA1UEAxMPVGVzdGluZyBSb290IENBMCIYDzIwMDkwMzI1MTIz +NzUzWhgPMjAxNzA2MTExMjM3NTNaMBgxFjAUBgNVBAMTDWxvdmVseSBzZXJ2ZXIw +gZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAL6m+G653V0tpBC/OKl22VxOi2Cv +lK4TYu9LHSDP9uDVTe7V5D5Tl6qzFoRRx5pfmnkqT5B+W9byp2NU3FC5hLm5zSAr +b45meUhjEJ/ifkZgbNUjHdBIGP9MAQUHZa5WKdkGIJvGAvs8UzUqlr4TBWQIB24+ 
+lJ+Ukk/CRgasrYwdAgMBAAGjNjA0MB0GA1UdDgQWBBS4kC7Ij0W1TZXZqXQFAM2e +gKEG2DATBgNVHSUEDDAKBggrBgEFBQcDATANBgkqhkiG9w0BAQUFAAOBgQBh30Li +dJ+NlxIOx5343WqIBka3UbsOb2kxWrbkVCrvRapCMLCASO4FqiKWM+L0VDBprqIp +2mgpFQ6FHpoIENGvJhdEKpptQ5i7KaGhnDNTfdy3x1+h852G99f1iyj0RmbuFcM8 +uzujnS8YXWvM7DM1Ilozk4MzPug8jzFp5uhKCQ== +-----END CERTIFICATE----- +""") + +server_key_pem = normalize_privatekey_pem(b("""-----BEGIN RSA PRIVATE KEY----- +MIICWwIBAAKBgQC+pvhuud1dLaQQvzipdtlcTotgr5SuE2LvSx0gz/bg1U3u1eQ+ +U5eqsxaEUceaX5p5Kk+QflvW8qdjVNxQuYS5uc0gK2+OZnlIYxCf4n5GYGzVIx3Q +SBj/TAEFB2WuVinZBiCbxgL7PFM1Kpa+EwVkCAduPpSflJJPwkYGrK2MHQIDAQAB +AoGAbwuZ0AR6JveahBaczjfnSpiFHf+mve2UxoQdpyr6ROJ4zg/PLW5K/KXrC48G +j6f3tXMrfKHcpEoZrQWUfYBRCUsGD5DCazEhD8zlxEHahIsqpwA0WWssJA2VOLEN +j6DuV2pCFbw67rfTBkTSo32ahfXxEKev5KswZk0JIzH3ooECQQDgzS9AI89h0gs8 +Dt+1m11Rzqo3vZML7ZIyGApUzVan+a7hbc33nbGRkAXjHaUBJO31it/H6dTO+uwX +msWwNG5ZAkEA2RyFKs5xR5USTFaKLWCgpH/ydV96KPOpBND7TKQx62snDenFNNbn +FwwOhpahld+vqhYk+pfuWWUpQciE+Bu7ZQJASjfT4sQv4qbbKK/scePicnDdx9th +4e1EeB9xwb+tXXXUo/6Bor/AcUNwfiQ6Zt9PZOK9sR3lMZSsP7rMi7kzuQJABie6 +1sXXjFH7nNJvRG4S39cIxq8YRYTy68II/dlB2QzGpKxV/POCxbJ/zu0CU79tuYK7 +NaeNCFfH3aeTrX0LyQJAMBWjWmeKM2G2sCExheeQK0ROnaBC8itCECD4Jsve4nqf +r50+LF74iLXFwqysVCebPKMOpDWp/qQ1BbJQIPs7/A== +-----END RSA PRIVATE KEY----- +""")) + +intermediate_server_cert_pem = b("""-----BEGIN CERTIFICATE----- +MIICWDCCAcGgAwIBAgIRAPQFY9jfskSihdiNSNdt6GswDQYJKoZIhvcNAQENBQAw +ZjEVMBMGA1UEAxMMaW50ZXJtZWRpYXRlMQwwCgYDVQQKEwNvcmcxETAPBgNVBAsT +CG9yZy11bml0MQswCQYDVQQGEwJVUzELMAkGA1UECBMCQ0ExEjAQBgNVBAcTCVNh +biBEaWVnbzAeFw0xNDA4MjgwMjEwNDhaFw0yNDA4MjUwMjEwNDhaMG4xHTAbBgNV +BAMTFGludGVybWVkaWF0ZS1zZXJ2aWNlMQwwCgYDVQQKEwNvcmcxETAPBgNVBAsT +CG9yZy11bml0MQswCQYDVQQGEwJVUzELMAkGA1UECBMCQ0ExEjAQBgNVBAcTCVNh +biBEaWVnbzCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAqpJZygd+w1faLOr1 +iOAmbBhx5SZWcTCZ/ZjHQTJM7GuPT624QkqsixFghRKdDROwpwnAP7gMRukLqiy4 ++kRuGT5OfyGggL95i2xqA+zehjj08lSTlvGHpePJgCyTavIy5+Ljsj4DKnKyuhxm +biXTRrH83NDgixVkObTEmh/OVK0CAwEAATANBgkqhkiG9w0BAQ0FAAOBgQBa0Npw +UkzjaYEo1OUE1sTI6Mm4riTIHMak4/nswKh9hYup//WVOlr/RBSBtZ7Q/BwbjobN +3bfAtV7eSAqBsfxYXyof7G1ALANQERkq3+oyLP1iVt08W1WOUlIMPhdCF/QuCwy6 +x9MJLhUCGLJPM+O2rAPWVD9wCmvq10ALsiH3yA== +-----END CERTIFICATE----- +""") + +intermediate_server_key_pem = b("""-----BEGIN RSA PRIVATE KEY----- +MIICXAIBAAKBgQCqklnKB37DV9os6vWI4CZsGHHlJlZxMJn9mMdBMkzsa49PrbhC +SqyLEWCFEp0NE7CnCcA/uAxG6QuqLLj6RG4ZPk5/IaCAv3mLbGoD7N6GOPTyVJOW +8Yel48mALJNq8jLn4uOyPgMqcrK6HGZuJdNGsfzc0OCLFWQ5tMSaH85UrQIDAQAB +AoGAIQ594j5zna3/9WaPsTgnmhlesVctt4AAx/n827DA4ayyuHFlXUuVhtoWR5Pk +5ezj9mtYW8DyeCegABnsu2vZni/CdvU6uiS1Hv6qM1GyYDm9KWgovIP9rQCDSGaz +d57IWVGxx7ODFkm3gN5nxnSBOFVHytuW1J7FBRnEsehRroECQQDXHFOv82JuXDcz +z3+4c74IEURdOHcbycxlppmK9kFqm5lsUdydnnGW+mvwDk0APOB7Wg7vyFyr393e +dpmBDCzNAkEAyv6tVbTKUYhSjW+QhabJo896/EqQEYUmtMXxk4cQnKeR/Ao84Rkf +EqD5IykMUfUI0jJU4DGX+gWZ10a7kNbHYQJAVFCuHNFxS4Cpwo0aqtnzKoZaHY/8 +X9ABZfafSHCtw3Op92M+7ikkrOELXdS9KdKyyqbKJAKNEHF3LbOfB44WIQJAA2N4 +9UNNVUsXRbElEnYUS529CdUczo4QdVgQjkvk5RiPAUwSdBd9Q0xYnFOlFwEmIowg +ipWJWe0aAlP18ZcEQQJBAL+5lekZ/GUdQoZ4HAsN5a9syrzavJ9VvU1KOOPorPZK +nMRZbbQgP+aSB7yl6K0gaLaZ8XaK0pjxNBh6ASqg9f4= +-----END RSA PRIVATE KEY----- +""") + +client_cert_pem = b("""-----BEGIN CERTIFICATE----- +MIICJjCCAY+gAwIBAgIJAKxpFI5lODkjMA0GCSqGSIb3DQEBBQUAMFgxCzAJBgNV +BAYTAlVTMQswCQYDVQQIEwJJTDEQMA4GA1UEBxMHQ2hpY2FnbzEQMA4GA1UEChMH +VGVzdGluZzEYMBYGA1UEAxMPVGVzdGluZyBSb290IENBMCIYDzIwMDkwMzI1MTIz +ODA1WhgPMjAxNzA2MTExMjM4MDVaMBYxFDASBgNVBAMTC3VnbHkgY2xpZW50MIGf 
+MA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDAZh/SRtNm5ntMT4qb6YzEpTroMlq2 +rn+GrRHRiZ+xkCw/CGNhbtPir7/QxaUj26BSmQrHw1bGKEbPsWiW7bdXSespl+xK +iku4G/KvnnmWdeJHqsiXeUZtqurMELcPQAw9xPHEuhqqUJvvEoMTsnCEqGM+7Dtb +oCRajYyHfluARQIDAQABozYwNDAdBgNVHQ4EFgQUNQB+qkaOaEVecf1J3TTUtAff +0fAwEwYDVR0lBAwwCgYIKwYBBQUHAwIwDQYJKoZIhvcNAQEFBQADgYEAyv/Jh7gM +Q3OHvmsFEEvRI+hsW8y66zK4K5de239Y44iZrFYkt7Q5nBPMEWDj4F2hLYWL/qtI +9Zdr0U4UDCU9SmmGYh4o7R4TZ5pGFvBYvjhHbkSFYFQXZxKUi+WUxplP6I0wr2KJ +PSTJCjJOn3xo2NTKRgV1gaoTf2EhL+RG8TQ= +-----END CERTIFICATE----- +""") + +client_key_pem = normalize_privatekey_pem(b("""-----BEGIN RSA PRIVATE KEY----- +MIICXgIBAAKBgQDAZh/SRtNm5ntMT4qb6YzEpTroMlq2rn+GrRHRiZ+xkCw/CGNh +btPir7/QxaUj26BSmQrHw1bGKEbPsWiW7bdXSespl+xKiku4G/KvnnmWdeJHqsiX +eUZtqurMELcPQAw9xPHEuhqqUJvvEoMTsnCEqGM+7DtboCRajYyHfluARQIDAQAB +AoGATkZ+NceY5Glqyl4mD06SdcKfV65814vg2EL7V9t8+/mi9rYL8KztSXGlQWPX +zuHgtRoMl78yQ4ZJYOBVo+nsx8KZNRCEBlE19bamSbQLCeQMenWnpeYyQUZ908gF +h6L9qsFVJepgA9RDgAjyDoS5CaWCdCCPCH2lDkdcqC54SVUCQQDseuduc4wi8h4t +V8AahUn9fn9gYfhoNuM0gdguTA0nPLVWz4hy1yJiWYQe0H7NLNNTmCKiLQaJpAbb +TC6vE8C7AkEA0Ee8CMJUc20BnGEmxwgWcVuqFWaKCo8jTH1X38FlATUsyR3krjW2 +dL3yDD9NwHxsYP7nTKp/U8MV7U9IBn4y/wJBAJl7H0/BcLeRmuJk7IqJ7b635iYB +D/9beFUw3MUXmQXZUfyYz39xf6CDZsu1GEdEC5haykeln3Of4M9d/4Kj+FcCQQCY +si6xwT7GzMDkk/ko684AV3KPc/h6G0yGtFIrMg7J3uExpR/VdH2KgwMkZXisSMvw +JJEQjOMCVsEJlRk54WWjAkEAzoZNH6UhDdBK5F38rVt/y4SEHgbSfJHIAmPS32Kq +f6GGcfNpip0Uk7q7udTKuX7Q/buZi/C4YW7u3VKAquv9NA== +-----END RSA PRIVATE KEY----- +""")) + +cleartextCertificatePEM = b("""-----BEGIN CERTIFICATE----- +MIIC7TCCAlagAwIBAgIIPQzE4MbeufQwDQYJKoZIhvcNAQEFBQAwWDELMAkGA1UE +BhMCVVMxCzAJBgNVBAgTAklMMRAwDgYDVQQHEwdDaGljYWdvMRAwDgYDVQQKEwdU +ZXN0aW5nMRgwFgYDVQQDEw9UZXN0aW5nIFJvb3QgQ0EwIhgPMjAwOTAzMjUxMjM2 +NThaGA8yMDE3MDYxMTEyMzY1OFowWDELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAklM +MRAwDgYDVQQHEwdDaGljYWdvMRAwDgYDVQQKEwdUZXN0aW5nMRgwFgYDVQQDEw9U +ZXN0aW5nIFJvb3QgQ0EwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAPmaQumL +urpE527uSEHdL1pqcDRmWzu+98Y6YHzT/J7KWEamyMCNZ6fRW1JCR782UQ8a07fy +2xXsKy4WdKaxyG8CcatwmXvpvRQ44dSANMihHELpANTdyVp6DCysED6wkQFurHlF +1dshEaJw8b/ypDhmbVIo6Ci1xvCJqivbLFnbAgMBAAGjgbswgbgwHQYDVR0OBBYE +FINVdy1eIfFJDAkk51QJEo3IfgSuMIGIBgNVHSMEgYAwfoAUg1V3LV4h8UkMCSTn +VAkSjch+BK6hXKRaMFgxCzAJBgNVBAYTAlVTMQswCQYDVQQIEwJJTDEQMA4GA1UE +BxMHQ2hpY2FnbzEQMA4GA1UEChMHVGVzdGluZzEYMBYGA1UEAxMPVGVzdGluZyBS +b290IENBggg9DMTgxt659DAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBBQUAA4GB +AGGCDazMJGoWNBpc03u6+smc95dEead2KlZXBATOdFT1VesY3+nUOqZhEhTGlDMi +hkgaZnzoIq/Uamidegk4hirsCT/R+6vsKAAxNTcBjUeZjlykCJWy5ojShGftXIKY +w/njVbKMXrvc83qmTdGl3TAM0fxQIpqgcglFLveEBgzn +-----END CERTIFICATE----- +""") + +cleartextPrivateKeyPEM = normalize_privatekey_pem(b("""\ +-----BEGIN RSA PRIVATE KEY----- +MIICXQIBAAKBgQD5mkLpi7q6ROdu7khB3S9aanA0Zls7vvfGOmB80/yeylhGpsjA +jWen0VtSQke/NlEPGtO38tsV7CsuFnSmschvAnGrcJl76b0UOOHUgDTIoRxC6QDU +3claegwsrBA+sJEBbqx5RdXbIRGicPG/8qQ4Zm1SKOgotcbwiaor2yxZ2wIDAQAB +AoGBAPCgMpmLxzwDaUmcFbTJUvlLW1hoxNNYSu2jIZm1k/hRAcE60JYwvBkgz3UB +yMEh0AtLxYe0bFk6EHah11tMUPgscbCq73snJ++8koUw+csk22G65hOs51bVb7Aa +6JBe67oLzdtvgCUFAA2qfrKzWRZzAdhUirQUZgySZk+Xq1pBAkEA/kZG0A6roTSM +BVnx7LnPfsycKUsTumorpXiylZJjTi9XtmzxhrYN6wgZlDOOwOLgSQhszGpxVoMD +u3gByT1b2QJBAPtL3mSKdvwRu/+40zaZLwvSJRxaj0mcE4BJOS6Oqs/hS1xRlrNk +PpQ7WJ4yM6ZOLnXzm2mKyxm50Mv64109FtMCQQDOqS2KkjHaLowTGVxwC0DijMfr +I9Lf8sSQk32J5VWCySWf5gGTfEnpmUa41gKTMJIbqZZLucNuDcOtzUaeWZlZAkA8 +ttXigLnCqR486JDPTi9ZscoZkZ+w7y6e/hH8t6d5Vjt48JVyfjPIaJY+km58LcN3 +6AWSeGAdtRFHVzR7oHjVAkB4hutvxiOeiIVQNBhM6RSI9aBPMI21DoX2JRoxvNW2 
+cbvAhow217X9V0dVerEOKxnNYspXRrh36h7k4mQA+sDq +-----END RSA PRIVATE KEY----- +""")) + +cleartextCertificateRequestPEM = b("""-----BEGIN CERTIFICATE REQUEST----- +MIIBnjCCAQcCAQAwXjELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAklMMRAwDgYDVQQH +EwdDaGljYWdvMRcwFQYDVQQKEw5NeSBDb21wYW55IEx0ZDEXMBUGA1UEAxMORnJl +ZGVyaWNrIERlYW4wgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBANp6Y17WzKSw +BsUWkXdqg6tnXy8H8hA1msCMWpc+/2KJ4mbv5NyD6UD+/SqagQqulPbF/DFea9nA +E0zhmHJELcM8gUTIlXv/cgDWnmK4xj8YkjVUiCdqKRAKeuzLG1pGmwwF5lGeJpXN +xQn5ecR0UYSOWj6TTGXB9VyUMQzCClcBAgMBAAGgADANBgkqhkiG9w0BAQUFAAOB +gQAAJGuF/R/GGbeC7FbFW+aJgr9ee0Xbl6nlhu7pTe67k+iiKT2dsl2ti68MVTnu +Vrb3HUNqOkiwsJf6kCtq5oPn3QVYzTa76Dt2y3Rtzv6boRSlmlfrgS92GNma8JfR +oICQk3nAudi6zl1Dix3BCv1pUp5KMtGn3MeDEi6QFGy2rA== +-----END CERTIFICATE REQUEST----- +""") + +encryptedPrivateKeyPEM = b("""-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: DES-EDE3-CBC,9573604A18579E9E + +SHOho56WxDkT0ht10UTeKc0F5u8cqIa01kzFAmETw0MAs8ezYtK15NPdCXUm3X/2 +a17G7LSF5bkxOgZ7vpXyMzun/owrj7CzvLxyncyEFZWvtvzaAhPhvTJtTIB3kf8B +8+qRcpTGK7NgXEgYBW5bj1y4qZkD4zCL9o9NQzsKI3Ie8i0239jsDOWR38AxjXBH +mGwAQ4Z6ZN5dnmM4fhMIWsmFf19sNyAML4gHenQCHhmXbjXeVq47aC2ProInJbrm ++00TcisbAQ40V9aehVbcDKtS4ZbMVDwncAjpXpcncC54G76N6j7F7wL7L/FuXa3A +fvSVy9n2VfF/pJ3kYSflLHH2G/DFxjF7dl0GxhKPxJjp3IJi9VtuvmN9R2jZWLQF +tfC8dXgy/P9CfFQhlinqBTEwgH0oZ/d4k4NVFDSdEMaSdmBAjlHpc+Vfdty3HVnV +rKXj//wslsFNm9kIwJGIgKUa/n2jsOiydrsk1mgH7SmNCb3YHgZhbbnq0qLat/HC +gHDt3FHpNQ31QzzL3yrenFB2L9osIsnRsDTPFNi4RX4SpDgNroxOQmyzCCV6H+d4 +o1mcnNiZSdxLZxVKccq0AfRpHqpPAFnJcQHP6xyT9MZp6fBa0XkxDnt9kNU8H3Qw +7SJWZ69VXjBUzMlQViLuaWMgTnL+ZVyFZf9hTF7U/ef4HMLMAVNdiaGG+G+AjCV/ +MbzjS007Oe4qqBnCWaFPSnJX6uLApeTbqAxAeyCql56ULW5x6vDMNC3dwjvS/CEh +11n8RkgFIQA0AhuKSIg3CbuartRsJnWOLwgLTzsrKYL4yRog1RJrtw== +-----END RSA PRIVATE KEY----- +""") + +encryptedPrivateKeyPEMPassphrase = b("foobar") + +# Some PKCS#7 stuff. 
Generated with the openssl command line: +# +# openssl crl2pkcs7 -inform pem -outform pem -certfile s.pem -nocrl +# +# with a certificate and key (but the key should be irrelevant) in s.pem +pkcs7Data = b("""\ +-----BEGIN PKCS7----- +MIIDNwYJKoZIhvcNAQcCoIIDKDCCAyQCAQExADALBgkqhkiG9w0BBwGgggMKMIID +BjCCAm+gAwIBAgIBATANBgkqhkiG9w0BAQQFADB7MQswCQYDVQQGEwJTRzERMA8G +A1UEChMITTJDcnlwdG8xFDASBgNVBAsTC00yQ3J5cHRvIENBMSQwIgYDVQQDExtN +MkNyeXB0byBDZXJ0aWZpY2F0ZSBNYXN0ZXIxHTAbBgkqhkiG9w0BCQEWDm5ncHNA +cG9zdDEuY29tMB4XDTAwMDkxMDA5NTEzMFoXDTAyMDkxMDA5NTEzMFowUzELMAkG +A1UEBhMCU0cxETAPBgNVBAoTCE0yQ3J5cHRvMRIwEAYDVQQDEwlsb2NhbGhvc3Qx +HTAbBgkqhkiG9w0BCQEWDm5ncHNAcG9zdDEuY29tMFwwDQYJKoZIhvcNAQEBBQAD +SwAwSAJBAKy+e3dulvXzV7zoTZWc5TzgApr8DmeQHTYC8ydfzH7EECe4R1Xh5kwI +zOuuFfn178FBiS84gngaNcrFi0Z5fAkCAwEAAaOCAQQwggEAMAkGA1UdEwQCMAAw +LAYJYIZIAYb4QgENBB8WHU9wZW5TU0wgR2VuZXJhdGVkIENlcnRpZmljYXRlMB0G +A1UdDgQWBBTPhIKSvnsmYsBVNWjj0m3M2z0qVTCBpQYDVR0jBIGdMIGagBT7hyNp +65w6kxXlxb8pUU/+7Sg4AaF/pH0wezELMAkGA1UEBhMCU0cxETAPBgNVBAoTCE0y +Q3J5cHRvMRQwEgYDVQQLEwtNMkNyeXB0byBDQTEkMCIGA1UEAxMbTTJDcnlwdG8g +Q2VydGlmaWNhdGUgTWFzdGVyMR0wGwYJKoZIhvcNAQkBFg5uZ3BzQHBvc3QxLmNv +bYIBADANBgkqhkiG9w0BAQQFAAOBgQA7/CqT6PoHycTdhEStWNZde7M/2Yc6BoJu +VwnW8YxGO8Sn6UJ4FeffZNcYZddSDKosw8LtPOeWoK3JINjAk5jiPQ2cww++7QGG +/g5NDjxFZNDJP1dGiLAxPW6JXwov4v0FmdzfLOZ01jDcgQQZqEpYlgpuI5JEWUQ9 +Ho4EzbYCOaEAMQA= +-----END PKCS7----- +""") + +pkcs7DataASN1 = base64.b64decode(b""" +MIIDNwYJKoZIhvcNAQcCoIIDKDCCAyQCAQExADALBgkqhkiG9w0BBwGgggMKMIID +BjCCAm+gAwIBAgIBATANBgkqhkiG9w0BAQQFADB7MQswCQYDVQQGEwJTRzERMA8G +A1UEChMITTJDcnlwdG8xFDASBgNVBAsTC00yQ3J5cHRvIENBMSQwIgYDVQQDExtN +MkNyeXB0byBDZXJ0aWZpY2F0ZSBNYXN0ZXIxHTAbBgkqhkiG9w0BCQEWDm5ncHNA +cG9zdDEuY29tMB4XDTAwMDkxMDA5NTEzMFoXDTAyMDkxMDA5NTEzMFowUzELMAkG +A1UEBhMCU0cxETAPBgNVBAoTCE0yQ3J5cHRvMRIwEAYDVQQDEwlsb2NhbGhvc3Qx +HTAbBgkqhkiG9w0BCQEWDm5ncHNAcG9zdDEuY29tMFwwDQYJKoZIhvcNAQEBBQAD +SwAwSAJBAKy+e3dulvXzV7zoTZWc5TzgApr8DmeQHTYC8ydfzH7EECe4R1Xh5kwI +zOuuFfn178FBiS84gngaNcrFi0Z5fAkCAwEAAaOCAQQwggEAMAkGA1UdEwQCMAAw +LAYJYIZIAYb4QgENBB8WHU9wZW5TU0wgR2VuZXJhdGVkIENlcnRpZmljYXRlMB0G +A1UdDgQWBBTPhIKSvnsmYsBVNWjj0m3M2z0qVTCBpQYDVR0jBIGdMIGagBT7hyNp +65w6kxXlxb8pUU/+7Sg4AaF/pH0wezELMAkGA1UEBhMCU0cxETAPBgNVBAoTCE0y +Q3J5cHRvMRQwEgYDVQQLEwtNMkNyeXB0byBDQTEkMCIGA1UEAxMbTTJDcnlwdG8g +Q2VydGlmaWNhdGUgTWFzdGVyMR0wGwYJKoZIhvcNAQkBFg5uZ3BzQHBvc3QxLmNv +bYIBADANBgkqhkiG9w0BAQQFAAOBgQA7/CqT6PoHycTdhEStWNZde7M/2Yc6BoJu +VwnW8YxGO8Sn6UJ4FeffZNcYZddSDKosw8LtPOeWoK3JINjAk5jiPQ2cww++7QGG +/g5NDjxFZNDJP1dGiLAxPW6JXwov4v0FmdzfLOZ01jDcgQQZqEpYlgpuI5JEWUQ9 +Ho4EzbYCOaEAMQA= +""") + +crlData = b("""\ +-----BEGIN X509 CRL----- +MIIBWzCBxTANBgkqhkiG9w0BAQQFADBYMQswCQYDVQQGEwJVUzELMAkGA1UECBMC +SUwxEDAOBgNVBAcTB0NoaWNhZ28xEDAOBgNVBAoTB1Rlc3RpbmcxGDAWBgNVBAMT +D1Rlc3RpbmcgUm9vdCBDQRcNMDkwNzI2MDQzNDU2WhcNMTIwOTI3MDI0MTUyWjA8 +MBUCAgOrGA8yMDA5MDcyNTIzMzQ1NlowIwICAQAYDzIwMDkwNzI1MjMzNDU2WjAM +MAoGA1UdFQQDCgEEMA0GCSqGSIb3DQEBBAUAA4GBAEBt7xTs2htdD3d4ErrcGAw1 +4dKcVnIWTutoI7xxen26Wwvh8VCsT7i/UeP+rBl9rC/kfjWjzQk3/zleaarGTpBT +0yp4HXRFFoRhhSE/hP+eteaPXRgrsNRLHe9ZDd69wmh7J1wMDb0m81RG7kqcbsid +vrzEeLDRiiPl92dyyWmu +-----END X509 CRL----- +""") + + +# A broken RSA private key which can be used to test the error path through +# PKey.check. 
+inconsistentPrivateKeyPEM = b("""-----BEGIN RSA PRIVATE KEY----- +MIIBPAIBAAJBAKy+e3dulvXzV7zoTZWc5TzgApr8DmeQHTYC8ydfzH7EECe4R1Xh +5kwIzOuuFfn178FBiS84gngaNcrFi0Z5fAkCAwEaAQJBAIqm/bz4NA1H++Vx5Ewx +OcKp3w19QSaZAwlGRtsUxrP7436QjnREM3Bm8ygU11BjkPVmtrKm6AayQfCHqJoT +zIECIQDW0BoMoL0HOYM/mrTLhaykYAVqgIeJsPjvkEhTFXWBuQIhAM3deFAvWNu4 +nklUQ37XsCT2c9tmNt1LAT+slG2JOTTRAiAuXDtC/m3NYVwyHfFm+zKHRzHkClk2 +HjubeEgjpj32AQIhAJqMGTaZVOwevTXvvHwNeH+vRWsAYU/gbx+OQB+7VOcBAiEA +oolb6NMg/R3enNPvS1O4UU1H8wpaF77L4yiSWlE0p4w= +-----END RSA PRIVATE KEY----- +""") + +# certificate with NULL bytes in subjectAltName and common name + +nulbyteSubjectAltNamePEM = b("""-----BEGIN CERTIFICATE----- +MIIE2DCCA8CgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBxTELMAkGA1UEBhMCVVMx +DzANBgNVBAgMBk9yZWdvbjESMBAGA1UEBwwJQmVhdmVydG9uMSMwIQYDVQQKDBpQ +eXRob24gU29mdHdhcmUgRm91bmRhdGlvbjEgMB4GA1UECwwXUHl0aG9uIENvcmUg +RGV2ZWxvcG1lbnQxJDAiBgNVBAMMG251bGwucHl0aG9uLm9yZwBleGFtcGxlLm9y +ZzEkMCIGCSqGSIb3DQEJARYVcHl0aG9uLWRldkBweXRob24ub3JnMB4XDTEzMDgw +NzEzMTE1MloXDTEzMDgwNzEzMTI1MlowgcUxCzAJBgNVBAYTAlVTMQ8wDQYDVQQI +DAZPcmVnb24xEjAQBgNVBAcMCUJlYXZlcnRvbjEjMCEGA1UECgwaUHl0aG9uIFNv +ZnR3YXJlIEZvdW5kYXRpb24xIDAeBgNVBAsMF1B5dGhvbiBDb3JlIERldmVsb3Bt +ZW50MSQwIgYDVQQDDBtudWxsLnB5dGhvbi5vcmcAZXhhbXBsZS5vcmcxJDAiBgkq +hkiG9w0BCQEWFXB5dGhvbi1kZXZAcHl0aG9uLm9yZzCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBALXq7cn7Rn1vO3aA3TrzA5QLp6bb7B3f/yN0CJ2XFj+j +pHs+Gw6WWSUDpybiiKnPec33BFawq3kyblnBMjBU61ioy5HwQqVkJ8vUVjGIUq3P +vX/wBmQfzCe4o4uM89gpHyUL9UYGG8oCRa17dgqcv7u5rg0Wq2B1rgY+nHwx3JIv +KRrgSwyRkGzpN8WQ1yrXlxWjgI9de0mPVDDUlywcWze1q2kwaEPTM3hLAmD1PESA +oY/n8A/RXoeeRs9i/Pm/DGUS8ZPINXk/yOzsR/XvvkTVroIeLZqfmFpnZeF0cHzL +08LODkVJJ9zjLdT7SA4vnne4FEbAxDbKAq5qkYzaL4UCAwEAAaOB0DCBzTAMBgNV +HRMBAf8EAjAAMB0GA1UdDgQWBBSIWlXAUv9hzVKjNQ/qWpwkOCL3XDALBgNVHQ8E +BAMCBeAwgZAGA1UdEQSBiDCBhYIeYWx0bnVsbC5weXRob24ub3JnAGV4YW1wbGUu +Y29tgSBudWxsQHB5dGhvbi5vcmcAdXNlckBleGFtcGxlLm9yZ4YpaHR0cDovL251 +bGwucHl0aG9uLm9yZwBodHRwOi8vZXhhbXBsZS5vcmeHBMAAAgGHECABDbgAAAAA +AAAAAAAAAAEwDQYJKoZIhvcNAQEFBQADggEBAKxPRe99SaghcI6IWT7UNkJw9aO9 +i9eo0Fj2MUqxpKbdb9noRDy2CnHWf7EIYZ1gznXPdwzSN4YCjV5d+Q9xtBaowT0j +HPERs1ZuytCNNJTmhyqZ8q6uzMLoht4IqH/FBfpvgaeC5tBTnTT0rD5A/olXeimk +kX4LxlEx5RAvpGB2zZVRGr6LobD9rVK91xuHYNIxxxfEGE8tCCWjp0+3ksri9SXx +VHWBnbM9YaL32u3hxm8sYB/Yb8WSBavJCWJJqRStVRHM1koZlJmXNx2BX4vPo6iW +RFEIPQsFZRLrtnCAiEhyT8bC2s/Njlu6ly9gtJZWSV46Q3ZjBL4q9sHKqZQ= +-----END CERTIFICATE-----""") + + +class X509ExtTests(TestCase): + """ + Tests for :py:class:`OpenSSL.crypto.X509Extension`. + """ + + def setUp(self): + """ + Create a new private key and start a certificate request (for a test + method to finish in one way or another). + """ + super(X509ExtTests, self).setUp() + # Basic setup stuff to generate a certificate + self.pkey = PKey() + self.pkey.generate_key(TYPE_RSA, 384) + self.req = X509Req() + self.req.set_pubkey(self.pkey) + # Authority good you have. + self.req.get_subject().commonName = "Yoda root CA" + self.x509 = X509() + self.subject = self.x509.get_subject() + self.subject.commonName = self.req.get_subject().commonName + self.x509.set_issuer(self.subject) + self.x509.set_pubkey(self.pkey) + now = b(datetime.now().strftime("%Y%m%d%H%M%SZ")) + expire = b((datetime.now() + timedelta(days=100)).strftime("%Y%m%d%H%M%SZ")) + self.x509.set_notBefore(now) + self.x509.set_notAfter(expire) + + + def tearDown(self): + """ + Forget all of the pyOpenSSL objects so they can be garbage collected, + their memory released, and not interfere with the leak detection code. 
+ """ + self.pkey = self.req = self.x509 = self.subject = None + super(X509ExtTests, self).tearDown() + + + def test_str(self): + """ + The string representation of :py:class:`X509Extension` instances as returned by + :py:data:`str` includes stuff. + """ + # This isn't necessarily the best string representation. Perhaps it + # will be changed/improved in the future. + self.assertEquals( + str(X509Extension(b('basicConstraints'), True, b('CA:false'))), + 'CA:FALSE') + + + def test_type(self): + """ + :py:class:`X509Extension` and :py:class:`X509ExtensionType` refer to the same type object + and can be used to create instances of that type. + """ + self.assertIdentical(X509Extension, X509ExtensionType) + self.assertConsistentType( + X509Extension, + 'X509Extension', b('basicConstraints'), True, b('CA:true')) + + + def test_construction(self): + """ + :py:class:`X509Extension` accepts an extension type name, a critical flag, + and an extension value and returns an :py:class:`X509ExtensionType` instance. + """ + basic = X509Extension(b('basicConstraints'), True, b('CA:true')) + self.assertTrue( + isinstance(basic, X509ExtensionType), + "%r is of type %r, should be %r" % ( + basic, type(basic), X509ExtensionType)) + + comment = X509Extension( + b('nsComment'), False, b('pyOpenSSL unit test')) + self.assertTrue( + isinstance(comment, X509ExtensionType), + "%r is of type %r, should be %r" % ( + comment, type(comment), X509ExtensionType)) + + + def test_invalid_extension(self): + """ + :py:class:`X509Extension` raises something if it is passed a bad extension + name or value. + """ + self.assertRaises( + Error, X509Extension, b('thisIsMadeUp'), False, b('hi')) + self.assertRaises( + Error, X509Extension, b('basicConstraints'), False, b('blah blah')) + + # Exercise a weird one (an extension which uses the r2i method). This + # exercises the codepath that requires a non-NULL ctx to be passed to + # X509V3_EXT_nconf. It can't work now because we provide no + # configuration database. It might be made to work in the future. + self.assertRaises( + Error, X509Extension, b('proxyCertInfo'), True, + b('language:id-ppl-anyLanguage,pathlen:1,policy:text:AB')) + + + def test_get_critical(self): + """ + :py:meth:`X509ExtensionType.get_critical` returns the value of the + extension's critical flag. + """ + ext = X509Extension(b('basicConstraints'), True, b('CA:true')) + self.assertTrue(ext.get_critical()) + ext = X509Extension(b('basicConstraints'), False, b('CA:true')) + self.assertFalse(ext.get_critical()) + + + def test_get_short_name(self): + """ + :py:meth:`X509ExtensionType.get_short_name` returns a string giving the short + type name of the extension. + """ + ext = X509Extension(b('basicConstraints'), True, b('CA:true')) + self.assertEqual(ext.get_short_name(), b('basicConstraints')) + ext = X509Extension(b('nsComment'), True, b('foo bar')) + self.assertEqual(ext.get_short_name(), b('nsComment')) + + + def test_get_data(self): + """ + :py:meth:`X509Extension.get_data` returns a string giving the data of the + extension. + """ + ext = X509Extension(b('basicConstraints'), True, b('CA:true')) + # Expect to get back the DER encoded form of CA:true. + self.assertEqual(ext.get_data(), b('0\x03\x01\x01\xff')) + + + def test_get_data_wrong_args(self): + """ + :py:meth:`X509Extension.get_data` raises :py:exc:`TypeError` if passed any arguments. 
+ """ + ext = X509Extension(b('basicConstraints'), True, b('CA:true')) + self.assertRaises(TypeError, ext.get_data, None) + self.assertRaises(TypeError, ext.get_data, "foo") + self.assertRaises(TypeError, ext.get_data, 7) + + + def test_unused_subject(self): + """ + The :py:data:`subject` parameter to :py:class:`X509Extension` may be provided for an + extension which does not use it and is ignored in this case. + """ + ext1 = X509Extension( + b('basicConstraints'), False, b('CA:TRUE'), subject=self.x509) + self.x509.add_extensions([ext1]) + self.x509.sign(self.pkey, 'sha1') + # This is a little lame. Can we think of a better way? + text = dump_certificate(FILETYPE_TEXT, self.x509) + self.assertTrue(b('X509v3 Basic Constraints:') in text) + self.assertTrue(b('CA:TRUE') in text) + + + def test_subject(self): + """ + If an extension requires a subject, the :py:data:`subject` parameter to + :py:class:`X509Extension` provides its value. + """ + ext3 = X509Extension( + b('subjectKeyIdentifier'), False, b('hash'), subject=self.x509) + self.x509.add_extensions([ext3]) + self.x509.sign(self.pkey, 'sha1') + text = dump_certificate(FILETYPE_TEXT, self.x509) + self.assertTrue(b('X509v3 Subject Key Identifier:') in text) + + + def test_missing_subject(self): + """ + If an extension requires a subject and the :py:data:`subject` parameter is + given no value, something happens. + """ + self.assertRaises( + Error, X509Extension, b('subjectKeyIdentifier'), False, b('hash')) + + + def test_invalid_subject(self): + """ + If the :py:data:`subject` parameter is given a value which is not an + :py:class:`X509` instance, :py:exc:`TypeError` is raised. + """ + for badObj in [True, object(), "hello", [], self]: + self.assertRaises( + TypeError, + X509Extension, + 'basicConstraints', False, 'CA:TRUE', subject=badObj) + + + def test_unused_issuer(self): + """ + The :py:data:`issuer` parameter to :py:class:`X509Extension` may be provided for an + extension which does not use it and is ignored in this case. + """ + ext1 = X509Extension( + b('basicConstraints'), False, b('CA:TRUE'), issuer=self.x509) + self.x509.add_extensions([ext1]) + self.x509.sign(self.pkey, 'sha1') + text = dump_certificate(FILETYPE_TEXT, self.x509) + self.assertTrue(b('X509v3 Basic Constraints:') in text) + self.assertTrue(b('CA:TRUE') in text) + + + def test_issuer(self): + """ + If an extension requires an issuer, the :py:data:`issuer` parameter to + :py:class:`X509Extension` provides its value. + """ + ext2 = X509Extension( + b('authorityKeyIdentifier'), False, b('issuer:always'), + issuer=self.x509) + self.x509.add_extensions([ext2]) + self.x509.sign(self.pkey, 'sha1') + text = dump_certificate(FILETYPE_TEXT, self.x509) + self.assertTrue(b('X509v3 Authority Key Identifier:') in text) + self.assertTrue(b('DirName:/CN=Yoda root CA') in text) + + + def test_missing_issuer(self): + """ + If an extension requires an issue and the :py:data:`issuer` parameter is given + no value, something happens. + """ + self.assertRaises( + Error, + X509Extension, + b('authorityKeyIdentifier'), False, + b('keyid:always,issuer:always')) + + + def test_invalid_issuer(self): + """ + If the :py:data:`issuer` parameter is given a value which is not an + :py:class:`X509` instance, :py:exc:`TypeError` is raised. 
+ """ + for badObj in [True, object(), "hello", [], self]: + self.assertRaises( + TypeError, + X509Extension, + 'authorityKeyIdentifier', False, 'keyid:always,issuer:always', + issuer=badObj) + + + +class PKeyTests(TestCase): + """ + Unit tests for :py:class:`OpenSSL.crypto.PKey`. + """ + def test_type(self): + """ + :py:class:`PKey` and :py:class:`PKeyType` refer to the same type object + and can be used to create instances of that type. + """ + self.assertIdentical(PKey, PKeyType) + self.assertConsistentType(PKey, 'PKey') + + + def test_construction(self): + """ + :py:class:`PKey` takes no arguments and returns a new :py:class:`PKey` instance. + """ + self.assertRaises(TypeError, PKey, None) + key = PKey() + self.assertTrue( + isinstance(key, PKeyType), + "%r is of type %r, should be %r" % (key, type(key), PKeyType)) + + + def test_pregeneration(self): + """ + :py:attr:`PKeyType.bits` and :py:attr:`PKeyType.type` return :py:data:`0` before the key is + generated. :py:attr:`PKeyType.check` raises :py:exc:`TypeError` before the key is + generated. + """ + key = PKey() + self.assertEqual(key.type(), 0) + self.assertEqual(key.bits(), 0) + self.assertRaises(TypeError, key.check) + + + def test_failedGeneration(self): + """ + :py:meth:`PKeyType.generate_key` takes two arguments, the first giving the key + type as one of :py:data:`TYPE_RSA` or :py:data:`TYPE_DSA` and the second giving the + number of bits to generate. If an invalid type is specified or + generation fails, :py:exc:`Error` is raised. If an invalid number of bits is + specified, :py:exc:`ValueError` or :py:exc:`Error` is raised. + """ + key = PKey() + self.assertRaises(TypeError, key.generate_key) + self.assertRaises(TypeError, key.generate_key, 1, 2, 3) + self.assertRaises(TypeError, key.generate_key, "foo", "bar") + self.assertRaises(Error, key.generate_key, -1, 0) + + self.assertRaises(ValueError, key.generate_key, TYPE_RSA, -1) + self.assertRaises(ValueError, key.generate_key, TYPE_RSA, 0) + + # XXX RSA generation for small values of bits is fairly buggy in a wide + # range of OpenSSL versions. I need to figure out what the safe lower + # bound for a reasonable number of OpenSSL versions is and explicitly + # check for that in the wrapper. The failure behavior is typically an + # infinite loop inside OpenSSL. + + # self.assertRaises(Error, key.generate_key, TYPE_RSA, 2) + + # XXX DSA generation seems happy with any number of bits. The DSS + # says bits must be between 512 and 1024 inclusive. OpenSSL's DSA + # generator doesn't seem to care about the upper limit at all. For + # the lower limit, it uses 512 if anything smaller is specified. + # So, it doesn't seem possible to make generate_key fail for + # TYPE_DSA with a bits argument which is at least an int. + + # self.assertRaises(Error, key.generate_key, TYPE_DSA, -7) + + + def test_rsaGeneration(self): + """ + :py:meth:`PKeyType.generate_key` generates an RSA key when passed + :py:data:`TYPE_RSA` as a type and a reasonable number of bits. + """ + bits = 128 + key = PKey() + key.generate_key(TYPE_RSA, bits) + self.assertEqual(key.type(), TYPE_RSA) + self.assertEqual(key.bits(), bits) + self.assertTrue(key.check()) + + + def test_dsaGeneration(self): + """ + :py:meth:`PKeyType.generate_key` generates a DSA key when passed + :py:data:`TYPE_DSA` as a type and a reasonable number of bits. + """ + # 512 is a magic number. The DSS (Digital Signature Standard) + # allows a minimum of 512 bits for DSA. DSA_generate_parameters + # will silently promote any value below 512 to 512. 
+ bits = 512 + key = PKey() + key.generate_key(TYPE_DSA, bits) + # self.assertEqual(key.type(), TYPE_DSA) + # self.assertEqual(key.bits(), bits) + # self.assertRaises(TypeError, key.check) + + + def test_regeneration(self): + """ + :py:meth:`PKeyType.generate_key` can be called multiple times on the same + key to generate new keys. + """ + key = PKey() + for type, bits in [(TYPE_RSA, 512), (TYPE_DSA, 576)]: + key.generate_key(type, bits) + self.assertEqual(key.type(), type) + self.assertEqual(key.bits(), bits) + + + def test_inconsistentKey(self): + """ + :py:`PKeyType.check` returns :py:exc:`Error` if the key is not consistent. + """ + key = load_privatekey(FILETYPE_PEM, inconsistentPrivateKeyPEM) + self.assertRaises(Error, key.check) + + + def test_check_wrong_args(self): + """ + :py:meth:`PKeyType.check` raises :py:exc:`TypeError` if called with any arguments. + """ + self.assertRaises(TypeError, PKey().check, None) + self.assertRaises(TypeError, PKey().check, object()) + self.assertRaises(TypeError, PKey().check, 1) + + + def test_check_public_key(self): + """ + :py:meth:`PKeyType.check` raises :py:exc:`TypeError` if only the public + part of the key is available. + """ + # A trick to get a public-only key + key = PKey() + key.generate_key(TYPE_RSA, 512) + cert = X509() + cert.set_pubkey(key) + pub = cert.get_pubkey() + self.assertRaises(TypeError, pub.check) + + + +class X509NameTests(TestCase): + """ + Unit tests for :py:class:`OpenSSL.crypto.X509Name`. + """ + def _x509name(self, **attrs): + # XXX There's no other way to get a new X509Name yet. + name = X509().get_subject() + attrs = list(attrs.items()) + # Make the order stable - order matters! + def key(attr): + return attr[1] + attrs.sort(key=key) + for k, v in attrs: + setattr(name, k, v) + return name + + + def test_type(self): + """ + The type of X509Name objects is :py:class:`X509NameType`. + """ + self.assertIdentical(X509Name, X509NameType) + self.assertEqual(X509NameType.__name__, 'X509Name') + self.assertTrue(isinstance(X509NameType, type)) + + name = self._x509name() + self.assertTrue( + isinstance(name, X509NameType), + "%r is of type %r, should be %r" % ( + name, type(name), X509NameType)) + + + def test_onlyStringAttributes(self): + """ + Attempting to set a non-:py:data:`str` attribute name on an :py:class:`X509NameType` + instance causes :py:exc:`TypeError` to be raised. + """ + name = self._x509name() + # Beyond these cases, you may also think that unicode should be + # rejected. Sorry, you're wrong. unicode is automatically converted to + # str outside of the control of X509Name, so there's no way to reject + # it. + + # Also, this used to test str subclasses, but that test is less relevant + # now that the implementation is in Python instead of C. Also PyPy + # automatically converts str subclasses to str when they are passed to + # setattr, so we can't test it on PyPy. Apparently CPython does this + # sometimes as well. + self.assertRaises(TypeError, setattr, name, None, "hello") + self.assertRaises(TypeError, setattr, name, 30, "hello") + + + def test_setInvalidAttribute(self): + """ + Attempting to set any attribute name on an :py:class:`X509NameType` instance for + which no corresponding NID is defined causes :py:exc:`AttributeError` to be + raised. + """ + name = self._x509name() + self.assertRaises(AttributeError, setattr, name, "no such thing", None) + + + def test_attributes(self): + """ + :py:class:`X509NameType` instances have attributes for each standard (?) + X509Name field. 
+ """ + name = self._x509name() + name.commonName = "foo" + self.assertEqual(name.commonName, "foo") + self.assertEqual(name.CN, "foo") + name.CN = "baz" + self.assertEqual(name.commonName, "baz") + self.assertEqual(name.CN, "baz") + name.commonName = "bar" + self.assertEqual(name.commonName, "bar") + self.assertEqual(name.CN, "bar") + name.CN = "quux" + self.assertEqual(name.commonName, "quux") + self.assertEqual(name.CN, "quux") + + + def test_copy(self): + """ + :py:class:`X509Name` creates a new :py:class:`X509NameType` instance with all the same + attributes as an existing :py:class:`X509NameType` instance when called with + one. + """ + name = self._x509name(commonName="foo", emailAddress="bar@example.com") + + copy = X509Name(name) + self.assertEqual(copy.commonName, "foo") + self.assertEqual(copy.emailAddress, "bar@example.com") + + # Mutate the copy and ensure the original is unmodified. + copy.commonName = "baz" + self.assertEqual(name.commonName, "foo") + + # Mutate the original and ensure the copy is unmodified. + name.emailAddress = "quux@example.com" + self.assertEqual(copy.emailAddress, "bar@example.com") + + + def test_repr(self): + """ + :py:func:`repr` passed an :py:class:`X509NameType` instance should return a string + containing a description of the type and the NIDs which have been set + on it. + """ + name = self._x509name(commonName="foo", emailAddress="bar") + self.assertEqual( + repr(name), + "") + + + def test_comparison(self): + """ + :py:class:`X509NameType` instances should compare based on their NIDs. + """ + def _equality(a, b, assertTrue, assertFalse): + assertTrue(a == b, "(%r == %r) --> False" % (a, b)) + assertFalse(a != b) + assertTrue(b == a) + assertFalse(b != a) + + def assertEqual(a, b): + _equality(a, b, self.assertTrue, self.assertFalse) + + # Instances compare equal to themselves. + name = self._x509name() + assertEqual(name, name) + + # Empty instances should compare equal to each other. + assertEqual(self._x509name(), self._x509name()) + + # Instances with equal NIDs should compare equal to each other. + assertEqual(self._x509name(commonName="foo"), + self._x509name(commonName="foo")) + + # Instance with equal NIDs set using different aliases should compare + # equal to each other. + assertEqual(self._x509name(commonName="foo"), + self._x509name(CN="foo")) + + # Instances with more than one NID with the same values should compare + # equal to each other. + assertEqual(self._x509name(CN="foo", organizationalUnitName="bar"), + self._x509name(commonName="foo", OU="bar")) + + def assertNotEqual(a, b): + _equality(a, b, self.assertFalse, self.assertTrue) + + # Instances with different values for the same NID should not compare + # equal to each other. + assertNotEqual(self._x509name(CN="foo"), + self._x509name(CN="bar")) + + # Instances with different NIDs should not compare equal to each other. + assertNotEqual(self._x509name(CN="foo"), + self._x509name(OU="foo")) + + def _inequality(a, b, assertTrue, assertFalse): + assertTrue(a < b) + assertTrue(a <= b) + assertTrue(b > a) + assertTrue(b >= a) + assertFalse(a > b) + assertFalse(a >= b) + assertFalse(b < a) + assertFalse(b <= a) + + def assertLessThan(a, b): + _inequality(a, b, self.assertTrue, self.assertFalse) + + # An X509Name with a NID with a value which sorts less than the value + # of the same NID on another X509Name compares less than the other + # X509Name. 
+ assertLessThan(self._x509name(CN="abc"), + self._x509name(CN="def")) + + def assertGreaterThan(a, b): + _inequality(a, b, self.assertFalse, self.assertTrue) + + # An X509Name with a NID with a value which sorts greater than the + # value of the same NID on another X509Name compares greater than the + # other X509Name. + assertGreaterThan(self._x509name(CN="def"), + self._x509name(CN="abc")) + + + def test_hash(self): + """ + :py:meth:`X509Name.hash` returns an integer hash based on the value of the + name. + """ + a = self._x509name(CN="foo") + b = self._x509name(CN="foo") + self.assertEqual(a.hash(), b.hash()) + a.CN = "bar" + self.assertNotEqual(a.hash(), b.hash()) + + + def test_der(self): + """ + :py:meth:`X509Name.der` returns the DER encoded form of the name. + """ + a = self._x509name(CN="foo", C="US") + self.assertEqual( + a.der(), + b('0\x1b1\x0b0\t\x06\x03U\x04\x06\x13\x02US' + '1\x0c0\n\x06\x03U\x04\x03\x13\x03foo')) + + + def test_get_components(self): + """ + :py:meth:`X509Name.get_components` returns a :py:data:`list` of + two-tuples of :py:data:`str` + giving the NIDs and associated values which make up the name. + """ + a = self._x509name() + self.assertEqual(a.get_components(), []) + a.CN = "foo" + self.assertEqual(a.get_components(), [(b("CN"), b("foo"))]) + a.organizationalUnitName = "bar" + self.assertEqual( + a.get_components(), + [(b("CN"), b("foo")), (b("OU"), b("bar"))]) + + + def test_load_nul_byte_attribute(self): + """ + An :py:class:`OpenSSL.crypto.X509Name` from an + :py:class:`OpenSSL.crypto.X509` instance loaded from a file can have a + NUL byte in the value of one of its attributes. + """ + cert = load_certificate(FILETYPE_PEM, nulbyteSubjectAltNamePEM) + subject = cert.get_subject() + self.assertEqual( + "null.python.org\x00example.org", subject.commonName) + + + def test_setAttributeFailure(self): + """ + If the value of an attribute cannot be set for some reason then + :py:class:`OpenSSL.crypto.Error` is raised. + """ + name = self._x509name() + # This value is too long + self.assertRaises(Error, setattr, name, "O", b"x" * 512) + + + +class _PKeyInteractionTestsMixin: + """ + Tests which involve another thing and a PKey. + """ + def signable(self): + """ + Return something with a :py:meth:`set_pubkey`, :py:meth:`set_pubkey`, + and :py:meth:`sign` method. + """ + raise NotImplementedError() + + + def test_signWithUngenerated(self): + """ + :py:meth:`X509Req.sign` raises :py:exc:`ValueError` when pass a + :py:class:`PKey` with no parts. + """ + request = self.signable() + key = PKey() + self.assertRaises(ValueError, request.sign, key, GOOD_DIGEST) + + + def test_signWithPublicKey(self): + """ + :py:meth:`X509Req.sign` raises :py:exc:`ValueError` when pass a + :py:class:`PKey` with no private part as the signing key. + """ + request = self.signable() + key = PKey() + key.generate_key(TYPE_RSA, 512) + request.set_pubkey(key) + pub = request.get_pubkey() + self.assertRaises(ValueError, request.sign, pub, GOOD_DIGEST) + + + def test_signWithUnknownDigest(self): + """ + :py:meth:`X509Req.sign` raises :py:exc:`ValueError` when passed a digest name which is + not known. + """ + request = self.signable() + key = PKey() + key.generate_key(TYPE_RSA, 512) + self.assertRaises(ValueError, request.sign, key, BAD_DIGEST) + + + def test_sign(self): + """ + :py:meth:`X509Req.sign` succeeds when passed a private key object and a valid + digest function. :py:meth:`X509Req.verify` can be used to check the signature. 
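+ Verification with a key other than the one used for signing fails with :py:exc:`Error`.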
+ """ + request = self.signable() + key = PKey() + key.generate_key(TYPE_RSA, 512) + request.set_pubkey(key) + request.sign(key, GOOD_DIGEST) + # If the type has a verify method, cover that too. + if getattr(request, 'verify', None) is not None: + pub = request.get_pubkey() + self.assertTrue(request.verify(pub)) + # Make another key that won't verify. + key = PKey() + key.generate_key(TYPE_RSA, 512) + self.assertRaises(Error, request.verify, key) + + + + +class X509ReqTests(TestCase, _PKeyInteractionTestsMixin): + """ + Tests for :py:class:`OpenSSL.crypto.X509Req`. + """ + def signable(self): + """ + Create and return a new :py:class:`X509Req`. + """ + return X509Req() + + + def test_type(self): + """ + :py:obj:`X509Req` and :py:obj:`X509ReqType` refer to the same type object and can be + used to create instances of that type. + """ + self.assertIdentical(X509Req, X509ReqType) + self.assertConsistentType(X509Req, 'X509Req') + + + def test_construction(self): + """ + :py:obj:`X509Req` takes no arguments and returns an :py:obj:`X509ReqType` instance. + """ + request = X509Req() + self.assertTrue( + isinstance(request, X509ReqType), + "%r is of type %r, should be %r" % (request, type(request), X509ReqType)) + + + def test_version(self): + """ + :py:obj:`X509ReqType.set_version` sets the X.509 version of the certificate + request. :py:obj:`X509ReqType.get_version` returns the X.509 version of + the certificate request. The initial value of the version is 0. + """ + request = X509Req() + self.assertEqual(request.get_version(), 0) + request.set_version(1) + self.assertEqual(request.get_version(), 1) + request.set_version(3) + self.assertEqual(request.get_version(), 3) + + + def test_version_wrong_args(self): + """ + :py:obj:`X509ReqType.set_version` raises :py:obj:`TypeError` if called with the wrong + number of arguments or with a non-:py:obj:`int` argument. + :py:obj:`X509ReqType.get_version` raises :py:obj:`TypeError` if called with any + arguments. + """ + request = X509Req() + self.assertRaises(TypeError, request.set_version) + self.assertRaises(TypeError, request.set_version, "foo") + self.assertRaises(TypeError, request.set_version, 1, 2) + self.assertRaises(TypeError, request.get_version, None) + + + def test_get_subject(self): + """ + :py:obj:`X509ReqType.get_subject` returns an :py:obj:`X509Name` for the subject of + the request and which is valid even after the request object is + otherwise dead. + """ + request = X509Req() + subject = request.get_subject() + self.assertTrue( + isinstance(subject, X509NameType), + "%r is of type %r, should be %r" % (subject, type(subject), X509NameType)) + subject.commonName = "foo" + self.assertEqual(request.get_subject().commonName, "foo") + del request + subject.commonName = "bar" + self.assertEqual(subject.commonName, "bar") + + + def test_get_subject_wrong_args(self): + """ + :py:obj:`X509ReqType.get_subject` raises :py:obj:`TypeError` if called with any + arguments. + """ + request = X509Req() + self.assertRaises(TypeError, request.get_subject, None) + + + def test_add_extensions(self): + """ + :py:obj:`X509Req.add_extensions` accepts a :py:obj:`list` of :py:obj:`X509Extension` + instances and adds them to the X509 request. 
+ """ + request = X509Req() + request.add_extensions([ + X509Extension(b('basicConstraints'), True, b('CA:false'))]) + exts = request.get_extensions() + self.assertEqual(len(exts), 1) + self.assertEqual(exts[0].get_short_name(), b('basicConstraints')) + self.assertEqual(exts[0].get_critical(), 1) + self.assertEqual(exts[0].get_data(), b('0\x00')) + + + def test_get_extensions(self): + """ + :py:obj:`X509Req.get_extensions` returns a :py:obj:`list` of + extensions added to this X509 request. + """ + request = X509Req() + exts = request.get_extensions() + self.assertEqual(exts, []) + request.add_extensions([ + X509Extension(b('basicConstraints'), True, b('CA:true')), + X509Extension(b('keyUsage'), False, b('digitalSignature'))]) + exts = request.get_extensions() + self.assertEqual(len(exts), 2) + self.assertEqual(exts[0].get_short_name(), b('basicConstraints')) + self.assertEqual(exts[0].get_critical(), 1) + self.assertEqual(exts[0].get_data(), b('0\x03\x01\x01\xff')) + self.assertEqual(exts[1].get_short_name(), b('keyUsage')) + self.assertEqual(exts[1].get_critical(), 0) + self.assertEqual(exts[1].get_data(), b('\x03\x02\x07\x80')) + + + def test_add_extensions_wrong_args(self): + """ + :py:obj:`X509Req.add_extensions` raises :py:obj:`TypeError` if called with the wrong + number of arguments or with a non-:py:obj:`list`. Or it raises :py:obj:`ValueError` + if called with a :py:obj:`list` containing objects other than :py:obj:`X509Extension` + instances. + """ + request = X509Req() + self.assertRaises(TypeError, request.add_extensions) + self.assertRaises(TypeError, request.add_extensions, object()) + self.assertRaises(ValueError, request.add_extensions, [object()]) + self.assertRaises(TypeError, request.add_extensions, [], None) + + + def test_verify_wrong_args(self): + """ + :py:obj:`X509Req.verify` raises :py:obj:`TypeError` if called with zero + arguments or more than one argument or if passed anything other than a + :py:obj:`PKey` instance as its single argument. + """ + request = X509Req() + self.assertRaises(TypeError, request.verify) + self.assertRaises(TypeError, request.verify, object()) + self.assertRaises(TypeError, request.verify, PKey(), object()) + + + def test_verify_uninitialized_key(self): + """ + :py:obj:`X509Req.verify` raises :py:obj:`OpenSSL.crypto.Error` if called + with a :py:obj:`OpenSSL.crypto.PKey` which contains no key data. + """ + request = X509Req() + pkey = PKey() + self.assertRaises(Error, request.verify, pkey) + + + def test_verify_wrong_key(self): + """ + :py:obj:`X509Req.verify` raises :py:obj:`OpenSSL.crypto.Error` if called + with a :py:obj:`OpenSSL.crypto.PKey` which does not represent the public + part of the key which signed the request. + """ + request = X509Req() + pkey = load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM) + request.sign(pkey, GOOD_DIGEST) + another_pkey = load_privatekey(FILETYPE_PEM, client_key_pem) + self.assertRaises(Error, request.verify, another_pkey) + + + def test_verify_success(self): + """ + :py:obj:`X509Req.verify` returns :py:obj:`True` if called with a + :py:obj:`OpenSSL.crypto.PKey` which represents the public part of the key + which signed the request. + """ + request = X509Req() + pkey = load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM) + request.sign(pkey, GOOD_DIGEST) + self.assertEqual(True, request.verify(pkey)) + + + +class X509Tests(TestCase, _PKeyInteractionTestsMixin): + """ + Tests for :py:obj:`OpenSSL.crypto.X509`. 
+ """ + pemData = cleartextCertificatePEM + cleartextPrivateKeyPEM + + extpem = """ +-----BEGIN CERTIFICATE----- +MIIC3jCCAkegAwIBAgIJAJHFjlcCgnQzMA0GCSqGSIb3DQEBBQUAMEcxCzAJBgNV +BAYTAlNFMRUwEwYDVQQIEwxXZXN0ZXJib3R0b20xEjAQBgNVBAoTCUNhdGFsb2dp +eDENMAsGA1UEAxMEUm9vdDAeFw0wODA0MjIxNDQ1MzhaFw0wOTA0MjIxNDQ1Mzha +MFQxCzAJBgNVBAYTAlNFMQswCQYDVQQIEwJXQjEUMBIGA1UEChMLT3Blbk1ldGFk +aXIxIjAgBgNVBAMTGW5vZGUxLm9tMi5vcGVubWV0YWRpci5vcmcwgZ8wDQYJKoZI +hvcNAQEBBQADgY0AMIGJAoGBAPIcQMrwbk2nESF/0JKibj9i1x95XYAOwP+LarwT +Op4EQbdlI9SY+uqYqlERhF19w7CS+S6oyqx0DRZSk4Y9dZ9j9/xgm2u/f136YS1u +zgYFPvfUs6PqYLPSM8Bw+SjJ+7+2+TN+Tkiof9WP1cMjodQwOmdsiRbR0/J7+b1B +hec1AgMBAAGjgcQwgcEwCQYDVR0TBAIwADAsBglghkgBhvhCAQ0EHxYdT3BlblNT +TCBHZW5lcmF0ZWQgQ2VydGlmaWNhdGUwHQYDVR0OBBYEFIdHsBcMVVMbAO7j6NCj +03HgLnHaMB8GA1UdIwQYMBaAFL2h9Bf9Mre4vTdOiHTGAt7BRY/8MEYGA1UdEQQ/ +MD2CDSouZXhhbXBsZS5vcmeCESoub20yLmV4bWFwbGUuY29thwSC7wgKgRNvbTJA +b3Blbm1ldGFkaXIub3JnMA0GCSqGSIb3DQEBBQUAA4GBALd7WdXkp2KvZ7/PuWZA +MPlIxyjS+Ly11+BNE0xGQRp9Wz+2lABtpgNqssvU156+HkKd02rGheb2tj7MX9hG +uZzbwDAZzJPjzDQDD7d3cWsrVcfIdqVU7epHqIadnOF+X0ghJ39pAm6VVadnSXCt +WpOdIpB8KksUTCzV591Nr1wd +-----END CERTIFICATE----- + """ + def signable(self): + """ + Create and return a new :py:obj:`X509`. + """ + return X509() + + + def test_type(self): + """ + :py:obj:`X509` and :py:obj:`X509Type` refer to the same type object and can be used + to create instances of that type. + """ + self.assertIdentical(X509, X509Type) + self.assertConsistentType(X509, 'X509') + + + def test_construction(self): + """ + :py:obj:`X509` takes no arguments and returns an instance of :py:obj:`X509Type`. + """ + certificate = X509() + self.assertTrue( + isinstance(certificate, X509Type), + "%r is of type %r, should be %r" % (certificate, + type(certificate), + X509Type)) + self.assertEqual(type(X509Type).__name__, 'type') + self.assertEqual(type(certificate).__name__, 'X509') + self.assertEqual(type(certificate), X509Type) + self.assertEqual(type(certificate), X509) + + + def test_get_version_wrong_args(self): + """ + :py:obj:`X509.get_version` raises :py:obj:`TypeError` if invoked with any arguments. + """ + cert = X509() + self.assertRaises(TypeError, cert.get_version, None) + + + def test_set_version_wrong_args(self): + """ + :py:obj:`X509.set_version` raises :py:obj:`TypeError` if invoked with the wrong number + of arguments or an argument not of type :py:obj:`int`. + """ + cert = X509() + self.assertRaises(TypeError, cert.set_version) + self.assertRaises(TypeError, cert.set_version, None) + self.assertRaises(TypeError, cert.set_version, 1, None) + + + def test_version(self): + """ + :py:obj:`X509.set_version` sets the certificate version number. + :py:obj:`X509.get_version` retrieves it. + """ + cert = X509() + cert.set_version(1234) + self.assertEquals(cert.get_version(), 1234) + + + def test_get_serial_number_wrong_args(self): + """ + :py:obj:`X509.get_serial_number` raises :py:obj:`TypeError` if invoked with any + arguments. + """ + cert = X509() + self.assertRaises(TypeError, cert.get_serial_number, None) + + + def test_serial_number(self): + """ + The serial number of an :py:obj:`X509Type` can be retrieved and modified with + :py:obj:`X509Type.get_serial_number` and :py:obj:`X509Type.set_serial_number`. 
+ """ + certificate = X509() + self.assertRaises(TypeError, certificate.set_serial_number) + self.assertRaises(TypeError, certificate.set_serial_number, 1, 2) + self.assertRaises(TypeError, certificate.set_serial_number, "1") + self.assertRaises(TypeError, certificate.set_serial_number, 5.5) + self.assertEqual(certificate.get_serial_number(), 0) + certificate.set_serial_number(1) + self.assertEqual(certificate.get_serial_number(), 1) + certificate.set_serial_number(2 ** 32 + 1) + self.assertEqual(certificate.get_serial_number(), 2 ** 32 + 1) + certificate.set_serial_number(2 ** 64 + 1) + self.assertEqual(certificate.get_serial_number(), 2 ** 64 + 1) + certificate.set_serial_number(2 ** 128 + 1) + self.assertEqual(certificate.get_serial_number(), 2 ** 128 + 1) + + + def _setBoundTest(self, which): + """ + :py:obj:`X509Type.set_notBefore` takes a string in the format of an ASN1 + GENERALIZEDTIME and sets the beginning of the certificate's validity + period to it. + """ + certificate = X509() + set = getattr(certificate, 'set_not' + which) + get = getattr(certificate, 'get_not' + which) + + # Starts with no value. + self.assertEqual(get(), None) + + # GMT (Or is it UTC?) -exarkun + when = b("20040203040506Z") + set(when) + self.assertEqual(get(), when) + + # A plus two hours and thirty minutes offset + when = b("20040203040506+0530") + set(when) + self.assertEqual(get(), when) + + # A minus one hour fifteen minutes offset + when = b("20040203040506-0115") + set(when) + self.assertEqual(get(), when) + + # An invalid string results in a ValueError + self.assertRaises(ValueError, set, b("foo bar")) + + # The wrong number of arguments results in a TypeError. + self.assertRaises(TypeError, set) + self.assertRaises(TypeError, set, b("20040203040506Z"), b("20040203040506Z")) + self.assertRaises(TypeError, get, b("foo bar")) + + + # XXX ASN1_TIME (not GENERALIZEDTIME) + + def test_set_notBefore(self): + """ + :py:obj:`X509Type.set_notBefore` takes a string in the format of an ASN1 + GENERALIZEDTIME and sets the beginning of the certificate's validity + period to it. + """ + self._setBoundTest("Before") + + + def test_set_notAfter(self): + """ + :py:obj:`X509Type.set_notAfter` takes a string in the format of an ASN1 + GENERALIZEDTIME and sets the end of the certificate's validity period + to it. + """ + self._setBoundTest("After") + + + def test_get_notBefore(self): + """ + :py:obj:`X509Type.get_notBefore` returns a string in the format of an ASN1 + GENERALIZEDTIME even for certificates which store it as UTCTIME + internally. + """ + cert = load_certificate(FILETYPE_PEM, self.pemData) + self.assertEqual(cert.get_notBefore(), b("20090325123658Z")) + + + def test_get_notAfter(self): + """ + :py:obj:`X509Type.get_notAfter` returns a string in the format of an ASN1 + GENERALIZEDTIME even for certificates which store it as UTCTIME + internally. + """ + cert = load_certificate(FILETYPE_PEM, self.pemData) + self.assertEqual(cert.get_notAfter(), b("20170611123658Z")) + + + def test_gmtime_adj_notBefore_wrong_args(self): + """ + :py:obj:`X509Type.gmtime_adj_notBefore` raises :py:obj:`TypeError` if called with the + wrong number of arguments or a non-:py:obj:`int` argument. 
+ """ + cert = X509() + self.assertRaises(TypeError, cert.gmtime_adj_notBefore) + self.assertRaises(TypeError, cert.gmtime_adj_notBefore, None) + self.assertRaises(TypeError, cert.gmtime_adj_notBefore, 123, None) + + + def test_gmtime_adj_notBefore(self): + """ + :py:obj:`X509Type.gmtime_adj_notBefore` changes the not-before timestamp to be + the current time plus the number of seconds passed in. + """ + cert = load_certificate(FILETYPE_PEM, self.pemData) + now = datetime.utcnow() + timedelta(seconds=100) + cert.gmtime_adj_notBefore(100) + self.assertEqual(cert.get_notBefore(), b(now.strftime("%Y%m%d%H%M%SZ"))) + + + def test_gmtime_adj_notAfter_wrong_args(self): + """ + :py:obj:`X509Type.gmtime_adj_notAfter` raises :py:obj:`TypeError` if called with the + wrong number of arguments or a non-:py:obj:`int` argument. + """ + cert = X509() + self.assertRaises(TypeError, cert.gmtime_adj_notAfter) + self.assertRaises(TypeError, cert.gmtime_adj_notAfter, None) + self.assertRaises(TypeError, cert.gmtime_adj_notAfter, 123, None) + + + def test_gmtime_adj_notAfter(self): + """ + :py:obj:`X509Type.gmtime_adj_notAfter` changes the not-after timestamp to be + the current time plus the number of seconds passed in. + """ + cert = load_certificate(FILETYPE_PEM, self.pemData) + now = datetime.utcnow() + timedelta(seconds=100) + cert.gmtime_adj_notAfter(100) + self.assertEqual(cert.get_notAfter(), b(now.strftime("%Y%m%d%H%M%SZ"))) + + + def test_has_expired_wrong_args(self): + """ + :py:obj:`X509Type.has_expired` raises :py:obj:`TypeError` if called with any + arguments. + """ + cert = X509() + self.assertRaises(TypeError, cert.has_expired, None) + + + def test_has_expired(self): + """ + :py:obj:`X509Type.has_expired` returns :py:obj:`True` if the certificate's not-after + time is in the past. + """ + cert = X509() + cert.gmtime_adj_notAfter(-1) + self.assertTrue(cert.has_expired()) + + + def test_has_not_expired(self): + """ + :py:obj:`X509Type.has_expired` returns :py:obj:`False` if the certificate's not-after + time is in the future. + """ + cert = X509() + cert.gmtime_adj_notAfter(2) + self.assertFalse(cert.has_expired()) + + + def test_digest(self): + """ + :py:obj:`X509.digest` returns a string giving ":"-separated hex-encoded words + of the digest of the certificate. + """ + cert = X509() + self.assertEqual( + # This is MD5 instead of GOOD_DIGEST because the digest algorithm + # actually matters to the assertion (ie, another arbitrary, good + # digest will not product the same digest). + cert.digest("MD5"), + b("A8:EB:07:F8:53:25:0A:F2:56:05:C5:A5:C4:C4:C7:15")) + + + def _extcert(self, pkey, extensions): + cert = X509() + cert.set_pubkey(pkey) + cert.get_subject().commonName = "Unit Tests" + cert.get_issuer().commonName = "Unit Tests" + when = b(datetime.now().strftime("%Y%m%d%H%M%SZ")) + cert.set_notBefore(when) + cert.set_notAfter(when) + + cert.add_extensions(extensions) + return load_certificate( + FILETYPE_PEM, dump_certificate(FILETYPE_PEM, cert)) + + + def test_extension_count(self): + """ + :py:obj:`X509.get_extension_count` returns the number of extensions that are + present in the certificate. + """ + pkey = load_privatekey(FILETYPE_PEM, client_key_pem) + ca = X509Extension(b('basicConstraints'), True, b('CA:FALSE')) + key = X509Extension(b('keyUsage'), True, b('digitalSignature')) + subjectAltName = X509Extension( + b('subjectAltName'), True, b('DNS:example.com')) + + # Try a certificate with no extensions at all. 
+ c = self._extcert(pkey, []) + self.assertEqual(c.get_extension_count(), 0) + + # And a certificate with one + c = self._extcert(pkey, [ca]) + self.assertEqual(c.get_extension_count(), 1) + + # And a certificate with several + c = self._extcert(pkey, [ca, key, subjectAltName]) + self.assertEqual(c.get_extension_count(), 3) + + + def test_get_extension(self): + """ + :py:obj:`X509.get_extension` takes an integer and returns an :py:obj:`X509Extension` + corresponding to the extension at that index. + """ + pkey = load_privatekey(FILETYPE_PEM, client_key_pem) + ca = X509Extension(b('basicConstraints'), True, b('CA:FALSE')) + key = X509Extension(b('keyUsage'), True, b('digitalSignature')) + subjectAltName = X509Extension( + b('subjectAltName'), False, b('DNS:example.com')) + + cert = self._extcert(pkey, [ca, key, subjectAltName]) + + ext = cert.get_extension(0) + self.assertTrue(isinstance(ext, X509Extension)) + self.assertTrue(ext.get_critical()) + self.assertEqual(ext.get_short_name(), b('basicConstraints')) + + ext = cert.get_extension(1) + self.assertTrue(isinstance(ext, X509Extension)) + self.assertTrue(ext.get_critical()) + self.assertEqual(ext.get_short_name(), b('keyUsage')) + + ext = cert.get_extension(2) + self.assertTrue(isinstance(ext, X509Extension)) + self.assertFalse(ext.get_critical()) + self.assertEqual(ext.get_short_name(), b('subjectAltName')) + + self.assertRaises(IndexError, cert.get_extension, -1) + self.assertRaises(IndexError, cert.get_extension, 4) + self.assertRaises(TypeError, cert.get_extension, "hello") + + + def test_nullbyte_subjectAltName(self): + """ + The fields of a `subjectAltName` extension on an X509 may contain NUL + bytes and this value is reflected in the string representation of the + extension object. + """ + cert = load_certificate(FILETYPE_PEM, nulbyteSubjectAltNamePEM) + + ext = cert.get_extension(3) + self.assertEqual(ext.get_short_name(), b('subjectAltName')) + self.assertEqual( + b("DNS:altnull.python.org\x00example.com, " + "email:null@python.org\x00user@example.org, " + "URI:http://null.python.org\x00http://example.org, " + "IP Address:192.0.2.1, IP Address:2001:DB8:0:0:0:0:0:1\n"), + b(str(ext))) + + + def test_invalid_digest_algorithm(self): + """ + :py:obj:`X509.digest` raises :py:obj:`ValueError` if called with an unrecognized hash + algorithm. + """ + cert = X509() + self.assertRaises(ValueError, cert.digest, BAD_DIGEST) + + + def test_get_subject_wrong_args(self): + """ + :py:obj:`X509.get_subject` raises :py:obj:`TypeError` if called with any arguments. + """ + cert = X509() + self.assertRaises(TypeError, cert.get_subject, None) + + + def test_get_subject(self): + """ + :py:obj:`X509.get_subject` returns an :py:obj:`X509Name` instance. + """ + cert = load_certificate(FILETYPE_PEM, self.pemData) + subj = cert.get_subject() + self.assertTrue(isinstance(subj, X509Name)) + self.assertEquals( + subj.get_components(), + [(b('C'), b('US')), (b('ST'), b('IL')), (b('L'), b('Chicago')), + (b('O'), b('Testing')), (b('CN'), b('Testing Root CA'))]) + + + def test_set_subject_wrong_args(self): + """ + :py:obj:`X509.set_subject` raises a :py:obj:`TypeError` if called with the wrong + number of arguments or an argument not of type :py:obj:`X509Name`. 
+ """ + cert = X509() + self.assertRaises(TypeError, cert.set_subject) + self.assertRaises(TypeError, cert.set_subject, None) + self.assertRaises(TypeError, cert.set_subject, cert.get_subject(), None) + + + def test_set_subject(self): + """ + :py:obj:`X509.set_subject` changes the subject of the certificate to the one + passed in. + """ + cert = X509() + name = cert.get_subject() + name.C = 'AU' + name.O = 'Unit Tests' + cert.set_subject(name) + self.assertEquals( + cert.get_subject().get_components(), + [(b('C'), b('AU')), (b('O'), b('Unit Tests'))]) + + + def test_get_issuer_wrong_args(self): + """ + :py:obj:`X509.get_issuer` raises :py:obj:`TypeError` if called with any arguments. + """ + cert = X509() + self.assertRaises(TypeError, cert.get_issuer, None) + + + def test_get_issuer(self): + """ + :py:obj:`X509.get_issuer` returns an :py:obj:`X509Name` instance. + """ + cert = load_certificate(FILETYPE_PEM, self.pemData) + subj = cert.get_issuer() + self.assertTrue(isinstance(subj, X509Name)) + comp = subj.get_components() + self.assertEquals( + comp, + [(b('C'), b('US')), (b('ST'), b('IL')), (b('L'), b('Chicago')), + (b('O'), b('Testing')), (b('CN'), b('Testing Root CA'))]) + + + def test_set_issuer_wrong_args(self): + """ + :py:obj:`X509.set_issuer` raises a :py:obj:`TypeError` if called with the wrong + number of arguments or an argument not of type :py:obj:`X509Name`. + """ + cert = X509() + self.assertRaises(TypeError, cert.set_issuer) + self.assertRaises(TypeError, cert.set_issuer, None) + self.assertRaises(TypeError, cert.set_issuer, cert.get_issuer(), None) + + + def test_set_issuer(self): + """ + :py:obj:`X509.set_issuer` changes the issuer of the certificate to the one + passed in. + """ + cert = X509() + name = cert.get_issuer() + name.C = 'AU' + name.O = 'Unit Tests' + cert.set_issuer(name) + self.assertEquals( + cert.get_issuer().get_components(), + [(b('C'), b('AU')), (b('O'), b('Unit Tests'))]) + + + def test_get_pubkey_uninitialized(self): + """ + When called on a certificate with no public key, :py:obj:`X509.get_pubkey` + raises :py:obj:`OpenSSL.crypto.Error`. + """ + cert = X509() + self.assertRaises(Error, cert.get_pubkey) + + + def test_subject_name_hash_wrong_args(self): + """ + :py:obj:`X509.subject_name_hash` raises :py:obj:`TypeError` if called with any + arguments. + """ + cert = X509() + self.assertRaises(TypeError, cert.subject_name_hash, None) + + + def test_subject_name_hash(self): + """ + :py:obj:`X509.subject_name_hash` returns the hash of the certificate's subject + name. + """ + cert = load_certificate(FILETYPE_PEM, self.pemData) + self.assertIn( + cert.subject_name_hash(), + [3350047874, # OpenSSL 0.9.8, MD5 + 3278919224, # OpenSSL 1.0.0, SHA1 + ]) + + + def test_get_signature_algorithm(self): + """ + :py:obj:`X509Type.get_signature_algorithm` returns a string which means + the algorithm used to sign the certificate. + """ + cert = load_certificate(FILETYPE_PEM, self.pemData) + self.assertEqual( + b("sha1WithRSAEncryption"), cert.get_signature_algorithm()) + + + def test_get_undefined_signature_algorithm(self): + """ + :py:obj:`X509Type.get_signature_algorithm` raises :py:obj:`ValueError` if the + signature algorithm is undefined or unknown. + """ + # This certificate has been modified to indicate a bogus OID in the + # signature algorithm field so that OpenSSL does not recognize it. 
+ certPEM = b("""\ +-----BEGIN CERTIFICATE----- +MIIC/zCCAmigAwIBAgIBATAGBgJ8BQUAMHsxCzAJBgNVBAYTAlNHMREwDwYDVQQK +EwhNMkNyeXB0bzEUMBIGA1UECxMLTTJDcnlwdG8gQ0ExJDAiBgNVBAMTG00yQ3J5 +cHRvIENlcnRpZmljYXRlIE1hc3RlcjEdMBsGCSqGSIb3DQEJARYObmdwc0Bwb3N0 +MS5jb20wHhcNMDAwOTEwMDk1MTMwWhcNMDIwOTEwMDk1MTMwWjBTMQswCQYDVQQG +EwJTRzERMA8GA1UEChMITTJDcnlwdG8xEjAQBgNVBAMTCWxvY2FsaG9zdDEdMBsG +CSqGSIb3DQEJARYObmdwc0Bwb3N0MS5jb20wXDANBgkqhkiG9w0BAQEFAANLADBI +AkEArL57d26W9fNXvOhNlZzlPOACmvwOZ5AdNgLzJ1/MfsQQJ7hHVeHmTAjM664V ++fXvwUGJLziCeBo1ysWLRnl8CQIDAQABo4IBBDCCAQAwCQYDVR0TBAIwADAsBglg +hkgBhvhCAQ0EHxYdT3BlblNTTCBHZW5lcmF0ZWQgQ2VydGlmaWNhdGUwHQYDVR0O +BBYEFM+EgpK+eyZiwFU1aOPSbczbPSpVMIGlBgNVHSMEgZ0wgZqAFPuHI2nrnDqT +FeXFvylRT/7tKDgBoX+kfTB7MQswCQYDVQQGEwJTRzERMA8GA1UEChMITTJDcnlw +dG8xFDASBgNVBAsTC00yQ3J5cHRvIENBMSQwIgYDVQQDExtNMkNyeXB0byBDZXJ0 +aWZpY2F0ZSBNYXN0ZXIxHTAbBgkqhkiG9w0BCQEWDm5ncHNAcG9zdDEuY29tggEA +MA0GCSqGSIb3DQEBBAUAA4GBADv8KpPo+gfJxN2ERK1Y1l17sz/ZhzoGgm5XCdbx +jEY7xKfpQngV599k1xhl11IMqizDwu0855agrckg2MCTmOI9DZzDD77tAYb+Dk0O +PEVk0Mk/V0aIsDE9bolfCi/i/QWZ3N8s5nTWMNyBBBmoSliWCm4jkkRZRD0ejgTN +tgI5 +-----END CERTIFICATE----- +""") + cert = load_certificate(FILETYPE_PEM, certPEM) + self.assertRaises(ValueError, cert.get_signature_algorithm) + + + +class X509StoreTests(TestCase): + """ + Test for :py:obj:`OpenSSL.crypto.X509Store`. + """ + def test_type(self): + """ + :py:obj:`X509StoreType` is a type object. + """ + self.assertIdentical(X509Store, X509StoreType) + self.assertConsistentType(X509Store, 'X509Store') + + + def test_add_cert_wrong_args(self): + store = X509Store() + self.assertRaises(TypeError, store.add_cert) + self.assertRaises(TypeError, store.add_cert, object()) + self.assertRaises(TypeError, store.add_cert, X509(), object()) + + + def test_add_cert(self): + """ + :py:obj:`X509Store.add_cert` adds a :py:obj:`X509` instance to the + certificate store. + """ + cert = load_certificate(FILETYPE_PEM, cleartextCertificatePEM) + store = X509Store() + store.add_cert(cert) + + + def test_add_cert_rejects_duplicate(self): + """ + :py:obj:`X509Store.add_cert` raises :py:obj:`OpenSSL.crypto.Error` if an + attempt is made to add the same certificate to the store more than once. + """ + cert = load_certificate(FILETYPE_PEM, cleartextCertificatePEM) + store = X509Store() + store.add_cert(cert) + self.assertRaises(Error, store.add_cert, cert) + + + +class PKCS12Tests(TestCase): + """ + Test for :py:obj:`OpenSSL.crypto.PKCS12` and :py:obj:`OpenSSL.crypto.load_pkcs12`. + """ + pemData = cleartextCertificatePEM + cleartextPrivateKeyPEM + + def test_type(self): + """ + :py:obj:`PKCS12Type` is a type object. + """ + self.assertIdentical(PKCS12, PKCS12Type) + self.assertConsistentType(PKCS12, 'PKCS12') + + + def test_empty_construction(self): + """ + :py:obj:`PKCS12` returns a new instance of :py:obj:`PKCS12` with no certificate, + private key, CA certificates, or friendly name. + """ + p12 = PKCS12() + self.assertEqual(None, p12.get_certificate()) + self.assertEqual(None, p12.get_privatekey()) + self.assertEqual(None, p12.get_ca_certificates()) + self.assertEqual(None, p12.get_friendlyname()) + + + def test_type_errors(self): + """ + The :py:obj:`PKCS12` setter functions (:py:obj:`set_certificate`, :py:obj:`set_privatekey`, + :py:obj:`set_ca_certificates`, and :py:obj:`set_friendlyname`) raise :py:obj:`TypeError` + when passed objects of types other than those expected. 
+ """ + p12 = PKCS12() + self.assertRaises(TypeError, p12.set_certificate, 3) + self.assertRaises(TypeError, p12.set_certificate, PKey()) + self.assertRaises(TypeError, p12.set_certificate, X509) + self.assertRaises(TypeError, p12.set_privatekey, 3) + self.assertRaises(TypeError, p12.set_privatekey, 'legbone') + self.assertRaises(TypeError, p12.set_privatekey, X509()) + self.assertRaises(TypeError, p12.set_ca_certificates, 3) + self.assertRaises(TypeError, p12.set_ca_certificates, X509()) + self.assertRaises(TypeError, p12.set_ca_certificates, (3, 4)) + self.assertRaises(TypeError, p12.set_ca_certificates, ( PKey(), )) + self.assertRaises(TypeError, p12.set_friendlyname, 6) + self.assertRaises(TypeError, p12.set_friendlyname, ('foo', 'bar')) + + + def test_key_only(self): + """ + A :py:obj:`PKCS12` with only a private key can be exported using + :py:obj:`PKCS12.export` and loaded again using :py:obj:`load_pkcs12`. + """ + passwd = b"blah" + p12 = PKCS12() + pkey = load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM) + p12.set_privatekey(pkey) + self.assertEqual(None, p12.get_certificate()) + self.assertEqual(pkey, p12.get_privatekey()) + try: + dumped_p12 = p12.export(passphrase=passwd, iter=2, maciter=3) + except Error: + # Some versions of OpenSSL will throw an exception + # for this nearly useless PKCS12 we tried to generate: + # [('PKCS12 routines', 'PKCS12_create', 'invalid null argument')] + return + p12 = load_pkcs12(dumped_p12, passwd) + self.assertEqual(None, p12.get_ca_certificates()) + self.assertEqual(None, p12.get_certificate()) + + # OpenSSL fails to bring the key back to us. So sad. Perhaps in the + # future this will be improved. + self.assertTrue(isinstance(p12.get_privatekey(), (PKey, type(None)))) + + + def test_cert_only(self): + """ + A :py:obj:`PKCS12` with only a certificate can be exported using + :py:obj:`PKCS12.export` and loaded again using :py:obj:`load_pkcs12`. + """ + passwd = b"blah" + p12 = PKCS12() + cert = load_certificate(FILETYPE_PEM, cleartextCertificatePEM) + p12.set_certificate(cert) + self.assertEqual(cert, p12.get_certificate()) + self.assertEqual(None, p12.get_privatekey()) + try: + dumped_p12 = p12.export(passphrase=passwd, iter=2, maciter=3) + except Error: + # Some versions of OpenSSL will throw an exception + # for this nearly useless PKCS12 we tried to generate: + # [('PKCS12 routines', 'PKCS12_create', 'invalid null argument')] + return + p12 = load_pkcs12(dumped_p12, passwd) + self.assertEqual(None, p12.get_privatekey()) + + # OpenSSL fails to bring the cert back to us. Groany mcgroan. + self.assertTrue(isinstance(p12.get_certificate(), (X509, type(None)))) + + # Oh ho. It puts the certificate into the ca certificates list, in + # fact. Totally bogus, I would think. Nevertheless, let's exploit + # that to check to see if it reconstructed the certificate we expected + # it to. At some point, hopefully this will change so that + # p12.get_certificate() is actually what returns the loaded + # certificate. + self.assertEqual( + cleartextCertificatePEM, + dump_certificate(FILETYPE_PEM, p12.get_ca_certificates()[0])) + + + def gen_pkcs12(self, cert_pem=None, key_pem=None, ca_pem=None, friendly_name=None): + """ + Generate a PKCS12 object with components from PEM. Verify that the set + functions return None. 
+ """ + p12 = PKCS12() + if cert_pem: + ret = p12.set_certificate(load_certificate(FILETYPE_PEM, cert_pem)) + self.assertEqual(ret, None) + if key_pem: + ret = p12.set_privatekey(load_privatekey(FILETYPE_PEM, key_pem)) + self.assertEqual(ret, None) + if ca_pem: + ret = p12.set_ca_certificates((load_certificate(FILETYPE_PEM, ca_pem),)) + self.assertEqual(ret, None) + if friendly_name: + ret = p12.set_friendlyname(friendly_name) + self.assertEqual(ret, None) + return p12 + + + def check_recovery(self, p12_str, key=None, cert=None, ca=None, passwd=b"", + extra=()): + """ + Use openssl program to confirm three components are recoverable from a + PKCS12 string. + """ + if key: + recovered_key = _runopenssl( + p12_str, b"pkcs12", b"-nocerts", b"-nodes", b"-passin", + b"pass:" + passwd, *extra) + self.assertEqual(recovered_key[-len(key):], key) + if cert: + recovered_cert = _runopenssl( + p12_str, b"pkcs12", b"-clcerts", b"-nodes", b"-passin", + b"pass:" + passwd, b"-nokeys", *extra) + self.assertEqual(recovered_cert[-len(cert):], cert) + if ca: + recovered_cert = _runopenssl( + p12_str, b"pkcs12", b"-cacerts", b"-nodes", b"-passin", + b"pass:" + passwd, b"-nokeys", *extra) + self.assertEqual(recovered_cert[-len(ca):], ca) + + + def verify_pkcs12_container(self, p12): + """ + Verify that the PKCS#12 container contains the correct client + certificate and private key. + + :param p12: The PKCS12 instance to verify. + :type p12: :py:class:`PKCS12` + """ + cert_pem = dump_certificate(FILETYPE_PEM, p12.get_certificate()) + key_pem = dump_privatekey(FILETYPE_PEM, p12.get_privatekey()) + self.assertEqual( + (client_cert_pem, client_key_pem, None), + (cert_pem, key_pem, p12.get_ca_certificates())) + + + def test_load_pkcs12(self): + """ + A PKCS12 string generated using the openssl command line can be loaded + with :py:obj:`load_pkcs12` and its components extracted and examined. + """ + passwd = b"whatever" + pem = client_key_pem + client_cert_pem + p12_str = _runopenssl( + pem, b"pkcs12", b"-export", b"-clcerts", b"-passout", b"pass:" + passwd) + p12 = load_pkcs12(p12_str, passphrase=passwd) + self.verify_pkcs12_container(p12) + + + def test_load_pkcs12_text_passphrase(self): + """ + A PKCS12 string generated using the openssl command line can be loaded + with :py:obj:`load_pkcs12` and its components extracted and examined. + Using text as passphrase instead of bytes. DeprecationWarning expected. + """ + pem = client_key_pem + client_cert_pem + passwd = b"whatever" + p12_str = _runopenssl(pem, b"pkcs12", b"-export", b"-clcerts", + b"-passout", b"pass:" + passwd) + with catch_warnings(record=True) as w: + simplefilter("always") + p12 = load_pkcs12(p12_str, passphrase=b"whatever".decode("ascii")) + + self.assertEqual( + "{0} for passphrase is no longer accepted, use bytes".format( + WARNING_TYPE_EXPECTED + ), + str(w[-1].message) + ) + self.assertIs(w[-1].category, DeprecationWarning) + + self.verify_pkcs12_container(p12) + + + def test_load_pkcs12_no_passphrase(self): + """ + A PKCS12 string generated using openssl command line can be loaded with + :py:obj:`load_pkcs12` without a passphrase and its components extracted + and examined. + """ + pem = client_key_pem + client_cert_pem + p12_str = _runopenssl( + pem, b"pkcs12", b"-export", b"-clcerts", b"-passout", b"pass:") + p12 = load_pkcs12(p12_str) + self.verify_pkcs12_container(p12) + + + def _dump_and_load(self, dump_passphrase, load_passphrase): + """ + A helper method to dump and load a PKCS12 object. 
+ """ + p12 = self.gen_pkcs12(client_cert_pem, client_key_pem) + dumped_p12 = p12.export(passphrase=dump_passphrase, iter=2, maciter=3) + return load_pkcs12(dumped_p12, passphrase=load_passphrase) + + + def test_load_pkcs12_null_passphrase_load_empty(self): + """ + A PKCS12 string can be dumped with a null passphrase, loaded with an + empty passphrase with :py:obj:`load_pkcs12`, and its components + extracted and examined. + """ + self.verify_pkcs12_container( + self._dump_and_load(dump_passphrase=None, load_passphrase=b'')) + + + def test_load_pkcs12_null_passphrase_load_null(self): + """ + A PKCS12 string can be dumped with a null passphrase, loaded with a + null passphrase with :py:obj:`load_pkcs12`, and its components + extracted and examined. + """ + self.verify_pkcs12_container( + self._dump_and_load(dump_passphrase=None, load_passphrase=None)) + + + def test_load_pkcs12_empty_passphrase_load_empty(self): + """ + A PKCS12 string can be dumped with an empty passphrase, loaded with an + empty passphrase with :py:obj:`load_pkcs12`, and its components + extracted and examined. + """ + self.verify_pkcs12_container( + self._dump_and_load(dump_passphrase=b'', load_passphrase=b'')) + + + def test_load_pkcs12_empty_passphrase_load_null(self): + """ + A PKCS12 string can be dumped with an empty passphrase, loaded with a + null passphrase with :py:obj:`load_pkcs12`, and its components + extracted and examined. + """ + self.verify_pkcs12_container( + self._dump_and_load(dump_passphrase=b'', load_passphrase=None)) + + + def test_load_pkcs12_garbage(self): + """ + :py:obj:`load_pkcs12` raises :py:obj:`OpenSSL.crypto.Error` when passed a string + which is not a PKCS12 dump. + """ + passwd = 'whatever' + e = self.assertRaises(Error, load_pkcs12, b'fruit loops', passwd) + self.assertEqual( e.args[0][0][0], 'asn1 encoding routines') + self.assertEqual( len(e.args[0][0]), 3) + + + def test_replace(self): + """ + :py:obj:`PKCS12.set_certificate` replaces the certificate in a PKCS12 cluster. + :py:obj:`PKCS12.set_privatekey` replaces the private key. + :py:obj:`PKCS12.set_ca_certificates` replaces the CA certificates. + """ + p12 = self.gen_pkcs12(client_cert_pem, client_key_pem, root_cert_pem) + p12.set_certificate(load_certificate(FILETYPE_PEM, server_cert_pem)) + p12.set_privatekey(load_privatekey(FILETYPE_PEM, server_key_pem)) + root_cert = load_certificate(FILETYPE_PEM, root_cert_pem) + client_cert = load_certificate(FILETYPE_PEM, client_cert_pem) + p12.set_ca_certificates([root_cert]) # not a tuple + self.assertEqual(1, len(p12.get_ca_certificates())) + self.assertEqual(root_cert, p12.get_ca_certificates()[0]) + p12.set_ca_certificates([client_cert, root_cert]) + self.assertEqual(2, len(p12.get_ca_certificates())) + self.assertEqual(client_cert, p12.get_ca_certificates()[0]) + self.assertEqual(root_cert, p12.get_ca_certificates()[1]) + + + def test_friendly_name(self): + """ + The *friendlyName* of a PKCS12 can be set and retrieved via + :py:obj:`PKCS12.get_friendlyname` and :py:obj:`PKCS12_set_friendlyname`, and a + :py:obj:`PKCS12` with a friendly name set can be dumped with :py:obj:`PKCS12.export`. 
+ """ + passwd = b'Dogmeat[]{}!@#$%^&*()~`?/.,<>-_+=";:' + p12 = self.gen_pkcs12(server_cert_pem, server_key_pem, root_cert_pem) + for friendly_name in [b('Serverlicious'), None, b('###')]: + p12.set_friendlyname(friendly_name) + self.assertEqual(p12.get_friendlyname(), friendly_name) + dumped_p12 = p12.export(passphrase=passwd, iter=2, maciter=3) + reloaded_p12 = load_pkcs12(dumped_p12, passwd) + self.assertEqual( + p12.get_friendlyname(), reloaded_p12.get_friendlyname()) + # We would use the openssl program to confirm the friendly + # name, but it is not possible. The pkcs12 command + # does not store the friendly name in the cert's + # alias, which we could then extract. + self.check_recovery( + dumped_p12, key=server_key_pem, cert=server_cert_pem, + ca=root_cert_pem, passwd=passwd) + + + def test_various_empty_passphrases(self): + """ + Test that missing, None, and '' passphrases are identical for PKCS12 + export. + """ + p12 = self.gen_pkcs12(client_cert_pem, client_key_pem, root_cert_pem) + passwd = b"" + dumped_p12_empty = p12.export(iter=2, maciter=0, passphrase=passwd) + dumped_p12_none = p12.export(iter=3, maciter=2, passphrase=None) + dumped_p12_nopw = p12.export(iter=9, maciter=4) + for dumped_p12 in [dumped_p12_empty, dumped_p12_none, dumped_p12_nopw]: + self.check_recovery( + dumped_p12, key=client_key_pem, cert=client_cert_pem, + ca=root_cert_pem, passwd=passwd) + + + def test_removing_ca_cert(self): + """ + Passing :py:obj:`None` to :py:obj:`PKCS12.set_ca_certificates` removes all CA + certificates. + """ + p12 = self.gen_pkcs12(server_cert_pem, server_key_pem, root_cert_pem) + p12.set_ca_certificates(None) + self.assertEqual(None, p12.get_ca_certificates()) + + + def test_export_without_mac(self): + """ + Exporting a PKCS12 with a :py:obj:`maciter` of ``-1`` excludes the MAC + entirely. + """ + passwd = b"Lake Michigan" + p12 = self.gen_pkcs12(server_cert_pem, server_key_pem, root_cert_pem) + dumped_p12 = p12.export(maciter=-1, passphrase=passwd, iter=2) + self.check_recovery( + dumped_p12, key=server_key_pem, cert=server_cert_pem, + passwd=passwd, extra=(b"-nomacver",)) + + + def test_load_without_mac(self): + """ + Loading a PKCS12 without a MAC does something other than crash. + """ + passwd = b"Lake Michigan" + p12 = self.gen_pkcs12(server_cert_pem, server_key_pem, root_cert_pem) + dumped_p12 = p12.export(maciter=-1, passphrase=passwd, iter=2) + try: + recovered_p12 = load_pkcs12(dumped_p12, passwd) + # The person who generated this PCKS12 should be flogged, + # or better yet we should have a means to determine + # whether a PCKS12 had a MAC that was verified. + # Anyway, libopenssl chooses to allow it, so the + # pyopenssl binding does as well. + self.assertTrue(isinstance(recovered_p12, PKCS12)) + except Error: + # Failing here with an exception is preferred as some openssl + # versions do. + pass + + + def test_zero_len_list_for_ca(self): + """ + A PKCS12 with an empty CA certificates list can be exported. + """ + passwd = 'Hobie 18' + p12 = self.gen_pkcs12(server_cert_pem, server_key_pem) + # p12.set_ca_certificates([]) + # self.assertEqual((), p12.get_ca_certificates()) + # dumped_p12 = p12.export(passphrase=passwd, iter=3) + # self.check_recovery( + # dumped_p12, key=server_key_pem, cert=server_cert_pem, + # passwd=passwd) + + + def test_export_without_args(self): + """ + All the arguments to :py:obj:`PKCS12.export` are optional. 
+ """ + p12 = self.gen_pkcs12(server_cert_pem, server_key_pem, root_cert_pem) + dumped_p12 = p12.export() # no args + self.check_recovery( + dumped_p12, key=server_key_pem, cert=server_cert_pem, passwd=b"") + + + def test_export_without_bytes(self): + """ + Test :py:obj:`PKCS12.export` with text not bytes as passphrase + """ + p12 = self.gen_pkcs12(server_cert_pem, server_key_pem, root_cert_pem) + + with catch_warnings(record=True) as w: + simplefilter("always") + dumped_p12 = p12.export(passphrase=b"randomtext".decode("ascii")) + self.assertEqual( + "{0} for passphrase is no longer accepted, use bytes".format( + WARNING_TYPE_EXPECTED + ), + str(w[-1].message) + ) + self.assertIs(w[-1].category, DeprecationWarning) + self.check_recovery( + dumped_p12, key=server_key_pem, cert=server_cert_pem, passwd=b"randomtext") + + + def test_key_cert_mismatch(self): + """ + :py:obj:`PKCS12.export` raises an exception when a key and certificate + mismatch. + """ + p12 = self.gen_pkcs12(server_cert_pem, client_key_pem, root_cert_pem) + self.assertRaises(Error, p12.export) + + + +# These quoting functions taken directly from Twisted's twisted.python.win32. +_cmdLineQuoteRe = re.compile(br'(\\*)"') +_cmdLineQuoteRe2 = re.compile(br'(\\+)\Z') +def cmdLineQuote(s): + """ + Internal method for quoting a single command-line argument. + + See http://www.perlmonks.org/?node_id=764004 + + :type: :py:obj:`str` + :param s: A single unquoted string to quote for something that is expecting + cmd.exe-style quoting + + :rtype: :py:obj:`str` + :return: A cmd.exe-style quoted string + """ + s = _cmdLineQuoteRe2.sub(br"\1\1", _cmdLineQuoteRe.sub(br'\1\1\\"', s)) + return b'"' + s + b'"' + + + +def quoteArguments(arguments): + """ + Quote an iterable of command-line arguments for passing to CreateProcess or + a similar API. This allows the list passed to :py:obj:`reactor.spawnProcess` to + match the child process's :py:obj:`sys.argv` properly. + + :type arguments: :py:obj:`iterable` of :py:obj:`str` + :param arguments: An iterable of unquoted arguments to quote + + :rtype: :py:obj:`str` + :return: A space-delimited string containing quoted versions of :py:obj:`arguments` + """ + return b' '.join(map(cmdLineQuote, arguments)) + + + +def _runopenssl(pem, *args): + """ + Run the command line openssl tool with the given arguments and write + the given PEM to its stdin. Not safe for quotes. + """ + if os.name == 'posix': + command = b"openssl " + b" ".join([ + (b"'" + arg.replace(b"'", b"'\\''") + b"'") + for arg in args]) + else: + command = b"openssl " + quoteArguments(args) + proc = Popen(native(command), shell=True, stdin=PIPE, stdout=PIPE) + proc.stdin.write(pem) + proc.stdin.close() + output = proc.stdout.read() + proc.stdout.close() + proc.wait() + return output + + + +class FunctionTests(TestCase): + """ + Tests for free-functions in the :py:obj:`OpenSSL.crypto` module. + """ + + def test_load_privatekey_invalid_format(self): + """ + :py:obj:`load_privatekey` raises :py:obj:`ValueError` if passed an unknown filetype. + """ + self.assertRaises(ValueError, load_privatekey, 100, root_key_pem) + + + def test_load_privatekey_invalid_passphrase_type(self): + """ + :py:obj:`load_privatekey` raises :py:obj:`TypeError` if passed a passphrase that is + neither a :py:obj:`str` nor a callable. 
+ """ + self.assertRaises( + TypeError, + load_privatekey, + FILETYPE_PEM, encryptedPrivateKeyPEMPassphrase, object()) + + + def test_load_privatekey_wrong_args(self): + """ + :py:obj:`load_privatekey` raises :py:obj:`TypeError` if called with the wrong number + of arguments. + """ + self.assertRaises(TypeError, load_privatekey) + + + def test_load_privatekey_wrongPassphrase(self): + """ + :py:obj:`load_privatekey` raises :py:obj:`OpenSSL.crypto.Error` when it is passed an + encrypted PEM and an incorrect passphrase. + """ + self.assertRaises( + Error, + load_privatekey, FILETYPE_PEM, encryptedPrivateKeyPEM, b("quack")) + + + def test_load_privatekey_passphraseWrongType(self): + """ + :py:obj:`load_privatekey` raises :py:obj:`ValueError` when it is passed a passphrase + with a private key encoded in a format, that doesn't support + encryption. + """ + key = load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM) + blob = dump_privatekey(FILETYPE_ASN1, key) + self.assertRaises(ValueError, + load_privatekey, FILETYPE_ASN1, blob, "secret") + + + def test_load_privatekey_passphrase(self): + """ + :py:obj:`load_privatekey` can create a :py:obj:`PKey` object from an encrypted PEM + string if given the passphrase. + """ + key = load_privatekey( + FILETYPE_PEM, encryptedPrivateKeyPEM, + encryptedPrivateKeyPEMPassphrase) + self.assertTrue(isinstance(key, PKeyType)) + + + def test_load_privatekey_passphrase_exception(self): + """ + If the passphrase callback raises an exception, that exception is raised + by :py:obj:`load_privatekey`. + """ + def cb(ignored): + raise ArithmeticError + + self.assertRaises(ArithmeticError, + load_privatekey, FILETYPE_PEM, encryptedPrivateKeyPEM, cb) + + + def test_load_privatekey_wrongPassphraseCallback(self): + """ + :py:obj:`load_privatekey` raises :py:obj:`OpenSSL.crypto.Error` when it + is passed an encrypted PEM and a passphrase callback which returns an + incorrect passphrase. + """ + called = [] + def cb(*a): + called.append(None) + return b("quack") + self.assertRaises( + Error, + load_privatekey, FILETYPE_PEM, encryptedPrivateKeyPEM, cb) + self.assertTrue(called) + + + def test_load_privatekey_passphraseCallback(self): + """ + :py:obj:`load_privatekey` can create a :py:obj:`PKey` object from an encrypted PEM + string if given a passphrase callback which returns the correct + password. + """ + called = [] + def cb(writing): + called.append(writing) + return encryptedPrivateKeyPEMPassphrase + key = load_privatekey(FILETYPE_PEM, encryptedPrivateKeyPEM, cb) + self.assertTrue(isinstance(key, PKeyType)) + self.assertEqual(called, [False]) + + + def test_load_privatekey_passphrase_wrong_return_type(self): + """ + :py:obj:`load_privatekey` raises :py:obj:`ValueError` if the passphrase + callback returns something other than a byte string. + """ + self.assertRaises( + ValueError, + load_privatekey, + FILETYPE_PEM, encryptedPrivateKeyPEM, lambda *args: 3) + + + def test_dump_privatekey_wrong_args(self): + """ + :py:obj:`dump_privatekey` raises :py:obj:`TypeError` if called with the wrong number + of arguments. + """ + self.assertRaises(TypeError, dump_privatekey) + # If cipher name is given, password is required. + self.assertRaises( + TypeError, dump_privatekey, FILETYPE_PEM, PKey(), GOOD_CIPHER) + + + def test_dump_privatekey_unknown_cipher(self): + """ + :py:obj:`dump_privatekey` raises :py:obj:`ValueError` if called with an unrecognized + cipher name. 
+ """ + key = PKey() + key.generate_key(TYPE_RSA, 512) + self.assertRaises( + ValueError, dump_privatekey, + FILETYPE_PEM, key, BAD_CIPHER, "passphrase") + + + def test_dump_privatekey_invalid_passphrase_type(self): + """ + :py:obj:`dump_privatekey` raises :py:obj:`TypeError` if called with a passphrase which + is neither a :py:obj:`str` nor a callable. + """ + key = PKey() + key.generate_key(TYPE_RSA, 512) + self.assertRaises( + TypeError, + dump_privatekey, FILETYPE_PEM, key, GOOD_CIPHER, object()) + + + def test_dump_privatekey_invalid_filetype(self): + """ + :py:obj:`dump_privatekey` raises :py:obj:`ValueError` if called with an unrecognized + filetype. + """ + key = PKey() + key.generate_key(TYPE_RSA, 512) + self.assertRaises(ValueError, dump_privatekey, 100, key) + + + def test_load_privatekey_passphraseCallbackLength(self): + """ + :py:obj:`crypto.load_privatekey` should raise an error when the passphrase + provided by the callback is too long, not silently truncate it. + """ + def cb(ignored): + return "a" * 1025 + + self.assertRaises(ValueError, + load_privatekey, FILETYPE_PEM, encryptedPrivateKeyPEM, cb) + + + def test_dump_privatekey_passphrase(self): + """ + :py:obj:`dump_privatekey` writes an encrypted PEM when given a passphrase. + """ + passphrase = b("foo") + key = load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM) + pem = dump_privatekey(FILETYPE_PEM, key, GOOD_CIPHER, passphrase) + self.assertTrue(isinstance(pem, binary_type)) + loadedKey = load_privatekey(FILETYPE_PEM, pem, passphrase) + self.assertTrue(isinstance(loadedKey, PKeyType)) + self.assertEqual(loadedKey.type(), key.type()) + self.assertEqual(loadedKey.bits(), key.bits()) + + + def test_dump_privatekey_passphraseWrongType(self): + """ + :py:obj:`dump_privatekey` raises :py:obj:`ValueError` when it is passed a passphrase + with a private key encoded in a format, that doesn't support + encryption. + """ + key = load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM) + self.assertRaises(ValueError, + dump_privatekey, FILETYPE_ASN1, key, GOOD_CIPHER, "secret") + + + def test_dump_certificate(self): + """ + :py:obj:`dump_certificate` writes PEM, DER, and text. + """ + pemData = cleartextCertificatePEM + cleartextPrivateKeyPEM + cert = load_certificate(FILETYPE_PEM, pemData) + dumped_pem = dump_certificate(FILETYPE_PEM, cert) + self.assertEqual(dumped_pem, cleartextCertificatePEM) + dumped_der = dump_certificate(FILETYPE_ASN1, cert) + good_der = _runopenssl(dumped_pem, b"x509", b"-outform", b"DER") + self.assertEqual(dumped_der, good_der) + cert2 = load_certificate(FILETYPE_ASN1, dumped_der) + dumped_pem2 = dump_certificate(FILETYPE_PEM, cert2) + self.assertEqual(dumped_pem2, cleartextCertificatePEM) + dumped_text = dump_certificate(FILETYPE_TEXT, cert) + good_text = _runopenssl(dumped_pem, b"x509", b"-noout", b"-text") + self.assertEqual(dumped_text, good_text) + + + def test_dump_privatekey_pem(self): + """ + :py:obj:`dump_privatekey` writes a PEM + """ + key = load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM) + self.assertTrue(key.check()) + dumped_pem = dump_privatekey(FILETYPE_PEM, key) + self.assertEqual(dumped_pem, cleartextPrivateKeyPEM) + + + def test_dump_privatekey_asn1(self): + """ + :py:obj:`dump_privatekey` writes a DER + """ + key = load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM) + dumped_pem = dump_privatekey(FILETYPE_PEM, key) + + dumped_der = dump_privatekey(FILETYPE_ASN1, key) + # XXX This OpenSSL call writes "writing RSA key" to standard out. Sad. 
+ good_der = _runopenssl(dumped_pem, b"rsa", b"-outform", b"DER") + self.assertEqual(dumped_der, good_der) + key2 = load_privatekey(FILETYPE_ASN1, dumped_der) + dumped_pem2 = dump_privatekey(FILETYPE_PEM, key2) + self.assertEqual(dumped_pem2, cleartextPrivateKeyPEM) + + + def test_dump_privatekey_text(self): + """ + :py:obj:`dump_privatekey` writes a text + """ + key = load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM) + dumped_pem = dump_privatekey(FILETYPE_PEM, key) + + dumped_text = dump_privatekey(FILETYPE_TEXT, key) + good_text = _runopenssl(dumped_pem, b"rsa", b"-noout", b"-text") + self.assertEqual(dumped_text, good_text) + + + def test_dump_certificate_request(self): + """ + :py:obj:`dump_certificate_request` writes a PEM, DER, and text. + """ + req = load_certificate_request(FILETYPE_PEM, cleartextCertificateRequestPEM) + dumped_pem = dump_certificate_request(FILETYPE_PEM, req) + self.assertEqual(dumped_pem, cleartextCertificateRequestPEM) + dumped_der = dump_certificate_request(FILETYPE_ASN1, req) + good_der = _runopenssl(dumped_pem, b"req", b"-outform", b"DER") + self.assertEqual(dumped_der, good_der) + req2 = load_certificate_request(FILETYPE_ASN1, dumped_der) + dumped_pem2 = dump_certificate_request(FILETYPE_PEM, req2) + self.assertEqual(dumped_pem2, cleartextCertificateRequestPEM) + dumped_text = dump_certificate_request(FILETYPE_TEXT, req) + good_text = _runopenssl(dumped_pem, b"req", b"-noout", b"-text") + self.assertEqual(dumped_text, good_text) + self.assertRaises(ValueError, dump_certificate_request, 100, req) + + + def test_dump_privatekey_passphraseCallback(self): + """ + :py:obj:`dump_privatekey` writes an encrypted PEM when given a callback which + returns the correct passphrase. + """ + passphrase = b("foo") + called = [] + def cb(writing): + called.append(writing) + return passphrase + key = load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM) + pem = dump_privatekey(FILETYPE_PEM, key, GOOD_CIPHER, cb) + self.assertTrue(isinstance(pem, binary_type)) + self.assertEqual(called, [True]) + loadedKey = load_privatekey(FILETYPE_PEM, pem, passphrase) + self.assertTrue(isinstance(loadedKey, PKeyType)) + self.assertEqual(loadedKey.type(), key.type()) + self.assertEqual(loadedKey.bits(), key.bits()) + + + def test_dump_privatekey_passphrase_exception(self): + """ + :py:obj:`dump_privatekey` should not overwrite the exception raised + by the passphrase callback. + """ + def cb(ignored): + raise ArithmeticError + + key = load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM) + self.assertRaises(ArithmeticError, + dump_privatekey, FILETYPE_PEM, key, GOOD_CIPHER, cb) + + + def test_dump_privatekey_passphraseCallbackLength(self): + """ + :py:obj:`crypto.dump_privatekey` should raise an error when the passphrase + provided by the callback is too long, not silently truncate it. + """ + def cb(ignored): + return "a" * 1025 + + key = load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM) + self.assertRaises(ValueError, + dump_privatekey, FILETYPE_PEM, key, GOOD_CIPHER, cb) + + + def test_load_pkcs7_data_pem(self): + """ + :py:obj:`load_pkcs7_data` accepts a PKCS#7 string and returns an instance of + :py:obj:`PKCS7Type`. + """ + pkcs7 = load_pkcs7_data(FILETYPE_PEM, pkcs7Data) + self.assertTrue(isinstance(pkcs7, PKCS7Type)) + + + def test_load_pkcs7_data_asn1(self): + """ + :py:obj:`load_pkcs7_data` accepts a bytes containing ASN1 data + representing PKCS#7 and returns an instance of :py:obj`PKCS7Type`. 
+ """ + pkcs7 = load_pkcs7_data(FILETYPE_ASN1, pkcs7DataASN1) + self.assertTrue(isinstance(pkcs7, PKCS7Type)) + + + def test_load_pkcs7_data_invalid(self): + """ + If the data passed to :py:obj:`load_pkcs7_data` is invalid, + :py:obj:`Error` is raised. + """ + self.assertRaises(Error, load_pkcs7_data, FILETYPE_PEM, b"foo") + + + +class LoadCertificateTests(TestCase): + """ + Tests for :py:obj:`load_certificate_request`. + """ + def test_badFileType(self): + """ + If the file type passed to :py:obj:`load_certificate_request` is + neither :py:obj:`FILETYPE_PEM` nor :py:obj:`FILETYPE_ASN1` then + :py:class:`ValueError` is raised. + """ + self.assertRaises(ValueError, load_certificate_request, object(), b"") + + + +class PKCS7Tests(TestCase): + """ + Tests for :py:obj:`PKCS7Type`. + """ + def test_type(self): + """ + :py:obj:`PKCS7Type` is a type object. + """ + self.assertTrue(isinstance(PKCS7Type, type)) + self.assertEqual(PKCS7Type.__name__, 'PKCS7') + + # XXX This doesn't currently work. + # self.assertIdentical(PKCS7, PKCS7Type) + + + # XXX Opposite results for all these following methods + + def test_type_is_signed_wrong_args(self): + """ + :py:obj:`PKCS7Type.type_is_signed` raises :py:obj:`TypeError` if called with any + arguments. + """ + pkcs7 = load_pkcs7_data(FILETYPE_PEM, pkcs7Data) + self.assertRaises(TypeError, pkcs7.type_is_signed, None) + + + def test_type_is_signed(self): + """ + :py:obj:`PKCS7Type.type_is_signed` returns :py:obj:`True` if the PKCS7 object is of + the type *signed*. + """ + pkcs7 = load_pkcs7_data(FILETYPE_PEM, pkcs7Data) + self.assertTrue(pkcs7.type_is_signed()) + + + def test_type_is_enveloped_wrong_args(self): + """ + :py:obj:`PKCS7Type.type_is_enveloped` raises :py:obj:`TypeError` if called with any + arguments. + """ + pkcs7 = load_pkcs7_data(FILETYPE_PEM, pkcs7Data) + self.assertRaises(TypeError, pkcs7.type_is_enveloped, None) + + + def test_type_is_enveloped(self): + """ + :py:obj:`PKCS7Type.type_is_enveloped` returns :py:obj:`False` if the PKCS7 object is + not of the type *enveloped*. + """ + pkcs7 = load_pkcs7_data(FILETYPE_PEM, pkcs7Data) + self.assertFalse(pkcs7.type_is_enveloped()) + + + def test_type_is_signedAndEnveloped_wrong_args(self): + """ + :py:obj:`PKCS7Type.type_is_signedAndEnveloped` raises :py:obj:`TypeError` if called + with any arguments. + """ + pkcs7 = load_pkcs7_data(FILETYPE_PEM, pkcs7Data) + self.assertRaises(TypeError, pkcs7.type_is_signedAndEnveloped, None) + + + def test_type_is_signedAndEnveloped(self): + """ + :py:obj:`PKCS7Type.type_is_signedAndEnveloped` returns :py:obj:`False` if the PKCS7 + object is not of the type *signed and enveloped*. + """ + pkcs7 = load_pkcs7_data(FILETYPE_PEM, pkcs7Data) + self.assertFalse(pkcs7.type_is_signedAndEnveloped()) + + + def test_type_is_data(self): + """ + :py:obj:`PKCS7Type.type_is_data` returns :py:obj:`False` if the PKCS7 object is not of + the type data. + """ + pkcs7 = load_pkcs7_data(FILETYPE_PEM, pkcs7Data) + self.assertFalse(pkcs7.type_is_data()) + + + def test_type_is_data_wrong_args(self): + """ + :py:obj:`PKCS7Type.type_is_data` raises :py:obj:`TypeError` if called with any + arguments. + """ + pkcs7 = load_pkcs7_data(FILETYPE_PEM, pkcs7Data) + self.assertRaises(TypeError, pkcs7.type_is_data, None) + + + def test_get_type_name_wrong_args(self): + """ + :py:obj:`PKCS7Type.get_type_name` raises :py:obj:`TypeError` if called with any + arguments. 
+ """ + pkcs7 = load_pkcs7_data(FILETYPE_PEM, pkcs7Data) + self.assertRaises(TypeError, pkcs7.get_type_name, None) + + + def test_get_type_name(self): + """ + :py:obj:`PKCS7Type.get_type_name` returns a :py:obj:`str` giving the type name. + """ + pkcs7 = load_pkcs7_data(FILETYPE_PEM, pkcs7Data) + self.assertEquals(pkcs7.get_type_name(), b('pkcs7-signedData')) + + + def test_attribute(self): + """ + If an attribute other than one of the methods tested here is accessed on + an instance of :py:obj:`PKCS7Type`, :py:obj:`AttributeError` is raised. + """ + pkcs7 = load_pkcs7_data(FILETYPE_PEM, pkcs7Data) + self.assertRaises(AttributeError, getattr, pkcs7, "foo") + + + +class NetscapeSPKITests(TestCase, _PKeyInteractionTestsMixin): + """ + Tests for :py:obj:`OpenSSL.crypto.NetscapeSPKI`. + """ + def signable(self): + """ + Return a new :py:obj:`NetscapeSPKI` for use with signing tests. + """ + return NetscapeSPKI() + + + def test_type(self): + """ + :py:obj:`NetscapeSPKI` and :py:obj:`NetscapeSPKIType` refer to the same type object + and can be used to create instances of that type. + """ + self.assertIdentical(NetscapeSPKI, NetscapeSPKIType) + self.assertConsistentType(NetscapeSPKI, 'NetscapeSPKI') + + + def test_construction(self): + """ + :py:obj:`NetscapeSPKI` returns an instance of :py:obj:`NetscapeSPKIType`. + """ + nspki = NetscapeSPKI() + self.assertTrue(isinstance(nspki, NetscapeSPKIType)) + + + def test_invalid_attribute(self): + """ + Accessing a non-existent attribute of a :py:obj:`NetscapeSPKI` instance causes + an :py:obj:`AttributeError` to be raised. + """ + nspki = NetscapeSPKI() + self.assertRaises(AttributeError, lambda: nspki.foo) + + + def test_b64_encode(self): + """ + :py:obj:`NetscapeSPKI.b64_encode` encodes the certificate to a base64 blob. + """ + nspki = NetscapeSPKI() + blob = nspki.b64_encode() + self.assertTrue(isinstance(blob, binary_type)) + + + +class RevokedTests(TestCase): + """ + Tests for :py:obj:`OpenSSL.crypto.Revoked` + """ + def test_construction(self): + """ + Confirm we can create :py:obj:`OpenSSL.crypto.Revoked`. Check + that it is empty. + """ + revoked = Revoked() + self.assertTrue(isinstance(revoked, Revoked)) + self.assertEquals(type(revoked), Revoked) + self.assertEquals(revoked.get_serial(), b('00')) + self.assertEquals(revoked.get_rev_date(), None) + self.assertEquals(revoked.get_reason(), None) + + + def test_construction_wrong_args(self): + """ + Calling :py:obj:`OpenSSL.crypto.Revoked` with any arguments results + in a :py:obj:`TypeError` being raised. + """ + self.assertRaises(TypeError, Revoked, None) + self.assertRaises(TypeError, Revoked, 1) + self.assertRaises(TypeError, Revoked, "foo") + + + def test_serial(self): + """ + Confirm we can set and get serial numbers from + :py:obj:`OpenSSL.crypto.Revoked`. Confirm errors are handled + with grace. + """ + revoked = Revoked() + ret = revoked.set_serial(b('10b')) + self.assertEquals(ret, None) + ser = revoked.get_serial() + self.assertEquals(ser, b('010B')) + + revoked.set_serial(b('31ppp')) # a type error would be nice + ser = revoked.get_serial() + self.assertEquals(ser, b('31')) + + self.assertRaises(ValueError, revoked.set_serial, b('pqrst')) + self.assertRaises(TypeError, revoked.set_serial, 100) + self.assertRaises(TypeError, revoked.get_serial, 1) + self.assertRaises(TypeError, revoked.get_serial, None) + self.assertRaises(TypeError, revoked.get_serial, "") + + + def test_date(self): + """ + Confirm we can set and get revocation dates from + :py:obj:`OpenSSL.crypto.Revoked`. 
Confirm errors are handled + with grace. + """ + revoked = Revoked() + date = revoked.get_rev_date() + self.assertEquals(date, None) + + now = b(datetime.now().strftime("%Y%m%d%H%M%SZ")) + ret = revoked.set_rev_date(now) + self.assertEqual(ret, None) + date = revoked.get_rev_date() + self.assertEqual(date, now) + + + def test_reason(self): + """ + Confirm we can set and get revocation reasons from + :py:obj:`OpenSSL.crypto.Revoked`. The "get" need to work + as "set". Likewise, each reason of all_reasons() must work. + """ + revoked = Revoked() + for r in revoked.all_reasons(): + for x in range(2): + ret = revoked.set_reason(r) + self.assertEquals(ret, None) + reason = revoked.get_reason() + self.assertEquals( + reason.lower().replace(b(' '), b('')), + r.lower().replace(b(' '), b(''))) + r = reason # again with the resp of get + + revoked.set_reason(None) + self.assertEqual(revoked.get_reason(), None) + + + def test_set_reason_wrong_arguments(self): + """ + Calling :py:obj:`OpenSSL.crypto.Revoked.set_reason` with other than + one argument, or an argument which isn't a valid reason, + results in :py:obj:`TypeError` or :py:obj:`ValueError` being raised. + """ + revoked = Revoked() + self.assertRaises(TypeError, revoked.set_reason, 100) + self.assertRaises(ValueError, revoked.set_reason, b('blue')) + + + def test_get_reason_wrong_arguments(self): + """ + Calling :py:obj:`OpenSSL.crypto.Revoked.get_reason` with any + arguments results in :py:obj:`TypeError` being raised. + """ + revoked = Revoked() + self.assertRaises(TypeError, revoked.get_reason, None) + self.assertRaises(TypeError, revoked.get_reason, 1) + self.assertRaises(TypeError, revoked.get_reason, "foo") + + + +class CRLTests(TestCase): + """ + Tests for :py:obj:`OpenSSL.crypto.CRL` + """ + cert = load_certificate(FILETYPE_PEM, cleartextCertificatePEM) + pkey = load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM) + + def test_construction(self): + """ + Confirm we can create :py:obj:`OpenSSL.crypto.CRL`. Check + that it is empty + """ + crl = CRL() + self.assertTrue( isinstance(crl, CRL) ) + self.assertEqual(crl.get_revoked(), None) + + + def test_construction_wrong_args(self): + """ + Calling :py:obj:`OpenSSL.crypto.CRL` with any number of arguments + results in a :py:obj:`TypeError` being raised. + """ + self.assertRaises(TypeError, CRL, 1) + self.assertRaises(TypeError, CRL, "") + self.assertRaises(TypeError, CRL, None) + + + def _get_crl(self): + """ + Get a new ``CRL`` with a revocation. + """ + crl = CRL() + revoked = Revoked() + now = b(datetime.now().strftime("%Y%m%d%H%M%SZ")) + revoked.set_rev_date(now) + revoked.set_serial(b('3ab')) + revoked.set_reason(b('sUpErSeDEd')) + crl.add_revoked(revoked) + return crl + + + def test_export_pem(self): + """ + If not passed a format, ``CRL.export`` returns a "PEM" format string + representing a serial number, a revoked reason, and certificate issuer + information. + """ + crl = self._get_crl() + # PEM format + dumped_crl = crl.export(self.cert, self.pkey, days=20) + text = _runopenssl(dumped_crl, b"crl", b"-noout", b"-text") + + # These magic values are based on the way the CRL above was constructed + # and with what certificate it was exported. 
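+ # bytes.index raises ValueError when the substring is absent, so each of the
+ # following lookups doubles as an assertion that the expected field appears
+ # in the openssl text output.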
+ text.index(b('Serial Number: 03AB')) + text.index(b('Superseded')) + text.index( + b('Issuer: /C=US/ST=IL/L=Chicago/O=Testing/CN=Testing Root CA') + ) + + + def test_export_der(self): + """ + If passed ``FILETYPE_ASN1`` for the format, ``CRL.export`` returns a + "DER" format string representing a serial number, a revoked reason, and + certificate issuer information. + """ + crl = self._get_crl() + + # DER format + dumped_crl = crl.export(self.cert, self.pkey, FILETYPE_ASN1) + text = _runopenssl( + dumped_crl, b"crl", b"-noout", b"-text", b"-inform", b"DER" + ) + text.index(b('Serial Number: 03AB')) + text.index(b('Superseded')) + text.index( + b('Issuer: /C=US/ST=IL/L=Chicago/O=Testing/CN=Testing Root CA') + ) + + + def test_export_text(self): + """ + If passed ``FILETYPE_TEXT`` for the format, ``CRL.export`` returns a + text format string like the one produced by the openssl command line + tool. + """ + crl = self._get_crl() + + dumped_crl = crl.export(self.cert, self.pkey, FILETYPE_ASN1) + text = _runopenssl( + dumped_crl, b"crl", b"-noout", b"-text", b"-inform", b"DER" + ) + + # text format + dumped_text = crl.export(self.cert, self.pkey, type=FILETYPE_TEXT) + self.assertEqual(text, dumped_text) + + + def test_export_custom_digest(self): + """ + If passed the name of a digest function, ``CRL.export`` uses a + signature algorithm based on that digest function. + """ + crl = self._get_crl() + dumped_crl = crl.export(self.cert, self.pkey, digest=b"sha1") + text = _runopenssl(dumped_crl, b"crl", b"-noout", b"-text") + text.index(b('Signature Algorithm: sha1')) + + + def test_export_md5_digest(self): + """ + If passed md5 as the digest function, ``CRL.export`` uses md5 and does + not emit a deprecation warning. + """ + crl = self._get_crl() + with catch_warnings(record=True) as catcher: + simplefilter("always") + self.assertEqual(0, len(catcher)) + dumped_crl = crl.export(self.cert, self.pkey, digest=b"md5") + text = _runopenssl(dumped_crl, b"crl", b"-noout", b"-text") + text.index(b('Signature Algorithm: md5')) + + + def test_export_default_digest(self): + """ + If not passed the name of a digest function, ``CRL.export`` uses a + signature algorithm based on MD5 and emits a deprecation warning. + """ + crl = self._get_crl() + with catch_warnings(record=True) as catcher: + simplefilter("always") + dumped_crl = crl.export(self.cert, self.pkey) + self.assertEqual( + "The default message digest (md5) is deprecated. " + "Pass the name of a message digest explicitly.", + str(catcher[0].message), + ) + text = _runopenssl(dumped_crl, b"crl", b"-noout", b"-text") + text.index(b('Signature Algorithm: md5')) + + + def test_export_invalid(self): + """ + If :py:obj:`CRL.export` is used with an uninitialized :py:obj:`X509` + instance, :py:obj:`OpenSSL.crypto.Error` is raised. + """ + crl = CRL() + self.assertRaises(Error, crl.export, X509(), PKey()) + + + def test_add_revoked_keyword(self): + """ + :py:obj:`OpenSSL.CRL.add_revoked` accepts its single argument as the + ``revoked`` keyword argument. + """ + crl = CRL() + revoked = Revoked() + crl.add_revoked(revoked=revoked) + self.assertTrue(isinstance(crl.get_revoked()[0], Revoked)) + + + def test_export_wrong_args(self): + """ + Calling :py:obj:`OpenSSL.CRL.export` with fewer than two or more than + four arguments, or with arguments other than the certificate, + private key, integer file type, and integer number of days it + expects, results in a :py:obj:`TypeError` being raised. 
+ """ + crl = CRL() + self.assertRaises(TypeError, crl.export) + self.assertRaises(TypeError, crl.export, self.cert) + self.assertRaises(TypeError, crl.export, self.cert, self.pkey, FILETYPE_PEM, 10, "md5", "foo") + + self.assertRaises(TypeError, crl.export, None, self.pkey, FILETYPE_PEM, 10) + self.assertRaises(TypeError, crl.export, self.cert, None, FILETYPE_PEM, 10) + self.assertRaises(TypeError, crl.export, self.cert, self.pkey, None, 10) + self.assertRaises(TypeError, crl.export, self.cert, FILETYPE_PEM, None) + + + def test_export_unknown_filetype(self): + """ + Calling :py:obj:`OpenSSL.CRL.export` with a file type other than + :py:obj:`FILETYPE_PEM`, :py:obj:`FILETYPE_ASN1`, or :py:obj:`FILETYPE_TEXT` results + in a :py:obj:`ValueError` being raised. + """ + crl = CRL() + self.assertRaises(ValueError, crl.export, self.cert, self.pkey, 100, 10) + + + def test_export_unknown_digest(self): + """ + Calling :py:obj:`OpenSSL.CRL.export` with a unsupported digest results + in a :py:obj:`ValueError` being raised. + """ + crl = CRL() + self.assertRaises( + ValueError, + crl.export, + self.cert, self.pkey, FILETYPE_PEM, 10, b"strange-digest" + ) + + + def test_get_revoked(self): + """ + Use python to create a simple CRL with two revocations. + Get back the :py:obj:`Revoked` using :py:obj:`OpenSSL.CRL.get_revoked` and + verify them. + """ + crl = CRL() + + revoked = Revoked() + now = b(datetime.now().strftime("%Y%m%d%H%M%SZ")) + revoked.set_rev_date(now) + revoked.set_serial(b('3ab')) + crl.add_revoked(revoked) + revoked.set_serial(b('100')) + revoked.set_reason(b('sUpErSeDEd')) + crl.add_revoked(revoked) + + revs = crl.get_revoked() + self.assertEqual(len(revs), 2) + self.assertEqual(type(revs[0]), Revoked) + self.assertEqual(type(revs[1]), Revoked) + self.assertEqual(revs[0].get_serial(), b('03AB')) + self.assertEqual(revs[1].get_serial(), b('0100')) + self.assertEqual(revs[0].get_rev_date(), now) + self.assertEqual(revs[1].get_rev_date(), now) + + + def test_get_revoked_wrong_args(self): + """ + Calling :py:obj:`OpenSSL.CRL.get_revoked` with any arguments results + in a :py:obj:`TypeError` being raised. + """ + crl = CRL() + self.assertRaises(TypeError, crl.get_revoked, None) + self.assertRaises(TypeError, crl.get_revoked, 1) + self.assertRaises(TypeError, crl.get_revoked, "") + self.assertRaises(TypeError, crl.get_revoked, "", 1, None) + + + def test_add_revoked_wrong_args(self): + """ + Calling :py:obj:`OpenSSL.CRL.add_revoked` with other than one + argument results in a :py:obj:`TypeError` being raised. + """ + crl = CRL() + self.assertRaises(TypeError, crl.add_revoked) + self.assertRaises(TypeError, crl.add_revoked, 1, 2) + self.assertRaises(TypeError, crl.add_revoked, "foo", "bar") + + + def test_load_crl(self): + """ + Load a known CRL and inspect its revocations. Both + PEM and DER formats are loaded. 
+ """ + crl = load_crl(FILETYPE_PEM, crlData) + revs = crl.get_revoked() + self.assertEqual(len(revs), 2) + self.assertEqual(revs[0].get_serial(), b('03AB')) + self.assertEqual(revs[0].get_reason(), None) + self.assertEqual(revs[1].get_serial(), b('0100')) + self.assertEqual(revs[1].get_reason(), b('Superseded')) + + der = _runopenssl(crlData, b"crl", b"-outform", b"DER") + crl = load_crl(FILETYPE_ASN1, der) + revs = crl.get_revoked() + self.assertEqual(len(revs), 2) + self.assertEqual(revs[0].get_serial(), b('03AB')) + self.assertEqual(revs[0].get_reason(), None) + self.assertEqual(revs[1].get_serial(), b('0100')) + self.assertEqual(revs[1].get_reason(), b('Superseded')) + + + def test_load_crl_wrong_args(self): + """ + Calling :py:obj:`OpenSSL.crypto.load_crl` with other than two + arguments results in a :py:obj:`TypeError` being raised. + """ + self.assertRaises(TypeError, load_crl) + self.assertRaises(TypeError, load_crl, FILETYPE_PEM) + self.assertRaises(TypeError, load_crl, FILETYPE_PEM, crlData, None) + + + def test_load_crl_bad_filetype(self): + """ + Calling :py:obj:`OpenSSL.crypto.load_crl` with an unknown file type + raises a :py:obj:`ValueError`. + """ + self.assertRaises(ValueError, load_crl, 100, crlData) + + + def test_load_crl_bad_data(self): + """ + Calling :py:obj:`OpenSSL.crypto.load_crl` with file data which can't + be loaded raises a :py:obj:`OpenSSL.crypto.Error`. + """ + self.assertRaises(Error, load_crl, FILETYPE_PEM, b"hello, world") + + + +class X509StoreContextTests(TestCase): + """ + Tests for :py:obj:`OpenSSL.crypto.X509StoreContext`. + """ + root_cert = load_certificate(FILETYPE_PEM, root_cert_pem) + intermediate_cert = load_certificate(FILETYPE_PEM, intermediate_cert_pem) + intermediate_server_cert = load_certificate(FILETYPE_PEM, intermediate_server_cert_pem) + + def test_valid(self): + """ + :py:obj:`verify_certificate` returns ``None`` when called with a certificate + and valid chain. + """ + store = X509Store() + store.add_cert(self.root_cert) + store.add_cert(self.intermediate_cert) + store_ctx = X509StoreContext(store, self.intermediate_server_cert) + self.assertEqual(store_ctx.verify_certificate(), None) + + + def test_reuse(self): + """ + :py:obj:`verify_certificate` can be called multiple times with the same + ``X509StoreContext`` instance to produce the same result. + """ + store = X509Store() + store.add_cert(self.root_cert) + store.add_cert(self.intermediate_cert) + store_ctx = X509StoreContext(store, self.intermediate_server_cert) + self.assertEqual(store_ctx.verify_certificate(), None) + self.assertEqual(store_ctx.verify_certificate(), None) + + + def test_trusted_self_signed(self): + """ + :py:obj:`verify_certificate` returns ``None`` when called with a self-signed + certificate and itself in the chain. + """ + store = X509Store() + store.add_cert(self.root_cert) + store_ctx = X509StoreContext(store, self.root_cert) + self.assertEqual(store_ctx.verify_certificate(), None) + + + def test_untrusted_self_signed(self): + """ + :py:obj:`verify_certificate` raises error when a self-signed certificate is + verified without itself in the chain. 
+ """ + store = X509Store() + store_ctx = X509StoreContext(store, self.root_cert) + e = self.assertRaises(X509StoreContextError, store_ctx.verify_certificate) + self.assertEqual(e.args[0][2], 'self signed certificate') + self.assertEqual(e.certificate.get_subject().CN, 'Testing Root CA') + + + def test_invalid_chain_no_root(self): + """ + :py:obj:`verify_certificate` raises error when a root certificate is missing + from the chain. + """ + store = X509Store() + store.add_cert(self.intermediate_cert) + store_ctx = X509StoreContext(store, self.intermediate_server_cert) + e = self.assertRaises(X509StoreContextError, store_ctx.verify_certificate) + self.assertEqual(e.args[0][2], 'unable to get issuer certificate') + self.assertEqual(e.certificate.get_subject().CN, 'intermediate') + + + def test_invalid_chain_no_intermediate(self): + """ + :py:obj:`verify_certificate` raises error when an intermediate certificate is + missing from the chain. + """ + store = X509Store() + store.add_cert(self.root_cert) + store_ctx = X509StoreContext(store, self.intermediate_server_cert) + e = self.assertRaises(X509StoreContextError, store_ctx.verify_certificate) + self.assertEqual(e.args[0][2], 'unable to get local issuer certificate') + self.assertEqual(e.certificate.get_subject().CN, 'intermediate-service') + + + def test_modification_pre_verify(self): + """ + :py:obj:`verify_certificate` can use a store context modified after + instantiation. + """ + store_bad = X509Store() + store_bad.add_cert(self.intermediate_cert) + store_good = X509Store() + store_good.add_cert(self.root_cert) + store_good.add_cert(self.intermediate_cert) + store_ctx = X509StoreContext(store_bad, self.intermediate_server_cert) + e = self.assertRaises(X509StoreContextError, store_ctx.verify_certificate) + self.assertEqual(e.args[0][2], 'unable to get issuer certificate') + self.assertEqual(e.certificate.get_subject().CN, 'intermediate') + store_ctx.set_store(store_good) + self.assertEqual(store_ctx.verify_certificate(), None) + + + +class SignVerifyTests(TestCase): + """ + Tests for :py:obj:`OpenSSL.crypto.sign` and :py:obj:`OpenSSL.crypto.verify`. + """ + def test_sign_verify(self): + """ + :py:obj:`sign` generates a cryptographic signature which :py:obj:`verify` can check. + """ + content = b( + "It was a bright cold day in April, and the clocks were striking " + "thirteen. Winston Smith, his chin nuzzled into his breast in an " + "effort to escape the vile wind, slipped quickly through the " + "glass doors of Victory Mansions, though not quickly enough to " + "prevent a swirl of gritty dust from entering along with him.") + + # sign the content with this private key + priv_key = load_privatekey(FILETYPE_PEM, root_key_pem) + # verify the content with this cert + good_cert = load_certificate(FILETYPE_PEM, root_cert_pem) + # certificate unrelated to priv_key, used to trigger an error + bad_cert = load_certificate(FILETYPE_PEM, server_cert_pem) + + for digest in ['md5', 'sha1']: + sig = sign(priv_key, content, digest) + + # Verify the signature of content, will throw an exception if error. + verify(good_cert, sig, content, digest) + + # This should fail because the certificate doesn't match the + # private key that was used to sign the content. + self.assertRaises(Error, verify, bad_cert, sig, content, digest) + + # This should fail because we've "tainted" the content after + # signing it. 
+ self.assertRaises( + Error, verify, + good_cert, sig, content + b("tainted"), digest) + + # test that unknown digest types fail + self.assertRaises( + ValueError, sign, priv_key, content, "strange-digest") + self.assertRaises( + ValueError, verify, good_cert, sig, content, "strange-digest") + + + def test_sign_verify_with_text(self): + """ + :py:obj:`sign` generates a cryptographic signature which :py:obj:`verify` can check. + Deprecation warnings raised because using text instead of bytes as content + """ + content = ( + b"It was a bright cold day in April, and the clocks were striking " + b"thirteen. Winston Smith, his chin nuzzled into his breast in an " + b"effort to escape the vile wind, slipped quickly through the " + b"glass doors of Victory Mansions, though not quickly enough to " + b"prevent a swirl of gritty dust from entering along with him." + ).decode("ascii") + + priv_key = load_privatekey(FILETYPE_PEM, root_key_pem) + cert = load_certificate(FILETYPE_PEM, root_cert_pem) + for digest in ['md5', 'sha1']: + with catch_warnings(record=True) as w: + simplefilter("always") + sig = sign(priv_key, content, digest) + + self.assertEqual( + "{0} for data is no longer accepted, use bytes".format( + WARNING_TYPE_EXPECTED + ), + str(w[-1].message) + ) + self.assertIs(w[-1].category, DeprecationWarning) + + with catch_warnings(record=True) as w: + simplefilter("always") + verify(cert, sig, content, digest) + + self.assertEqual( + "{0} for data is no longer accepted, use bytes".format( + WARNING_TYPE_EXPECTED + ), + str(w[-1].message) + ) + self.assertIs(w[-1].category, DeprecationWarning) + + + def test_sign_nulls(self): + """ + :py:obj:`sign` produces a signature for a string with embedded nulls. + """ + content = b("Watch out! \0 Did you see it?") + priv_key = load_privatekey(FILETYPE_PEM, root_key_pem) + good_cert = load_certificate(FILETYPE_PEM, root_cert_pem) + sig = sign(priv_key, content, "sha1") + verify(good_cert, sig, content, "sha1") + + + +class EllipticCurveTests(TestCase): + """ + Tests for :py:class:`_EllipticCurve`, :py:obj:`get_elliptic_curve`, and + :py:obj:`get_elliptic_curves`. + """ + def test_set(self): + """ + :py:obj:`get_elliptic_curves` returns a :py:obj:`set`. + """ + self.assertIsInstance(get_elliptic_curves(), set) + + + def test_some_curves(self): + """ + If :py:mod:`cryptography` has elliptic curve support then the set + returned by :py:obj:`get_elliptic_curves` has some elliptic curves in + it. + + There could be an OpenSSL that violates this assumption. If so, this + test will fail and we'll find out. + """ + curves = get_elliptic_curves() + if lib.Cryptography_HAS_EC: + self.assertTrue(curves) + else: + self.assertFalse(curves) + + + def test_a_curve(self): + """ + :py:obj:`get_elliptic_curve` can be used to retrieve a particular + supported curve. + """ + curves = get_elliptic_curves() + if curves: + curve = next(iter(curves)) + self.assertEqual(curve.name, get_elliptic_curve(curve.name).name) + else: + self.assertRaises(ValueError, get_elliptic_curve, u("prime256v1")) + + + def test_not_a_curve(self): + """ + :py:obj:`get_elliptic_curve` raises :py:class:`ValueError` if called + with a name which does not identify a supported curve. + """ + self.assertRaises( + ValueError, get_elliptic_curve, u("this curve was just invented")) + + + def test_repr(self): + """ + The string representation of a curve object includes simply states the + object is a curve and what its name is. 
+ """ + curves = get_elliptic_curves() + if curves: + curve = next(iter(curves)) + self.assertEqual("" % (curve.name,), repr(curve)) + + + def test_to_EC_KEY(self): + """ + The curve object can export a version of itself as an EC_KEY* via the + private :py:meth:`_EllipticCurve._to_EC_KEY`. + """ + curves = get_elliptic_curves() + if curves: + curve = next(iter(curves)) + # It's not easy to assert anything about this object. However, see + # leakcheck/crypto.py for a test that demonstrates it at least does + # not leak memory. + curve._to_EC_KEY() + + + +class EllipticCurveFactory(object): + """ + A helper to get the names of two curves. + """ + def __init__(self): + curves = iter(get_elliptic_curves()) + try: + self.curve_name = next(curves).name + self.another_curve_name = next(curves).name + except StopIteration: + self.curve_name = self.another_curve_name = None + + + +class EllipticCurveEqualityTests(TestCase, EqualityTestsMixin): + """ + Tests :py:type:`_EllipticCurve`\ 's implementation of ``==`` and ``!=``. + """ + curve_factory = EllipticCurveFactory() + + if curve_factory.curve_name is None: + skip = "There are no curves available there can be no curve objects." + + + def anInstance(self): + """ + Get the curve object for an arbitrary curve supported by the system. + """ + return get_elliptic_curve(self.curve_factory.curve_name) + + + def anotherInstance(self): + """ + Get the curve object for an arbitrary curve supported by the system - + but not the one returned by C{anInstance}. + """ + return get_elliptic_curve(self.curve_factory.another_curve_name) + + + +class EllipticCurveHashTests(TestCase): + """ + Tests for :py:type:`_EllipticCurve`\ 's implementation of hashing (thus use + as an item in a :py:type:`dict` or :py:type:`set`). + """ + curve_factory = EllipticCurveFactory() + + if curve_factory.curve_name is None: + skip = "There are no curves available there can be no curve objects." + + + def test_contains(self): + """ + The ``in`` operator reports that a :py:type:`set` containing a curve + does contain that curve. + """ + curve = get_elliptic_curve(self.curve_factory.curve_name) + curves = set([curve]) + self.assertIn(curve, curves) + + + def test_does_not_contain(self): + """ + The ``in`` operator reports that a :py:type:`set` not containing a + curve does not contain that curve. + """ + curve = get_elliptic_curve(self.curve_factory.curve_name) + curves = set([get_elliptic_curve(self.curve_factory.another_curve_name)]) + self.assertNotIn(curve, curves) + + + +if __name__ == '__main__': + main() diff --git a/Lib/site-packages/OpenSSL/test/test_rand.py b/Lib/site-packages/OpenSSL/test/test_rand.py new file mode 100644 index 0000000..3d5c290 --- /dev/null +++ b/Lib/site-packages/OpenSSL/test/test_rand.py @@ -0,0 +1,223 @@ +# Copyright (c) Frederick Dean +# See LICENSE for details. + +""" +Unit tests for :py:obj:`OpenSSL.rand`. +""" + +from unittest import main +import os +import stat +import sys + +from OpenSSL.test.util import NON_ASCII, TestCase, b +from OpenSSL import rand + + +class RandTests(TestCase): + def test_bytes_wrong_args(self): + """ + :py:obj:`OpenSSL.rand.bytes` raises :py:obj:`TypeError` if called with the wrong + number of arguments or with a non-:py:obj:`int` argument. 
+ """ + self.assertRaises(TypeError, rand.bytes) + self.assertRaises(TypeError, rand.bytes, None) + self.assertRaises(TypeError, rand.bytes, 3, None) + + + def test_insufficientMemory(self): + """ + :py:obj:`OpenSSL.rand.bytes` raises :py:obj:`MemoryError` if more bytes + are requested than will fit in memory. + """ + self.assertRaises(MemoryError, rand.bytes, sys.maxsize) + + + def test_bytes(self): + """ + Verify that we can obtain bytes from rand_bytes() and + that they are different each time. Test the parameter + of rand_bytes() for bad values. + """ + b1 = rand.bytes(50) + self.assertEqual(len(b1), 50) + b2 = rand.bytes(num_bytes=50) # parameter by name + self.assertNotEqual(b1, b2) # Hip, Hip, Horay! FIPS complaince + b3 = rand.bytes(num_bytes=0) + self.assertEqual(len(b3), 0) + exc = self.assertRaises(ValueError, rand.bytes, -1) + self.assertEqual(str(exc), "num_bytes must not be negative") + + + def test_add_wrong_args(self): + """ + When called with the wrong number of arguments, or with arguments not of + type :py:obj:`str` and :py:obj:`int`, :py:obj:`OpenSSL.rand.add` raises :py:obj:`TypeError`. + """ + self.assertRaises(TypeError, rand.add) + self.assertRaises(TypeError, rand.add, b("foo"), None) + self.assertRaises(TypeError, rand.add, None, 3) + self.assertRaises(TypeError, rand.add, b("foo"), 3, None) + + + def test_add(self): + """ + :py:obj:`OpenSSL.rand.add` adds entropy to the PRNG. + """ + rand.add(b('hamburger'), 3) + + + def test_seed_wrong_args(self): + """ + When called with the wrong number of arguments, or with a non-:py:obj:`str` + argument, :py:obj:`OpenSSL.rand.seed` raises :py:obj:`TypeError`. + """ + self.assertRaises(TypeError, rand.seed) + self.assertRaises(TypeError, rand.seed, None) + self.assertRaises(TypeError, rand.seed, b("foo"), None) + + + def test_seed(self): + """ + :py:obj:`OpenSSL.rand.seed` adds entropy to the PRNG. + """ + rand.seed(b('milk shake')) + + + def test_status_wrong_args(self): + """ + :py:obj:`OpenSSL.rand.status` raises :py:obj:`TypeError` when called with any + arguments. + """ + self.assertRaises(TypeError, rand.status, None) + + + def test_status(self): + """ + :py:obj:`OpenSSL.rand.status` returns :py:obj:`True` if the PRNG has sufficient + entropy, :py:obj:`False` otherwise. + """ + # It's hard to know what it is actually going to return. Different + # OpenSSL random engines decide differently whether they have enough + # entropy or not. + self.assertTrue(rand.status() in (1, 2)) + + + def test_egd_wrong_args(self): + """ + :py:obj:`OpenSSL.rand.egd` raises :py:obj:`TypeError` when called with the wrong + number of arguments or with arguments not of type :py:obj:`str` and :py:obj:`int`. + """ + self.assertRaises(TypeError, rand.egd) + self.assertRaises(TypeError, rand.egd, None) + self.assertRaises(TypeError, rand.egd, "foo", None) + self.assertRaises(TypeError, rand.egd, None, 3) + self.assertRaises(TypeError, rand.egd, "foo", 3, None) + + + def test_egd_missing(self): + """ + :py:obj:`OpenSSL.rand.egd` returns :py:obj:`0` or :py:obj:`-1` if the + EGD socket passed to it does not exist. + """ + result = rand.egd(self.mktemp()) + expected = (-1, 0) + self.assertTrue( + result in expected, + "%r not in %r" % (result, expected)) + + + def test_egd_missing_and_bytes(self): + """ + :py:obj:`OpenSSL.rand.egd` returns :py:obj:`0` or :py:obj:`-1` if the + EGD socket passed to it does not exist even if a size argument is + explicitly passed. 
+ """ + result = rand.egd(self.mktemp(), 1024) + expected = (-1, 0) + self.assertTrue( + result in expected, + "%r not in %r" % (result, expected)) + + + def test_cleanup_wrong_args(self): + """ + :py:obj:`OpenSSL.rand.cleanup` raises :py:obj:`TypeError` when called with any + arguments. + """ + self.assertRaises(TypeError, rand.cleanup, None) + + + def test_cleanup(self): + """ + :py:obj:`OpenSSL.rand.cleanup` releases the memory used by the PRNG and returns + :py:obj:`None`. + """ + self.assertIdentical(rand.cleanup(), None) + + + def test_load_file_wrong_args(self): + """ + :py:obj:`OpenSSL.rand.load_file` raises :py:obj:`TypeError` when called the wrong + number of arguments or arguments not of type :py:obj:`str` and :py:obj:`int`. + """ + self.assertRaises(TypeError, rand.load_file) + self.assertRaises(TypeError, rand.load_file, "foo", None) + self.assertRaises(TypeError, rand.load_file, None, 1) + self.assertRaises(TypeError, rand.load_file, "foo", 1, None) + + + def test_write_file_wrong_args(self): + """ + :py:obj:`OpenSSL.rand.write_file` raises :py:obj:`TypeError` when called with the + wrong number of arguments or a non-:py:obj:`str` argument. + """ + self.assertRaises(TypeError, rand.write_file) + self.assertRaises(TypeError, rand.write_file, None) + self.assertRaises(TypeError, rand.write_file, "foo", None) + + def _read_write_test(self, path): + """ + Verify that ``rand.write_file`` and ``rand.load_file`` can be used. + """ + # Create the file so cleanup is more straightforward + with open(path, "w"): + pass + + try: + # Write random bytes to a file + rand.write_file(path) + + # Verify length of written file + size = os.stat(path)[stat.ST_SIZE] + self.assertEqual(1024, size) + + # Read random bytes from file + rand.load_file(path) + rand.load_file(path, 4) # specify a length + finally: + # Cleanup + os.unlink(path) + + + def test_bytes_paths(self): + """ + Random data can be saved and loaded to files with paths specified as + bytes. + """ + path = self.mktemp() + path += NON_ASCII.encode(sys.getfilesystemencoding()) + self._read_write_test(path) + + + def test_unicode_paths(self): + """ + Random data can be saved and loaded to files with paths specified as + unicode. + """ + path = self.mktemp().decode('utf-8') + NON_ASCII + self._read_write_test(path) + + +if __name__ == '__main__': + main() diff --git a/Lib/site-packages/OpenSSL/test/test_ssl.py b/Lib/site-packages/OpenSSL/test/test_ssl.py new file mode 100644 index 0000000..bb1c9ae --- /dev/null +++ b/Lib/site-packages/OpenSSL/test/test_ssl.py @@ -0,0 +1,3775 @@ +# Copyright (C) Jean-Paul Calderone +# See LICENSE for details. + +""" +Unit tests for :py:obj:`OpenSSL.SSL`. 
+""" + +from gc import collect, get_referrers +from errno import ECONNREFUSED, EINPROGRESS, EWOULDBLOCK, EPIPE, ESHUTDOWN +from sys import platform, getfilesystemencoding +from socket import SHUT_RDWR, error, socket +from os import makedirs +from os.path import join +from unittest import main +from weakref import ref +from warnings import catch_warnings, simplefilter + +from six import PY3, text_type, u + +from OpenSSL.crypto import TYPE_RSA, FILETYPE_PEM +from OpenSSL.crypto import PKey, X509, X509Extension, X509Store +from OpenSSL.crypto import dump_privatekey, load_privatekey +from OpenSSL.crypto import dump_certificate, load_certificate +from OpenSSL.crypto import get_elliptic_curves + +from OpenSSL.SSL import OPENSSL_VERSION_NUMBER, SSLEAY_VERSION, SSLEAY_CFLAGS +from OpenSSL.SSL import SSLEAY_PLATFORM, SSLEAY_DIR, SSLEAY_BUILT_ON +from OpenSSL.SSL import SENT_SHUTDOWN, RECEIVED_SHUTDOWN +from OpenSSL.SSL import ( + SSLv2_METHOD, SSLv3_METHOD, SSLv23_METHOD, TLSv1_METHOD, + TLSv1_1_METHOD, TLSv1_2_METHOD) +from OpenSSL.SSL import OP_SINGLE_DH_USE, OP_NO_SSLv2, OP_NO_SSLv3 +from OpenSSL.SSL import ( + VERIFY_PEER, VERIFY_FAIL_IF_NO_PEER_CERT, VERIFY_CLIENT_ONCE, VERIFY_NONE) + +from OpenSSL.SSL import ( + SESS_CACHE_OFF, SESS_CACHE_CLIENT, SESS_CACHE_SERVER, SESS_CACHE_BOTH, + SESS_CACHE_NO_AUTO_CLEAR, SESS_CACHE_NO_INTERNAL_LOOKUP, + SESS_CACHE_NO_INTERNAL_STORE, SESS_CACHE_NO_INTERNAL) + +from OpenSSL.SSL import ( + Error, SysCallError, WantReadError, WantWriteError, ZeroReturnError) +from OpenSSL.SSL import ( + Context, ContextType, Session, Connection, ConnectionType, SSLeay_version) + +from OpenSSL._util import lib as _lib + +from OpenSSL.test.util import WARNING_TYPE_EXPECTED, NON_ASCII, TestCase, b +from OpenSSL.test.test_crypto import ( + cleartextCertificatePEM, cleartextPrivateKeyPEM, + client_cert_pem, client_key_pem, server_cert_pem, server_key_pem, + root_cert_pem) + +try: + from OpenSSL.SSL import OP_NO_QUERY_MTU +except ImportError: + OP_NO_QUERY_MTU = None +try: + from OpenSSL.SSL import OP_COOKIE_EXCHANGE +except ImportError: + OP_COOKIE_EXCHANGE = None +try: + from OpenSSL.SSL import OP_NO_TICKET +except ImportError: + OP_NO_TICKET = None + +try: + from OpenSSL.SSL import OP_NO_COMPRESSION +except ImportError: + OP_NO_COMPRESSION = None + +try: + from OpenSSL.SSL import MODE_RELEASE_BUFFERS +except ImportError: + MODE_RELEASE_BUFFERS = None + +try: + from OpenSSL.SSL import OP_NO_TLSv1, OP_NO_TLSv1_1, OP_NO_TLSv1_2 +except ImportError: + OP_NO_TLSv1 = OP_NO_TLSv1_1 = OP_NO_TLSv1_2 = None + +from OpenSSL.SSL import ( + SSL_ST_CONNECT, SSL_ST_ACCEPT, SSL_ST_MASK, SSL_ST_INIT, SSL_ST_BEFORE, + SSL_ST_OK, SSL_ST_RENEGOTIATE, + SSL_CB_LOOP, SSL_CB_EXIT, SSL_CB_READ, SSL_CB_WRITE, SSL_CB_ALERT, + SSL_CB_READ_ALERT, SSL_CB_WRITE_ALERT, SSL_CB_ACCEPT_LOOP, + SSL_CB_ACCEPT_EXIT, SSL_CB_CONNECT_LOOP, SSL_CB_CONNECT_EXIT, + SSL_CB_HANDSHAKE_START, SSL_CB_HANDSHAKE_DONE) + +# openssl dhparam 128 -out dh-128.pem (note that 128 is a small number of bits +# to use) +dhparam = """\ +-----BEGIN DH PARAMETERS----- +MBYCEQCobsg29c9WZP/54oAPcwiDAgEC +-----END DH PARAMETERS----- +""" + + +def join_bytes_or_unicode(prefix, suffix): + """ + Join two path components of either ``bytes`` or ``unicode``. + + The return type is the same as the type of ``prefix``. + """ + # If the types are the same, nothing special is necessary. + if type(prefix) == type(suffix): + return join(prefix, suffix) + + # Otherwise, coerce suffix to the type of prefix. 
+ if isinstance(prefix, text_type): + return join(prefix, suffix.decode(getfilesystemencoding())) + else: + return join(prefix, suffix.encode(getfilesystemencoding())) + + +def verify_cb(conn, cert, errnum, depth, ok): + return ok + + +def socket_pair(): + """ + Establish and return a pair of network sockets connected to each other. + """ + # Connect a pair of sockets + port = socket() + port.bind(('', 0)) + port.listen(1) + client = socket() + client.setblocking(False) + client.connect_ex(("127.0.0.1", port.getsockname()[1])) + client.setblocking(True) + server = port.accept()[0] + + # Let's pass some unencrypted data to make sure our socket connection is + # fine. Just one byte, so we don't have to worry about buffers getting + # filled up or fragmentation. + server.send(b("x")) + assert client.recv(1024) == b("x") + client.send(b("y")) + assert server.recv(1024) == b("y") + + # Most of our callers want non-blocking sockets, make it easy for them. + server.setblocking(False) + client.setblocking(False) + + return (server, client) + + + +def handshake(client, server): + conns = [client, server] + while conns: + for conn in conns: + try: + conn.do_handshake() + except WantReadError: + pass + else: + conns.remove(conn) + + +def _create_certificate_chain(): + """ + Construct and return a chain of certificates. + + 1. A new self-signed certificate authority certificate (cacert) + 2. A new intermediate certificate signed by cacert (icert) + 3. A new server certificate signed by icert (scert) + """ + caext = X509Extension(b('basicConstraints'), False, b('CA:true')) + + # Step 1 + cakey = PKey() + cakey.generate_key(TYPE_RSA, 512) + cacert = X509() + cacert.get_subject().commonName = "Authority Certificate" + cacert.set_issuer(cacert.get_subject()) + cacert.set_pubkey(cakey) + cacert.set_notBefore(b("20000101000000Z")) + cacert.set_notAfter(b("20200101000000Z")) + cacert.add_extensions([caext]) + cacert.set_serial_number(0) + cacert.sign(cakey, "sha1") + + # Step 2 + ikey = PKey() + ikey.generate_key(TYPE_RSA, 512) + icert = X509() + icert.get_subject().commonName = "Intermediate Certificate" + icert.set_issuer(cacert.get_subject()) + icert.set_pubkey(ikey) + icert.set_notBefore(b("20000101000000Z")) + icert.set_notAfter(b("20200101000000Z")) + icert.add_extensions([caext]) + icert.set_serial_number(0) + icert.sign(cakey, "sha1") + + # Step 3 + skey = PKey() + skey.generate_key(TYPE_RSA, 512) + scert = X509() + scert.get_subject().commonName = "Server Certificate" + scert.set_issuer(icert.get_subject()) + scert.set_pubkey(skey) + scert.set_notBefore(b("20000101000000Z")) + scert.set_notAfter(b("20200101000000Z")) + scert.add_extensions([ + X509Extension(b('basicConstraints'), True, b('CA:false'))]) + scert.set_serial_number(0) + scert.sign(ikey, "sha1") + + return [(cakey, cacert), (ikey, icert), (skey, scert)] + + + +class _LoopbackMixin: + """ + Helper mixin which defines methods for creating a connected socket pair and + for forcing two connected SSL sockets to talk to each other via memory BIOs. 
+ """ + def _loopbackClientFactory(self, socket): + client = Connection(Context(TLSv1_METHOD), socket) + client.set_connect_state() + return client + + + def _loopbackServerFactory(self, socket): + ctx = Context(TLSv1_METHOD) + ctx.use_privatekey(load_privatekey(FILETYPE_PEM, server_key_pem)) + ctx.use_certificate(load_certificate(FILETYPE_PEM, server_cert_pem)) + server = Connection(ctx, socket) + server.set_accept_state() + return server + + + def _loopback(self, serverFactory=None, clientFactory=None): + if serverFactory is None: + serverFactory = self._loopbackServerFactory + if clientFactory is None: + clientFactory = self._loopbackClientFactory + + (server, client) = socket_pair() + server = serverFactory(server) + client = clientFactory(client) + + handshake(client, server) + + server.setblocking(True) + client.setblocking(True) + return server, client + + + def _interactInMemory(self, client_conn, server_conn): + """ + Try to read application bytes from each of the two :py:obj:`Connection` + objects. Copy bytes back and forth between their send/receive buffers + for as long as there is anything to copy. When there is nothing more + to copy, return :py:obj:`None`. If one of them actually manages to deliver + some application bytes, return a two-tuple of the connection from which + the bytes were read and the bytes themselves. + """ + wrote = True + while wrote: + # Loop until neither side has anything to say + wrote = False + + # Copy stuff from each side's send buffer to the other side's + # receive buffer. + for (read, write) in [(client_conn, server_conn), + (server_conn, client_conn)]: + + # Give the side a chance to generate some more bytes, or + # succeed. + try: + data = read.recv(2 ** 16) + except WantReadError: + # It didn't succeed, so we'll hope it generated some + # output. + pass + else: + # It did succeed, so we'll stop now and let the caller deal + # with it. + return (read, data) + + while True: + # Keep copying as long as there's more stuff there. + try: + dirty = read.bio_read(4096) + except WantReadError: + # Okay, nothing more waiting to be sent. Stop + # processing this send buffer. + break + else: + # Keep track of the fact that someone generated some + # output. + wrote = True + write.bio_write(dirty) + + + def _handshakeInMemory(self, client_conn, server_conn): + """ + Perform the TLS handshake between two :py:class:`Connection` instances + connected to each other via memory BIOs. + """ + client_conn.set_connect_state() + server_conn.set_accept_state() + + for conn in [client_conn, server_conn]: + try: + conn.do_handshake() + except WantReadError: + pass + + self._interactInMemory(client_conn, server_conn) + + + +class VersionTests(TestCase): + """ + Tests for version information exposed by + :py:obj:`OpenSSL.SSL.SSLeay_version` and + :py:obj:`OpenSSL.SSL.OPENSSL_VERSION_NUMBER`. + """ + def test_OPENSSL_VERSION_NUMBER(self): + """ + :py:obj:`OPENSSL_VERSION_NUMBER` is an integer with status in the low + byte and the patch, fix, minor, and major versions in the + nibbles above that. + """ + self.assertTrue(isinstance(OPENSSL_VERSION_NUMBER, int)) + + + def test_SSLeay_version(self): + """ + :py:obj:`SSLeay_version` takes a version type indicator and returns + one of a number of version strings based on that indicator. 
+ """ + versions = {} + for t in [SSLEAY_VERSION, SSLEAY_CFLAGS, SSLEAY_BUILT_ON, + SSLEAY_PLATFORM, SSLEAY_DIR]: + version = SSLeay_version(t) + versions[version] = t + self.assertTrue(isinstance(version, bytes)) + self.assertEqual(len(versions), 5) + + + +class ContextTests(TestCase, _LoopbackMixin): + """ + Unit tests for :py:obj:`OpenSSL.SSL.Context`. + """ + def test_method(self): + """ + :py:obj:`Context` can be instantiated with one of :py:obj:`SSLv2_METHOD`, + :py:obj:`SSLv3_METHOD`, :py:obj:`SSLv23_METHOD`, :py:obj:`TLSv1_METHOD`, + :py:obj:`TLSv1_1_METHOD`, or :py:obj:`TLSv1_2_METHOD`. + """ + methods = [ + SSLv3_METHOD, SSLv23_METHOD, TLSv1_METHOD] + for meth in methods: + Context(meth) + + + maybe = [SSLv2_METHOD, TLSv1_1_METHOD, TLSv1_2_METHOD] + for meth in maybe: + try: + Context(meth) + except (Error, ValueError): + # Some versions of OpenSSL have SSLv2 / TLSv1.1 / TLSv1.2, some + # don't. Difficult to say in advance. + pass + + self.assertRaises(TypeError, Context, "") + self.assertRaises(ValueError, Context, 10) + + + if not PY3: + def test_method_long(self): + """ + On Python 2 :py:class:`Context` accepts values of type + :py:obj:`long` as well as :py:obj:`int`. + """ + Context(long(TLSv1_METHOD)) + + + + def test_type(self): + """ + :py:obj:`Context` and :py:obj:`ContextType` refer to the same type object and can be + used to create instances of that type. + """ + self.assertIdentical(Context, ContextType) + self.assertConsistentType(Context, 'Context', TLSv1_METHOD) + + + def test_use_privatekey(self): + """ + :py:obj:`Context.use_privatekey` takes an :py:obj:`OpenSSL.crypto.PKey` instance. + """ + key = PKey() + key.generate_key(TYPE_RSA, 128) + ctx = Context(TLSv1_METHOD) + ctx.use_privatekey(key) + self.assertRaises(TypeError, ctx.use_privatekey, "") + + + def test_use_privatekey_file_missing(self): + """ + :py:obj:`Context.use_privatekey_file` raises :py:obj:`OpenSSL.SSL.Error` + when passed the name of a file which does not exist. + """ + ctx = Context(TLSv1_METHOD) + self.assertRaises(Error, ctx.use_privatekey_file, self.mktemp()) + + + def _use_privatekey_file_test(self, pemfile, filetype): + """ + Verify that calling ``Context.use_privatekey_file`` with the given + arguments does not raise an exception. + """ + key = PKey() + key.generate_key(TYPE_RSA, 128) + + with open(pemfile, "wt") as pem: + pem.write( + dump_privatekey(FILETYPE_PEM, key).decode("ascii") + ) + + ctx = Context(TLSv1_METHOD) + ctx.use_privatekey_file(pemfile, filetype) + + + def test_use_privatekey_file_bytes(self): + """ + A private key can be specified from a file by passing a ``bytes`` + instance giving the file name to ``Context.use_privatekey_file``. + """ + self._use_privatekey_file_test( + self.mktemp() + NON_ASCII.encode(getfilesystemencoding()), + FILETYPE_PEM, + ) + + + def test_use_privatekey_file_unicode(self): + """ + A private key can be specified from a file by passing a ``unicode`` + instance giving the file name to ``Context.use_privatekey_file``. + """ + self._use_privatekey_file_test( + self.mktemp().decode(getfilesystemencoding()) + NON_ASCII, + FILETYPE_PEM, + ) + + + if not PY3: + def test_use_privatekey_file_long(self): + """ + On Python 2 :py:obj:`Context.use_privatekey_file` accepts a + filetype of type :py:obj:`long` as well as :py:obj:`int`. 
+ """ + self._use_privatekey_file_test(self.mktemp(), long(FILETYPE_PEM)) + + + def test_use_certificate_wrong_args(self): + """ + :py:obj:`Context.use_certificate_wrong_args` raises :py:obj:`TypeError` + when not passed exactly one :py:obj:`OpenSSL.crypto.X509` instance as an + argument. + """ + ctx = Context(TLSv1_METHOD) + self.assertRaises(TypeError, ctx.use_certificate) + self.assertRaises(TypeError, ctx.use_certificate, "hello, world") + self.assertRaises(TypeError, ctx.use_certificate, X509(), "hello, world") + + + def test_use_certificate_uninitialized(self): + """ + :py:obj:`Context.use_certificate` raises :py:obj:`OpenSSL.SSL.Error` + when passed a :py:obj:`OpenSSL.crypto.X509` instance which has not been + initialized (ie, which does not actually have any certificate data). + """ + ctx = Context(TLSv1_METHOD) + self.assertRaises(Error, ctx.use_certificate, X509()) + + + def test_use_certificate(self): + """ + :py:obj:`Context.use_certificate` sets the certificate which will be + used to identify connections created using the context. + """ + # TODO + # Hard to assert anything. But we could set a privatekey then ask + # OpenSSL if the cert and key agree using check_privatekey. Then as + # long as check_privatekey works right we're good... + ctx = Context(TLSv1_METHOD) + ctx.use_certificate(load_certificate(FILETYPE_PEM, cleartextCertificatePEM)) + + + def test_use_certificate_file_wrong_args(self): + """ + :py:obj:`Context.use_certificate_file` raises :py:obj:`TypeError` if + called with zero arguments or more than two arguments, or if the first + argument is not a byte string or the second argumnent is not an integer. + """ + ctx = Context(TLSv1_METHOD) + self.assertRaises(TypeError, ctx.use_certificate_file) + self.assertRaises(TypeError, ctx.use_certificate_file, b"somefile", object()) + self.assertRaises( + TypeError, ctx.use_certificate_file, b"somefile", FILETYPE_PEM, object()) + self.assertRaises( + TypeError, ctx.use_certificate_file, object(), FILETYPE_PEM) + self.assertRaises( + TypeError, ctx.use_certificate_file, b"somefile", object()) + + + def test_use_certificate_file_missing(self): + """ + :py:obj:`Context.use_certificate_file` raises + `:py:obj:`OpenSSL.SSL.Error` if passed the name of a file which does not + exist. + """ + ctx = Context(TLSv1_METHOD) + self.assertRaises(Error, ctx.use_certificate_file, self.mktemp()) + + + def _use_certificate_file_test(self, certificate_file): + """ + Verify that calling ``Context.use_certificate_file`` with the given + filename doesn't raise an exception. + """ + # TODO + # Hard to assert anything. But we could set a privatekey then ask + # OpenSSL if the cert and key agree using check_privatekey. Then as + # long as check_privatekey works right we're good... + with open(certificate_file, "wb") as pem_file: + pem_file.write(cleartextCertificatePEM) + + ctx = Context(TLSv1_METHOD) + ctx.use_certificate_file(certificate_file) + + + def test_use_certificate_file_bytes(self): + """ + :py:obj:`Context.use_certificate_file` sets the certificate (given as a + ``bytes`` filename) which will be used to identify connections created + using the context. + """ + filename = self.mktemp() + NON_ASCII.encode(getfilesystemencoding()) + self._use_certificate_file_test(filename) + + + def test_use_certificate_file_unicode(self): + """ + :py:obj:`Context.use_certificate_file` sets the certificate (given as a + ``bytes`` filename) which will be used to identify connections created + using the context. 
+ """ + filename = self.mktemp().decode(getfilesystemencoding()) + NON_ASCII + self._use_certificate_file_test(filename) + + + if not PY3: + def test_use_certificate_file_long(self): + """ + On Python 2 :py:obj:`Context.use_certificate_file` accepts a + filetype of type :py:obj:`long` as well as :py:obj:`int`. + """ + pem_filename = self.mktemp() + with open(pem_filename, "wb") as pem_file: + pem_file.write(cleartextCertificatePEM) + + ctx = Context(TLSv1_METHOD) + ctx.use_certificate_file(pem_filename, long(FILETYPE_PEM)) + + + def test_check_privatekey_valid(self): + """ + :py:obj:`Context.check_privatekey` returns :py:obj:`None` if the + :py:obj:`Context` instance has been configured to use a matched key and + certificate pair. + """ + key = load_privatekey(FILETYPE_PEM, client_key_pem) + cert = load_certificate(FILETYPE_PEM, client_cert_pem) + context = Context(TLSv1_METHOD) + context.use_privatekey(key) + context.use_certificate(cert) + self.assertIs(None, context.check_privatekey()) + + + def test_check_privatekey_invalid(self): + """ + :py:obj:`Context.check_privatekey` raises :py:obj:`Error` if the + :py:obj:`Context` instance has been configured to use a key and + certificate pair which don't relate to each other. + """ + key = load_privatekey(FILETYPE_PEM, client_key_pem) + cert = load_certificate(FILETYPE_PEM, server_cert_pem) + context = Context(TLSv1_METHOD) + context.use_privatekey(key) + context.use_certificate(cert) + self.assertRaises(Error, context.check_privatekey) + + + def test_check_privatekey_wrong_args(self): + """ + :py:obj:`Context.check_privatekey` raises :py:obj:`TypeError` if called + with other than no arguments. + """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.check_privatekey, object()) + + + def test_set_app_data_wrong_args(self): + """ + :py:obj:`Context.set_app_data` raises :py:obj:`TypeError` if called with other than + one argument. + """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.set_app_data) + self.assertRaises(TypeError, context.set_app_data, None, None) + + + def test_get_app_data_wrong_args(self): + """ + :py:obj:`Context.get_app_data` raises :py:obj:`TypeError` if called with any + arguments. + """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.get_app_data, None) + + + def test_app_data(self): + """ + :py:obj:`Context.set_app_data` stores an object for later retrieval using + :py:obj:`Context.get_app_data`. + """ + app_data = object() + context = Context(TLSv1_METHOD) + context.set_app_data(app_data) + self.assertIdentical(context.get_app_data(), app_data) + + + def test_set_options_wrong_args(self): + """ + :py:obj:`Context.set_options` raises :py:obj:`TypeError` if called with the wrong + number of arguments or a non-:py:obj:`int` argument. + """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.set_options) + self.assertRaises(TypeError, context.set_options, None) + self.assertRaises(TypeError, context.set_options, 1, None) + + + def test_set_options(self): + """ + :py:obj:`Context.set_options` returns the new options value. + """ + context = Context(TLSv1_METHOD) + options = context.set_options(OP_NO_SSLv2) + self.assertTrue(OP_NO_SSLv2 & options) + + + if not PY3: + def test_set_options_long(self): + """ + On Python 2 :py:obj:`Context.set_options` accepts values of type + :py:obj:`long` as well as :py:obj:`int`. 
+ """ + context = Context(TLSv1_METHOD) + options = context.set_options(long(OP_NO_SSLv2)) + self.assertTrue(OP_NO_SSLv2 & options) + + + def test_set_mode_wrong_args(self): + """ + :py:obj:`Context.set`mode} raises :py:obj:`TypeError` if called with the wrong + number of arguments or a non-:py:obj:`int` argument. + """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.set_mode) + self.assertRaises(TypeError, context.set_mode, None) + self.assertRaises(TypeError, context.set_mode, 1, None) + + + if MODE_RELEASE_BUFFERS is not None: + def test_set_mode(self): + """ + :py:obj:`Context.set_mode` accepts a mode bitvector and returns the newly + set mode. + """ + context = Context(TLSv1_METHOD) + self.assertTrue( + MODE_RELEASE_BUFFERS & context.set_mode(MODE_RELEASE_BUFFERS)) + + if not PY3: + def test_set_mode_long(self): + """ + On Python 2 :py:obj:`Context.set_mode` accepts values of type + :py:obj:`long` as well as :py:obj:`int`. + """ + context = Context(TLSv1_METHOD) + mode = context.set_mode(long(MODE_RELEASE_BUFFERS)) + self.assertTrue(MODE_RELEASE_BUFFERS & mode) + else: + "MODE_RELEASE_BUFFERS unavailable - OpenSSL version may be too old" + + + def test_set_timeout_wrong_args(self): + """ + :py:obj:`Context.set_timeout` raises :py:obj:`TypeError` if called with the wrong + number of arguments or a non-:py:obj:`int` argument. + """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.set_timeout) + self.assertRaises(TypeError, context.set_timeout, None) + self.assertRaises(TypeError, context.set_timeout, 1, None) + + + def test_get_timeout_wrong_args(self): + """ + :py:obj:`Context.get_timeout` raises :py:obj:`TypeError` if called with any arguments. + """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.get_timeout, None) + + + def test_timeout(self): + """ + :py:obj:`Context.set_timeout` sets the session timeout for all connections + created using the context object. :py:obj:`Context.get_timeout` retrieves this + value. + """ + context = Context(TLSv1_METHOD) + context.set_timeout(1234) + self.assertEquals(context.get_timeout(), 1234) + + + if not PY3: + def test_timeout_long(self): + """ + On Python 2 :py:obj:`Context.set_timeout` accepts values of type + `long` as well as int. + """ + context = Context(TLSv1_METHOD) + context.set_timeout(long(1234)) + self.assertEquals(context.get_timeout(), 1234) + + + def test_set_verify_depth_wrong_args(self): + """ + :py:obj:`Context.set_verify_depth` raises :py:obj:`TypeError` if called with the wrong + number of arguments or a non-:py:obj:`int` argument. + """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.set_verify_depth) + self.assertRaises(TypeError, context.set_verify_depth, None) + self.assertRaises(TypeError, context.set_verify_depth, 1, None) + + + def test_get_verify_depth_wrong_args(self): + """ + :py:obj:`Context.get_verify_depth` raises :py:obj:`TypeError` if called with any arguments. + """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.get_verify_depth, None) + + + def test_verify_depth(self): + """ + :py:obj:`Context.set_verify_depth` sets the number of certificates in a chain + to follow before giving up. The value can be retrieved with + :py:obj:`Context.get_verify_depth`. 
+ """ + context = Context(TLSv1_METHOD) + context.set_verify_depth(11) + self.assertEquals(context.get_verify_depth(), 11) + + + if not PY3: + def test_verify_depth_long(self): + """ + On Python 2 :py:obj:`Context.set_verify_depth` accepts values of + type `long` as well as int. + """ + context = Context(TLSv1_METHOD) + context.set_verify_depth(long(11)) + self.assertEquals(context.get_verify_depth(), 11) + + + def _write_encrypted_pem(self, passphrase): + """ + Write a new private key out to a new file, encrypted using the given + passphrase. Return the path to the new file. + """ + key = PKey() + key.generate_key(TYPE_RSA, 128) + pemFile = self.mktemp() + fObj = open(pemFile, 'w') + pem = dump_privatekey(FILETYPE_PEM, key, "blowfish", passphrase) + fObj.write(pem.decode('ascii')) + fObj.close() + return pemFile + + + def test_set_passwd_cb_wrong_args(self): + """ + :py:obj:`Context.set_passwd_cb` raises :py:obj:`TypeError` if called with the + wrong arguments or with a non-callable first argument. + """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.set_passwd_cb) + self.assertRaises(TypeError, context.set_passwd_cb, None) + self.assertRaises(TypeError, context.set_passwd_cb, lambda: None, None, None) + + + def test_set_passwd_cb(self): + """ + :py:obj:`Context.set_passwd_cb` accepts a callable which will be invoked when + a private key is loaded from an encrypted PEM. + """ + passphrase = b("foobar") + pemFile = self._write_encrypted_pem(passphrase) + calledWith = [] + def passphraseCallback(maxlen, verify, extra): + calledWith.append((maxlen, verify, extra)) + return passphrase + context = Context(TLSv1_METHOD) + context.set_passwd_cb(passphraseCallback) + context.use_privatekey_file(pemFile) + self.assertTrue(len(calledWith), 1) + self.assertTrue(isinstance(calledWith[0][0], int)) + self.assertTrue(isinstance(calledWith[0][1], int)) + self.assertEqual(calledWith[0][2], None) + + + def test_passwd_callback_exception(self): + """ + :py:obj:`Context.use_privatekey_file` propagates any exception raised by the + passphrase callback. + """ + pemFile = self._write_encrypted_pem(b("monkeys are nice")) + def passphraseCallback(maxlen, verify, extra): + raise RuntimeError("Sorry, I am a fail.") + + context = Context(TLSv1_METHOD) + context.set_passwd_cb(passphraseCallback) + self.assertRaises(RuntimeError, context.use_privatekey_file, pemFile) + + + def test_passwd_callback_false(self): + """ + :py:obj:`Context.use_privatekey_file` raises :py:obj:`OpenSSL.SSL.Error` if the + passphrase callback returns a false value. + """ + pemFile = self._write_encrypted_pem(b("monkeys are nice")) + def passphraseCallback(maxlen, verify, extra): + return b"" + + context = Context(TLSv1_METHOD) + context.set_passwd_cb(passphraseCallback) + self.assertRaises(Error, context.use_privatekey_file, pemFile) + + + def test_passwd_callback_non_string(self): + """ + :py:obj:`Context.use_privatekey_file` raises :py:obj:`OpenSSL.SSL.Error` if the + passphrase callback returns a true non-string value. + """ + pemFile = self._write_encrypted_pem(b("monkeys are nice")) + def passphraseCallback(maxlen, verify, extra): + return 10 + + context = Context(TLSv1_METHOD) + context.set_passwd_cb(passphraseCallback) + self.assertRaises(ValueError, context.use_privatekey_file, pemFile) + + + def test_passwd_callback_too_long(self): + """ + If the passphrase returned by the passphrase callback returns a string + longer than the indicated maximum length, it is truncated. + """ + # A priori knowledge! 
+ passphrase = b("x") * 1024 + pemFile = self._write_encrypted_pem(passphrase) + def passphraseCallback(maxlen, verify, extra): + assert maxlen == 1024 + return passphrase + b("y") + + context = Context(TLSv1_METHOD) + context.set_passwd_cb(passphraseCallback) + # This shall succeed because the truncated result is the correct + # passphrase. + context.use_privatekey_file(pemFile) + + + def test_set_info_callback(self): + """ + :py:obj:`Context.set_info_callback` accepts a callable which will be invoked + when certain information about an SSL connection is available. + """ + (server, client) = socket_pair() + + clientSSL = Connection(Context(TLSv1_METHOD), client) + clientSSL.set_connect_state() + + called = [] + def info(conn, where, ret): + called.append((conn, where, ret)) + context = Context(TLSv1_METHOD) + context.set_info_callback(info) + context.use_certificate( + load_certificate(FILETYPE_PEM, cleartextCertificatePEM)) + context.use_privatekey( + load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM)) + + serverSSL = Connection(context, server) + serverSSL.set_accept_state() + + handshake(clientSSL, serverSSL) + + # The callback must always be called with a Connection instance as the + # first argument. It would probably be better to split this into + # separate tests for client and server side info callbacks so we could + # assert it is called with the right Connection instance. It would + # also be good to assert *something* about `where` and `ret`. + notConnections = [ + conn for (conn, where, ret) in called + if not isinstance(conn, Connection)] + self.assertEqual( + [], notConnections, + "Some info callback arguments were not Connection instaces.") + + + def _load_verify_locations_test(self, *args): + """ + Create a client context which will verify the peer certificate and call + its :py:obj:`load_verify_locations` method with the given arguments. + Then connect it to a server and ensure that the handshake succeeds. + """ + (server, client) = socket_pair() + + clientContext = Context(TLSv1_METHOD) + clientContext.load_verify_locations(*args) + # Require that the server certificate verify properly or the + # connection will fail. + clientContext.set_verify( + VERIFY_PEER, + lambda conn, cert, errno, depth, preverify_ok: preverify_ok) + + clientSSL = Connection(clientContext, client) + clientSSL.set_connect_state() + + serverContext = Context(TLSv1_METHOD) + serverContext.use_certificate( + load_certificate(FILETYPE_PEM, cleartextCertificatePEM)) + serverContext.use_privatekey( + load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM)) + + serverSSL = Connection(serverContext, server) + serverSSL.set_accept_state() + + # Without load_verify_locations above, the handshake + # will fail: + # Error: [('SSL routines', 'SSL3_GET_SERVER_CERTIFICATE', + # 'certificate verify failed')] + handshake(clientSSL, serverSSL) + + cert = clientSSL.get_peer_certificate() + self.assertEqual(cert.get_subject().CN, 'Testing Root CA') + + + def _load_verify_cafile(self, cafile): + """ + Verify that if path to a file containing a certificate is passed to + ``Context.load_verify_locations`` for the ``cafile`` parameter, that + certificate is used as a trust root for the purposes of verifying + connections created using that ``Context``. 
+ """ + fObj = open(cafile, 'w') + fObj.write(cleartextCertificatePEM.decode('ascii')) + fObj.close() + + self._load_verify_locations_test(cafile) + + + def test_load_verify_bytes_cafile(self): + """ + :py:obj:`Context.load_verify_locations` accepts a file name as a + ``bytes`` instance and uses the certificates within for verification + purposes. + """ + cafile = self.mktemp() + NON_ASCII.encode(getfilesystemencoding()) + self._load_verify_cafile(cafile) + + + def test_load_verify_unicode_cafile(self): + """ + :py:obj:`Context.load_verify_locations` accepts a file name as a + ``unicode`` instance and uses the certificates within for verification + purposes. + """ + self._load_verify_cafile( + self.mktemp().decode(getfilesystemencoding()) + NON_ASCII + ) + + + def test_load_verify_invalid_file(self): + """ + :py:obj:`Context.load_verify_locations` raises :py:obj:`Error` when passed a + non-existent cafile. + """ + clientContext = Context(TLSv1_METHOD) + self.assertRaises( + Error, clientContext.load_verify_locations, self.mktemp()) + + + def _load_verify_directory_locations_capath(self, capath): + """ + Verify that if path to a directory containing certificate files is + passed to ``Context.load_verify_locations`` for the ``capath`` + parameter, those certificates are used as trust roots for the purposes + of verifying connections created using that ``Context``. + """ + makedirs(capath) + # Hash values computed manually with c_rehash to avoid depending on + # c_rehash in the test suite. One is from OpenSSL 0.9.8, the other + # from OpenSSL 1.0.0. + for name in [b'c7adac82.0', b'c3705638.0']: + cafile = join_bytes_or_unicode(capath, name) + with open(cafile, 'w') as fObj: + fObj.write(cleartextCertificatePEM.decode('ascii')) + + self._load_verify_locations_test(None, capath) + + + def test_load_verify_directory_bytes_capath(self): + """ + :py:obj:`Context.load_verify_locations` accepts a directory name as a + ``bytes`` instance and uses the certificates within for verification + purposes. + """ + self._load_verify_directory_locations_capath( + self.mktemp() + NON_ASCII.encode(getfilesystemencoding()) + ) + + + def test_load_verify_directory_unicode_capath(self): + """ + :py:obj:`Context.load_verify_locations` accepts a directory name as a + ``unicode`` instance and uses the certificates within for verification + purposes. + """ + self._load_verify_directory_locations_capath( + self.mktemp().decode(getfilesystemencoding()) + NON_ASCII + ) + + + def test_load_verify_locations_wrong_args(self): + """ + :py:obj:`Context.load_verify_locations` raises :py:obj:`TypeError` if called with + the wrong number of arguments or with non-:py:obj:`str` arguments. + """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.load_verify_locations) + self.assertRaises(TypeError, context.load_verify_locations, object()) + self.assertRaises(TypeError, context.load_verify_locations, object(), object()) + self.assertRaises(TypeError, context.load_verify_locations, None, None, None) + + + if platform == "win32": + "set_default_verify_paths appears not to work on Windows. " + "See LP#404343 and LP#404344." + else: + def test_set_default_verify_paths(self): + """ + :py:obj:`Context.set_default_verify_paths` causes the platform-specific CA + certificate locations to be used for verification purposes. + """ + # Testing this requires a server with a certificate signed by one of + # the CAs in the platform CA location. Getting one of those costs + # money. 
Fortunately (or unfortunately, depending on your + # perspective), it's easy to think of a public server on the + # internet which has such a certificate. Connecting to the network + # in a unit test is bad, but it's the only way I can think of to + # really test this. -exarkun + + # Arg, verisign.com doesn't speak anything newer than TLS 1.0 + context = Context(TLSv1_METHOD) + context.set_default_verify_paths() + context.set_verify( + VERIFY_PEER, + lambda conn, cert, errno, depth, preverify_ok: preverify_ok) + + client = socket() + client.connect(('verisign.com', 443)) + clientSSL = Connection(context, client) + clientSSL.set_connect_state() + clientSSL.do_handshake() + clientSSL.send(b"GET / HTTP/1.0\r\n\r\n") + self.assertTrue(clientSSL.recv(1024)) + + + def test_set_default_verify_paths_signature(self): + """ + :py:obj:`Context.set_default_verify_paths` takes no arguments and raises + :py:obj:`TypeError` if given any. + """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.set_default_verify_paths, None) + self.assertRaises(TypeError, context.set_default_verify_paths, 1) + self.assertRaises(TypeError, context.set_default_verify_paths, "") + + + def test_add_extra_chain_cert_invalid_cert(self): + """ + :py:obj:`Context.add_extra_chain_cert` raises :py:obj:`TypeError` if called with + other than one argument or if called with an object which is not an + instance of :py:obj:`X509`. + """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.add_extra_chain_cert) + self.assertRaises(TypeError, context.add_extra_chain_cert, object()) + self.assertRaises(TypeError, context.add_extra_chain_cert, object(), object()) + + + def _handshake_test(self, serverContext, clientContext): + """ + Verify that a client and server created with the given contexts can + successfully handshake and communicate. + """ + serverSocket, clientSocket = socket_pair() + + server = Connection(serverContext, serverSocket) + server.set_accept_state() + + client = Connection(clientContext, clientSocket) + client.set_connect_state() + + # Make them talk to each other. + # self._interactInMemory(client, server) + for i in range(3): + for s in [client, server]: + try: + s.do_handshake() + except WantReadError: + pass + + + def test_set_verify_callback_connection_argument(self): + """ + The first argument passed to the verify callback is the + :py:class:`Connection` instance for which verification is taking place. + """ + serverContext = Context(TLSv1_METHOD) + serverContext.use_privatekey( + load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM)) + serverContext.use_certificate( + load_certificate(FILETYPE_PEM, cleartextCertificatePEM)) + serverConnection = Connection(serverContext, None) + + class VerifyCallback(object): + def callback(self, connection, *args): + self.connection = connection + return 1 + + verify = VerifyCallback() + clientContext = Context(TLSv1_METHOD) + clientContext.set_verify(VERIFY_PEER, verify.callback) + clientConnection = Connection(clientContext, None) + clientConnection.set_connect_state() + + self._handshakeInMemory(clientConnection, serverConnection) + + self.assertIdentical(verify.connection, clientConnection) + + + def test_set_verify_callback_exception(self): + """ + If the verify callback passed to :py:obj:`Context.set_verify` raises an + exception, verification fails and the exception is propagated to the + caller of :py:obj:`Connection.do_handshake`. 
+ """ + serverContext = Context(TLSv1_METHOD) + serverContext.use_privatekey( + load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM)) + serverContext.use_certificate( + load_certificate(FILETYPE_PEM, cleartextCertificatePEM)) + + clientContext = Context(TLSv1_METHOD) + def verify_callback(*args): + raise Exception("silly verify failure") + clientContext.set_verify(VERIFY_PEER, verify_callback) + + exc = self.assertRaises( + Exception, self._handshake_test, serverContext, clientContext) + self.assertEqual("silly verify failure", str(exc)) + + + def test_add_extra_chain_cert(self): + """ + :py:obj:`Context.add_extra_chain_cert` accepts an :py:obj:`X509` instance to add to + the certificate chain. + + See :py:obj:`_create_certificate_chain` for the details of the certificate + chain tested. + + The chain is tested by starting a server with scert and connecting + to it with a client which trusts cacert and requires verification to + succeed. + """ + chain = _create_certificate_chain() + [(cakey, cacert), (ikey, icert), (skey, scert)] = chain + + # Dump the CA certificate to a file because that's the only way to load + # it as a trusted CA in the client context. + for cert, name in [(cacert, 'ca.pem'), (icert, 'i.pem'), (scert, 's.pem')]: + fObj = open(name, 'w') + fObj.write(dump_certificate(FILETYPE_PEM, cert).decode('ascii')) + fObj.close() + + for key, name in [(cakey, 'ca.key'), (ikey, 'i.key'), (skey, 's.key')]: + fObj = open(name, 'w') + fObj.write(dump_privatekey(FILETYPE_PEM, key).decode('ascii')) + fObj.close() + + # Create the server context + serverContext = Context(TLSv1_METHOD) + serverContext.use_privatekey(skey) + serverContext.use_certificate(scert) + # The client already has cacert, we only need to give them icert. + serverContext.add_extra_chain_cert(icert) + + # Create the client + clientContext = Context(TLSv1_METHOD) + clientContext.set_verify( + VERIFY_PEER | VERIFY_FAIL_IF_NO_PEER_CERT, verify_cb) + clientContext.load_verify_locations(b"ca.pem") + + # Try it out. + self._handshake_test(serverContext, clientContext) + + + def _use_certificate_chain_file_test(self, certdir): + """ + Verify that :py:obj:`Context.use_certificate_chain_file` reads a + certificate chain from a specified file. + + The chain is tested by starting a server with scert and connecting to + it with a client which trusts cacert and requires verification to + succeed. + """ + chain = _create_certificate_chain() + [(cakey, cacert), (ikey, icert), (skey, scert)] = chain + + makedirs(certdir) + + chainFile = join_bytes_or_unicode(certdir, "chain.pem") + caFile = join_bytes_or_unicode(certdir, "ca.pem") + + # Write out the chain file. + with open(chainFile, 'wb') as fObj: + # Most specific to least general. 
+ fObj.write(dump_certificate(FILETYPE_PEM, scert)) + fObj.write(dump_certificate(FILETYPE_PEM, icert)) + fObj.write(dump_certificate(FILETYPE_PEM, cacert)) + + with open(caFile, 'w') as fObj: + fObj.write(dump_certificate(FILETYPE_PEM, cacert).decode('ascii')) + + serverContext = Context(TLSv1_METHOD) + serverContext.use_certificate_chain_file(chainFile) + serverContext.use_privatekey(skey) + + clientContext = Context(TLSv1_METHOD) + clientContext.set_verify( + VERIFY_PEER | VERIFY_FAIL_IF_NO_PEER_CERT, verify_cb) + clientContext.load_verify_locations(caFile) + + self._handshake_test(serverContext, clientContext) + + + def test_use_certificate_chain_file_bytes(self): + """ + ``Context.use_certificate_chain_file`` accepts the name of a file (as + an instance of ``bytes``) to specify additional certificates to use to + construct and verify a trust chain. + """ + self._use_certificate_chain_file_test( + self.mktemp() + NON_ASCII.encode(getfilesystemencoding()) + ) + + + def test_use_certificate_chain_file_unicode(self): + """ + ``Context.use_certificate_chain_file`` accepts the name of a file (as + an instance of ``unicode``) to specify additional certificates to use + to construct and verify a trust chain. + """ + self._use_certificate_chain_file_test( + self.mktemp().decode(getfilesystemencoding()) + NON_ASCII + ) + + + def test_use_certificate_chain_file_wrong_args(self): + """ + :py:obj:`Context.use_certificate_chain_file` raises :py:obj:`TypeError` + if passed zero or more than one argument or when passed a non-byte + string single argument. It also raises :py:obj:`OpenSSL.SSL.Error` when + passed a bad chain file name (for example, the name of a file which does + not exist). + """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.use_certificate_chain_file) + self.assertRaises(TypeError, context.use_certificate_chain_file, object()) + self.assertRaises(TypeError, context.use_certificate_chain_file, b"foo", object()) + + self.assertRaises(Error, context.use_certificate_chain_file, self.mktemp()) + + # XXX load_client_ca + # XXX set_session_id + + def test_get_verify_mode_wrong_args(self): + """ + :py:obj:`Context.get_verify_mode` raises :py:obj:`TypeError` if called with any + arguments. + """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.get_verify_mode, None) + + + def test_set_verify_mode(self): + """ + :py:obj:`Context.get_verify_mode` returns the verify mode flags previously + passed to :py:obj:`Context.set_verify`. + """ + context = Context(TLSv1_METHOD) + self.assertEquals(context.get_verify_mode(), 0) + context.set_verify( + VERIFY_PEER | VERIFY_CLIENT_ONCE, lambda *args: None) + self.assertEquals( + context.get_verify_mode(), VERIFY_PEER | VERIFY_CLIENT_ONCE) + + + if not PY3: + def test_set_verify_mode_long(self): + """ + On Python 2 :py:obj:`Context.set_verify_mode` accepts values of + type :py:obj:`long` as well as :py:obj:`int`. + """ + context = Context(TLSv1_METHOD) + self.assertEquals(context.get_verify_mode(), 0) + context.set_verify( + long(VERIFY_PEER | VERIFY_CLIENT_ONCE), lambda *args: None) + self.assertEquals( + context.get_verify_mode(), VERIFY_PEER | VERIFY_CLIENT_ONCE) + + + def test_load_tmp_dh_wrong_args(self): + """ + :py:obj:`Context.load_tmp_dh` raises :py:obj:`TypeError` if called with the wrong + number of arguments or with a non-:py:obj:`str` argument. 
+ """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.load_tmp_dh) + self.assertRaises(TypeError, context.load_tmp_dh, "foo", None) + self.assertRaises(TypeError, context.load_tmp_dh, object()) + + + def test_load_tmp_dh_missing_file(self): + """ + :py:obj:`Context.load_tmp_dh` raises :py:obj:`OpenSSL.SSL.Error` if the specified file + does not exist. + """ + context = Context(TLSv1_METHOD) + self.assertRaises(Error, context.load_tmp_dh, b"hello") + + + def _load_tmp_dh_test(self, dhfilename): + """ + Verify that calling ``Context.load_tmp_dh`` with the given filename + does not raise an exception. + """ + context = Context(TLSv1_METHOD) + with open(dhfilename, "w") as dhfile: + dhfile.write(dhparam) + + context.load_tmp_dh(dhfilename) + # XXX What should I assert here? -exarkun + + + def test_load_tmp_dh_bytes(self): + """ + :py:obj:`Context.load_tmp_dh` loads Diffie-Hellman parameters from the + specified file (given as ``bytes``). + """ + self._load_tmp_dh_test( + self.mktemp() + NON_ASCII.encode(getfilesystemencoding()), + ) + + + def test_load_tmp_dh_unicode(self): + """ + :py:obj:`Context.load_tmp_dh` loads Diffie-Hellman parameters from the + specified file (given as ``unicode``). + """ + self._load_tmp_dh_test( + self.mktemp().decode(getfilesystemencoding()) + NON_ASCII, + ) + + + def test_set_tmp_ecdh(self): + """ + :py:obj:`Context.set_tmp_ecdh` sets the elliptic curve for + Diffie-Hellman to the specified curve. + """ + context = Context(TLSv1_METHOD) + for curve in get_elliptic_curves(): + # The only easily "assertable" thing is that it does not raise an + # exception. + context.set_tmp_ecdh(curve) + + + def test_set_cipher_list_bytes(self): + """ + :py:obj:`Context.set_cipher_list` accepts a :py:obj:`bytes` naming the + ciphers which connections created with the context object will be able + to choose from. + """ + context = Context(TLSv1_METHOD) + context.set_cipher_list(b"hello world:EXP-RC4-MD5") + conn = Connection(context, None) + self.assertEquals(conn.get_cipher_list(), ["EXP-RC4-MD5"]) + + + def test_set_cipher_list_text(self): + """ + :py:obj:`Context.set_cipher_list` accepts a :py:obj:`unicode` naming + the ciphers which connections created with the context object will be + able to choose from. + """ + context = Context(TLSv1_METHOD) + context.set_cipher_list(u("hello world:EXP-RC4-MD5")) + conn = Connection(context, None) + self.assertEquals(conn.get_cipher_list(), ["EXP-RC4-MD5"]) + + + def test_set_cipher_list_wrong_args(self): + """ + :py:obj:`Context.set_cipher_list` raises :py:obj:`TypeError` when + passed zero arguments or more than one argument or when passed a + non-string single argument and raises :py:obj:`OpenSSL.SSL.Error` when + passed an incorrect cipher list string. + """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.set_cipher_list) + self.assertRaises(TypeError, context.set_cipher_list, object()) + self.assertRaises(TypeError, context.set_cipher_list, b"EXP-RC4-MD5", object()) + + self.assertRaises(Error, context.set_cipher_list, "imaginary-cipher") + + + def test_set_session_cache_mode_wrong_args(self): + """ + :py:obj:`Context.set_session_cache_mode` raises :py:obj:`TypeError` if + called with other than one integer argument. 
+ """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.set_session_cache_mode) + self.assertRaises(TypeError, context.set_session_cache_mode, object()) + + + def test_get_session_cache_mode_wrong_args(self): + """ + :py:obj:`Context.get_session_cache_mode` raises :py:obj:`TypeError` if + called with any arguments. + """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.get_session_cache_mode, 1) + + + def test_session_cache_mode(self): + """ + :py:obj:`Context.set_session_cache_mode` specifies how sessions are + cached. The setting can be retrieved via + :py:obj:`Context.get_session_cache_mode`. + """ + context = Context(TLSv1_METHOD) + context.set_session_cache_mode(SESS_CACHE_OFF) + off = context.set_session_cache_mode(SESS_CACHE_BOTH) + self.assertEqual(SESS_CACHE_OFF, off) + self.assertEqual(SESS_CACHE_BOTH, context.get_session_cache_mode()) + + if not PY3: + def test_session_cache_mode_long(self): + """ + On Python 2 :py:obj:`Context.set_session_cache_mode` accepts values + of type :py:obj:`long` as well as :py:obj:`int`. + """ + context = Context(TLSv1_METHOD) + context.set_session_cache_mode(long(SESS_CACHE_BOTH)) + self.assertEqual( + SESS_CACHE_BOTH, context.get_session_cache_mode()) + + + def test_get_cert_store(self): + """ + :py:obj:`Context.get_cert_store` returns a :py:obj:`X509Store` instance. + """ + context = Context(TLSv1_METHOD) + store = context.get_cert_store() + self.assertIsInstance(store, X509Store) + + + +class ServerNameCallbackTests(TestCase, _LoopbackMixin): + """ + Tests for :py:obj:`Context.set_tlsext_servername_callback` and its interaction with + :py:obj:`Connection`. + """ + def test_wrong_args(self): + """ + :py:obj:`Context.set_tlsext_servername_callback` raises :py:obj:`TypeError` if called + with other than one argument. + """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.set_tlsext_servername_callback) + self.assertRaises( + TypeError, context.set_tlsext_servername_callback, 1, 2) + + + def test_old_callback_forgotten(self): + """ + If :py:obj:`Context.set_tlsext_servername_callback` is used to specify a new + callback, the one it replaces is dereferenced. + """ + def callback(connection): + pass + + def replacement(connection): + pass + + context = Context(TLSv1_METHOD) + context.set_tlsext_servername_callback(callback) + + tracker = ref(callback) + del callback + + context.set_tlsext_servername_callback(replacement) + + # One run of the garbage collector happens to work on CPython. PyPy + # doesn't collect the underlying object until a second run for whatever + # reason. That's fine, it still demonstrates our code has properly + # dropped the reference. + collect() + collect() + + callback = tracker() + if callback is not None: + referrers = get_referrers(callback) + if len(referrers) > 1: + self.fail("Some references remain: %r" % (referrers,)) + + + def test_no_servername(self): + """ + When a client specifies no server name, the callback passed to + :py:obj:`Context.set_tlsext_servername_callback` is invoked and the result of + :py:obj:`Connection.get_servername` is :py:obj:`None`. + """ + args = [] + def servername(conn): + args.append((conn, conn.get_servername())) + context = Context(TLSv1_METHOD) + context.set_tlsext_servername_callback(servername) + + # Lose our reference to it. The Context is responsible for keeping it + # alive now. 
+ del servername
+ collect()
+
+ # Necessary to actually accept the connection
+ context.use_privatekey(load_privatekey(FILETYPE_PEM, server_key_pem))
+ context.use_certificate(load_certificate(FILETYPE_PEM, server_cert_pem))
+
+ # Do a little connection to trigger the logic
+ server = Connection(context, None)
+ server.set_accept_state()
+
+ client = Connection(Context(TLSv1_METHOD), None)
+ client.set_connect_state()
+
+ self._interactInMemory(server, client)
+
+ self.assertEqual([(server, None)], args)
+
+
+ def test_servername(self):
+ """
+ When a client specifies a server name in its hello message, the callback
+ passed to :py:obj:`Context.set_tlsext_servername_callback` is invoked and the
+ result of :py:obj:`Connection.get_servername` is that server name.
+ """
+ args = []
+ def servername(conn):
+ args.append((conn, conn.get_servername()))
+ context = Context(TLSv1_METHOD)
+ context.set_tlsext_servername_callback(servername)
+
+ # Necessary to actually accept the connection
+ context.use_privatekey(load_privatekey(FILETYPE_PEM, server_key_pem))
+ context.use_certificate(load_certificate(FILETYPE_PEM, server_cert_pem))
+
+ # Do a little connection to trigger the logic
+ server = Connection(context, None)
+ server.set_accept_state()
+
+ client = Connection(Context(TLSv1_METHOD), None)
+ client.set_connect_state()
+ client.set_tlsext_host_name(b("foo1.example.com"))
+
+ self._interactInMemory(server, client)
+
+ self.assertEqual([(server, b("foo1.example.com"))], args)
+
+
+class NextProtoNegotiationTests(TestCase, _LoopbackMixin):
+ """
+ Tests for Next Protocol Negotiation in PyOpenSSL.
+ """
+ if _lib.Cryptography_HAS_NEXTPROTONEG:
+ def test_npn_success(self):
+ """
+ Tests that clients and servers that agree on the negotiated next
+ protocol can correctly establish a connection, and that the agreed
+ protocol is reported by the connections.
+ """
+ advertise_args = []
+ select_args = []
+ def advertise(conn):
+ advertise_args.append((conn,))
+ return [b'http/1.1', b'spdy/2']
+ def select(conn, options):
+ select_args.append((conn, options))
+ return b'spdy/2'
+
+ server_context = Context(TLSv1_METHOD)
+ server_context.set_npn_advertise_callback(advertise)
+
+ client_context = Context(TLSv1_METHOD)
+ client_context.set_npn_select_callback(select)
+
+ # Necessary to actually accept the connection
+ server_context.use_privatekey(
+ load_privatekey(FILETYPE_PEM, server_key_pem))
+ server_context.use_certificate(
+ load_certificate(FILETYPE_PEM, server_cert_pem))
+
+ # Do a little connection to trigger the logic
+ server = Connection(server_context, None)
+ server.set_accept_state()
+
+ client = Connection(client_context, None)
+ client.set_connect_state()
+
+ self._interactInMemory(server, client)
+
+ self.assertEqual([(server,)], advertise_args)
+ self.assertEqual([(client, [b'http/1.1', b'spdy/2'])], select_args)
+
+ self.assertEqual(server.get_next_proto_negotiated(), b'spdy/2')
+ self.assertEqual(client.get_next_proto_negotiated(), b'spdy/2')
+
+
+ def test_npn_client_fail(self):
+ """
+ Tests that when clients and servers cannot agree on what protocol
+ to use next, the TLS connection does not get established.
+ """ + advertise_args = [] + select_args = [] + def advertise(conn): + advertise_args.append((conn,)) + return [b'http/1.1', b'spdy/2'] + def select(conn, options): + select_args.append((conn, options)) + return b'' + + server_context = Context(TLSv1_METHOD) + server_context.set_npn_advertise_callback(advertise) + + client_context = Context(TLSv1_METHOD) + client_context.set_npn_select_callback(select) + + # Necessary to actually accept the connection + server_context.use_privatekey( + load_privatekey(FILETYPE_PEM, server_key_pem)) + server_context.use_certificate( + load_certificate(FILETYPE_PEM, server_cert_pem)) + + # Do a little connection to trigger the logic + server = Connection(server_context, None) + server.set_accept_state() + + client = Connection(client_context, None) + client.set_connect_state() + + # If the client doesn't return anything, the connection will fail. + self.assertRaises(Error, self._interactInMemory, server, client) + + self.assertEqual([(server,)], advertise_args) + self.assertEqual([(client, [b'http/1.1', b'spdy/2'])], select_args) + + + def test_npn_select_error(self): + """ + Test that we can handle exceptions in the select callback. If + select fails it should be fatal to the connection. + """ + advertise_args = [] + def advertise(conn): + advertise_args.append((conn,)) + return [b'http/1.1', b'spdy/2'] + def select(conn, options): + raise TypeError + + server_context = Context(TLSv1_METHOD) + server_context.set_npn_advertise_callback(advertise) + + client_context = Context(TLSv1_METHOD) + client_context.set_npn_select_callback(select) + + # Necessary to actually accept the connection + server_context.use_privatekey( + load_privatekey(FILETYPE_PEM, server_key_pem)) + server_context.use_certificate( + load_certificate(FILETYPE_PEM, server_cert_pem)) + + # Do a little connection to trigger the logic + server = Connection(server_context, None) + server.set_accept_state() + + client = Connection(client_context, None) + client.set_connect_state() + + # If the callback throws an exception it should be raised here. + self.assertRaises( + TypeError, self._interactInMemory, server, client + ) + self.assertEqual([(server,)], advertise_args) + + + def test_npn_advertise_error(self): + """ + Test that we can handle exceptions in the advertise callback. If + advertise fails no NPN is advertised to the client. + """ + select_args = [] + def advertise(conn): + raise TypeError + def select(conn, options): + select_args.append((conn, options)) + return b'' + + server_context = Context(TLSv1_METHOD) + server_context.set_npn_advertise_callback(advertise) + + client_context = Context(TLSv1_METHOD) + client_context.set_npn_select_callback(select) + + # Necessary to actually accept the connection + server_context.use_privatekey( + load_privatekey(FILETYPE_PEM, server_key_pem)) + server_context.use_certificate( + load_certificate(FILETYPE_PEM, server_cert_pem)) + + # Do a little connection to trigger the logic + server = Connection(server_context, None) + server.set_accept_state() + + client = Connection(client_context, None) + client.set_connect_state() + + # If the client doesn't return anything, the connection will fail. + self.assertRaises( + TypeError, self._interactInMemory, server, client + ) + self.assertEqual([], select_args) + + else: + # No NPN. + def test_npn_not_implemented(self): + # Test the context methods first. 
+ context = Context(TLSv1_METHOD) + fail_methods = [ + context.set_npn_advertise_callback, + context.set_npn_select_callback, + ] + for method in fail_methods: + self.assertRaises( + NotImplementedError, method, None + ) + + # Now test a connection. + conn = Connection(context) + fail_methods = [ + conn.get_next_proto_negotiated, + ] + for method in fail_methods: + self.assertRaises(NotImplementedError, method) + + + +class ApplicationLayerProtoNegotiationTests(TestCase, _LoopbackMixin): + """ + Tests for ALPN in PyOpenSSL. + """ + # Skip tests on versions that don't support ALPN. + if _lib.Cryptography_HAS_ALPN: + + def test_alpn_success(self): + """ + Clients and servers that agree on the negotiated ALPN protocol can + correct establish a connection, and the agreed protocol is reported + by the connections. + """ + select_args = [] + def select(conn, options): + select_args.append((conn, options)) + return b'spdy/2' + + client_context = Context(TLSv1_METHOD) + client_context.set_alpn_protos([b'http/1.1', b'spdy/2']) + + server_context = Context(TLSv1_METHOD) + server_context.set_alpn_select_callback(select) + + # Necessary to actually accept the connection + server_context.use_privatekey( + load_privatekey(FILETYPE_PEM, server_key_pem)) + server_context.use_certificate( + load_certificate(FILETYPE_PEM, server_cert_pem)) + + # Do a little connection to trigger the logic + server = Connection(server_context, None) + server.set_accept_state() + + client = Connection(client_context, None) + client.set_connect_state() + + self._interactInMemory(server, client) + + self.assertEqual([(server, [b'http/1.1', b'spdy/2'])], select_args) + + self.assertEqual(server.get_alpn_proto_negotiated(), b'spdy/2') + self.assertEqual(client.get_alpn_proto_negotiated(), b'spdy/2') + + + def test_alpn_set_on_connection(self): + """ + The same as test_alpn_success, but setting the ALPN protocols on + the connection rather than the context. + """ + select_args = [] + def select(conn, options): + select_args.append((conn, options)) + return b'spdy/2' + + # Setup the client context but don't set any ALPN protocols. + client_context = Context(TLSv1_METHOD) + + server_context = Context(TLSv1_METHOD) + server_context.set_alpn_select_callback(select) + + # Necessary to actually accept the connection + server_context.use_privatekey( + load_privatekey(FILETYPE_PEM, server_key_pem)) + server_context.use_certificate( + load_certificate(FILETYPE_PEM, server_cert_pem)) + + # Do a little connection to trigger the logic + server = Connection(server_context, None) + server.set_accept_state() + + # Set the ALPN protocols on the client connection. + client = Connection(client_context, None) + client.set_alpn_protos([b'http/1.1', b'spdy/2']) + client.set_connect_state() + + self._interactInMemory(server, client) + + self.assertEqual([(server, [b'http/1.1', b'spdy/2'])], select_args) + + self.assertEqual(server.get_alpn_proto_negotiated(), b'spdy/2') + self.assertEqual(client.get_alpn_proto_negotiated(), b'spdy/2') + + + def test_alpn_server_fail(self): + """ + When clients and servers cannot agree on what protocol to use next + the TLS connection does not get established. 
+ """ + select_args = [] + def select(conn, options): + select_args.append((conn, options)) + return b'' + + client_context = Context(TLSv1_METHOD) + client_context.set_alpn_protos([b'http/1.1', b'spdy/2']) + + server_context = Context(TLSv1_METHOD) + server_context.set_alpn_select_callback(select) + + # Necessary to actually accept the connection + server_context.use_privatekey( + load_privatekey(FILETYPE_PEM, server_key_pem)) + server_context.use_certificate( + load_certificate(FILETYPE_PEM, server_cert_pem)) + + # Do a little connection to trigger the logic + server = Connection(server_context, None) + server.set_accept_state() + + client = Connection(client_context, None) + client.set_connect_state() + + # If the client doesn't return anything, the connection will fail. + self.assertRaises(Error, self._interactInMemory, server, client) + + self.assertEqual([(server, [b'http/1.1', b'spdy/2'])], select_args) + + + def test_alpn_no_server(self): + """ + When clients and servers cannot agree on what protocol to use next + because the server doesn't offer ALPN, no protocol is negotiated. + """ + client_context = Context(TLSv1_METHOD) + client_context.set_alpn_protos([b'http/1.1', b'spdy/2']) + + server_context = Context(TLSv1_METHOD) + + # Necessary to actually accept the connection + server_context.use_privatekey( + load_privatekey(FILETYPE_PEM, server_key_pem)) + server_context.use_certificate( + load_certificate(FILETYPE_PEM, server_cert_pem)) + + # Do a little connection to trigger the logic + server = Connection(server_context, None) + server.set_accept_state() + + client = Connection(client_context, None) + client.set_connect_state() + + # Do the dance. + self._interactInMemory(server, client) + + self.assertEqual(client.get_alpn_proto_negotiated(), b'') + + + def test_alpn_callback_exception(self): + """ + We can handle exceptions in the ALPN select callback. + """ + select_args = [] + def select(conn, options): + select_args.append((conn, options)) + raise TypeError() + + client_context = Context(TLSv1_METHOD) + client_context.set_alpn_protos([b'http/1.1', b'spdy/2']) + + server_context = Context(TLSv1_METHOD) + server_context.set_alpn_select_callback(select) + + # Necessary to actually accept the connection + server_context.use_privatekey( + load_privatekey(FILETYPE_PEM, server_key_pem)) + server_context.use_certificate( + load_certificate(FILETYPE_PEM, server_cert_pem)) + + # Do a little connection to trigger the logic + server = Connection(server_context, None) + server.set_accept_state() + + client = Connection(client_context, None) + client.set_connect_state() + + self.assertRaises( + TypeError, self._interactInMemory, server, client + ) + self.assertEqual([(server, [b'http/1.1', b'spdy/2'])], select_args) + + else: + # No ALPN. + def test_alpn_not_implemented(self): + """ + If ALPN is not in OpenSSL, we should raise NotImplementedError. + """ + # Test the context methods first. + context = Context(TLSv1_METHOD) + self.assertRaises( + NotImplementedError, context.set_alpn_protos, None + ) + self.assertRaises( + NotImplementedError, context.set_alpn_select_callback, None + ) + + # Now test a connection. + conn = Connection(context) + self.assertRaises( + NotImplementedError, context.set_alpn_protos, None + ) + + + +class SessionTests(TestCase): + """ + Unit tests for :py:obj:`OpenSSL.SSL.Session`. + """ + def test_construction(self): + """ + :py:class:`Session` can be constructed with no arguments, creating a new + instance of that type. 
+ """ + new_session = Session() + self.assertTrue(isinstance(new_session, Session)) + + + def test_construction_wrong_args(self): + """ + If any arguments are passed to :py:class:`Session`, :py:obj:`TypeError` + is raised. + """ + self.assertRaises(TypeError, Session, 123) + self.assertRaises(TypeError, Session, "hello") + self.assertRaises(TypeError, Session, object()) + + + +class ConnectionTests(TestCase, _LoopbackMixin): + """ + Unit tests for :py:obj:`OpenSSL.SSL.Connection`. + """ + # XXX get_peer_certificate -> None + # XXX sock_shutdown + # XXX master_key -> TypeError + # XXX server_random -> TypeError + # XXX state_string + # XXX connect -> TypeError + # XXX connect_ex -> TypeError + # XXX set_connect_state -> TypeError + # XXX set_accept_state -> TypeError + # XXX renegotiate_pending + # XXX do_handshake -> TypeError + # XXX bio_read -> TypeError + # XXX recv -> TypeError + # XXX send -> TypeError + # XXX bio_write -> TypeError + + def test_type(self): + """ + :py:obj:`Connection` and :py:obj:`ConnectionType` refer to the same type object and + can be used to create instances of that type. + """ + self.assertIdentical(Connection, ConnectionType) + ctx = Context(TLSv1_METHOD) + self.assertConsistentType(Connection, 'Connection', ctx, None) + + + def test_get_context(self): + """ + :py:obj:`Connection.get_context` returns the :py:obj:`Context` instance used to + construct the :py:obj:`Connection` instance. + """ + context = Context(TLSv1_METHOD) + connection = Connection(context, None) + self.assertIdentical(connection.get_context(), context) + + + def test_get_context_wrong_args(self): + """ + :py:obj:`Connection.get_context` raises :py:obj:`TypeError` if called with any + arguments. + """ + connection = Connection(Context(TLSv1_METHOD), None) + self.assertRaises(TypeError, connection.get_context, None) + + + def test_set_context_wrong_args(self): + """ + :py:obj:`Connection.set_context` raises :py:obj:`TypeError` if called with a + non-:py:obj:`Context` instance argument or with any number of arguments other + than 1. + """ + ctx = Context(TLSv1_METHOD) + connection = Connection(ctx, None) + self.assertRaises(TypeError, connection.set_context) + self.assertRaises(TypeError, connection.set_context, object()) + self.assertRaises(TypeError, connection.set_context, "hello") + self.assertRaises(TypeError, connection.set_context, 1) + self.assertRaises(TypeError, connection.set_context, 1, 2) + self.assertRaises( + TypeError, connection.set_context, Context(TLSv1_METHOD), 2) + self.assertIdentical(ctx, connection.get_context()) + + + def test_set_context(self): + """ + :py:obj:`Connection.set_context` specifies a new :py:obj:`Context` instance to be used + for the connection. + """ + original = Context(SSLv23_METHOD) + replacement = Context(TLSv1_METHOD) + connection = Connection(original, None) + connection.set_context(replacement) + self.assertIdentical(replacement, connection.get_context()) + # Lose our references to the contexts, just in case the Connection isn't + # properly managing its own contributions to their reference counts. + del original, replacement + collect() + + + def test_set_tlsext_host_name_wrong_args(self): + """ + If :py:obj:`Connection.set_tlsext_host_name` is called with a non-byte string + argument or a byte string with an embedded NUL or other than one + argument, :py:obj:`TypeError` is raised. 
+ """ + conn = Connection(Context(TLSv1_METHOD), None) + self.assertRaises(TypeError, conn.set_tlsext_host_name) + self.assertRaises(TypeError, conn.set_tlsext_host_name, object()) + self.assertRaises(TypeError, conn.set_tlsext_host_name, 123, 456) + self.assertRaises( + TypeError, conn.set_tlsext_host_name, b("with\0null")) + + if PY3: + # On Python 3.x, don't accidentally implicitly convert from text. + self.assertRaises( + TypeError, + conn.set_tlsext_host_name, b("example.com").decode("ascii")) + + + def test_get_servername_wrong_args(self): + """ + :py:obj:`Connection.get_servername` raises :py:obj:`TypeError` if called with any + arguments. + """ + connection = Connection(Context(TLSv1_METHOD), None) + self.assertRaises(TypeError, connection.get_servername, object()) + self.assertRaises(TypeError, connection.get_servername, 1) + self.assertRaises(TypeError, connection.get_servername, "hello") + + + def test_pending(self): + """ + :py:obj:`Connection.pending` returns the number of bytes available for + immediate read. + """ + connection = Connection(Context(TLSv1_METHOD), None) + self.assertEquals(connection.pending(), 0) + + + def test_pending_wrong_args(self): + """ + :py:obj:`Connection.pending` raises :py:obj:`TypeError` if called with any arguments. + """ + connection = Connection(Context(TLSv1_METHOD), None) + self.assertRaises(TypeError, connection.pending, None) + + + def test_connect_wrong_args(self): + """ + :py:obj:`Connection.connect` raises :py:obj:`TypeError` if called with a non-address + argument or with the wrong number of arguments. + """ + connection = Connection(Context(TLSv1_METHOD), socket()) + self.assertRaises(TypeError, connection.connect, None) + self.assertRaises(TypeError, connection.connect) + self.assertRaises(TypeError, connection.connect, ("127.0.0.1", 1), None) + + + def test_connect_refused(self): + """ + :py:obj:`Connection.connect` raises :py:obj:`socket.error` if the underlying socket + connect method raises it. + """ + client = socket() + context = Context(TLSv1_METHOD) + clientSSL = Connection(context, client) + exc = self.assertRaises(error, clientSSL.connect, ("127.0.0.1", 1)) + self.assertEquals(exc.args[0], ECONNREFUSED) + + + def test_connect(self): + """ + :py:obj:`Connection.connect` establishes a connection to the specified address. + """ + port = socket() + port.bind(('', 0)) + port.listen(3) + + clientSSL = Connection(Context(TLSv1_METHOD), socket()) + clientSSL.connect(('127.0.0.1', port.getsockname()[1])) + # XXX An assertion? Or something? + + + if platform == "darwin": + "connect_ex sometimes causes a kernel panic on OS X 10.6.4" + else: + def test_connect_ex(self): + """ + If there is a connection error, :py:obj:`Connection.connect_ex` returns the + errno instead of raising an exception. + """ + port = socket() + port.bind(('', 0)) + port.listen(3) + + clientSSL = Connection(Context(TLSv1_METHOD), socket()) + clientSSL.setblocking(False) + result = clientSSL.connect_ex(port.getsockname()) + expected = (EINPROGRESS, EWOULDBLOCK) + self.assertTrue( + result in expected, "%r not in %r" % (result, expected)) + + + def test_accept_wrong_args(self): + """ + :py:obj:`Connection.accept` raises :py:obj:`TypeError` if called with any arguments. 
+ """ + connection = Connection(Context(TLSv1_METHOD), socket()) + self.assertRaises(TypeError, connection.accept, None) + + + def test_accept(self): + """ + :py:obj:`Connection.accept` accepts a pending connection attempt and returns a + tuple of a new :py:obj:`Connection` (the accepted client) and the address the + connection originated from. + """ + ctx = Context(TLSv1_METHOD) + ctx.use_privatekey(load_privatekey(FILETYPE_PEM, server_key_pem)) + ctx.use_certificate(load_certificate(FILETYPE_PEM, server_cert_pem)) + port = socket() + portSSL = Connection(ctx, port) + portSSL.bind(('', 0)) + portSSL.listen(3) + + clientSSL = Connection(Context(TLSv1_METHOD), socket()) + + # Calling portSSL.getsockname() here to get the server IP address sounds + # great, but frequently fails on Windows. + clientSSL.connect(('127.0.0.1', portSSL.getsockname()[1])) + + serverSSL, address = portSSL.accept() + + self.assertTrue(isinstance(serverSSL, Connection)) + self.assertIdentical(serverSSL.get_context(), ctx) + self.assertEquals(address, clientSSL.getsockname()) + + + def test_shutdown_wrong_args(self): + """ + :py:obj:`Connection.shutdown` raises :py:obj:`TypeError` if called with the wrong + number of arguments or with arguments other than integers. + """ + connection = Connection(Context(TLSv1_METHOD), None) + self.assertRaises(TypeError, connection.shutdown, None) + self.assertRaises(TypeError, connection.get_shutdown, None) + self.assertRaises(TypeError, connection.set_shutdown) + self.assertRaises(TypeError, connection.set_shutdown, None) + self.assertRaises(TypeError, connection.set_shutdown, 0, 1) + + + def test_shutdown(self): + """ + :py:obj:`Connection.shutdown` performs an SSL-level connection shutdown. + """ + server, client = self._loopback() + self.assertFalse(server.shutdown()) + self.assertEquals(server.get_shutdown(), SENT_SHUTDOWN) + self.assertRaises(ZeroReturnError, client.recv, 1024) + self.assertEquals(client.get_shutdown(), RECEIVED_SHUTDOWN) + client.shutdown() + self.assertEquals(client.get_shutdown(), SENT_SHUTDOWN|RECEIVED_SHUTDOWN) + self.assertRaises(ZeroReturnError, server.recv, 1024) + self.assertEquals(server.get_shutdown(), SENT_SHUTDOWN|RECEIVED_SHUTDOWN) + + + def test_shutdown_closed(self): + """ + If the underlying socket is closed, :py:obj:`Connection.shutdown` propagates the + write error from the low level write call. + """ + server, client = self._loopback() + server.sock_shutdown(2) + exc = self.assertRaises(SysCallError, server.shutdown) + if platform == "win32": + self.assertEqual(exc.args[0], ESHUTDOWN) + else: + self.assertEqual(exc.args[0], EPIPE) + + + def test_shutdown_truncated(self): + """ + If the underlying connection is truncated, :obj:`Connection.shutdown` + raises an :obj:`Error`. + """ + server_ctx = Context(TLSv1_METHOD) + client_ctx = Context(TLSv1_METHOD) + server_ctx.use_privatekey( + load_privatekey(FILETYPE_PEM, server_key_pem)) + server_ctx.use_certificate( + load_certificate(FILETYPE_PEM, server_cert_pem)) + server = Connection(server_ctx, None) + client = Connection(client_ctx, None) + self._handshakeInMemory(client, server) + self.assertEqual(server.shutdown(), False) + self.assertRaises(WantReadError, server.shutdown) + server.bio_shutdown() + self.assertRaises(Error, server.shutdown) + + + def test_set_shutdown(self): + """ + :py:obj:`Connection.set_shutdown` sets the state of the SSL connection shutdown + process. 
+ """ + connection = Connection(Context(TLSv1_METHOD), socket()) + connection.set_shutdown(RECEIVED_SHUTDOWN) + self.assertEquals(connection.get_shutdown(), RECEIVED_SHUTDOWN) + + + if not PY3: + def test_set_shutdown_long(self): + """ + On Python 2 :py:obj:`Connection.set_shutdown` accepts an argument + of type :py:obj:`long` as well as :py:obj:`int`. + """ + connection = Connection(Context(TLSv1_METHOD), socket()) + connection.set_shutdown(long(RECEIVED_SHUTDOWN)) + self.assertEquals(connection.get_shutdown(), RECEIVED_SHUTDOWN) + + + def test_app_data_wrong_args(self): + """ + :py:obj:`Connection.set_app_data` raises :py:obj:`TypeError` if called with other than + one argument. :py:obj:`Connection.get_app_data` raises :py:obj:`TypeError` if called + with any arguments. + """ + conn = Connection(Context(TLSv1_METHOD), None) + self.assertRaises(TypeError, conn.get_app_data, None) + self.assertRaises(TypeError, conn.set_app_data) + self.assertRaises(TypeError, conn.set_app_data, None, None) + + + def test_app_data(self): + """ + Any object can be set as app data by passing it to + :py:obj:`Connection.set_app_data` and later retrieved with + :py:obj:`Connection.get_app_data`. + """ + conn = Connection(Context(TLSv1_METHOD), None) + app_data = object() + conn.set_app_data(app_data) + self.assertIdentical(conn.get_app_data(), app_data) + + + def test_makefile(self): + """ + :py:obj:`Connection.makefile` is not implemented and calling that method raises + :py:obj:`NotImplementedError`. + """ + conn = Connection(Context(TLSv1_METHOD), None) + self.assertRaises(NotImplementedError, conn.makefile) + + + def test_get_peer_cert_chain_wrong_args(self): + """ + :py:obj:`Connection.get_peer_cert_chain` raises :py:obj:`TypeError` if called with any + arguments. + """ + conn = Connection(Context(TLSv1_METHOD), None) + self.assertRaises(TypeError, conn.get_peer_cert_chain, 1) + self.assertRaises(TypeError, conn.get_peer_cert_chain, "foo") + self.assertRaises(TypeError, conn.get_peer_cert_chain, object()) + self.assertRaises(TypeError, conn.get_peer_cert_chain, []) + + + def test_get_peer_cert_chain(self): + """ + :py:obj:`Connection.get_peer_cert_chain` returns a list of certificates which + the connected server returned for the certification verification. + """ + chain = _create_certificate_chain() + [(cakey, cacert), (ikey, icert), (skey, scert)] = chain + + serverContext = Context(TLSv1_METHOD) + serverContext.use_privatekey(skey) + serverContext.use_certificate(scert) + serverContext.add_extra_chain_cert(icert) + serverContext.add_extra_chain_cert(cacert) + server = Connection(serverContext, None) + server.set_accept_state() + + # Create the client + clientContext = Context(TLSv1_METHOD) + clientContext.set_verify(VERIFY_NONE, verify_cb) + client = Connection(clientContext, None) + client.set_connect_state() + + self._interactInMemory(client, server) + + chain = client.get_peer_cert_chain() + self.assertEqual(len(chain), 3) + self.assertEqual( + "Server Certificate", chain[0].get_subject().CN) + self.assertEqual( + "Intermediate Certificate", chain[1].get_subject().CN) + self.assertEqual( + "Authority Certificate", chain[2].get_subject().CN) + + + def test_get_peer_cert_chain_none(self): + """ + :py:obj:`Connection.get_peer_cert_chain` returns :py:obj:`None` if the peer sends no + certificate chain. 
+ """ + ctx = Context(TLSv1_METHOD) + ctx.use_privatekey(load_privatekey(FILETYPE_PEM, server_key_pem)) + ctx.use_certificate(load_certificate(FILETYPE_PEM, server_cert_pem)) + server = Connection(ctx, None) + server.set_accept_state() + client = Connection(Context(TLSv1_METHOD), None) + client.set_connect_state() + self._interactInMemory(client, server) + self.assertIdentical(None, server.get_peer_cert_chain()) + + + def test_get_session_wrong_args(self): + """ + :py:obj:`Connection.get_session` raises :py:obj:`TypeError` if called + with any arguments. + """ + ctx = Context(TLSv1_METHOD) + server = Connection(ctx, None) + self.assertRaises(TypeError, server.get_session, 123) + self.assertRaises(TypeError, server.get_session, "hello") + self.assertRaises(TypeError, server.get_session, object()) + + + def test_get_session_unconnected(self): + """ + :py:obj:`Connection.get_session` returns :py:obj:`None` when used with + an object which has not been connected. + """ + ctx = Context(TLSv1_METHOD) + server = Connection(ctx, None) + session = server.get_session() + self.assertIdentical(None, session) + + + def test_server_get_session(self): + """ + On the server side of a connection, :py:obj:`Connection.get_session` + returns a :py:class:`Session` instance representing the SSL session for + that connection. + """ + server, client = self._loopback() + session = server.get_session() + self.assertIsInstance(session, Session) + + + def test_client_get_session(self): + """ + On the client side of a connection, :py:obj:`Connection.get_session` + returns a :py:class:`Session` instance representing the SSL session for + that connection. + """ + server, client = self._loopback() + session = client.get_session() + self.assertIsInstance(session, Session) + + + def test_set_session_wrong_args(self): + """ + If called with an object that is not an instance of :py:class:`Session`, + or with other than one argument, :py:obj:`Connection.set_session` raises + :py:obj:`TypeError`. + """ + ctx = Context(TLSv1_METHOD) + connection = Connection(ctx, None) + self.assertRaises(TypeError, connection.set_session) + self.assertRaises(TypeError, connection.set_session, 123) + self.assertRaises(TypeError, connection.set_session, "hello") + self.assertRaises(TypeError, connection.set_session, object()) + self.assertRaises( + TypeError, connection.set_session, Session(), Session()) + + + def test_client_set_session(self): + """ + :py:obj:`Connection.set_session`, when used prior to a connection being + established, accepts a :py:class:`Session` instance and causes an + attempt to re-use the session it represents when the SSL handshake is + performed. 
+ """ + key = load_privatekey(FILETYPE_PEM, server_key_pem) + cert = load_certificate(FILETYPE_PEM, server_cert_pem) + ctx = Context(TLSv1_METHOD) + ctx.use_privatekey(key) + ctx.use_certificate(cert) + ctx.set_session_id("unity-test") + + def makeServer(socket): + server = Connection(ctx, socket) + server.set_accept_state() + return server + + originalServer, originalClient = self._loopback( + serverFactory=makeServer) + originalSession = originalClient.get_session() + + def makeClient(socket): + client = self._loopbackClientFactory(socket) + client.set_session(originalSession) + return client + resumedServer, resumedClient = self._loopback( + serverFactory=makeServer, + clientFactory=makeClient) + + # This is a proxy: in general, we have no access to any unique + # identifier for the session (new enough versions of OpenSSL expose a + # hash which could be usable, but "new enough" is very, very new). + # Instead, exploit the fact that the master key is re-used if the + # session is re-used. As long as the master key for the two connections + # is the same, the session was re-used! + self.assertEqual( + originalServer.master_key(), resumedServer.master_key()) + + + def test_set_session_wrong_method(self): + """ + If :py:obj:`Connection.set_session` is passed a :py:class:`Session` + instance associated with a context using a different SSL method than the + :py:obj:`Connection` is using, a :py:class:`OpenSSL.SSL.Error` is + raised. + """ + key = load_privatekey(FILETYPE_PEM, server_key_pem) + cert = load_certificate(FILETYPE_PEM, server_cert_pem) + ctx = Context(TLSv1_METHOD) + ctx.use_privatekey(key) + ctx.use_certificate(cert) + ctx.set_session_id("unity-test") + + def makeServer(socket): + server = Connection(ctx, socket) + server.set_accept_state() + return server + + originalServer, originalClient = self._loopback( + serverFactory=makeServer) + originalSession = originalClient.get_session() + + def makeClient(socket): + # Intentionally use a different, incompatible method here. + client = Connection(Context(SSLv3_METHOD), socket) + client.set_connect_state() + client.set_session(originalSession) + return client + + self.assertRaises( + Error, + self._loopback, clientFactory=makeClient, serverFactory=makeServer) + + + def test_wantWriteError(self): + """ + :py:obj:`Connection` methods which generate output raise + :py:obj:`OpenSSL.SSL.WantWriteError` if writing to the connection's BIO + fail indicating a should-write state. + """ + client_socket, server_socket = socket_pair() + # Fill up the client's send buffer so Connection won't be able to write + # anything. Only write a single byte at a time so we can be sure we + # completely fill the buffer. Even though the socket API is allowed to + # signal a short write via its return value it seems this doesn't + # always happen on all platforms (FreeBSD and OS X particular) for the + # very last bit of available buffer space. + msg = b"x" + for i in range(1024 * 1024 * 4): + try: + client_socket.send(msg) + except error as e: + if e.errno == EWOULDBLOCK: + break + raise + else: + self.fail( + "Failed to fill socket buffer, cannot test BIO want write") + + ctx = Context(TLSv1_METHOD) + conn = Connection(ctx, client_socket) + # Client's speak first, so make it an SSL client + conn.set_connect_state() + self.assertRaises(WantWriteError, conn.do_handshake) + + # XXX want_read + + def test_get_finished_before_connect(self): + """ + :py:obj:`Connection.get_finished` returns :py:obj:`None` before TLS + handshake is completed. 
+ """ + ctx = Context(TLSv1_METHOD) + connection = Connection(ctx, None) + self.assertEqual(connection.get_finished(), None) + + + def test_get_peer_finished_before_connect(self): + """ + :py:obj:`Connection.get_peer_finished` returns :py:obj:`None` before + TLS handshake is completed. + """ + ctx = Context(TLSv1_METHOD) + connection = Connection(ctx, None) + self.assertEqual(connection.get_peer_finished(), None) + + + def test_get_finished(self): + """ + :py:obj:`Connection.get_finished` method returns the TLS Finished + message send from client, or server. Finished messages are send during + TLS handshake. + """ + + server, client = self._loopback() + + self.assertNotEqual(server.get_finished(), None) + self.assertTrue(len(server.get_finished()) > 0) + + + def test_get_peer_finished(self): + """ + :py:obj:`Connection.get_peer_finished` method returns the TLS Finished + message received from client, or server. Finished messages are send + during TLS handshake. + """ + server, client = self._loopback() + + self.assertNotEqual(server.get_peer_finished(), None) + self.assertTrue(len(server.get_peer_finished()) > 0) + + + def test_tls_finished_message_symmetry(self): + """ + The TLS Finished message send by server must be the TLS Finished message + received by client. + + The TLS Finished message send by client must be the TLS Finished message + received by server. + """ + server, client = self._loopback() + + self.assertEqual(server.get_finished(), client.get_peer_finished()) + self.assertEqual(client.get_finished(), server.get_peer_finished()) + + + def test_get_cipher_name_before_connect(self): + """ + :py:obj:`Connection.get_cipher_name` returns :py:obj:`None` if no + connection has been established. + """ + ctx = Context(TLSv1_METHOD) + conn = Connection(ctx, None) + self.assertIdentical(conn.get_cipher_name(), None) + + + def test_get_cipher_name(self): + """ + :py:obj:`Connection.get_cipher_name` returns a :py:class:`unicode` + string giving the name of the currently used cipher. + """ + server, client = self._loopback() + server_cipher_name, client_cipher_name = \ + server.get_cipher_name(), client.get_cipher_name() + + self.assertIsInstance(server_cipher_name, text_type) + self.assertIsInstance(client_cipher_name, text_type) + + self.assertEqual(server_cipher_name, client_cipher_name) + + + def test_get_cipher_version_before_connect(self): + """ + :py:obj:`Connection.get_cipher_version` returns :py:obj:`None` if no + connection has been established. + """ + ctx = Context(TLSv1_METHOD) + conn = Connection(ctx, None) + self.assertIdentical(conn.get_cipher_version(), None) + + + def test_get_cipher_version(self): + """ + :py:obj:`Connection.get_cipher_version` returns a :py:class:`unicode` + string giving the protocol name of the currently used cipher. + """ + server, client = self._loopback() + server_cipher_version, client_cipher_version = \ + server.get_cipher_version(), client.get_cipher_version() + + self.assertIsInstance(server_cipher_version, text_type) + self.assertIsInstance(client_cipher_version, text_type) + + self.assertEqual(server_cipher_version, client_cipher_version) + + + def test_get_cipher_bits_before_connect(self): + """ + :py:obj:`Connection.get_cipher_bits` returns :py:obj:`None` if no + connection has been established. 
+ """ + ctx = Context(TLSv1_METHOD) + conn = Connection(ctx, None) + self.assertIdentical(conn.get_cipher_bits(), None) + + + def test_get_cipher_bits(self): + """ + :py:obj:`Connection.get_cipher_bits` returns the number of secret bits + of the currently used cipher. + """ + server, client = self._loopback() + server_cipher_bits, client_cipher_bits = \ + server.get_cipher_bits(), client.get_cipher_bits() + + self.assertIsInstance(server_cipher_bits, int) + self.assertIsInstance(client_cipher_bits, int) + + self.assertEqual(server_cipher_bits, client_cipher_bits) + + + +class ConnectionGetCipherListTests(TestCase): + """ + Tests for :py:obj:`Connection.get_cipher_list`. + """ + def test_wrong_args(self): + """ + :py:obj:`Connection.get_cipher_list` raises :py:obj:`TypeError` if called with any + arguments. + """ + connection = Connection(Context(TLSv1_METHOD), None) + self.assertRaises(TypeError, connection.get_cipher_list, None) + + + def test_result(self): + """ + :py:obj:`Connection.get_cipher_list` returns a :py:obj:`list` of + :py:obj:`bytes` giving the names of the ciphers which might be used. + """ + connection = Connection(Context(TLSv1_METHOD), None) + ciphers = connection.get_cipher_list() + self.assertTrue(isinstance(ciphers, list)) + for cipher in ciphers: + self.assertTrue(isinstance(cipher, str)) + + + +class ConnectionSendTests(TestCase, _LoopbackMixin): + """ + Tests for :py:obj:`Connection.send` + """ + def test_wrong_args(self): + """ + When called with arguments other than string argument for its first + parameter or more than two arguments, :py:obj:`Connection.send` raises + :py:obj:`TypeError`. + """ + connection = Connection(Context(TLSv1_METHOD), None) + self.assertRaises(TypeError, connection.send) + self.assertRaises(TypeError, connection.send, object()) + self.assertRaises(TypeError, connection.send, "foo", object(), "bar") + + + def test_short_bytes(self): + """ + When passed a short byte string, :py:obj:`Connection.send` transmits all of it + and returns the number of bytes sent. + """ + server, client = self._loopback() + count = server.send(b('xy')) + self.assertEquals(count, 2) + self.assertEquals(client.recv(2), b('xy')) + + + def test_text(self): + """ + When passed a text, :py:obj:`Connection.send` transmits all of it and + returns the number of bytes sent. It also raises a DeprecationWarning. + """ + server, client = self._loopback() + with catch_warnings(record=True) as w: + simplefilter("always") + count = server.send(b"xy".decode("ascii")) + self.assertEqual( + "{0} for buf is no longer accepted, use bytes".format( + WARNING_TYPE_EXPECTED + ), + str(w[-1].message) + ) + self.assertIs(w[-1].category, DeprecationWarning) + self.assertEquals(count, 2) + self.assertEquals(client.recv(2), b"xy") + + try: + memoryview + except NameError: + "cannot test sending memoryview without memoryview" + else: + def test_short_memoryview(self): + """ + When passed a memoryview onto a small number of bytes, + :py:obj:`Connection.send` transmits all of them and returns the number of + bytes sent. + """ + server, client = self._loopback() + count = server.send(memoryview(b('xy'))) + self.assertEquals(count, 2) + self.assertEquals(client.recv(2), b('xy')) + + + try: + buffer + except NameError: + "cannot test sending buffer without buffer" + else: + def test_short_buffer(self): + """ + When passed a buffer containing a small number of bytes, + :py:obj:`Connection.send` transmits all of them and returns the number of + bytes sent. 
+ """ + server, client = self._loopback() + count = server.send(buffer(b('xy'))) + self.assertEquals(count, 2) + self.assertEquals(client.recv(2), b('xy')) + + + +def _make_memoryview(size): + """ + Create a new ``memoryview`` wrapped around a ``bytearray`` of the given + size. + """ + return memoryview(bytearray(size)) + + + +class ConnectionRecvIntoTests(TestCase, _LoopbackMixin): + """ + Tests for :py:obj:`Connection.recv_into` + """ + def _no_length_test(self, factory): + """ + Assert that when the given buffer is passed to + ``Connection.recv_into``, whatever bytes are available to be received + that fit into that buffer are written into that buffer. + """ + output_buffer = factory(5) + + server, client = self._loopback() + server.send(b('xy')) + + self.assertEqual(client.recv_into(output_buffer), 2) + self.assertEqual(output_buffer, bytearray(b('xy\x00\x00\x00'))) + + + def test_bytearray_no_length(self): + """ + :py:obj:`Connection.recv_into` can be passed a ``bytearray`` instance + and data in the receive buffer is written to it. + """ + self._no_length_test(bytearray) + + + def _respects_length_test(self, factory): + """ + Assert that when the given buffer is passed to ``Connection.recv_into`` + along with a value for ``nbytes`` that is less than the size of that + buffer, only ``nbytes`` bytes are written into the buffer. + """ + output_buffer = factory(10) + + server, client = self._loopback() + server.send(b('abcdefghij')) + + self.assertEqual(client.recv_into(output_buffer, 5), 5) + self.assertEqual( + output_buffer, bytearray(b('abcde\x00\x00\x00\x00\x00')) + ) + + + def test_bytearray_respects_length(self): + """ + When called with a ``bytearray`` instance, + :py:obj:`Connection.recv_into` respects the ``nbytes`` parameter and + doesn't copy in more than that number of bytes. + """ + self._respects_length_test(bytearray) + + + def _doesnt_overfill_test(self, factory): + """ + Assert that if there are more bytes available to be read from the + receive buffer than would fit into the buffer passed to + :py:obj:`Connection.recv_into`, only as many as fit are written into + it. + """ + output_buffer = factory(5) + + server, client = self._loopback() + server.send(b('abcdefghij')) + + self.assertEqual(client.recv_into(output_buffer), 5) + self.assertEqual(output_buffer, bytearray(b('abcde'))) + rest = client.recv(5) + self.assertEqual(b('fghij'), rest) + + + def test_bytearray_doesnt_overfill(self): + """ + When called with a ``bytearray`` instance, + :py:obj:`Connection.recv_into` respects the size of the array and + doesn't write more bytes into it than will fit. + """ + self._doesnt_overfill_test(bytearray) + + + def _really_doesnt_overfill_test(self, factory): + """ + Assert that if the value given by ``nbytes`` is greater than the actual + size of the output buffer passed to :py:obj:`Connection.recv_into`, the + behavior is as if no value was given for ``nbytes`` at all. + """ + output_buffer = factory(5) + + server, client = self._loopback() + server.send(b('abcdefghij')) + + self.assertEqual(client.recv_into(output_buffer, 50), 5) + self.assertEqual(output_buffer, bytearray(b('abcde'))) + rest = client.recv(5) + self.assertEqual(b('fghij'), rest) + + + def test_bytearray_really_doesnt_overfill(self): + """ + When called with a ``bytearray`` instance and an ``nbytes`` value that + is too large, :py:obj:`Connection.recv_into` respects the size of the + array and not the ``nbytes`` value and doesn't write more bytes into + the buffer than will fit. 
+ """ + self._doesnt_overfill_test(bytearray) + + + try: + memoryview + except NameError: + "cannot test recv_into memoryview without memoryview" + else: + def test_memoryview_no_length(self): + """ + :py:obj:`Connection.recv_into` can be passed a ``memoryview`` + instance and data in the receive buffer is written to it. + """ + self._no_length_test(_make_memoryview) + + + def test_memoryview_respects_length(self): + """ + When called with a ``memoryview`` instance, + :py:obj:`Connection.recv_into` respects the ``nbytes`` parameter + and doesn't copy more than that number of bytes in. + """ + self._respects_length_test(_make_memoryview) + + + def test_memoryview_doesnt_overfill(self): + """ + When called with a ``memoryview`` instance, + :py:obj:`Connection.recv_into` respects the size of the array and + doesn't write more bytes into it than will fit. + """ + self._doesnt_overfill_test(_make_memoryview) + + + def test_memoryview_really_doesnt_overfill(self): + """ + When called with a ``memoryview`` instance and an ``nbytes`` value + that is too large, :py:obj:`Connection.recv_into` respects the size + of the array and not the ``nbytes`` value and doesn't write more + bytes into the buffer than will fit. + """ + self._doesnt_overfill_test(_make_memoryview) + + + +class ConnectionSendallTests(TestCase, _LoopbackMixin): + """ + Tests for :py:obj:`Connection.sendall`. + """ + def test_wrong_args(self): + """ + When called with arguments other than a string argument for its first + parameter or with more than two arguments, :py:obj:`Connection.sendall` + raises :py:obj:`TypeError`. + """ + connection = Connection(Context(TLSv1_METHOD), None) + self.assertRaises(TypeError, connection.sendall) + self.assertRaises(TypeError, connection.sendall, object()) + self.assertRaises( + TypeError, connection.sendall, "foo", object(), "bar") + + + def test_short(self): + """ + :py:obj:`Connection.sendall` transmits all of the bytes in the string passed to + it. + """ + server, client = self._loopback() + server.sendall(b('x')) + self.assertEquals(client.recv(1), b('x')) + + + def test_text(self): + """ + :py:obj:`Connection.sendall` transmits all the content in the string + passed to it raising a DeprecationWarning in case of this being a text. + """ + server, client = self._loopback() + with catch_warnings(record=True) as w: + simplefilter("always") + server.sendall(b"x".decode("ascii")) + self.assertEqual( + "{0} for buf is no longer accepted, use bytes".format( + WARNING_TYPE_EXPECTED + ), + str(w[-1].message) + ) + self.assertIs(w[-1].category, DeprecationWarning) + self.assertEquals(client.recv(1), b"x") + + + try: + memoryview + except NameError: + "cannot test sending memoryview without memoryview" + else: + def test_short_memoryview(self): + """ + When passed a memoryview onto a small number of bytes, + :py:obj:`Connection.sendall` transmits all of them. + """ + server, client = self._loopback() + server.sendall(memoryview(b('x'))) + self.assertEquals(client.recv(1), b('x')) + + + try: + buffer + except NameError: + "cannot test sending buffers without buffers" + else: + def test_short_buffers(self): + """ + When passed a buffer containing a small number of bytes, + :py:obj:`Connection.sendall` transmits all of them. 
+ """ + server, client = self._loopback() + server.sendall(buffer(b('x'))) + self.assertEquals(client.recv(1), b('x')) + + + def test_long(self): + """ + :py:obj:`Connection.sendall` transmits all of the bytes in the string passed to + it even if this requires multiple calls of an underlying write function. + """ + server, client = self._loopback() + # Should be enough, underlying SSL_write should only do 16k at a time. + # On Windows, after 32k of bytes the write will block (forever - because + # no one is yet reading). + message = b('x') * (1024 * 32 - 1) + b('y') + server.sendall(message) + accum = [] + received = 0 + while received < len(message): + data = client.recv(1024) + accum.append(data) + received += len(data) + self.assertEquals(message, b('').join(accum)) + + + def test_closed(self): + """ + If the underlying socket is closed, :py:obj:`Connection.sendall` propagates the + write error from the low level write call. + """ + server, client = self._loopback() + server.sock_shutdown(2) + exc = self.assertRaises(SysCallError, server.sendall, b"hello, world") + if platform == "win32": + self.assertEqual(exc.args[0], ESHUTDOWN) + else: + self.assertEqual(exc.args[0], EPIPE) + + + +class ConnectionRenegotiateTests(TestCase, _LoopbackMixin): + """ + Tests for SSL renegotiation APIs. + """ + def test_renegotiate_wrong_args(self): + """ + :py:obj:`Connection.renegotiate` raises :py:obj:`TypeError` if called with any + arguments. + """ + connection = Connection(Context(TLSv1_METHOD), None) + self.assertRaises(TypeError, connection.renegotiate, None) + + + def test_total_renegotiations_wrong_args(self): + """ + :py:obj:`Connection.total_renegotiations` raises :py:obj:`TypeError` if called with + any arguments. + """ + connection = Connection(Context(TLSv1_METHOD), None) + self.assertRaises(TypeError, connection.total_renegotiations, None) + + + def test_total_renegotiations(self): + """ + :py:obj:`Connection.total_renegotiations` returns :py:obj:`0` before any + renegotiations have happened. + """ + connection = Connection(Context(TLSv1_METHOD), None) + self.assertEquals(connection.total_renegotiations(), 0) + + +# def test_renegotiate(self): +# """ +# """ +# server, client = self._loopback() + +# server.send("hello world") +# self.assertEquals(client.recv(len("hello world")), "hello world") + +# self.assertEquals(server.total_renegotiations(), 0) +# self.assertTrue(server.renegotiate()) + +# server.setblocking(False) +# client.setblocking(False) +# while server.renegotiate_pending(): +# client.do_handshake() +# server.do_handshake() + +# self.assertEquals(server.total_renegotiations(), 1) + + + + +class ErrorTests(TestCase): + """ + Unit tests for :py:obj:`OpenSSL.SSL.Error`. + """ + def test_type(self): + """ + :py:obj:`Error` is an exception type. + """ + self.assertTrue(issubclass(Error, Exception)) + self.assertEqual(Error.__name__, 'Error') + + + +class ConstantsTests(TestCase): + """ + Tests for the values of constants exposed in :py:obj:`OpenSSL.SSL`. + + These are values defined by OpenSSL intended only to be used as flags to + OpenSSL APIs. The only assertions it seems can be made about them is + their values. + """ + # unittest.TestCase has no skip mechanism + if OP_NO_QUERY_MTU is not None: + def test_op_no_query_mtu(self): + """ + The value of :py:obj:`OpenSSL.SSL.OP_NO_QUERY_MTU` is 0x1000, the value of + :py:const:`SSL_OP_NO_QUERY_MTU` defined by :file:`openssl/ssl.h`. 
+ """ + self.assertEqual(OP_NO_QUERY_MTU, 0x1000) + else: + "OP_NO_QUERY_MTU unavailable - OpenSSL version may be too old" + + + if OP_COOKIE_EXCHANGE is not None: + def test_op_cookie_exchange(self): + """ + The value of :py:obj:`OpenSSL.SSL.OP_COOKIE_EXCHANGE` is 0x2000, the value + of :py:const:`SSL_OP_COOKIE_EXCHANGE` defined by :file:`openssl/ssl.h`. + """ + self.assertEqual(OP_COOKIE_EXCHANGE, 0x2000) + else: + "OP_COOKIE_EXCHANGE unavailable - OpenSSL version may be too old" + + + if OP_NO_TICKET is not None: + def test_op_no_ticket(self): + """ + The value of :py:obj:`OpenSSL.SSL.OP_NO_TICKET` is 0x4000, the value of + :py:const:`SSL_OP_NO_TICKET` defined by :file:`openssl/ssl.h`. + """ + self.assertEqual(OP_NO_TICKET, 0x4000) + else: + "OP_NO_TICKET unavailable - OpenSSL version may be too old" + + + if OP_NO_COMPRESSION is not None: + def test_op_no_compression(self): + """ + The value of :py:obj:`OpenSSL.SSL.OP_NO_COMPRESSION` is 0x20000, the value + of :py:const:`SSL_OP_NO_COMPRESSION` defined by :file:`openssl/ssl.h`. + """ + self.assertEqual(OP_NO_COMPRESSION, 0x20000) + else: + "OP_NO_COMPRESSION unavailable - OpenSSL version may be too old" + + + def test_sess_cache_off(self): + """ + The value of :py:obj:`OpenSSL.SSL.SESS_CACHE_OFF` 0x0, the value of + :py:obj:`SSL_SESS_CACHE_OFF` defined by ``openssl/ssl.h``. + """ + self.assertEqual(0x0, SESS_CACHE_OFF) + + + def test_sess_cache_client(self): + """ + The value of :py:obj:`OpenSSL.SSL.SESS_CACHE_CLIENT` 0x1, the value of + :py:obj:`SSL_SESS_CACHE_CLIENT` defined by ``openssl/ssl.h``. + """ + self.assertEqual(0x1, SESS_CACHE_CLIENT) + + + def test_sess_cache_server(self): + """ + The value of :py:obj:`OpenSSL.SSL.SESS_CACHE_SERVER` 0x2, the value of + :py:obj:`SSL_SESS_CACHE_SERVER` defined by ``openssl/ssl.h``. + """ + self.assertEqual(0x2, SESS_CACHE_SERVER) + + + def test_sess_cache_both(self): + """ + The value of :py:obj:`OpenSSL.SSL.SESS_CACHE_BOTH` 0x3, the value of + :py:obj:`SSL_SESS_CACHE_BOTH` defined by ``openssl/ssl.h``. + """ + self.assertEqual(0x3, SESS_CACHE_BOTH) + + + def test_sess_cache_no_auto_clear(self): + """ + The value of :py:obj:`OpenSSL.SSL.SESS_CACHE_NO_AUTO_CLEAR` 0x80, the + value of :py:obj:`SSL_SESS_CACHE_NO_AUTO_CLEAR` defined by + ``openssl/ssl.h``. + """ + self.assertEqual(0x80, SESS_CACHE_NO_AUTO_CLEAR) + + + def test_sess_cache_no_internal_lookup(self): + """ + The value of :py:obj:`OpenSSL.SSL.SESS_CACHE_NO_INTERNAL_LOOKUP` 0x100, + the value of :py:obj:`SSL_SESS_CACHE_NO_INTERNAL_LOOKUP` defined by + ``openssl/ssl.h``. + """ + self.assertEqual(0x100, SESS_CACHE_NO_INTERNAL_LOOKUP) + + + def test_sess_cache_no_internal_store(self): + """ + The value of :py:obj:`OpenSSL.SSL.SESS_CACHE_NO_INTERNAL_STORE` 0x200, + the value of :py:obj:`SSL_SESS_CACHE_NO_INTERNAL_STORE` defined by + ``openssl/ssl.h``. + """ + self.assertEqual(0x200, SESS_CACHE_NO_INTERNAL_STORE) + + + def test_sess_cache_no_internal(self): + """ + The value of :py:obj:`OpenSSL.SSL.SESS_CACHE_NO_INTERNAL` 0x300, the + value of :py:obj:`SSL_SESS_CACHE_NO_INTERNAL` defined by + ``openssl/ssl.h``. + """ + self.assertEqual(0x300, SESS_CACHE_NO_INTERNAL) + + + +class MemoryBIOTests(TestCase, _LoopbackMixin): + """ + Tests for :py:obj:`OpenSSL.SSL.Connection` using a memory BIO. + """ + def _server(self, sock): + """ + Create a new server-side SSL :py:obj:`Connection` object wrapped around + :py:obj:`sock`. + """ + # Create the server side Connection. 
This is mostly setup boilerplate + # - use TLSv1, use a particular certificate, etc. + server_ctx = Context(TLSv1_METHOD) + server_ctx.set_options(OP_NO_SSLv2 | OP_NO_SSLv3 | OP_SINGLE_DH_USE ) + server_ctx.set_verify(VERIFY_PEER|VERIFY_FAIL_IF_NO_PEER_CERT|VERIFY_CLIENT_ONCE, verify_cb) + server_store = server_ctx.get_cert_store() + server_ctx.use_privatekey(load_privatekey(FILETYPE_PEM, server_key_pem)) + server_ctx.use_certificate(load_certificate(FILETYPE_PEM, server_cert_pem)) + server_ctx.check_privatekey() + server_store.add_cert(load_certificate(FILETYPE_PEM, root_cert_pem)) + # Here the Connection is actually created. If None is passed as the 2nd + # parameter, it indicates a memory BIO should be created. + server_conn = Connection(server_ctx, sock) + server_conn.set_accept_state() + return server_conn + + + def _client(self, sock): + """ + Create a new client-side SSL :py:obj:`Connection` object wrapped around + :py:obj:`sock`. + """ + # Now create the client side Connection. Similar boilerplate to the + # above. + client_ctx = Context(TLSv1_METHOD) + client_ctx.set_options(OP_NO_SSLv2 | OP_NO_SSLv3 | OP_SINGLE_DH_USE ) + client_ctx.set_verify(VERIFY_PEER|VERIFY_FAIL_IF_NO_PEER_CERT|VERIFY_CLIENT_ONCE, verify_cb) + client_store = client_ctx.get_cert_store() + client_ctx.use_privatekey(load_privatekey(FILETYPE_PEM, client_key_pem)) + client_ctx.use_certificate(load_certificate(FILETYPE_PEM, client_cert_pem)) + client_ctx.check_privatekey() + client_store.add_cert(load_certificate(FILETYPE_PEM, root_cert_pem)) + client_conn = Connection(client_ctx, sock) + client_conn.set_connect_state() + return client_conn + + + def test_memoryConnect(self): + """ + Two :py:obj:`Connection`s which use memory BIOs can be manually connected by + reading from the output of each and writing those bytes to the input of + the other and in this way establish a connection and exchange + application-level bytes with each other. + """ + server_conn = self._server(None) + client_conn = self._client(None) + + # There should be no key or nonces yet. + self.assertIdentical(server_conn.master_key(), None) + self.assertIdentical(server_conn.client_random(), None) + self.assertIdentical(server_conn.server_random(), None) + + # First, the handshake needs to happen. We'll deliver bytes back and + # forth between the client and server until neither of them feels like + # speaking any more. + self.assertIdentical( + self._interactInMemory(client_conn, server_conn), None) + + # Now that the handshake is done, there should be a key and nonces. + self.assertNotIdentical(server_conn.master_key(), None) + self.assertNotIdentical(server_conn.client_random(), None) + self.assertNotIdentical(server_conn.server_random(), None) + self.assertEquals(server_conn.client_random(), client_conn.client_random()) + self.assertEquals(server_conn.server_random(), client_conn.server_random()) + self.assertNotEquals(server_conn.client_random(), server_conn.server_random()) + self.assertNotEquals(client_conn.client_random(), client_conn.server_random()) + + # Here are the bytes we'll try to send. 
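The docstring and comments above describe the memory-BIO technique only in prose: each side's outgoing BIO is drained with bio_read and the bytes are fed to the other side's incoming BIO with bio_write until neither side has anything more to say. As a rough illustration of what a helper such as _interactInMemory has to do during the handshake, here is a minimal sketch using only the public pyOpenSSL Connection API; the helper names and the 4096-byte read size are assumptions for illustration, not part of this test suite.

from OpenSSL.SSL import WantReadError

def _pump(a, b):
    # Move pending ciphertext from each connection's outgoing memory BIO
    # into the other connection's incoming memory BIO.  Returns True if
    # any bytes moved, False once both sides are drained.
    moved = False
    for src, dst in ((a, b), (b, a)):
        try:
            data = src.bio_read(4096)
        except WantReadError:
            continue
        if data:
            dst.bio_write(data)
            moved = True
    return moved

def _handshake_in_memory(client, server):
    # Let each side attempt to make progress, then shuttle the bytes it
    # produced across, until neither side has any further output.
    while True:
        for conn in (client, server):
            try:
                conn.do_handshake()
            except WantReadError:
                pass
        if not _pump(client, server):
            break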
+ important_message = b('One if by land, two if by sea.') + + server_conn.write(important_message) + self.assertEquals( + self._interactInMemory(client_conn, server_conn), + (client_conn, important_message)) + + client_conn.write(important_message[::-1]) + self.assertEquals( + self._interactInMemory(client_conn, server_conn), + (server_conn, important_message[::-1])) + + + def test_socketConnect(self): + """ + Just like :py:obj:`test_memoryConnect` but with an actual socket. + + This is primarily to rule out the memory BIO code as the source of + any problems encountered while passing data over a :py:obj:`Connection` (if + this test fails, there must be a problem outside the memory BIO + code, as no memory BIO is involved here). Even though this isn't a + memory BIO test, it's convenient to have it here. + """ + server_conn, client_conn = self._loopback() + + important_message = b("Help me Obi Wan Kenobi, you're my only hope.") + client_conn.send(important_message) + msg = server_conn.recv(1024) + self.assertEqual(msg, important_message) + + # Again in the other direction, just for fun. + important_message = important_message[::-1] + server_conn.send(important_message) + msg = client_conn.recv(1024) + self.assertEqual(msg, important_message) + + + def test_socketOverridesMemory(self): + """ + Test that :py:obj:`OpenSSL.SSL.bio_read` and :py:obj:`OpenSSL.SSL.bio_write` don't + work on :py:obj:`OpenSSL.SSL.Connection`() that use sockets. + """ + context = Context(SSLv3_METHOD) + client = socket() + clientSSL = Connection(context, client) + self.assertRaises( TypeError, clientSSL.bio_read, 100) + self.assertRaises( TypeError, clientSSL.bio_write, "foo") + self.assertRaises( TypeError, clientSSL.bio_shutdown ) + + + def test_outgoingOverflow(self): + """ + If more bytes than can be written to the memory BIO are passed to + :py:obj:`Connection.send` at once, the number of bytes which were written is + returned and that many bytes from the beginning of the input can be + read from the other end of the connection. + """ + server = self._server(None) + client = self._client(None) + + self._interactInMemory(client, server) + + size = 2 ** 15 + sent = client.send(b"x" * size) + # Sanity check. We're trying to test what happens when the entire + # input can't be sent. If the entire input was sent, this test is + # meaningless. + self.assertTrue(sent < size) + + receiver, received = self._interactInMemory(client, server) + self.assertIdentical(receiver, server) + + # We can rely on all of these bytes being received at once because + # _loopback passes 2 ** 16 to recv - more than 2 ** 15. + self.assertEquals(len(received), sent) + + + def test_shutdown(self): + """ + :py:obj:`Connection.bio_shutdown` signals the end of the data stream from + which the :py:obj:`Connection` reads. + """ + server = self._server(None) + server.bio_shutdown() + e = self.assertRaises(Error, server.recv, 1024) + # We don't want WantReadError or ZeroReturnError or anything - it's a + # handshake failure. + self.assertEquals(e.__class__, Error) + + + def test_unexpectedEndOfFile(self): + """ + If the connection is lost before an orderly SSL shutdown occurs, + :py:obj:`OpenSSL.SSL.SysCallError` is raised with a message of + "Unexpected EOF". 
+ """ + server_conn, client_conn = self._loopback() + client_conn.sock_shutdown(SHUT_RDWR) + exc = self.assertRaises(SysCallError, server_conn.recv, 1024) + self.assertEqual(exc.args, (-1, "Unexpected EOF")) + + + def _check_client_ca_list(self, func): + """ + Verify the return value of the :py:obj:`get_client_ca_list` method for server and client connections. + + :param func: A function which will be called with the server context + before the client and server are connected to each other. This + function should specify a list of CAs for the server to send to the + client and return that same list. The list will be used to verify + that :py:obj:`get_client_ca_list` returns the proper value at various + times. + """ + server = self._server(None) + client = self._client(None) + self.assertEqual(client.get_client_ca_list(), []) + self.assertEqual(server.get_client_ca_list(), []) + ctx = server.get_context() + expected = func(ctx) + self.assertEqual(client.get_client_ca_list(), []) + self.assertEqual(server.get_client_ca_list(), expected) + self._interactInMemory(client, server) + self.assertEqual(client.get_client_ca_list(), expected) + self.assertEqual(server.get_client_ca_list(), expected) + + + def test_set_client_ca_list_errors(self): + """ + :py:obj:`Context.set_client_ca_list` raises a :py:obj:`TypeError` if called with a + non-list or a list that contains objects other than X509Names. + """ + ctx = Context(TLSv1_METHOD) + self.assertRaises(TypeError, ctx.set_client_ca_list, "spam") + self.assertRaises(TypeError, ctx.set_client_ca_list, ["spam"]) + self.assertIdentical(ctx.set_client_ca_list([]), None) + + + def test_set_empty_ca_list(self): + """ + If passed an empty list, :py:obj:`Context.set_client_ca_list` configures the + context to send no CA names to the client and, on both the server and + client sides, :py:obj:`Connection.get_client_ca_list` returns an empty list + after the connection is set up. + """ + def no_ca(ctx): + ctx.set_client_ca_list([]) + return [] + self._check_client_ca_list(no_ca) + + + def test_set_one_ca_list(self): + """ + If passed a list containing a single X509Name, + :py:obj:`Context.set_client_ca_list` configures the context to send that CA + name to the client and, on both the server and client sides, + :py:obj:`Connection.get_client_ca_list` returns a list containing that + X509Name after the connection is set up. + """ + cacert = load_certificate(FILETYPE_PEM, root_cert_pem) + cadesc = cacert.get_subject() + def single_ca(ctx): + ctx.set_client_ca_list([cadesc]) + return [cadesc] + self._check_client_ca_list(single_ca) + + + def test_set_multiple_ca_list(self): + """ + If passed a list containing multiple X509Name objects, + :py:obj:`Context.set_client_ca_list` configures the context to send those CA + names to the client and, on both the server and client sides, + :py:obj:`Connection.get_client_ca_list` returns a list containing those + X509Names after the connection is set up. + """ + secert = load_certificate(FILETYPE_PEM, server_cert_pem) + clcert = load_certificate(FILETYPE_PEM, server_cert_pem) + + sedesc = secert.get_subject() + cldesc = clcert.get_subject() + + def multiple_ca(ctx): + L = [sedesc, cldesc] + ctx.set_client_ca_list(L) + return L + self._check_client_ca_list(multiple_ca) + + + def test_reset_ca_list(self): + """ + If called multiple times, only the X509Names passed to the final call + of :py:obj:`Context.set_client_ca_list` are used to configure the CA names + sent to the client. 
+ """ + cacert = load_certificate(FILETYPE_PEM, root_cert_pem) + secert = load_certificate(FILETYPE_PEM, server_cert_pem) + clcert = load_certificate(FILETYPE_PEM, server_cert_pem) + + cadesc = cacert.get_subject() + sedesc = secert.get_subject() + cldesc = clcert.get_subject() + + def changed_ca(ctx): + ctx.set_client_ca_list([sedesc, cldesc]) + ctx.set_client_ca_list([cadesc]) + return [cadesc] + self._check_client_ca_list(changed_ca) + + + def test_mutated_ca_list(self): + """ + If the list passed to :py:obj:`Context.set_client_ca_list` is mutated + afterwards, this does not affect the list of CA names sent to the + client. + """ + cacert = load_certificate(FILETYPE_PEM, root_cert_pem) + secert = load_certificate(FILETYPE_PEM, server_cert_pem) + + cadesc = cacert.get_subject() + sedesc = secert.get_subject() + + def mutated_ca(ctx): + L = [cadesc] + ctx.set_client_ca_list([cadesc]) + L.append(sedesc) + return [cadesc] + self._check_client_ca_list(mutated_ca) + + + def test_add_client_ca_errors(self): + """ + :py:obj:`Context.add_client_ca` raises :py:obj:`TypeError` if called with a non-X509 + object or with a number of arguments other than one. + """ + ctx = Context(TLSv1_METHOD) + cacert = load_certificate(FILETYPE_PEM, root_cert_pem) + self.assertRaises(TypeError, ctx.add_client_ca) + self.assertRaises(TypeError, ctx.add_client_ca, "spam") + self.assertRaises(TypeError, ctx.add_client_ca, cacert, cacert) + + + def test_one_add_client_ca(self): + """ + A certificate's subject can be added as a CA to be sent to the client + with :py:obj:`Context.add_client_ca`. + """ + cacert = load_certificate(FILETYPE_PEM, root_cert_pem) + cadesc = cacert.get_subject() + def single_ca(ctx): + ctx.add_client_ca(cacert) + return [cadesc] + self._check_client_ca_list(single_ca) + + + def test_multiple_add_client_ca(self): + """ + Multiple CA names can be sent to the client by calling + :py:obj:`Context.add_client_ca` with multiple X509 objects. + """ + cacert = load_certificate(FILETYPE_PEM, root_cert_pem) + secert = load_certificate(FILETYPE_PEM, server_cert_pem) + + cadesc = cacert.get_subject() + sedesc = secert.get_subject() + + def multiple_ca(ctx): + ctx.add_client_ca(cacert) + ctx.add_client_ca(secert) + return [cadesc, sedesc] + self._check_client_ca_list(multiple_ca) + + + def test_set_and_add_client_ca(self): + """ + A call to :py:obj:`Context.set_client_ca_list` followed by a call to + :py:obj:`Context.add_client_ca` results in using the CA names from the first + call and the CA name from the second call. + """ + cacert = load_certificate(FILETYPE_PEM, root_cert_pem) + secert = load_certificate(FILETYPE_PEM, server_cert_pem) + clcert = load_certificate(FILETYPE_PEM, server_cert_pem) + + cadesc = cacert.get_subject() + sedesc = secert.get_subject() + cldesc = clcert.get_subject() + + def mixed_set_add_ca(ctx): + ctx.set_client_ca_list([cadesc, sedesc]) + ctx.add_client_ca(clcert) + return [cadesc, sedesc, cldesc] + self._check_client_ca_list(mixed_set_add_ca) + + + def test_set_after_add_client_ca(self): + """ + A call to :py:obj:`Context.set_client_ca_list` after a call to + :py:obj:`Context.add_client_ca` replaces the CA name specified by the former + call with the names specified by the latter cal. 
+ """ + cacert = load_certificate(FILETYPE_PEM, root_cert_pem) + secert = load_certificate(FILETYPE_PEM, server_cert_pem) + clcert = load_certificate(FILETYPE_PEM, server_cert_pem) + + cadesc = cacert.get_subject() + sedesc = secert.get_subject() + + def set_replaces_add_ca(ctx): + ctx.add_client_ca(clcert) + ctx.set_client_ca_list([cadesc]) + ctx.add_client_ca(secert) + return [cadesc, sedesc] + self._check_client_ca_list(set_replaces_add_ca) + + + +class ConnectionBIOTests(TestCase): + """ + Tests for :py:obj:`Connection.bio_read` and :py:obj:`Connection.bio_write`. + """ + def test_wantReadError(self): + """ + :py:obj:`Connection.bio_read` raises :py:obj:`OpenSSL.SSL.WantReadError` + if there are no bytes available to be read from the BIO. + """ + ctx = Context(TLSv1_METHOD) + conn = Connection(ctx, None) + self.assertRaises(WantReadError, conn.bio_read, 1024) + + + def test_buffer_size(self): + """ + :py:obj:`Connection.bio_read` accepts an integer giving the maximum + number of bytes to read and return. + """ + ctx = Context(TLSv1_METHOD) + conn = Connection(ctx, None) + conn.set_connect_state() + try: + conn.do_handshake() + except WantReadError: + pass + data = conn.bio_read(2) + self.assertEqual(2, len(data)) + + + if not PY3: + def test_buffer_size_long(self): + """ + On Python 2 :py:obj:`Connection.bio_read` accepts values of type + :py:obj:`long` as well as :py:obj:`int`. + """ + ctx = Context(TLSv1_METHOD) + conn = Connection(ctx, None) + conn.set_connect_state() + try: + conn.do_handshake() + except WantReadError: + pass + data = conn.bio_read(long(2)) + self.assertEqual(2, len(data)) + + + + +class InfoConstantTests(TestCase): + """ + Tests for assorted constants exposed for use in info callbacks. + """ + def test_integers(self): + """ + All of the info constants are integers. + + This is a very weak test. It would be nice to have one that actually + verifies that as certain info events happen, the value passed to the + info callback matches up with the constant exposed by OpenSSL.SSL. + """ + for const in [ + SSL_ST_CONNECT, SSL_ST_ACCEPT, SSL_ST_MASK, SSL_ST_INIT, + SSL_ST_BEFORE, SSL_ST_OK, SSL_ST_RENEGOTIATE, + SSL_CB_LOOP, SSL_CB_EXIT, SSL_CB_READ, SSL_CB_WRITE, SSL_CB_ALERT, + SSL_CB_READ_ALERT, SSL_CB_WRITE_ALERT, SSL_CB_ACCEPT_LOOP, + SSL_CB_ACCEPT_EXIT, SSL_CB_CONNECT_LOOP, SSL_CB_CONNECT_EXIT, + SSL_CB_HANDSHAKE_START, SSL_CB_HANDSHAKE_DONE]: + + self.assertTrue(isinstance(const, int)) + + +if __name__ == '__main__': + main() diff --git a/Lib/site-packages/OpenSSL/test/test_tsafe.py b/Lib/site-packages/OpenSSL/test/test_tsafe.py new file mode 100644 index 0000000..0456957 --- /dev/null +++ b/Lib/site-packages/OpenSSL/test/test_tsafe.py @@ -0,0 +1,24 @@ +# Copyright (C) Jean-Paul Calderone +# See LICENSE for details. + +""" +Unit tests for :py:obj:`OpenSSL.tsafe`. +""" + +from OpenSSL.SSL import TLSv1_METHOD, Context +from OpenSSL.tsafe import Connection +from OpenSSL.test.util import TestCase + + +class ConnectionTest(TestCase): + """ + Tests for :py:obj:`OpenSSL.tsafe.Connection`. + """ + def test_instantiation(self): + """ + :py:obj:`OpenSSL.tsafe.Connection` can be instantiated. + """ + # The following line should not throw an error. This isn't an ideal + # test. It would be great to refactor the other Connection tests so + # they could automatically be applied to this class too. 
+ Connection(Context(TLSv1_METHOD), None) diff --git a/Lib/site-packages/OpenSSL/test/test_util.py b/Lib/site-packages/OpenSSL/test/test_util.py new file mode 100644 index 0000000..8d92a3c --- /dev/null +++ b/Lib/site-packages/OpenSSL/test/test_util.py @@ -0,0 +1,17 @@ +from OpenSSL._util import exception_from_error_queue, lib +from OpenSSL.test.util import TestCase + + + +class ErrorTests(TestCase): + """ + Tests for handling of certain OpenSSL error cases. + """ + def test_exception_from_error_queue_nonexistent_reason(self): + """ + :py:func:`exception_from_error_queue` raises ``ValueError`` when it + encounters an OpenSSL error code which does not have a reason string. + """ + lib.ERR_put_error(lib.ERR_LIB_EVP, 0, 1112, b"", 10) + exc = self.assertRaises(ValueError, exception_from_error_queue, ValueError) + self.assertEqual(exc.args[0][0][2], "") diff --git a/Lib/site-packages/OpenSSL/test/util.py b/Lib/site-packages/OpenSSL/test/util.py new file mode 100644 index 0000000..b8be91d --- /dev/null +++ b/Lib/site-packages/OpenSSL/test/util.py @@ -0,0 +1,463 @@ +# Copyright (C) Jean-Paul Calderone +# Copyright (C) Twisted Matrix Laboratories. +# See LICENSE for details. + +""" +Helpers for the OpenSSL test suite, largely copied from +U{Twisted}. +""" + +import shutil +import traceback +import os, os.path +from tempfile import mktemp +from unittest import TestCase +import sys + +from six import PY3 + +from OpenSSL._util import exception_from_error_queue +from OpenSSL.crypto import Error + +try: + import memdbg +except Exception: + class _memdbg(object): heap = None + memdbg = _memdbg() + +from OpenSSL._util import ffi, lib, byte_string as b + + +# This is the UTF-8 encoding of the SNOWMAN unicode code point. +NON_ASCII = b("\xe2\x98\x83").decode("utf-8") + + +class TestCase(TestCase): + """ + :py:class:`TestCase` adds useful testing functionality beyond what is available + from the standard library :py:class:`unittest.TestCase`. + """ + def run(self, result): + run = super(TestCase, self).run + if memdbg.heap is None: + return run(result) + + # Run the test as usual + before = set(memdbg.heap) + run(result) + + # Clean up some long-lived allocations so they won't be reported as + # memory leaks. + lib.CRYPTO_cleanup_all_ex_data() + lib.ERR_remove_thread_state(ffi.NULL) + after = set(memdbg.heap) + + if not after - before: + # No leaks, fast succeed + return + + if result.wasSuccessful(): + # If it passed, run it again with memory debugging + before = set(memdbg.heap) + run(result) + + # Clean up some long-lived allocations so they won't be reported as + # memory leaks. + lib.CRYPTO_cleanup_all_ex_data() + lib.ERR_remove_thread_state(ffi.NULL) + + after = set(memdbg.heap) + + self._reportLeaks(after - before, result) + + + def _reportLeaks(self, leaks, result): + def format_leak(p): + stacks = memdbg.heap[p] + # Eventually look at multiple stacks for the realloc() case. For + # now just look at the original allocation location. 
+ (size, python_stack, c_stack) = stacks[0] + + stack = traceback.format_list(python_stack)[:-1] + + # c_stack looks something like this (interesting parts indicated + # with inserted arrows not part of the data): + # + # /home/exarkun/Projects/pyOpenSSL/branches/use-opentls/__pycache__/_cffi__x89095113xb9185b9b.so(+0x12cf) [0x7fe2e20582cf] + # /home/exarkun/Projects/cpython/2.7/python(PyCFunction_Call+0x8b) [0x56265a] + # /home/exarkun/Projects/cpython/2.7/python() [0x4d5f52] + # /home/exarkun/Projects/cpython/2.7/python(PyEval_EvalFrameEx+0x753b) [0x4d0e1e] + # /home/exarkun/Projects/cpython/2.7/python() [0x4d6419] + # /home/exarkun/Projects/cpython/2.7/python() [0x4d6129] + # /home/exarkun/Projects/cpython/2.7/python(PyEval_EvalFrameEx+0x753b) [0x4d0e1e] + # /home/exarkun/Projects/cpython/2.7/python(PyEval_EvalCodeEx+0x1043) [0x4d3726] + # /home/exarkun/Projects/cpython/2.7/python() [0x55fd51] + # /home/exarkun/Projects/cpython/2.7/python(PyObject_Call+0x7e) [0x420ee6] + # /home/exarkun/Projects/cpython/2.7/python(PyEval_CallObjectWithKeywords+0x158) [0x4d56ec] + # /home/exarkun/.local/lib/python2.7/site-packages/cffi-0.5-py2.7-linux-x86_64.egg/_cffi_backend.so(+0xe96e) [0x7fe2e38be96e] + # /usr/lib/x86_64-linux-gnu/libffi.so.6(ffi_closure_unix64_inner+0x1b9) [0x7fe2e36ad819] + # /usr/lib/x86_64-linux-gnu/libffi.so.6(ffi_closure_unix64+0x46) [0x7fe2e36adb7c] + # /lib/x86_64-linux-gnu/libcrypto.so.1.0.0(CRYPTO_malloc+0x64) [0x7fe2e1cef784] <------ end interesting + # /lib/x86_64-linux-gnu/libcrypto.so.1.0.0(lh_insert+0x16b) [0x7fe2e1d6a24b] . + # /lib/x86_64-linux-gnu/libcrypto.so.1.0.0(+0x61c18) [0x7fe2e1cf0c18] . + # /lib/x86_64-linux-gnu/libcrypto.so.1.0.0(+0x625ec) [0x7fe2e1cf15ec] . + # /lib/x86_64-linux-gnu/libcrypto.so.1.0.0(DSA_new_method+0xe6) [0x7fe2e1d524d6] . + # /lib/x86_64-linux-gnu/libcrypto.so.1.0.0(DSA_generate_parameters+0x3a) [0x7fe2e1d5364a] <------ begin interesting + # /home/exarkun/Projects/opentls/trunk/tls/c/__pycache__/_cffi__x305d4698xb539baaa.so(+0x1f397) [0x7fe2df84d397] + # /home/exarkun/Projects/cpython/2.7/python(PyCFunction_Call+0x8b) [0x56265a] + # /home/exarkun/Projects/cpython/2.7/python() [0x4d5f52] + # /home/exarkun/Projects/cpython/2.7/python(PyEval_EvalFrameEx+0x753b) [0x4d0e1e] + # /home/exarkun/Projects/cpython/2.7/python() [0x4d6419] + # ... + # + # Notice the stack is upside down compared to a Python traceback. + # Identify the start and end of interesting bits and stuff it into the stack we report. + + saved = list(c_stack) + + # Figure the first interesting frame will be after a the cffi-compiled module + while c_stack and '/__pycache__/_cffi__' not in c_stack[-1]: + c_stack.pop() + + # Figure the last interesting frame will always be CRYPTO_malloc, + # since that's where we hooked in to things. 
+ while c_stack and 'CRYPTO_malloc' not in c_stack[0] and 'CRYPTO_realloc' not in c_stack[0]: + c_stack.pop(0) + + if c_stack: + c_stack.reverse() + else: + c_stack = saved[::-1] + stack.extend([frame + "\n" for frame in c_stack]) + + stack.insert(0, "Leaked (%s) at:\n") + return "".join(stack) + + if leaks: + unique_leaks = {} + for p in leaks: + size = memdbg.heap[p][-1][0] + new_leak = format_leak(p) + if new_leak not in unique_leaks: + unique_leaks[new_leak] = [(size, p)] + else: + unique_leaks[new_leak].append((size, p)) + memdbg.free(p) + + for (stack, allocs) in unique_leaks.iteritems(): + allocs_accum = [] + for (size, pointer) in allocs: + + addr = int(ffi.cast('uintptr_t', pointer)) + allocs_accum.append("%d@0x%x" % (size, addr)) + allocs_report = ", ".join(sorted(allocs_accum)) + + result.addError( + self, + (None, Exception(stack % (allocs_report,)), None)) + + + def tearDown(self): + """ + Clean up any files or directories created using :py:meth:`TestCase.mktemp`. + Subclasses must invoke this method if they override it or the + cleanup will not occur. + """ + if False and self._temporaryFiles is not None: + for temp in self._temporaryFiles: + if os.path.isdir(temp): + shutil.rmtree(temp) + elif os.path.exists(temp): + os.unlink(temp) + try: + exception_from_error_queue(Error) + except Error: + e = sys.exc_info()[1] + if e.args != ([],): + self.fail("Left over errors in OpenSSL error queue: " + repr(e)) + + + def assertIsInstance(self, instance, classOrTuple, message=None): + """ + Fail if C{instance} is not an instance of the given class or of + one of the given classes. + + @param instance: the object to test the type (first argument of the + C{isinstance} call). + @type instance: any. + @param classOrTuple: the class or classes to test against (second + argument of the C{isinstance} call). + @type classOrTuple: class, type, or tuple. + + @param message: Custom text to include in the exception text if the + assertion fails. + """ + if not isinstance(instance, classOrTuple): + if message is None: + suffix = "" + else: + suffix = ": " + message + self.fail("%r is not an instance of %s%s" % ( + instance, classOrTuple, suffix)) + + + def failUnlessIn(self, containee, container, msg=None): + """ + Fail the test if :py:data:`containee` is not found in :py:data:`container`. + + :param containee: the value that should be in :py:class:`container` + :param container: a sequence type, or in the case of a mapping type, + will follow semantics of 'if key in dict.keys()' + :param msg: if msg is None, then the failure message will be + '%r not in %r' % (first, second) + """ + if containee not in container: + raise self.failureException(msg or "%r not in %r" + % (containee, container)) + return containee + assertIn = failUnlessIn + + def assertNotIn(self, containee, container, msg=None): + """ + Fail the test if C{containee} is found in C{container}. + + @param containee: the value that should not be in C{container} + @param container: a sequence type, or in the case of a mapping type, + will follow semantics of 'if key in dict.keys()' + @param msg: if msg is None, then the failure message will be + '%r in %r' % (first, second) + """ + if containee in container: + raise self.failureException(msg or "%r in %r" + % (containee, container)) + return containee + failIfIn = assertNotIn + + + def assertIs(self, first, second, msg=None): + """ + Fail the test if :py:data:`first` is not :py:data:`second`. This is an + obect-identity-equality test, not an object equality + (i.e. 
:py:func:`__eq__`) test. + + :param msg: if msg is None, then the failure message will be + '%r is not %r' % (first, second) + """ + if first is not second: + raise self.failureException(msg or '%r is not %r' % (first, second)) + return first + assertIdentical = failUnlessIdentical = assertIs + + + def assertIsNot(self, first, second, msg=None): + """ + Fail the test if :py:data:`first` is :py:data:`second`. This is an + obect-identity-equality test, not an object equality + (i.e. :py:func:`__eq__`) test. + + :param msg: if msg is None, then the failure message will be + '%r is %r' % (first, second) + """ + if first is second: + raise self.failureException(msg or '%r is %r' % (first, second)) + return first + assertNotIdentical = failIfIdentical = assertIsNot + + + def failUnlessRaises(self, exception, f, *args, **kwargs): + """ + Fail the test unless calling the function :py:data:`f` with the given + :py:data:`args` and :py:data:`kwargs` raises :py:data:`exception`. The + failure will report the traceback and call stack of the unexpected + exception. + + :param exception: exception type that is to be expected + :param f: the function to call + + :return: The raised exception instance, if it is of the given type. + :raise self.failureException: Raised if the function call does + not raise an exception or if it raises an exception of a + different type. + """ + try: + result = f(*args, **kwargs) + except exception: + inst = sys.exc_info()[1] + return inst + except: + raise self.failureException('%s raised instead of %s' + % (sys.exc_info()[0], + exception.__name__, + )) + else: + raise self.failureException('%s not raised (%r returned)' + % (exception.__name__, result)) + assertRaises = failUnlessRaises + + + _temporaryFiles = None + def mktemp(self): + """ + Pathetic substitute for twisted.trial.unittest.TestCase.mktemp. + """ + if self._temporaryFiles is None: + self._temporaryFiles = [] + temp = b(mktemp(dir=".")) + self._temporaryFiles.append(temp) + return temp + + + # Other stuff + def assertConsistentType(self, theType, name, *constructionArgs): + """ + Perform various assertions about :py:data:`theType` to ensure that it is a + well-defined type. This is useful for extension types, where it's + pretty easy to do something wacky. If something about the type is + unusual, an exception will be raised. + + :param theType: The type object about which to make assertions. + :param name: A string giving the name of the type. + :param constructionArgs: Positional arguments to use with :py:data:`theType` to + create an instance of it. + """ + self.assertEqual(theType.__name__, name) + self.assertTrue(isinstance(theType, type)) + instance = theType(*constructionArgs) + self.assertIdentical(type(instance), theType) + + + +class EqualityTestsMixin(object): + """ + A mixin defining tests for the standard implementation of C{==} and C{!=}. + """ + def anInstance(self): + """ + Return an instance of the class under test. Each call to this method + must return a different object. All objects returned must be equal to + each other. + """ + raise NotImplementedError() + + + def anotherInstance(self): + """ + Return an instance of the class under test. Each call to this method + must return a different object. The objects must not be equal to the + objects returned by C{anInstance}. They may or may not be equal to + each other (they will not be compared against each other). 
+ """ + raise NotImplementedError() + + + def test_identicalEq(self): + """ + An object compares equal to itself using the C{==} operator. + """ + o = self.anInstance() + self.assertTrue(o == o) + + + def test_identicalNe(self): + """ + An object doesn't compare not equal to itself using the C{!=} operator. + """ + o = self.anInstance() + self.assertFalse(o != o) + + + def test_sameEq(self): + """ + Two objects that are equal to each other compare equal to each other + using the C{==} operator. + """ + a = self.anInstance() + b = self.anInstance() + self.assertTrue(a == b) + + + def test_sameNe(self): + """ + Two objects that are equal to each other do not compare not equal to + each other using the C{!=} operator. + """ + a = self.anInstance() + b = self.anInstance() + self.assertFalse(a != b) + + + def test_differentEq(self): + """ + Two objects that are not equal to each other do not compare equal to + each other using the C{==} operator. + """ + a = self.anInstance() + b = self.anotherInstance() + self.assertFalse(a == b) + + + def test_differentNe(self): + """ + Two objects that are not equal to each other compare not equal to each + other using the C{!=} operator. + """ + a = self.anInstance() + b = self.anotherInstance() + self.assertTrue(a != b) + + + def test_anotherTypeEq(self): + """ + The object does not compare equal to an object of an unrelated type + (which does not implement the comparison) using the C{==} operator. + """ + a = self.anInstance() + b = object() + self.assertFalse(a == b) + + + def test_anotherTypeNe(self): + """ + The object compares not equal to an object of an unrelated type (which + does not implement the comparison) using the C{!=} operator. + """ + a = self.anInstance() + b = object() + self.assertTrue(a != b) + + + def test_delegatedEq(self): + """ + The result of comparison using C{==} is delegated to the right-hand + operand if it is of an unrelated type. + """ + class Delegate(object): + def __eq__(self, other): + # Do something crazy and obvious. + return [self] + + a = self.anInstance() + b = Delegate() + self.assertEqual(a == b, [b]) + + + def test_delegateNe(self): + """ + The result of comparison using C{!=} is delegated to the right-hand + operand if it is of an unrelated type. + """ + class Delegate(object): + def __ne__(self, other): + # Do something crazy and obvious. + return [self] + + a = self.anInstance() + b = Delegate() + self.assertEqual(a != b, [b]) + + +# The type name expected in warnings about using the wrong string type. 
+if PY3: + WARNING_TYPE_EXPECTED = "str" +else: + WARNING_TYPE_EXPECTED = "unicode" diff --git a/Lib/site-packages/OpenSSL/tsafe.py b/Lib/site-packages/OpenSSL/tsafe.py new file mode 100644 index 0000000..3a9c710 --- /dev/null +++ b/Lib/site-packages/OpenSSL/tsafe.py @@ -0,0 +1,28 @@ +from OpenSSL import SSL +_ssl = SSL +del SSL + +import threading +_RLock = threading.RLock +del threading + +class Connection: + def __init__(self, *args): + self._ssl_conn = _ssl.Connection(*args) + self._lock = _RLock() + + for f in ('get_context', 'pending', 'send', 'write', 'recv', 'read', + 'renegotiate', 'bind', 'listen', 'connect', 'accept', + 'setblocking', 'fileno', 'shutdown', 'close', 'get_cipher_list', + 'getpeername', 'getsockname', 'getsockopt', 'setsockopt', + 'makefile', 'get_app_data', 'set_app_data', 'state_string', + 'sock_shutdown', 'get_peer_certificate', 'get_peer_cert_chain', 'want_read', + 'want_write', 'set_connect_state', 'set_accept_state', + 'connect_ex', 'sendall'): + exec("""def %s(self, *args): + self._lock.acquire() + try: + return self._ssl_conn.%s(*args) + finally: + self._lock.release()\n""" % (f, f)) + diff --git a/Lib/site-packages/OpenSSL/version.py b/Lib/site-packages/OpenSSL/version.py new file mode 100644 index 0000000..eb3b736 --- /dev/null +++ b/Lib/site-packages/OpenSSL/version.py @@ -0,0 +1,9 @@ +# Copyright (C) AB Strakt +# Copyright (C) Jean-Paul Calderone +# See LICENSE for details. + +""" +pyOpenSSL - A simple wrapper around the OpenSSL library +""" + +__version__ = '0.15.1' diff --git a/Lib/site-packages/PIL/BdfFontFile.py b/Lib/site-packages/PIL/BdfFontFile.py new file mode 100644 index 0000000..e6cc22f --- /dev/null +++ b/Lib/site-packages/PIL/BdfFontFile.py @@ -0,0 +1,132 @@ +# +# The Python Imaging Library +# $Id$ +# +# bitmap distribution font (bdf) file parser +# +# history: +# 1996-05-16 fl created (as bdf2pil) +# 1997-08-25 fl converted to FontFile driver +# 2001-05-25 fl removed bogus __init__ call +# 2002-11-20 fl robustification (from Kevin Cazabon, Dmitry Vasiliev) +# 2003-04-22 fl more robustification (from Graham Dumpleton) +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1997-2003 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. 
+# + +from PIL import Image +from PIL import FontFile + + +# -------------------------------------------------------------------- +# parse X Bitmap Distribution Format (BDF) +# -------------------------------------------------------------------- + +bdf_slant = { + "R": "Roman", + "I": "Italic", + "O": "Oblique", + "RI": "Reverse Italic", + "RO": "Reverse Oblique", + "OT": "Other" +} + +bdf_spacing = { + "P": "Proportional", + "M": "Monospaced", + "C": "Cell" +} + + +def bdf_char(f): + # skip to STARTCHAR + while True: + s = f.readline() + if not s: + return None + if s[:9] == b"STARTCHAR": + break + id = s[9:].strip().decode('ascii') + + # load symbol properties + props = {} + while True: + s = f.readline() + if not s or s[:6] == b"BITMAP": + break + i = s.find(b" ") + props[s[:i].decode('ascii')] = s[i+1:-1].decode('ascii') + + # load bitmap + bitmap = [] + while True: + s = f.readline() + if not s or s[:7] == b"ENDCHAR": + break + bitmap.append(s[:-1]) + bitmap = b"".join(bitmap) + + [x, y, l, d] = [int(p) for p in props["BBX"].split()] + [dx, dy] = [int(p) for p in props["DWIDTH"].split()] + + bbox = (dx, dy), (l, -d-y, x+l, -d), (0, 0, x, y) + + try: + im = Image.frombytes("1", (x, y), bitmap, "hex", "1") + except ValueError: + # deal with zero-width characters + im = Image.new("1", (x, y)) + + return id, int(props["ENCODING"]), bbox, im + + +## +# Font file plugin for the X11 BDF format. + +class BdfFontFile(FontFile.FontFile): + + def __init__(self, fp): + + FontFile.FontFile.__init__(self) + + s = fp.readline() + if s[:13] != b"STARTFONT 2.1": + raise SyntaxError("not a valid BDF file") + + props = {} + comments = [] + + while True: + s = fp.readline() + if not s or s[:13] == b"ENDPROPERTIES": + break + i = s.find(b" ") + props[s[:i].decode('ascii')] = s[i+1:-1].decode('ascii') + if s[:i] in [b"COMMENT", b"COPYRIGHT"]: + if s.find(b"LogicalFontDescription") < 0: + comments.append(s[i+1:-1].decode('ascii')) + + # font = props["FONT"].split("-") + + # font[4] = bdf_slant[font[4].upper()] + # font[11] = bdf_spacing[font[11].upper()] + + # ascent = int(props["FONT_ASCENT"]) + # descent = int(props["FONT_DESCENT"]) + + # fontname = ";".join(font[1:]) + + # print "#", fontname + # for i in comments: + # print "#", i + + while True: + c = bdf_char(fp) + if not c: + break + id, ch, (xy, dst, src), im = c + if 0 <= ch < len(self.glyph): + self.glyph[ch] = xy, dst, src, im diff --git a/Lib/site-packages/PIL/BmpImagePlugin.py b/Lib/site-packages/PIL/BmpImagePlugin.py new file mode 100644 index 0000000..e398445 --- /dev/null +++ b/Lib/site-packages/PIL/BmpImagePlugin.py @@ -0,0 +1,288 @@ +# +# The Python Imaging Library. +# $Id$ +# +# BMP file handler +# +# Windows (and OS/2) native bitmap storage format. +# +# history: +# 1995-09-01 fl Created +# 1996-04-30 fl Added save +# 1997-08-27 fl Fixed save of 1-bit images +# 1998-03-06 fl Load P images as L where possible +# 1998-07-03 fl Load P images as 1 where possible +# 1998-12-29 fl Handle small palettes +# 2002-12-30 fl Fixed load of 1-bit palette images +# 2003-04-21 fl Fixed load of 1-bit monochrome images +# 2003-04-23 fl Added limited support for BI_BITFIELDS compression +# +# Copyright (c) 1997-2003 by Secret Labs AB +# Copyright (c) 1995-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. 
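+#
+# Illustrative sketch (not part of the upstream module; "sample.bmp" is a
+# placeholder path) of the fixed-size structures that _open()/_bitmap() below
+# walk through:
+#
+#     import struct
+#
+#     with open("sample.bmp", "rb") as f:
+#         # 14-byte file header: magic, file size, reserved, pixel-data offset
+#         magic, size, _, data_offset = struct.unpack("<2sIII", f.read(14))
+#         # first DWORD of the DIB header gives its size:
+#         # 12 -> OS/2 v1; 40, 64, 108, 124 -> the variants handled below
+#         header_size = struct.unpack("<I", f.read(4))[0]
+#     assert magic == b"BM"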
+# + + +from PIL import Image, ImageFile, ImagePalette, _binary +import math + +__version__ = "0.7" + +i8 = _binary.i8 +i16 = _binary.i16le +i32 = _binary.i32le +o8 = _binary.o8 +o16 = _binary.o16le +o32 = _binary.o32le + +# +# -------------------------------------------------------------------- +# Read BMP file + +BIT2MODE = { + # bits => mode, rawmode + 1: ("P", "P;1"), + 4: ("P", "P;4"), + 8: ("P", "P"), + 16: ("RGB", "BGR;15"), + 24: ("RGB", "BGR"), + 32: ("RGB", "BGRX"), +} + + +def _accept(prefix): + return prefix[:2] == b"BM" + + +# ============================================================================== +# Image plugin for the Windows BMP format. +# ============================================================================== +class BmpImageFile(ImageFile.ImageFile): + """ Image plugin for the Windows Bitmap format (BMP) """ + + # -------------------------------------------------------------- Description + format_description = "Windows Bitmap" + format = "BMP" + # --------------------------------------------------- BMP Compression values + COMPRESSIONS = {'RAW': 0, 'RLE8': 1, 'RLE4': 2, 'BITFIELDS': 3, 'JPEG': 4, 'PNG': 5} + RAW, RLE8, RLE4, BITFIELDS, JPEG, PNG = 0, 1, 2, 3, 4, 5 + + def _bitmap(self, header=0, offset=0): + """ Read relevant info about the BMP """ + read, seek = self.fp.read, self.fp.seek + if header: + seek(header) + file_info = dict() + file_info['header_size'] = i32(read(4)) # read bmp header size @offset 14 (this is part of the header size) + file_info['direction'] = -1 + # --------------------- If requested, read header at a specific position + header_data = ImageFile._safe_read(self.fp, file_info['header_size'] - 4) # read the rest of the bmp header, without its size + # --------------------------------------------------- IBM OS/2 Bitmap v1 + # ------ This format has different offsets because of width/height types + if file_info['header_size'] == 12: + file_info['width'] = i16(header_data[0:2]) + file_info['height'] = i16(header_data[2:4]) + file_info['planes'] = i16(header_data[4:6]) + file_info['bits'] = i16(header_data[6:8]) + file_info['compression'] = self.RAW + file_info['palette_padding'] = 3 + # ---------------------------------------------- Windows Bitmap v2 to v5 + elif file_info['header_size'] in (40, 64, 108, 124): # v3, OS/2 v2, v4, v5 + if file_info['header_size'] >= 40: # v3 and OS/2 + file_info['y_flip'] = i8(header_data[7]) == 0xff + file_info['direction'] = 1 if file_info['y_flip'] else -1 + file_info['width'] = i32(header_data[0:4]) + file_info['height'] = i32(header_data[4:8]) if not file_info['y_flip'] else 2**32 - i32(header_data[4:8]) + file_info['planes'] = i16(header_data[8:10]) + file_info['bits'] = i16(header_data[10:12]) + file_info['compression'] = i32(header_data[12:16]) + file_info['data_size'] = i32(header_data[16:20]) # byte size of pixel data + file_info['pixels_per_meter'] = (i32(header_data[20:24]), i32(header_data[24:28])) + file_info['colors'] = i32(header_data[28:32]) + file_info['palette_padding'] = 4 + self.info["dpi"] = tuple( + map(lambda x: int(math.ceil(x / 39.3701)), + file_info['pixels_per_meter'])) + if file_info['compression'] == self.BITFIELDS: + if len(header_data) >= 52: + for idx, mask in enumerate(['r_mask', 'g_mask', 'b_mask', 'a_mask']): + file_info[mask] = i32(header_data[36+idx*4:40+idx*4]) + else: + for mask in ['r_mask', 'g_mask', 'b_mask', 'a_mask']: + file_info[mask] = i32(read(4)) + file_info['rgb_mask'] = (file_info['r_mask'], file_info['g_mask'], file_info['b_mask']) + 
file_info['rgba_mask'] = (file_info['r_mask'], file_info['g_mask'], file_info['b_mask'], file_info['a_mask']) + else: + raise IOError("Unsupported BMP header type (%d)" % file_info['header_size']) + # ------------------ Special case : header is reported 40, which + # ---------------------- is shorter than real size for bpp >= 16 + self.size = file_info['width'], file_info['height'] + # -------- If color count was not found in the header, compute from bits + file_info['colors'] = file_info['colors'] if file_info.get('colors', 0) else (1 << file_info['bits']) + # -------------------------------- Check abnormal values for DOS attacks + if file_info['width'] * file_info['height'] > 2**31: + raise IOError("Unsupported BMP Size: (%dx%d)" % self.size) + # ----------------------- Check bit depth for unusual unsupported values + self.mode, raw_mode = BIT2MODE.get(file_info['bits'], (None, None)) + if self.mode is None: + raise IOError("Unsupported BMP pixel depth (%d)" % file_info['bits']) + # ----------------- Process BMP with Bitfields compression (not palette) + if file_info['compression'] == self.BITFIELDS: + SUPPORTED = { + 32: [(0xff0000, 0xff00, 0xff, 0x0), (0xff0000, 0xff00, 0xff, 0xff000000), (0x0, 0x0, 0x0, 0x0)], + 24: [(0xff0000, 0xff00, 0xff)], + 16: [(0xf800, 0x7e0, 0x1f), (0x7c00, 0x3e0, 0x1f)] + } + MASK_MODES = { + (32, (0xff0000, 0xff00, 0xff, 0x0)): "BGRX", + (32, (0xff0000, 0xff00, 0xff, 0xff000000)): "BGRA", + (32, (0x0, 0x0, 0x0, 0x0)): "BGRA", + (24, (0xff0000, 0xff00, 0xff)): "BGR", + (16, (0xf800, 0x7e0, 0x1f)): "BGR;16", + (16, (0x7c00, 0x3e0, 0x1f)): "BGR;15" + } + if file_info['bits'] in SUPPORTED: + if file_info['bits'] == 32 and file_info['rgba_mask'] in SUPPORTED[file_info['bits']]: + raw_mode = MASK_MODES[(file_info['bits'], file_info['rgba_mask'])] + self.mode = "RGBA" if raw_mode in ("BGRA",) else self.mode + elif file_info['bits'] in (24, 16) and file_info['rgb_mask'] in SUPPORTED[file_info['bits']]: + raw_mode = MASK_MODES[(file_info['bits'], file_info['rgb_mask'])] + else: + raise IOError("Unsupported BMP bitfields layout") + else: + raise IOError("Unsupported BMP bitfields layout") + elif file_info['compression'] == self.RAW: + if file_info['bits'] == 32 and header == 22: # 32-bit .cur offset + raw_mode, self.mode = "BGRA", "RGBA" + else: + raise IOError("Unsupported BMP compression (%d)" % file_info['compression']) + # ---------------- Once the header is processed, process the palette/LUT + if self.mode == "P": # Paletted for 1, 4 and 8 bit images + # ----------------------------------------------------- 1-bit images + if not (0 < file_info['colors'] <= 65536): + raise IOError("Unsupported BMP Palette size (%d)" % file_info['colors']) + else: + padding = file_info['palette_padding'] + palette = read(padding * file_info['colors']) + greyscale = True + indices = (0, 255) if file_info['colors'] == 2 else list(range(file_info['colors'])) + # ------------------ Check if greyscale and ignore palette if so + for ind, val in enumerate(indices): + rgb = palette[ind*padding:ind*padding + 3] + if rgb != o8(val) * 3: + greyscale = False + # -------- If all colors are grey, white or black, ditch palette + if greyscale: + self.mode = "1" if file_info['colors'] == 2 else "L" + raw_mode = self.mode + else: + self.mode = "P" + self.palette = ImagePalette.raw("BGRX" if padding == 4 else "BGR", palette) + + # ----------------------------- Finally set the tile data for the plugin + self.info['compression'] = file_info['compression'] + self.tile = [('raw', (0, 0, 
file_info['width'], file_info['height']), offset or self.fp.tell(), + (raw_mode, ((file_info['width'] * file_info['bits'] + 31) >> 3) & (~3), file_info['direction']) + )] + + def _open(self): + """ Open file, check magic number and read header """ + # read 14 bytes: magic number, filesize, reserved, header final offset + head_data = self.fp.read(14) + # choke if the file does not have the required magic bytes + if head_data[0:2] != b"BM": + raise SyntaxError("Not a BMP file") + # read the start position of the BMP image data (u32) + offset = i32(head_data[10:14]) + # load bitmap information (offset=raster info) + self._bitmap(offset=offset) + + +# ============================================================================== +# Image plugin for the DIB format (BMP alias) +# ============================================================================== +class DibImageFile(BmpImageFile): + + format = "DIB" + format_description = "Windows Bitmap" + + def _open(self): + self._bitmap() + +# +# -------------------------------------------------------------------- +# Write BMP file + +SAVE = { + "1": ("1", 1, 2), + "L": ("L", 8, 256), + "P": ("P", 8, 256), + "RGB": ("BGR", 24, 0), + "RGBA": ("BGRA", 32, 0), +} + + +def _save(im, fp, filename, check=0): + try: + rawmode, bits, colors = SAVE[im.mode] + except KeyError: + raise IOError("cannot write mode %s as BMP" % im.mode) + + if check: + return check + + info = im.encoderinfo + + dpi = info.get("dpi", (96, 96)) + + # 1 meter == 39.3701 inches + ppm = tuple(map(lambda x: int(x * 39.3701), dpi)) + + stride = ((im.size[0]*bits+7)//8+3) & (~3) + header = 40 # or 64 for OS/2 version 2 + offset = 14 + header + colors * 4 + image = stride * im.size[1] + + # bitmap header + fp.write(b"BM" + # file type (magic) + o32(offset+image) + # file size + o32(0) + # reserved + o32(offset)) # image data offset + + # bitmap info header + fp.write(o32(header) + # info header size + o32(im.size[0]) + # width + o32(im.size[1]) + # height + o16(1) + # planes + o16(bits) + # depth + o32(0) + # compression (0=uncompressed) + o32(image) + # size of bitmap + o32(ppm[0]) + o32(ppm[1]) + # resolution + o32(colors) + # colors used + o32(colors)) # colors important + + fp.write(b"\0" * (header - 40)) # padding (for OS/2 format) + + if im.mode == "1": + for i in (0, 255): + fp.write(o8(i) * 4) + elif im.mode == "L": + for i in range(256): + fp.write(o8(i) * 4) + elif im.mode == "P": + fp.write(im.im.getpalette("RGB", "BGRX")) + + ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 0, + (rawmode, stride, -1))]) + +# +# -------------------------------------------------------------------- +# Registry + +Image.register_open(BmpImageFile.format, BmpImageFile, _accept) +Image.register_save(BmpImageFile.format, _save) + +Image.register_extension(BmpImageFile.format, ".bmp") + +Image.register_mime(BmpImageFile.format, "image/bmp") diff --git a/Lib/site-packages/PIL/BufrStubImagePlugin.py b/Lib/site-packages/PIL/BufrStubImagePlugin.py new file mode 100644 index 0000000..45ee547 --- /dev/null +++ b/Lib/site-packages/PIL/BufrStubImagePlugin.py @@ -0,0 +1,72 @@ +# +# The Python Imaging Library +# $Id$ +# +# BUFR stub adapter +# +# Copyright (c) 1996-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image, ImageFile + +_handler = None + + +## +# Install application-specific BUFR image handler. +# +# @param handler Handler object. 
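+#
+# Rough sketch (not part of the upstream module) of the handler contract the
+# stub relies on: an object offering open(im), load(im) and, for writing,
+# save(im, fp, filename), installed through register_handler(). The class
+# below is made up for illustration and decodes nothing.
+#
+#     from PIL import BufrStubImagePlugin
+#
+#     class DummyBufrHandler(object):
+#         def open(self, im):
+#             pass            # a real handler would set size/mode/tile here
+#         def load(self, im):
+#             return None     # or return a fully decoded Image instance
+#
+#     BufrStubImagePlugin.register_handler(DummyBufrHandler())
+#
+# Note that _save() below tests hasattr("_handler", "save") on the string
+# literal rather than on the installed handler, so the save path raises
+# IOError even when a handler that implements save() has been registered.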
+ +def register_handler(handler): + global _handler + _handler = handler + + +# -------------------------------------------------------------------- +# Image adapter + +def _accept(prefix): + return prefix[:4] == b"BUFR" or prefix[:4] == b"ZCZC" + + +class BufrStubImageFile(ImageFile.StubImageFile): + + format = "BUFR" + format_description = "BUFR" + + def _open(self): + + offset = self.fp.tell() + + if not _accept(self.fp.read(8)): + raise SyntaxError("Not a BUFR file") + + self.fp.seek(offset) + + # make something up + self.mode = "F" + self.size = 1, 1 + + loader = self._load() + if loader: + loader.open(self) + + def _load(self): + return _handler + + +def _save(im, fp, filename): + if _handler is None or not hasattr("_handler", "save"): + raise IOError("BUFR save handler not installed") + _handler.save(im, fp, filename) + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(BufrStubImageFile.format, BufrStubImageFile, _accept) +Image.register_save(BufrStubImageFile.format, _save) + +Image.register_extension(BufrStubImageFile.format, ".bufr") diff --git a/Lib/site-packages/PIL/ContainerIO.py b/Lib/site-packages/PIL/ContainerIO.py new file mode 100644 index 0000000..262f2af --- /dev/null +++ b/Lib/site-packages/PIL/ContainerIO.py @@ -0,0 +1,117 @@ +# +# The Python Imaging Library. +# $Id$ +# +# a class to read from a container file +# +# History: +# 1995-06-18 fl Created +# 1995-09-07 fl Added readline(), readlines() +# +# Copyright (c) 1997-2001 by Secret Labs AB +# Copyright (c) 1995 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +## +# A file object that provides read access to a part of an existing +# file (for example a TAR file). + + +class ContainerIO(object): + + ## + # Create file object. + # + # @param file Existing file. + # @param offset Start of region, in bytes. + # @param length Size of region, in bytes. + + def __init__(self, file, offset, length): + self.fh = file + self.pos = 0 + self.offset = offset + self.length = length + self.fh.seek(offset) + + ## + # Always false. + + def isatty(self): + return 0 + + ## + # Move file pointer. + # + # @param offset Offset in bytes. + # @param mode Starting position. Use 0 for beginning of region, 1 + # for current offset, and 2 for end of region. You cannot move + # the pointer outside the defined region. + + def seek(self, offset, mode=0): + if mode == 1: + self.pos = self.pos + offset + elif mode == 2: + self.pos = self.length + offset + else: + self.pos = offset + # clamp + self.pos = max(0, min(self.pos, self.length)) + self.fh.seek(self.offset + self.pos) + + ## + # Get current file pointer. + # + # @return Offset from start of region, in bytes. + + def tell(self): + return self.pos + + ## + # Read data. + # + # @def read(bytes=0) + # @param bytes Number of bytes to read. If omitted or zero, + # read until end of region. + # @return An 8-bit string. + + def read(self, n=0): + if n: + n = min(n, self.length - self.pos) + else: + n = self.length - self.pos + if not n: # EOF + return "" + self.pos = self.pos + n + return self.fh.read(n) + + ## + # Read a line of text. + # + # @return An 8-bit string. + + def readline(self): + s = "" + while True: + c = self.read(1) + if not c: + break + s = s + c + if c == "\n": + break + return s + + ## + # Read multiple lines of text. + # + # @return A list of 8-bit strings. 
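+
+    # Rough usage sketch (not part of the upstream class; names and offsets
+    # are made up): ContainerIO exposes a byte range of an already-open file
+    # as a small read-only file object, e.g. one member of an uncompressed
+    # archive.
+    #
+    #     fh = open("archive.bin", "rb")
+    #     member = ContainerIO(fh, offset=512, length=1024)
+    #     data = member.read()    # at most 1024 bytes, taken from offset 512
+    #     member.seek(0)          # offsets are relative to the region start
+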
+ + def readlines(self): + l = [] + while True: + s = self.readline() + if not s: + break + l.append(s) + return l diff --git a/Lib/site-packages/PIL/CurImagePlugin.py b/Lib/site-packages/PIL/CurImagePlugin.py new file mode 100644 index 0000000..4db4c40 --- /dev/null +++ b/Lib/site-packages/PIL/CurImagePlugin.py @@ -0,0 +1,88 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Windows Cursor support for PIL +# +# notes: +# uses BmpImagePlugin.py to read the bitmap data. +# +# history: +# 96-05-27 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# + + +from PIL import Image, BmpImagePlugin, _binary + +__version__ = "0.1" + +# +# -------------------------------------------------------------------- + +i8 = _binary.i8 +i16 = _binary.i16le +i32 = _binary.i32le + + +def _accept(prefix): + return prefix[:4] == b"\0\0\2\0" + + +## +# Image plugin for Windows Cursor files. + +class CurImageFile(BmpImagePlugin.BmpImageFile): + + format = "CUR" + format_description = "Windows Cursor" + + def _open(self): + + offset = self.fp.tell() + + # check magic + s = self.fp.read(6) + if not _accept(s): + raise SyntaxError("not a CUR file") + + # pick the largest cursor in the file + m = b"" + for i in range(i16(s[4:])): + s = self.fp.read(16) + if not m: + m = s + elif i8(s[0]) > i8(m[0]) and i8(s[1]) > i8(m[1]): + m = s + # print "width", i8(s[0]) + # print "height", i8(s[1]) + # print "colors", i8(s[2]) + # print "reserved", i8(s[3]) + # print "hotspot x", i16(s[4:]) + # print "hotspot y", i16(s[6:]) + # print "bytes", i32(s[8:]) + # print "offset", i32(s[12:]) + if not m: + raise TypeError("No cursors were found") + + # load as bitmap + self._bitmap(i32(m[12:]) + offset) + + # patch up the bitmap height + self.size = self.size[0], self.size[1]//2 + d, e, o, a = self.tile[0] + self.tile[0] = d, (0, 0)+self.size, o, a + + return + + +# +# -------------------------------------------------------------------- + +Image.register_open(CurImageFile.format, CurImageFile, _accept) + +Image.register_extension(CurImageFile.format, ".cur") diff --git a/Lib/site-packages/PIL/DcxImagePlugin.py b/Lib/site-packages/PIL/DcxImagePlugin.py new file mode 100644 index 0000000..f9034d1 --- /dev/null +++ b/Lib/site-packages/PIL/DcxImagePlugin.py @@ -0,0 +1,86 @@ +# +# The Python Imaging Library. +# $Id$ +# +# DCX file handling +# +# DCX is a container file format defined by Intel, commonly used +# for fax applications. Each DCX file consists of a directory +# (a list of file offsets) followed by a set of (usually 1-bit) +# PCX files. +# +# History: +# 1995-09-09 fl Created +# 1996-03-20 fl Properly derived from PcxImageFile. +# 1998-07-15 fl Renamed offset attribute to avoid name clash +# 2002-07-30 fl Fixed file handling +# +# Copyright (c) 1997-98 by Secret Labs AB. +# Copyright (c) 1995-96 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image, _binary +from PIL.PcxImagePlugin import PcxImageFile + +__version__ = "0.2" + +MAGIC = 0x3ADE68B1 # QUIZ: what's this value, then? + +i32 = _binary.i32le + + +def _accept(prefix): + return len(prefix) >= 4 and i32(prefix) == MAGIC + + +## +# Image plugin for the Intel DCX format. 
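+#
+# Rough usage sketch (not part of the upstream module; "fax.dcx" is a
+# placeholder path): a DCX file is a directory of file offsets followed by
+# individual PCX frames, which the plugin exposes through the usual
+# seek()/tell() interface.
+#
+#     from PIL import Image
+#
+#     im = Image.open("fax.dcx")
+#     for frame in range(im.n_frames):
+#         im.seek(frame)
+#         im.save("page-%03d.png" % frame)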
+ +class DcxImageFile(PcxImageFile): + + format = "DCX" + format_description = "Intel DCX" + + def _open(self): + + # Header + s = self.fp.read(4) + if i32(s) != MAGIC: + raise SyntaxError("not a DCX file") + + # Component directory + self._offset = [] + for i in range(1024): + offset = i32(self.fp.read(4)) + if not offset: + break + self._offset.append(offset) + + self.__fp = self.fp + self.seek(0) + + @property + def n_frames(self): + return len(self._offset) + + @property + def is_animated(self): + return len(self._offset) > 1 + + def seek(self, frame): + if frame >= len(self._offset): + raise EOFError("attempt to seek outside DCX directory") + self.frame = frame + self.fp = self.__fp + self.fp.seek(self._offset[frame]) + PcxImageFile._open(self) + + def tell(self): + return self.frame + + +Image.register_open(DcxImageFile.format, DcxImageFile, _accept) + +Image.register_extension(DcxImageFile.format, ".dcx") diff --git a/Lib/site-packages/PIL/EpsImagePlugin.py b/Lib/site-packages/PIL/EpsImagePlugin.py new file mode 100644 index 0000000..a950c52 --- /dev/null +++ b/Lib/site-packages/PIL/EpsImagePlugin.py @@ -0,0 +1,428 @@ +# +# The Python Imaging Library. +# $Id$ +# +# EPS file handling +# +# History: +# 1995-09-01 fl Created (0.1) +# 1996-05-18 fl Don't choke on "atend" fields, Ghostscript interface (0.2) +# 1996-08-22 fl Don't choke on floating point BoundingBox values +# 1996-08-23 fl Handle files from Macintosh (0.3) +# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.4) +# 2003-09-07 fl Check gs.close status (from Federico Di Gregorio) (0.5) +# 2014-05-07 e Handling of EPS with binary preview and fixed resolution +# resizing +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1995-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. 
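+#
+# Rough usage sketch (not part of the upstream module; "figure.eps" is a
+# placeholder path and Ghostscript must be installed): opening an EPS file
+# only parses the DSC header, while load() renders the file with Ghostscript
+# and accepts a scale factor for higher-resolution output.
+#
+#     from PIL import Image
+#
+#     im = Image.open("figure.eps")   # header/BoundingBox only
+#     im.load(scale=2)                # rasterise at 2x the BoundingBox size
+#     im.save("figure.png")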
+# + +import re +import io +import sys +from PIL import Image, ImageFile, _binary + +__version__ = "0.5" + +# +# -------------------------------------------------------------------- + +i32 = _binary.i32le +o32 = _binary.o32le + +split = re.compile(r"^%%([^:]*):[ \t]*(.*)[ \t]*$") +field = re.compile(r"^%[%!\w]([^:]*)[ \t]*$") + +gs_windows_binary = None +if sys.platform.startswith('win'): + import shutil + if hasattr(shutil, 'which'): + which = shutil.which + else: + # Python < 3.3 + import distutils.spawn + which = distutils.spawn.find_executable + for binary in ('gswin32c', 'gswin64c', 'gs'): + if which(binary) is not None: + gs_windows_binary = binary + break + else: + gs_windows_binary = False + + +def has_ghostscript(): + if gs_windows_binary: + return True + if not sys.platform.startswith('win'): + import subprocess + try: + gs = subprocess.Popen(['gs', '--version'], stdout=subprocess.PIPE) + gs.stdout.read() + return True + except OSError: + # no ghostscript + pass + return False + + +def Ghostscript(tile, size, fp, scale=1): + """Render an image using Ghostscript""" + + # Unpack decoder tile + decoder, tile, offset, data = tile[0] + length, bbox = data + + # Hack to support hi-res rendering + scale = int(scale) or 1 + # orig_size = size + # orig_bbox = bbox + size = (size[0] * scale, size[1] * scale) + # resolution is dependent on bbox and size + res = (float((72.0 * size[0]) / (bbox[2]-bbox[0])), + float((72.0 * size[1]) / (bbox[3]-bbox[1]))) + # print("Ghostscript", scale, size, orig_size, bbox, orig_bbox, res) + + import os + import subprocess + import tempfile + + out_fd, outfile = tempfile.mkstemp() + os.close(out_fd) + + infile_temp = None + if hasattr(fp, 'name') and os.path.exists(fp.name): + infile = fp.name + else: + in_fd, infile_temp = tempfile.mkstemp() + os.close(in_fd) + infile = infile_temp + + # ignore length and offset! + # ghostscript can read it + # copy whole file to read in ghostscript + with open(infile_temp, 'wb') as f: + # fetch length of fp + fp.seek(0, 2) + fsize = fp.tell() + # ensure start position + # go back + fp.seek(0) + lengthfile = fsize + while lengthfile > 0: + s = fp.read(min(lengthfile, 100*1024)) + if not s: + break + lengthfile -= len(s) + f.write(s) + + # Build ghostscript command + command = ["gs", + "-q", # quiet mode + "-g%dx%d" % size, # set output geometry (pixels) + "-r%fx%f" % res, # set input DPI (dots per inch) + "-dNOPAUSE", # don't pause between pages, + "-dSAFER", # safe mode + "-sDEVICE=ppmraw", # ppm driver + "-sOutputFile=%s" % outfile, # output file + "-c", "%d %d translate" % (-bbox[0], -bbox[1]), + # adjust for image origin + "-f", infile, # input file + ] + + if gs_windows_binary is not None: + if not gs_windows_binary: + raise WindowsError('Unable to locate Ghostscript on paths') + command[0] = gs_windows_binary + + # push data through ghostscript + try: + gs = subprocess.Popen(command, stdin=subprocess.PIPE, + stdout=subprocess.PIPE) + gs.stdin.close() + status = gs.wait() + if status: + raise IOError("gs failed (status %d)" % status) + im = Image.core.open_ppm(outfile) + finally: + try: + os.unlink(outfile) + if infile_temp: + os.unlink(infile_temp) + except OSError: + pass + + return im + + +class PSFile(object): + """ + Wrapper for bytesio object that treats either CR or LF as end of line. 
+ """ + def __init__(self, fp): + self.fp = fp + self.char = None + + def seek(self, offset, whence=0): + self.char = None + self.fp.seek(offset, whence) + + def readline(self): + s = self.char or b"" + self.char = None + + c = self.fp.read(1) + while c not in b"\r\n": + s = s + c + c = self.fp.read(1) + + self.char = self.fp.read(1) + # line endings can be 1 or 2 of \r \n, in either order + if self.char in b"\r\n": + self.char = None + + return s.decode('latin-1') + + +def _accept(prefix): + return prefix[:4] == b"%!PS" or \ + (len(prefix) >= 4 and i32(prefix) == 0xC6D3D0C5) + +## +# Image plugin for Encapsulated Postscript. This plugin supports only +# a few variants of this format. + + +class EpsImageFile(ImageFile.ImageFile): + """EPS File Parser for the Python Imaging Library""" + + format = "EPS" + format_description = "Encapsulated Postscript" + + mode_map = {1: "L", 2: "LAB", 3: "RGB"} + + def _open(self): + (length, offset) = self._find_offset(self.fp) + + # Rewrap the open file pointer in something that will + # convert line endings and decode to latin-1. + try: + if bytes is str: + # Python2, no encoding conversion necessary + fp = open(self.fp.name, "Ur") + else: + # Python3, can use bare open command. + fp = open(self.fp.name, "Ur", encoding='latin-1') + except: + # Expect this for bytesio/stringio + fp = PSFile(self.fp) + + # go to offset - start of "%!PS" + fp.seek(offset) + + box = None + + self.mode = "RGB" + self.size = 1, 1 # FIXME: huh? + + # + # Load EPS header + + s = fp.readline().strip('\r\n') + + while s: + if len(s) > 255: + raise SyntaxError("not an EPS file") + + try: + m = split.match(s) + except re.error as v: + raise SyntaxError("not an EPS file") + + if m: + k, v = m.group(1, 2) + self.info[k] = v + if k == "BoundingBox": + try: + # Note: The DSC spec says that BoundingBox + # fields should be integers, but some drivers + # put floating point values there anyway. + box = [int(float(i)) for i in v.split()] + self.size = box[2] - box[0], box[3] - box[1] + self.tile = [("eps", (0, 0) + self.size, offset, + (length, box))] + except: + pass + + else: + m = field.match(s) + if m: + k = m.group(1) + + if k == "EndComments": + break + if k[:8] == "PS-Adobe": + self.info[k[:8]] = k[9:] + else: + self.info[k] = "" + elif s[0] == '%': + # handle non-DSC Postscript comments that some + # tools mistakenly put in the Comments section + pass + else: + raise IOError("bad EPS header") + + s = fp.readline().strip('\r\n') + + if s[:1] != "%": + break + + # + # Scan for an "ImageData" descriptor + + while s[:1] == "%": + + if len(s) > 255: + raise SyntaxError("not an EPS file") + + if s[:11] == "%ImageData:": + # Encoded bitmapped image. 
+ x, y, bi, mo = s[11:].split(None, 7)[:4] + + if int(bi) != 8: + break + try: + self.mode = self.mode_map[int(mo)] + except ValueError: + break + + self.size = int(x), int(y) + return + + s = fp.readline().strip('\r\n') + if not s: + break + + if not box: + raise IOError("cannot determine EPS bounding box") + + def _find_offset(self, fp): + + s = fp.read(160) + + if s[:4] == b"%!PS": + # for HEAD without binary preview + fp.seek(0, 2) + length = fp.tell() + offset = 0 + elif i32(s[0:4]) == 0xC6D3D0C5: + # FIX for: Some EPS file not handled correctly / issue #302 + # EPS can contain binary data + # or start directly with latin coding + # more info see: + # http://partners.adobe.com/public/developer/en/ps/5002.EPSF_Spec.pdf + offset = i32(s[4:8]) + length = i32(s[8:12]) + else: + raise SyntaxError("not an EPS file") + + return (length, offset) + + def load(self, scale=1): + # Load EPS via Ghostscript + if not self.tile: + return + self.im = Ghostscript(self.tile, self.size, self.fp, scale) + self.mode = self.im.mode + self.size = self.im.size + self.tile = [] + + def load_seek(self, *args, **kwargs): + # we can't incrementally load, so force ImageFile.parser to + # use our custom load method by defining this method. + pass + + +# +# -------------------------------------------------------------------- + +def _save(im, fp, filename, eps=1): + """EPS Writer for the Python Imaging Library.""" + + # + # make sure image data is available + im.load() + + # + # determine postscript image mode + if im.mode == "L": + operator = (8, 1, "image") + elif im.mode == "RGB": + operator = (8, 3, "false 3 colorimage") + elif im.mode == "CMYK": + operator = (8, 4, "false 4 colorimage") + else: + raise ValueError("image mode is not supported") + + class NoCloseStream(object): + def __init__(self, fp): + self.fp = fp + + def __getattr__(self, name): + return getattr(self.fp, name) + + def close(self): + pass + + base_fp = fp + if fp != sys.stdout: + fp = NoCloseStream(fp) + if sys.version_info[0] > 2: + fp = io.TextIOWrapper(fp, encoding='latin-1') + + if eps: + # + # write EPS header + fp.write("%!PS-Adobe-3.0 EPSF-3.0\n") + fp.write("%%Creator: PIL 0.1 EpsEncode\n") + # fp.write("%%CreationDate: %s"...) 
+ fp.write("%%%%BoundingBox: 0 0 %d %d\n" % im.size) + fp.write("%%Pages: 1\n") + fp.write("%%EndComments\n") + fp.write("%%Page: 1 1\n") + fp.write("%%ImageData: %d %d " % im.size) + fp.write("%d %d 0 1 1 \"%s\"\n" % operator) + + # + # image header + fp.write("gsave\n") + fp.write("10 dict begin\n") + fp.write("/buf %d string def\n" % (im.size[0] * operator[1])) + fp.write("%d %d scale\n" % im.size) + fp.write("%d %d 8\n" % im.size) # <= bits + fp.write("[%d 0 0 -%d 0 %d]\n" % (im.size[0], im.size[1], im.size[1])) + fp.write("{ currentfile buf readhexstring pop } bind\n") + fp.write(operator[2] + "\n") + if hasattr(fp, "flush"): + fp.flush() + + ImageFile._save(im, base_fp, [("eps", (0, 0)+im.size, 0, None)]) + + fp.write("\n%%%%EndBinary\n") + fp.write("grestore end\n") + if hasattr(fp, "flush"): + fp.flush() + +# +# -------------------------------------------------------------------- + +Image.register_open(EpsImageFile.format, EpsImageFile, _accept) + +Image.register_save(EpsImageFile.format, _save) + +Image.register_extension(EpsImageFile.format, ".ps") +Image.register_extension(EpsImageFile.format, ".eps") + +Image.register_mime(EpsImageFile.format, "application/postscript") diff --git a/Lib/site-packages/PIL/ExifTags.py b/Lib/site-packages/PIL/ExifTags.py new file mode 100644 index 0000000..52e145f --- /dev/null +++ b/Lib/site-packages/PIL/ExifTags.py @@ -0,0 +1,193 @@ +# +# The Python Imaging Library. +# $Id$ +# +# EXIF tags +# +# Copyright (c) 2003 by Secret Labs AB +# +# See the README file for information on usage and redistribution. +# + +## +# This module provides constants and clear-text names for various +# well-known EXIF tags. +## + +## +# Maps EXIF tags to tag names. + +TAGS = { + + # possibly incomplete + 0x00fe: "NewSubfileType", + 0x00ff: "SubfileType", + 0x0100: "ImageWidth", + 0x0101: "ImageLength", + 0x0102: "BitsPerSample", + 0x0103: "Compression", + 0x0106: "PhotometricInterpretation", + 0x0107: "Threshholding", + 0x0108: "CellWidth", + 0x0109: "CellLenght", + 0x010a: "FillOrder", + 0x010d: "DocumentName", + 0x011d: "PageName", + 0x010e: "ImageDescription", + 0x010f: "Make", + 0x0110: "Model", + 0x0111: "StripOffsets", + 0x0112: "Orientation", + 0x0115: "SamplesPerPixel", + 0x0116: "RowsPerStrip", + 0x0117: "StripByteConunts", + 0x0118: "MinSampleValue", + 0x0119: "MaxSampleValue", + 0x011a: "XResolution", + 0x011b: "YResolution", + 0x011c: "PlanarConfiguration", + 0x0120: "FreeOffsets", + 0x0121: "FreeByteCounts", + 0x0122: "GrayResponseUnit", + 0x0123: "GrayResponseCurve", + 0x0128: "ResolutionUnit", + 0x012d: "TransferFunction", + 0x0131: "Software", + 0x0132: "DateTime", + 0x013b: "Artist", + 0x013c: "HostComputer", + 0x013e: "WhitePoint", + 0x013f: "PrimaryChromaticities", + 0x0140: "ColorMap", + 0x0152: "ExtraSamples", + 0x0201: "JpegIFOffset", + 0x0202: "JpegIFByteCount", + 0x0211: "YCbCrCoefficients", + 0x0212: "YCbCrSubSampling", + 0x0213: "YCbCrPositioning", + 0x0214: "ReferenceBlackWhite", + 0x1000: "RelatedImageFileFormat", + 0x1001: "RelatedImageWidth", + 0x1002: "RelatedImageLength", + 0x828d: "CFARepeatPatternDim", + 0x828e: "CFAPattern", + 0x828f: "BatteryLevel", + 0x8298: "Copyright", + 0x829a: "ExposureTime", + 0x829d: "FNumber", + 0x8769: "ExifOffset", + 0x8773: "InterColorProfile", + 0x8822: "ExposureProgram", + 0x8824: "SpectralSensitivity", + 0x8825: "GPSInfo", + 0x8827: "ISOSpeedRatings", + 0x8828: "OECF", + 0x8829: "Interlace", + 0x882a: "TimeZoneOffset", + 0x882b: "SelfTimerMode", + 0x9000: "ExifVersion", + 0x9003: "DateTimeOriginal", + 
0x9004: "DateTimeDigitized", + 0x9101: "ComponentsConfiguration", + 0x9102: "CompressedBitsPerPixel", + 0x9201: "ShutterSpeedValue", + 0x9202: "ApertureValue", + 0x9203: "BrightnessValue", + 0x9204: "ExposureBiasValue", + 0x9205: "MaxApertureValue", + 0x9206: "SubjectDistance", + 0x9207: "MeteringMode", + 0x9208: "LightSource", + 0x9209: "Flash", + 0x920a: "FocalLength", + 0x920b: "FlashEnergy", + 0x920c: "SpatialFrequencyResponse", + 0x920d: "Noise", + 0x9211: "ImageNumber", + 0x9212: "SecurityClassification", + 0x9213: "ImageHistory", + 0x9214: "SubjectLocation", + 0x9215: "ExposureIndex", + 0x9216: "TIFF/EPStandardID", + 0x927c: "MakerNote", + 0x9286: "UserComment", + 0x9290: "SubsecTime", + 0x9291: "SubsecTimeOriginal", + 0x9292: "SubsecTimeDigitized", + 0xa000: "FlashPixVersion", + 0xa001: "ColorSpace", + 0xa002: "ExifImageWidth", + 0xa003: "ExifImageHeight", + 0xa004: "RelatedSoundFile", + 0xa005: "ExifInteroperabilityOffset", + 0xa20b: "FlashEnergy", + 0xa20c: "SpatialFrequencyResponse", + 0xa20e: "FocalPlaneXResolution", + 0xa20f: "FocalPlaneYResolution", + 0xa210: "FocalPlaneResolutionUnit", + 0xa214: "SubjectLocation", + 0xa215: "ExposureIndex", + 0xa217: "SensingMethod", + 0xa300: "FileSource", + 0xa301: "SceneType", + 0xa302: "CFAPattern", + 0xa401: "CustomRendered", + 0xa402: "ExposureMode", + 0xa403: "WhiteBalance", + 0xa404: "DigitalZoomRatio", + 0xa405: "FocalLengthIn35mmFilm", + 0xa406: "SceneCaptureType", + 0xa407: "GainControl", + 0xa408: "Contrast", + 0xa409: "Saturation", + 0xa40a: "Sharpness", + 0xa40b: "DeviceSettingDescription", + 0xa40c: "SubjectDistanceRange", + 0xa420: "ImageUniqueID", + 0xa430: "CameraOwnerName", + 0xa431: "BodySerialNumber", + 0xa432: "LensSpecification", + 0xa433: "LensMake", + 0xa434: "LensModel", + 0xa435: "LensSerialNumber", + 0xa500: "Gamma", + +} + +## +# Maps EXIF GPS tags to tag names. + +GPSTAGS = { + 0: "GPSVersionID", + 1: "GPSLatitudeRef", + 2: "GPSLatitude", + 3: "GPSLongitudeRef", + 4: "GPSLongitude", + 5: "GPSAltitudeRef", + 6: "GPSAltitude", + 7: "GPSTimeStamp", + 8: "GPSSatellites", + 9: "GPSStatus", + 10: "GPSMeasureMode", + 11: "GPSDOP", + 12: "GPSSpeedRef", + 13: "GPSSpeed", + 14: "GPSTrackRef", + 15: "GPSTrack", + 16: "GPSImgDirectionRef", + 17: "GPSImgDirection", + 18: "GPSMapDatum", + 19: "GPSDestLatitudeRef", + 20: "GPSDestLatitude", + 21: "GPSDestLongitudeRef", + 22: "GPSDestLongitude", + 23: "GPSDestBearingRef", + 24: "GPSDestBearing", + 25: "GPSDestDistanceRef", + 26: "GPSDestDistance", + 27: "GPSProcessingMethod", + 28: "GPSAreaInformation", + 29: "GPSDateStamp", + 30: "GPSDifferential", + 31: "GPSHPositioningError", +} diff --git a/Lib/site-packages/PIL/FitsStubImagePlugin.py b/Lib/site-packages/PIL/FitsStubImagePlugin.py new file mode 100644 index 0000000..7aefff2 --- /dev/null +++ b/Lib/site-packages/PIL/FitsStubImagePlugin.py @@ -0,0 +1,76 @@ +# +# The Python Imaging Library +# $Id$ +# +# FITS stub adapter +# +# Copyright (c) 1998-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image, ImageFile + +_handler = None + +## +# Install application-specific FITS image handler. +# +# @param handler Handler object. 
+ + +def register_handler(handler): + global _handler + _handler = handler + +# -------------------------------------------------------------------- +# Image adapter + + +def _accept(prefix): + return prefix[:6] == b"SIMPLE" + + +class FITSStubImageFile(ImageFile.StubImageFile): + + format = "FITS" + format_description = "FITS" + + def _open(self): + + offset = self.fp.tell() + + if not _accept(self.fp.read(6)): + raise SyntaxError("Not a FITS file") + + # FIXME: add more sanity checks here; mandatory header items + # include SIMPLE, BITPIX, NAXIS, etc. + + self.fp.seek(offset) + + # make something up + self.mode = "F" + self.size = 1, 1 + + loader = self._load() + if loader: + loader.open(self) + + def _load(self): + return _handler + + +def _save(im, fp, filename): + if _handler is None or not hasattr("_handler", "save"): + raise IOError("FITS save handler not installed") + _handler.save(im, fp, filename) + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(FITSStubImageFile.format, FITSStubImageFile, _accept) +Image.register_save(FITSStubImageFile.format, _save) + +Image.register_extension(FITSStubImageFile.format, ".fit") +Image.register_extension(FITSStubImageFile.format, ".fits") diff --git a/Lib/site-packages/PIL/FliImagePlugin.py b/Lib/site-packages/PIL/FliImagePlugin.py new file mode 100644 index 0000000..a07dc29 --- /dev/null +++ b/Lib/site-packages/PIL/FliImagePlugin.py @@ -0,0 +1,188 @@ +# +# The Python Imaging Library. +# $Id$ +# +# FLI/FLC file handling. +# +# History: +# 95-09-01 fl Created +# 97-01-03 fl Fixed parser, setup decoder tile +# 98-07-15 fl Renamed offset attribute to avoid name clash +# +# Copyright (c) Secret Labs AB 1997-98. +# Copyright (c) Fredrik Lundh 1995-97. +# +# See the README file for information on usage and redistribution. +# + + +from PIL import Image, ImageFile, ImagePalette, _binary + +__version__ = "0.2" + +i8 = _binary.i8 +i16 = _binary.i16le +i32 = _binary.i32le +o8 = _binary.o8 + + +# +# decoder + +def _accept(prefix): + return len(prefix) >= 6 and i16(prefix[4:6]) in [0xAF11, 0xAF12] + + +## +# Image plugin for the FLI/FLC animation format. Use the seek +# method to load individual frames. 
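+#
+# Rough usage sketch (not part of the upstream module; "demo.fli" is a
+# placeholder path): frames are reached through seek()/tell(), and the frame
+# delay decoded from the header is published as info["duration"] in
+# milliseconds.
+#
+#     from PIL import Image
+#
+#     im = Image.open("demo.fli")
+#     print(im.n_frames, im.info["duration"])
+#     try:
+#         while True:
+#             im.seek(im.tell() + 1)   # advances until EOFError is raised
+#     except EOFError:
+#         pass                         # past the last frame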
+ +class FliImageFile(ImageFile.ImageFile): + + format = "FLI" + format_description = "Autodesk FLI/FLC Animation" + + def _open(self): + + # HEAD + s = self.fp.read(128) + magic = i16(s[4:6]) + if not (magic in [0xAF11, 0xAF12] and + i16(s[14:16]) in [0, 3] and # flags + s[20:22] == b"\x00\x00"): # reserved + raise SyntaxError("not an FLI/FLC file") + + # image characteristics + self.mode = "P" + self.size = i16(s[8:10]), i16(s[10:12]) + + # animation speed + duration = i32(s[16:20]) + if magic == 0xAF11: + duration = (duration * 1000) / 70 + self.info["duration"] = duration + + # look for palette + palette = [(a, a, a) for a in range(256)] + + s = self.fp.read(16) + + self.__offset = 128 + + if i16(s[4:6]) == 0xF100: + # prefix chunk; ignore it + self.__offset = self.__offset + i32(s) + s = self.fp.read(16) + + if i16(s[4:6]) == 0xF1FA: + # look for palette chunk + s = self.fp.read(6) + if i16(s[4:6]) == 11: + self._palette(palette, 2) + elif i16(s[4:6]) == 4: + self._palette(palette, 0) + + palette = [o8(r)+o8(g)+o8(b) for (r, g, b) in palette] + self.palette = ImagePalette.raw("RGB", b"".join(palette)) + + # set things up to decode first frame + self.__frame = -1 + self.__fp = self.fp + self.__rewind = self.fp.tell() + self._n_frames = None + self._is_animated = None + self.seek(0) + + def _palette(self, palette, shift): + # load palette + + i = 0 + for e in range(i16(self.fp.read(2))): + s = self.fp.read(2) + i = i + i8(s[0]) + n = i8(s[1]) + if n == 0: + n = 256 + s = self.fp.read(n * 3) + for n in range(0, len(s), 3): + r = i8(s[n]) << shift + g = i8(s[n+1]) << shift + b = i8(s[n+2]) << shift + palette[i] = (r, g, b) + i += 1 + + @property + def n_frames(self): + if self._n_frames is None: + current = self.tell() + try: + while True: + self.seek(self.tell() + 1) + except EOFError: + self._n_frames = self.tell() + 1 + self.seek(current) + return self._n_frames + + @property + def is_animated(self): + if self._is_animated is None: + current = self.tell() + + try: + self.seek(1) + self._is_animated = True + except EOFError: + self._is_animated = False + + self.seek(current) + return self._is_animated + + def seek(self, frame): + if frame == self.__frame: + return + if frame < self.__frame: + self._seek(0) + + last_frame = self.__frame + for f in range(self.__frame + 1, frame + 1): + try: + self._seek(f) + except EOFError: + self.seek(last_frame) + raise EOFError("no more images in FLI file") + + def _seek(self, frame): + if frame == 0: + self.__frame = -1 + self.__fp.seek(self.__rewind) + self.__offset = 128 + + if frame != self.__frame + 1: + raise ValueError("cannot seek to frame %d" % frame) + self.__frame = frame + + # move to next frame + self.fp = self.__fp + self.fp.seek(self.__offset) + + s = self.fp.read(4) + if not s: + raise EOFError + + framesize = i32(s) + + self.decodermaxblock = framesize + self.tile = [("fli", (0, 0)+self.size, self.__offset, None)] + + self.__offset += framesize + + def tell(self): + return self.__frame + +# +# registry + +Image.register_open(FliImageFile.format, FliImageFile, _accept) + +Image.register_extension(FliImageFile.format, ".fli") +Image.register_extension(FliImageFile.format, ".flc") diff --git a/Lib/site-packages/PIL/FontFile.py b/Lib/site-packages/PIL/FontFile.py new file mode 100644 index 0000000..db8e6be --- /dev/null +++ b/Lib/site-packages/PIL/FontFile.py @@ -0,0 +1,115 @@ +# +# The Python Imaging Library +# $Id$ +# +# base class for raster font file parsers +# +# history: +# 1997-06-05 fl created +# 1997-08-19 fl restrict image width 
+# +# Copyright (c) 1997-1998 by Secret Labs AB +# Copyright (c) 1997-1998 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import os +from PIL import Image, _binary + +WIDTH = 800 + + +def puti16(fp, values): + # write network order (big-endian) 16-bit sequence + for v in values: + if v < 0: + v += 65536 + fp.write(_binary.o16be(v)) + + +## +# Base class for raster font file handlers. + +class FontFile(object): + + bitmap = None + + def __init__(self): + + self.info = {} + self.glyph = [None] * 256 + + def __getitem__(self, ix): + return self.glyph[ix] + + def compile(self): + "Create metrics and bitmap" + + if self.bitmap: + return + + # create bitmap large enough to hold all data + h = w = maxwidth = 0 + lines = 1 + for glyph in self: + if glyph: + d, dst, src, im = glyph + h = max(h, src[3] - src[1]) + w = w + (src[2] - src[0]) + if w > WIDTH: + lines += 1 + w = (src[2] - src[0]) + maxwidth = max(maxwidth, w) + + xsize = maxwidth + ysize = lines * h + + if xsize == 0 and ysize == 0: + return "" + + self.ysize = h + + # paste glyphs into bitmap + self.bitmap = Image.new("1", (xsize, ysize)) + self.metrics = [None] * 256 + x = y = 0 + for i in range(256): + glyph = self[i] + if glyph: + d, dst, src, im = glyph + xx = src[2] - src[0] + # yy = src[3] - src[1] + x0, y0 = x, y + x = x + xx + if x > WIDTH: + x, y = 0, y + h + x0, y0 = x, y + x = xx + s = src[0] + x0, src[1] + y0, src[2] + x0, src[3] + y0 + self.bitmap.paste(im.crop(src), s) + # print chr(i), dst, s + self.metrics[i] = d, dst, s + + def save(self, filename): + "Save font" + + self.compile() + + # font data + self.bitmap.save(os.path.splitext(filename)[0] + ".pbm", "PNG") + + # font metrics + fp = open(os.path.splitext(filename)[0] + ".pil", "wb") + fp.write(b"PILfont\n") + fp.write((";;;;;;%d;\n" % self.ysize).encode('ascii')) # HACK!!! + fp.write(b"DATA\n") + for id in range(256): + m = self.metrics[id] + if not m: + puti16(fp, [0] * 10) + else: + puti16(fp, m[0] + m[1] + m[2]) + fp.close() + +# End of file diff --git a/Lib/site-packages/PIL/FpxImagePlugin.py b/Lib/site-packages/PIL/FpxImagePlugin.py new file mode 100644 index 0000000..aefc574 --- /dev/null +++ b/Lib/site-packages/PIL/FpxImagePlugin.py @@ -0,0 +1,226 @@ +# +# THIS IS WORK IN PROGRESS +# +# The Python Imaging Library. +# $Id$ +# +# FlashPix support for PIL +# +# History: +# 97-01-25 fl Created (reads uncompressed RGB images only) +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. +# + + +from PIL import Image, ImageFile +from PIL.OleFileIO import i8, i32, MAGIC, OleFileIO + +__version__ = "0.1" + + +# we map from colour field tuples to (mode, rawmode) descriptors +MODES = { + # opacity + (0x00007ffe): ("A", "L"), + # monochrome + (0x00010000,): ("L", "L"), + (0x00018000, 0x00017ffe): ("RGBA", "LA"), + # photo YCC + (0x00020000, 0x00020001, 0x00020002): ("RGB", "YCC;P"), + (0x00028000, 0x00028001, 0x00028002, 0x00027ffe): ("RGBA", "YCCA;P"), + # standard RGB (NIFRGB) + (0x00030000, 0x00030001, 0x00030002): ("RGB", "RGB"), + (0x00038000, 0x00038001, 0x00038002, 0x00037ffe): ("RGBA", "RGBA"), +} + + +# +# -------------------------------------------------------------------- + +def _accept(prefix): + return prefix[:8] == MAGIC + + +## +# Image plugin for the FlashPix images. 
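+#
+# Rough usage sketch (not part of the upstream module; "photo.fpx" is a
+# placeholder path): a FlashPix file is an OLE compound document, so the
+# plugin opens it with OleFileIO, verifies the root CLSID, and builds its
+# tile list from the highest-resolution subimage described in the
+# "Image Contents" property set.
+#
+#     from PIL import Image
+#
+#     im = Image.open("photo.fpx")
+#     print(im.size, im.mode)   # geometry/mode of the highest resolution
+#     im.load()                 # tiles are decoded only when needed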
+ +class FpxImageFile(ImageFile.ImageFile): + + format = "FPX" + format_description = "FlashPix" + + def _open(self): + # + # read the OLE directory and see if this is a likely + # to be a FlashPix file + + try: + self.ole = OleFileIO(self.fp) + except IOError: + raise SyntaxError("not an FPX file; invalid OLE file") + + if self.ole.root.clsid != "56616700-C154-11CE-8553-00AA00A1F95B": + raise SyntaxError("not an FPX file; bad root CLSID") + + self._open_index(1) + + def _open_index(self, index=1): + # + # get the Image Contents Property Set + + prop = self.ole.getproperties([ + "Data Object Store %06d" % index, + "\005Image Contents" + ]) + + # size (highest resolution) + + self.size = prop[0x1000002], prop[0x1000003] + + size = max(self.size) + i = 1 + while size > 64: + size = size / 2 + i += 1 + self.maxid = i - 1 + + # mode. instead of using a single field for this, flashpix + # requires you to specify the mode for each channel in each + # resolution subimage, and leaves it to the decoder to make + # sure that they all match. for now, we'll cheat and assume + # that this is always the case. + + id = self.maxid << 16 + + s = prop[0x2000002 | id] + + colors = [] + for i in range(i32(s, 4)): + # note: for now, we ignore the "uncalibrated" flag + colors.append(i32(s, 8+i*4) & 0x7fffffff) + + self.mode, self.rawmode = MODES[tuple(colors)] + + # load JPEG tables, if any + self.jpeg = {} + for i in range(256): + id = 0x3000001 | (i << 16) + if id in prop: + self.jpeg[i] = prop[id] + + # print len(self.jpeg), "tables loaded" + + self._open_subimage(1, self.maxid) + + def _open_subimage(self, index=1, subimage=0): + # + # setup tile descriptors for a given subimage + + stream = [ + "Data Object Store %06d" % index, + "Resolution %04d" % subimage, + "Subimage 0000 Header" + ] + + fp = self.ole.openstream(stream) + + # skip prefix + fp.read(28) + + # header stream + s = fp.read(36) + + size = i32(s, 4), i32(s, 8) + # tilecount = i32(s, 12) + tilesize = i32(s, 16), i32(s, 20) + # channels = i32(s, 24) + offset = i32(s, 28) + length = i32(s, 32) + + # print size, self.mode, self.rawmode + + if size != self.size: + raise IOError("subimage mismatch") + + # get tile descriptors + fp.seek(28 + offset) + s = fp.read(i32(s, 12) * length) + + x = y = 0 + xsize, ysize = size + xtile, ytile = tilesize + self.tile = [] + + for i in range(0, len(s), length): + + compression = i32(s, i+8) + + if compression == 0: + self.tile.append(("raw", (x, y, x+xtile, y+ytile), + i32(s, i) + 28, (self.rawmode))) + + elif compression == 1: + + # FIXME: the fill decoder is not implemented + self.tile.append(("fill", (x, y, x+xtile, y+ytile), + i32(s, i) + 28, (self.rawmode, s[12:16]))) + + elif compression == 2: + + internal_color_conversion = i8(s[14]) + jpeg_tables = i8(s[15]) + rawmode = self.rawmode + + if internal_color_conversion: + # The image is stored as usual (usually YCbCr). + if rawmode == "RGBA": + # For "RGBA", data is stored as YCbCrA based on + # negative RGB. The following trick works around + # this problem : + jpegmode, rawmode = "YCbCrK", "CMYK" + else: + jpegmode = None # let the decoder decide + + else: + # The image is stored as defined by rawmode + jpegmode = rawmode + + self.tile.append(("jpeg", (x, y, x+xtile, y+ytile), + i32(s, i) + 28, (rawmode, jpegmode))) + + # FIXME: jpeg tables are tile dependent; the prefix + # data must be placed in the tile descriptor itself! 
+ + if jpeg_tables: + self.tile_prefix = self.jpeg[jpeg_tables] + + else: + raise IOError("unknown/invalid compression") + + x = x + xtile + if x >= xsize: + x, y = 0, y + ytile + if y >= ysize: + break # isn't really required + + self.stream = stream + self.fp = None + + def load(self): + + if not self.fp: + self.fp = self.ole.openstream(self.stream[:2] + + ["Subimage 0000 Data"]) + + ImageFile.ImageFile.load(self) + +# +# -------------------------------------------------------------------- + +Image.register_open(FpxImageFile.format, FpxImageFile, _accept) + +Image.register_extension(FpxImageFile.format, ".fpx") diff --git a/Lib/site-packages/PIL/GbrImagePlugin.py b/Lib/site-packages/PIL/GbrImagePlugin.py new file mode 100644 index 0000000..15282ec --- /dev/null +++ b/Lib/site-packages/PIL/GbrImagePlugin.py @@ -0,0 +1,71 @@ +# +# The Python Imaging Library +# $Id$ +# +# load a GIMP brush file +# +# History: +# 96-03-14 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image, ImageFile, _binary + +i32 = _binary.i32be + + +def _accept(prefix): + return len(prefix) >= 8 and i32(prefix) >= 20 and i32(prefix[4:8]) == 1 + + +## +# Image plugin for the GIMP brush format. + +class GbrImageFile(ImageFile.ImageFile): + + format = "GBR" + format_description = "GIMP brush file" + + def _open(self): + + header_size = i32(self.fp.read(4)) + version = i32(self.fp.read(4)) + if header_size < 20 or version != 1: + raise SyntaxError("not a GIMP brush") + + width = i32(self.fp.read(4)) + height = i32(self.fp.read(4)) + color_depth = i32(self.fp.read(4)) + if width <= 0 or height <= 0 or color_depth != 1: + raise SyntaxError("not a GIMP brush") + + comment = self.fp.read(header_size - 20)[:-1] + + self.mode = "L" + self.size = width, height + + self.info["comment"] = comment + + # Since the brush is so small, we read the data immediately + self.data = self.fp.read(width * height) + + def load(self): + + if not self.data: + return + + # create an image out of the brush data block + self.im = Image.core.new(self.mode, self.size) + self.im.frombytes(self.data) + self.data = b"" + +# +# registry + +Image.register_open(GbrImageFile.format, GbrImageFile, _accept) + +Image.register_extension(GbrImageFile.format, ".gbr") diff --git a/Lib/site-packages/PIL/GdImageFile.py b/Lib/site-packages/PIL/GdImageFile.py new file mode 100644 index 0000000..ae3500f --- /dev/null +++ b/Lib/site-packages/PIL/GdImageFile.py @@ -0,0 +1,92 @@ +# +# The Python Imaging Library. +# $Id$ +# +# GD file handling +# +# History: +# 1996-04-12 fl Created +# +# Copyright (c) 1997 by Secret Labs AB. +# Copyright (c) 1996 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + + +# NOTE: This format cannot be automatically recognized, so the +# class is not registered for use with Image.open(). To open a +# gd file, use the GdImageFile.open() function instead. + +# THE GD FORMAT IS NOT DESIGNED FOR DATA INTERCHANGE. This +# implementation is provided for convenience and demonstrational +# purposes only. + + +from PIL import ImageFile, ImagePalette, _binary +from PIL._util import isPath + +__version__ = "0.1" + +try: + import builtins +except ImportError: + import __builtin__ + builtins = __builtin__ + +i16 = _binary.i16be + + +## +# Image plugin for the GD uncompressed format. Note that this format +# is not supported by the standard Image.open function. 
To use +# this plugin, you have to import the GdImageFile module and +# use the GdImageFile.open function. + +class GdImageFile(ImageFile.ImageFile): + + format = "GD" + format_description = "GD uncompressed images" + + def _open(self): + + # Header + s = self.fp.read(775) + + self.mode = "L" # FIXME: "P" + self.size = i16(s[0:2]), i16(s[2:4]) + + # transparency index + tindex = i16(s[5:7]) + if tindex < 256: + self.info["transparent"] = tindex + + self.palette = ImagePalette.raw("RGB", s[7:]) + + self.tile = [("raw", (0, 0)+self.size, 775, ("L", 0, -1))] + + +## +# Load texture from a GD image file. +# +# @param filename GD file name, or an opened file handle. +# @param mode Optional mode. In this version, if the mode argument +# is given, it must be "r". +# @return An image instance. +# @exception IOError If the image could not be read. + +def open(fp, mode="r"): + + if mode != "r": + raise ValueError("bad mode") + + if isPath(fp): + filename = fp + fp = builtins.open(fp, "rb") + else: + filename = "" + + try: + return GdImageFile(fp, filename) + except SyntaxError: + raise IOError("cannot identify this image file") diff --git a/Lib/site-packages/PIL/GifImagePlugin.py b/Lib/site-packages/PIL/GifImagePlugin.py new file mode 100644 index 0000000..b9d2588 --- /dev/null +++ b/Lib/site-packages/PIL/GifImagePlugin.py @@ -0,0 +1,685 @@ +# +# The Python Imaging Library. +# $Id$ +# +# GIF file handling +# +# History: +# 1995-09-01 fl Created +# 1996-12-14 fl Added interlace support +# 1996-12-30 fl Added animation support +# 1997-01-05 fl Added write support, fixed local colour map bug +# 1997-02-23 fl Make sure to load raster data in getdata() +# 1997-07-05 fl Support external decoder (0.4) +# 1998-07-09 fl Handle all modes when saving (0.5) +# 1998-07-15 fl Renamed offset attribute to avoid name clash +# 2001-04-16 fl Added rewind support (seek to frame 0) (0.6) +# 2001-04-17 fl Added palette optimization (0.7) +# 2002-06-06 fl Added transparency support for save (0.8) +# 2004-02-24 fl Disable interlacing for small images +# +# Copyright (c) 1997-2004 by Secret Labs AB +# Copyright (c) 1995-2004 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image, ImageFile, ImagePalette, \ + ImageChops, ImageSequence, _binary + +__version__ = "0.9" + + +# -------------------------------------------------------------------- +# Helpers + +i8 = _binary.i8 +i16 = _binary.i16le +o8 = _binary.o8 +o16 = _binary.o16le + + +# -------------------------------------------------------------------- +# Identify/read GIF files + +def _accept(prefix): + return prefix[:6] in [b"GIF87a", b"GIF89a"] + + +## +# Image plugin for GIF images. This plugin supports both GIF87 and +# GIF89 images. 
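+#
+# Rough usage sketch (not part of the upstream module; "clip.gif" is a
+# placeholder path): in addition to the seek()/tell() frame interface, the
+# parser records metadata from the GIF extension blocks in im.info, for
+# example "duration" (milliseconds), "transparency", "background" and "loop".
+#
+#     from PIL import Image
+#
+#     im = Image.open("clip.gif")
+#     print(im.is_animated, im.n_frames)
+#     for frame in range(im.n_frames):
+#         im.seek(frame)
+#         print(frame, im.info.get("duration"), im.info.get("transparency"))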
+ +class GifImageFile(ImageFile.ImageFile): + + format = "GIF" + format_description = "Compuserve GIF" + global_palette = None + + def data(self): + s = self.fp.read(1) + if s and i8(s): + return self.fp.read(i8(s)) + return None + + def _open(self): + + # Screen + s = self.fp.read(13) + if s[:6] not in [b"GIF87a", b"GIF89a"]: + raise SyntaxError("not a GIF file") + + self.info["version"] = s[:6] + self.size = i16(s[6:]), i16(s[8:]) + self.tile = [] + flags = i8(s[10]) + bits = (flags & 7) + 1 + + if flags & 128: + # get global palette + self.info["background"] = i8(s[11]) + # check if palette contains colour indices + p = self.fp.read(3 << bits) + for i in range(0, len(p), 3): + if not (i//3 == i8(p[i]) == i8(p[i+1]) == i8(p[i+2])): + p = ImagePalette.raw("RGB", p) + self.global_palette = self.palette = p + break + + self.__fp = self.fp # FIXME: hack + self.__rewind = self.fp.tell() + self._n_frames = None + self._is_animated = None + self._seek(0) # get ready to read first frame + + @property + def n_frames(self): + if self._n_frames is None: + current = self.tell() + try: + while True: + self.seek(self.tell() + 1) + except EOFError: + self._n_frames = self.tell() + 1 + self.seek(current) + return self._n_frames + + @property + def is_animated(self): + if self._is_animated is None: + current = self.tell() + + try: + self.seek(1) + self._is_animated = True + except EOFError: + self._is_animated = False + + self.seek(current) + return self._is_animated + + def seek(self, frame): + if frame == self.__frame: + return + if frame < self.__frame: + self._seek(0) + + last_frame = self.__frame + for f in range(self.__frame + 1, frame + 1): + try: + self._seek(f) + except EOFError: + self.seek(last_frame) + raise EOFError("no more images in GIF file") + + def _seek(self, frame): + + if frame == 0: + # rewind + self.__offset = 0 + self.dispose = None + self.dispose_extent = [0, 0, 0, 0] # x0, y0, x1, y1 + self.__frame = -1 + self.__fp.seek(self.__rewind) + self._prev_im = None + self.disposal_method = 0 + else: + # ensure that the previous frame was loaded + if not self.im: + self.load() + + if frame != self.__frame + 1: + raise ValueError("cannot seek to frame %d" % frame) + self.__frame = frame + + self.tile = [] + + self.fp = self.__fp + if self.__offset: + # backup to last frame + self.fp.seek(self.__offset) + while self.data(): + pass + self.__offset = 0 + + if self.dispose: + self.im.paste(self.dispose, self.dispose_extent) + + from copy import copy + self.palette = copy(self.global_palette) + + while True: + + s = self.fp.read(1) + if not s or s == b";": + break + + elif s == b"!": + # + # extensions + # + s = self.fp.read(1) + block = self.data() + if i8(s) == 249: + # + # graphic control extension + # + flags = i8(block[0]) + if flags & 1: + self.info["transparency"] = i8(block[3]) + self.info["duration"] = i16(block[1:3]) * 10 + + # disposal method - find the value of bits 4 - 6 + dispose_bits = 0b00011100 & flags + dispose_bits = dispose_bits >> 2 + if dispose_bits: + # only set the dispose if it is not + # unspecified. 
I'm not sure if this is + # correct, but it seems to prevent the last + # frame from looking odd for some animations + self.disposal_method = dispose_bits + elif i8(s) == 255: + # + # application extension + # + self.info["extension"] = block, self.fp.tell() + if block[:11] == b"NETSCAPE2.0": + block = self.data() + if len(block) >= 3 and i8(block[0]) == 1: + self.info["loop"] = i16(block[1:3]) + while self.data(): + pass + + elif s == b",": + # + # local image + # + s = self.fp.read(9) + + # extent + x0, y0 = i16(s[0:]), i16(s[2:]) + x1, y1 = x0 + i16(s[4:]), y0 + i16(s[6:]) + self.dispose_extent = x0, y0, x1, y1 + flags = i8(s[8]) + + interlace = (flags & 64) != 0 + + if flags & 128: + bits = (flags & 7) + 1 + self.palette =\ + ImagePalette.raw("RGB", self.fp.read(3 << bits)) + + # image data + bits = i8(self.fp.read(1)) + self.__offset = self.fp.tell() + self.tile = [("gif", + (x0, y0, x1, y1), + self.__offset, + (bits, interlace))] + break + + else: + pass + # raise IOError, "illegal GIF tag `%x`" % i8(s) + + try: + if self.disposal_method < 2: + # do not dispose or none specified + self.dispose = None + elif self.disposal_method == 2: + # replace with background colour + self.dispose = Image.core.fill("P", self.size, + self.info["background"]) + else: + # replace with previous contents + if self.im: + self.dispose = self.im.copy() + + # only dispose the extent in this frame + if self.dispose: + self.dispose = self.dispose.crop(self.dispose_extent) + except (AttributeError, KeyError): + pass + + if not self.tile: + # self.__fp = None + raise EOFError + + self.mode = "L" + if self.palette: + self.mode = "P" + + def tell(self): + return self.__frame + + def load_end(self): + ImageFile.ImageFile.load_end(self) + + # if the disposal method is 'do not dispose', transparent + # pixels should show the content of the previous frame + if self._prev_im and self.disposal_method == 1: + # we do this by pasting the updated area onto the previous + # frame which we then use as the current image content + updated = self.im.crop(self.dispose_extent) + self._prev_im.paste(updated, self.dispose_extent, + updated.convert('RGBA')) + self.im = self._prev_im + self._prev_im = self.im.copy() + +# -------------------------------------------------------------------- +# Write GIF files + +try: + import _imaging_gif +except ImportError: + _imaging_gif = None + +RAWMODE = { + "1": "L", + "L": "L", + "P": "P", +} + + +def _convert_mode(im, initial_call=False): + # convert on the fly (EXPERIMENTAL -- I'm not sure PIL + # should automatically convert images on save...) 
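+    # In short: anything whose base mode is "RGB" is converted to a
+    # palette ("P") image -- on the initial call the palette size is
+    # taken from the source palette when one exists, otherwise 256 --
+    # while every other mode falls back to greyscale ("L"), so the
+    # result is always a mode the raw GIF encoder accepts (RAWMODE).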
+ if Image.getmodebase(im.mode) == "RGB": + if initial_call: + palette_size = 256 + if im.palette: + palette_size = len(im.palette.getdata()[1]) // 3 + return im.convert("P", palette=1, colors=palette_size) + else: + return im.convert("P") + return im.convert("L") + + +def _save_all(im, fp, filename): + _save(im, fp, filename, save_all=True) + + +def _save(im, fp, filename, save_all=False): + + im.encoderinfo.update(im.info) + if _imaging_gif: + # call external driver + try: + _imaging_gif.save(im, fp, filename) + return + except IOError: + pass # write uncompressed file + + if im.mode in RAWMODE: + im_out = im.copy() + else: + im_out = _convert_mode(im, True) + + # header + try: + palette = im.encoderinfo["palette"] + except KeyError: + palette = None + im.encoderinfo["optimize"] = im.encoderinfo.get("optimize", True) + + if save_all: + previous = None + + first_frame = None + for im_frame in ImageSequence.Iterator(im): + im_frame = _convert_mode(im_frame) + + # To specify duration, add the time in milliseconds to getdata(), + # e.g. getdata(im_frame, duration=1000) + if not previous: + # global header + first_frame = getheader(im_frame, palette, im.encoderinfo)[0] + first_frame += getdata(im_frame, (0, 0), **im.encoderinfo) + else: + if first_frame: + for s in first_frame: + fp.write(s) + first_frame = None + + # delta frame + delta = ImageChops.subtract_modulo(im_frame, previous.copy()) + bbox = delta.getbbox() + + if bbox: + # compress difference + for s in getdata(im_frame.crop(bbox), + bbox[:2], **im.encoderinfo): + fp.write(s) + else: + # FIXME: what should we do in this case? + pass + previous = im_frame + if first_frame: + save_all = False + if not save_all: + header = getheader(im_out, palette, im.encoderinfo)[0] + for s in header: + fp.write(s) + + flags = 0 + + if get_interlace(im): + flags = flags | 64 + + # local image header + _get_local_header(fp, im, (0, 0), flags) + + im_out.encoderconfig = (8, get_interlace(im)) + ImageFile._save(im_out, fp, [("gif", (0, 0)+im.size, 0, + RAWMODE[im_out.mode])]) + + fp.write(b"\0") # end of image data + + fp.write(b";") # end of file + + if hasattr(fp, "flush"): + fp.flush() + + +def get_interlace(im): + try: + interlace = im.encoderinfo["interlace"] + except KeyError: + interlace = 1 + + # workaround for @PIL153 + if min(im.size) < 16: + interlace = 0 + + return interlace + + +def _get_local_header(fp, im, offset, flags): + transparent_color_exists = False + try: + transparency = im.encoderinfo["transparency"] + except KeyError: + pass + else: + transparency = int(transparency) + # optimize the block away if transparent color is not used + transparent_color_exists = True + + if _get_optimize(im, im.encoderinfo): + used_palette_colors = _get_used_palette_colors(im) + + # adjust the transparency index after optimize + if len(used_palette_colors) < 256: + for i in range(len(used_palette_colors)): + if used_palette_colors[i] == transparency: + transparency = i + transparent_color_exists = True + break + else: + transparent_color_exists = False + + if "duration" in im.encoderinfo: + duration = int(im.encoderinfo["duration"] / 10) + else: + duration = 0 + if transparent_color_exists or duration != 0: + transparency_flag = 1 if transparent_color_exists else 0 + if not transparent_color_exists: + transparency = 0 + + fp.write(b"!" 
+ + o8(249) + # extension intro + o8(4) + # length + o8(transparency_flag) + # transparency info present + o16(duration) + # duration + o8(transparency) + # transparency index + o8(0)) + + if "loop" in im.encoderinfo: + number_of_loops = im.encoderinfo["loop"] + fp.write(b"!" + + o8(255) + # extension intro + o8(11) + + b"NETSCAPE2.0" + + o8(3) + + o8(1) + + o16(number_of_loops) + # number of loops + o8(0)) + fp.write(b"," + + o16(offset[0]) + # offset + o16(offset[1]) + + o16(im.size[0]) + # size + o16(im.size[1]) + + o8(flags) + # flags + o8(8)) # bits + + +def _save_netpbm(im, fp, filename): + + # + # If you need real GIF compression and/or RGB quantization, you + # can use the external NETPBM/PBMPLUS utilities. See comments + # below for information on how to enable this. + + import os + from subprocess import Popen, check_call, PIPE, CalledProcessError + import tempfile + file = im._dump() + + if im.mode != "RGB": + with open(filename, 'wb') as f: + stderr = tempfile.TemporaryFile() + check_call(["ppmtogif", file], stdout=f, stderr=stderr) + else: + with open(filename, 'wb') as f: + + # Pipe ppmquant output into ppmtogif + # "ppmquant 256 %s | ppmtogif > %s" % (file, filename) + quant_cmd = ["ppmquant", "256", file] + togif_cmd = ["ppmtogif"] + stderr = tempfile.TemporaryFile() + quant_proc = Popen(quant_cmd, stdout=PIPE, stderr=stderr) + stderr = tempfile.TemporaryFile() + togif_proc = Popen(togif_cmd, stdin=quant_proc.stdout, stdout=f, + stderr=stderr) + + # Allow ppmquant to receive SIGPIPE if ppmtogif exits + quant_proc.stdout.close() + + retcode = quant_proc.wait() + if retcode: + raise CalledProcessError(retcode, quant_cmd) + + retcode = togif_proc.wait() + if retcode: + raise CalledProcessError(retcode, togif_cmd) + + try: + os.unlink(file) + except OSError: + pass + + +# -------------------------------------------------------------------- +# GIF utilities + +def _get_optimize(im, info): + return im.mode in ("P", "L") and info and info.get("optimize", 0) + + +def _get_used_palette_colors(im): + used_palette_colors = [] + + # check which colors are used + i = 0 + for count in im.histogram(): + if count: + used_palette_colors.append(i) + i += 1 + + return used_palette_colors + + +def getheader(im, palette=None, info=None): + """Return a list of strings representing a GIF header""" + + # Header Block + # http://www.matthewflickinger.com/lab/whatsinagif/bits_and_bytes.asp + + version = b"87a" + for extensionKey in ["transparency", "duration", "loop"]: + if info and extensionKey in info and \ + not (extensionKey == "duration" and info[extensionKey] == 0): + version = b"89a" + break + else: + if im.info.get("version") == "89a": + version = b"89a" + + header = [ + b"GIF"+version + # signature + version + o16(im.size[0]) + # canvas width + o16(im.size[1]) # canvas height + ] + + if im.mode == "P": + if palette and isinstance(palette, bytes): + source_palette = palette[:768] + else: + source_palette = im.im.getpalette("RGB")[:768] + else: # L-mode + if palette and isinstance(palette, bytes): + source_palette = palette[:768] + else: + source_palette = bytearray([i//3 for i in range(768)]) + + used_palette_colors = palette_bytes = None + + if _get_optimize(im, info): + used_palette_colors = _get_used_palette_colors(im) + + # create the new palette if not every color is used + if len(used_palette_colors) < 256: + palette_bytes = b"" + new_positions = {} + + i = 0 + # pick only the used colors from the palette + for oldPosition in used_palette_colors: + palette_bytes += 
source_palette[oldPosition*3:oldPosition*3+3] + new_positions[oldPosition] = i + i += 1 + + # replace the palette color id of all pixel with the new id + image_bytes = bytearray(im.tobytes()) + for i in range(len(image_bytes)): + image_bytes[i] = new_positions[image_bytes[i]] + im.frombytes(bytes(image_bytes)) + new_palette_bytes = (palette_bytes + + (768 - len(palette_bytes)) * b'\x00') + im.putpalette(new_palette_bytes) + im.palette = ImagePalette.ImagePalette("RGB", + palette=palette_bytes, + size=len(palette_bytes)) + + if not palette_bytes: + palette_bytes = source_palette + + # Logical Screen Descriptor + # calculate the palette size for the header + import math + color_table_size = int(math.ceil(math.log(len(palette_bytes)//3, 2)))-1 + if color_table_size < 0: + color_table_size = 0 + # size of global color table + global color table flag + header.append(o8(color_table_size + 128)) + # background + reserved/aspect + if info and "background" in info: + background = info["background"] + elif "background" in im.info: + # This elif is redundant within GifImagePlugin + # since im.info parameters are bundled into the info dictionary + # However, external scripts may call getheader directly + # So this maintains earlier behaviour + background = im.info["background"] + else: + background = 0 + header.append(o8(background) + o8(0)) + # end of Logical Screen Descriptor + + # add the missing amount of bytes + # the palette has to be 2< 0: + palette_bytes += o8(0) * 3 * actual_target_size_diff + + # Header + Logical Screen Descriptor + Global Color Table + header.append(palette_bytes) + return header, used_palette_colors + + +def getdata(im, offset=(0, 0), **params): + """Return a list of strings representing this image. + The first string is a local image header, the rest contains + encoded image data.""" + + class Collector(object): + data = [] + + def write(self, data): + self.data.append(data) + + im.load() # make sure raster data is available + + fp = Collector() + + try: + im.encoderinfo = params + + # local image header + _get_local_header(fp, im, offset, 0) + + ImageFile._save(im, fp, [("gif", (0, 0)+im.size, 0, RAWMODE[im.mode])]) + + fp.write(b"\0") # end of image data + + finally: + del im.encoderinfo + + return fp.data + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(GifImageFile.format, GifImageFile, _accept) +Image.register_save(GifImageFile.format, _save) +Image.register_save_all(GifImageFile.format, _save_all) +Image.register_extension(GifImageFile.format, ".gif") +Image.register_mime(GifImageFile.format, "image/gif") + +# +# Uncomment the following line if you wish to use NETPBM/PBMPLUS +# instead of the built-in "uncompressed" GIF encoder + +# Image.register_save(GifImageFile.format, _save_netpbm) diff --git a/Lib/site-packages/PIL/GimpGradientFile.py b/Lib/site-packages/PIL/GimpGradientFile.py new file mode 100644 index 0000000..45af573 --- /dev/null +++ b/Lib/site-packages/PIL/GimpGradientFile.py @@ -0,0 +1,137 @@ +# +# Python Imaging Library +# $Id$ +# +# stuff to read (and render) GIMP gradient files +# +# History: +# 97-08-23 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. 
+# + +from math import pi, log, sin, sqrt +from PIL._binary import o8 + +# -------------------------------------------------------------------- +# Stuff to translate curve segments to palette values (derived from +# the corresponding code in GIMP, written by Federico Mena Quintero. +# See the GIMP distribution for more information.) +# + +EPSILON = 1e-10 + + +def linear(middle, pos): + if pos <= middle: + if middle < EPSILON: + return 0.0 + else: + return 0.5 * pos / middle + else: + pos = pos - middle + middle = 1.0 - middle + if middle < EPSILON: + return 1.0 + else: + return 0.5 + 0.5 * pos / middle + + +def curved(middle, pos): + return pos ** (log(0.5) / log(max(middle, EPSILON))) + + +def sine(middle, pos): + return (sin((-pi / 2.0) + pi * linear(middle, pos)) + 1.0) / 2.0 + + +def sphere_increasing(middle, pos): + return sqrt(1.0 - (linear(middle, pos) - 1.0) ** 2) + + +def sphere_decreasing(middle, pos): + return 1.0 - sqrt(1.0 - linear(middle, pos) ** 2) + +SEGMENTS = [linear, curved, sine, sphere_increasing, sphere_decreasing] + + +class GradientFile(object): + + gradient = None + + def getpalette(self, entries=256): + + palette = [] + + ix = 0 + x0, x1, xm, rgb0, rgb1, segment = self.gradient[ix] + + for i in range(entries): + + x = i / float(entries-1) + + while x1 < x: + ix += 1 + x0, x1, xm, rgb0, rgb1, segment = self.gradient[ix] + + w = x1 - x0 + + if w < EPSILON: + scale = segment(0.5, 0.5) + else: + scale = segment((xm - x0) / w, (x - x0) / w) + + # expand to RGBA + r = o8(int(255 * ((rgb1[0] - rgb0[0]) * scale + rgb0[0]) + 0.5)) + g = o8(int(255 * ((rgb1[1] - rgb0[1]) * scale + rgb0[1]) + 0.5)) + b = o8(int(255 * ((rgb1[2] - rgb0[2]) * scale + rgb0[2]) + 0.5)) + a = o8(int(255 * ((rgb1[3] - rgb0[3]) * scale + rgb0[3]) + 0.5)) + + # add to palette + palette.append(r + g + b + a) + + return b"".join(palette), "RGBA" + + +## +# File handler for GIMP's gradient format. + +class GimpGradientFile(GradientFile): + + def __init__(self, fp): + + if fp.readline()[:13] != b"GIMP Gradient": + raise SyntaxError("not a GIMP gradient file") + + line = fp.readline() + + # GIMP 1.2 gradient files don't contain a name, but GIMP 1.3 files do + if line.startswith(b"Name: "): + line = fp.readline().strip() + + count = int(line) + + gradient = [] + + for i in range(count): + + s = fp.readline().split() + w = [float(x) for x in s[:11]] + + x0, x1 = w[0], w[2] + xm = w[1] + rgb0 = w[3:7] + rgb1 = w[7:11] + + segment = SEGMENTS[int(s[11])] + cspace = int(s[12]) + + if cspace != 0: + raise IOError("cannot handle HSV colour space") + + gradient.append((x0, x1, xm, rgb0, rgb1, segment)) + + self.gradient = gradient diff --git a/Lib/site-packages/PIL/GimpPaletteFile.py b/Lib/site-packages/PIL/GimpPaletteFile.py new file mode 100644 index 0000000..4bf3ca3 --- /dev/null +++ b/Lib/site-packages/PIL/GimpPaletteFile.py @@ -0,0 +1,62 @@ +# +# Python Imaging Library +# $Id$ +# +# stuff to read GIMP palette files +# +# History: +# 1997-08-23 fl Created +# 2004-09-07 fl Support GIMP 2.0 palette files. +# +# Copyright (c) Secret Labs AB 1997-2004. All rights reserved. +# Copyright (c) Fredrik Lundh 1997-2004. +# +# See the README file for information on usage and redistribution. +# + +import re +from PIL._binary import o8 + + +## +# File handler for GIMP's palette format. 
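+#
+# Usually reached indirectly (e.g. through ImagePalette.load()), but it
+# can also be driven by hand; a small sketch, where "swatch.gpl" is a
+# hypothetical path:
+#
+#     with open("swatch.gpl", "rb") as fp:
+#         pal = GimpPaletteFile(fp)
+#     data, rawmode = pal.getpalette()   # 768 bytes of packed RGB, "RGB"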
+ +class GimpPaletteFile(object): + + rawmode = "RGB" + + def __init__(self, fp): + + self.palette = [o8(i)*3 for i in range(256)] + + if fp.readline()[:12] != b"GIMP Palette": + raise SyntaxError("not a GIMP palette file") + + i = 0 + + while i <= 255: + + s = fp.readline() + + if not s: + break + # skip fields and comment lines + if re.match(b"\w+:|#", s): + continue + if len(s) > 100: + raise SyntaxError("bad palette file") + + v = tuple(map(int, s.split()[:3])) + if len(v) != 3: + raise ValueError("bad palette entry") + + if 0 <= i <= 255: + self.palette[i] = o8(v[0]) + o8(v[1]) + o8(v[2]) + + i += 1 + + self.palette = b"".join(self.palette) + + def getpalette(self): + + return self.palette, self.rawmode diff --git a/Lib/site-packages/PIL/GribStubImagePlugin.py b/Lib/site-packages/PIL/GribStubImagePlugin.py new file mode 100644 index 0000000..8ffad81 --- /dev/null +++ b/Lib/site-packages/PIL/GribStubImagePlugin.py @@ -0,0 +1,72 @@ +# +# The Python Imaging Library +# $Id$ +# +# GRIB stub adapter +# +# Copyright (c) 1996-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image, ImageFile + +_handler = None + + +## +# Install application-specific GRIB image handler. +# +# @param handler Handler object. + +def register_handler(handler): + global _handler + _handler = handler + + +# -------------------------------------------------------------------- +# Image adapter + +def _accept(prefix): + return prefix[0:4] == b"GRIB" and prefix[7] == b'\x01' + + +class GribStubImageFile(ImageFile.StubImageFile): + + format = "GRIB" + format_description = "GRIB" + + def _open(self): + + offset = self.fp.tell() + + if not _accept(self.fp.read(8)): + raise SyntaxError("Not a GRIB file") + + self.fp.seek(offset) + + # make something up + self.mode = "F" + self.size = 1, 1 + + loader = self._load() + if loader: + loader.open(self) + + def _load(self): + return _handler + + +def _save(im, fp, filename): + if _handler is None or not hasattr("_handler", "save"): + raise IOError("GRIB save handler not installed") + _handler.save(im, fp, filename) + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(GribStubImageFile.format, GribStubImageFile, _accept) +Image.register_save(GribStubImageFile.format, _save) + +Image.register_extension(GribStubImageFile.format, ".grib") diff --git a/Lib/site-packages/PIL/Hdf5StubImagePlugin.py b/Lib/site-packages/PIL/Hdf5StubImagePlugin.py new file mode 100644 index 0000000..f7945be --- /dev/null +++ b/Lib/site-packages/PIL/Hdf5StubImagePlugin.py @@ -0,0 +1,73 @@ +# +# The Python Imaging Library +# $Id$ +# +# HDF5 stub adapter +# +# Copyright (c) 2000-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image, ImageFile + +_handler = None + + +## +# Install application-specific HDF5 image handler. +# +# @param handler Handler object. 
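+#
+# The handler is duck-typed; a minimal sketch of the expected shape
+# (the class name is illustrative, and the protocol mirrors the GRIB
+# stub above -- open() fills in mode/size, load() returns a decoded
+# image, save() writes one out):
+#
+#     class MyHdf5Handler(object):
+#         def open(self, im): ...
+#         def load(self, im): ...
+#         def save(self, im, fp, filename): ...
+#
+#     register_handler(MyHdf5Handler())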
+ +def register_handler(handler): + global _handler + _handler = handler + + +# -------------------------------------------------------------------- +# Image adapter + +def _accept(prefix): + return prefix[:8] == b"\x89HDF\r\n\x1a\n" + + +class HDF5StubImageFile(ImageFile.StubImageFile): + + format = "HDF5" + format_description = "HDF5" + + def _open(self): + + offset = self.fp.tell() + + if not _accept(self.fp.read(8)): + raise SyntaxError("Not an HDF file") + + self.fp.seek(offset) + + # make something up + self.mode = "F" + self.size = 1, 1 + + loader = self._load() + if loader: + loader.open(self) + + def _load(self): + return _handler + + +def _save(im, fp, filename): + if _handler is None or not hasattr("_handler", "save"): + raise IOError("HDF5 save handler not installed") + _handler.save(im, fp, filename) + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(HDF5StubImageFile.format, HDF5StubImageFile, _accept) +Image.register_save(HDF5StubImageFile.format, _save) + +Image.register_extension(HDF5StubImageFile.format, ".h5") +Image.register_extension(HDF5StubImageFile.format, ".hdf") diff --git a/Lib/site-packages/PIL/IcnsImagePlugin.py b/Lib/site-packages/PIL/IcnsImagePlugin.py new file mode 100644 index 0000000..a4366e9 --- /dev/null +++ b/Lib/site-packages/PIL/IcnsImagePlugin.py @@ -0,0 +1,366 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Mac OS X icns file decoder, based on icns.py by Bob Ippolito. +# +# history: +# 2004-10-09 fl Turned into a PIL plugin; removed 2.3 dependencies. +# +# Copyright (c) 2004 by Bob Ippolito. +# Copyright (c) 2004 by Secret Labs. +# Copyright (c) 2004 by Fredrik Lundh. +# Copyright (c) 2014 by Alastair Houghton. +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image, ImageFile, PngImagePlugin, _binary +import io +import os +import shutil +import struct +import sys +import tempfile + +enable_jpeg2k = hasattr(Image.core, 'jp2klib_version') +if enable_jpeg2k: + from PIL import Jpeg2KImagePlugin + +i8 = _binary.i8 + +HEADERSIZE = 8 + + +def nextheader(fobj): + return struct.unpack('>4sI', fobj.read(HEADERSIZE)) + + +def read_32t(fobj, start_length, size): + # The 128x128 icon seems to have an extra header for some reason. + (start, length) = start_length + fobj.seek(start) + sig = fobj.read(4) + if sig != b'\x00\x00\x00\x00': + raise SyntaxError('Unknown signature, expecting 0x00000000') + return read_32(fobj, (start + 4, length - 4), size) + + +def read_32(fobj, start_length, size): + """ + Read a 32bit RGB icon resource. Seems to be either uncompressed or + an RLE packbits-like scheme. 
+ """ + (start, length) = start_length + fobj.seek(start) + pixel_size = (size[0] * size[2], size[1] * size[2]) + sizesq = pixel_size[0] * pixel_size[1] + if length == sizesq * 3: + # uncompressed ("RGBRGBGB") + indata = fobj.read(length) + im = Image.frombuffer("RGB", pixel_size, indata, "raw", "RGB", 0, 1) + else: + # decode image + im = Image.new("RGB", pixel_size, None) + for band_ix in range(3): + data = [] + bytesleft = sizesq + while bytesleft > 0: + byte = fobj.read(1) + if not byte: + break + byte = i8(byte) + if byte & 0x80: + blocksize = byte - 125 + byte = fobj.read(1) + for i in range(blocksize): + data.append(byte) + else: + blocksize = byte + 1 + data.append(fobj.read(blocksize)) + bytesleft -= blocksize + if bytesleft <= 0: + break + if bytesleft != 0: + raise SyntaxError( + "Error reading channel [%r left]" % bytesleft + ) + band = Image.frombuffer( + "L", pixel_size, b"".join(data), "raw", "L", 0, 1 + ) + im.im.putband(band.im, band_ix) + return {"RGB": im} + + +def read_mk(fobj, start_length, size): + # Alpha masks seem to be uncompressed + start = start_length[0] + fobj.seek(start) + pixel_size = (size[0] * size[2], size[1] * size[2]) + sizesq = pixel_size[0] * pixel_size[1] + band = Image.frombuffer( + "L", pixel_size, fobj.read(sizesq), "raw", "L", 0, 1 + ) + return {"A": band} + + +def read_png_or_jpeg2000(fobj, start_length, size): + (start, length) = start_length + fobj.seek(start) + sig = fobj.read(12) + if sig[:8] == b'\x89PNG\x0d\x0a\x1a\x0a': + fobj.seek(start) + im = PngImagePlugin.PngImageFile(fobj) + return {"RGBA": im} + elif sig[:4] == b'\xff\x4f\xff\x51' \ + or sig[:4] == b'\x0d\x0a\x87\x0a' \ + or sig == b'\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a': + if not enable_jpeg2k: + raise ValueError('Unsupported icon subimage format (rebuild PIL ' + 'with JPEG 2000 support to fix this)') + # j2k, jpc or j2c + fobj.seek(start) + jp2kstream = fobj.read(length) + f = io.BytesIO(jp2kstream) + im = Jpeg2KImagePlugin.Jpeg2KImageFile(f) + if im.mode != 'RGBA': + im = im.convert('RGBA') + return {"RGBA": im} + else: + raise ValueError('Unsupported icon subimage format') + + +class IcnsFile(object): + + SIZES = { + (512, 512, 2): [ + (b'ic10', read_png_or_jpeg2000), + ], + (512, 512, 1): [ + (b'ic09', read_png_or_jpeg2000), + ], + (256, 256, 2): [ + (b'ic14', read_png_or_jpeg2000), + ], + (256, 256, 1): [ + (b'ic08', read_png_or_jpeg2000), + ], + (128, 128, 2): [ + (b'ic13', read_png_or_jpeg2000), + ], + (128, 128, 1): [ + (b'ic07', read_png_or_jpeg2000), + (b'it32', read_32t), + (b't8mk', read_mk), + ], + (64, 64, 1): [ + (b'icp6', read_png_or_jpeg2000), + ], + (32, 32, 2): [ + (b'ic12', read_png_or_jpeg2000), + ], + (48, 48, 1): [ + (b'ih32', read_32), + (b'h8mk', read_mk), + ], + (32, 32, 1): [ + (b'icp5', read_png_or_jpeg2000), + (b'il32', read_32), + (b'l8mk', read_mk), + ], + (16, 16, 2): [ + (b'ic11', read_png_or_jpeg2000), + ], + (16, 16, 1): [ + (b'icp4', read_png_or_jpeg2000), + (b'is32', read_32), + (b's8mk', read_mk), + ], + } + + def __init__(self, fobj): + """ + fobj is a file-like object as an icns resource + """ + # signature : (start, length) + self.dct = dct = {} + self.fobj = fobj + sig, filesize = nextheader(fobj) + if sig != b'icns': + raise SyntaxError('not an icns file') + i = HEADERSIZE + while i < filesize: + sig, blocksize = nextheader(fobj) + if blocksize <= 0: + raise SyntaxError('invalid block header') + i += HEADERSIZE + blocksize -= HEADERSIZE + dct[sig] = (i, blocksize) + fobj.seek(blocksize, 1) + i += blocksize + + def itersizes(self): + sizes 
= [] + for size, fmts in self.SIZES.items(): + for (fmt, reader) in fmts: + if fmt in self.dct: + sizes.append(size) + break + return sizes + + def bestsize(self): + sizes = self.itersizes() + if not sizes: + raise SyntaxError("No 32bit icon resources found") + return max(sizes) + + def dataforsize(self, size): + """ + Get an icon resource as {channel: array}. Note that + the arrays are bottom-up like windows bitmaps and will likely + need to be flipped or transposed in some way. + """ + dct = {} + for code, reader in self.SIZES[size]: + desc = self.dct.get(code) + if desc is not None: + dct.update(reader(self.fobj, desc, size)) + return dct + + def getimage(self, size=None): + if size is None: + size = self.bestsize() + if len(size) == 2: + size = (size[0], size[1], 1) + channels = self.dataforsize(size) + + im = channels.get('RGBA', None) + if im: + return im + + im = channels.get("RGB").copy() + try: + im.putalpha(channels["A"]) + except KeyError: + pass + return im + + +## +# Image plugin for Mac OS icons. + +class IcnsImageFile(ImageFile.ImageFile): + """ + PIL image support for Mac OS .icns files. + Chooses the best resolution, but will possibly load + a different size image if you mutate the size attribute + before calling 'load'. + + The info dictionary has a key 'sizes' that is a list + of sizes that the icns file has. + """ + + format = "ICNS" + format_description = "Mac OS icns resource" + + def _open(self): + self.icns = IcnsFile(self.fp) + self.mode = 'RGBA' + self.best_size = self.icns.bestsize() + self.size = (self.best_size[0] * self.best_size[2], + self.best_size[1] * self.best_size[2]) + self.info['sizes'] = self.icns.itersizes() + # Just use this to see if it's loaded or not yet. + self.tile = ('',) + + def load(self): + if len(self.size) == 3: + self.best_size = self.size + self.size = (self.best_size[0] * self.best_size[2], + self.best_size[1] * self.best_size[2]) + + Image.Image.load(self) + if not self.tile: + return + self.load_prepare() + # This is likely NOT the best way to do it, but whatever. + im = self.icns.getimage(self.best_size) + + # If this is a PNG or JPEG 2000, it won't be loaded yet + im.load() + + self.im = im.im + self.mode = im.mode + self.size = im.size + self.fp = None + self.icns = None + self.tile = () + self.load_end() + + +def _save(im, fp, filename): + """ + Saves the image as a series of PNG files, + that are then converted to a .icns file + using the OS X command line utility 'iconutil'. + + OS X only. 
+ """ + if hasattr(fp, "flush"): + fp.flush() + + # create the temporary set of pngs + iconset = tempfile.mkdtemp('.iconset') + last_w = None + last_im = None + for w in [16, 32, 128, 256, 512]: + prefix = 'icon_{}x{}'.format(w, w) + + if last_w == w: + im_scaled = last_im + else: + im_scaled = im.resize((w, w), Image.LANCZOS) + im_scaled.save(os.path.join(iconset, prefix+'.png')) + + im_scaled = im.resize((w*2, w*2), Image.LANCZOS) + im_scaled.save(os.path.join(iconset, prefix+'@2x.png')) + last_im = im_scaled + + # iconutil -c icns -o {} {} + from subprocess import Popen, PIPE, CalledProcessError + + convert_cmd = ["iconutil", "-c", "icns", "-o", filename, iconset] + stderr = tempfile.TemporaryFile() + convert_proc = Popen(convert_cmd, stdout=PIPE, stderr=stderr) + + convert_proc.stdout.close() + + retcode = convert_proc.wait() + + # remove the temporary files + shutil.rmtree(iconset) + + if retcode: + raise CalledProcessError(retcode, convert_cmd) + +Image.register_open(IcnsImageFile.format, IcnsImageFile, + lambda x: x[:4] == b'icns') +Image.register_extension(IcnsImageFile.format, '.icns') + +if sys.platform == 'darwin': + Image.register_save(IcnsImageFile.format, _save) + + Image.register_mime(IcnsImageFile.format, "image/icns") + + +if __name__ == '__main__': + imf = IcnsImageFile(open(sys.argv[1], 'rb')) + for size in imf.info['sizes']: + imf.size = size + imf.load() + im = imf.im + im.save('out-%s-%s-%s.png' % size) + im = Image.open(open(sys.argv[1], "rb")) + im.save("out.png") + if sys.platform == 'windows': + os.startfile("out.png") diff --git a/Lib/site-packages/PIL/IcoImagePlugin.py b/Lib/site-packages/PIL/IcoImagePlugin.py new file mode 100644 index 0000000..4aa7687 --- /dev/null +++ b/Lib/site-packages/PIL/IcoImagePlugin.py @@ -0,0 +1,283 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Windows Icon support for PIL +# +# History: +# 96-05-27 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# + +# This plugin is a refactored version of Win32IconImagePlugin by Bryan Davis +# . +# https://code.google.com/p/casadebender/wiki/Win32IconImagePlugin +# +# Icon format references: +# * https://en.wikipedia.org/wiki/ICO_(file_format) +# * http://msdn.microsoft.com/en-us/library/ms997538.aspx + + +import struct +from io import BytesIO + +from PIL import Image, ImageFile, BmpImagePlugin, PngImagePlugin, _binary +from math import log, ceil + +__version__ = "0.1" + +# +# -------------------------------------------------------------------- + +i8 = _binary.i8 +i16 = _binary.i16le +i32 = _binary.i32le + +_MAGIC = b"\0\0\1\0" + + +def _save(im, fp, filename): + fp.write(_MAGIC) # (2+2) + sizes = im.encoderinfo.get("sizes", + [(16, 16), (24, 24), (32, 32), (48, 48), + (64, 64), (128, 128), (255, 255)]) + width, height = im.size + filter(lambda x: False if (x[0] > width or x[1] > height or + x[0] > 255 or x[1] > 255) else True, sizes) + fp.write(struct.pack("=8bpp) + 'reserved': i8(s[3]), + 'planes': i16(s[4:]), + 'bpp': i16(s[6:]), + 'size': i32(s[8:]), + 'offset': i32(s[12:]) + } + + # See Wikipedia + for j in ('width', 'height'): + if not icon_header[j]: + icon_header[j] = 256 + + # See Wikipedia notes about color depth. 
+ # We need this just to differ images with equal sizes + icon_header['color_depth'] = (icon_header['bpp'] or + (icon_header['nb_color'] != 0 and + ceil(log(icon_header['nb_color'], + 2))) or 256) + + icon_header['dim'] = (icon_header['width'], icon_header['height']) + icon_header['square'] = (icon_header['width'] * + icon_header['height']) + + self.entry.append(icon_header) + + self.entry = sorted(self.entry, key=lambda x: x['color_depth']) + # ICO images are usually squares + # self.entry = sorted(self.entry, key=lambda x: x['width']) + self.entry = sorted(self.entry, key=lambda x: x['square']) + self.entry.reverse() + + def sizes(self): + """ + Get a list of all available icon sizes and color depths. + """ + return set((h['width'], h['height']) for h in self.entry) + + def getimage(self, size, bpp=False): + """ + Get an image from the icon + """ + for (i, h) in enumerate(self.entry): + if size == h['dim'] and (bpp is False or bpp == h['color_depth']): + return self.frame(i) + return self.frame(0) + + def frame(self, idx): + """ + Get an image from frame idx + """ + + header = self.entry[idx] + + self.buf.seek(header['offset']) + data = self.buf.read(8) + self.buf.seek(header['offset']) + + if data[:8] == PngImagePlugin._MAGIC: + # png frame + im = PngImagePlugin.PngImageFile(self.buf) + else: + # XOR + AND mask bmp frame + im = BmpImagePlugin.DibImageFile(self.buf) + + # change tile dimension to only encompass XOR image + im.size = (im.size[0], int(im.size[1] / 2)) + d, e, o, a = im.tile[0] + im.tile[0] = d, (0, 0) + im.size, o, a + + # figure out where AND mask image starts + mode = a[0] + bpp = 8 + for k in BmpImagePlugin.BIT2MODE.keys(): + if mode == BmpImagePlugin.BIT2MODE[k][1]: + bpp = k + break + + if 32 == bpp: + # 32-bit color depth icon image allows semitransparent areas + # PIL's DIB format ignores transparency bits, recover them. + # The DIB is packed in BGRX byte order where X is the alpha + # channel. + + # Back up to start of bmp data + self.buf.seek(o) + # extract every 4th byte (eg. 3,7,11,15,...) + alpha_bytes = self.buf.read(im.size[0] * im.size[1] * 4)[3::4] + + # convert to an 8bpp grayscale image + mask = Image.frombuffer( + 'L', # 8bpp + im.size, # (w, h) + alpha_bytes, # source chars + 'raw', # raw decoder + ('L', 0, -1) # 8bpp inverted, unpadded, reversed + ) + else: + # get AND image from end of bitmap + w = im.size[0] + if (w % 32) > 0: + # bitmap row data is aligned to word boundaries + w += 32 - (im.size[0] % 32) + + # the total mask data is + # padded row size * height / bits per char + + and_mask_offset = o + int(im.size[0] * im.size[1] * + (bpp / 8.0)) + total_bytes = int((w * im.size[1]) / 8) + + self.buf.seek(and_mask_offset) + maskData = self.buf.read(total_bytes) + + # convert raw data to image + mask = Image.frombuffer( + '1', # 1 bpp + im.size, # (w, h) + maskData, # source chars + 'raw', # raw decoder + ('1;I', int(w/8), -1) # 1bpp inverted, padded, reversed + ) + + # now we have two images, im is XOR image and mask is AND image + + # apply mask image as alpha channel + im = im.convert('RGBA') + im.putalpha(mask) + + return im + + +## +# Image plugin for Windows Icon files. + +class IcoImageFile(ImageFile.ImageFile): + """ + PIL read-only image support for Microsoft Windows .ico files. + + By default the largest resolution image in the file will be loaded. This + can be changed by altering the 'size' attribute before calling 'load'. + + The info dictionary has a key 'sizes' that is a list of the sizes available + in the icon file. 
+ + Handles classic, XP and Vista icon formats. + + This plugin is a refactored version of Win32IconImagePlugin by Bryan Davis + . + https://code.google.com/p/casadebender/wiki/Win32IconImagePlugin + """ + format = "ICO" + format_description = "Windows Icon" + + def _open(self): + self.ico = IcoFile(self.fp) + self.info['sizes'] = self.ico.sizes() + self.size = self.ico.entry[0]['dim'] + self.load() + + def load(self): + im = self.ico.getimage(self.size) + # if tile is PNG, it won't really be loaded yet + im.load() + self.im = im.im + self.mode = im.mode + self.size = im.size + + def load_seek(self): + # Flag the ImageFile.Parser so that it + # just does all the decode at the end. + pass +# +# -------------------------------------------------------------------- + +Image.register_open(IcoImageFile.format, IcoImageFile, _accept) +Image.register_save(IcoImageFile.format, _save) +Image.register_extension(IcoImageFile.format, ".ico") diff --git a/Lib/site-packages/PIL/ImImagePlugin.py b/Lib/site-packages/PIL/ImImagePlugin.py new file mode 100644 index 0000000..dd4f829 --- /dev/null +++ b/Lib/site-packages/PIL/ImImagePlugin.py @@ -0,0 +1,355 @@ +# +# The Python Imaging Library. +# $Id$ +# +# IFUNC IM file handling for PIL +# +# history: +# 1995-09-01 fl Created. +# 1997-01-03 fl Save palette images +# 1997-01-08 fl Added sequence support +# 1997-01-23 fl Added P and RGB save support +# 1997-05-31 fl Read floating point images +# 1997-06-22 fl Save floating point images +# 1997-08-27 fl Read and save 1-bit images +# 1998-06-25 fl Added support for RGB+LUT images +# 1998-07-02 fl Added support for YCC images +# 1998-07-15 fl Renamed offset attribute to avoid name clash +# 1998-12-29 fl Added I;16 support +# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.7) +# 2003-09-26 fl Added LA/PA support +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1995-2001 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. 
+# + + +import re +from PIL import Image, ImageFile, ImagePalette +from PIL._binary import i8 + +__version__ = "0.7" + + +# -------------------------------------------------------------------- +# Standard tags + +COMMENT = "Comment" +DATE = "Date" +EQUIPMENT = "Digitalization equipment" +FRAMES = "File size (no of images)" +LUT = "Lut" +NAME = "Name" +SCALE = "Scale (x,y)" +SIZE = "Image size (x*y)" +MODE = "Image type" + +TAGS = {COMMENT: 0, DATE: 0, EQUIPMENT: 0, FRAMES: 0, LUT: 0, NAME: 0, + SCALE: 0, SIZE: 0, MODE: 0} + +OPEN = { + # ifunc93/p3cfunc formats + "0 1 image": ("1", "1"), + "L 1 image": ("1", "1"), + "Greyscale image": ("L", "L"), + "Grayscale image": ("L", "L"), + "RGB image": ("RGB", "RGB;L"), + "RLB image": ("RGB", "RLB"), + "RYB image": ("RGB", "RLB"), + "B1 image": ("1", "1"), + "B2 image": ("P", "P;2"), + "B4 image": ("P", "P;4"), + "X 24 image": ("RGB", "RGB"), + "L 32 S image": ("I", "I;32"), + "L 32 F image": ("F", "F;32"), + # old p3cfunc formats + "RGB3 image": ("RGB", "RGB;T"), + "RYB3 image": ("RGB", "RYB;T"), + # extensions + "LA image": ("LA", "LA;L"), + "RGBA image": ("RGBA", "RGBA;L"), + "RGBX image": ("RGBX", "RGBX;L"), + "CMYK image": ("CMYK", "CMYK;L"), + "YCC image": ("YCbCr", "YCbCr;L"), +} + +# ifunc95 extensions +for i in ["8", "8S", "16", "16S", "32", "32F"]: + OPEN["L %s image" % i] = ("F", "F;%s" % i) + OPEN["L*%s image" % i] = ("F", "F;%s" % i) +for i in ["16", "16L", "16B"]: + OPEN["L %s image" % i] = ("I;%s" % i, "I;%s" % i) + OPEN["L*%s image" % i] = ("I;%s" % i, "I;%s" % i) +for i in ["32S"]: + OPEN["L %s image" % i] = ("I", "I;%s" % i) + OPEN["L*%s image" % i] = ("I", "I;%s" % i) +for i in range(2, 33): + OPEN["L*%s image" % i] = ("F", "F;%s" % i) + + +# -------------------------------------------------------------------- +# Read IM directory + +split = re.compile(br"^([A-Za-z][^:]*):[ \t]*(.*)[ \t]*$") + + +def number(s): + try: + return int(s) + except ValueError: + return float(s) + + +## +# Image plugin for the IFUNC IM file format. + +class ImImageFile(ImageFile.ImageFile): + + format = "IM" + format_description = "IFUNC Image Memory" + + def _open(self): + + # Quick rejection: if there's not an LF among the first + # 100 bytes, this is (probably) not a text header. + + if b"\n" not in self.fp.read(100): + raise SyntaxError("not an IM file") + self.fp.seek(0) + + n = 0 + + # Default values + self.info[MODE] = "L" + self.info[SIZE] = (512, 512) + self.info[FRAMES] = 1 + + self.rawmode = "L" + + while True: + + s = self.fp.read(1) + + # Some versions of IFUNC uses \n\r instead of \r\n... + if s == b"\r": + continue + + if not s or s == b'\0' or s == b'\x1A': + break + + # FIXME: this may read whole file if not a text file + s = s + self.fp.readline() + + if len(s) > 100: + raise SyntaxError("not an IM file") + + if s[-2:] == b'\r\n': + s = s[:-2] + elif s[-1:] == b'\n': + s = s[:-1] + + try: + m = split.match(s) + except re.error as v: + raise SyntaxError("not an IM file") + + if m: + + k, v = m.group(1, 2) + + # Don't know if this is the correct encoding, + # but a decent guess (I guess) + k = k.decode('latin-1', 'replace') + v = v.decode('latin-1', 'replace') + + # Convert value as appropriate + if k in [FRAMES, SCALE, SIZE]: + v = v.replace("*", ",") + v = tuple(map(number, v.split(","))) + if len(v) == 1: + v = v[0] + elif k == MODE and v in OPEN: + v, self.rawmode = OPEN[v] + + # Add to dictionary. Note that COMMENT tags are + # combined into a list of strings. 
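+                # (for instance, two "Comment:" lines in the header end
+                # up as self.info["Comment"] == ["first", "second"])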
+ if k == COMMENT: + if k in self.info: + self.info[k].append(v) + else: + self.info[k] = [v] + else: + self.info[k] = v + + if k in TAGS: + n += 1 + + else: + + raise SyntaxError("Syntax error in IM header: " + + s.decode('ascii', 'replace')) + + if not n: + raise SyntaxError("Not an IM file") + + # Basic attributes + self.size = self.info[SIZE] + self.mode = self.info[MODE] + + # Skip forward to start of image data + while s and s[0:1] != b'\x1A': + s = self.fp.read(1) + if not s: + raise SyntaxError("File truncated") + + if LUT in self.info: + # convert lookup table to palette or lut attribute + palette = self.fp.read(768) + greyscale = 1 # greyscale palette + linear = 1 # linear greyscale palette + for i in range(256): + if palette[i] == palette[i+256] == palette[i+512]: + if i8(palette[i]) != i: + linear = 0 + else: + greyscale = 0 + if self.mode == "L" or self.mode == "LA": + if greyscale: + if not linear: + self.lut = [i8(c) for c in palette[:256]] + else: + if self.mode == "L": + self.mode = self.rawmode = "P" + elif self.mode == "LA": + self.mode = self.rawmode = "PA" + self.palette = ImagePalette.raw("RGB;L", palette) + elif self.mode == "RGB": + if not greyscale or not linear: + self.lut = [i8(c) for c in palette] + + self.frame = 0 + + self.__offset = offs = self.fp.tell() + + self.__fp = self.fp # FIXME: hack + + if self.rawmode[:2] == "F;": + + # ifunc95 formats + try: + # use bit decoder (if necessary) + bits = int(self.rawmode[2:]) + if bits not in [8, 16, 32]: + self.tile = [("bit", (0, 0)+self.size, offs, + (bits, 8, 3, 0, -1))] + return + except ValueError: + pass + + if self.rawmode in ["RGB;T", "RYB;T"]: + # Old LabEye/3PC files. Would be very surprised if anyone + # ever stumbled upon such a file ;-) + size = self.size[0] * self.size[1] + self.tile = [("raw", (0, 0)+self.size, offs, ("G", 0, -1)), + ("raw", (0, 0)+self.size, offs+size, ("R", 0, -1)), + ("raw", (0, 0)+self.size, offs+2*size, ("B", 0, -1))] + else: + # LabEye/IFUNC files + self.tile = [("raw", (0, 0)+self.size, offs, + (self.rawmode, 0, -1))] + + @property + def n_frames(self): + return self.info[FRAMES] + + @property + def is_animated(self): + return self.info[FRAMES] > 1 + + def seek(self, frame): + + if frame < 0 or frame >= self.info[FRAMES]: + raise EOFError("seek outside sequence") + + if self.frame == frame: + return + + self.frame = frame + + if self.mode == "1": + bits = 1 + else: + bits = 8 * len(self.mode) + + size = ((self.size[0] * bits + 7) // 8) * self.size[1] + offs = self.__offset + frame * size + + self.fp = self.__fp + + self.tile = [("raw", (0, 0)+self.size, offs, (self.rawmode, 0, -1))] + + def tell(self): + + return self.frame + +# +# -------------------------------------------------------------------- +# Save IM files + +SAVE = { + # mode: (im type, raw mode) + "1": ("0 1", "1"), + "L": ("Greyscale", "L"), + "LA": ("LA", "LA;L"), + "P": ("Greyscale", "P"), + "PA": ("LA", "PA;L"), + "I": ("L 32S", "I;32S"), + "I;16": ("L 16", "I;16"), + "I;16L": ("L 16L", "I;16L"), + "I;16B": ("L 16B", "I;16B"), + "F": ("L 32F", "F;32F"), + "RGB": ("RGB", "RGB;L"), + "RGBA": ("RGBA", "RGBA;L"), + "RGBX": ("RGBX", "RGBX;L"), + "CMYK": ("CMYK", "CMYK;L"), + "YCbCr": ("YCC", "YCbCr;L") +} + + +def _save(im, fp, filename, check=0): + + try: + image_type, rawmode = SAVE[im.mode] + except KeyError: + raise ValueError("Cannot save %s images as IM" % im.mode) + + try: + frames = im.encoderinfo["frames"] + except KeyError: + frames = 1 + + if check: + return check + + fp.write(("Image type: %s image\r\n" % 
image_type).encode('ascii')) + if filename: + fp.write(("Name: %s\r\n" % filename).encode('ascii')) + fp.write(("Image size (x*y): %d*%d\r\n" % im.size).encode('ascii')) + fp.write(("File size (no of images): %d\r\n" % frames).encode('ascii')) + if im.mode == "P": + fp.write(b"Lut: 1\r\n") + fp.write(b"\000" * (511-fp.tell()) + b"\032") + if im.mode == "P": + fp.write(im.im.getpalette("RGB", "RGB;L")) # 768 bytes + ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 0, (rawmode, 0, -1))]) + +# +# -------------------------------------------------------------------- +# Registry + +Image.register_open(ImImageFile.format, ImImageFile) +Image.register_save(ImImageFile.format, _save) + +Image.register_extension(ImImageFile.format, ".im") diff --git a/Lib/site-packages/PIL/Image.py b/Lib/site-packages/PIL/Image.py new file mode 100644 index 0000000..89549ea --- /dev/null +++ b/Lib/site-packages/PIL/Image.py @@ -0,0 +1,2500 @@ +# +# The Python Imaging Library. +# $Id$ +# +# the Image class wrapper +# +# partial release history: +# 1995-09-09 fl Created +# 1996-03-11 fl PIL release 0.0 (proof of concept) +# 1996-04-30 fl PIL release 0.1b1 +# 1999-07-28 fl PIL release 1.0 final +# 2000-06-07 fl PIL release 1.1 +# 2000-10-20 fl PIL release 1.1.1 +# 2001-05-07 fl PIL release 1.1.2 +# 2002-03-15 fl PIL release 1.1.3 +# 2003-05-10 fl PIL release 1.1.4 +# 2005-03-28 fl PIL release 1.1.5 +# 2006-12-02 fl PIL release 1.1.6 +# 2009-11-15 fl PIL release 1.1.7 +# +# Copyright (c) 1997-2009 by Secret Labs AB. All rights reserved. +# Copyright (c) 1995-2009 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +from __future__ import print_function + +from PIL import VERSION, PILLOW_VERSION, _plugins + +import logging +import warnings + +logger = logging.getLogger(__name__) + + +class DecompressionBombWarning(RuntimeWarning): + pass + + +class _imaging_not_installed(object): + # module placeholder + def __getattr__(self, id): + raise ImportError("The _imaging C module is not installed") + + +# Limit to around a quarter gigabyte for a 24 bit (3 bpp) image +MAX_IMAGE_PIXELS = int(1024 * 1024 * 1024 / 4 / 3) + +try: + # give Tk a chance to set up the environment, in case we're + # using an _imaging module linked against libtcl/libtk (use + # __import__ to hide this from naive packagers; we don't really + # depend on Tk unless ImageTk is used, and that module already + # imports Tkinter) + __import__("FixTk") +except ImportError: + pass + +try: + # If the _imaging C module is not present, Pillow will not load. + # Note that other modules should not refer to _imaging directly; + # import Image and use the Image.core variable instead. + # Also note that Image.core is not a publicly documented interface, + # and should be considered private and subject to change. + from PIL import _imaging as core + if PILLOW_VERSION != getattr(core, 'PILLOW_VERSION', None): + raise ImportError("The _imaging extension was built for another " + " version of Pillow or PIL") + +except ImportError as v: + core = _imaging_not_installed() + # Explanations for ways that we know we might have an import error + if str(v).startswith("Module use of python"): + # The _imaging C module is present, but not compiled for + # the right version (windows only). Print a warning, if + # possible. 
+ warnings.warn( + "The _imaging extension was built for another version " + "of Python.", + RuntimeWarning + ) + elif str(v).startswith("The _imaging extension"): + warnings.warn(str(v), RuntimeWarning) + elif "Symbol not found: _PyUnicodeUCS2_FromString" in str(v): + warnings.warn( + "The _imaging extension was built for Python with UCS2 support; " + "recompile PIL or build Python --without-wide-unicode. ", + RuntimeWarning + ) + elif "Symbol not found: _PyUnicodeUCS4_FromString" in str(v): + warnings.warn( + "The _imaging extension was built for Python with UCS4 support; " + "recompile PIL or build Python --with-wide-unicode. ", + RuntimeWarning + ) + # Fail here anyway. Don't let people run with a mostly broken Pillow. + # see docs/porting-pil-to-pillow.rst + raise + +try: + import builtins +except ImportError: + import __builtin__ + builtins = __builtin__ + +from PIL import ImageMode +from PIL._binary import i8 +from PIL._util import isPath +from PIL._util import isStringType +from PIL._util import deferred_error + +import os +import sys +import io +import struct + +# type stuff +import collections +import numbers + +# works everywhere, win for pypy, not cpython +USE_CFFI_ACCESS = hasattr(sys, 'pypy_version_info') +try: + import cffi + HAS_CFFI = True +except ImportError: + HAS_CFFI = False + + +def isImageType(t): + """ + Checks if an object is an image object. + + .. warning:: + + This function is for internal use only. + + :param t: object to check if it's an image + :returns: True if the object is an image + """ + return hasattr(t, "im") + +# +# Constants (also defined in _imagingmodule.c!) + +NONE = 0 + +# transpose +FLIP_LEFT_RIGHT = 0 +FLIP_TOP_BOTTOM = 1 +ROTATE_90 = 2 +ROTATE_180 = 3 +ROTATE_270 = 4 +TRANSPOSE = 5 + +# transforms +AFFINE = 0 +EXTENT = 1 +PERSPECTIVE = 2 +QUAD = 3 +MESH = 4 + +# resampling filters +NEAREST = NONE = 0 +LANCZOS = ANTIALIAS = 1 +BILINEAR = LINEAR = 2 +BICUBIC = CUBIC = 3 + +# dithers +NONE = 0 +NEAREST = 0 +ORDERED = 1 # Not yet implemented +RASTERIZE = 2 # Not yet implemented +FLOYDSTEINBERG = 3 # default + +# palettes/quantizers +WEB = 0 +ADAPTIVE = 1 + +MEDIANCUT = 0 +MAXCOVERAGE = 1 +FASTOCTREE = 2 + +# categories +NORMAL = 0 +SEQUENCE = 1 +CONTAINER = 2 + +if hasattr(core, 'DEFAULT_STRATEGY'): + DEFAULT_STRATEGY = core.DEFAULT_STRATEGY + FILTERED = core.FILTERED + HUFFMAN_ONLY = core.HUFFMAN_ONLY + RLE = core.RLE + FIXED = core.FIXED + + +# -------------------------------------------------------------------- +# Registries + +ID = [] +OPEN = {} +MIME = {} +SAVE = {} +SAVE_ALL = {} +EXTENSION = {} + +# -------------------------------------------------------------------- +# Modes supported by this version + +_MODEINFO = { + # NOTE: this table will be removed in future versions. use + # getmode* functions or ImageMode descriptors instead. + + # official modes + "1": ("L", "L", ("1",)), + "L": ("L", "L", ("L",)), + "I": ("L", "I", ("I",)), + "F": ("L", "F", ("F",)), + "P": ("RGB", "L", ("P",)), + "RGB": ("RGB", "L", ("R", "G", "B")), + "RGBX": ("RGB", "L", ("R", "G", "B", "X")), + "RGBA": ("RGB", "L", ("R", "G", "B", "A")), + "CMYK": ("RGB", "L", ("C", "M", "Y", "K")), + "YCbCr": ("RGB", "L", ("Y", "Cb", "Cr")), + "LAB": ("RGB", "L", ("L", "A", "B")), + "HSV": ("RGB", "L", ("H", "S", "V")), + + # Experimental modes include I;16, I;16L, I;16B, RGBa, BGR;15, and + # BGR;24. Use these modes only if you know exactly what you're + # doing... 
+ +} + +if sys.byteorder == 'little': + _ENDIAN = '<' +else: + _ENDIAN = '>' + +_MODE_CONV = { + # official modes + "1": ('|b1', None), # broken + "L": ('|u1', None), + "I": (_ENDIAN + 'i4', None), + "F": (_ENDIAN + 'f4', None), + "P": ('|u1', None), + "RGB": ('|u1', 3), + "RGBX": ('|u1', 4), + "RGBA": ('|u1', 4), + "CMYK": ('|u1', 4), + "YCbCr": ('|u1', 3), + "LAB": ('|u1', 3), # UNDONE - unsigned |u1i1i1 + "HSV": ('|u1', 3), + # I;16 == I;16L, and I;32 == I;32L + "I;16": ('u2', None), + "I;16L": ('i2', None), + "I;16LS": ('u4', None), + "I;32L": ('i4', None), + "I;32LS": ('= 1: + return + + try: + from PIL import BmpImagePlugin + except ImportError: + pass + try: + from PIL import GifImagePlugin + except ImportError: + pass + try: + from PIL import JpegImagePlugin + except ImportError: + pass + try: + from PIL import PpmImagePlugin + except ImportError: + pass + try: + from PIL import PngImagePlugin + except ImportError: + pass +# try: +# import TiffImagePlugin +# except ImportError: +# pass + + _initialized = 1 + + +def init(): + """ + Explicitly initializes the Python Imaging Library. This function + loads all available file format drivers. + """ + + global _initialized + if _initialized >= 2: + return 0 + + for plugin in _plugins: + try: + logger.debug("Importing %s", plugin) + __import__("PIL.%s" % plugin, globals(), locals(), []) + except ImportError as e: + logger.debug("Image: failed to import %s: %s", plugin, e) + + if OPEN or SAVE: + _initialized = 2 + return 1 + + +# -------------------------------------------------------------------- +# Codec factories (used by tobytes/frombytes and ImageFile.load) + +def _getdecoder(mode, decoder_name, args, extra=()): + + # tweak arguments + if args is None: + args = () + elif not isinstance(args, tuple): + args = (args,) + + try: + # get decoder + decoder = getattr(core, decoder_name + "_decoder") + # print(decoder, mode, args + extra) + return decoder(mode, *args + extra) + except AttributeError: + raise IOError("decoder %s not available" % decoder_name) + + +def _getencoder(mode, encoder_name, args, extra=()): + + # tweak arguments + if args is None: + args = () + elif not isinstance(args, tuple): + args = (args,) + + try: + # get encoder + encoder = getattr(core, encoder_name + "_encoder") + # print(encoder, mode, args + extra) + return encoder(mode, *args + extra) + except AttributeError: + raise IOError("encoder %s not available" % encoder_name) + + +# -------------------------------------------------------------------- +# Simple expression analyzer + +def coerce_e(value): + return value if isinstance(value, _E) else _E(value) + + +class _E(object): + def __init__(self, data): + self.data = data + + def __add__(self, other): + return _E((self.data, "__add__", coerce_e(other).data)) + + def __mul__(self, other): + return _E((self.data, "__mul__", coerce_e(other).data)) + + +def _getscaleoffset(expr): + stub = ["stub"] + data = expr(_E(stub)).data + try: + (a, b, c) = data # simplified syntax + if (a is stub and b == "__mul__" and isinstance(c, numbers.Number)): + return c, 0.0 + if a is stub and b == "__add__" and isinstance(c, numbers.Number): + return 1.0, c + except TypeError: + pass + try: + ((a, b, c), d, e) = data # full syntax + if (a is stub and b == "__mul__" and isinstance(c, numbers.Number) and + d == "__add__" and isinstance(e, numbers.Number)): + return c, e + except TypeError: + pass + raise ValueError("illegal expression") + + +# -------------------------------------------------------------------- +# Implementation 
wrapper + +class Image(object): + """ + This class represents an image object. To create + :py:class:`~PIL.Image.Image` objects, use the appropriate factory + functions. There's hardly ever any reason to call the Image constructor + directly. + + * :py:func:`~PIL.Image.open` + * :py:func:`~PIL.Image.new` + * :py:func:`~PIL.Image.frombytes` + """ + format = None + format_description = None + + def __init__(self): + # FIXME: take "new" parameters / other image? + # FIXME: turn mode and size into delegating properties? + self.im = None + self.mode = "" + self.size = (0, 0) + self.palette = None + self.info = {} + self.category = NORMAL + self.readonly = 0 + self.pyaccess = None + + @property + def width(self): + return self.size[0] + + @property + def height(self): + return self.size[1] + + def _new(self, im): + new = Image() + new.im = im + new.mode = im.mode + new.size = im.size + if self.palette: + new.palette = self.palette.copy() + if im.mode == "P" and not new.palette: + from PIL import ImagePalette + new.palette = ImagePalette.ImagePalette() + try: + new.info = self.info.copy() + except AttributeError: + # fallback (pre-1.5.2) + new.info = {} + for k, v in self.info: + new.info[k] = v + return new + + _makeself = _new # compatibility + + # Context Manager Support + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + def close(self): + """ + Closes the file pointer, if possible. + + This operation will destroy the image core and release its memory. + The image data will be unusable afterward. + + This function is only required to close images that have not + had their file read and closed by the + :py:meth:`~PIL.Image.Image.load` method. + """ + try: + self.fp.close() + except Exception as msg: + logger.debug("Error closing: %s", msg) + + # Instead of simply setting to None, we're setting up a + # deferred error that will better explain that the core image + # object is gone. + self.im = deferred_error(ValueError("Operation on closed image")) + + def _copy(self): + self.load() + self.im = self.im.copy() + self.pyaccess = None + self.readonly = 0 + + def _dump(self, file=None, format=None): + import tempfile + suffix = '' + if format: + suffix = '.'+format + if not file: + f, file = tempfile.mkstemp(suffix) + os.close(f) + + self.load() + if not format or format == "PPM": + self.im.save_ppm(file) + else: + if not file.endswith(format): + file = file + "." 
+ format + self.save(file, format) + return file + + def __eq__(self, other): + if self.__class__.__name__ != other.__class__.__name__: + return False + a = (self.mode == other.mode) + b = (self.size == other.size) + c = (self.getpalette() == other.getpalette()) + d = (self.info == other.info) + e = (self.category == other.category) + f = (self.readonly == other.readonly) + g = (self.tobytes() == other.tobytes()) + return a and b and c and d and e and f and g + + def __ne__(self, other): + eq = (self == other) + return not eq + + def __repr__(self): + return "<%s.%s image mode=%s size=%dx%d at 0x%X>" % ( + self.__class__.__module__, self.__class__.__name__, + self.mode, self.size[0], self.size[1], + id(self) + ) + + def _repr_png_(self): + """ iPython display hook support + + :returns: png version of the image as bytes + """ + from io import BytesIO + b = BytesIO() + self.save(b, 'PNG') + return b.getvalue() + + def __getattr__(self, name): + if name == "__array_interface__": + # numpy array interface support + new = {} + shape, typestr = _conv_type_shape(self) + new['shape'] = shape + new['typestr'] = typestr + new['data'] = self.tobytes() + new['version'] = 3 + return new + raise AttributeError(name) + + def __getstate__(self): + return [ + self.info, + self.mode, + self.size, + self.getpalette(), + self.tobytes()] + + def __setstate__(self, state): + Image.__init__(self) + self.tile = [] + info, mode, size, palette, data = state + self.info = info + self.mode = mode + self.size = size + self.im = core.new(mode, size) + if mode in ("L", "P") and palette: + self.putpalette(palette) + self.frombytes(data) + + def tobytes(self, encoder_name="raw", *args): + """ + Return image as a bytes object. + + .. warning:: + + This method returns the raw image data from the internal + storage. For compressed image data (e.g. PNG, JPEG) use + :meth:`~.save`, with a BytesIO parameter for in-memory + data. + + :param encoder_name: What encoder to use. The default is to + use the standard "raw" encoder. + :param args: Extra arguments to the encoder. + :rtype: A bytes object. + """ + + # may pass tuple instead of argument list + if len(args) == 1 and isinstance(args[0], tuple): + args = args[0] + + if encoder_name == "raw" and args == (): + args = self.mode + + self.load() + + # unpack data + e = _getencoder(self.mode, encoder_name, args) + e.setimage(self.im) + + bufsize = max(65536, self.size[0] * 4) # see RawEncode.c + + data = [] + while True: + l, s, d = e.encode(bufsize) + data.append(d) + if s: + break + if s < 0: + raise RuntimeError("encoder error %d in tobytes" % s) + + return b"".join(data) + + def tostring(self, *args, **kw): + raise Exception("tostring() has been removed. " + + "Please call tobytes() instead.") + + def tobitmap(self, name="image"): + """ + Returns the image converted to an X11 bitmap. + + .. note:: This method only works for mode "1" images. + + :param name: The name prefix to use for the bitmap variables. + :returns: A string containing an X11 bitmap. + :raises ValueError: If the mode is not "1" + """ + + self.load() + if self.mode != "1": + raise ValueError("not a bitmap") + data = self.tobytes("xbm") + return b"".join([ + ("#define %s_width %d\n" % (name, self.size[0])).encode('ascii'), + ("#define %s_height %d\n" % (name, self.size[1])).encode('ascii'), + ("static char %s_bits[] = {\n" % name).encode('ascii'), data, b"};" + ]) + + def frombytes(self, data, decoder_name="raw", *args): + """ + Loads this image with pixel data from a bytes object. 
+ + This method is similar to the :py:func:`~PIL.Image.frombytes` function, + but loads data into this image instead of creating a new image object. + """ + + # may pass tuple instead of argument list + if len(args) == 1 and isinstance(args[0], tuple): + args = args[0] + + # default format + if decoder_name == "raw" and args == (): + args = self.mode + + # unpack data + d = _getdecoder(self.mode, decoder_name, args) + d.setimage(self.im) + s = d.decode(data) + + if s[0] >= 0: + raise ValueError("not enough image data") + if s[1] != 0: + raise ValueError("cannot decode image data") + + def fromstring(self, *args, **kw): + raise Exception("fromstring() has been removed. " + + "Please call frombytes() instead.") + + def load(self): + """ + Allocates storage for the image and loads the pixel data. In + normal cases, you don't need to call this method, since the + Image class automatically loads an opened image when it is + accessed for the first time. This method will close the file + associated with the image. + + :returns: An image access object. + :rtype: :ref:`PixelAccess` or :py:class:`PIL.PyAccess` + """ + if self.im and self.palette and self.palette.dirty: + # realize palette + self.im.putpalette(*self.palette.getdata()) + self.palette.dirty = 0 + self.palette.mode = "RGB" + self.palette.rawmode = None + if "transparency" in self.info: + if isinstance(self.info["transparency"], int): + self.im.putpalettealpha(self.info["transparency"], 0) + else: + self.im.putpalettealphas(self.info["transparency"]) + self.palette.mode = "RGBA" + + if self.im: + if HAS_CFFI and USE_CFFI_ACCESS: + if self.pyaccess: + return self.pyaccess + from PIL import PyAccess + self.pyaccess = PyAccess.new(self, self.readonly) + if self.pyaccess: + return self.pyaccess + return self.im.pixel_access(self.readonly) + + def verify(self): + """ + Verifies the contents of a file. For data read from a file, this + method attempts to determine if the file is broken, without + actually decoding the image data. If this method finds any + problems, it raises suitable exceptions. If you need to load + the image after using this method, you must reopen the image + file. + """ + pass + + def convert(self, mode=None, matrix=None, dither=None, + palette=WEB, colors=256): + """ + Returns a converted copy of this image. For the "P" mode, this + method translates pixels through the palette. If mode is + omitted, a mode is chosen so that all information in the image + and the palette can be represented without a palette. + + The current version supports all possible conversions between + "L", "RGB" and "CMYK." The **matrix** argument only supports "L" + and "RGB". + + When translating a color image to black and white (mode "L"), + the library uses the ITU-R 601-2 luma transform:: + + L = R * 299/1000 + G * 587/1000 + B * 114/1000 + + The default method of converting a greyscale ("L") or "RGB" + image into a bilevel (mode "1") image uses Floyd-Steinberg + dither to approximate the original image luminosity levels. If + dither is NONE, all non-zero values are set to 255 (white). To + use other thresholds, use the :py:meth:`~PIL.Image.Image.point` + method. + + :param mode: The requested mode. See: :ref:`concept-modes`. + :param matrix: An optional conversion matrix. If given, this + should be 4- or 12-tuple containing floating point values. + :param dither: Dithering method, used when converting from + mode "RGB" to "P" or from "RGB" or "L" to "1". + Available methods are NONE or FLOYDSTEINBERG (default). 
+ :param palette: Palette to use when converting from mode "RGB" + to "P". Available palettes are WEB or ADAPTIVE. + :param colors: Number of colors to use for the ADAPTIVE palette. + Defaults to 256. + :rtype: :py:class:`~PIL.Image.Image` + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + if not mode: + # determine default mode + if self.mode == "P": + self.load() + if self.palette: + mode = self.palette.mode + else: + mode = "RGB" + else: + return self.copy() + + self.load() + + if matrix: + # matrix conversion + if mode not in ("L", "RGB"): + raise ValueError("illegal conversion") + im = self.im.convert_matrix(mode, matrix) + return self._new(im) + + if mode == "P" and self.mode == "RGBA": + return self.quantize(colors) + + trns = None + delete_trns = False + # transparency handling + if "transparency" in self.info and \ + self.info['transparency'] is not None: + if self.mode in ('L', 'RGB') and mode == 'RGBA': + # Use transparent conversion to promote from transparent + # color to an alpha channel. + return self._new(self.im.convert_transparent( + mode, self.info['transparency'])) + elif self.mode in ('L', 'RGB', 'P') and mode in ('L', 'RGB', 'P'): + t = self.info['transparency'] + if isinstance(t, bytes): + # Dragons. This can't be represented by a single color + warnings.warn('Palette images with Transparency ' + + ' expressed in bytes should be converted ' + + 'to RGBA images') + delete_trns = True + else: + # get the new transparency color. + # use existing conversions + trns_im = Image()._new(core.new(self.mode, (1, 1))) + if self.mode == 'P': + trns_im.putpalette(self.palette) + trns_im.putpixel((0, 0), t) + + if mode in ('L', 'RGB'): + trns_im = trns_im.convert(mode) + else: + # can't just retrieve the palette number, got to do it + # after quantization. + trns_im = trns_im.convert('RGB') + trns = trns_im.getpixel((0, 0)) + + elif self.mode == 'P' and mode == 'RGBA': + t = self.info['transparency'] + delete_trns = True + + if isinstance(t, bytes): + self.im.putpalettealphas(t) + elif isinstance(t, int): + self.im.putpalettealpha(t, 0) + else: + raise ValueError("Transparency for P mode should" + + " be bytes or int") + + if mode == "P" and palette == ADAPTIVE: + im = self.im.quantize(colors) + new = self._new(im) + from PIL import ImagePalette + new.palette = ImagePalette.raw("RGB", new.im.getpalette("RGB")) + if delete_trns: + # This could possibly happen if we requantize to fewer colors. + # The transparency would be totally off in that case. + del(new.info['transparency']) + if trns is not None: + try: + new.info['transparency'] = new.palette.getcolor(trns) + except: + # if we can't make a transparent color, don't leave the old + # transparency hanging around to mess us up. + del(new.info['transparency']) + warnings.warn("Couldn't allocate palette entry " + + "for transparency") + return new + + # colorspace conversion + if dither is None: + dither = FLOYDSTEINBERG + + try: + im = self.im.convert(mode, dither) + except ValueError: + try: + # normalize source image and try again + im = self.im.convert(getmodebase(self.mode)) + im = im.convert(mode, dither) + except KeyError: + raise ValueError("illegal conversion") + + new_im = self._new(im) + if delete_trns: + # crash fail if we leave a bytes transparency in an rgb/l mode. 
+ del(new_im.info['transparency']) + if trns is not None: + if new_im.mode == 'P': + try: + new_im.info['transparency'] = new_im.palette.getcolor(trns) + except: + del(new_im.info['transparency']) + warnings.warn("Couldn't allocate palette entry " + + "for transparency") + else: + new_im.info['transparency'] = trns + return new_im + + def quantize(self, colors=256, method=None, kmeans=0, palette=None): + """ + Convert the image to 'P' mode with the specified number + of colors. + + :param colors: The desired number of colors, <= 256 + :param method: 0 = median cut + 1 = maximum coverage + 2 = fast octree + :param kmeans: Integer + :param palette: Quantize to the :py:class:`PIL.ImagingPalette` palette. + :returns: A new image + + """ + + self.load() + + if method is None: + # defaults: + method = 0 + if self.mode == 'RGBA': + method = 2 + + if self.mode == 'RGBA' and method != 2: + # Caller specified an invalid mode. + raise ValueError('Fast Octree (method == 2) is the ' + + ' only valid method for quantizing RGBA images') + + if palette: + # use palette from reference image + palette.load() + if palette.mode != "P": + raise ValueError("bad mode for palette image") + if self.mode != "RGB" and self.mode != "L": + raise ValueError( + "only RGB or L mode images can be quantized to a palette" + ) + im = self.im.convert("P", 1, palette.im) + return self._makeself(im) + + im = self.im.quantize(colors, method, kmeans) + return self._new(im) + + def copy(self): + """ + Copies this image. Use this method if you wish to paste things + into an image, but still retain the original. + + :rtype: :py:class:`~PIL.Image.Image` + :returns: An :py:class:`~PIL.Image.Image` object. + """ + self.load() + im = self.im.copy() + return self._new(im) + + def crop(self, box=None): + """ + Returns a rectangular region from this image. The box is a + 4-tuple defining the left, upper, right, and lower pixel + coordinate. + + This is a lazy operation. Changes to the source image may or + may not be reflected in the cropped image. To break the + connection, call the :py:meth:`~PIL.Image.Image.load` method on + the cropped copy. + + :param box: The crop rectangle, as a (left, upper, right, lower)-tuple. + :rtype: :py:class:`~PIL.Image.Image` + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + self.load() + if box is None: + return self.copy() + + # lazy operation + return _ImageCrop(self, box) + + def draft(self, mode, size): + """ + Configures the image file loader so it returns a version of the + image that as closely as possible matches the given mode and + size. For example, you can use this method to convert a color + JPEG to greyscale while loading it, or to extract a 128x192 + version from a PCD file. + + Note that this method modifies the :py:class:`~PIL.Image.Image` object + in place. If the image has already been loaded, this method has no + effect. + + :param mode: The requested mode. + :param size: The requested size. + """ + pass + + def _expand(self, xmargin, ymargin=None): + if ymargin is None: + ymargin = xmargin + self.load() + return self._new(self.im.expand(xmargin, ymargin, 0)) + + def filter(self, filter): + """ + Filters this image using the given filter. For a list of + available filters, see the :py:mod:`~PIL.ImageFilter` module. + + :param filter: Filter kernel. + :returns: An :py:class:`~PIL.Image.Image` object. 
""" + + self.load() + + if isinstance(filter, collections.Callable): + filter = filter() + if not hasattr(filter, "filter"): + raise TypeError("filter argument should be ImageFilter.Filter " + + "instance or class") + + if self.im.bands == 1: + return self._new(filter.filter(self.im)) + # fix to handle multiband images since _imaging doesn't + ims = [] + for c in range(self.im.bands): + ims.append(self._new(filter.filter(self.im.getband(c)))) + return merge(self.mode, ims) + + def getbands(self): + """ + Returns a tuple containing the name of each band in this image. + For example, **getbands** on an RGB image returns ("R", "G", "B"). + + :returns: A tuple containing band names. + :rtype: tuple + """ + return ImageMode.getmode(self.mode).bands + + def getbbox(self): + """ + Calculates the bounding box of the non-zero regions in the + image. + + :returns: The bounding box is returned as a 4-tuple defining the + left, upper, right, and lower pixel coordinate. If the image + is completely empty, this method returns None. + + """ + + self.load() + return self.im.getbbox() + + def getcolors(self, maxcolors=256): + """ + Returns a list of colors used in this image. + + :param maxcolors: Maximum number of colors. If this number is + exceeded, this method returns None. The default limit is + 256 colors. + :returns: An unsorted list of (count, pixel) values. + """ + + self.load() + if self.mode in ("1", "L", "P"): + h = self.im.histogram() + out = [] + for i in range(256): + if h[i]: + out.append((h[i], i)) + if len(out) > maxcolors: + return None + return out + return self.im.getcolors(maxcolors) + + def getdata(self, band=None): + """ + Returns the contents of this image as a sequence object + containing pixel values. The sequence object is flattened, so + that values for line one follow directly after the values of + line zero, and so on. + + Note that the sequence object returned by this method is an + internal PIL data type, which only supports certain sequence + operations. To convert it to an ordinary sequence (e.g. for + printing), use **list(im.getdata())**. + + :param band: What band to return. The default is to return + all bands. To return a single band, pass in the index + value (e.g. 0 to get the "R" band from an "RGB" image). + :returns: A sequence-like object. + """ + + self.load() + if band is not None: + return self.im.getband(band) + return self.im # could be abused + + def getextrema(self): + """ + Gets the the minimum and maximum pixel values for each band in + the image. + + :returns: For a single-band image, a 2-tuple containing the + minimum and maximum pixel value. For a multi-band image, + a tuple containing one 2-tuple for each band. + """ + + self.load() + if self.im.bands > 1: + extrema = [] + for i in range(self.im.bands): + extrema.append(self.im.getband(i).getextrema()) + return tuple(extrema) + return self.im.getextrema() + + def getim(self): + """ + Returns a capsule that points to the internal image memory. + + :returns: A capsule object. + """ + + self.load() + return self.im.ptr + + def getpalette(self): + """ + Returns the image palette as a list. + + :returns: A list of color values [r, g, b, ...], or None if the + image has no palette. + """ + + self.load() + try: + if bytes is str: + return [i8(c) for c in self.im.getpalette()] + else: + return list(self.im.getpalette()) + except ValueError: + return None # no palette + + def getpixel(self, xy): + """ + Returns the pixel value at a given position. + + :param xy: The coordinate, given as (x, y). 
+ :returns: The pixel value. If the image is a multi-layer image, + this method returns a tuple. + """ + + self.load() + if self.pyaccess: + return self.pyaccess.getpixel(xy) + return self.im.getpixel(xy) + + def getprojection(self): + """ + Get projection to x and y axes + + :returns: Two sequences, indicating where there are non-zero + pixels along the X-axis and the Y-axis, respectively. + """ + + self.load() + x, y = self.im.getprojection() + return [i8(c) for c in x], [i8(c) for c in y] + + def histogram(self, mask=None, extrema=None): + """ + Returns a histogram for the image. The histogram is returned as + a list of pixel counts, one for each pixel value in the source + image. If the image has more than one band, the histograms for + all bands are concatenated (for example, the histogram for an + "RGB" image contains 768 values). + + A bilevel image (mode "1") is treated as a greyscale ("L") image + by this method. + + If a mask is provided, the method returns a histogram for those + parts of the image where the mask image is non-zero. The mask + image must have the same size as the image, and be either a + bi-level image (mode "1") or a greyscale image ("L"). + + :param mask: An optional mask. + :returns: A list containing pixel counts. + """ + self.load() + if mask: + mask.load() + return self.im.histogram((0, 0), mask.im) + if self.mode in ("I", "F"): + if extrema is None: + extrema = self.getextrema() + return self.im.histogram(extrema) + return self.im.histogram() + + def offset(self, xoffset, yoffset=None): + raise Exception("offset() has been removed. " + + "Please call ImageChops.offset() instead.") + + def paste(self, im, box=None, mask=None): + """ + Pastes another image into this image. The box argument is either + a 2-tuple giving the upper left corner, a 4-tuple defining the + left, upper, right, and lower pixel coordinate, or None (same as + (0, 0)). If a 4-tuple is given, the size of the pasted image + must match the size of the region. + + If the modes don't match, the pasted image is converted to the mode of + this image (see the :py:meth:`~PIL.Image.Image.convert` method for + details). + + Instead of an image, the source can be a integer or tuple + containing pixel values. The method then fills the region + with the given color. When creating RGB images, you can + also use color strings as supported by the ImageColor module. + + If a mask is given, this method updates only the regions + indicated by the mask. You can use either "1", "L" or "RGBA" + images (in the latter case, the alpha band is used as mask). + Where the mask is 255, the given image is copied as is. Where + the mask is 0, the current value is preserved. Intermediate + values will mix the two images together, including their alpha + channels if they have them. + + See :py:meth:`~PIL.Image.Image.alpha_composite` if you want to + combine images with respect to their alpha channels. + + :param im: Source image or pixel value (integer or tuple). + :param box: An optional 4-tuple giving the region to paste into. + If a 2-tuple is used instead, it's treated as the upper left + corner. If omitted or None, the source is pasted into the + upper left corner. + + If an image is given as the second argument and there is no + third, the box defaults to (0, 0), and the second argument + is interpreted as a mask image. + :param mask: An optional mask image. 
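# Usage sketch for the paste() signature documented above: a 2-tuple box is
# treated as the upper-left corner, and an "L" mask of the pasted image's
# size blends the two images (128 gives roughly a 50% mix).
from PIL import Image

base = Image.new("RGB", (100, 100), "white")
patch = Image.new("RGB", (40, 40), "blue")
mask = Image.new("L", patch.size, 128)
base.paste(patch, (10, 10), mask)         # modifies base in place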
+ """ + + if isImageType(box) and mask is None: + # abbreviated paste(im, mask) syntax + mask = box + box = None + + if box is None: + # cover all of self + box = (0, 0) + self.size + + if len(box) == 2: + # lower left corner given; get size from image or mask + if isImageType(im): + size = im.size + elif isImageType(mask): + size = mask.size + else: + # FIXME: use self.size here? + raise ValueError( + "cannot determine region size; use 4-item box" + ) + box = box + (box[0]+size[0], box[1]+size[1]) + + if isStringType(im): + from PIL import ImageColor + im = ImageColor.getcolor(im, self.mode) + + elif isImageType(im): + im.load() + if self.mode != im.mode: + if self.mode != "RGB" or im.mode not in ("RGBA", "RGBa"): + # should use an adapter for this! + im = im.convert(self.mode) + im = im.im + + self.load() + if self.readonly: + self._copy() + + if mask: + mask.load() + self.im.paste(im, box, mask.im) + else: + self.im.paste(im, box) + + def point(self, lut, mode=None): + """ + Maps this image through a lookup table or function. + + :param lut: A lookup table, containing 256 (or 65336 if + self.mode=="I" and mode == "L") values per band in the + image. A function can be used instead, it should take a + single argument. The function is called once for each + possible pixel value, and the resulting table is applied to + all bands of the image. + :param mode: Output mode (default is same as input). In the + current version, this can only be used if the source image + has mode "L" or "P", and the output has mode "1" or the + source image mode is "I" and the output mode is "L". + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + self.load() + + if isinstance(lut, ImagePointHandler): + return lut.point(self) + + if callable(lut): + # if it isn't a list, it should be a function + if self.mode in ("I", "I;16", "F"): + # check if the function can be used with point_transform + # UNDONE wiredfool -- I think this prevents us from ever doing + # a gamma function point transform on > 8bit images. + scale, offset = _getscaleoffset(lut) + return self._new(self.im.point_transform(scale, offset)) + # for other modes, convert the function to a table + lut = [lut(i) for i in range(256)] * self.im.bands + + if self.mode == "F": + # FIXME: _imaging returns a confusing error message for this case + raise ValueError("point operation not supported for this mode") + + return self._new(self.im.point(lut, mode)) + + def putalpha(self, alpha): + """ + Adds or replaces the alpha layer in this image. If the image + does not have an alpha layer, it's converted to "LA" or "RGBA". + The new layer must be either "L" or "1". + + :param alpha: The new alpha layer. This can either be an "L" or "1" + image having the same size as this image, or an integer or + other color value. 
+ """ + + self.load() + if self.readonly: + self._copy() + + if self.mode not in ("LA", "RGBA"): + # attempt to promote self to a matching alpha mode + try: + mode = getmodebase(self.mode) + "A" + try: + self.im.setmode(mode) + self.pyaccess = None + except (AttributeError, ValueError): + # do things the hard way + im = self.im.convert(mode) + if im.mode not in ("LA", "RGBA"): + raise ValueError # sanity check + self.im = im + self.pyaccess = None + self.mode = self.im.mode + except (KeyError, ValueError): + raise ValueError("illegal image mode") + + if self.mode == "LA": + band = 1 + else: + band = 3 + + if isImageType(alpha): + # alpha layer + if alpha.mode not in ("1", "L"): + raise ValueError("illegal image mode") + alpha.load() + if alpha.mode == "1": + alpha = alpha.convert("L") + else: + # constant alpha + try: + self.im.fillband(band, alpha) + except (AttributeError, ValueError): + # do things the hard way + alpha = new("L", self.size, alpha) + else: + return + + self.im.putband(alpha.im, band) + + def putdata(self, data, scale=1.0, offset=0.0): + """ + Copies pixel data to this image. This method copies data from a + sequence object into the image, starting at the upper left + corner (0, 0), and continuing until either the image or the + sequence ends. The scale and offset values are used to adjust + the sequence values: **pixel = value*scale + offset**. + + :param data: A sequence object. + :param scale: An optional scale value. The default is 1.0. + :param offset: An optional offset value. The default is 0.0. + """ + + self.load() + if self.readonly: + self._copy() + + self.im.putdata(data, scale, offset) + + def putpalette(self, data, rawmode="RGB"): + """ + Attaches a palette to this image. The image must be a "P" or + "L" image, and the palette sequence must contain 768 integer + values, where each group of three values represent the red, + green, and blue values for the corresponding pixel + index. Instead of an integer sequence, you can use an 8-bit + string. + + :param data: A palette sequence (either a list or a string). + """ + from PIL import ImagePalette + + if self.mode not in ("L", "P"): + raise ValueError("illegal image mode") + self.load() + if isinstance(data, ImagePalette.ImagePalette): + palette = ImagePalette.raw(data.rawmode, data.palette) + else: + if not isinstance(data, bytes): + if bytes is str: + data = "".join(chr(x) for x in data) + else: + data = bytes(data) + palette = ImagePalette.raw(rawmode, data) + self.mode = "P" + self.palette = palette + self.palette.mode = "RGB" + self.load() # install new palette + + def putpixel(self, xy, value): + """ + Modifies the pixel at the given position. The color is given as + a single numerical value for single-band images, and a tuple for + multi-band images. + + Note that this method is relatively slow. For more extensive changes, + use :py:meth:`~PIL.Image.Image.paste` or the :py:mod:`~PIL.ImageDraw` + module instead. + + See: + + * :py:meth:`~PIL.Image.Image.paste` + * :py:meth:`~PIL.Image.Image.putdata` + * :py:mod:`~PIL.ImageDraw` + + :param xy: The pixel coordinate, given as (x, y). + :param value: The pixel value. + """ + + self.load() + if self.readonly: + self._copy() + self.pyaccess = None + self.load() + + if self.pyaccess: + return self.pyaccess.putpixel(xy, value) + return self.im.putpixel(xy, value) + + def resize(self, size, resample=NEAREST): + """ + Returns a resized copy of this image. + + :param size: The requested size in pixels, as a 2-tuple: + (width, height). 
+ :param resample: An optional resampling filter. This can be + one of :py:attr:`PIL.Image.NEAREST` (use nearest neighbour), + :py:attr:`PIL.Image.BILINEAR` (linear interpolation), + :py:attr:`PIL.Image.BICUBIC` (cubic spline interpolation), or + :py:attr:`PIL.Image.LANCZOS` (a high-quality downsampling filter). + If omitted, or if the image has mode "1" or "P", it is + set :py:attr:`PIL.Image.NEAREST`. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + if resample not in (NEAREST, BILINEAR, BICUBIC, LANCZOS): + raise ValueError("unknown resampling filter") + + self.load() + + size = tuple(size) + if self.size == size: + return self._new(self.im) + + if self.mode in ("1", "P"): + resample = NEAREST + + if self.mode == 'RGBA': + return self.convert('RGBa').resize(size, resample).convert('RGBA') + + return self._new(self.im.resize(size, resample)) + + def rotate(self, angle, resample=NEAREST, expand=0): + """ + Returns a rotated copy of this image. This method returns a + copy of this image, rotated the given number of degrees counter + clockwise around its centre. + + :param angle: In degrees counter clockwise. + :param resample: An optional resampling filter. This can be + one of :py:attr:`PIL.Image.NEAREST` (use nearest neighbour), + :py:attr:`PIL.Image.BILINEAR` (linear interpolation in a 2x2 + environment), or :py:attr:`PIL.Image.BICUBIC` + (cubic spline interpolation in a 4x4 environment). + If omitted, or if the image has mode "1" or "P", it is + set :py:attr:`PIL.Image.NEAREST`. + :param expand: Optional expansion flag. If true, expands the output + image to make it large enough to hold the entire rotated image. + If false or omitted, make the output image the same size as the + input image. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + if expand: + import math + angle = -angle * math.pi / 180 + matrix = [ + math.cos(angle), math.sin(angle), 0.0, + -math.sin(angle), math.cos(angle), 0.0 + ] + + def transform(x, y, matrix=matrix): + (a, b, c, d, e, f) = matrix + return a*x + b*y + c, d*x + e*y + f + + # calculate output size + w, h = self.size + xx = [] + yy = [] + for x, y in ((0, 0), (w, 0), (w, h), (0, h)): + x, y = transform(x, y) + xx.append(x) + yy.append(y) + w = int(math.ceil(max(xx)) - math.floor(min(xx))) + h = int(math.ceil(max(yy)) - math.floor(min(yy))) + + # adjust center + x, y = transform(w / 2.0, h / 2.0) + matrix[2] = self.size[0] / 2.0 - x + matrix[5] = self.size[1] / 2.0 - y + + return self.transform((w, h), AFFINE, matrix, resample) + + if resample not in (NEAREST, BILINEAR, BICUBIC): + raise ValueError("unknown resampling filter") + + self.load() + + if self.mode in ("1", "P"): + resample = NEAREST + + return self._new(self.im.rotate(angle, resample, expand)) + + def save(self, fp, format=None, **params): + """ + Saves this image under the given filename. If no format is + specified, the format to use is determined from the filename + extension, if possible. + + Keyword options can be used to provide additional instructions + to the writer. If a writer doesn't recognise an option, it is + silently ignored. The available options are described in the + :doc:`image format documentation + <../handbook/image-file-formats>` for each writer. + + You can use a file object instead of a filename. In this case, + you must always specify the format. The file object must + implement the ``seek``, ``tell``, and ``write`` + methods, and be opened in binary mode. + + :param fp: A filename (string), pathlib.Path object or file object. 
+ :param format: Optional format override. If omitted, the + format to use is determined from the filename extension. + If a file object was used instead of a filename, this + parameter should always be used. + :param options: Extra parameters to the image writer. + :returns: None + :exception KeyError: If the output format could not be determined + from the file name. Use the format option to solve this. + :exception IOError: If the file could not be written. The file + may have been created, and may contain partial data. + """ + + filename = "" + open_fp = False + if isPath(fp): + filename = fp + open_fp = True + elif sys.version_info >= (3, 4): + from pathlib import Path + if isinstance(fp, Path): + filename = str(fp.resolve()) + open_fp = True + elif hasattr(fp, "name") and isPath(fp.name): + # only set the name for metadata purposes + filename = fp.name + + # may mutate self! + self.load() + + save_all = False + if 'save_all' in params: + save_all = params['save_all'] + del params['save_all'] + self.encoderinfo = params + self.encoderconfig = () + + preinit() + + ext = os.path.splitext(filename)[1].lower() + + if not format: + if ext not in EXTENSION: + init() + format = EXTENSION[ext] + + if format.upper() not in SAVE: + init() + if save_all: + save_handler = SAVE_ALL[format.upper()] + else: + save_handler = SAVE[format.upper()] + + if open_fp: + fp = builtins.open(filename, "wb") + + try: + save_handler(self, fp, filename) + finally: + # do what we can to clean up + if open_fp: + fp.close() + + def seek(self, frame): + """ + Seeks to the given frame in this sequence file. If you seek + beyond the end of the sequence, the method raises an + **EOFError** exception. When a sequence file is opened, the + library automatically seeks to frame 0. + + Note that in the current version of the library, most sequence + formats only allows you to seek to the next frame. + + See :py:meth:`~PIL.Image.Image.tell`. + + :param frame: Frame number, starting at 0. + :exception EOFError: If the call attempts to seek beyond the end + of the sequence. + """ + + # overridden by file handlers + if frame != 0: + raise EOFError + + def show(self, title=None, command=None): + """ + Displays this image. This method is mainly intended for + debugging purposes. + + On Unix platforms, this method saves the image to a temporary + PPM file, and calls the **xv** utility. + + On Windows, it saves the image to a temporary BMP file, and uses + the standard BMP display utility to show it (usually Paint). + + :param title: Optional title to use for the image window, + where possible. + :param command: command used to show the image + """ + + _show(self, title=title, command=command) + + def split(self): + """ + Split this image into individual bands. This method returns a + tuple of individual image bands from an image. For example, + splitting an "RGB" image creates three new images each + containing a copy of one of the original bands (red, green, + blue). + + :returns: A tuple containing bands. + """ + + self.load() + if self.im.bands == 1: + ims = [self.copy()] + else: + ims = [] + for i in range(self.im.bands): + ims.append(self._new(self.im.getband(i))) + return tuple(ims) + + def tell(self): + """ + Returns the current frame number. See :py:meth:`~PIL.Image.Image.seek`. + + :returns: Frame number, starting with 0. + """ + return 0 + + def thumbnail(self, size, resample=BICUBIC): + """ + Make this image into a thumbnail. 
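# Usage sketch for the save() behaviour documented above: when writing to a
# file object rather than a filename, the format cannot be guessed from an
# extension and must be passed explicitly.
import io

from PIL import Image

im = Image.new("RGB", (16, 16), "green")
buf = io.BytesIO()
im.save(buf, format="PNG")                # format is required for file objects
png_bytes = buf.getvalue()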
This method modifies the + image to contain a thumbnail version of itself, no larger than + the given size. This method calculates an appropriate thumbnail + size to preserve the aspect of the image, calls the + :py:meth:`~PIL.Image.Image.draft` method to configure the file reader + (where applicable), and finally resizes the image. + + Note that this function modifies the :py:class:`~PIL.Image.Image` + object in place. If you need to use the full resolution image as well, + apply this method to a :py:meth:`~PIL.Image.Image.copy` of the original + image. + + :param size: Requested size. + :param resample: Optional resampling filter. This can be one + of :py:attr:`PIL.Image.NEAREST`, :py:attr:`PIL.Image.BILINEAR`, + :py:attr:`PIL.Image.BICUBIC`, or :py:attr:`PIL.Image.LANCZOS`. + If omitted, it defaults to :py:attr:`PIL.Image.BICUBIC`. + (was :py:attr:`PIL.Image.NEAREST` prior to version 2.5.0) + :returns: None + """ + + # preserve aspect ratio + x, y = self.size + if x > size[0]: + y = int(max(y * size[0] / x, 1)) + x = int(size[0]) + if y > size[1]: + x = int(max(x * size[1] / y, 1)) + y = int(size[1]) + size = x, y + + if size == self.size: + return + + self.draft(None, size) + + im = self.resize(size, resample) + + self.im = im.im + self.mode = im.mode + self.size = size + + self.readonly = 0 + self.pyaccess = None + + # FIXME: the different transform methods need further explanation + # instead of bloating the method docs, add a separate chapter. + def transform(self, size, method, data=None, resample=NEAREST, fill=1): + """ + Transforms this image. This method creates a new image with the + given size, and the same mode as the original, and copies data + to the new image using the given transform. + + :param size: The output size. + :param method: The transformation method. This is one of + :py:attr:`PIL.Image.EXTENT` (cut out a rectangular subregion), + :py:attr:`PIL.Image.AFFINE` (affine transform), + :py:attr:`PIL.Image.PERSPECTIVE` (perspective transform), + :py:attr:`PIL.Image.QUAD` (map a quadrilateral to a rectangle), or + :py:attr:`PIL.Image.MESH` (map a number of source quadrilaterals + in one operation). + :param data: Extra data to the transformation method. + :param resample: Optional resampling filter. It can be one of + :py:attr:`PIL.Image.NEAREST` (use nearest neighbour), + :py:attr:`PIL.Image.BILINEAR` (linear interpolation in a 2x2 + environment), or :py:attr:`PIL.Image.BICUBIC` (cubic spline + interpolation in a 4x4 environment). If omitted, or if the image + has mode "1" or "P", it is set to :py:attr:`PIL.Image.NEAREST`. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + if self.mode == 'RGBA': + return self.convert('RGBa').transform( + size, method, data, resample, fill).convert('RGBA') + + if isinstance(method, ImageTransformHandler): + return method.transform(size, self, resample=resample, fill=fill) + if hasattr(method, "getdata"): + # compatibility w. old-style transform objects + method, data = method.getdata() + if data is None: + raise ValueError("missing method data") + + im = new(self.mode, size, None) + if method == MESH: + # list of quads + for box, quad in data: + im.__transformer(box, self, QUAD, quad, resample, fill) + else: + im.__transformer((0, 0)+size, self, method, data, resample, fill) + + return im + + def __transformer(self, box, image, method, data, + resample=NEAREST, fill=1): + + # FIXME: this should be turned into a lazy operation (?) 
+ + w = box[2]-box[0] + h = box[3]-box[1] + + if method == AFFINE: + # change argument order to match implementation + data = (data[2], data[0], data[1], + data[5], data[3], data[4]) + elif method == EXTENT: + # convert extent to an affine transform + x0, y0, x1, y1 = data + xs = float(x1 - x0) / w + ys = float(y1 - y0) / h + method = AFFINE + data = (x0 + xs/2, xs, 0, y0 + ys/2, 0, ys) + elif method == PERSPECTIVE: + # change argument order to match implementation + data = (data[2], data[0], data[1], + data[5], data[3], data[4], + data[6], data[7]) + elif method == QUAD: + # quadrilateral warp. data specifies the four corners + # given as NW, SW, SE, and NE. + nw = data[0:2] + sw = data[2:4] + se = data[4:6] + ne = data[6:8] + x0, y0 = nw + As = 1.0 / w + At = 1.0 / h + data = (x0, (ne[0]-x0)*As, (sw[0]-x0)*At, + (se[0]-sw[0]-ne[0]+x0)*As*At, + y0, (ne[1]-y0)*As, (sw[1]-y0)*At, + (se[1]-sw[1]-ne[1]+y0)*As*At) + else: + raise ValueError("unknown transformation method") + + if resample not in (NEAREST, BILINEAR, BICUBIC): + raise ValueError("unknown resampling filter") + + image.load() + + self.load() + + if image.mode in ("1", "P"): + resample = NEAREST + + self.im.transform2(box, image.im, method, data, resample, fill) + + def transpose(self, method): + """ + Transpose image (flip or rotate in 90 degree steps) + + :param method: One of :py:attr:`PIL.Image.FLIP_LEFT_RIGHT`, + :py:attr:`PIL.Image.FLIP_TOP_BOTTOM`, :py:attr:`PIL.Image.ROTATE_90`, + :py:attr:`PIL.Image.ROTATE_180`, :py:attr:`PIL.Image.ROTATE_270` or + :py:attr:`PIL.Image.TRANSPOSE`. + :returns: Returns a flipped or rotated copy of this image. + """ + + self.load() + return self._new(self.im.transpose(method)) + + def effect_spread(self, distance): + """ + Randomly spread pixels in an image. + + :param distance: Distance to spread pixels. + """ + self.load() + im = self.im.effect_spread(distance) + return self._new(im) + + def toqimage(self): + """Returns a QImage copy of this image""" + from PIL import ImageQt + if not ImageQt.qt_is_installed: + raise ImportError("Qt bindings are not installed") + return ImageQt.toqimage(self) + + def toqpixmap(self): + """Returns a QPixmap copy of this image""" + from PIL import ImageQt + if not ImageQt.qt_is_installed: + raise ImportError("Qt bindings are not installed") + return ImageQt.toqpixmap(self) + + +# -------------------------------------------------------------------- +# Lazy operations + +class _ImageCrop(Image): + + def __init__(self, im, box): + + Image.__init__(self) + + x0, y0, x1, y1 = box + if x1 < x0: + x1 = x0 + if y1 < y0: + y1 = y0 + + self.mode = im.mode + self.size = x1-x0, y1-y0 + + self.__crop = x0, y0, x1, y1 + + self.im = im.im + + def load(self): + + # lazy evaluation! + if self.__crop: + self.im = self.im.crop(self.__crop) + self.__crop = None + + if self.im: + return self.im.pixel_access(self.readonly) + + # FIXME: future versions should optimize crop/paste + # sequences! + + +# -------------------------------------------------------------------- +# Abstract handlers. 
+ +class ImagePointHandler(object): + # used as a mixin by point transforms (for use with im.point) + pass + + +class ImageTransformHandler(object): + # used as a mixin by geometry transforms (for use with im.transform) + pass + + +# -------------------------------------------------------------------- +# Factories + +# +# Debugging + +def _wedge(): + "Create greyscale wedge (for debugging only)" + + return Image()._new(core.wedge("L")) + + +def new(mode, size, color=0): + """ + Creates a new image with the given mode and size. + + :param mode: The mode to use for the new image. See: + :ref:`concept-modes`. + :param size: A 2-tuple, containing (width, height) in pixels. + :param color: What color to use for the image. Default is black. + If given, this should be a single integer or floating point value + for single-band modes, and a tuple for multi-band modes (one value + per band). When creating RGB images, you can also use color + strings as supported by the ImageColor module. If the color is + None, the image is not initialised. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + if color is None: + # don't initialize + return Image()._new(core.new(mode, size)) + + if isStringType(color): + # css3-style specifier + + from PIL import ImageColor + color = ImageColor.getcolor(color, mode) + + return Image()._new(core.fill(mode, size, color)) + + +def frombytes(mode, size, data, decoder_name="raw", *args): + """ + Creates a copy of an image memory from pixel data in a buffer. + + In its simplest form, this function takes three arguments + (mode, size, and unpacked pixel data). + + You can also use any pixel decoder supported by PIL. For more + information on available decoders, see the section + :ref:`Writing Your Own File Decoder `. + + Note that this function decodes pixel data only, not entire images. + If you have an entire image in a string, wrap it in a + :py:class:`~io.BytesIO` object, and use :py:func:`~PIL.Image.open` to load + it. + + :param mode: The image mode. See: :ref:`concept-modes`. + :param size: The image size. + :param data: A byte buffer containing raw data for the given mode. + :param decoder_name: What decoder to use. + :param args: Additional parameters for the given decoder. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + # may pass tuple instead of argument list + if len(args) == 1 and isinstance(args[0], tuple): + args = args[0] + + if decoder_name == "raw" and args == (): + args = mode + + im = new(mode, size) + im.frombytes(data, decoder_name, args) + return im + + +def fromstring(*args, **kw): + raise Exception("fromstring() has been removed. " + + "Please call frombytes() instead.") + + +def frombuffer(mode, size, data, decoder_name="raw", *args): + """ + Creates an image memory referencing pixel data in a byte buffer. + + This function is similar to :py:func:`~PIL.Image.frombytes`, but uses data + in the byte buffer, where possible. This means that changes to the + original buffer object are reflected in this image). Not all modes can + share memory; supported modes include "L", "RGBX", "RGBA", and "CMYK". + + Note that this function decodes pixel data only, not entire images. + If you have an entire image file in a string, wrap it in a + **BytesIO** object, and use :py:func:`~PIL.Image.open` to load it. + + In the current version, the default parameters used for the "raw" decoder + differs from that used for :py:func:`~PIL.Image.fromstring`. This is a + bug, and will probably be fixed in a future release. 
The current release + issues a warning if you do this; to disable the warning, you should provide + the full set of parameters. See below for details. + + :param mode: The image mode. See: :ref:`concept-modes`. + :param size: The image size. + :param data: A bytes or other buffer object containing raw + data for the given mode. + :param decoder_name: What decoder to use. + :param args: Additional parameters for the given decoder. For the + default encoder ("raw"), it's recommended that you provide the + full set of parameters:: + + frombuffer(mode, size, data, "raw", mode, 0, 1) + + :returns: An :py:class:`~PIL.Image.Image` object. + + .. versionadded:: 1.1.4 + """ + + # may pass tuple instead of argument list + if len(args) == 1 and isinstance(args[0], tuple): + args = args[0] + + if decoder_name == "raw": + if args == (): + warnings.warn( + "the frombuffer defaults may change in a future release; " + "for portability, change the call to read:\n" + " frombuffer(mode, size, data, 'raw', mode, 0, 1)", + RuntimeWarning, stacklevel=2 + ) + args = mode, 0, -1 # may change to (mode, 0, 1) post-1.1.6 + if args[0] in _MAPMODES: + im = new(mode, (1, 1)) + im = im._new( + core.map_buffer(data, size, decoder_name, None, 0, args) + ) + im.readonly = 1 + return im + + return frombytes(mode, size, data, decoder_name, args) + + +def fromarray(obj, mode=None): + """ + Creates an image memory from an object exporting the array interface + (using the buffer protocol). + + If obj is not contiguous, then the tobytes method is called + and :py:func:`~PIL.Image.frombuffer` is used. + + :param obj: Object with array interface + :param mode: Mode to use (will be determined from type if None) + See: :ref:`concept-modes`. + :returns: An image object. + + .. versionadded:: 1.1.6 + """ + arr = obj.__array_interface__ + shape = arr['shape'] + ndim = len(shape) + try: + strides = arr['strides'] + except KeyError: + strides = None + if mode is None: + try: + typekey = (1, 1) + shape[2:], arr['typestr'] + mode, rawmode = _fromarray_typemap[typekey] + except KeyError: + # print typekey + raise TypeError("Cannot handle this data type") + else: + rawmode = mode + if mode in ["1", "L", "I", "P", "F"]: + ndmax = 2 + elif mode == "RGB": + ndmax = 3 + else: + ndmax = 4 + if ndim > ndmax: + raise ValueError("Too many dimensions: %d > %d." 
% (ndim, ndmax)) + + size = shape[1], shape[0] + if strides is not None: + if hasattr(obj, 'tobytes'): + obj = obj.tobytes() + else: + obj = obj.tostring() + + return frombuffer(mode, size, obj, "raw", rawmode, 0, 1) + + +def fromqimage(im): + """Creates an image instance from a QImage image""" + from PIL import ImageQt + if not ImageQt.qt_is_installed: + raise ImportError("Qt bindings are not installed") + return ImageQt.fromqimage(im) + + +def fromqpixmap(im): + """Creates an image instance from a QPixmap image""" + from PIL import ImageQt + if not ImageQt.qt_is_installed: + raise ImportError("Qt bindings are not installed") + return ImageQt.fromqpixmap(im) + +_fromarray_typemap = { + # (shape, typestr) => mode, rawmode + # first two members of shape are set to one + # ((1, 1), "|b1"): ("1", "1"), # broken + ((1, 1), "|u1"): ("L", "L"), + ((1, 1), "|i1"): ("I", "I;8"), + ((1, 1), "i2"): ("I", "I;16B"), + ((1, 1), "i4"): ("I", "I;32B"), + ((1, 1), "f4"): ("F", "F;32BF"), + ((1, 1), "f8"): ("F", "F;64BF"), + ((1, 1, 3), "|u1"): ("RGB", "RGB"), + ((1, 1, 4), "|u1"): ("RGBA", "RGBA"), + } + +# shortcuts +_fromarray_typemap[((1, 1), _ENDIAN + "i4")] = ("I", "I") +_fromarray_typemap[((1, 1), _ENDIAN + "f4")] = ("F", "F") + + +def _decompression_bomb_check(size): + if MAX_IMAGE_PIXELS is None: + return + + pixels = size[0] * size[1] + + if pixels > MAX_IMAGE_PIXELS: + warnings.warn( + "Image size (%d pixels) exceeds limit of %d pixels, " + "could be decompression bomb DOS attack." % + (pixels, MAX_IMAGE_PIXELS), + DecompressionBombWarning) + + +def open(fp, mode="r"): + """ + Opens and identifies the given image file. + + This is a lazy operation; this function identifies the file, but + the file remains open and the actual image data is not read from + the file until you try to process the data (or call the + :py:meth:`~PIL.Image.Image.load` method). See + :py:func:`~PIL.Image.new`. + + :param fp: A filename (string), pathlib.Path object or a file object. + The file object must implement :py:meth:`~file.read`, + :py:meth:`~file.seek`, and :py:meth:`~file.tell` methods, + and be opened in binary mode. + :param mode: The mode. If given, this argument must be "r". + :returns: An :py:class:`~PIL.Image.Image` object. + :exception IOError: If the file cannot be found, or the image cannot be + opened and identified. + """ + + if mode != "r": + raise ValueError("bad mode %r" % mode) + + filename = "" + if isPath(fp): + filename = fp + elif sys.version_info >= (3, 4): + from pathlib import Path + if isinstance(fp, Path): + filename = str(fp.resolve()) + if filename: + fp = builtins.open(filename, "rb") + + try: + fp.seek(0) + except (AttributeError, io.UnsupportedOperation): + fp = io.BytesIO(fp.read()) + + prefix = fp.read(16) + + preinit() + + def _open_core(fp, filename, prefix): + for i in ID: + try: + factory, accept = OPEN[i] + if not accept or accept(prefix): + fp.seek(0) + im = factory(fp, filename) + _decompression_bomb_check(im.size) + return im + except (SyntaxError, IndexError, TypeError, struct.error): + # Leave disabled by default, spams the logs with image + # opening failures that are entirely expected. + # logger.debug("", exc_info=True) + continue + return None + + im = _open_core(fp, filename, prefix) + + if im is None: + if init(): + im = _open_core(fp, filename, prefix) + + if im: + return im + + raise IOError("cannot identify image file %r" + % (filename if filename else fp)) + +# +# Image processing. + + +def alpha_composite(im1, im2): + """ + Alpha composite im2 over im1. 
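# Usage sketch for alpha_composite() as documented above: in practice both
# inputs need to be "RGBA" images of identical size; im2 is composited over im1.
from PIL import Image

bg = Image.new("RGBA", (32, 32), (255, 0, 0, 255))
fg = Image.new("RGBA", (32, 32), (0, 0, 255, 128))
out = Image.alpha_composite(bg, fg)       # returns a new RGBA image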
+ + :param im1: The first image. + :param im2: The second image. Must have the same mode and size as + the first image. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + im1.load() + im2.load() + return im1._new(core.alpha_composite(im1.im, im2.im)) + + +def blend(im1, im2, alpha): + """ + Creates a new image by interpolating between two input images, using + a constant alpha.:: + + out = image1 * (1.0 - alpha) + image2 * alpha + + :param im1: The first image. + :param im2: The second image. Must have the same mode and size as + the first image. + :param alpha: The interpolation alpha factor. If alpha is 0.0, a + copy of the first image is returned. If alpha is 1.0, a copy of + the second image is returned. There are no restrictions on the + alpha value. If necessary, the result is clipped to fit into + the allowed output range. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + im1.load() + im2.load() + return im1._new(core.blend(im1.im, im2.im, alpha)) + + +def composite(image1, image2, mask): + """ + Create composite image by blending images using a transparency mask. + + :param image1: The first image. + :param image2: The second image. Must have the same mode and + size as the first image. + :param mask: A mask image. This image can have mode + "1", "L", or "RGBA", and must have the same size as the + other two images. + """ + + image = image2.copy() + image.paste(image1, None, mask) + return image + + +def eval(image, *args): + """ + Applies the function (which should take one argument) to each pixel + in the given image. If the image has more than one band, the same + function is applied to each band. Note that the function is + evaluated once for each possible pixel value, so you cannot use + random components or other generators. + + :param image: The input image. + :param function: A function object, taking one integer argument. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + return image.point(args[0]) + + +def merge(mode, bands): + """ + Merge a set of single band images into a new multiband image. + + :param mode: The mode to use for the output image. See: + :ref:`concept-modes`. + :param bands: A sequence containing one single-band image for + each band in the output image. All bands must have the + same size. + :returns: An :py:class:`~PIL.Image.Image` object. + """ + + if getmodebands(mode) != len(bands) or "*" in mode: + raise ValueError("wrong number of bands") + for im in bands[1:]: + if im.mode != getmodetype(mode): + raise ValueError("mode mismatch") + if im.size != bands[0].size: + raise ValueError("size mismatch") + im = core.new(mode, bands[0].size) + for i in range(getmodebands(mode)): + bands[i].load() + im.putband(bands[i].im, i) + return bands[0]._new(im) + + +# -------------------------------------------------------------------- +# Plugin registry + +def register_open(id, factory, accept=None): + """ + Register an image file plugin. This function should not be used + in application code. + + :param id: An image format identifier. + :param factory: An image file factory method. + :param accept: An optional function that can be used to quickly + reject images having another format. + """ + id = id.upper() + ID.append(id) + OPEN[id] = factory, accept + + +def register_mime(id, mimetype): + """ + Registers an image MIME type. This function should not be used + in application code. + + :param id: An image format identifier. + :param mimetype: The image MIME type for this format. 
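# Usage sketch for split()/merge() as documented earlier in this hunk: split
# an RGB image into its bands, then merge them back with R and B swapped.
from PIL import Image

im = Image.new("RGB", (8, 8), (10, 20, 30))
r, g, b = im.split()
swapped = Image.merge("RGB", (b, g, r))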
+ """ + MIME[id.upper()] = mimetype + + +def register_save(id, driver): + """ + Registers an image save function. This function should not be + used in application code. + + :param id: An image format identifier. + :param driver: A function to save images in this format. + """ + SAVE[id.upper()] = driver + + +def register_save_all(id, driver): + """ + Registers an image function to save all the frames + of a multiframe format. This function should not be + used in application code. + + :param id: An image format identifier. + :param driver: A function to save images in this format. + """ + SAVE_ALL[id.upper()] = driver + + +def register_extension(id, extension): + """ + Registers an image extension. This function should not be + used in application code. + + :param id: An image format identifier. + :param extension: An extension used for this format. + """ + EXTENSION[extension.lower()] = id.upper() + + +# -------------------------------------------------------------------- +# Simple display support. User code may override this. + +def _show(image, **options): + # override me, as necessary + _showxv(image, **options) + + +def _showxv(image, title=None, **options): + from PIL import ImageShow + ImageShow.show(image, title, **options) + + +# -------------------------------------------------------------------- +# Effects + +def effect_mandelbrot(size, extent, quality): + """ + Generate a Mandelbrot set covering the given extent. + + :param size: The requested size in pixels, as a 2-tuple: + (width, height). + :param extent: The extent to cover, as a 4-tuple: + (x0, y0, x1, y2). + :param quality: Quality. + """ + return Image()._new(core.effect_mandelbrot(size, extent, quality)) + + +def effect_noise(size, sigma): + """ + Generate Gaussian noise centered around 128. + + :param size: The requested size in pixels, as a 2-tuple: + (width, height). + :param sigma: Standard deviation of noise. + """ + return Image()._new(core.effect_noise(size, sigma)) + +# End of file diff --git a/Lib/site-packages/PIL/ImageChops.py b/Lib/site-packages/PIL/ImageChops.py new file mode 100644 index 0000000..ba5350e --- /dev/null +++ b/Lib/site-packages/PIL/ImageChops.py @@ -0,0 +1,283 @@ +# +# The Python Imaging Library. +# $Id$ +# +# standard channel operations +# +# History: +# 1996-03-24 fl Created +# 1996-08-13 fl Added logical operations (for "1" images) +# 2000-10-12 fl Added offset method (from Image.py) +# +# Copyright (c) 1997-2000 by Secret Labs AB +# Copyright (c) 1996-2000 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image + + +def constant(image, value): + """Fill a channel with a given grey level. + + :rtype: :py:class:`~PIL.Image.Image` + """ + + return Image.new("L", image.size, value) + + +def duplicate(image): + """Copy a channel. Alias for :py:meth:`PIL.Image.Image.copy`. + + :rtype: :py:class:`~PIL.Image.Image` + """ + + return image.copy() + + +def invert(image): + """ + Invert an image (channel). + + .. code-block:: python + + out = MAX - image + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image.load() + return image._new(image.im.chop_invert()) + + +def lighter(image1, image2): + """ + Compares the two images, pixel by pixel, and returns a new image containing + the lighter values. + + .. 
code-block:: python + + out = max(image1, image2) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_lighter(image2.im)) + + +def darker(image1, image2): + """ + Compares the two images, pixel by pixel, and returns a new image + containing the darker values. + + .. code-block:: python + + out = min(image1, image2) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_darker(image2.im)) + + +def difference(image1, image2): + """ + Returns the absolute value of the pixel-by-pixel difference between the two + images. + + .. code-block:: python + + out = abs(image1 - image2) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_difference(image2.im)) + + +def multiply(image1, image2): + """ + Superimposes two images on top of each other. + + If you multiply an image with a solid black image, the result is black. If + you multiply with a solid white image, the image is unaffected. + + .. code-block:: python + + out = image1 * image2 / MAX + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_multiply(image2.im)) + + +def screen(image1, image2): + """ + Superimposes two inverted images on top of each other. + + .. code-block:: python + + out = MAX - ((MAX - image1) * (MAX - image2) / MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_screen(image2.im)) + + +def add(image1, image2, scale=1.0, offset=0): + """ + Adds two images, dividing the result by scale and adding the + offset. If omitted, scale defaults to 1.0, and offset to 0.0. + + .. code-block:: python + + out = ((image1 + image2) / scale + offset) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_add(image2.im, scale, offset)) + + +def subtract(image1, image2, scale=1.0, offset=0): + """ + Subtracts two images, dividing the result by scale and adding the + offset. If omitted, scale defaults to 1.0, and offset to 0.0. + + .. code-block:: python + + out = ((image1 - image2) / scale + offset) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_subtract(image2.im, scale, offset)) + + +def add_modulo(image1, image2): + """Add two images, without clipping the result. + + .. code-block:: python + + out = ((image1 + image2) % MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_add_modulo(image2.im)) + + +def subtract_modulo(image1, image2): + """Subtract two images, without clipping the result. + + .. code-block:: python + + out = ((image1 - image2) % MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_subtract_modulo(image2.im)) + + +def logical_and(image1, image2): + """Logical AND between two images. + + .. code-block:: python + + out = ((image1 and image2) % MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_and(image2.im)) + + +def logical_or(image1, image2): + """Logical OR between two images. + + .. 
code-block:: python + + out = ((image1 or image2) % MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_or(image2.im)) + + +def logical_xor(image1, image2): + """Logical XOR between two images. + + .. code-block:: python + + out = ((bool(image1) != bool(image2)) % MAX) + + :rtype: :py:class:`~PIL.Image.Image` + """ + + image1.load() + image2.load() + return image1._new(image1.im.chop_xor(image2.im)) + + +def blend(image1, image2, alpha): + """Blend images using constant transparency weight. Alias for + :py:meth:`PIL.Image.Image.blend`. + + :rtype: :py:class:`~PIL.Image.Image` + """ + + return Image.blend(image1, image2, alpha) + + +def composite(image1, image2, mask): + """Create composite using transparency mask. Alias for + :py:meth:`PIL.Image.Image.composite`. + + :rtype: :py:class:`~PIL.Image.Image` + """ + + return Image.composite(image1, image2, mask) + + +def offset(image, xoffset, yoffset=None): + """Returns a copy of the image where data has been offset by the given + distances. Data wraps around the edges. If **yoffset** is omitted, it + is assumed to be equal to **xoffset**. + + :param xoffset: The horizontal distance. + :param yoffset: The vertical distance. If omitted, both + distances are set to the same value. + :rtype: :py:class:`~PIL.Image.Image` + """ + + if yoffset is None: + yoffset = xoffset + image.load() + return image._new(image.im.offset(xoffset, yoffset)) diff --git a/Lib/site-packages/PIL/ImageCms.py b/Lib/site-packages/PIL/ImageCms.py new file mode 100644 index 0000000..ba5504a --- /dev/null +++ b/Lib/site-packages/PIL/ImageCms.py @@ -0,0 +1,970 @@ +# The Python Imaging Library. +# $Id$ + +# Optional color managment support, based on Kevin Cazabon's PyCMS +# library. + +# History: + +# 2009-03-08 fl Added to PIL. + +# Copyright (C) 2002-2003 Kevin Cazabon +# Copyright (c) 2009 by Fredrik Lundh +# Copyright (c) 2013 by Eric Soroos + +# See the README file for information on usage and redistribution. See +# below for the original description. + +from __future__ import print_function +import sys + +DESCRIPTION = """ +pyCMS + + a Python / PIL interface to the littleCMS ICC Color Management System + Copyright (C) 2002-2003 Kevin Cazabon + kevin@cazabon.com + http://www.cazabon.com + + pyCMS home page: http://www.cazabon.com/pyCMS + littleCMS home page: http://www.littlecms.com + (littleCMS is Copyright (C) 1998-2001 Marti Maria) + + Originally released under LGPL. Graciously donated to PIL in + March 2009, for distribution under the standard PIL license + + The pyCMS.py module provides a "clean" interface between Python/PIL and + pyCMSdll, taking care of some of the more complex handling of the direct + pyCMSdll functions, as well as error-checking and making sure that all + relevant data is kept together. + + While it is possible to call pyCMSdll functions directly, it's not highly + recommended. + + Version History: + + 1.0.0 pil Oct 2013 Port to LCMS 2. + + 0.1.0 pil mod March 10, 2009 + + Renamed display profile to proof profile. The proof + profile is the profile of the device that is being + simulated, not the profile of the device which is + actually used to display/print the final simulation + (that'd be the output profile) - also see LCMSAPI.txt + input colorspace -> using 'renderingIntent' -> proof + colorspace -> using 'proofRenderingIntent' -> output + colorspace + + Added LCMS FLAGS support. 
+ Added FLAGS["SOFTPROOFING"] as default flag for + buildProofTransform (otherwise the proof profile/intent + would be ignored). + + 0.1.0 pil March 2009 - added to PIL, as PIL.ImageCms + + 0.0.2 alpha Jan 6, 2002 + + Added try/except statements around type() checks of + potential CObjects... Python won't let you use type() + on them, and raises a TypeError (stupid, if you ask + me!) + + Added buildProofTransformFromOpenProfiles() function. + Additional fixes in DLL, see DLL code for details. + + 0.0.1 alpha first public release, Dec. 26, 2002 + + Known to-do list with current version (of Python interface, not pyCMSdll): + + none + +""" + +VERSION = "1.0.0 pil" + +# --------------------------------------------------------------------. + +from PIL import Image +try: + from PIL import _imagingcms +except ImportError as ex: + # Allow error import for doc purposes, but error out when accessing + # anything in core. + from _util import deferred_error + _imagingcms = deferred_error(ex) +from PIL._util import isStringType + +core = _imagingcms + +# +# intent/direction values + +INTENT_PERCEPTUAL = 0 +INTENT_RELATIVE_COLORIMETRIC = 1 +INTENT_SATURATION = 2 +INTENT_ABSOLUTE_COLORIMETRIC = 3 + +DIRECTION_INPUT = 0 +DIRECTION_OUTPUT = 1 +DIRECTION_PROOF = 2 + +# +# flags + +FLAGS = { + "MATRIXINPUT": 1, + "MATRIXOUTPUT": 2, + "MATRIXONLY": (1 | 2), + "NOWHITEONWHITEFIXUP": 4, # Don't hot fix scum dot + # Don't create prelinearization tables on precalculated transforms + # (internal use): + "NOPRELINEARIZATION": 16, + "GUESSDEVICECLASS": 32, # Guess device class (for transform2devicelink) + "NOTCACHE": 64, # Inhibit 1-pixel cache + "NOTPRECALC": 256, + "NULLTRANSFORM": 512, # Don't transform anyway + "HIGHRESPRECALC": 1024, # Use more memory to give better accuracy + "LOWRESPRECALC": 2048, # Use less memory to minimize resources + "WHITEBLACKCOMPENSATION": 8192, + "BLACKPOINTCOMPENSATION": 8192, + "GAMUTCHECK": 4096, # Out of Gamut alarm + "SOFTPROOFING": 16384, # Do softproofing + "PRESERVEBLACK": 32768, # Black preservation + "NODEFAULTRESOURCEDEF": 16777216, # CRD special + "GRIDPOINTS": lambda n: ((n) & 0xFF) << 16 # Gridpoints +} + +_MAX_FLAG = 0 +for flag in FLAGS.values(): + if isinstance(flag, int): + _MAX_FLAG = _MAX_FLAG | flag + + +# --------------------------------------------------------------------. +# Experimental PIL-level API +# --------------------------------------------------------------------. + +## +# Profile. + +class ImageCmsProfile(object): + + def __init__(self, profile): + """ + :param profile: Either a string representing a filename, + a file like object containing a profile or a + low-level profile object + + """ + + if isStringType(profile): + self._set(core.profile_open(profile), profile) + elif hasattr(profile, "read"): + self._set(core.profile_frombytes(profile.read())) + else: + self._set(profile) # assume it's already a profile + + def _set(self, profile, filename=None): + self.profile = profile + self.filename = filename + if profile: + self.product_name = None # profile.product_name + self.product_info = None # profile.product_info + else: + self.product_name = None + self.product_info = None + + def tobytes(self): + """ + Returns the profile in a format suitable for embedding in + saved images. + + :returns: a bytes object containing the ICC profile. + """ + + return core.profile_tobytes(self.profile) + + +class ImageCmsTransform(Image.ImagePointHandler): + + # Transform. This can be used with the procedural API, or with the + # standard Image.point() method. 
+ # + # Will return the output profile in the output.info['icc_profile']. + + def __init__(self, input, output, input_mode, output_mode, + intent=INTENT_PERCEPTUAL, proof=None, + proof_intent=INTENT_ABSOLUTE_COLORIMETRIC, flags=0): + if proof is None: + self.transform = core.buildTransform( + input.profile, output.profile, + input_mode, output_mode, + intent, + flags + ) + else: + self.transform = core.buildProofTransform( + input.profile, output.profile, proof.profile, + input_mode, output_mode, + intent, proof_intent, + flags + ) + # Note: inputMode and outputMode are for pyCMS compatibility only + self.input_mode = self.inputMode = input_mode + self.output_mode = self.outputMode = output_mode + + self.output_profile = output + + def point(self, im): + return self.apply(im) + + def apply(self, im, imOut=None): + im.load() + if imOut is None: + imOut = Image.new(self.output_mode, im.size, None) + self.transform.apply(im.im.id, imOut.im.id) + imOut.info['icc_profile'] = self.output_profile.tobytes() + return imOut + + def apply_in_place(self, im): + im.load() + if im.mode != self.output_mode: + raise ValueError("mode mismatch") # wrong output mode + self.transform.apply(im.im.id, im.im.id) + im.info['icc_profile'] = self.output_profile.tobytes() + return im + + +def get_display_profile(handle=None): + """ (experimental) Fetches the profile for the current display device. + :returns: None if the profile is not known. + """ + + if sys.platform == "win32": + from PIL import ImageWin + if isinstance(handle, ImageWin.HDC): + profile = core.get_display_profile_win32(handle, 1) + else: + profile = core.get_display_profile_win32(handle or 0) + else: + try: + get = _imagingcms.get_display_profile + except AttributeError: + return None + else: + profile = get() + return ImageCmsProfile(profile) + + +# --------------------------------------------------------------------. +# pyCMS compatible layer +# --------------------------------------------------------------------. + +class PyCMSError(Exception): + + """ (pyCMS) Exception class. + This is used for all errors in the pyCMS API. """ + pass + + +def profileToProfile( + im, inputProfile, outputProfile, renderingIntent=INTENT_PERCEPTUAL, + outputMode=None, inPlace=0, flags=0): + """ + (pyCMS) Applies an ICC transformation to a given image, mapping from + inputProfile to outputProfile. + + If the input or output profiles specified are not valid filenames, a + PyCMSError will be raised. If inPlace == TRUE and outputMode != im.mode, + a PyCMSError will be raised. If an error occurs during application of + the profiles, a PyCMSError will be raised. If outputMode is not a mode + supported by the outputProfile (or by pyCMS), a PyCMSError will be + raised. + + This function applies an ICC transformation to im from inputProfile's + color space to outputProfile's color space using the specified rendering + intent to decide how to handle out-of-gamut colors. + + OutputMode can be used to specify that a color mode conversion is to + be done using these profiles, but the specified profiles must be able + to handle that mode. I.e., if converting im from RGB to CMYK using + profiles, the input profile must handle RGB data, and the output + profile must handle CMYK data. + + :param im: An open PIL image object (i.e. Image.new(...) or + Image.open(...), etc.) 
+ :param inputProfile: String, as a valid filename path to the ICC input + profile you wish to use for this image, or a profile object + :param outputProfile: String, as a valid filename path to the ICC output + profile you wish to use for this image, or a profile object + :param renderingIntent: Integer (0-3) specifying the rendering intent you + wish to use for the transform + + INTENT_PERCEPTUAL = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL) + INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC) + INTENT_SATURATION = 2 (ImageCms.INTENT_SATURATION) + INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC) + + see the pyCMS documentation for details on rendering intents and what + they do. + :param outputMode: A valid PIL mode for the output image (i.e. "RGB", + "CMYK", etc.). Note: if rendering the image "inPlace", outputMode + MUST be the same mode as the input, or omitted completely. If + omitted, the outputMode will be the same as the mode of the input + image (im.mode) + :param inPlace: Boolean (1 = True, None or 0 = False). If True, the + original image is modified in-place, and None is returned. If False + (default), a new Image object is returned with the transform applied. + :param flags: Integer (0-...) specifying additional flags + :returns: Either None or a new PIL image object, depending on value of + inPlace + :exception PyCMSError: + """ + + if outputMode is None: + outputMode = im.mode + + if not isinstance(renderingIntent, int) or not (0 <= renderingIntent <= 3): + raise PyCMSError("renderingIntent must be an integer between 0 and 3") + + if not isinstance(flags, int) or not (0 <= flags <= _MAX_FLAG): + raise PyCMSError( + "flags must be an integer between 0 and %s" + _MAX_FLAG) + + try: + if not isinstance(inputProfile, ImageCmsProfile): + inputProfile = ImageCmsProfile(inputProfile) + if not isinstance(outputProfile, ImageCmsProfile): + outputProfile = ImageCmsProfile(outputProfile) + transform = ImageCmsTransform( + inputProfile, outputProfile, im.mode, outputMode, + renderingIntent, flags=flags + ) + if inPlace: + transform.apply_in_place(im) + imOut = None + else: + imOut = transform.apply(im) + except (IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + return imOut + + +def getOpenProfile(profileFilename): + """ + (pyCMS) Opens an ICC profile file. + + The PyCMSProfile object can be passed back into pyCMS for use in creating + transforms and such (as in ImageCms.buildTransformFromOpenProfiles()). + + If profileFilename is not a vaild filename for an ICC profile, a PyCMSError + will be raised. + + :param profileFilename: String, as a valid filename path to the ICC profile + you wish to open, or a file-like object. + :returns: A CmsProfile class object. + :exception PyCMSError: + """ + + try: + return ImageCmsProfile(profileFilename) + except (IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + +def buildTransform( + inputProfile, outputProfile, inMode, outMode, + renderingIntent=INTENT_PERCEPTUAL, flags=0): + """ + (pyCMS) Builds an ICC transform mapping from the inputProfile to the + outputProfile. Use applyTransform to apply the transform to a given + image. + + If the input or output profiles specified are not valid filenames, a + PyCMSError will be raised. If an error occurs during creation of the + transform, a PyCMSError will be raised. + + If inMode or outMode are not a mode supported by the outputProfile (or + by pyCMS), a PyCMSError will be raised. 
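+
+    A minimal sketch of typical use, given an open "RGB" image ``im`` (the
+    profile filenames below are hypothetical; any valid ICC profiles or
+    profile objects would do):
+
+    .. code-block:: python
+
+        rgb2cmyk = buildTransform("sRGB.icc", "cmyk.icc", "RGB", "CMYK")
+        converted = applyTransform(im, rgb2cmyk)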
+ + This function builds and returns an ICC transform from the inputProfile + to the outputProfile using the renderingIntent to determine what to do + with out-of-gamut colors. It will ONLY work for converting images that + are in inMode to images that are in outMode color format (PIL mode, + i.e. "RGB", "RGBA", "CMYK", etc.). + + Building the transform is a fair part of the overhead in + ImageCms.profileToProfile(), so if you're planning on converting multiple + images using the same input/output settings, this can save you time. + Once you have a transform object, it can be used with + ImageCms.applyProfile() to convert images without the need to re-compute + the lookup table for the transform. + + The reason pyCMS returns a class object rather than a handle directly + to the transform is that it needs to keep track of the PIL input/output + modes that the transform is meant for. These attributes are stored in + the "inMode" and "outMode" attributes of the object (which can be + manually overridden if you really want to, but I don't know of any + time that would be of use, or would even work). + + :param inputProfile: String, as a valid filename path to the ICC input + profile you wish to use for this transform, or a profile object + :param outputProfile: String, as a valid filename path to the ICC output + profile you wish to use for this transform, or a profile object + :param inMode: String, as a valid PIL mode that the appropriate profile + also supports (i.e. "RGB", "RGBA", "CMYK", etc.) + :param outMode: String, as a valid PIL mode that the appropriate profile + also supports (i.e. "RGB", "RGBA", "CMYK", etc.) + :param renderingIntent: Integer (0-3) specifying the rendering intent you + wish to use for the transform + + INTENT_PERCEPTUAL = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL) + INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC) + INTENT_SATURATION = 2 (ImageCms.INTENT_SATURATION) + INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC) + + see the pyCMS documentation for details on rendering intents and what + they do. + :param flags: Integer (0-...) specifying additional flags + :returns: A CmsTransform class object. + :exception PyCMSError: + """ + + if not isinstance(renderingIntent, int) or not (0 <= renderingIntent <= 3): + raise PyCMSError("renderingIntent must be an integer between 0 and 3") + + if not isinstance(flags, int) or not (0 <= flags <= _MAX_FLAG): + raise PyCMSError( + "flags must be an integer between 0 and %s" + _MAX_FLAG) + + try: + if not isinstance(inputProfile, ImageCmsProfile): + inputProfile = ImageCmsProfile(inputProfile) + if not isinstance(outputProfile, ImageCmsProfile): + outputProfile = ImageCmsProfile(outputProfile) + return ImageCmsTransform( + inputProfile, outputProfile, inMode, outMode, + renderingIntent, flags=flags) + except (IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + +def buildProofTransform( + inputProfile, outputProfile, proofProfile, inMode, outMode, + renderingIntent=INTENT_PERCEPTUAL, + proofRenderingIntent=INTENT_ABSOLUTE_COLORIMETRIC, + flags=FLAGS["SOFTPROOFING"]): + """ + (pyCMS) Builds an ICC transform mapping from the inputProfile to the + outputProfile, but tries to simulate the result that would be + obtained on the proofProfile device. + + If the input, output, or proof profiles specified are not valid + filenames, a PyCMSError will be raised. + + If an error occurs during creation of the transform, a PyCMSError will + be raised. 
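+
+    A sketch of typical use, given an open "RGB" image ``im`` (the profile
+    filenames below are hypothetical):
+
+    .. code-block:: python
+
+        proof = buildProofTransform("sRGB.icc", "monitor.icc", "printer.icc",
+                                    "RGB", "RGB")
+        softproofed = applyTransform(im, proof)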
+ + If inMode or outMode are not a mode supported by the outputProfile + (or by pyCMS), a PyCMSError will be raised. + + This function builds and returns an ICC transform from the inputProfile + to the outputProfile, but tries to simulate the result that would be + obtained on the proofProfile device using renderingIntent and + proofRenderingIntent to determine what to do with out-of-gamut + colors. This is known as "soft-proofing". It will ONLY work for + converting images that are in inMode to images that are in outMode + color format (PIL mode, i.e. "RGB", "RGBA", "CMYK", etc.). + + Usage of the resulting transform object is exactly the same as with + ImageCms.buildTransform(). + + Proof profiling is generally used when using an output device to get a + good idea of what the final printed/displayed image would look like on + the proofProfile device when it's quicker and easier to use the + output device for judging color. Generally, this means that the + output device is a monitor, or a dye-sub printer (etc.), and the simulated + device is something more expensive, complicated, or time consuming + (making it difficult to make a real print for color judgement purposes). + + Soft-proofing basically functions by adjusting the colors on the + output device to match the colors of the device being simulated. However, + when the simulated device has a much wider gamut than the output + device, you may obtain marginal results. + + :param inputProfile: String, as a valid filename path to the ICC input + profile you wish to use for this transform, or a profile object + :param outputProfile: String, as a valid filename path to the ICC output + (monitor, usually) profile you wish to use for this transform, or a + profile object + :param proofProfile: String, as a valid filename path to the ICC proof + profile you wish to use for this transform, or a profile object + :param inMode: String, as a valid PIL mode that the appropriate profile + also supports (i.e. "RGB", "RGBA", "CMYK", etc.) + :param outMode: String, as a valid PIL mode that the appropriate profile + also supports (i.e. "RGB", "RGBA", "CMYK", etc.) + :param renderingIntent: Integer (0-3) specifying the rendering intent you + wish to use for the input->proof (simulated) transform + + INTENT_PERCEPTUAL = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL) + INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC) + INTENT_SATURATION = 2 (ImageCms.INTENT_SATURATION) + INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC) + + see the pyCMS documentation for details on rendering intents and what + they do. + :param proofRenderingIntent: Integer (0-3) specifying the rendering intent you + wish to use for proof->output transform + + INTENT_PERCEPTUAL = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL) + INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC) + INTENT_SATURATION = 2 (ImageCms.INTENT_SATURATION) + INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC) + + see the pyCMS documentation for details on rendering intents and what + they do. + :param flags: Integer (0-...) specifying additional flags + :returns: A CmsTransform class object. 
+ :exception PyCMSError: + """ + + if not isinstance(renderingIntent, int) or not (0 <= renderingIntent <= 3): + raise PyCMSError("renderingIntent must be an integer between 0 and 3") + + if not isinstance(flags, int) or not (0 <= flags <= _MAX_FLAG): + raise PyCMSError( + "flags must be an integer between 0 and %s" + _MAX_FLAG) + + try: + if not isinstance(inputProfile, ImageCmsProfile): + inputProfile = ImageCmsProfile(inputProfile) + if not isinstance(outputProfile, ImageCmsProfile): + outputProfile = ImageCmsProfile(outputProfile) + if not isinstance(proofProfile, ImageCmsProfile): + proofProfile = ImageCmsProfile(proofProfile) + return ImageCmsTransform( + inputProfile, outputProfile, inMode, outMode, renderingIntent, + proofProfile, proofRenderingIntent, flags) + except (IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + +buildTransformFromOpenProfiles = buildTransform +buildProofTransformFromOpenProfiles = buildProofTransform + + +def applyTransform(im, transform, inPlace=0): + """ + (pyCMS) Applies a transform to a given image. + + If im.mode != transform.inMode, a PyCMSError is raised. + + If inPlace == TRUE and transform.inMode != transform.outMode, a + PyCMSError is raised. + + If im.mode, transfer.inMode, or transfer.outMode is not supported by + pyCMSdll or the profiles you used for the transform, a PyCMSError is + raised. + + If an error occurs while the transform is being applied, a PyCMSError + is raised. + + This function applies a pre-calculated transform (from + ImageCms.buildTransform() or ImageCms.buildTransformFromOpenProfiles()) + to an image. The transform can be used for multiple images, saving + considerable calculation time if doing the same conversion multiple times. + + If you want to modify im in-place instead of receiving a new image as + the return value, set inPlace to TRUE. This can only be done if + transform.inMode and transform.outMode are the same, because we can't + change the mode in-place (the buffer sizes for some modes are + different). The default behavior is to return a new Image object of + the same dimensions in mode transform.outMode. + + :param im: A PIL Image object, and im.mode must be the same as the inMode + supported by the transform. + :param transform: A valid CmsTransform class object + :param inPlace: Bool (1 == True, 0 or None == False). If True, im is + modified in place and None is returned, if False, a new Image object + with the transform applied is returned (and im is not changed). The + default is False. + :returns: Either None, or a new PIL Image object, depending on the value of + inPlace. The profile will be returned in the image's info['icc_profile']. + :exception PyCMSError: + """ + + try: + if inPlace: + transform.apply_in_place(im) + imOut = None + else: + imOut = transform.apply(im) + except (TypeError, ValueError) as v: + raise PyCMSError(v) + + return imOut + + +def createProfile(colorSpace, colorTemp=-1): + """ + (pyCMS) Creates a profile. + + If colorSpace not in ["LAB", "XYZ", "sRGB"], a PyCMSError is raised + + If using LAB and colorTemp != a positive integer, a PyCMSError is raised. + + If an error occurs while creating the profile, a PyCMSError is raised. + + Use this function to create common profiles on-the-fly instead of + having to supply a profile on disk and knowing the path to it. It + returns a normal CmsProfile object that can be passed to + ImageCms.buildTransformFromOpenProfiles() to create a transform to apply + to images. 
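+
+    A short sketch (the LAB white point below is illustrative):
+
+    .. code-block:: python
+
+        srgb = createProfile("sRGB")
+        lab = createProfile("LAB", 6500)   # colorTemp is only used for LAB
+        rgb2lab = buildTransformFromOpenProfiles(srgb, lab, "RGB", "LAB")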
+ + :param colorSpace: String, the color space of the profile you wish to + create. + Currently only "LAB", "XYZ", and "sRGB" are supported. + :param colorTemp: Positive integer for the white point for the profile, in + degrees Kelvin (i.e. 5000, 6500, 9600, etc.). The default is for D50 + illuminant if omitted (5000k). colorTemp is ONLY applied to LAB + profiles, and is ignored for XYZ and sRGB. + :returns: A CmsProfile class object + :exception PyCMSError: + """ + + if colorSpace not in ["LAB", "XYZ", "sRGB"]: + raise PyCMSError( + "Color space not supported for on-the-fly profile creation (%s)" + % colorSpace) + + if colorSpace == "LAB": + try: + colorTemp = float(colorTemp) + except: + raise PyCMSError( + "Color temperature must be numeric, \"%s\" not valid" + % colorTemp) + + try: + return core.createProfile(colorSpace, colorTemp) + except (TypeError, ValueError) as v: + raise PyCMSError(v) + + +def getProfileName(profile): + """ + + (pyCMS) Gets the internal product name for the given profile. + + If profile isn't a valid CmsProfile object or filename to a profile, + a PyCMSError is raised If an error occurs while trying to obtain the + name tag, a PyCMSError is raised. + + Use this function to obtain the INTERNAL name of the profile (stored + in an ICC tag in the profile itself), usually the one used when the + profile was originally created. Sometimes this tag also contains + additional information supplied by the creator. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal name of the profile as stored + in an ICC tag. + :exception PyCMSError: + """ + + try: + # add an extra newline to preserve pyCMS compatibility + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + # do it in python, not c. + # // name was "%s - %s" (model, manufacturer) || Description , + # // but if the Model and Manufacturer were the same or the model + # // was long, Just the model, in 1.x + model = profile.profile.product_model + manufacturer = profile.profile.product_manufacturer + + if not (model or manufacturer): + return profile.profile.product_description + "\n" + if not manufacturer or len(model) > 30: + return model + "\n" + return "%s - %s\n" % (model, manufacturer) + + except (AttributeError, IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + +def getProfileInfo(profile): + """ + (pyCMS) Gets the internal product information for the given profile. + + If profile isn't a valid CmsProfile object or filename to a profile, + a PyCMSError is raised. + + If an error occurs while trying to obtain the info tag, a PyCMSError + is raised + + Use this function to obtain the information stored in the profile's + info tag. This often contains details about the profile, and how it + was created, as supplied by the creator. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal profile information stored in + an ICC tag. + :exception PyCMSError: + """ + + try: + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + # add an extra newline to preserve pyCMS compatibility + # Python, not C. the white point bits weren't working well, + # so skipping. 
+ # // info was description \r\n\r\n copyright \r\n\r\n K007 tag \r\n\r\n whitepoint + description = profile.profile.product_description + cpright = profile.profile.product_copyright + arr = [] + for elt in (description, cpright): + if elt: + arr.append(elt) + return "\r\n\r\n".join(arr) + "\r\n\r\n" + + except (AttributeError, IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + +def getProfileCopyright(profile): + """ + (pyCMS) Gets the copyright for the given profile. + + If profile isn't a valid CmsProfile object or filename to a profile, + a PyCMSError is raised. + + If an error occurs while trying to obtain the copyright tag, a PyCMSError + is raised + + Use this function to obtain the information stored in the profile's + copyright tag. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal profile information stored in + an ICC tag. + :exception PyCMSError: + """ + try: + # add an extra newline to preserve pyCMS compatibility + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + return profile.profile.product_copyright + "\n" + except (AttributeError, IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + +def getProfileManufacturer(profile): + """ + (pyCMS) Gets the manufacturer for the given profile. + + If profile isn't a valid CmsProfile object or filename to a profile, + a PyCMSError is raised. + + If an error occurs while trying to obtain the manufacturer tag, a + PyCMSError is raised + + Use this function to obtain the information stored in the profile's + manufacturer tag. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal profile information stored in + an ICC tag. + :exception PyCMSError: + """ + try: + # add an extra newline to preserve pyCMS compatibility + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + return profile.profile.product_manufacturer + "\n" + except (AttributeError, IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + +def getProfileModel(profile): + """ + (pyCMS) Gets the model for the given profile. + + If profile isn't a valid CmsProfile object or filename to a profile, + a PyCMSError is raised. + + If an error occurs while trying to obtain the model tag, a PyCMSError + is raised + + Use this function to obtain the information stored in the profile's + model tag. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: A string containing the internal profile information stored in + an ICC tag. + :exception PyCMSError: + """ + + try: + # add an extra newline to preserve pyCMS compatibility + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + return profile.profile.product_model + "\n" + except (AttributeError, IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + +def getProfileDescription(profile): + """ + (pyCMS) Gets the description for the given profile. + + If profile isn't a valid CmsProfile object or filename to a profile, + a PyCMSError is raised. + + If an error occurs while trying to obtain the description tag, a PyCMSError + is raised + + Use this function to obtain the information stored in the profile's + description tag. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. 
+ :returns: A string containing the internal profile information stored in an + ICC tag. + :exception PyCMSError: + """ + + try: + # add an extra newline to preserve pyCMS compatibility + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + return profile.profile.product_description + "\n" + except (AttributeError, IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + +def getDefaultIntent(profile): + """ + (pyCMS) Gets the default intent name for the given profile. + + If profile isn't a valid CmsProfile object or filename to a profile, + a PyCMSError is raised. + + If an error occurs while trying to obtain the default intent, a + PyCMSError is raised. + + Use this function to determine the default (and usually best optimized) + rendering intent for this profile. Most profiles support multiple + rendering intents, but are intended mostly for one type of conversion. + If you wish to use a different intent than returned, use + ImageCms.isIntentSupported() to verify it will work first. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :returns: Integer 0-3 specifying the default rendering intent for this + profile. + + INTENT_PERCEPTUAL = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL) + INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC) + INTENT_SATURATION = 2 (ImageCms.INTENT_SATURATION) + INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC) + + see the pyCMS documentation for details on rendering intents and what + they do. + :exception PyCMSError: + """ + + try: + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + return profile.profile.rendering_intent + except (AttributeError, IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + +def isIntentSupported(profile, intent, direction): + """ + (pyCMS) Checks if a given intent is supported. + + Use this function to verify that you can use your desired + renderingIntent with profile, and that profile can be used for the + input/output/proof profile as you desire. + + Some profiles are created specifically for one "direction", can cannot + be used for others. Some profiles can only be used for certain + rendering intents... so it's best to either verify this before trying + to create a transform with them (using this function), or catch the + potential PyCMSError that will occur if they don't support the modes + you select. + + :param profile: EITHER a valid CmsProfile object, OR a string of the + filename of an ICC profile. + :param intent: Integer (0-3) specifying the rendering intent you wish to + use with this profile + + INTENT_PERCEPTUAL = 0 (DEFAULT) (ImageCms.INTENT_PERCEPTUAL) + INTENT_RELATIVE_COLORIMETRIC = 1 (ImageCms.INTENT_RELATIVE_COLORIMETRIC) + INTENT_SATURATION = 2 (ImageCms.INTENT_SATURATION) + INTENT_ABSOLUTE_COLORIMETRIC = 3 (ImageCms.INTENT_ABSOLUTE_COLORIMETRIC) + + see the pyCMS documentation for details on rendering intents and what + they do. + :param direction: Integer specifying if the profile is to be used for input, + output, or proof + + INPUT = 0 (or use ImageCms.DIRECTION_INPUT) + OUTPUT = 1 (or use ImageCms.DIRECTION_OUTPUT) + PROOF = 2 (or use ImageCms.DIRECTION_PROOF) + + :returns: 1 if the intent/direction are supported, -1 if they are not. + :exception PyCMSError: + """ + + try: + if not isinstance(profile, ImageCmsProfile): + profile = ImageCmsProfile(profile) + # FIXME: I get different results for the same data w. 
different + # compilers. Bug in LittleCMS or in the binding? + if profile.profile.is_intent_supported(intent, direction): + return 1 + else: + return -1 + except (AttributeError, IOError, TypeError, ValueError) as v: + raise PyCMSError(v) + + +def versions(): + """ + (pyCMS) Fetches versions. + """ + + return ( + VERSION, core.littlecms_version, + sys.version.split()[0], Image.VERSION + ) + +# -------------------------------------------------------------------- + +if __name__ == "__main__": + # create a cheap manual from the __doc__ strings for the functions above + + print(__doc__) + + for f in dir(sys.modules[__name__]): + doc = None + try: + exec("doc = %s.__doc__" % (f)) + if "pyCMS" in doc: + # so we don't get the __doc__ string for imported modules + print("=" * 80) + print("%s" % f) + print(doc) + except (AttributeError, TypeError): + pass + +# End of file diff --git a/Lib/site-packages/PIL/ImageColor.py b/Lib/site-packages/PIL/ImageColor.py new file mode 100644 index 0000000..fc95e6d --- /dev/null +++ b/Lib/site-packages/PIL/ImageColor.py @@ -0,0 +1,279 @@ +# +# The Python Imaging Library +# $Id$ +# +# map CSS3-style colour description strings to RGB +# +# History: +# 2002-10-24 fl Added support for CSS-style color strings +# 2002-12-15 fl Added RGBA support +# 2004-03-27 fl Fixed remaining int() problems for Python 1.5.2 +# 2004-07-19 fl Fixed gray/grey spelling issues +# 2009-03-05 fl Fixed rounding error in grayscale calculation +# +# Copyright (c) 2002-2004 by Secret Labs AB +# Copyright (c) 2002-2004 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image +import re + + +def getrgb(color): + """ + Convert a color string to an RGB tuple. If the string cannot be parsed, + this function raises a :py:exc:`ValueError` exception. + + .. 
versionadded:: 1.1.4 + + :param color: A color string + :return: ``(red, green, blue[, alpha])`` + """ + try: + rgb = colormap[color] + except KeyError: + try: + # fall back on case-insensitive lookup + rgb = colormap[color.lower()] + except KeyError: + rgb = None + # found color in cache + if rgb: + if isinstance(rgb, tuple): + return rgb + colormap[color] = rgb = getrgb(rgb) + return rgb + # check for known string formats + m = re.match("#\w\w\w$", color) + if m: + return ( + int(color[1]*2, 16), + int(color[2]*2, 16), + int(color[3]*2, 16) + ) + m = re.match("#\w\w\w\w\w\w$", color) + if m: + return ( + int(color[1:3], 16), + int(color[3:5], 16), + int(color[5:7], 16) + ) + m = re.match("rgb\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)$", color) + if m: + return ( + int(m.group(1)), + int(m.group(2)), + int(m.group(3)) + ) + m = re.match("rgb\(\s*(\d+)%\s*,\s*(\d+)%\s*,\s*(\d+)%\s*\)$", color) + if m: + return ( + int((int(m.group(1)) * 255) / 100.0 + 0.5), + int((int(m.group(2)) * 255) / 100.0 + 0.5), + int((int(m.group(3)) * 255) / 100.0 + 0.5) + ) + m = re.match("hsl\(\s*(\d+)\s*,\s*(\d+)%\s*,\s*(\d+)%\s*\)$", color) + if m: + from colorsys import hls_to_rgb + rgb = hls_to_rgb( + float(m.group(1)) / 360.0, + float(m.group(3)) / 100.0, + float(m.group(2)) / 100.0, + ) + return ( + int(rgb[0] * 255 + 0.5), + int(rgb[1] * 255 + 0.5), + int(rgb[2] * 255 + 0.5) + ) + m = re.match("rgba\(\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*,\s*(\d+)\s*\)$", + color) + if m: + return ( + int(m.group(1)), + int(m.group(2)), + int(m.group(3)), + int(m.group(4)) + ) + raise ValueError("unknown color specifier: %r" % color) + + +def getcolor(color, mode): + """ + Same as :py:func:`~PIL.ImageColor.getrgb`, but converts the RGB value to a + greyscale value if the mode is not color or a palette image. If the string + cannot be parsed, this function raises a :py:exc:`ValueError` exception. + + .. versionadded:: 1.1.4 + + :param color: A color string + :return: ``(graylevel [, alpha]) or (red, green, blue[, alpha])`` + """ + # same as getrgb, but converts the result to the given mode + color, alpha = getrgb(color), 255 + if len(color) == 4: + color, alpha = color[0:3], color[3] + + if Image.getmodebase(mode) == "L": + r, g, b = color + color = (r*299 + g*587 + b*114)//1000 + if mode[-1] == 'A': + return (color, alpha) + else: + if mode[-1] == 'A': + return color + (alpha,) + return color + +colormap = { + # X11 colour table (from "CSS3 module: Color working draft"), with + # gray/grey spelling issues fixed. This is a superset of HTML 4.0 + # colour names used in CSS 1. 
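+    #
+    # A few illustrative getrgb() inputs (sketch):
+    #   getrgb("#ff8800")         -> (255, 136, 0)
+    #   getrgb("rgb(255, 0, 0)")  -> (255, 0, 0)
+    #   getrgb("skyblue")         -> looked up in this table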
+ "aliceblue": "#f0f8ff", + "antiquewhite": "#faebd7", + "aqua": "#00ffff", + "aquamarine": "#7fffd4", + "azure": "#f0ffff", + "beige": "#f5f5dc", + "bisque": "#ffe4c4", + "black": "#000000", + "blanchedalmond": "#ffebcd", + "blue": "#0000ff", + "blueviolet": "#8a2be2", + "brown": "#a52a2a", + "burlywood": "#deb887", + "cadetblue": "#5f9ea0", + "chartreuse": "#7fff00", + "chocolate": "#d2691e", + "coral": "#ff7f50", + "cornflowerblue": "#6495ed", + "cornsilk": "#fff8dc", + "crimson": "#dc143c", + "cyan": "#00ffff", + "darkblue": "#00008b", + "darkcyan": "#008b8b", + "darkgoldenrod": "#b8860b", + "darkgray": "#a9a9a9", + "darkgrey": "#a9a9a9", + "darkgreen": "#006400", + "darkkhaki": "#bdb76b", + "darkmagenta": "#8b008b", + "darkolivegreen": "#556b2f", + "darkorange": "#ff8c00", + "darkorchid": "#9932cc", + "darkred": "#8b0000", + "darksalmon": "#e9967a", + "darkseagreen": "#8fbc8f", + "darkslateblue": "#483d8b", + "darkslategray": "#2f4f4f", + "darkslategrey": "#2f4f4f", + "darkturquoise": "#00ced1", + "darkviolet": "#9400d3", + "deeppink": "#ff1493", + "deepskyblue": "#00bfff", + "dimgray": "#696969", + "dimgrey": "#696969", + "dodgerblue": "#1e90ff", + "firebrick": "#b22222", + "floralwhite": "#fffaf0", + "forestgreen": "#228b22", + "fuchsia": "#ff00ff", + "gainsboro": "#dcdcdc", + "ghostwhite": "#f8f8ff", + "gold": "#ffd700", + "goldenrod": "#daa520", + "gray": "#808080", + "grey": "#808080", + "green": "#008000", + "greenyellow": "#adff2f", + "honeydew": "#f0fff0", + "hotpink": "#ff69b4", + "indianred": "#cd5c5c", + "indigo": "#4b0082", + "ivory": "#fffff0", + "khaki": "#f0e68c", + "lavender": "#e6e6fa", + "lavenderblush": "#fff0f5", + "lawngreen": "#7cfc00", + "lemonchiffon": "#fffacd", + "lightblue": "#add8e6", + "lightcoral": "#f08080", + "lightcyan": "#e0ffff", + "lightgoldenrodyellow": "#fafad2", + "lightgreen": "#90ee90", + "lightgray": "#d3d3d3", + "lightgrey": "#d3d3d3", + "lightpink": "#ffb6c1", + "lightsalmon": "#ffa07a", + "lightseagreen": "#20b2aa", + "lightskyblue": "#87cefa", + "lightslategray": "#778899", + "lightslategrey": "#778899", + "lightsteelblue": "#b0c4de", + "lightyellow": "#ffffe0", + "lime": "#00ff00", + "limegreen": "#32cd32", + "linen": "#faf0e6", + "magenta": "#ff00ff", + "maroon": "#800000", + "mediumaquamarine": "#66cdaa", + "mediumblue": "#0000cd", + "mediumorchid": "#ba55d3", + "mediumpurple": "#9370db", + "mediumseagreen": "#3cb371", + "mediumslateblue": "#7b68ee", + "mediumspringgreen": "#00fa9a", + "mediumturquoise": "#48d1cc", + "mediumvioletred": "#c71585", + "midnightblue": "#191970", + "mintcream": "#f5fffa", + "mistyrose": "#ffe4e1", + "moccasin": "#ffe4b5", + "navajowhite": "#ffdead", + "navy": "#000080", + "oldlace": "#fdf5e6", + "olive": "#808000", + "olivedrab": "#6b8e23", + "orange": "#ffa500", + "orangered": "#ff4500", + "orchid": "#da70d6", + "palegoldenrod": "#eee8aa", + "palegreen": "#98fb98", + "paleturquoise": "#afeeee", + "palevioletred": "#db7093", + "papayawhip": "#ffefd5", + "peachpuff": "#ffdab9", + "peru": "#cd853f", + "pink": "#ffc0cb", + "plum": "#dda0dd", + "powderblue": "#b0e0e6", + "purple": "#800080", + "red": "#ff0000", + "rosybrown": "#bc8f8f", + "royalblue": "#4169e1", + "saddlebrown": "#8b4513", + "salmon": "#fa8072", + "sandybrown": "#f4a460", + "seagreen": "#2e8b57", + "seashell": "#fff5ee", + "sienna": "#a0522d", + "silver": "#c0c0c0", + "skyblue": "#87ceeb", + "slateblue": "#6a5acd", + "slategray": "#708090", + "slategrey": "#708090", + "snow": "#fffafa", + "springgreen": "#00ff7f", + "steelblue": "#4682b4", + 
"tan": "#d2b48c", + "teal": "#008080", + "thistle": "#d8bfd8", + "tomato": "#ff6347", + "turquoise": "#40e0d0", + "violet": "#ee82ee", + "wheat": "#f5deb3", + "white": "#ffffff", + "whitesmoke": "#f5f5f5", + "yellow": "#ffff00", + "yellowgreen": "#9acd32", +} diff --git a/Lib/site-packages/PIL/ImageDraw.py b/Lib/site-packages/PIL/ImageDraw.py new file mode 100644 index 0000000..f5b72ef --- /dev/null +++ b/Lib/site-packages/PIL/ImageDraw.py @@ -0,0 +1,407 @@ +# +# The Python Imaging Library +# $Id$ +# +# drawing interface operations +# +# History: +# 1996-04-13 fl Created (experimental) +# 1996-08-07 fl Filled polygons, ellipses. +# 1996-08-13 fl Added text support +# 1998-06-28 fl Handle I and F images +# 1998-12-29 fl Added arc; use arc primitive to draw ellipses +# 1999-01-10 fl Added shape stuff (experimental) +# 1999-02-06 fl Added bitmap support +# 1999-02-11 fl Changed all primitives to take options +# 1999-02-20 fl Fixed backwards compatibility +# 2000-10-12 fl Copy on write, when necessary +# 2001-02-18 fl Use default ink for bitmap/text also in fill mode +# 2002-10-24 fl Added support for CSS-style color strings +# 2002-12-10 fl Added experimental support for RGBA-on-RGB drawing +# 2002-12-11 fl Refactored low-level drawing API (work in progress) +# 2004-08-26 fl Made Draw() a factory function, added getdraw() support +# 2004-09-04 fl Added width support to line primitive +# 2004-09-10 fl Added font mode handling +# 2006-06-19 fl Added font bearing support (getmask2) +# +# Copyright (c) 1997-2006 by Secret Labs AB +# Copyright (c) 1996-2006 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import numbers +import warnings + +from PIL import Image, ImageColor +from PIL._util import isStringType + +## +# A simple 2D drawing interface for PIL images. +#

+# Application code should use the Draw factory, instead of +# directly. + + +class ImageDraw(object): + + ## + # Create a drawing instance. + # + # @param im The image to draw in. + # @param mode Optional mode to use for color values. For RGB + # images, this argument can be RGB or RGBA (to blend the + # drawing into the image). For all other modes, this argument + # must be the same as the image mode. If omitted, the mode + # defaults to the mode of the image. + + def __init__(self, im, mode=None): + im.load() + if im.readonly: + im._copy() # make it writeable + blend = 0 + if mode is None: + mode = im.mode + if mode != im.mode: + if mode == "RGBA" and im.mode == "RGB": + blend = 1 + else: + raise ValueError("mode mismatch") + if mode == "P": + self.palette = im.palette + else: + self.palette = None + self.im = im.im + self.draw = Image.core.draw(self.im, blend) + self.mode = mode + if mode in ("I", "F"): + self.ink = self.draw.draw_ink(1, mode) + else: + self.ink = self.draw.draw_ink(-1, mode) + if mode in ("1", "P", "I", "F"): + # FIXME: fix Fill2 to properly support matte for I+F images + self.fontmode = "1" + else: + self.fontmode = "L" # aliasing is okay for other modes + self.fill = 0 + self.font = None + + def setink(self, ink): + raise Exception("setink() has been removed. " + + "Please use keyword arguments instead.") + + def setfill(self, onoff): + raise Exception("setfill() has been removed. " + + "Please use keyword arguments instead.") + + def setfont(self, font): + warnings.warn("setfont() is deprecated. " + + "Please set the attribute directly instead.") + # compatibility + self.font = font + + ## + # Get the current default font. + + def getfont(self): + if not self.font: + # FIXME: should add a font repository + from PIL import ImageFont + self.font = ImageFont.load_default() + return self.font + + def _getink(self, ink, fill=None): + if ink is None and fill is None: + if self.fill: + fill = self.ink + else: + ink = self.ink + else: + if ink is not None: + if isStringType(ink): + ink = ImageColor.getcolor(ink, self.mode) + if self.palette and not isinstance(ink, numbers.Number): + ink = self.palette.getcolor(ink) + ink = self.draw.draw_ink(ink, self.mode) + if fill is not None: + if isStringType(fill): + fill = ImageColor.getcolor(fill, self.mode) + if self.palette and not isinstance(fill, numbers.Number): + fill = self.palette.getcolor(fill) + fill = self.draw.draw_ink(fill, self.mode) + return ink, fill + + ## + # Draw an arc. + + def arc(self, xy, start, end, fill=None): + ink, fill = self._getink(fill) + if ink is not None: + self.draw.draw_arc(xy, start, end, ink) + + ## + # Draw a bitmap. + + def bitmap(self, xy, bitmap, fill=None): + bitmap.load() + ink, fill = self._getink(fill) + if ink is None: + ink = fill + if ink is not None: + self.draw.draw_bitmap(xy, bitmap.im, ink) + + ## + # Draw a chord. + + def chord(self, xy, start, end, fill=None, outline=None): + ink, fill = self._getink(outline, fill) + if fill is not None: + self.draw.draw_chord(xy, start, end, fill, 1) + if ink is not None: + self.draw.draw_chord(xy, start, end, ink, 0) + + ## + # Draw an ellipse. + + def ellipse(self, xy, fill=None, outline=None): + ink, fill = self._getink(outline, fill) + if fill is not None: + self.draw.draw_ellipse(xy, fill, 1) + if ink is not None: + self.draw.draw_ellipse(xy, ink, 0) + + ## + # Draw a line, or a connected sequence of line segments. 
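+    #
+    # A one-line sketch (coordinates, color and width are illustrative):
+    #
+    #     draw.line([(0, 0), (100, 50)], fill="red", width=3)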
+ + def line(self, xy, fill=None, width=0): + ink, fill = self._getink(fill) + if ink is not None: + self.draw.draw_lines(xy, ink, width) + + ## + # (Experimental) Draw a shape. + + def shape(self, shape, fill=None, outline=None): + # experimental + shape.close() + ink, fill = self._getink(outline, fill) + if fill is not None: + self.draw.draw_outline(shape, fill, 1) + if ink is not None: + self.draw.draw_outline(shape, ink, 0) + + ## + # Draw a pieslice. + + def pieslice(self, xy, start, end, fill=None, outline=None): + ink, fill = self._getink(outline, fill) + if fill is not None: + self.draw.draw_pieslice(xy, start, end, fill, 1) + if ink is not None: + self.draw.draw_pieslice(xy, start, end, ink, 0) + + ## + # Draw one or more individual pixels. + + def point(self, xy, fill=None): + ink, fill = self._getink(fill) + if ink is not None: + self.draw.draw_points(xy, ink) + + ## + # Draw a polygon. + + def polygon(self, xy, fill=None, outline=None): + ink, fill = self._getink(outline, fill) + if fill is not None: + self.draw.draw_polygon(xy, fill, 1) + if ink is not None: + self.draw.draw_polygon(xy, ink, 0) + + ## + # Draw a rectangle. + + def rectangle(self, xy, fill=None, outline=None): + ink, fill = self._getink(outline, fill) + if fill is not None: + self.draw.draw_rectangle(xy, fill, 1) + if ink is not None: + self.draw.draw_rectangle(xy, ink, 0) + + ## + # Draw text. + + def _multiline_check(self, text): + split_character = "\n" if isinstance(text, type("")) else b"\n" + + return split_character in text + + def _multiline_split(self, text): + split_character = "\n" if isinstance(text, type("")) else b"\n" + + return text.split(split_character) + + def text(self, xy, text, fill=None, font=None, anchor=None): + if self._multiline_check(text): + return self.multiline_text(xy, text, fill, font, anchor) + + ink, fill = self._getink(fill) + if font is None: + font = self.getfont() + if ink is None: + ink = fill + if ink is not None: + try: + mask, offset = font.getmask2(text, self.fontmode) + xy = xy[0] + offset[0], xy[1] + offset[1] + except AttributeError: + try: + mask = font.getmask(text, self.fontmode) + except TypeError: + mask = font.getmask(text) + self.draw.draw_bitmap(xy, mask, ink) + + def multiline_text(self, xy, text, fill=None, font=None, anchor=None, + spacing=4, align="left"): + widths = [] + max_width = 0 + lines = self._multiline_split(text) + line_spacing = self.textsize('A', font=font)[1] + spacing + for line in lines: + line_width, line_height = self.textsize(line, font) + widths.append(line_width) + max_width = max(max_width, line_width) + left, top = xy + for idx, line in enumerate(lines): + if align == "left": + pass # left = x + elif align == "center": + left += (max_width - widths[idx]) / 2.0 + elif align == "right": + left += (max_width - widths[idx]) + else: + assert False, 'align must be "left", "center" or "right"' + self.text((left, top), line, fill, font, anchor) + top += line_spacing + left = xy[0] + + ## + # Get the size of a given string, in pixels. 
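+    #
+    # Sketch (font is optional): width, height = draw.textsize("hello")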
+ + def textsize(self, text, font=None): + if self._multiline_check(text): + return self.multiline_textsize(text, font) + + if font is None: + font = self.getfont() + return font.getsize(text) + + def multiline_textsize(self, text, font=None, spacing=4): + max_width = 0 + lines = self._multiline_split(text) + line_spacing = self.textsize('A', font=font)[1] + spacing + for line in lines: + line_width, line_height = self.textsize(line, font) + max_width = max(max_width, line_width) + return max_width, len(lines)*line_spacing + + +## +# A simple 2D drawing interface for PIL images. +# +# @param im The image to draw in. +# @param mode Optional mode to use for color values. For RGB +# images, this argument can be RGB or RGBA (to blend the +# drawing into the image). For all other modes, this argument +# must be the same as the image mode. If omitted, the mode +# defaults to the mode of the image. + +def Draw(im, mode=None): + try: + return im.getdraw(mode) + except AttributeError: + return ImageDraw(im, mode) + +# experimental access to the outline API +try: + Outline = Image.core.outline +except AttributeError: + Outline = None + + +## +# (Experimental) A more advanced 2D drawing interface for PIL images, +# based on the WCK interface. +# +# @param im The image to draw in. +# @param hints An optional list of hints. +# @return A (drawing context, drawing resource factory) tuple. + +def getdraw(im=None, hints=None): + # FIXME: this needs more work! + # FIXME: come up with a better 'hints' scheme. + handler = None + if not hints or "nicest" in hints: + try: + from PIL import _imagingagg as handler + except ImportError: + pass + if handler is None: + from PIL import ImageDraw2 as handler + if im: + im = handler.Draw(im) + return im, handler + + +## +# (experimental) Fills a bounded region with a given color. +# +# @param image Target image. +# @param xy Seed position (a 2-item coordinate tuple). +# @param value Fill color. +# @param border Optional border value. If given, the region consists of +# pixels with a color different from the border color. If not given, +# the region consists of pixels having the same color as the seed +# pixel. + +def floodfill(image, xy, value, border=None): + "Fill bounded region." + # based on an implementation by Eric S. 
Raymond + pixel = image.load() + x, y = xy + try: + background = pixel[x, y] + if background == value: + return # seed point already has fill color + pixel[x, y] = value + except IndexError: + return # seed point outside image + edge = [(x, y)] + if border is None: + while edge: + newedge = [] + for (x, y) in edge: + for (s, t) in ((x+1, y), (x-1, y), (x, y+1), (x, y-1)): + try: + p = pixel[s, t] + except IndexError: + pass + else: + if p == background: + pixel[s, t] = value + newedge.append((s, t)) + edge = newedge + else: + while edge: + newedge = [] + for (x, y) in edge: + for (s, t) in ((x+1, y), (x-1, y), (x, y+1), (x, y-1)): + try: + p = pixel[s, t] + except IndexError: + pass + else: + if p != value and p != border: + pixel[s, t] = value + newedge.append((s, t)) + edge = newedge diff --git a/Lib/site-packages/PIL/ImageDraw2.py b/Lib/site-packages/PIL/ImageDraw2.py new file mode 100644 index 0000000..62ee116 --- /dev/null +++ b/Lib/site-packages/PIL/ImageDraw2.py @@ -0,0 +1,111 @@ +# +# The Python Imaging Library +# $Id$ +# +# WCK-style drawing interface operations +# +# History: +# 2003-12-07 fl created +# 2005-05-15 fl updated; added to PIL as ImageDraw2 +# 2005-05-15 fl added text support +# 2005-05-20 fl added arc/chord/pieslice support +# +# Copyright (c) 2003-2005 by Secret Labs AB +# Copyright (c) 2003-2005 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image, ImageColor, ImageDraw, ImageFont, ImagePath + + +class Pen(object): + def __init__(self, color, width=1, opacity=255): + self.color = ImageColor.getrgb(color) + self.width = width + + +class Brush(object): + def __init__(self, color, opacity=255): + self.color = ImageColor.getrgb(color) + + +class Font(object): + def __init__(self, color, file, size=12): + # FIXME: add support for bitmap fonts + self.color = ImageColor.getrgb(color) + self.font = ImageFont.truetype(file, size) + + +class Draw(object): + + def __init__(self, image, size=None, color=None): + if not hasattr(image, "im"): + image = Image.new(image, size, color) + self.draw = ImageDraw.Draw(image) + self.image = image + self.transform = None + + def flush(self): + return self.image + + def render(self, op, xy, pen, brush=None): + # handle color arguments + outline = fill = None + width = 1 + if isinstance(pen, Pen): + outline = pen.color + width = pen.width + elif isinstance(brush, Pen): + outline = brush.color + width = brush.width + if isinstance(brush, Brush): + fill = brush.color + elif isinstance(pen, Brush): + fill = pen.color + # handle transformation + if self.transform: + xy = ImagePath.Path(xy) + xy.transform(self.transform) + # render the item + if op == "line": + self.draw.line(xy, fill=outline, width=width) + else: + getattr(self.draw, op)(xy, fill=fill, outline=outline) + + def settransform(self, offset): + (xoffset, yoffset) = offset + self.transform = (1, 0, xoffset, 0, 1, yoffset) + + def arc(self, xy, start, end, *options): + self.render("arc", xy, start, end, *options) + + def chord(self, xy, start, end, *options): + self.render("chord", xy, start, end, *options) + + def ellipse(self, xy, *options): + self.render("ellipse", xy, *options) + + def line(self, xy, *options): + self.render("line", xy, *options) + + def pieslice(self, xy, start, end, *options): + self.render("pieslice", xy, start, end, *options) + + def polygon(self, xy, *options): + self.render("polygon", xy, *options) + + def rectangle(self, xy, *options): + self.render("rectangle", xy, *options) + + def 
symbol(self, xy, symbol, *options): + raise NotImplementedError("not in this version") + + def text(self, xy, text, font): + if self.transform: + xy = ImagePath.Path(xy) + xy.transform(self.transform) + self.draw.text(xy, text, font=font.font, fill=font.color) + + def textsize(self, text, font): + return self.draw.textsize(text, font=font.font) diff --git a/Lib/site-packages/PIL/ImageEnhance.py b/Lib/site-packages/PIL/ImageEnhance.py new file mode 100644 index 0000000..56b5c01 --- /dev/null +++ b/Lib/site-packages/PIL/ImageEnhance.py @@ -0,0 +1,100 @@ +# +# The Python Imaging Library. +# $Id$ +# +# image enhancement classes +# +# For a background, see "Image Processing By Interpolation and +# Extrapolation", Paul Haeberli and Douglas Voorhies. Available +# at http://www.graficaobscura.com/interp/index.html +# +# History: +# 1996-03-23 fl Created +# 2009-06-16 fl Fixed mean calculation +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image, ImageFilter, ImageStat + + +class _Enhance(object): + + def enhance(self, factor): + """ + Returns an enhanced image. + + :param factor: A floating point value controlling the enhancement. + Factor 1.0 always returns a copy of the original image, + lower factors mean less color (brightness, contrast, + etc), and higher values more. There are no restrictions + on this value. + :rtype: :py:class:`~PIL.Image.Image` + """ + return Image.blend(self.degenerate, self.image, factor) + + +class Color(_Enhance): + """Adjust image color balance. + + This class can be used to adjust the colour balance of an image, in + a manner similar to the controls on a colour TV set. An enhancement + factor of 0.0 gives a black and white image. A factor of 1.0 gives + the original image. + """ + def __init__(self, image): + self.image = image + self.intermediate_mode = 'L' + if 'A' in image.getbands(): + self.intermediate_mode = 'LA' + + self.degenerate = image.convert(self.intermediate_mode).convert(image.mode) + + +class Contrast(_Enhance): + """Adjust image contrast. + + This class can be used to control the contrast of an image, similar + to the contrast control on a TV set. An enhancement factor of 0.0 + gives a solid grey image. A factor of 1.0 gives the original image. + """ + def __init__(self, image): + self.image = image + mean = int(ImageStat.Stat(image.convert("L")).mean[0] + 0.5) + self.degenerate = Image.new("L", image.size, mean).convert(image.mode) + + if 'A' in image.getbands(): + self.degenerate.putalpha(image.split()[-1]) + + +class Brightness(_Enhance): + """Adjust image brightness. + + This class can be used to control the brightness of an image. An + enhancement factor of 0.0 gives a black image. A factor of 1.0 gives the + original image. + """ + def __init__(self, image): + self.image = image + self.degenerate = Image.new(image.mode, image.size, 0) + + if 'A' in image.getbands(): + self.degenerate.putalpha(image.split()[-1]) + + +class Sharpness(_Enhance): + """Adjust image sharpness. + + This class can be used to adjust the sharpness of an image. An + enhancement factor of 0.0 gives a blurred image, a factor of 1.0 gives the + original image, and a factor of 2.0 gives a sharpened image. 
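+
+    A short usage sketch (the factor value is illustrative):
+
+    .. code-block:: python
+
+        from PIL import ImageEnhance
+        sharper = ImageEnhance.Sharpness(im).enhance(2.0)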
+ """ + def __init__(self, image): + self.image = image + self.degenerate = image.filter(ImageFilter.SMOOTH) + + if 'A' in image.getbands(): + self.degenerate.putalpha(image.split()[-1]) diff --git a/Lib/site-packages/PIL/ImageFile.py b/Lib/site-packages/PIL/ImageFile.py new file mode 100644 index 0000000..9617ffb --- /dev/null +++ b/Lib/site-packages/PIL/ImageFile.py @@ -0,0 +1,513 @@ +# +# The Python Imaging Library. +# $Id$ +# +# base class for image file handlers +# +# history: +# 1995-09-09 fl Created +# 1996-03-11 fl Fixed load mechanism. +# 1996-04-15 fl Added pcx/xbm decoders. +# 1996-04-30 fl Added encoders. +# 1996-12-14 fl Added load helpers +# 1997-01-11 fl Use encode_to_file where possible +# 1997-08-27 fl Flush output in _save +# 1998-03-05 fl Use memory mapping for some modes +# 1999-02-04 fl Use memory mapping also for "I;16" and "I;16B" +# 1999-05-31 fl Added image parser +# 2000-10-12 fl Set readonly flag on memory-mapped images +# 2002-03-20 fl Use better messages for common decoder errors +# 2003-04-21 fl Fall back on mmap/map_buffer if map is not available +# 2003-10-30 fl Added StubImageFile class +# 2004-02-25 fl Made incremental parser more robust +# +# Copyright (c) 1997-2004 by Secret Labs AB +# Copyright (c) 1995-2004 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image +from PIL._util import isPath +import io +import os +import sys +import struct + +MAXBLOCK = 65536 + +SAFEBLOCK = 1024*1024 + +LOAD_TRUNCATED_IMAGES = False + +ERRORS = { + -1: "image buffer overrun error", + -2: "decoding error", + -3: "unknown error", + -8: "bad configuration", + -9: "out of memory error" +} + + +def raise_ioerror(error): + try: + message = Image.core.getcodecstatus(error) + except AttributeError: + message = ERRORS.get(error) + if not message: + message = "decoder error %d" % error + raise IOError(message + " when reading image file") + + +# +# -------------------------------------------------------------------- +# Helpers + +def _tilesort(t): + # sort on offset + return t[2] + + +# +# -------------------------------------------------------------------- +# ImageFile base class + +class ImageFile(Image.Image): + "Base class for image file format handlers." + + def __init__(self, fp=None, filename=None): + Image.Image.__init__(self) + + self.tile = None + self.readonly = 1 # until we know better + + self.decoderconfig = () + self.decodermaxblock = MAXBLOCK + + if isPath(fp): + # filename + self.fp = open(fp, "rb") + self.filename = fp + else: + # stream + self.fp = fp + self.filename = filename + + try: + self._open() + except (IndexError, # end of data + TypeError, # end of data (ord) + KeyError, # unsupported mode + EOFError, # got header but not the first frame + struct.error) as v: + raise SyntaxError(v) + + if not self.mode or self.size[0] <= 0: + raise SyntaxError("not identified by this driver") + + def draft(self, mode, size): + "Set draft mode" + + pass + + def verify(self): + "Check file integrity" + + # raise exception if something's wrong. must be called + # directly after open, and closes file when finished. + self.fp = None + + def load(self): + "Load image data based on tile list" + + pixel = Image.Image.load(self) + + if self.tile is None: + raise IOError("cannot load this image") + if not self.tile: + return pixel + + self.map = None + use_mmap = self.filename and len(self.tile) == 1 + # As of pypy 2.1.0, memory mapping was failing here. 
+ use_mmap = use_mmap and not hasattr(sys, 'pypy_version_info') + + readonly = 0 + + # look for read/seek overrides + try: + read = self.load_read + # don't use mmap if there are custom read/seek functions + use_mmap = False + except AttributeError: + read = self.fp.read + + try: + seek = self.load_seek + use_mmap = False + except AttributeError: + seek = self.fp.seek + + if use_mmap: + # try memory mapping + d, e, o, a = self.tile[0] + if d == "raw" and a[0] == self.mode and a[0] in Image._MAPMODES: + try: + if hasattr(Image.core, "map"): + # use built-in mapper + self.map = Image.core.map(self.filename) + self.map.seek(o) + self.im = self.map.readimage( + self.mode, self.size, a[1], a[2] + ) + else: + # use mmap, if possible + import mmap + fp = open(self.filename, "r+") + size = os.path.getsize(self.filename) + # FIXME: on Unix, use PROT_READ etc + self.map = mmap.mmap(fp.fileno(), size) + self.im = Image.core.map_buffer( + self.map, self.size, d, e, o, a + ) + readonly = 1 + except (AttributeError, EnvironmentError, ImportError): + self.map = None + + self.load_prepare() + + if not self.map: + # sort tiles in file order + self.tile.sort(key=_tilesort) + + try: + # FIXME: This is a hack to handle TIFF's JpegTables tag. + prefix = self.tile_prefix + except AttributeError: + prefix = b"" + + for d, e, o, a in self.tile: + d = Image._getdecoder(self.mode, d, a, self.decoderconfig) + seek(o) + try: + d.setimage(self.im, e) + except ValueError: + continue + b = prefix + while True: + try: + s = read(self.decodermaxblock) + except (IndexError, struct.error): # truncated png/gif + if LOAD_TRUNCATED_IMAGES: + break + else: + raise IOError("image file is truncated") + + if not s and not d.handles_eof: # truncated jpeg + self.tile = [] + + # JpegDecode needs to clean things up here either way + # If we don't destroy the decompressor, + # we have a memory leak. + d.cleanup() + + if LOAD_TRUNCATED_IMAGES: + break + else: + raise IOError("image file is truncated " + "(%d bytes not processed)" % len(b)) + + b = b + s + n, e = d.decode(b) + if n < 0: + break + b = b[n:] + # Need to cleanup here to prevent leaks in PyPy + d.cleanup() + + self.tile = [] + self.readonly = readonly + + self.fp = None # might be shared + + if not self.map and not LOAD_TRUNCATED_IMAGES and e < 0: + # still raised if decoder fails to return anything + raise_ioerror(e) + + # post processing + if hasattr(self, "tile_post_rotate"): + # FIXME: This is a hack to handle rotated PCD's + self.im = self.im.rotate(self.tile_post_rotate) + self.size = self.im.size + + self.load_end() + + return Image.Image.load(self) + + def load_prepare(self): + # create image memory if necessary + if not self.im or\ + self.im.mode != self.mode or self.im.size != self.size: + self.im = Image.core.new(self.mode, self.size) + # create palette (optional) + if self.mode == "P": + Image.Image.load(self) + + def load_end(self): + # may be overridden + pass + + # may be defined for contained formats + # def load_seek(self, pos): + # pass + + # may be defined for blocked formats (e.g. PNG) + # def load_read(self, bytes): + # pass + + +class StubImageFile(ImageFile): + """ + Base class for stub image loaders. + + A stub loader is an image loader that can identify files of a + certain format, but relies on external code to load the file. 
+ """ + + def _open(self): + raise NotImplementedError( + "StubImageFile subclass must implement _open" + ) + + def load(self): + loader = self._load() + if loader is None: + raise IOError("cannot find loader for this %s file" % self.format) + image = loader.load(self) + assert image is not None + # become the other object (!) + self.__class__ = image.__class__ + self.__dict__ = image.__dict__ + + def _load(self): + "(Hook) Find actual image loader." + raise NotImplementedError( + "StubImageFile subclass must implement _load" + ) + + +class Parser(object): + """ + Incremental image parser. This class implements the standard + feed/close consumer interface. + + In Python 2.x, this is an old-style class. + """ + incremental = None + image = None + data = None + decoder = None + offset = 0 + finished = 0 + + def reset(self): + """ + (Consumer) Reset the parser. Note that you can only call this + method immediately after you've created a parser; parser + instances cannot be reused. + """ + assert self.data is None, "cannot reuse parsers" + + def feed(self, data): + """ + (Consumer) Feed data to the parser. + + :param data: A string buffer. + :exception IOError: If the parser failed to parse the image file. + """ + # collect data + + if self.finished: + return + + if self.data is None: + self.data = data + else: + self.data = self.data + data + + # parse what we have + if self.decoder: + + if self.offset > 0: + # skip header + skip = min(len(self.data), self.offset) + self.data = self.data[skip:] + self.offset = self.offset - skip + if self.offset > 0 or not self.data: + return + + n, e = self.decoder.decode(self.data) + + if n < 0: + # end of stream + self.data = None + self.finished = 1 + if e < 0: + # decoding error + self.image = None + raise_ioerror(e) + else: + # end of image + return + self.data = self.data[n:] + + elif self.image: + + # if we end up here with no decoder, this file cannot + # be incrementally parsed. wait until we've gotten all + # available data + pass + + else: + + # attempt to open this file + try: + try: + fp = io.BytesIO(self.data) + im = Image.open(fp) + finally: + fp.close() # explicitly close the virtual file + except IOError: + # traceback.print_exc() + pass # not enough data + else: + flag = hasattr(im, "load_seek") or hasattr(im, "load_read") + if flag or len(im.tile) != 1: + # custom load code, or multiple tiles + self.decode = None + else: + # initialize decoder + im.load_prepare() + d, e, o, a = im.tile[0] + im.tile = [] + self.decoder = Image._getdecoder( + im.mode, d, a, im.decoderconfig + ) + self.decoder.setimage(im.im, e) + + # calculate decoder offset + self.offset = o + if self.offset <= len(self.data): + self.data = self.data[self.offset:] + self.offset = 0 + + self.image = im + + def close(self): + """ + (Consumer) Close the stream. + + :returns: An image object. + :exception IOError: If the parser failed to parse the image file either + because it cannot be identified or cannot be + decoded. 
+ """ + # finish decoding + if self.decoder: + # get rid of what's left in the buffers + self.feed(b"") + self.data = self.decoder = None + if not self.finished: + raise IOError("image was incomplete") + if not self.image: + raise IOError("cannot parse this image") + if self.data: + # incremental parsing not possible; reopen the file + # not that we have all data + try: + fp = io.BytesIO(self.data) + self.image = Image.open(fp) + finally: + self.image.load() + fp.close() # explicitly close the virtual file + return self.image + + +# -------------------------------------------------------------------- + +def _save(im, fp, tile, bufsize=0): + """Helper to save image based on tile list + + :param im: Image object. + :param fp: File object. + :param tile: Tile list. + :param bufsize: Optional buffer size + """ + + im.load() + if not hasattr(im, "encoderconfig"): + im.encoderconfig = () + tile.sort(key=_tilesort) + # FIXME: make MAXBLOCK a configuration parameter + # It would be great if we could have the encoder specify what it needs + # But, it would need at least the image size in most cases. RawEncode is + # a tricky case. + bufsize = max(MAXBLOCK, bufsize, im.size[0] * 4) # see RawEncode.c + if fp == sys.stdout: + fp.flush() + return + try: + fh = fp.fileno() + fp.flush() + except (AttributeError, io.UnsupportedOperation): + # compress to Python file-compatible object + for e, b, o, a in tile: + e = Image._getencoder(im.mode, e, a, im.encoderconfig) + if o > 0: + fp.seek(o, 0) + e.setimage(im.im, b) + while True: + l, s, d = e.encode(bufsize) + fp.write(d) + if s: + break + if s < 0: + raise IOError("encoder error %d when writing image file" % s) + e.cleanup() + else: + # slight speedup: compress to real file object + for e, b, o, a in tile: + e = Image._getencoder(im.mode, e, a, im.encoderconfig) + if o > 0: + fp.seek(o, 0) + e.setimage(im.im, b) + s = e.encode_to_file(fh, bufsize) + if s < 0: + raise IOError("encoder error %d when writing image file" % s) + e.cleanup() + if hasattr(fp, "flush"): + fp.flush() + + +def _safe_read(fp, size): + """ + Reads large blocks in a safe way. Unlike fp.read(n), this function + doesn't trust the user. If the requested size is larger than + SAFEBLOCK, the file is read block by block. + + :param fp: File handle. Must implement a read method. + :param size: Number of bytes to read. + :returns: A string containing up to size bytes of data. + """ + if size <= 0: + return b"" + if size <= SAFEBLOCK: + return fp.read(size) + data = [] + while size > 0: + block = fp.read(min(size, SAFEBLOCK)) + if not block: + break + data.append(block) + size -= len(block) + return b"".join(data) diff --git a/Lib/site-packages/PIL/ImageFilter.py b/Lib/site-packages/PIL/ImageFilter.py new file mode 100644 index 0000000..baa168a --- /dev/null +++ b/Lib/site-packages/PIL/ImageFilter.py @@ -0,0 +1,275 @@ +# +# The Python Imaging Library. +# $Id$ +# +# standard filters +# +# History: +# 1995-11-27 fl Created +# 2002-06-08 fl Added rank and mode filters +# 2003-09-15 fl Fixed rank calculation in rank filter; added expand call +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1995-2002 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +import functools + + +class Filter(object): + pass + + +class Kernel(Filter): + """ + Create a convolution kernel. The current version only + supports 3x3 and 5x5 integer and floating point kernels. + + In the current version, kernels can only be applied to + "L" and "RGB" images. 
+ + :param size: Kernel size, given as (width, height). In the current + version, this must be (3,3) or (5,5). + :param kernel: A sequence containing kernel weights. + :param scale: Scale factor. If given, the result for each pixel is + divided by this value. the default is the sum of the + kernel weights. + :param offset: Offset. If given, this value is added to the result, + after it has been divided by the scale factor. + """ + + def __init__(self, size, kernel, scale=None, offset=0): + if scale is None: + # default scale is sum of kernel + scale = functools.reduce(lambda a, b: a+b, kernel) + if size[0] * size[1] != len(kernel): + raise ValueError("not enough coefficients in kernel") + self.filterargs = size, scale, offset, kernel + + def filter(self, image): + if image.mode == "P": + raise ValueError("cannot filter palette images") + return image.filter(*self.filterargs) + + +class BuiltinFilter(Kernel): + def __init__(self): + pass + + +class RankFilter(Filter): + """ + Create a rank filter. The rank filter sorts all pixels in + a window of the given size, and returns the **rank**'th value. + + :param size: The kernel size, in pixels. + :param rank: What pixel value to pick. Use 0 for a min filter, + ``size * size / 2`` for a median filter, ``size * size - 1`` + for a max filter, etc. + """ + name = "Rank" + + def __init__(self, size, rank): + self.size = size + self.rank = rank + + def filter(self, image): + if image.mode == "P": + raise ValueError("cannot filter palette images") + image = image.expand(self.size//2, self.size//2) + return image.rankfilter(self.size, self.rank) + + +class MedianFilter(RankFilter): + """ + Create a median filter. Picks the median pixel value in a window with the + given size. + + :param size: The kernel size, in pixels. + """ + name = "Median" + + def __init__(self, size=3): + self.size = size + self.rank = size*size//2 + + +class MinFilter(RankFilter): + """ + Create a min filter. Picks the lowest pixel value in a window with the + given size. + + :param size: The kernel size, in pixels. + """ + name = "Min" + + def __init__(self, size=3): + self.size = size + self.rank = 0 + + +class MaxFilter(RankFilter): + """ + Create a max filter. Picks the largest pixel value in a window with the + given size. + + :param size: The kernel size, in pixels. + """ + name = "Max" + + def __init__(self, size=3): + self.size = size + self.rank = size*size-1 + + +class ModeFilter(Filter): + """ + + Create a mode filter. Picks the most frequent pixel value in a box with the + given size. Pixel values that occur only once or twice are ignored; if no + pixel value occurs more than twice, the original pixel value is preserved. + + :param size: The kernel size, in pixels. + """ + name = "Mode" + + def __init__(self, size=3): + self.size = size + + def filter(self, image): + return image.modefilter(self.size) + + +class GaussianBlur(Filter): + """Gaussian blur filter. + + :param radius: Blur radius. + """ + name = "GaussianBlur" + + def __init__(self, radius=2): + self.radius = radius + + def filter(self, image): + return image.gaussian_blur(self.radius) + + +class UnsharpMask(Filter): + """Unsharp mask filter. + + See Wikipedia's entry on `digital unsharp masking`_ for an explanation of + the parameters. + + :param radius: Blur Radius + :param percent: Unsharp strength, in percent + :param threshold: Threshold controls the minimum brightness change that + will be sharpened + + .. 
_digital unsharp masking: https://en.wikipedia.org/wiki/Unsharp_masking#Digital_unsharp_masking + + """ + name = "UnsharpMask" + + def __init__(self, radius=2, percent=150, threshold=3): + self.radius = radius + self.percent = percent + self.threshold = threshold + + def filter(self, image): + return image.unsharp_mask(self.radius, self.percent, self.threshold) + + +class BLUR(BuiltinFilter): + name = "Blur" + filterargs = (5, 5), 16, 0, ( + 1, 1, 1, 1, 1, + 1, 0, 0, 0, 1, + 1, 0, 0, 0, 1, + 1, 0, 0, 0, 1, + 1, 1, 1, 1, 1 + ) + + +class CONTOUR(BuiltinFilter): + name = "Contour" + filterargs = (3, 3), 1, 255, ( + -1, -1, -1, + -1, 8, -1, + -1, -1, -1 + ) + + +class DETAIL(BuiltinFilter): + name = "Detail" + filterargs = (3, 3), 6, 0, ( + 0, -1, 0, + -1, 10, -1, + 0, -1, 0 + ) + + +class EDGE_ENHANCE(BuiltinFilter): + name = "Edge-enhance" + filterargs = (3, 3), 2, 0, ( + -1, -1, -1, + -1, 10, -1, + -1, -1, -1 + ) + + +class EDGE_ENHANCE_MORE(BuiltinFilter): + name = "Edge-enhance More" + filterargs = (3, 3), 1, 0, ( + -1, -1, -1, + -1, 9, -1, + -1, -1, -1 + ) + + +class EMBOSS(BuiltinFilter): + name = "Emboss" + filterargs = (3, 3), 1, 128, ( + -1, 0, 0, + 0, 1, 0, + 0, 0, 0 + ) + + +class FIND_EDGES(BuiltinFilter): + name = "Find Edges" + filterargs = (3, 3), 1, 0, ( + -1, -1, -1, + -1, 8, -1, + -1, -1, -1 + ) + + +class SMOOTH(BuiltinFilter): + name = "Smooth" + filterargs = (3, 3), 13, 0, ( + 1, 1, 1, + 1, 5, 1, + 1, 1, 1 + ) + + +class SMOOTH_MORE(BuiltinFilter): + name = "Smooth More" + filterargs = (5, 5), 100, 0, ( + 1, 1, 1, 1, 1, + 1, 5, 5, 5, 1, + 1, 5, 44, 5, 1, + 1, 5, 5, 5, 1, + 1, 1, 1, 1, 1 + ) + + +class SHARPEN(BuiltinFilter): + name = "Sharpen" + filterargs = (3, 3), 16, 0, ( + -2, -2, -2, + -2, 32, -2, + -2, -2, -2 + ) diff --git a/Lib/site-packages/PIL/ImageFont.py b/Lib/site-packages/PIL/ImageFont.py new file mode 100644 index 0000000..af1166d --- /dev/null +++ b/Lib/site-packages/PIL/ImageFont.py @@ -0,0 +1,437 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PIL raster font management +# +# History: +# 1996-08-07 fl created (experimental) +# 1997-08-25 fl minor adjustments to handle fonts from pilfont 0.3 +# 1999-02-06 fl rewrote most font management stuff in C +# 1999-03-17 fl take pth files into account in load_path (from Richard Jones) +# 2001-02-17 fl added freetype support +# 2001-05-09 fl added TransposedFont wrapper class +# 2002-03-04 fl make sure we have a "L" or "1" font +# 2002-12-04 fl skip non-directory entries in the system path +# 2003-04-29 fl add embedded default font +# 2003-09-27 fl added support for truetype charmap encodings +# +# Todo: +# Adapt to PILFONT2 format (16-bit fonts, compressed, single file) +# +# Copyright (c) 1997-2003 by Secret Labs AB +# Copyright (c) 1996-2003 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image +from PIL._util import isDirectory, isPath +import os +import sys + + +class _imagingft_not_installed(object): + # module placeholder + def __getattr__(self, id): + raise ImportError("The _imagingft C module is not installed") + +try: + from PIL import _imagingft as core +except ImportError: + core = _imagingft_not_installed() + +# FIXME: add support for pilfont2 format (see FontFile.py) + +# -------------------------------------------------------------------- +# Font metrics format: +# "PILfont" LF +# fontdescriptor LF +# (optional) key=value... 
LF +# "DATA" LF +# binary data: 256*10*2 bytes (dx, dy, dstbox, srcbox) +# +# To place a character, cut out srcbox and paste at dstbox, +# relative to the character position. Then move the character +# position according to dx, dy. +# -------------------------------------------------------------------- + + +class ImageFont(object): + "PIL font wrapper" + + def _load_pilfont(self, filename): + + fp = open(filename, "rb") + + for ext in (".png", ".gif", ".pbm"): + try: + fullname = os.path.splitext(filename)[0] + ext + image = Image.open(fullname) + except: + pass + else: + if image and image.mode in ("1", "L"): + break + else: + raise IOError("cannot find glyph data file") + + self.file = fullname + + return self._load_pilfont_data(fp, image) + + def _load_pilfont_data(self, file, image): + + # read PILfont header + if file.readline() != b"PILfont\n": + raise SyntaxError("Not a PILfont file") + file.readline().split(b";") + self.info = [] # FIXME: should be a dictionary + while True: + s = file.readline() + if not s or s == b"DATA\n": + break + self.info.append(s) + + # read PILfont metrics + data = file.read(256*20) + + # check image + if image.mode not in ("1", "L"): + raise TypeError("invalid font image mode") + + image.load() + + self.font = Image.core.font(image.im, data) + + # delegate critical operations to internal type + self.getsize = self.font.getsize + self.getmask = self.font.getmask + + +## +# Wrapper for FreeType fonts. Application code should use the +# truetype factory function to create font objects. + +class FreeTypeFont(object): + "FreeType font wrapper (requires _imagingft service)" + + def __init__(self, font=None, size=10, index=0, encoding=""): + # FIXME: use service provider instead + + self.path = font + self.size = size + self.index = index + self.encoding = encoding + + if isPath(font): + self.font = core.getfont(font, size, index, encoding) + else: + self.font_bytes = font.read() + self.font = core.getfont( + "", size, index, encoding, self.font_bytes) + + def getname(self): + return self.font.family, self.font.style + + def getmetrics(self): + return self.font.ascent, self.font.descent + + def getsize(self, text): + size, offset = self.font.getsize(text) + return (size[0] + offset[0], size[1] + offset[1]) + + def getoffset(self, text): + return self.font.getsize(text)[1] + + def getmask(self, text, mode=""): + return self.getmask2(text, mode)[0] + + def getmask2(self, text, mode="", fill=Image.core.fill): + size, offset = self.font.getsize(text) + im = fill("L", size, 0) + self.font.render(text, im.id, mode == "1") + return im, offset + + def font_variant(self, font=None, size=None, index=None, encoding=None): + """ + Create a copy of this FreeTypeFont object, + using any specified arguments to override the settings. + + Parameters are identical to the parameters used to initialize this + object. + + :return: A FreeTypeFont object. + """ + return FreeTypeFont(font=self.path if font is None else font, + size=self.size if size is None else size, + index=self.index if index is None else index, + encoding=self.encoding if encoding is None else + encoding) + +## +# Wrapper that creates a transposed font from any existing font +# object. +# +# @param font A font object. +# @param orientation An optional orientation. If given, this should +# be one of Image.FLIP_LEFT_RIGHT, Image.FLIP_TOP_BOTTOM, +# Image.ROTATE_90, Image.ROTATE_180, or Image.ROTATE_270. 
+ + +class TransposedFont(object): + "Wrapper for writing rotated or mirrored text" + + def __init__(self, font, orientation=None): + self.font = font + self.orientation = orientation # any 'transpose' argument, or None + + def getsize(self, text): + w, h = self.font.getsize(text) + if self.orientation in (Image.ROTATE_90, Image.ROTATE_270): + return h, w + return w, h + + def getmask(self, text, mode=""): + im = self.font.getmask(text, mode) + if self.orientation is not None: + return im.transpose(self.orientation) + return im + + +def load(filename): + """ + Load a font file. This function loads a font object from the given + bitmap font file, and returns the corresponding font object. + + :param filename: Name of font file. + :return: A font object. + :exception IOError: If the file could not be read. + """ + f = ImageFont() + f._load_pilfont(filename) + return f + + +def truetype(font=None, size=10, index=0, encoding=""): + """ + Load a TrueType or OpenType font file, and create a font object. + This function loads a font object from the given file, and creates + a font object for a font of the given size. + + This function requires the _imagingft service. + + :param font: A truetype font file. Under Windows, if the file + is not found in this filename, the loader also looks in + Windows :file:`fonts/` directory. + :param size: The requested size, in points. + :param index: Which font face to load (default is first available face). + :param encoding: Which font encoding to use (default is Unicode). Common + encodings are "unic" (Unicode), "symb" (Microsoft + Symbol), "ADOB" (Adobe Standard), "ADBE" (Adobe Expert), + and "armn" (Apple Roman). See the FreeType documentation + for more information. + :return: A font object. + :exception IOError: If the file could not be read. + """ + + try: + return FreeTypeFont(font, size, index, encoding) + except IOError: + ttf_filename = os.path.basename(font) + + dirs = [] + if sys.platform == "win32": + # check the windows font repository + # NOTE: must use uppercase WINDIR, to work around bugs in + # 1.5.2's os.environ.get() + windir = os.environ.get("WINDIR") + if windir: + dirs.append(os.path.join(windir, "fonts")) + elif sys.platform in ('linux', 'linux2'): + lindirs = os.environ.get("XDG_DATA_DIRS", "") + if not lindirs: + # According to the freedesktop spec, XDG_DATA_DIRS should + # default to /usr/share + lindirs = '/usr/share' + dirs += [os.path.join(lindir, "fonts") + for lindir in lindirs.split(":")] + elif sys.platform == 'darwin': + dirs += ['/Library/Fonts', '/System/Library/Fonts', + os.path.expanduser('~/Library/Fonts')] + + ext = os.path.splitext(ttf_filename)[1] + first_font_with_a_different_extension = None + for directory in dirs: + for walkroot, walkdir, walkfilenames in os.walk(directory): + for walkfilename in walkfilenames: + if ext and walkfilename == ttf_filename: + fontpath = os.path.join(walkroot, walkfilename) + return FreeTypeFont(fontpath, size, index, encoding) + elif not ext and os.path.splitext(walkfilename)[0] == ttf_filename: + fontpath = os.path.join(walkroot, walkfilename) + if os.path.splitext(fontpath)[1] == '.ttf': + return FreeTypeFont(fontpath, size, index, encoding) + if not ext and first_font_with_a_different_extension is None: + first_font_with_a_different_extension = fontpath + if first_font_with_a_different_extension: + return FreeTypeFont(first_font_with_a_different_extension, size, + index, encoding) + raise + + +def load_path(filename): + """ + Load font file. 
Same as :py:func:`~PIL.ImageFont.load`, but searches for a + bitmap font along the Python path. + + :param filename: Name of font file. + :return: A font object. + :exception IOError: If the file could not be read. + """ + for directory in sys.path: + if isDirectory(directory): + if not isinstance(filename, str): + if bytes is str: + filename = filename.encode("utf-8") + else: + filename = filename.decode("utf-8") + try: + return load(os.path.join(directory, filename)) + except IOError: + pass + raise IOError("cannot find font file") + + +def load_default(): + """Load a "better than nothing" default font. + + .. versionadded:: 1.1.4 + + :return: A font object. + """ + from io import BytesIO + import base64 + f = ImageFont() + f._load_pilfont_data( + # courB08 + BytesIO(base64.decodestring(b''' +UElMZm9udAo7Ozs7OzsxMDsKREFUQQoAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAYAAAAA//8AAQAAAAAAAAABAAEA +BgAAAAH/+gADAAAAAQAAAAMABgAGAAAAAf/6AAT//QADAAAABgADAAYAAAAA//kABQABAAYAAAAL +AAgABgAAAAD/+AAFAAEACwAAABAACQAGAAAAAP/5AAUAAAAQAAAAFQAHAAYAAP////oABQAAABUA +AAAbAAYABgAAAAH/+QAE//wAGwAAAB4AAwAGAAAAAf/5AAQAAQAeAAAAIQAIAAYAAAAB//kABAAB +ACEAAAAkAAgABgAAAAD/+QAE//0AJAAAACgABAAGAAAAAP/6AAX//wAoAAAALQAFAAYAAAAB//8A +BAACAC0AAAAwAAMABgAAAAD//AAF//0AMAAAADUAAQAGAAAAAf//AAMAAAA1AAAANwABAAYAAAAB +//kABQABADcAAAA7AAgABgAAAAD/+QAFAAAAOwAAAEAABwAGAAAAAP/5AAYAAABAAAAARgAHAAYA +AAAA//kABQAAAEYAAABLAAcABgAAAAD/+QAFAAAASwAAAFAABwAGAAAAAP/5AAYAAABQAAAAVgAH +AAYAAAAA//kABQAAAFYAAABbAAcABgAAAAD/+QAFAAAAWwAAAGAABwAGAAAAAP/5AAUAAABgAAAA +ZQAHAAYAAAAA//kABQAAAGUAAABqAAcABgAAAAD/+QAFAAAAagAAAG8ABwAGAAAAAf/8AAMAAABv +AAAAcQAEAAYAAAAA//wAAwACAHEAAAB0AAYABgAAAAD/+gAE//8AdAAAAHgABQAGAAAAAP/7AAT/ +/gB4AAAAfAADAAYAAAAB//oABf//AHwAAACAAAUABgAAAAD/+gAFAAAAgAAAAIUABgAGAAAAAP/5 +AAYAAQCFAAAAiwAIAAYAAP////oABgAAAIsAAACSAAYABgAA////+gAFAAAAkgAAAJgABgAGAAAA +AP/6AAUAAACYAAAAnQAGAAYAAP////oABQAAAJ0AAACjAAYABgAA////+gAFAAAAowAAAKkABgAG +AAD////6AAUAAACpAAAArwAGAAYAAAAA//oABQAAAK8AAAC0AAYABgAA////+gAGAAAAtAAAALsA +BgAGAAAAAP/6AAQAAAC7AAAAvwAGAAYAAP////oABQAAAL8AAADFAAYABgAA////+gAGAAAAxQAA +AMwABgAGAAD////6AAUAAADMAAAA0gAGAAYAAP////oABQAAANIAAADYAAYABgAA////+gAGAAAA +2AAAAN8ABgAGAAAAAP/6AAUAAADfAAAA5AAGAAYAAP////oABQAAAOQAAADqAAYABgAAAAD/+gAF +AAEA6gAAAO8ABwAGAAD////6AAYAAADvAAAA9gAGAAYAAAAA//oABQAAAPYAAAD7AAYABgAA//// ++gAFAAAA+wAAAQEABgAGAAD////6AAYAAAEBAAABCAAGAAYAAP////oABgAAAQgAAAEPAAYABgAA +////+gAGAAABDwAAARYABgAGAAAAAP/6AAYAAAEWAAABHAAGAAYAAP////oABgAAARwAAAEjAAYA +BgAAAAD/+gAFAAABIwAAASgABgAGAAAAAf/5AAQAAQEoAAABKwAIAAYAAAAA//kABAABASsAAAEv +AAgABgAAAAH/+QAEAAEBLwAAATIACAAGAAAAAP/5AAX//AEyAAABNwADAAYAAAAAAAEABgACATcA +AAE9AAEABgAAAAH/+QAE//wBPQAAAUAAAwAGAAAAAP/7AAYAAAFAAAABRgAFAAYAAP////kABQAA 
+AUYAAAFMAAcABgAAAAD/+wAFAAABTAAAAVEABQAGAAAAAP/5AAYAAAFRAAABVwAHAAYAAAAA//sA +BQAAAVcAAAFcAAUABgAAAAD/+QAFAAABXAAAAWEABwAGAAAAAP/7AAYAAgFhAAABZwAHAAYAAP// +//kABQAAAWcAAAFtAAcABgAAAAD/+QAGAAABbQAAAXMABwAGAAAAAP/5AAQAAgFzAAABdwAJAAYA +AP////kABgAAAXcAAAF+AAcABgAAAAD/+QAGAAABfgAAAYQABwAGAAD////7AAUAAAGEAAABigAF +AAYAAP////sABQAAAYoAAAGQAAUABgAAAAD/+wAFAAABkAAAAZUABQAGAAD////7AAUAAgGVAAAB +mwAHAAYAAAAA//sABgACAZsAAAGhAAcABgAAAAD/+wAGAAABoQAAAacABQAGAAAAAP/7AAYAAAGn +AAABrQAFAAYAAAAA//kABgAAAa0AAAGzAAcABgAA////+wAGAAABswAAAboABQAGAAD////7AAUA +AAG6AAABwAAFAAYAAP////sABgAAAcAAAAHHAAUABgAAAAD/+wAGAAABxwAAAc0ABQAGAAD////7 +AAYAAgHNAAAB1AAHAAYAAAAA//sABQAAAdQAAAHZAAUABgAAAAH/+QAFAAEB2QAAAd0ACAAGAAAA +Av/6AAMAAQHdAAAB3gAHAAYAAAAA//kABAABAd4AAAHiAAgABgAAAAD/+wAF//0B4gAAAecAAgAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA +AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAYAAAAB +//sAAwACAecAAAHpAAcABgAAAAD/+QAFAAEB6QAAAe4ACAAGAAAAAP/5AAYAAAHuAAAB9AAHAAYA +AAAA//oABf//AfQAAAH5AAUABgAAAAD/+QAGAAAB+QAAAf8ABwAGAAAAAv/5AAMAAgH/AAACAAAJ +AAYAAAAA//kABQABAgAAAAIFAAgABgAAAAH/+gAE//sCBQAAAggAAQAGAAAAAP/5AAYAAAIIAAAC +DgAHAAYAAAAB//kABf/+Ag4AAAISAAUABgAA////+wAGAAACEgAAAhkABQAGAAAAAP/7AAX//gIZ +AAACHgADAAYAAAAA//wABf/9Ah4AAAIjAAEABgAAAAD/+QAHAAACIwAAAioABwAGAAAAAP/6AAT/ ++wIqAAACLgABAAYAAAAA//kABP/8Ai4AAAIyAAMABgAAAAD/+gAFAAACMgAAAjcABgAGAAAAAf/5 +AAT//QI3AAACOgAEAAYAAAAB//kABP/9AjoAAAI9AAQABgAAAAL/+QAE//sCPQAAAj8AAgAGAAD/ +///7AAYAAgI/AAACRgAHAAYAAAAA//kABgABAkYAAAJMAAgABgAAAAH//AAD//0CTAAAAk4AAQAG +AAAAAf//AAQAAgJOAAACUQADAAYAAAAB//kABP/9AlEAAAJUAAQABgAAAAH/+QAF//4CVAAAAlgA +BQAGAAD////7AAYAAAJYAAACXwAFAAYAAP////kABgAAAl8AAAJmAAcABgAA////+QAGAAACZgAA +Am0ABwAGAAD////5AAYAAAJtAAACdAAHAAYAAAAA//sABQACAnQAAAJ5AAcABgAA////9wAGAAAC +eQAAAoAACQAGAAD////3AAYAAAKAAAAChwAJAAYAAP////cABgAAAocAAAKOAAkABgAA////9wAG +AAACjgAAApUACQAGAAD////4AAYAAAKVAAACnAAIAAYAAP////cABgAAApwAAAKjAAkABgAA//// ++gAGAAACowAAAqoABgAGAAAAAP/6AAUAAgKqAAACrwAIAAYAAP////cABQAAAq8AAAK1AAkABgAA +////9wAFAAACtQAAArsACQAGAAD////3AAUAAAK7AAACwQAJAAYAAP////gABQAAAsEAAALHAAgA +BgAAAAD/9wAEAAACxwAAAssACQAGAAAAAP/3AAQAAALLAAACzwAJAAYAAAAA//cABAAAAs8AAALT +AAkABgAAAAD/+AAEAAAC0wAAAtcACAAGAAD////6AAUAAALXAAAC3QAGAAYAAP////cABgAAAt0A +AALkAAkABgAAAAD/9wAFAAAC5AAAAukACQAGAAAAAP/3AAUAAALpAAAC7gAJAAYAAAAA//cABQAA +Au4AAALzAAkABgAAAAD/9wAFAAAC8wAAAvgACQAGAAAAAP/4AAUAAAL4AAAC/QAIAAYAAAAA//oA +Bf//Av0AAAMCAAUABgAA////+gAGAAADAgAAAwkABgAGAAD////3AAYAAAMJAAADEAAJAAYAAP// +//cABgAAAxAAAAMXAAkABgAA////9wAGAAADFwAAAx4ACQAGAAD////4AAYAAAAAAAoABwASAAYA +AP////cABgAAAAcACgAOABMABgAA////+gAFAAAADgAKABQAEAAGAAD////6AAYAAAAUAAoAGwAQ +AAYAAAAA//gABgAAABsACgAhABIABgAAAAD/+AAGAAAAIQAKACcAEgAGAAAAAP/4AAYAAAAnAAoA 
+LQASAAYAAAAA//gABgAAAC0ACgAzABIABgAAAAD/+QAGAAAAMwAKADkAEQAGAAAAAP/3AAYAAAA5 +AAoAPwATAAYAAP////sABQAAAD8ACgBFAA8ABgAAAAD/+wAFAAIARQAKAEoAEQAGAAAAAP/4AAUA +AABKAAoATwASAAYAAAAA//gABQAAAE8ACgBUABIABgAAAAD/+AAFAAAAVAAKAFkAEgAGAAAAAP/5 +AAUAAABZAAoAXgARAAYAAAAA//gABgAAAF4ACgBkABIABgAAAAD/+AAGAAAAZAAKAGoAEgAGAAAA +AP/4AAYAAABqAAoAcAASAAYAAAAA//kABgAAAHAACgB2ABEABgAAAAD/+AAFAAAAdgAKAHsAEgAG +AAD////4AAYAAAB7AAoAggASAAYAAAAA//gABQAAAIIACgCHABIABgAAAAD/+AAFAAAAhwAKAIwA +EgAGAAAAAP/4AAUAAACMAAoAkQASAAYAAAAA//gABQAAAJEACgCWABIABgAAAAD/+QAFAAAAlgAK +AJsAEQAGAAAAAP/6AAX//wCbAAoAoAAPAAYAAAAA//oABQABAKAACgClABEABgAA////+AAGAAAA +pQAKAKwAEgAGAAD////4AAYAAACsAAoAswASAAYAAP////gABgAAALMACgC6ABIABgAA////+QAG +AAAAugAKAMEAEQAGAAD////4AAYAAgDBAAoAyAAUAAYAAP////kABQACAMgACgDOABMABgAA//// ++QAGAAIAzgAKANUAEw== +''')), Image.open(BytesIO(base64.decodestring(b''' +iVBORw0KGgoAAAANSUhEUgAAAx4AAAAUAQAAAAArMtZoAAAEwElEQVR4nABlAJr/AHVE4czCI/4u +Mc4b7vuds/xzjz5/3/7u/n9vMe7vnfH/9++vPn/xyf5zhxzjt8GHw8+2d83u8x27199/nxuQ6Od9 +M43/5z2I+9n9ZtmDBwMQECDRQw/eQIQohJXxpBCNVE6QCCAAAAD//wBlAJr/AgALyj1t/wINwq0g +LeNZUworuN1cjTPIzrTX6ofHWeo3v336qPzfEwRmBnHTtf95/fglZK5N0PDgfRTslpGBvz7LFc4F +IUXBWQGjQ5MGCx34EDFPwXiY4YbYxavpnhHFrk14CDAAAAD//wBlAJr/AgKqRooH2gAgPeggvUAA +Bu2WfgPoAwzRAABAAAAAAACQgLz/3Uv4Gv+gX7BJgDeeGP6AAAD1NMDzKHD7ANWr3loYbxsAD791 +NAADfcoIDyP44K/jv4Y63/Z+t98Ovt+ub4T48LAAAAD//wBlAJr/AuplMlADJAAAAGuAphWpqhMx +in0A/fRvAYBABPgBwBUgABBQ/sYAyv9g0bCHgOLoGAAAAAAAREAAwI7nr0ArYpow7aX8//9LaP/9 +SjdavWA8ePHeBIKB//81/83ndznOaXx379wAAAD//wBlAJr/AqDxW+D3AABAAbUh/QMnbQag/gAY +AYDAAACgtgD/gOqAAAB5IA/8AAAk+n9w0AAA8AAAmFRJuPo27ciC0cD5oeW4E7KA/wD3ECMAn2tt +y8PgwH8AfAxFzC0JzeAMtratAsC/ffwAAAD//wBlAJr/BGKAyCAA4AAAAvgeYTAwHd1kmQF5chkG +ABoMIHcL5xVpTfQbUqzlAAAErwAQBgAAEOClA5D9il08AEh/tUzdCBsXkbgACED+woQg8Si9VeqY +lODCn7lmF6NhnAEYgAAA/NMIAAAAAAD//2JgjLZgVGBg5Pv/Tvpc8hwGBjYGJADjHDrAwPzAjv/H +/Wf3PzCwtzcwHmBgYGcwbZz8wHaCAQMDOwMDQ8MCBgYOC3W7mp+f0w+wHOYxO3OG+e376hsMZjk3 +AAAAAP//YmCMY2A4wMAIN5e5gQETPD6AZisDAwMDgzSDAAPjByiHcQMDAwMDg1nOze1lByRu5/47 +c4859311AYNZzg0AAAAA//9iYGDBYihOIIMuwIjGL39/fwffA8b//xv/P2BPtzzHwCBjUQAAAAD/ +/yLFBrIBAAAA//9i1HhcwdhizX7u8NZNzyLbvT97bfrMf/QHI8evOwcSqGUJAAAA//9iYBB81iSw +pEE170Qrg5MIYydHqwdDQRMrAwcVrQAAAAD//2J4x7j9AAMDn8Q/BgYLBoaiAwwMjPdvMDBYM1Tv +oJodAAAAAP//Yqo/83+dxePWlxl3npsel9lvLfPcqlE9725C+acfVLMEAAAA//9i+s9gwCoaaGMR +evta/58PTEWzr21hufPjA8N+qlnBwAAAAAD//2JiWLci5v1+HmFXDqcnULE/MxgYGBj+f6CaJQAA +AAD//2Ji2FrkY3iYpYC5qDeGgeEMAwPDvwQBBoYvcTwOVLMEAAAA//9isDBgkP///0EOg9z35v// +Gc/eeW7BwPj5+QGZhANUswMAAAD//2JgqGBgYGBgqEMXlvhMPUsAAAAA//8iYDd1AAAAAP//AwDR +w7IkEbzhVQAAAABJRU5ErkJggg== +''')))) + return f + +# End of file diff --git a/Lib/site-packages/PIL/ImageGrab.py b/Lib/site-packages/PIL/ImageGrab.py new file mode 100644 index 0000000..febdb23 --- /dev/null +++ b/Lib/site-packages/PIL/ImageGrab.py @@ -0,0 +1,61 @@ +# +# The Python Imaging Library +# $Id$ +# +# screen grabber (OS X and Windows only) +# +# History: +# 2001-04-26 fl created +# 2001-09-17 fl use builtin driver, if present +# 2002-11-19 fl added grabclipboard support +# +# Copyright (c) 2001-2002 by Secret Labs AB +# Copyright (c) 2001-2002 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. 
+# + +from PIL import Image + +import sys +if sys.platform not in ["win32", "darwin"]: + raise ImportError("ImageGrab is OS X and Windows only") + +if sys.platform == "win32": + grabber = Image.core.grabscreen +elif sys.platform == "darwin": + import os + import tempfile + import subprocess + + +def grab(bbox=None): + if sys.platform == "darwin": + f, file = tempfile.mkstemp('.png') + os.close(f) + subprocess.call(['screencapture', '-x', file]) + im = Image.open(file) + im.load() + os.unlink(file) + else: + size, data = grabber() + im = Image.frombytes( + "RGB", size, data, + # RGB, 32-bit line padding, origo in lower left corner + "raw", "BGR", (size[0]*3 + 3) & -4, -1 + ) + if bbox: + im = im.crop(bbox) + return im + + +def grabclipboard(): + if sys.platform == "darwin": + raise NotImplementedError("Method is not implemented on OS X") + debug = 0 # temporary interface + data = Image.core.grabclipboard(debug) + if isinstance(data, bytes): + from PIL import BmpImagePlugin + import io + return BmpImagePlugin.DibImageFile(io.BytesIO(data)) + return data diff --git a/Lib/site-packages/PIL/ImageMath.py b/Lib/site-packages/PIL/ImageMath.py new file mode 100644 index 0000000..f92d500 --- /dev/null +++ b/Lib/site-packages/PIL/ImageMath.py @@ -0,0 +1,270 @@ +# +# The Python Imaging Library +# $Id$ +# +# a simple math add-on for the Python Imaging Library +# +# History: +# 1999-02-15 fl Original PIL Plus release +# 2005-05-05 fl Simplified and cleaned up for PIL 1.1.6 +# 2005-09-12 fl Fixed int() and float() for Python 2.4.1 +# +# Copyright (c) 1999-2005 by Secret Labs AB +# Copyright (c) 2005 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image +from PIL import _imagingmath + +try: + import builtins +except ImportError: + import __builtin__ + builtins = __builtin__ + +VERBOSE = 0 + + +def _isconstant(v): + return isinstance(v, int) or isinstance(v, float) + + +class _Operand(object): + # wraps an image operand, providing standard operators + + def __init__(self, im): + self.im = im + + def __fixup(self, im1): + # convert image to suitable mode + if isinstance(im1, _Operand): + # argument was an image. 
+ if im1.im.mode in ("1", "L"): + return im1.im.convert("I") + elif im1.im.mode in ("I", "F"): + return im1.im + else: + raise ValueError("unsupported mode: %s" % im1.im.mode) + else: + # argument was a constant + if _isconstant(im1) and self.im.mode in ("1", "L", "I"): + return Image.new("I", self.im.size, im1) + else: + return Image.new("F", self.im.size, im1) + + def apply(self, op, im1, im2=None, mode=None): + im1 = self.__fixup(im1) + if im2 is None: + # unary operation + out = Image.new(mode or im1.mode, im1.size, None) + im1.load() + try: + op = getattr(_imagingmath, op+"_"+im1.mode) + except AttributeError: + raise TypeError("bad operand type for '%s'" % op) + _imagingmath.unop(op, out.im.id, im1.im.id) + else: + # binary operation + im2 = self.__fixup(im2) + if im1.mode != im2.mode: + # convert both arguments to floating point + if im1.mode != "F": + im1 = im1.convert("F") + if im2.mode != "F": + im2 = im2.convert("F") + if im1.mode != im2.mode: + raise ValueError("mode mismatch") + if im1.size != im2.size: + # crop both arguments to a common size + size = (min(im1.size[0], im2.size[0]), + min(im1.size[1], im2.size[1])) + if im1.size != size: + im1 = im1.crop((0, 0) + size) + if im2.size != size: + im2 = im2.crop((0, 0) + size) + out = Image.new(mode or im1.mode, size, None) + else: + out = Image.new(mode or im1.mode, im1.size, None) + im1.load() + im2.load() + try: + op = getattr(_imagingmath, op+"_"+im1.mode) + except AttributeError: + raise TypeError("bad operand type for '%s'" % op) + _imagingmath.binop(op, out.im.id, im1.im.id, im2.im.id) + return _Operand(out) + + # unary operators + def __bool__(self): + # an image is "true" if it contains at least one non-zero pixel + return self.im.getbbox() is not None + + if bytes is str: + # Provide __nonzero__ for pre-Py3k + __nonzero__ = __bool__ + del __bool__ + + def __abs__(self): + return self.apply("abs", self) + + def __pos__(self): + return self + + def __neg__(self): + return self.apply("neg", self) + + # binary operators + def __add__(self, other): + return self.apply("add", self, other) + + def __radd__(self, other): + return self.apply("add", other, self) + + def __sub__(self, other): + return self.apply("sub", self, other) + + def __rsub__(self, other): + return self.apply("sub", other, self) + + def __mul__(self, other): + return self.apply("mul", self, other) + + def __rmul__(self, other): + return self.apply("mul", other, self) + + def __truediv__(self, other): + return self.apply("div", self, other) + + def __rtruediv__(self, other): + return self.apply("div", other, self) + + def __mod__(self, other): + return self.apply("mod", self, other) + + def __rmod__(self, other): + return self.apply("mod", other, self) + + def __pow__(self, other): + return self.apply("pow", self, other) + + def __rpow__(self, other): + return self.apply("pow", other, self) + + if bytes is str: + # Provide __div__ and __rdiv__ for pre-Py3k + __div__ = __truediv__ + __rdiv__ = __rtruediv__ + del __truediv__ + del __rtruediv__ + + # bitwise + def __invert__(self): + return self.apply("invert", self) + + def __and__(self, other): + return self.apply("and", self, other) + + def __rand__(self, other): + return self.apply("and", other, self) + + def __or__(self, other): + return self.apply("or", self, other) + + def __ror__(self, other): + return self.apply("or", other, self) + + def __xor__(self, other): + return self.apply("xor", self, other) + + def __rxor__(self, other): + return self.apply("xor", other, self) + + def __lshift__(self, other): + 
return self.apply("lshift", self, other) + + def __rshift__(self, other): + return self.apply("rshift", self, other) + + # logical + def __eq__(self, other): + return self.apply("eq", self, other) + + def __ne__(self, other): + return self.apply("ne", self, other) + + def __lt__(self, other): + return self.apply("lt", self, other) + + def __le__(self, other): + return self.apply("le", self, other) + + def __gt__(self, other): + return self.apply("gt", self, other) + + def __ge__(self, other): + return self.apply("ge", self, other) + + +# conversions +def imagemath_int(self): + return _Operand(self.im.convert("I")) + + +def imagemath_float(self): + return _Operand(self.im.convert("F")) + + +# logical +def imagemath_equal(self, other): + return self.apply("eq", self, other, mode="I") + + +def imagemath_notequal(self, other): + return self.apply("ne", self, other, mode="I") + + +def imagemath_min(self, other): + return self.apply("min", self, other) + + +def imagemath_max(self, other): + return self.apply("max", self, other) + + +def imagemath_convert(self, mode): + return _Operand(self.im.convert(mode)) + +ops = {} +for k, v in list(globals().items()): + if k[:10] == "imagemath_": + ops[k[10:]] = v + + +def eval(expression, _dict={}, **kw): + """ + Evaluates an image expression. + + :param expression: A string containing a Python-style expression. + :param options: Values to add to the evaluation context. You + can either use a dictionary, or one or more keyword + arguments. + :return: The evaluated expression. This is usually an image object, but can + also be an integer, a floating point value, or a pixel tuple, + depending on the expression. + """ + + # build execution namespace + args = ops.copy() + args.update(_dict) + args.update(kw) + for k, v in list(args.items()): + if hasattr(v, "im"): + args[k] = _Operand(v) + + out = builtins.eval(expression, args) + try: + return out.im + except AttributeError: + return out diff --git a/Lib/site-packages/PIL/ImageMode.py b/Lib/site-packages/PIL/ImageMode.py new file mode 100644 index 0000000..d896001 --- /dev/null +++ b/Lib/site-packages/PIL/ImageMode.py @@ -0,0 +1,52 @@ +# +# The Python Imaging Library. +# $Id$ +# +# standard mode descriptors +# +# History: +# 2006-03-20 fl Added +# +# Copyright (c) 2006 by Secret Labs AB. +# Copyright (c) 2006 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +# mode descriptor cache +_modes = {} + + +## +# Wrapper for mode strings. + +class ModeDescriptor(object): + + def __init__(self, mode, bands, basemode, basetype): + self.mode = mode + self.bands = bands + self.basemode = basemode + self.basetype = basetype + + def __str__(self): + return self.mode + + +## +# Gets a mode descriptor for the given mode. 
+ +def getmode(mode): + if not _modes: + # initialize mode cache + from PIL import Image + # core modes + for m, (basemode, basetype, bands) in Image._MODEINFO.items(): + _modes[m] = ModeDescriptor(m, bands, basemode, basetype) + # extra experimental modes + _modes["LA"] = ModeDescriptor("LA", ("L", "A"), "L", "L") + _modes["PA"] = ModeDescriptor("PA", ("P", "A"), "RGB", "L") + # mapping modes + _modes["I;16"] = ModeDescriptor("I;16", "I", "L", "L") + _modes["I;16L"] = ModeDescriptor("I;16L", "I", "L", "L") + _modes["I;16B"] = ModeDescriptor("I;16B", "I", "L", "L") + return _modes[mode] diff --git a/Lib/site-packages/PIL/ImageMorph.py b/Lib/site-packages/PIL/ImageMorph.py new file mode 100644 index 0000000..902ed8d --- /dev/null +++ b/Lib/site-packages/PIL/ImageMorph.py @@ -0,0 +1,251 @@ +# A binary morphology add-on for the Python Imaging Library +# +# History: +# 2014-06-04 Initial version. +# +# Copyright (c) 2014 Dov Grobgeld + +from PIL import Image +from PIL import _imagingmorph +import re + +LUT_SIZE = 1 << 9 + + +class LutBuilder(object): + """A class for building a MorphLut from a descriptive language + + The input patterns is a list of a strings sequences like these:: + + 4:(... + .1. + 111)->1 + + (whitespaces including linebreaks are ignored). The option 4 + describes a series of symmetry operations (in this case a + 4-rotation), the pattern is described by: + + - . or X - Ignore + - 1 - Pixel is on + - 0 - Pixel is off + + The result of the operation is described after "->" string. + + The default is to return the current pixel value, which is + returned if no other match is found. + + Operations: + + - 4 - 4 way rotation + - N - Negate + - 1 - Dummy op for no other operation (an op must always be given) + - M - Mirroring + + Example:: + + lb = LutBuilder(patterns = ["4:(... .1. 111)->1"]) + lut = lb.build_lut() + + """ + def __init__(self, patterns=None, op_name=None): + if patterns is not None: + self.patterns = patterns + else: + self.patterns = [] + self.lut = None + if op_name is not None: + known_patterns = { + 'corner': ['1:(... ... ...)->0', + '4:(00. 01. ...)->1'], + 'dilation4': ['4:(... .0. .1.)->1'], + 'dilation8': ['4:(... .0. .1.)->1', + '4:(... .0. ..1)->1'], + 'erosion4': ['4:(... .1. .0.)->0'], + 'erosion8': ['4:(... .1. .0.)->0', + '4:(... .1. ..0)->0'], + 'edge': ['1:(... ... ...)->0', + '4:(.0. .1. ...)->1', + '4:(01. .1. ...)->1'] + } + if op_name not in known_patterns: + raise Exception('Unknown pattern '+op_name+'!') + + self.patterns = known_patterns[op_name] + + def add_patterns(self, patterns): + self.patterns += patterns + + def build_default_lut(self): + symbols = [0, 1] + m = 1 << 4 # pos of current pixel + self.lut = bytearray([symbols[(i & m) > 0] for i in range(LUT_SIZE)]) + + def get_lut(self): + return self.lut + + def _string_permute(self, pattern, permutation): + """string_permute takes a pattern and a permutation and returns the + string permuted according to the permutation list. + """ + assert(len(permutation) == 9) + return ''.join([pattern[p] for p in permutation]) + + def _pattern_permute(self, basic_pattern, options, basic_result): + """pattern_permute takes a basic pattern and its result and clones + the pattern according to the modifications described in the $options + parameter. 
It returns a list of all cloned patterns.""" + patterns = [(basic_pattern, basic_result)] + + # rotations + if '4' in options: + res = patterns[-1][1] + for i in range(4): + patterns.append( + (self._string_permute(patterns[-1][0], [6, 3, 0, + 7, 4, 1, + 8, 5, 2]), res)) + # mirror + if 'M' in options: + n = len(patterns) + for pattern, res in patterns[0:n]: + patterns.append( + (self._string_permute(pattern, [2, 1, 0, + 5, 4, 3, + 8, 7, 6]), res)) + + # negate + if 'N' in options: + n = len(patterns) + for pattern, res in patterns[0:n]: + # Swap 0 and 1 + pattern = (pattern + .replace('0', 'Z') + .replace('1', '0') + .replace('Z', '1')) + res = '%d' % (1-int(res)) + patterns.append((pattern, res)) + + return patterns + + def build_lut(self): + """Compile all patterns into a morphology lut. + + TBD :Build based on (file) morphlut:modify_lut + """ + self.build_default_lut() + patterns = [] + + # Parse and create symmetries of the patterns strings + for p in self.patterns: + m = re.search( + r'(\w*):?\s*\((.+?)\)\s*->\s*(\d)', p.replace('\n', '')) + if not m: + raise Exception('Syntax error in pattern "'+p+'"') + options = m.group(1) + pattern = m.group(2) + result = int(m.group(3)) + + # Get rid of spaces + pattern = pattern.replace(' ', '').replace('\n', '') + + patterns += self._pattern_permute(pattern, options, result) + +# # Debugging +# for p,r in patterns: +# print p,r +# print '--' + + # compile the patterns into regular expressions for speed + for i in range(len(patterns)): + p = patterns[i][0].replace('.', 'X').replace('X', '[01]') + p = re.compile(p) + patterns[i] = (p, patterns[i][1]) + + # Step through table and find patterns that match. + # Note that all the patterns are searched. The last one + # caught overrides + for i in range(LUT_SIZE): + # Build the bit pattern + bitpattern = bin(i)[2:] + bitpattern = ('0'*(9-len(bitpattern)) + bitpattern)[::-1] + + for p, r in patterns: + if p.match(bitpattern): + self.lut[i] = [0, 1][r] + + return self.lut + + +class MorphOp(object): + """A class for binary morphological operators""" + + def __init__(self, + lut=None, + op_name=None, + patterns=None): + """Create a binary morphological operator""" + self.lut = lut + if op_name is not None: + self.lut = LutBuilder(op_name=op_name).build_lut() + elif patterns is not None: + self.lut = LutBuilder(patterns=patterns).build_lut() + + def apply(self, image): + """Run a single morphological operation on an image + + Returns a tuple of the number of changed pixels and the + morphed image""" + if self.lut is None: + raise Exception('No operator loaded') + + if image.mode != 'L': + raise Exception('Image must be binary, meaning it must use mode L') + outimage = Image.new(image.mode, image.size, None) + count = _imagingmorph.apply( + bytes(self.lut), image.im.id, outimage.im.id) + return count, outimage + + def match(self, image): + """Get a list of coordinates matching the morphological operation on + an image. 
+ + Returns a list of tuples of (x,y) coordinates + of all matching pixels.""" + if self.lut is None: + raise Exception('No operator loaded') + + if image.mode != 'L': + raise Exception('Image must be binary, meaning it must use mode L') + return _imagingmorph.match(bytes(self.lut), image.im.id) + + def get_on_pixels(self, image): + """Get a list of all turned on pixels in a binary image + + Returns a list of tuples of (x,y) coordinates + of all matching pixels.""" + + if image.mode != 'L': + raise Exception('Image must be binary, meaning it must use mode L') + return _imagingmorph.get_on_pixels(image.im.id) + + def load_lut(self, filename): + """Load an operator from an mrl file""" + with open(filename, 'rb') as f: + self.lut = bytearray(f.read()) + + if len(self.lut) != 8192: + self.lut = None + raise Exception('Wrong size operator file!') + + def save_lut(self, filename): + """Save an operator to an mrl file""" + if self.lut is None: + raise Exception('No operator loaded') + with open(filename, 'wb') as f: + f.write(self.lut) + + def set_lut(self, lut): + """Set the lut from an external source""" + self.lut = lut + +# End of file diff --git a/Lib/site-packages/PIL/ImageOps.py b/Lib/site-packages/PIL/ImageOps.py new file mode 100644 index 0000000..f317645 --- /dev/null +++ b/Lib/site-packages/PIL/ImageOps.py @@ -0,0 +1,461 @@ +# +# The Python Imaging Library. +# $Id$ +# +# standard image operations +# +# History: +# 2001-10-20 fl Created +# 2001-10-23 fl Added autocontrast operator +# 2001-12-18 fl Added Kevin's fit operator +# 2004-03-14 fl Fixed potential division by zero in equalize +# 2005-05-05 fl Fixed equalize for low number of values +# +# Copyright (c) 2001-2004 by Secret Labs AB +# Copyright (c) 2001-2004 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image +from PIL._util import isStringType +import operator +import functools + + +# +# helpers + +def _border(border): + if isinstance(border, tuple): + if len(border) == 2: + left, top = right, bottom = border + elif len(border) == 4: + left, top, right, bottom = border + else: + left = top = right = bottom = border + return left, top, right, bottom + + +def _color(color, mode): + if isStringType(color): + from PIL import ImageColor + color = ImageColor.getcolor(color, mode) + return color + + +def _lut(image, lut): + if image.mode == "P": + # FIXME: apply to lookup table, not image data + raise NotImplementedError("mode P support coming soon") + elif image.mode in ("L", "RGB"): + if image.mode == "RGB" and len(lut) == 256: + lut = lut + lut + lut + return image.point(lut) + else: + raise IOError("not supported for this image mode") + +# +# actions + + +def autocontrast(image, cutoff=0, ignore=None): + """ + Maximize (normalize) image contrast. This function calculates a + histogram of the input image, removes **cutoff** percent of the + lightest and darkest pixels from the histogram, and remaps the image + so that the darkest pixel becomes black (0), and the lightest + becomes white (255). + + :param image: The image to process. + :param cutoff: How many percent to cut off from the histogram. + :param ignore: The background pixel value (use None for no background). + :return: An image. 
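+
+    For example, a sketch that assumes ``im`` is an "L" or "RGB" image and
+    clips 2 percent of the pixels from each end of its histogram::
+
+        from PIL import ImageOps
+
+        normalized = ImageOps.autocontrast(im, cutoff=2)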
+ """ + histogram = image.histogram() + lut = [] + for layer in range(0, len(histogram), 256): + h = histogram[layer:layer+256] + if ignore is not None: + # get rid of outliers + try: + h[ignore] = 0 + except TypeError: + # assume sequence + for ix in ignore: + h[ix] = 0 + if cutoff: + # cut off pixels from both ends of the histogram + # get number of pixels + n = 0 + for ix in range(256): + n = n + h[ix] + # remove cutoff% pixels from the low end + cut = n * cutoff // 100 + for lo in range(256): + if cut > h[lo]: + cut = cut - h[lo] + h[lo] = 0 + else: + h[lo] -= cut + cut = 0 + if cut <= 0: + break + # remove cutoff% samples from the hi end + cut = n * cutoff // 100 + for hi in range(255, -1, -1): + if cut > h[hi]: + cut = cut - h[hi] + h[hi] = 0 + else: + h[hi] -= cut + cut = 0 + if cut <= 0: + break + # find lowest/highest samples after preprocessing + for lo in range(256): + if h[lo]: + break + for hi in range(255, -1, -1): + if h[hi]: + break + if hi <= lo: + # don't bother + lut.extend(list(range(256))) + else: + scale = 255.0 / (hi - lo) + offset = -lo * scale + for ix in range(256): + ix = int(ix * scale + offset) + if ix < 0: + ix = 0 + elif ix > 255: + ix = 255 + lut.append(ix) + return _lut(image, lut) + + +def colorize(image, black, white): + """ + Colorize grayscale image. The **black** and **white** + arguments should be RGB tuples; this function calculates a color + wedge mapping all black pixels in the source image to the first + color, and all white pixels to the second color. + + :param image: The image to colorize. + :param black: The color to use for black input pixels. + :param white: The color to use for white input pixels. + :return: An image. + """ + assert image.mode == "L" + black = _color(black, "RGB") + white = _color(white, "RGB") + red = [] + green = [] + blue = [] + for i in range(256): + red.append(black[0]+i*(white[0]-black[0])//255) + green.append(black[1]+i*(white[1]-black[1])//255) + blue.append(black[2]+i*(white[2]-black[2])//255) + image = image.convert("RGB") + return _lut(image, red + green + blue) + + +def crop(image, border=0): + """ + Remove border from image. The same amount of pixels are removed + from all four sides. This function works on all image modes. + + .. seealso:: :py:meth:`~PIL.Image.Image.crop` + + :param image: The image to crop. + :param border: The number of pixels to remove. + :return: An image. + """ + left, top, right, bottom = _border(border) + return image.crop( + (left, top, image.size[0]-right, image.size[1]-bottom) + ) + + +def deform(image, deformer, resample=Image.BILINEAR): + """ + Deform the image. + + :param image: The image to deform. + :param deformer: A deformer object. Any object that implements a + **getmesh** method can be used. + :param resample: What resampling filter to use. + :return: An image. + """ + return image.transform( + image.size, Image.MESH, deformer.getmesh(image), resample + ) + + +def equalize(image, mask=None): + """ + Equalize the image histogram. This function applies a non-linear + mapping to the input image, in order to create a uniform + distribution of grayscale values in the output image. + + :param image: The image to equalize. + :param mask: An optional mask. If given, only the pixels selected by + the mask are included in the analysis. + :return: An image. 
+ """ + if image.mode == "P": + image = image.convert("RGB") + h = image.histogram(mask) + lut = [] + for b in range(0, len(h), 256): + histo = [_f for _f in h[b:b+256] if _f] + if len(histo) <= 1: + lut.extend(list(range(256))) + else: + step = (functools.reduce(operator.add, histo) - histo[-1]) // 255 + if not step: + lut.extend(list(range(256))) + else: + n = step // 2 + for i in range(256): + lut.append(n // step) + n = n + h[i+b] + return _lut(image, lut) + + +def expand(image, border=0, fill=0): + """ + Add border to the image + + :param image: The image to expand. + :param border: Border width, in pixels. + :param fill: Pixel fill value (a color value). Default is 0 (black). + :return: An image. + """ + left, top, right, bottom = _border(border) + width = left + image.size[0] + right + height = top + image.size[1] + bottom + out = Image.new(image.mode, (width, height), _color(fill, image.mode)) + out.paste(image, (left, top)) + return out + + +def fit(image, size, method=Image.NEAREST, bleed=0.0, centering=(0.5, 0.5)): + """ + Returns a sized and cropped version of the image, cropped to the + requested aspect ratio and size. + + This function was contributed by Kevin Cazabon. + + :param size: The requested output size in pixels, given as a + (width, height) tuple. + :param method: What resampling method to use. Default is + :py:attr:`PIL.Image.NEAREST`. + :param bleed: Remove a border around the outside of the image (from all + four edges. The value is a decimal percentage (use 0.01 for + one percent). The default value is 0 (no border). + :param centering: Control the cropping position. Use (0.5, 0.5) for + center cropping (e.g. if cropping the width, take 50% off + of the left side, and therefore 50% off the right side). + (0.0, 0.0) will crop from the top left corner (i.e. if + cropping the width, take all of the crop off of the right + side, and if cropping the height, take all of it off the + bottom). (1.0, 0.0) will crop from the bottom left + corner, etc. (i.e. if cropping the width, take all of the + crop off the left side, and if cropping the height take + none from the top, and therefore all off the bottom). + :return: An image. 
+ """ + + # by Kevin Cazabon, Feb 17/2000 + # kevin@cazabon.com + # http://www.cazabon.com + + # ensure inputs are valid + if not isinstance(centering, list): + centering = [centering[0], centering[1]] + + if centering[0] > 1.0 or centering[0] < 0.0: + centering[0] = 0.50 + if centering[1] > 1.0 or centering[1] < 0.0: + centering[1] = 0.50 + + if bleed > 0.49999 or bleed < 0.0: + bleed = 0.0 + + # calculate the area to use for resizing and cropping, subtracting + # the 'bleed' around the edges + + # number of pixels to trim off on Top and Bottom, Left and Right + bleedPixels = ( + int((float(bleed) * float(image.size[0])) + 0.5), + int((float(bleed) * float(image.size[1])) + 0.5) + ) + + liveArea = (0, 0, image.size[0], image.size[1]) + if bleed > 0.0: + liveArea = ( + bleedPixels[0], bleedPixels[1], image.size[0] - bleedPixels[0] - 1, + image.size[1] - bleedPixels[1] - 1 + ) + + liveSize = (liveArea[2] - liveArea[0], liveArea[3] - liveArea[1]) + + # calculate the aspect ratio of the liveArea + liveAreaAspectRatio = float(liveSize[0])/float(liveSize[1]) + + # calculate the aspect ratio of the output image + aspectRatio = float(size[0]) / float(size[1]) + + # figure out if the sides or top/bottom will be cropped off + if liveAreaAspectRatio >= aspectRatio: + # liveArea is wider than what's needed, crop the sides + cropWidth = int((aspectRatio * float(liveSize[1])) + 0.5) + cropHeight = liveSize[1] + else: + # liveArea is taller than what's needed, crop the top and bottom + cropWidth = liveSize[0] + cropHeight = int((float(liveSize[0])/aspectRatio) + 0.5) + + # make the crop + leftSide = int(liveArea[0] + (float(liveSize[0]-cropWidth) * centering[0])) + if leftSide < 0: + leftSide = 0 + topSide = int(liveArea[1] + (float(liveSize[1]-cropHeight) * centering[1])) + if topSide < 0: + topSide = 0 + + out = image.crop( + (leftSide, topSide, leftSide + cropWidth, topSide + cropHeight) + ) + + # resize the image and return it + return out.resize(size, method) + + +def flip(image): + """ + Flip the image vertically (top to bottom). + + :param image: The image to flip. + :return: An image. + """ + return image.transpose(Image.FLIP_TOP_BOTTOM) + + +def grayscale(image): + """ + Convert the image to grayscale. + + :param image: The image to convert. + :return: An image. + """ + return image.convert("L") + + +def invert(image): + """ + Invert (negate) the image. + + :param image: The image to invert. + :return: An image. + """ + lut = [] + for i in range(256): + lut.append(255-i) + return _lut(image, lut) + + +def mirror(image): + """ + Flip image horizontally (left to right). + + :param image: The image to mirror. + :return: An image. + """ + return image.transpose(Image.FLIP_LEFT_RIGHT) + + +def posterize(image, bits): + """ + Reduce the number of bits for each color channel. + + :param image: The image to posterize. + :param bits: The number of bits to keep for each channel (1-8). + :return: An image. + """ + lut = [] + mask = ~(2**(8-bits)-1) + for i in range(256): + lut.append(i & mask) + return _lut(image, lut) + + +def solarize(image, threshold=128): + """ + Invert all pixel values above a threshold. + + :param image: The image to solarize. + :param threshold: All pixels above this greyscale level are inverted. + :return: An image. + """ + lut = [] + for i in range(256): + if i < threshold: + lut.append(i) + else: + lut.append(255-i) + return _lut(image, lut) + + +# -------------------------------------------------------------------- +# PIL USM components, from Kevin Cazabon. 
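A minimal usage sketch for the point operations above (autocontrast, colorize, posterize, fit), before the USM helpers that follow. The file name and colour strings are hypothetical placeholders, not part of the patch:

    from PIL import Image, ImageOps

    im = Image.open("photo.jpg").convert("L")
    stretched = ImageOps.autocontrast(im, cutoff=2)              # clip 2% per tail, rescale to 0-255
    tinted = ImageOps.colorize(stretched, "#001a33", "#ffe8c0")  # black -> dark blue, white -> cream
    poster = ImageOps.posterize(im, 4)                           # keep the top 4 bits of each channel
    thumb = ImageOps.fit(tinted, (128, 128))                     # crop to target aspect ratio, then resize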
+ +def gaussian_blur(im, radius=None): + """ PIL_usm.gblur(im, [radius])""" + + if radius is None: + radius = 5.0 + + im.load() + + return im.im.gaussian_blur(radius) + +gblur = gaussian_blur + + +def unsharp_mask(im, radius=None, percent=None, threshold=None): + """ PIL_usm.usm(im, [radius, percent, threshold])""" + + if radius is None: + radius = 5.0 + if percent is None: + percent = 150 + if threshold is None: + threshold = 3 + + im.load() + + return im.im.unsharp_mask(radius, percent, threshold) + +usm = unsharp_mask + + +def box_blur(image, radius): + """ + Blur the image by setting each pixel to the average value of the pixels + in a square box extending radius pixels in each direction. + Supports float radius of arbitrary size. Uses an optimized implementation + which runs in linear time relative to the size of the image + for any radius value. + + :param image: The image to blur. + :param radius: Size of the box in one direction. Radius 0 does not blur, + returns an identical image. Radius 1 takes 1 pixel + in each direction, i.e. 9 pixels in total. + :return: An image. + """ + image.load() + + return image._new(image.im.box_blur(radius)) diff --git a/Lib/site-packages/PIL/ImagePalette.py b/Lib/site-packages/PIL/ImagePalette.py new file mode 100644 index 0000000..fdc5a46 --- /dev/null +++ b/Lib/site-packages/PIL/ImagePalette.py @@ -0,0 +1,237 @@ +# +# The Python Imaging Library. +# $Id$ +# +# image palette object +# +# History: +# 1996-03-11 fl Rewritten. +# 1997-01-03 fl Up and running. +# 1997-08-23 fl Added load hack +# 2001-04-16 fl Fixed randint shadow bug in random() +# +# Copyright (c) 1997-2001 by Secret Labs AB +# Copyright (c) 1996-1997 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import array +from PIL import ImageColor + + +class ImagePalette(object): + """ + Color palette for palette mapped images + + :param mode: The mode to use for the Palette. See: + :ref:`concept-modes`. Defaults to "RGB" + :param palette: An optional palette. If given, it must be a bytearray, + an array or a list of ints between 0-255 and of length ``size`` + times the number of colors in ``mode``. The list must be aligned + by channel (All R values must be contiguous in the list before G + and B values.) Defaults to 0 through 255 per channel. + :param size: An optional palette size. If given, it cannot be equal to + or greater than 256. Defaults to 0. + """ + + def __init__(self, mode="RGB", palette=None, size=0): + self.mode = mode + self.rawmode = None # if set, palette contains raw data + self.palette = palette or list(range(256))*len(self.mode) + self.colors = {} + self.dirty = None + if ((size == 0 and len(self.mode)*256 != len(self.palette)) or + (size != 0 and size != len(self.palette))): + raise ValueError("wrong palette size") + + def copy(self): + new = ImagePalette() + + new.mode = self.mode + new.rawmode = self.rawmode + if self.palette is not None: + new.palette = self.palette[:] + new.colors = self.colors.copy() + new.dirty = self.dirty + + return new + + def getdata(self): + """ + Get palette contents in format suitable # for the low-level + ``im.putpalette`` primitive. + + .. warning:: This method is experimental. + """ + if self.rawmode: + return self.rawmode, self.palette + return self.mode + ";L", self.tobytes() + + def tobytes(self): + """Convert palette to bytes. + + .. warning:: This method is experimental. 
+ """ + if self.rawmode: + raise ValueError("palette contains raw palette data") + if isinstance(self.palette, bytes): + return self.palette + arr = array.array("B", self.palette) + if hasattr(arr, 'tobytes'): + return arr.tobytes() + return arr.tostring() + + # Declare tostring as an alias for tobytes + tostring = tobytes + + def getcolor(self, color): + """Given an rgb tuple, allocate palette entry. + + .. warning:: This method is experimental. + """ + if self.rawmode: + raise ValueError("palette contains raw palette data") + if isinstance(color, tuple): + try: + return self.colors[color] + except KeyError: + # allocate new color slot + if isinstance(self.palette, bytes): + self.palette = [int(x) for x in self.palette] + index = len(self.colors) + if index >= 256: + raise ValueError("cannot allocate more than 256 colors") + self.colors[color] = index + self.palette[index] = color[0] + self.palette[index+256] = color[1] + self.palette[index+512] = color[2] + self.dirty = 1 + return index + else: + raise ValueError("unknown color specifier: %r" % color) + + def save(self, fp): + """Save palette to text file. + + .. warning:: This method is experimental. + """ + if self.rawmode: + raise ValueError("palette contains raw palette data") + if isinstance(fp, str): + fp = open(fp, "w") + fp.write("# Palette\n") + fp.write("# Mode: %s\n" % self.mode) + for i in range(256): + fp.write("%d" % i) + for j in range(i*len(self.mode), (i+1)*len(self.mode)): + try: + fp.write(" %d" % self.palette[j]) + except IndexError: + fp.write(" 0") + fp.write("\n") + fp.close() + + +# -------------------------------------------------------------------- +# Internal + +def raw(rawmode, data): + palette = ImagePalette() + palette.rawmode = rawmode + palette.palette = data + palette.dirty = 1 + return palette + + +# -------------------------------------------------------------------- +# Factories + +def make_linear_lut(black, white): + lut = [] + if black == 0: + for i in range(256): + lut.append(white*i//255) + else: + raise NotImplementedError # FIXME + return lut + + +def make_gamma_lut(exp): + lut = [] + for i in range(256): + lut.append(int(((i / 255.0) ** exp) * 255.0 + 0.5)) + return lut + + +def negative(mode="RGB"): + palette = list(range(256)) + palette.reverse() + return ImagePalette(mode, palette * len(mode)) + + +def random(mode="RGB"): + from random import randint + palette = [] + for i in range(256*len(mode)): + palette.append(randint(0, 255)) + return ImagePalette(mode, palette) + + +def sepia(white="#fff0c0"): + r, g, b = ImageColor.getrgb(white) + r = make_linear_lut(0, r) + g = make_linear_lut(0, g) + b = make_linear_lut(0, b) + return ImagePalette("RGB", r + g + b) + + +def wedge(mode="RGB"): + return ImagePalette(mode, list(range(256)) * len(mode)) + + +def load(filename): + + # FIXME: supports GIMP gradients only + + fp = open(filename, "rb") + + lut = None + + if not lut: + try: + from PIL import GimpPaletteFile + fp.seek(0) + p = GimpPaletteFile.GimpPaletteFile(fp) + lut = p.getpalette() + except (SyntaxError, ValueError): + # import traceback + # traceback.print_exc() + pass + + if not lut: + try: + from PIL import GimpGradientFile + fp.seek(0) + p = GimpGradientFile.GimpGradientFile(fp) + lut = p.getpalette() + except (SyntaxError, ValueError): + # import traceback + # traceback.print_exc() + pass + + if not lut: + try: + from PIL import PaletteFile + fp.seek(0) + p = PaletteFile.PaletteFile(fp) + lut = p.getpalette() + except (SyntaxError, ValueError): + # import traceback + # 
traceback.print_exc() + pass + + if not lut: + raise IOError("cannot load palette") + + return lut # data, rawmode diff --git a/Lib/site-packages/PIL/ImagePath.py b/Lib/site-packages/PIL/ImagePath.py new file mode 100644 index 0000000..f23d014 --- /dev/null +++ b/Lib/site-packages/PIL/ImagePath.py @@ -0,0 +1,66 @@ +# +# The Python Imaging Library +# $Id$ +# +# path interface +# +# History: +# 1996-11-04 fl Created +# 2002-04-14 fl Added documentation stub class +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image + + +# the Python class below is overridden by the C implementation. + + +class Path(object): + + def __init__(self, xy): + pass + + ## + # Compacts the path, by removing points that are close to each + # other. This method modifies the path in place. + + def compact(self, distance=2): + pass + + ## + # Gets the bounding box. + + def getbbox(self): + pass + + ## + # Maps the path through a function. + + def map(self, function): + pass + + ## + # Converts the path to Python list. + # + # @param flat By default, this function returns a list of 2-tuples + # [(x, y), ...]. If this argument is true, it returns a flat + # list [x, y, ...] instead. + # @return A list of coordinates. + + def tolist(self, flat=0): + pass + + ## + # Transforms the path. + + def transform(self, matrix): + pass + + +# override with C implementation +Path = Image.core.path diff --git a/Lib/site-packages/PIL/ImageQt.py b/Lib/site-packages/PIL/ImageQt.py new file mode 100644 index 0000000..aece9d6 --- /dev/null +++ b/Lib/site-packages/PIL/ImageQt.py @@ -0,0 +1,198 @@ +# +# The Python Imaging Library. +# $Id$ +# +# a simple Qt image interface. +# +# history: +# 2006-06-03 fl: created +# 2006-06-04 fl: inherit from QImage instead of wrapping it +# 2006-06-05 fl: removed toimage helper; move string support to ImageQt +# 2013-11-13 fl: add support for Qt5 (aurelien.ballier@cyclonit.com) +# +# Copyright (c) 2006 by Secret Labs AB +# Copyright (c) 2006 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image +from PIL._util import isPath +from io import BytesIO + +qt_is_installed = True +qt_version = None +try: + from PyQt5.QtGui import QImage, qRgba, QPixmap + from PyQt5.QtCore import QBuffer, QIODevice + qt_version = '5' +except ImportError: + try: + from PyQt4.QtGui import QImage, qRgba, QPixmap + from PyQt4.QtCore import QBuffer, QIODevice + qt_version = '4' + except ImportError: + try: + from PySide.QtGui import QImage, qRgba, QPixmap + from PySide.QtCore import QBuffer, QIODevice + qt_version = 'side' + except ImportError: + qt_is_installed = False + + +def rgb(r, g, b, a=255): + """(Internal) Turns an RGB color into a Qt compatible color integer.""" + # use qRgb to pack the colors, and then turn the resulting long + # into a negative integer with the same bitpattern. 
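+    # e.g. rgb(255, 0, 0) == 0xffff0000 (AARRGGBB order, full alpha); masking with
+    # 0xffffffff keeps the value in the unsigned 32-bit range across Qt bindings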
+ return (qRgba(r, g, b, a) & 0xffffffff) + + +# :param im A PIL Image object, or a file name +# (given either as Python string or a PyQt string object) + +def fromqimage(im): + buffer = QBuffer() + buffer.open(QIODevice.ReadWrite) + # preserve alha channel with png + # otherwise ppm is more friendly with Image.open + if im.hasAlphaChannel(): + im.save(buffer, 'png') + else: + im.save(buffer, 'ppm') + + b = BytesIO() + try: + b.write(buffer.data()) + except TypeError: + # workaround for Python 2 + b.write(str(buffer.data())) + buffer.close() + b.seek(0) + + return Image.open(b) + + +def fromqpixmap(im): + return fromqimage(im) + # buffer = QBuffer() + # buffer.open(QIODevice.ReadWrite) + # # im.save(buffer) + # # What if png doesn't support some image features like animation? + # im.save(buffer, 'ppm') + # bytes_io = BytesIO() + # bytes_io.write(buffer.data()) + # buffer.close() + # bytes_io.seek(0) + # return Image.open(bytes_io) + + +def align8to32(bytes, width, mode): + """ + converts each scanline of data from 8 bit to 32 bit aligned + """ + + bits_per_pixel = { + '1': 1, + 'L': 8, + 'P': 8, + }[mode] + + # calculate bytes per line and the extra padding if needed + bits_per_line = bits_per_pixel * width + full_bytes_per_line, remaining_bits_per_line = divmod(bits_per_line, 8) + bytes_per_line = full_bytes_per_line + (1 if remaining_bits_per_line else 0) + + extra_padding = -bytes_per_line % 4 + + # already 32 bit aligned by luck + if not extra_padding: + return bytes + + new_data = [] + for i in range(len(bytes) // bytes_per_line): + new_data.append(bytes[i*bytes_per_line:(i+1)*bytes_per_line] + b'\x00' * extra_padding) + + return b''.join(new_data) + + +def _toqclass_helper(im): + data = None + colortable = None + + # handle filename, if given instead of image name + if hasattr(im, "toUtf8"): + # FIXME - is this really the best way to do this? + if str is bytes: + im = unicode(im.toUtf8(), "utf-8") + else: + im = str(im.toUtf8(), "utf-8") + if isPath(im): + im = Image.open(im) + + if im.mode == "1": + format = QImage.Format_Mono + elif im.mode == "L": + format = QImage.Format_Indexed8 + colortable = [] + for i in range(256): + colortable.append(rgb(i, i, i)) + elif im.mode == "P": + format = QImage.Format_Indexed8 + colortable = [] + palette = im.getpalette() + for i in range(0, len(palette), 3): + colortable.append(rgb(*palette[i:i+3])) + elif im.mode == "RGB": + data = im.tobytes("raw", "BGRX") + format = QImage.Format_RGB32 + elif im.mode == "RGBA": + try: + data = im.tobytes("raw", "BGRA") + except SystemError: + # workaround for earlier versions + r, g, b, a = im.split() + im = Image.merge("RGBA", (b, g, r, a)) + format = QImage.Format_ARGB32 + else: + raise ValueError("unsupported image mode %r" % im.mode) + + # must keep a reference, or Qt will crash! + __data = data or align8to32(im.tobytes(), im.size[0], im.mode) + return { + 'data': __data, 'im': im, 'format': format, 'colortable': colortable + } + +## +# An PIL image wrapper for Qt. This is a subclass of PyQt's QImage +# class. +# +# @param im A PIL Image object, or a file name (given either as Python +# string or a PyQt string object). + +if qt_is_installed: + class ImageQt(QImage): + + def __init__(self, im): + im_data = _toqclass_helper(im) + QImage.__init__(self, + im_data['data'], im_data['im'].size[0], + im_data['im'].size[1], im_data['format']) + if im_data['colortable']: + self.setColorTable(im_data['colortable']) + + +def toqimage(im): + return ImageQt(im) + + +def toqpixmap(im): + # # This doesn't work. 
For now using a dumb approach. + # im_data = _toqclass_helper(im) + # result = QPixmap(im_data['im'].size[0], im_data['im'].size[1]) + # result.loadFromData(im_data['data']) + # Fix some strange bug that causes + if im.mode == 'RGB': + im = im.convert('RGBA') + + qimage = toqimage(im) + return QPixmap.fromImage(qimage) diff --git a/Lib/site-packages/PIL/ImageSequence.py b/Lib/site-packages/PIL/ImageSequence.py new file mode 100644 index 0000000..256bcbe --- /dev/null +++ b/Lib/site-packages/PIL/ImageSequence.py @@ -0,0 +1,42 @@ +# +# The Python Imaging Library. +# $Id$ +# +# sequence support classes +# +# history: +# 1997-02-20 fl Created +# +# Copyright (c) 1997 by Secret Labs AB. +# Copyright (c) 1997 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +## + + +class Iterator(object): + """ + This class implements an iterator object that can be used to loop + over an image sequence. + + You can use the ``[]`` operator to access elements by index. This operator + will raise an :py:exc:`IndexError` if you try to access a nonexistent + frame. + + :param im: An image object. + """ + + def __init__(self, im): + if not hasattr(im, "seek"): + raise AttributeError("im must have seek method") + self.im = im + + def __getitem__(self, ix): + try: + if ix: + self.im.seek(ix) + return self.im + except EOFError: + raise IndexError # end of sequence diff --git a/Lib/site-packages/PIL/ImageShow.py b/Lib/site-packages/PIL/ImageShow.py new file mode 100644 index 0000000..51417c3 --- /dev/null +++ b/Lib/site-packages/PIL/ImageShow.py @@ -0,0 +1,179 @@ +# +# The Python Imaging Library. +# $Id$ +# +# im.show() drivers +# +# History: +# 2008-04-06 fl Created +# +# Copyright (c) Secret Labs AB 2008. +# +# See the README file for information on usage and redistribution. +# + +from __future__ import print_function + +from PIL import Image +import os +import sys + +if sys.version_info >= (3, 3): + from shlex import quote +else: + from pipes import quote + +_viewers = [] + + +def register(viewer, order=1): + try: + if issubclass(viewer, Viewer): + viewer = viewer() + except TypeError: + pass # raised if viewer wasn't a class + if order > 0: + _viewers.append(viewer) + elif order < 0: + _viewers.insert(0, viewer) + + +## +# Displays a given image. +# +# @param image An image object. +# @param title Optional title. Not all viewers can display the title. +# @param **options Additional viewer options. +# @return True if a suitable viewer was found, false otherwise. + +def show(image, title=None, **options): + for viewer in _viewers: + if viewer.show(image, title=title, **options): + return 1 + return 0 + + +## +# Base class for viewers. + +class Viewer(object): + + # main api + + def show(self, image, **options): + + # save temporary image to disk + if image.mode[:4] == "I;16": + # @PIL88 @PIL101 + # "I;16" isn't an 'official' mode, but we still want to + # provide a simple way to show 16-bit images. + base = "L" + # FIXME: auto-contrast if max() > 255? 
+ else: + base = Image.getmodebase(image.mode) + if base != image.mode and image.mode != "1": + image = image.convert(base) + + return self.show_image(image, **options) + + # hook methods + + format = None + + def get_format(self, image): + # return format name, or None to save as PGM/PPM + return self.format + + def get_command(self, file, **options): + raise NotImplementedError + + def save_image(self, image): + # save to temporary file, and return filename + return image._dump(format=self.get_format(image)) + + def show_image(self, image, **options): + # display given image + return self.show_file(self.save_image(image), **options) + + def show_file(self, file, **options): + # display given file + os.system(self.get_command(file, **options)) + return 1 + +# -------------------------------------------------------------------- + +if sys.platform == "win32": + + class WindowsViewer(Viewer): + format = "BMP" + + def get_command(self, file, **options): + return ('start "Pillow" /WAIT "%s" ' + '&& ping -n 2 127.0.0.1 >NUL ' + '&& del /f "%s"' % (file, file)) + + register(WindowsViewer) + +elif sys.platform == "darwin": + + class MacViewer(Viewer): + format = "BMP" + + def get_command(self, file, **options): + # on darwin open returns immediately resulting in the temp + # file removal while app is opening + command = "open -a /Applications/Preview.app" + command = "(%s %s; sleep 20; rm -f %s)&" % (command, quote(file), + quote(file)) + return command + + register(MacViewer) + +else: + + # unixoids + + def which(executable): + path = os.environ.get("PATH") + if not path: + return None + for dirname in path.split(os.pathsep): + filename = os.path.join(dirname, executable) + if os.path.isfile(filename): + # FIXME: make sure it's executable + return filename + return None + + class UnixViewer(Viewer): + def show_file(self, file, **options): + command, executable = self.get_command_ex(file, **options) + command = "(%s %s; rm -f %s)&" % (command, quote(file), + quote(file)) + os.system(command) + return 1 + + # implementations + + class DisplayViewer(UnixViewer): + def get_command_ex(self, file, **options): + command = executable = "display" + return command, executable + + if which("display"): + register(DisplayViewer) + + class XVViewer(UnixViewer): + def get_command_ex(self, file, title=None, **options): + # note: xv is pretty outdated. most modern systems have + # imagemagick's display command instead. + command = executable = "xv" + if title: + command += " -name %s" % quote(title) + return command, executable + + if which("xv"): + register(XVViewer) + +if __name__ == "__main__": + # usage: python ImageShow.py imagefile [title] + print(show(Image.open(sys.argv[1]), *sys.argv[2:])) diff --git a/Lib/site-packages/PIL/ImageStat.py b/Lib/site-packages/PIL/ImageStat.py new file mode 100644 index 0000000..f3c138b --- /dev/null +++ b/Lib/site-packages/PIL/ImageStat.py @@ -0,0 +1,147 @@ +# +# The Python Imaging Library. +# $Id$ +# +# global image statistics +# +# History: +# 1996-04-05 fl Created +# 1997-05-21 fl Added mask; added rms, var, stddev attributes +# 1997-08-05 fl Added median +# 1998-07-05 hk Fixed integer overflow error +# +# Notes: +# This class shows how to implement delayed evaluation of attributes. +# To get a certain value, simply access the corresponding attribute. +# The __getattr__ dispatcher takes care of the rest. +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996-97. +# +# See the README file for information on usage and redistribution. 
+# + +import math +import operator +import functools + + +class Stat(object): + + def __init__(self, image_or_list, mask=None): + try: + if mask: + self.h = image_or_list.histogram(mask) + else: + self.h = image_or_list.histogram() + except AttributeError: + self.h = image_or_list # assume it to be a histogram list + if not isinstance(self.h, list): + raise TypeError("first argument must be image or list") + self.bands = list(range(len(self.h) // 256)) + + def __getattr__(self, id): + "Calculate missing attribute" + if id[:4] == "_get": + raise AttributeError(id) + # calculate missing attribute + v = getattr(self, "_get" + id)() + setattr(self, id, v) + return v + + def _getextrema(self): + "Get min/max values for each band in the image" + + def minmax(histogram): + n = 255 + x = 0 + for i in range(256): + if histogram[i]: + n = min(n, i) + x = max(x, i) + return n, x # returns (255, 0) if there's no data in the histogram + + v = [] + for i in range(0, len(self.h), 256): + v.append(minmax(self.h[i:])) + return v + + def _getcount(self): + "Get total number of pixels in each layer" + + v = [] + for i in range(0, len(self.h), 256): + v.append(functools.reduce(operator.add, self.h[i:i+256])) + return v + + def _getsum(self): + "Get sum of all pixels in each layer" + + v = [] + for i in range(0, len(self.h), 256): + layerSum = 0.0 + for j in range(256): + layerSum += j * self.h[i + j] + v.append(layerSum) + return v + + def _getsum2(self): + "Get squared sum of all pixels in each layer" + + v = [] + for i in range(0, len(self.h), 256): + sum2 = 0.0 + for j in range(256): + sum2 += (j ** 2) * float(self.h[i + j]) + v.append(sum2) + return v + + def _getmean(self): + "Get average pixel level for each layer" + + v = [] + for i in self.bands: + v.append(self.sum[i] / self.count[i]) + return v + + def _getmedian(self): + "Get median pixel level for each layer" + + v = [] + for i in self.bands: + s = 0 + l = self.count[i]//2 + b = i * 256 + for j in range(256): + s = s + self.h[b+j] + if s > l: + break + v.append(j) + return v + + def _getrms(self): + "Get RMS for each layer" + + v = [] + for i in self.bands: + v.append(math.sqrt(self.sum2[i] / self.count[i])) + return v + + def _getvar(self): + "Get variance for each layer" + + v = [] + for i in self.bands: + n = self.count[i] + v.append((self.sum2[i]-(self.sum[i]**2.0)/n)/n) + return v + + def _getstddev(self): + "Get standard deviation for each layer" + + v = [] + for i in self.bands: + v.append(math.sqrt(self.var[i])) + return v + +Global = Stat # compatibility diff --git a/Lib/site-packages/PIL/ImageTk.py b/Lib/site-packages/PIL/ImageTk.py new file mode 100644 index 0000000..68d388e --- /dev/null +++ b/Lib/site-packages/PIL/ImageTk.py @@ -0,0 +1,292 @@ +# +# The Python Imaging Library. 
+# $Id$ +# +# a Tk display interface +# +# History: +# 96-04-08 fl Created +# 96-09-06 fl Added getimage method +# 96-11-01 fl Rewritten, removed image attribute and crop method +# 97-05-09 fl Use PyImagingPaste method instead of image type +# 97-05-12 fl Minor tweaks to match the IFUNC95 interface +# 97-05-17 fl Support the "pilbitmap" booster patch +# 97-06-05 fl Added file= and data= argument to image constructors +# 98-03-09 fl Added width and height methods to Image classes +# 98-07-02 fl Use default mode for "P" images without palette attribute +# 98-07-02 fl Explicitly destroy Tkinter image objects +# 99-07-24 fl Support multiple Tk interpreters (from Greg Couch) +# 99-07-26 fl Automatically hook into Tkinter (if possible) +# 99-08-15 fl Hook uses _imagingtk instead of _imaging +# +# Copyright (c) 1997-1999 by Secret Labs AB +# Copyright (c) 1996-1997 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +try: + import tkinter +except ImportError: + import Tkinter + tkinter = Tkinter + del Tkinter + +from PIL import Image + + +# -------------------------------------------------------------------- +# Check for Tkinter interface hooks + +_pilbitmap_ok = None + + +def _pilbitmap_check(): + global _pilbitmap_ok + if _pilbitmap_ok is None: + try: + im = Image.new("1", (1, 1)) + tkinter.BitmapImage(data="PIL:%d" % im.im.id) + _pilbitmap_ok = 1 + except tkinter.TclError: + _pilbitmap_ok = 0 + return _pilbitmap_ok + + +# -------------------------------------------------------------------- +# PhotoImage + +class PhotoImage(object): + """ + A Tkinter-compatible photo image. This can be used + everywhere Tkinter expects an image object. If the image is an RGBA + image, pixels having alpha 0 are treated as transparent. + + The constructor takes either a PIL image, or a mode and a size. + Alternatively, you can use the **file** or **data** options to initialize + the photo image object. + + :param image: Either a PIL image, or a mode string. If a mode string is + used, a size must also be given. + :param size: If the first argument is a mode string, this defines the size + of the image. + :keyword file: A filename to load the image from (using + ``Image.open(file)``). + :keyword data: An 8-bit string containing image data (as loaded from an + image file). + """ + + def __init__(self, image=None, size=None, **kw): + + # Tk compatibility: file or data + if image is None: + if "file" in kw: + image = Image.open(kw["file"]) + del kw["file"] + elif "data" in kw: + from io import BytesIO + image = Image.open(BytesIO(kw["data"])) + del kw["data"] + + if hasattr(image, "mode") and hasattr(image, "size"): + # got an image instead of a mode + mode = image.mode + if mode == "P": + # palette mapped data + image.load() + try: + mode = image.palette.mode + except AttributeError: + mode = "RGB" # default + size = image.size + kw["width"], kw["height"] = size + else: + mode = image + image = None + + if mode not in ["1", "L", "RGB", "RGBA"]: + mode = Image.getmodebase(mode) + + self.__mode = mode + self.__size = size + self.__photo = tkinter.PhotoImage(**kw) + self.tk = self.__photo.tk + if image: + self.paste(image) + + def __del__(self): + name = self.__photo.name + self.__photo.name = None + try: + self.__photo.tk.call("image", "delete", name) + except: + pass # ignore internal errors + + def __str__(self): + """ + Get the Tkinter photo image identifier. This method is automatically + called by Tkinter whenever a PhotoImage object is passed to a Tkinter + method. 
+ + :return: A Tkinter photo image identifier (a string). + """ + return str(self.__photo) + + def width(self): + """ + Get the width of the image. + + :return: The width, in pixels. + """ + return self.__size[0] + + def height(self): + """ + Get the height of the image. + + :return: The height, in pixels. + """ + return self.__size[1] + + def paste(self, im, box=None): + """ + Paste a PIL image into the photo image. Note that this can + be very slow if the photo image is displayed. + + :param im: A PIL image. The size must match the target region. If the + mode does not match, the image is converted to the mode of + the bitmap image. + :param box: A 4-tuple defining the left, upper, right, and lower pixel + coordinate. If None is given instead of a tuple, all of + the image is assumed. + """ + + # convert to blittable + im.load() + image = im.im + if image.isblock() and im.mode == self.__mode: + block = image + else: + block = image.new_block(self.__mode, im.size) + image.convert2(block, image) # convert directly between buffers + + tk = self.__photo.tk + + try: + tk.call("PyImagingPhoto", self.__photo, block.id) + except tkinter.TclError: + # activate Tkinter hook + try: + from PIL import _imagingtk + try: + _imagingtk.tkinit(tk.interpaddr(), 1) + except AttributeError: + _imagingtk.tkinit(id(tk), 0) + tk.call("PyImagingPhoto", self.__photo, block.id) + except (ImportError, AttributeError, tkinter.TclError): + raise # configuration problem; cannot attach to Tkinter + +# -------------------------------------------------------------------- +# BitmapImage + + +class BitmapImage(object): + """ + + A Tkinter-compatible bitmap image. This can be used everywhere Tkinter + expects an image object. + + The given image must have mode "1". Pixels having value 0 are treated as + transparent. Options, if any, are passed on to Tkinter. The most commonly + used option is **foreground**, which is used to specify the color for the + non-transparent parts. See the Tkinter documentation for information on + how to specify colours. + + :param image: A PIL image. + """ + + def __init__(self, image=None, **kw): + + # Tk compatibility: file or data + if image is None: + if "file" in kw: + image = Image.open(kw["file"]) + del kw["file"] + elif "data" in kw: + from io import BytesIO + image = Image.open(BytesIO(kw["data"])) + del kw["data"] + + self.__mode = image.mode + self.__size = image.size + + if _pilbitmap_check(): + # fast way (requires the pilbitmap booster patch) + image.load() + kw["data"] = "PIL:%d" % image.im.id + self.__im = image # must keep a reference + else: + # slow but safe way + kw["data"] = image.tobitmap() + self.__photo = tkinter.BitmapImage(**kw) + + def __del__(self): + name = self.__photo.name + self.__photo.name = None + try: + self.__photo.tk.call("image", "delete", name) + except: + pass # ignore internal errors + + def width(self): + """ + Get the width of the image. + + :return: The width, in pixels. + """ + return self.__size[0] + + def height(self): + """ + Get the height of the image. + + :return: The height, in pixels. + """ + return self.__size[1] + + def __str__(self): + """ + Get the Tkinter bitmap image identifier. This method is automatically + called by Tkinter whenever a BitmapImage object is passed to a Tkinter + method. + + :return: A Tkinter bitmap image identifier (a string). 
+ """ + return str(self.__photo) + + +def getimage(photo): + """Copies the contents of a PhotoImage to a PIL image memory.""" + photo.tk.call("PyImagingPhotoGet", photo) + + +# -------------------------------------------------------------------- +# Helper for the Image.show method. + +def _show(image, title): + + class UI(tkinter.Label): + def __init__(self, master, im): + if im.mode == "1": + self.image = BitmapImage(im, foreground="white", master=master) + else: + self.image = PhotoImage(im, master=master) + tkinter.Label.__init__(self, master, image=self.image, + bg="black", bd=0) + + if not tkinter._default_root: + raise IOError("tkinter not initialized") + top = tkinter.Toplevel() + if title: + top.title(title) + UI(top, image).pack() diff --git a/Lib/site-packages/PIL/ImageTransform.py b/Lib/site-packages/PIL/ImageTransform.py new file mode 100644 index 0000000..81f9050 --- /dev/null +++ b/Lib/site-packages/PIL/ImageTransform.py @@ -0,0 +1,103 @@ +# +# The Python Imaging Library. +# $Id$ +# +# transform wrappers +# +# History: +# 2002-04-08 fl Created +# +# Copyright (c) 2002 by Secret Labs AB +# Copyright (c) 2002 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image + + +class Transform(Image.ImageTransformHandler): + def __init__(self, data): + self.data = data + + def getdata(self): + return self.method, self.data + + def transform(self, size, image, **options): + # can be overridden + method, data = self.getdata() + return image.transform(size, method, data, **options) + + +## +# Define an affine image transform. +#
+# This function takes a 6-tuple (a, b, c, d, e, f) which +# contain the first two rows from an affine transform matrix. For +# each pixel (x, y) in the output image, the new value is +# taken from a position (a x + b y + c, +# d x + e y + f) in the input image, rounded to +# nearest pixel. +#
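+# For example, data (1, 0, dx, 0, 1, dy) samples input pixel (x + dx, y + dy)
+# for each output pixel (x, y), shifting the visible content by (-dx, -dy).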
+# This function can be used to scale, translate, rotate, and shear the +# original image. +# +# @def AffineTransform(matrix) +# @param matrix A 6-tuple (a, b, c, d, e, f) containing +# the first two rows from an affine transform matrix. +# @see Image#Image.transform + + +class AffineTransform(Transform): + method = Image.AFFINE + + +## +# Define a transform to extract a subregion from an image. +#
+# Maps a rectangle (defined by two corners) from the image to a +# rectangle of the given size. The resulting image will contain +# data sampled from between the corners, such that (x0, y0) +# in the input image will end up at (0,0) in the output image, +# and (x1, y1) at size. +#
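+# For example, ExtentTransform((0, 0, w // 2, h // 2)) with an output size of
+# (w, h) magnifies the upper-left quadrant of the source to fill the result.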
+# This method can be used to crop, stretch, shrink, or mirror an +# arbitrary rectangle in the current image. It is slightly slower than +# crop, but about as fast as a corresponding resize +# operation. +# +# @def ExtentTransform(bbox) +# @param bbox A 4-tuple (x0, y0, x1, y1) which specifies +# two points in the input image's coordinate system. +# @see Image#Image.transform + +class ExtentTransform(Transform): + method = Image.EXTENT + + +## +# Define an quad image transform. +#
+# Maps a quadrilateral (a region defined by four corners) from the +# image to a rectangle of the given size. +# +# @def QuadTransform(xy) +# @param xy An 8-tuple (x0, y0, x1, y1, x2, y2, y3, y3) which +# contain the upper left, lower left, lower right, and upper right +# corner of the source quadrilateral. +# @see Image#Image.transform + +class QuadTransform(Transform): + method = Image.QUAD + + +## +# Define an mesh image transform. A mesh transform consists of one +# or more individual quad transforms. +# +# @def MeshTransform(data) +# @param data A list of (bbox, quad) tuples. +# @see Image#Image.transform + +class MeshTransform(Transform): + method = Image.MESH diff --git a/Lib/site-packages/PIL/ImageWin.py b/Lib/site-packages/PIL/ImageWin.py new file mode 100644 index 0000000..58894d6 --- /dev/null +++ b/Lib/site-packages/PIL/ImageWin.py @@ -0,0 +1,239 @@ +# +# The Python Imaging Library. +# $Id$ +# +# a Windows DIB display interface +# +# History: +# 1996-05-20 fl Created +# 1996-09-20 fl Fixed subregion exposure +# 1997-09-21 fl Added draw primitive (for tzPrint) +# 2003-05-21 fl Added experimental Window/ImageWindow classes +# 2003-09-05 fl Added fromstring/tostring methods +# +# Copyright (c) Secret Labs AB 1997-2003. +# Copyright (c) Fredrik Lundh 1996-2003. +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image + + +class HDC(object): + """ + Wraps an HDC integer. The resulting object can be passed to the + :py:meth:`~PIL.ImageWin.Dib.draw` and :py:meth:`~PIL.ImageWin.Dib.expose` + methods. + """ + def __init__(self, dc): + self.dc = dc + + def __int__(self): + return self.dc + + +class HWND(object): + """ + Wraps an HWND integer. The resulting object can be passed to the + :py:meth:`~PIL.ImageWin.Dib.draw` and :py:meth:`~PIL.ImageWin.Dib.expose` + methods, instead of a DC. + """ + def __init__(self, wnd): + self.wnd = wnd + + def __int__(self): + return self.wnd + + +class Dib(object): + """ + A Windows bitmap with the given mode and size. The mode can be one of "1", + "L", "P", or "RGB". + + If the display requires a palette, this constructor creates a suitable + palette and associates it with the image. For an "L" image, 128 greylevels + are allocated. For an "RGB" image, a 6x6x6 colour cube is used, together + with 20 greylevels. + + To make sure that palettes work properly under Windows, you must call the + **palette** method upon certain events from Windows. + + :param image: Either a PIL image, or a mode string. If a mode string is + used, a size must also be given. The mode can be one of "1", + "L", "P", or "RGB". + :param size: If the first argument is a mode string, this + defines the size of the image. + """ + + def __init__(self, image, size=None): + if hasattr(image, "mode") and hasattr(image, "size"): + mode = image.mode + size = image.size + else: + mode = image + image = None + if mode not in ["1", "L", "P", "RGB"]: + mode = Image.getmodebase(mode) + self.image = Image.core.display(mode, size) + self.mode = mode + self.size = size + if image: + self.paste(image) + + def expose(self, handle): + """ + Copy the bitmap contents to a device context. + + :param handle: Device context (HDC), cast to a Python integer, or an + HDC or HWND instance. In PythonWin, you can use the + :py:meth:`CDC.GetHandleAttrib` to get a suitable handle. 
+ """ + if isinstance(handle, HWND): + dc = self.image.getdc(handle) + try: + result = self.image.expose(dc) + finally: + self.image.releasedc(handle, dc) + else: + result = self.image.expose(handle) + return result + + def draw(self, handle, dst, src=None): + """ + Same as expose, but allows you to specify where to draw the image, and + what part of it to draw. + + The destination and source areas are given as 4-tuple rectangles. If + the source is omitted, the entire image is copied. If the source and + the destination have different sizes, the image is resized as + necessary. + """ + if not src: + src = (0, 0) + self.size + if isinstance(handle, HWND): + dc = self.image.getdc(handle) + try: + result = self.image.draw(dc, dst, src) + finally: + self.image.releasedc(handle, dc) + else: + result = self.image.draw(handle, dst, src) + return result + + def query_palette(self, handle): + """ + Installs the palette associated with the image in the given device + context. + + This method should be called upon **QUERYNEWPALETTE** and + **PALETTECHANGED** events from Windows. If this method returns a + non-zero value, one or more display palette entries were changed, and + the image should be redrawn. + + :param handle: Device context (HDC), cast to a Python integer, or an + HDC or HWND instance. + :return: A true value if one or more entries were changed (this + indicates that the image should be redrawn). + """ + if isinstance(handle, HWND): + handle = self.image.getdc(handle) + try: + result = self.image.query_palette(handle) + finally: + self.image.releasedc(handle, handle) + else: + result = self.image.query_palette(handle) + return result + + def paste(self, im, box=None): + """ + Paste a PIL image into the bitmap image. + + :param im: A PIL image. The size must match the target region. + If the mode does not match, the image is converted to the + mode of the bitmap image. + :param box: A 4-tuple defining the left, upper, right, and + lower pixel coordinate. If None is given instead of a + tuple, all of the image is assumed. + """ + im.load() + if self.mode != im.mode: + im = im.convert(self.mode) + if box: + self.image.paste(im.im, box) + else: + self.image.paste(im.im) + + def frombytes(self, buffer): + """ + Load display memory contents from byte data. + + :param buffer: A buffer containing display data (usually + data returned from tobytes) + """ + return self.image.frombytes(buffer) + + def tobytes(self): + """ + Copy display memory contents to bytes object. + + :return: A bytes object containing display data. + """ + return self.image.tobytes() + + def fromstring(self, *args, **kw): + raise Exception("fromstring() has been removed. " + + "Please use frombytes() instead.") + + def tostring(self, *args, **kw): + raise Exception("tostring() has been removed. " + + "Please use tobytes() instead.") + + +## +# Create a Window with the given title size. 
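A minimal sketch of driving the Dib methods above from a paint handler. The window handle, the file name, and the handler itself are illustrative assumptions; nothing here is wired to a real Win32 message loop:

    from PIL import Image, ImageWin

    im = Image.open("photo.jpg")              # hypothetical input file
    dib = ImageWin.Dib(im)                    # copy the image into display memory

    def on_paint(hwnd):                       # hwnd: integer HWND from the message loop
        dib.draw(ImageWin.HWND(hwnd), (0, 0) + im.size)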
+ +class Window(object): + + def __init__(self, title="PIL", width=None, height=None): + self.hwnd = Image.core.createwindow( + title, self.__dispatcher, width or 0, height or 0 + ) + + def __dispatcher(self, action, *args): + return getattr(self, "ui_handle_" + action)(*args) + + def ui_handle_clear(self, dc, x0, y0, x1, y1): + pass + + def ui_handle_damage(self, x0, y0, x1, y1): + pass + + def ui_handle_destroy(self): + pass + + def ui_handle_repair(self, dc, x0, y0, x1, y1): + pass + + def ui_handle_resize(self, width, height): + pass + + def mainloop(self): + Image.core.eventloop() + + +## +# Create an image window which displays the given image. + +class ImageWindow(Window): + + def __init__(self, image, title="PIL"): + if not isinstance(image, Dib): + image = Dib(image) + self.image = image + width, height = image.size + Window.__init__(self, title, width=width, height=height) + + def ui_handle_repair(self, dc, x0, y0, x1, y1): + self.image.draw(dc, (x0, y0, x1, y1)) diff --git a/Lib/site-packages/PIL/ImtImagePlugin.py b/Lib/site-packages/PIL/ImtImagePlugin.py new file mode 100644 index 0000000..63e8924 --- /dev/null +++ b/Lib/site-packages/PIL/ImtImagePlugin.py @@ -0,0 +1,95 @@ +# +# The Python Imaging Library. +# $Id$ +# +# IM Tools support for PIL +# +# history: +# 1996-05-27 fl Created (read 8-bit images only) +# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.2) +# +# Copyright (c) Secret Labs AB 1997-2001. +# Copyright (c) Fredrik Lundh 1996-2001. +# +# See the README file for information on usage and redistribution. +# + + +import re + +from PIL import Image, ImageFile + +__version__ = "0.2" + + +# +# -------------------------------------------------------------------- + +field = re.compile(br"([a-z]*) ([^ \r\n]*)") + + +## +# Image plugin for IM Tools images. + +class ImtImageFile(ImageFile.ImageFile): + + format = "IMT" + format_description = "IM Tools" + + def _open(self): + + # Quick rejection: if there's not a LF among the first + # 100 bytes, this is (probably) not a text header. + + if b"\n" not in self.fp.read(100): + raise SyntaxError("not an IM file") + self.fp.seek(0) + + xsize = ysize = 0 + + while True: + + s = self.fp.read(1) + if not s: + break + + if s == b'\x0C': + + # image data begins + self.tile = [("raw", (0, 0)+self.size, + self.fp.tell(), + (self.mode, 0, 1))] + + break + + else: + + # read key/value pair + # FIXME: dangerous, may read whole file + s = s + self.fp.readline() + if len(s) == 1 or len(s) > 100: + break + if s[0] == b"*": + continue # comment + + m = field.match(s) + if not m: + break + k, v = m.group(1, 2) + if k == "width": + xsize = int(v) + self.size = xsize, ysize + elif k == "height": + ysize = int(v) + self.size = xsize, ysize + elif k == "pixel" and v == "n8": + self.mode = "L" + + +# +# -------------------------------------------------------------------- + +Image.register_open(ImtImageFile.format, ImtImageFile) + +# +# no extension registered (".im" is simply too common) diff --git a/Lib/site-packages/PIL/IptcImagePlugin.py b/Lib/site-packages/PIL/IptcImagePlugin.py new file mode 100644 index 0000000..56d1de4 --- /dev/null +++ b/Lib/site-packages/PIL/IptcImagePlugin.py @@ -0,0 +1,267 @@ +# +# The Python Imaging Library. +# $Id$ +# +# IPTC/NAA file handling +# +# history: +# 1995-10-01 fl Created +# 1998-03-09 fl Cleaned up and added to PIL +# 2002-06-18 fl Added getiptcinfo helper +# +# Copyright (c) Secret Labs AB 1997-2002. +# Copyright (c) Fredrik Lundh 1995. 
+# +# See the README file for information on usage and redistribution. +# + +from __future__ import print_function + +from PIL import Image, ImageFile, _binary +import os +import tempfile + +__version__ = "0.3" + +i8 = _binary.i8 +i16 = _binary.i16be +i32 = _binary.i32be +o8 = _binary.o8 + +COMPRESSION = { + 1: "raw", + 5: "jpeg" +} + +PAD = o8(0) * 4 + + +# +# Helpers + +def i(c): + return i32((PAD + c)[-4:]) + + +def dump(c): + for i in c: + print("%02x" % i8(i), end=' ') + print() + + +## +# Image plugin for IPTC/NAA datastreams. To read IPTC/NAA fields +# from TIFF and JPEG files, use the getiptcinfo function. + +class IptcImageFile(ImageFile.ImageFile): + + format = "IPTC" + format_description = "IPTC/NAA" + + def getint(self, key): + return i(self.info[key]) + + def field(self): + # + # get a IPTC field header + s = self.fp.read(5) + if not len(s): + return None, 0 + + tag = i8(s[1]), i8(s[2]) + + # syntax + if i8(s[0]) != 0x1C or tag[0] < 1 or tag[0] > 9: + raise SyntaxError("invalid IPTC/NAA file") + + # field size + size = i8(s[3]) + if size > 132: + raise IOError("illegal field length in IPTC/NAA file") + elif size == 128: + size = 0 + elif size > 128: + size = i(self.fp.read(size-128)) + else: + size = i16(s[3:]) + + return tag, size + + def _open(self): + + # load descriptive fields + while True: + offset = self.fp.tell() + tag, size = self.field() + if not tag or tag == (8, 10): + break + if size: + tagdata = self.fp.read(size) + else: + tagdata = None + if tag in list(self.info.keys()): + if isinstance(self.info[tag], list): + self.info[tag].append(tagdata) + else: + self.info[tag] = [self.info[tag], tagdata] + else: + self.info[tag] = tagdata + + # print tag, self.info[tag] + + # mode + layers = i8(self.info[(3, 60)][0]) + component = i8(self.info[(3, 60)][1]) + if (3, 65) in self.info: + id = i8(self.info[(3, 65)][0])-1 + else: + id = 0 + if layers == 1 and not component: + self.mode = "L" + elif layers == 3 and component: + self.mode = "RGB"[id] + elif layers == 4 and component: + self.mode = "CMYK"[id] + + # size + self.size = self.getint((3, 20)), self.getint((3, 30)) + + # compression + try: + compression = COMPRESSION[self.getint((3, 120))] + except KeyError: + raise IOError("Unknown IPTC image compression") + + # tile + if tag == (8, 10): + self.tile = [("iptc", (compression, offset), + (0, 0, self.size[0], self.size[1]))] + + def load(self): + + if len(self.tile) != 1 or self.tile[0][0] != "iptc": + return ImageFile.ImageFile.load(self) + + type, tile, box = self.tile[0] + + encoding, offset = tile + + self.fp.seek(offset) + + # Copy image data to temporary file + o_fd, outfile = tempfile.mkstemp(text=False) + o = os.fdopen(o_fd) + if encoding == "raw": + # To simplify access to the extracted file, + # prepend a PPM header + o.write("P5\n%d %d\n255\n" % self.size) + while True: + type, size = self.field() + if type != (8, 10): + break + while size > 0: + s = self.fp.read(min(size, 8192)) + if not s: + break + o.write(s) + size -= len(s) + o.close() + + try: + try: + # fast + self.im = Image.core.open_ppm(outfile) + except: + # slightly slower + im = Image.open(outfile) + im.load() + self.im = im.im + finally: + try: + os.unlink(outfile) + except OSError: + pass + + +Image.register_open(IptcImageFile.format, IptcImageFile) + +Image.register_extension(IptcImageFile.format, ".iim") + + +## +# Get IPTC information from TIFF, JPEG, or IPTC file. +# +# @param im An image containing IPTC data. 
+# @return A dictionary containing IPTC information, or None if +# no IPTC information block was found. + +def getiptcinfo(im): + + from PIL import TiffImagePlugin, JpegImagePlugin + import io + + data = None + + if isinstance(im, IptcImageFile): + # return info dictionary right away + return im.info + + elif isinstance(im, JpegImagePlugin.JpegImageFile): + # extract the IPTC/NAA resource + try: + app = im.app["APP13"] + if app[:14] == b"Photoshop 3.0\x00": + app = app[14:] + # parse the image resource block + offset = 0 + while app[offset:offset+4] == b"8BIM": + offset += 4 + # resource code + code = i16(app, offset) + offset += 2 + # resource name (usually empty) + name_len = i8(app[offset]) + # name = app[offset+1:offset+1+name_len] + offset = 1 + offset + name_len + if offset & 1: + offset += 1 + # resource data block + size = i32(app, offset) + offset += 4 + if code == 0x0404: + # 0x0404 contains IPTC/NAA data + data = app[offset:offset+size] + break + offset = offset + size + if offset & 1: + offset += 1 + except (AttributeError, KeyError): + pass + + elif isinstance(im, TiffImagePlugin.TiffImageFile): + # get raw data from the IPTC/NAA tag (PhotoShop tags the data + # as 4-byte integers, so we cannot use the get method...) + try: + data = im.tag.tagdata[TiffImagePlugin.IPTC_NAA_CHUNK] + except (AttributeError, KeyError): + pass + + if data is None: + return None # no properties + + # create an IptcImagePlugin object without initializing it + class FakeImage(object): + pass + im = FakeImage() + im.__class__ = IptcImageFile + + # parse the IPTC information chunk + im.info = {} + im.fp = io.BytesIO(data) + + try: + im._open() + except (IndexError, KeyError): + pass # expected failure + + return im.info diff --git a/Lib/site-packages/PIL/Jpeg2KImagePlugin.py b/Lib/site-packages/PIL/Jpeg2KImagePlugin.py new file mode 100644 index 0000000..b82acdd --- /dev/null +++ b/Lib/site-packages/PIL/Jpeg2KImagePlugin.py @@ -0,0 +1,276 @@ +# +# The Python Imaging Library +# $Id$ +# +# JPEG2000 file handling +# +# History: +# 2014-03-12 ajh Created +# +# Copyright (c) 2014 Coriolis Systems Limited +# Copyright (c) 2014 Alastair Houghton +# +# See the README file for information on usage and redistribution. 
+# +from PIL import Image, ImageFile +import struct +import os +import io + +__version__ = "0.1" + + +def _parse_codestream(fp): + """Parse the JPEG 2000 codestream to extract the size and component + count from the SIZ marker segment, returning a PIL (size, mode) tuple.""" + + hdr = fp.read(2) + lsiz = struct.unpack('>H', hdr)[0] + siz = hdr + fp.read(lsiz - 2) + lsiz, rsiz, xsiz, ysiz, xosiz, yosiz, xtsiz, ytsiz, \ + xtosiz, ytosiz, csiz \ + = struct.unpack('>HHIIIIIIIIH', siz[:38]) + ssiz = [None]*csiz + xrsiz = [None]*csiz + yrsiz = [None]*csiz + for i in range(csiz): + ssiz[i], xrsiz[i], yrsiz[i] \ + = struct.unpack('>BBB', siz[36 + 3 * i:39 + 3 * i]) + + size = (xsiz - xosiz, ysiz - yosiz) + if csiz == 1: + if (yrsiz[0] & 0x7f) > 8: + mode = 'I;16' + else: + mode = 'L' + elif csiz == 2: + mode = 'LA' + elif csiz == 3: + mode = 'RGB' + elif csiz == 4: + mode = 'RGBA' + else: + mode = None + + return (size, mode) + + +def _parse_jp2_header(fp): + """Parse the JP2 header box to extract size, component count and + color space information, returning a PIL (size, mode) tuple.""" + + # Find the JP2 header box + header = None + while True: + lbox, tbox = struct.unpack('>I4s', fp.read(8)) + if lbox == 1: + lbox = struct.unpack('>Q', fp.read(8))[0] + hlen = 16 + else: + hlen = 8 + + if lbox < hlen: + raise SyntaxError('Invalid JP2 header length') + + if tbox == b'jp2h': + header = fp.read(lbox - hlen) + break + else: + fp.seek(lbox - hlen, os.SEEK_CUR) + + if header is None: + raise SyntaxError('could not find JP2 header') + + size = None + mode = None + bpc = None + + hio = io.BytesIO(header) + while True: + lbox, tbox = struct.unpack('>I4s', hio.read(8)) + if lbox == 1: + lbox = struct.unpack('>Q', hio.read(8))[0] + hlen = 16 + else: + hlen = 8 + + content = hio.read(lbox - hlen) + + if tbox == b'ihdr': + height, width, nc, bpc, c, unkc, ipr \ + = struct.unpack('>IIHBBBB', content) + size = (width, height) + if unkc: + if nc == 1 and (bpc & 0x7f) > 8: + mode = 'I;16' + elif nc == 1: + mode = 'L' + elif nc == 2: + mode = 'LA' + elif nc == 3: + mode = 'RGB' + elif nc == 4: + mode = 'RGBA' + break + elif tbox == b'colr': + meth, prec, approx = struct.unpack('>BBB', content[:3]) + if meth == 1: + cs = struct.unpack('>I', content[3:7])[0] + if cs == 16: # sRGB + if nc == 1 and (bpc & 0x7f) > 8: + mode = 'I;16' + elif nc == 1: + mode = 'L' + elif nc == 3: + mode = 'RGB' + elif nc == 4: + mode = 'RGBA' + break + elif cs == 17: # grayscale + if nc == 1 and (bpc & 0x7f) > 8: + mode = 'I;16' + elif nc == 1: + mode = 'L' + elif nc == 2: + mode = 'LA' + break + elif cs == 18: # sYCC + if nc == 3: + mode = 'RGB' + elif nc == 4: + mode = 'RGBA' + break + + return (size, mode) + +## +# Image plugin for JPEG2000 images. 
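A minimal sketch of how the header parsers above are reached once the plugin below is registered: Image.open() dispatches on the signature bytes, so both raw codestreams (.j2k) and JP2 containers (.jp2) resolve here. The file name is a placeholder, and actually decoding the pixels assumes the build includes JPEG 2000 codec support:

    from PIL import Image

    im = Image.open("input.jp2")
    print(im.format, im.mode, im.size)        # "JPEG2000" plus the mode/size from the header parser
    im.reduce = 1                             # plugin-specific attribute in this version: decode at half resolution
    im.load()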
+ + +class Jpeg2KImageFile(ImageFile.ImageFile): + format = "JPEG2000" + format_description = "JPEG 2000 (ISO 15444)" + + def _open(self): + sig = self.fp.read(4) + if sig == b'\xff\x4f\xff\x51': + self.codec = "j2k" + self.size, self.mode = _parse_codestream(self.fp) + else: + sig = sig + self.fp.read(8) + + if sig == b'\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a': + self.codec = "jp2" + self.size, self.mode = _parse_jp2_header(self.fp) + else: + raise SyntaxError('not a JPEG 2000 file') + + if self.size is None or self.mode is None: + raise SyntaxError('unable to determine size/mode') + + self.reduce = 0 + self.layers = 0 + + fd = -1 + length = -1 + + try: + fd = self.fp.fileno() + length = os.fstat(fd).st_size + except: + fd = -1 + try: + pos = self.fp.tell() + self.fp.seek(0, 2) + length = self.fp.tell() + self.fp.seek(pos, 0) + except: + length = -1 + + self.tile = [('jpeg2k', (0, 0) + self.size, 0, + (self.codec, self.reduce, self.layers, fd, length))] + + def load(self): + if self.reduce: + power = 1 << self.reduce + adjust = power >> 1 + self.size = (int((self.size[0] + adjust) / power), + int((self.size[1] + adjust) / power)) + + if self.tile: + # Update the reduce and layers settings + t = self.tile[0] + t3 = (t[3][0], self.reduce, self.layers, t[3][3], t[3][4]) + self.tile = [(t[0], (0, 0) + self.size, t[2], t3)] + + ImageFile.ImageFile.load(self) + + +def _accept(prefix): + return (prefix[:4] == b'\xff\x4f\xff\x51' or + prefix[:12] == b'\x00\x00\x00\x0cjP \x0d\x0a\x87\x0a') + + +# ------------------------------------------------------------ +# Save support + +def _save(im, fp, filename): + if filename.endswith('.j2k'): + kind = 'j2k' + else: + kind = 'jp2' + + # Get the keyword arguments + info = im.encoderinfo + + offset = info.get('offset', None) + tile_offset = info.get('tile_offset', None) + tile_size = info.get('tile_size', None) + quality_mode = info.get('quality_mode', 'rates') + quality_layers = info.get('quality_layers', None) + num_resolutions = info.get('num_resolutions', 0) + cblk_size = info.get('codeblock_size', None) + precinct_size = info.get('precinct_size', None) + irreversible = info.get('irreversible', False) + progression = info.get('progression', 'LRCP') + cinema_mode = info.get('cinema_mode', 'no') + fd = -1 + + if hasattr(fp, "fileno"): + try: + fd = fp.fileno() + except: + fd = -1 + + im.encoderconfig = ( + offset, + tile_offset, + tile_size, + quality_mode, + quality_layers, + num_resolutions, + cblk_size, + precinct_size, + irreversible, + progression, + cinema_mode, + fd + ) + + ImageFile._save(im, fp, [('jpeg2k', (0, 0)+im.size, 0, kind)]) + +# ------------------------------------------------------------ +# Registry stuff + +Image.register_open(Jpeg2KImageFile.format, Jpeg2KImageFile, _accept) +Image.register_save(Jpeg2KImageFile.format, _save) + +Image.register_extension(Jpeg2KImageFile.format, '.jp2') +Image.register_extension(Jpeg2KImageFile.format, '.j2k') +Image.register_extension(Jpeg2KImageFile.format, '.jpc') +Image.register_extension(Jpeg2KImageFile.format, '.jpf') +Image.register_extension(Jpeg2KImageFile.format, '.jpx') +Image.register_extension(Jpeg2KImageFile.format, '.j2c') + +Image.register_mime(Jpeg2KImageFile.format, 'image/jp2') +Image.register_mime(Jpeg2KImageFile.format, 'image/jpx') diff --git a/Lib/site-packages/PIL/JpegImagePlugin.py b/Lib/site-packages/PIL/JpegImagePlugin.py new file mode 100644 index 0000000..beda4f7 --- /dev/null +++ b/Lib/site-packages/PIL/JpegImagePlugin.py @@ -0,0 +1,753 @@ +# +# The Python Imaging Library. 
+# $Id$ +# +# JPEG (JFIF) file handling +# +# See "Digital Compression and Coding of Continuous-Tone Still Images, +# Part 1, Requirements and Guidelines" (CCITT T.81 / ISO 10918-1) +# +# History: +# 1995-09-09 fl Created +# 1995-09-13 fl Added full parser +# 1996-03-25 fl Added hack to use the IJG command line utilities +# 1996-05-05 fl Workaround Photoshop 2.5 CMYK polarity bug +# 1996-05-28 fl Added draft support, JFIF version (0.1) +# 1996-12-30 fl Added encoder options, added progression property (0.2) +# 1997-08-27 fl Save mode 1 images as BW (0.3) +# 1998-07-12 fl Added YCbCr to draft and save methods (0.4) +# 1998-10-19 fl Don't hang on files using 16-bit DQT's (0.4.1) +# 2001-04-16 fl Extract DPI settings from JFIF files (0.4.2) +# 2002-07-01 fl Skip pad bytes before markers; identify Exif files (0.4.3) +# 2003-04-25 fl Added experimental EXIF decoder (0.5) +# 2003-06-06 fl Added experimental EXIF GPSinfo decoder +# 2003-09-13 fl Extract COM markers +# 2009-09-06 fl Added icc_profile support (from Florian Hoech) +# 2009-03-06 fl Changed CMYK handling; always use Adobe polarity (0.6) +# 2009-03-08 fl Added subsampling support (from Justin Huff). +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1995-1996 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +import array +import struct +import io +import warnings +from struct import unpack_from +from PIL import Image, ImageFile, TiffImagePlugin, _binary +from PIL.JpegPresets import presets +from PIL._util import isStringType + +i8 = _binary.i8 +o8 = _binary.o8 +i16 = _binary.i16be +i32 = _binary.i32be + +__version__ = "0.6" + + +# +# Parser + +def Skip(self, marker): + n = i16(self.fp.read(2))-2 + ImageFile._safe_read(self.fp, n) + + +def APP(self, marker): + # + # Application marker. Store these in the APP dictionary. + # Also look for well-known application markers. + + n = i16(self.fp.read(2))-2 + s = ImageFile._safe_read(self.fp, n) + + app = "APP%d" % (marker & 15) + + self.app[app] = s # compatibility + self.applist.append((app, s)) + + if marker == 0xFFE0 and s[:4] == b"JFIF": + # extract JFIF information + self.info["jfif"] = version = i16(s, 5) # version + self.info["jfif_version"] = divmod(version, 256) + # extract JFIF properties + try: + jfif_unit = i8(s[7]) + jfif_density = i16(s, 8), i16(s, 10) + except: + pass + else: + if jfif_unit == 1: + self.info["dpi"] = jfif_density + self.info["jfif_unit"] = jfif_unit + self.info["jfif_density"] = jfif_density + elif marker == 0xFFE1 and s[:5] == b"Exif\0": + # extract Exif information (incomplete) + self.info["exif"] = s # FIXME: value will change + elif marker == 0xFFE2 and s[:5] == b"FPXR\0": + # extract FlashPix information (incomplete) + self.info["flashpix"] = s # FIXME: value will change + elif marker == 0xFFE2 and s[:12] == b"ICC_PROFILE\0": + # Since an ICC profile can be larger than the maximum size of + # a JPEG marker (64K), we need provisions to split it into + # multiple markers. The format defined by the ICC specifies + # one or more APP2 markers containing the following data: + # Identifying string ASCII "ICC_PROFILE\0" (12 bytes) + # Marker sequence number 1, 2, etc (1 byte) + # Number of markers Total of APP2's used (1 byte) + # Profile data (remainder of APP2 data) + # Decoders should use the marker sequence numbers to + # reassemble the profile, rather than assuming that the APP2 + # markers appear in the correct sequence. 
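+            # A worked example (figures are illustrative): one APP2 segment
+            # can carry at most 65533 - 12 - 2 = 65519 profile bytes once the
+            # "ICC_PROFILE\0" tag and the sequence/count bytes are deducted
+            # from the 65533-byte payload limit, so a 150000-byte profile
+            # arrives as three APP2 markers numbered 1/3, 2/3 and 3/3.  The
+            # chunks collected in self.icclist are reassembled in SOF() below.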
+ self.icclist.append(s) + elif marker == 0xFFEE and s[:5] == b"Adobe": + self.info["adobe"] = i16(s, 5) + # extract Adobe custom properties + try: + adobe_transform = i8(s[1]) + except: + pass + else: + self.info["adobe_transform"] = adobe_transform + elif marker == 0xFFE2 and s[:4] == b"MPF\0": + # extract MPO information + self.info["mp"] = s[4:] + # offset is current location minus buffer size + # plus constant header size + self.info["mpoffset"] = self.fp.tell() - n + 4 + + +def COM(self, marker): + # + # Comment marker. Store these in the APP dictionary. + n = i16(self.fp.read(2))-2 + s = ImageFile._safe_read(self.fp, n) + + self.app["COM"] = s # compatibility + self.applist.append(("COM", s)) + + +def SOF(self, marker): + # + # Start of frame marker. Defines the size and mode of the + # image. JPEG is colour blind, so we use some simple + # heuristics to map the number of layers to an appropriate + # mode. Note that this could be made a bit brighter, by + # looking for JFIF and Adobe APP markers. + + n = i16(self.fp.read(2))-2 + s = ImageFile._safe_read(self.fp, n) + self.size = i16(s[3:]), i16(s[1:]) + + self.bits = i8(s[0]) + if self.bits != 8: + raise SyntaxError("cannot handle %d-bit layers" % self.bits) + + self.layers = i8(s[5]) + if self.layers == 1: + self.mode = "L" + elif self.layers == 3: + self.mode = "RGB" + elif self.layers == 4: + self.mode = "CMYK" + else: + raise SyntaxError("cannot handle %d-layer images" % self.layers) + + if marker in [0xFFC2, 0xFFC6, 0xFFCA, 0xFFCE]: + self.info["progressive"] = self.info["progression"] = 1 + + if self.icclist: + # fixup icc profile + self.icclist.sort() # sort by sequence number + if i8(self.icclist[0][13]) == len(self.icclist): + profile = [] + for p in self.icclist: + profile.append(p[14:]) + icc_profile = b"".join(profile) + else: + icc_profile = None # wrong number of fragments + self.info["icc_profile"] = icc_profile + self.icclist = None + + for i in range(6, len(s), 3): + t = s[i:i+3] + # 4-tuples: id, vsamp, hsamp, qtable + self.layer.append((t[0], i8(t[1])//16, i8(t[1]) & 15, i8(t[2]))) + + +def DQT(self, marker): + # + # Define quantization table. Support baseline 8-bit tables + # only. Note that there might be more than one table in + # each marker. + + # FIXME: The quantization tables can be used to estimate the + # compression quality. + + n = i16(self.fp.read(2))-2 + s = ImageFile._safe_read(self.fp, n) + while len(s): + if len(s) < 65: + raise SyntaxError("bad quantization table marker") + v = i8(s[0]) + if v//16 == 0: + self.quantization[v & 15] = array.array("b", s[1:65]) + s = s[65:] + else: + return # FIXME: add code to read 16-bit tables! 
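+                # A 16-bit table (precision nibble v//16 == 1) would occupy
+                # 1 + 2*64 = 129 bytes, and its entries could be read with,
+                # e.g., struct.unpack(">64H", s[1:129]) -- an illustrative
+                # sketch only; for now the early return above simply leaves
+                # any remaining tables unparsed.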
+ # raise SyntaxError, "bad quantization table element size" + + +# +# JPEG marker table + +MARKER = { + 0xFFC0: ("SOF0", "Baseline DCT", SOF), + 0xFFC1: ("SOF1", "Extended Sequential DCT", SOF), + 0xFFC2: ("SOF2", "Progressive DCT", SOF), + 0xFFC3: ("SOF3", "Spatial lossless", SOF), + 0xFFC4: ("DHT", "Define Huffman table", Skip), + 0xFFC5: ("SOF5", "Differential sequential DCT", SOF), + 0xFFC6: ("SOF6", "Differential progressive DCT", SOF), + 0xFFC7: ("SOF7", "Differential spatial", SOF), + 0xFFC8: ("JPG", "Extension", None), + 0xFFC9: ("SOF9", "Extended sequential DCT (AC)", SOF), + 0xFFCA: ("SOF10", "Progressive DCT (AC)", SOF), + 0xFFCB: ("SOF11", "Spatial lossless DCT (AC)", SOF), + 0xFFCC: ("DAC", "Define arithmetic coding conditioning", Skip), + 0xFFCD: ("SOF13", "Differential sequential DCT (AC)", SOF), + 0xFFCE: ("SOF14", "Differential progressive DCT (AC)", SOF), + 0xFFCF: ("SOF15", "Differential spatial (AC)", SOF), + 0xFFD0: ("RST0", "Restart 0", None), + 0xFFD1: ("RST1", "Restart 1", None), + 0xFFD2: ("RST2", "Restart 2", None), + 0xFFD3: ("RST3", "Restart 3", None), + 0xFFD4: ("RST4", "Restart 4", None), + 0xFFD5: ("RST5", "Restart 5", None), + 0xFFD6: ("RST6", "Restart 6", None), + 0xFFD7: ("RST7", "Restart 7", None), + 0xFFD8: ("SOI", "Start of image", None), + 0xFFD9: ("EOI", "End of image", None), + 0xFFDA: ("SOS", "Start of scan", Skip), + 0xFFDB: ("DQT", "Define quantization table", DQT), + 0xFFDC: ("DNL", "Define number of lines", Skip), + 0xFFDD: ("DRI", "Define restart interval", Skip), + 0xFFDE: ("DHP", "Define hierarchical progression", SOF), + 0xFFDF: ("EXP", "Expand reference component", Skip), + 0xFFE0: ("APP0", "Application segment 0", APP), + 0xFFE1: ("APP1", "Application segment 1", APP), + 0xFFE2: ("APP2", "Application segment 2", APP), + 0xFFE3: ("APP3", "Application segment 3", APP), + 0xFFE4: ("APP4", "Application segment 4", APP), + 0xFFE5: ("APP5", "Application segment 5", APP), + 0xFFE6: ("APP6", "Application segment 6", APP), + 0xFFE7: ("APP7", "Application segment 7", APP), + 0xFFE8: ("APP8", "Application segment 8", APP), + 0xFFE9: ("APP9", "Application segment 9", APP), + 0xFFEA: ("APP10", "Application segment 10", APP), + 0xFFEB: ("APP11", "Application segment 11", APP), + 0xFFEC: ("APP12", "Application segment 12", APP), + 0xFFED: ("APP13", "Application segment 13", APP), + 0xFFEE: ("APP14", "Application segment 14", APP), + 0xFFEF: ("APP15", "Application segment 15", APP), + 0xFFF0: ("JPG0", "Extension 0", None), + 0xFFF1: ("JPG1", "Extension 1", None), + 0xFFF2: ("JPG2", "Extension 2", None), + 0xFFF3: ("JPG3", "Extension 3", None), + 0xFFF4: ("JPG4", "Extension 4", None), + 0xFFF5: ("JPG5", "Extension 5", None), + 0xFFF6: ("JPG6", "Extension 6", None), + 0xFFF7: ("JPG7", "Extension 7", None), + 0xFFF8: ("JPG8", "Extension 8", None), + 0xFFF9: ("JPG9", "Extension 9", None), + 0xFFFA: ("JPG10", "Extension 10", None), + 0xFFFB: ("JPG11", "Extension 11", None), + 0xFFFC: ("JPG12", "Extension 12", None), + 0xFFFD: ("JPG13", "Extension 13", None), + 0xFFFE: ("COM", "Comment", COM) +} + + +def _accept(prefix): + return prefix[0:1] == b"\377" + + +## +# Image plugin for JPEG and JFIF images. 
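The MARKER table drives the parser: each entry maps a two-byte marker code to a name, a description and an optional handler. As an illustration of the same scanning convention used by JpegImageFile._open below, this Python 3 sketch (the list_markers helper and the 'photo.jpg' path are illustrative) walks a file's marker segments and looks each one up in the table defined above:

    import struct

    def list_markers(path):
        # Print name and description of each marker segment up to SOS,
        # using the MARKER table defined above.
        with open(path, 'rb') as fp:
            if fp.read(2) != b'\xff\xd8':
                raise ValueError('not a JPEG file')
            while True:
                byte = fp.read(1)
                if not byte:
                    break
                if byte != b'\xff':
                    continue                      # padding or stray byte
                nxt = fp.read(1)
                if not nxt:
                    break
                marker = 0xFF00 | nxt[0]
                if marker in (0xFF00, 0xFFFF):
                    continue                      # stuffed byte / fill byte
                name, description, _ = MARKER.get(marker, ('?', 'unknown', None))
                print(hex(marker), name, description)
                if marker == 0xFFDA:              # SOS: compressed data follows
                    break
                if 0xFFD0 <= marker <= 0xFFD9:
                    continue                      # RSTn/SOI/EOI carry no length
                (length,) = struct.unpack('>H', fp.read(2))
                fp.seek(length - 2, 1)            # skip the segment payload

    # list_markers('photo.jpg')  # e.g. APP0, DQT, SOF0, DHT, SOS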
+ +class JpegImageFile(ImageFile.ImageFile): + + format = "JPEG" + format_description = "JPEG (ISO 10918)" + + def _open(self): + + s = self.fp.read(1) + + if i8(s) != 255: + raise SyntaxError("not a JPEG file") + + # Create attributes + self.bits = self.layers = 0 + + # JPEG specifics (internal) + self.layer = [] + self.huffman_dc = {} + self.huffman_ac = {} + self.quantization = {} + self.app = {} # compatibility + self.applist = [] + self.icclist = [] + + while True: + + i = i8(s) + if i == 0xFF: + s = s + self.fp.read(1) + i = i16(s) + else: + # Skip non-0xFF junk + s = self.fp.read(1) + continue + + if i in MARKER: + name, description, handler = MARKER[i] + # print hex(i), name, description + if handler is not None: + handler(self, i) + if i == 0xFFDA: # start of scan + rawmode = self.mode + if self.mode == "CMYK": + rawmode = "CMYK;I" # assume adobe conventions + self.tile = [("jpeg", (0, 0) + self.size, 0, + (rawmode, ""))] + # self.__offset = self.fp.tell() + break + s = self.fp.read(1) + elif i == 0 or i == 0xFFFF: + # padded marker or junk; move on + s = b"\xff" + else: + raise SyntaxError("no marker found") + + def draft(self, mode, size): + + if len(self.tile) != 1: + return + + d, e, o, a = self.tile[0] + scale = 0 + + if a[0] == "RGB" and mode in ["L", "YCbCr"]: + self.mode = mode + a = mode, "" + + if size: + scale = max(self.size[0] // size[0], self.size[1] // size[1]) + for s in [8, 4, 2, 1]: + if scale >= s: + break + e = e[0], e[1], (e[2]-e[0]+s-1)//s+e[0], (e[3]-e[1]+s-1)//s+e[1] + self.size = ((self.size[0]+s-1)//s, (self.size[1]+s-1)//s) + scale = s + + self.tile = [(d, e, o, a)] + self.decoderconfig = (scale, 0) + + return self + + def load_djpeg(self): + + # ALTERNATIVE: handle JPEGs via the IJG command line utilities + + import subprocess + import tempfile + import os + f, path = tempfile.mkstemp() + os.close(f) + if os.path.exists(self.filename): + subprocess.check_call(["djpeg", "-outfile", path, self.filename]) + else: + raise ValueError("Invalid Filename") + + try: + self.im = Image.core.open_ppm(path) + finally: + try: + os.unlink(path) + except OSError: + pass + + self.mode = self.im.mode + self.size = self.im.size + + self.tile = [] + + def _getexif(self): + return _getexif(self) + + def _getmp(self): + return _getmp(self) + + +def _fixup_dict(src_dict): + # Helper function for _getexif() + # returns a dict with any single item tuples/lists as individual values + def _fixup(value): + try: + if len(value) == 1 and type(value) != type({}): + return value[0] + except: pass + return value + + return dict([(k, _fixup(v)) for k, v in src_dict.items()]) + + +def _getexif(self): + # Extract EXIF information. This method is highly experimental, + # and is likely to be replaced with something better in a future + # version. + + # The EXIF record consists of a TIFF file embedded in a JPEG + # application marker (!). + try: + data = self.info["exif"] + except KeyError: + return None + file = io.BytesIO(data[6:]) + head = file.read(8) + # process dictionary + info = TiffImagePlugin.ImageFileDirectory_v1(head) + info.load(file) + exif = dict(_fixup_dict(info)) + # get exif extension + try: + # exif field 0x8769 is an offset pointer to the location + # of the nested embedded exif ifd. + # It should be a long, but may be corrupted. 
+ file.seek(exif[0x8769]) + except (KeyError, TypeError): + pass + else: + info = TiffImagePlugin.ImageFileDirectory_v1(head) + info.load(file) + exif.update(_fixup_dict(info)) + # get gpsinfo extension + try: + # exif field 0x8825 is an offset pointer to the location + # of the nested embedded gps exif ifd. + # It should be a long, but may be corrupted. + file.seek(exif[0x8825]) + except (KeyError, TypeError): + pass + else: + info = TiffImagePlugin.ImageFileDirectory_v1(head) + info.load(file) + exif[0x8825] = _fixup_dict(info) + + return exif + + +def _getmp(self): + # Extract MP information. This method was inspired by the "highly + # experimental" _getexif version that's been in use for years now, + # itself based on the ImageFileDirectory class in the TIFF plug-in. + + # The MP record essentially consists of a TIFF file embedded in a JPEG + # application marker. + try: + data = self.info["mp"] + except KeyError: + return None + file_contents = io.BytesIO(data) + head = file_contents.read(8) + endianness = '>' if head[:4] == b'\x4d\x4d\x00\x2a' else '<' + # process dictionary + try: + info = TiffImagePlugin.ImageFileDirectory_v2(head) + info.load(file_contents) + mp = dict(info) + except: + raise SyntaxError("malformed MP Index (unreadable directory)") + # it's an error not to have a number of images + try: + quant = mp[0xB001] + except KeyError: + raise SyntaxError("malformed MP Index (no number of images)") + # get MP entries + mpentries = [] + try: + rawmpentries = mp[0xB002] + for entrynum in range(0, quant): + unpackedentry = unpack_from( + '{0}LLLHH'.format(endianness), rawmpentries, entrynum * 16) + labels = ('Attribute', 'Size', 'DataOffset', 'EntryNo1', + 'EntryNo2') + mpentry = dict(zip(labels, unpackedentry)) + mpentryattr = { + 'DependentParentImageFlag': bool(mpentry['Attribute'] & + (1 << 31)), + 'DependentChildImageFlag': bool(mpentry['Attribute'] & + (1 << 30)), + 'RepresentativeImageFlag': bool(mpentry['Attribute'] & + (1 << 29)), + 'Reserved': (mpentry['Attribute'] & (3 << 27)) >> 27, + 'ImageDataFormat': (mpentry['Attribute'] & (7 << 24)) >> 24, + 'MPType': mpentry['Attribute'] & 0x00FFFFFF + } + if mpentryattr['ImageDataFormat'] == 0: + mpentryattr['ImageDataFormat'] = 'JPEG' + else: + raise SyntaxError("unsupported picture format in MPO") + mptypemap = { + 0x000000: 'Undefined', + 0x010001: 'Large Thumbnail (VGA Equivalent)', + 0x010002: 'Large Thumbnail (Full HD Equivalent)', + 0x020001: 'Multi-Frame Image (Panorama)', + 0x020002: 'Multi-Frame Image: (Disparity)', + 0x020003: 'Multi-Frame Image: (Multi-Angle)', + 0x030000: 'Baseline MP Primary Image' + } + mpentryattr['MPType'] = mptypemap.get(mpentryattr['MPType'], + 'Unknown') + mpentry['Attribute'] = mpentryattr + mpentries.append(mpentry) + mp[0xB002] = mpentries + except KeyError: + raise SyntaxError("malformed MP Index (bad MP Entry)") + # Next we should try and parse the individual image unique ID list; + # we don't because I've never seen this actually used in a real MPO + # file and so can't test it. 
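+    # For a typical two-frame 3D MPO (values illustrative), the dict returned
+    # here carries mp[0xB001] == 2 (number of images) and mp[0xB002], the list
+    # of per-frame entries built above with their 'Size', 'DataOffset' and
+    # decoded 'Attribute' flags; those two tags are the ones MpoImagePlugin
+    # and jpeg_factory rely on.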
+ return mp + + +# -------------------------------------------------------------------- +# stuff to save JPEG files + +RAWMODE = { + "1": "L", + "L": "L", + "RGB": "RGB", + "RGBA": "RGB", + "RGBX": "RGB", + "CMYK": "CMYK;I", # assume adobe conventions + "YCbCr": "YCbCr", +} + +zigzag_index = (0, 1, 5, 6, 14, 15, 27, 28, + 2, 4, 7, 13, 16, 26, 29, 42, + 3, 8, 12, 17, 25, 30, 41, 43, + 9, 11, 18, 24, 31, 40, 44, 53, + 10, 19, 23, 32, 39, 45, 52, 54, + 20, 22, 33, 38, 46, 51, 55, 60, + 21, 34, 37, 47, 50, 56, 59, 61, + 35, 36, 48, 49, 57, 58, 62, 63) + +samplings = {(1, 1, 1, 1, 1, 1): 0, + (2, 1, 1, 1, 1, 1): 1, + (2, 2, 1, 1, 1, 1): 2, + } + + +def convert_dict_qtables(qtables): + qtables = [qtables[key] for key in range(len(qtables)) if key in qtables] + for idx, table in enumerate(qtables): + qtables[idx] = [table[i] for i in zigzag_index] + return qtables + + +def get_sampling(im): + # There's no subsampling when image have only 1 layer + # (grayscale images) or when they are CMYK (4 layers), + # so set subsampling to default value. + # + # NOTE: currently Pillow can't encode JPEG to YCCK format. + # If YCCK support is added in the future, subsampling code will have + # to be updated (here and in JpegEncode.c) to deal with 4 layers. + if not hasattr(im, 'layers') or im.layers in (1, 4): + return -1 + sampling = im.layer[0][1:3] + im.layer[1][1:3] + im.layer[2][1:3] + return samplings.get(sampling, -1) + + +def _save(im, fp, filename): + + try: + rawmode = RAWMODE[im.mode] + except KeyError: + raise IOError("cannot write mode %s as JPEG" % im.mode) + + info = im.encoderinfo + + dpi = info.get("dpi", (0, 0)) + + quality = info.get("quality", 0) + subsampling = info.get("subsampling", -1) + qtables = info.get("qtables") + + if quality == "keep": + quality = 0 + subsampling = "keep" + qtables = "keep" + elif quality in presets: + preset = presets[quality] + quality = 0 + subsampling = preset.get('subsampling', -1) + qtables = preset.get('quantization') + elif not isinstance(quality, int): + raise ValueError("Invalid quality setting") + else: + if subsampling in presets: + subsampling = presets[subsampling].get('subsampling', -1) + if isStringType(qtables) and qtables in presets: + qtables = presets[qtables].get('quantization') + + if subsampling == "4:4:4": + subsampling = 0 + elif subsampling == "4:2:2": + subsampling = 1 + elif subsampling == "4:1:1": + subsampling = 2 + elif subsampling == "keep": + if im.format != "JPEG": + raise ValueError( + "Cannot use 'keep' when original image is not a JPEG") + subsampling = get_sampling(im) + + def validate_qtables(qtables): + if qtables is None: + return qtables + if isStringType(qtables): + try: + lines = [int(num) for line in qtables.splitlines() + for num in line.split('#', 1)[0].split()] + except ValueError: + raise ValueError("Invalid quantization table") + else: + qtables = [lines[s:s+64] for s in range(0, len(lines), 64)] + if isinstance(qtables, (tuple, list, dict)): + if isinstance(qtables, dict): + qtables = convert_dict_qtables(qtables) + elif isinstance(qtables, tuple): + qtables = list(qtables) + if not (0 < len(qtables) < 5): + raise ValueError("None or too many quantization tables") + for idx, table in enumerate(qtables): + try: + if len(table) != 64: + raise + table = array.array('b', table) + except TypeError: + raise ValueError("Invalid quantization table") + else: + qtables[idx] = list(table) + return qtables + + if qtables == "keep": + if im.format != "JPEG": + raise ValueError( + "Cannot use 'keep' when original image is not a 
JPEG") + qtables = getattr(im, "quantization", None) + qtables = validate_qtables(qtables) + + extra = b"" + + icc_profile = info.get("icc_profile") + if icc_profile: + ICC_OVERHEAD_LEN = 14 + MAX_BYTES_IN_MARKER = 65533 + MAX_DATA_BYTES_IN_MARKER = MAX_BYTES_IN_MARKER - ICC_OVERHEAD_LEN + markers = [] + while icc_profile: + markers.append(icc_profile[:MAX_DATA_BYTES_IN_MARKER]) + icc_profile = icc_profile[MAX_DATA_BYTES_IN_MARKER:] + i = 1 + for marker in markers: + size = struct.pack(">H", 2 + ICC_OVERHEAD_LEN + len(marker)) + extra += (b"\xFF\xE2" + size + b"ICC_PROFILE\0" + o8(i) + + o8(len(markers)) + marker) + i += 1 + + # get keyword arguments + im.encoderconfig = ( + quality, + # "progressive" is the official name, but older documentation + # says "progression" + # FIXME: issue a warning if the wrong form is used (post-1.1.7) + "progressive" in info or "progression" in info, + info.get("smooth", 0), + "optimize" in info, + info.get("streamtype", 0), + dpi[0], dpi[1], + subsampling, + qtables, + extra, + info.get("exif", b"") + ) + + # if we optimize, libjpeg needs a buffer big enough to hold the whole image + # in a shot. Guessing on the size, at im.size bytes. (raw pizel size is + # channels*size, this is a value that's been used in a django patch. + # https://github.com/matthewwithanm/django-imagekit/issues/50 + bufsize = 0 + if "optimize" in info or "progressive" in info or "progression" in info: + # keep sets quality to 0, but the actual value may be high. + if quality >= 95 or quality == 0: + bufsize = 2 * im.size[0] * im.size[1] + else: + bufsize = im.size[0] * im.size[1] + + # The exif info needs to be written as one block, + APP1, + one spare byte. + # Ensure that our buffer is big enough + bufsize = max(ImageFile.MAXBLOCK, bufsize, len(info.get("exif", b"")) + 5) + + ImageFile._save(im, fp, [("jpeg", (0, 0)+im.size, 0, rawmode)], bufsize) + + +def _save_cjpeg(im, fp, filename): + # ALTERNATIVE: handle JPEGs via the IJG command line utilities. + import os + import subprocess + tempfile = im._dump() + subprocess.check_call(["cjpeg", "-outfile", filename, tempfile]) + try: + os.unlink(tempfile) + except OSError: + pass + + +## +# Factory for making JPEG and MPO instances +def jpeg_factory(fp=None, filename=None): + im = JpegImageFile(fp, filename) + try: + mpheader = im._getmp() + if mpheader[45057] > 1: + # It's actually an MPO + from .MpoImagePlugin import MpoImageFile + im = MpoImageFile(fp, filename) + except (TypeError, IndexError): + # It is really a JPEG + pass + except SyntaxError: + warnings.warn("Image appears to be a malformed MPO file, it will be " + "interpreted as a base JPEG file") + return im + + +# -------------------------------------------------------------------q- +# Registry stuff + +Image.register_open(JpegImageFile.format, jpeg_factory, _accept) +Image.register_save(JpegImageFile.format, _save) + +Image.register_extension(JpegImageFile.format, ".jfif") +Image.register_extension(JpegImageFile.format, ".jpe") +Image.register_extension(JpegImageFile.format, ".jpg") +Image.register_extension(JpegImageFile.format, ".jpeg") + +Image.register_mime(JpegImageFile.format, "image/jpeg") diff --git a/Lib/site-packages/PIL/JpegPresets.py b/Lib/site-packages/PIL/JpegPresets.py new file mode 100644 index 0000000..c5a36b9 --- /dev/null +++ b/Lib/site-packages/PIL/JpegPresets.py @@ -0,0 +1,241 @@ +""" +JPEG quality settings equivalent to the Photoshop settings. + +More presets can be added to the presets dict if needed. + +Can be use when saving JPEG file. 
+ +To apply the preset, specify:: + + quality="preset_name" + +To apply only the quantization table:: + + qtables="preset_name" + +To apply only the subsampling setting:: + + subsampling="preset_name" + +Example:: + + im.save("image_name.jpg", quality="web_high") + + +Subsampling +----------- + +Subsampling is the practice of encoding images by implementing less resolution +for chroma information than for luma information. +(ref.: https://en.wikipedia.org/wiki/Chroma_subsampling) + +Possible subsampling values are 0, 1 and 2 that correspond to 4:4:4, 4:2:2 and +4:1:1 (or 4:2:0?). + +You can get the subsampling of a JPEG with the +`JpegImagePlugin.get_subsampling(im)` function. + + +Quantization tables +------------------- + +They are values use by the DCT (Discrete cosine transform) to remove +*unnecessary* information from the image (the lossy part of the compression). +(ref.: https://en.wikipedia.org/wiki/Quantization_matrix#Quantization_matrices, +https://en.wikipedia.org/wiki/JPEG#Quantization) + +You can get the quantization tables of a JPEG with:: + + im.quantization + +This will return a dict with a number of arrays. You can pass this dict +directly as the qtables argument when saving a JPEG. + +The tables format between im.quantization and quantization in presets differ in +3 ways: + +1. The base container of the preset is a list with sublists instead of dict. + dict[0] -> list[0], dict[1] -> list[1], ... +2. Each table in a preset is a list instead of an array. +3. The zigzag order is remove in the preset (needed by libjpeg >= 6a). + +You can convert the dict format to the preset format with the +`JpegImagePlugin.convert_dict_qtables(dict_qtables)` function. + +Libjpeg ref.: http://www.jpegcameras.com/libjpeg/libjpeg-3.html + +""" + +presets = { + 'web_low': {'subsampling': 2, # "4:1:1" + 'quantization': [ + [20, 16, 25, 39, 50, 46, 62, 68, + 16, 18, 23, 38, 38, 53, 65, 68, + 25, 23, 31, 38, 53, 65, 68, 68, + 39, 38, 38, 53, 65, 68, 68, 68, + 50, 38, 53, 65, 68, 68, 68, 68, + 46, 53, 65, 68, 68, 68, 68, 68, + 62, 65, 68, 68, 68, 68, 68, 68, + 68, 68, 68, 68, 68, 68, 68, 68], + [21, 25, 32, 38, 54, 68, 68, 68, + 25, 28, 24, 38, 54, 68, 68, 68, + 32, 24, 32, 43, 66, 68, 68, 68, + 38, 38, 43, 53, 68, 68, 68, 68, + 54, 54, 66, 68, 68, 68, 68, 68, + 68, 68, 68, 68, 68, 68, 68, 68, + 68, 68, 68, 68, 68, 68, 68, 68, + 68, 68, 68, 68, 68, 68, 68, 68] + ]}, + 'web_medium': {'subsampling': 2, # "4:1:1" + 'quantization': [ + [16, 11, 11, 16, 23, 27, 31, 30, + 11, 12, 12, 15, 20, 23, 23, 30, + 11, 12, 13, 16, 23, 26, 35, 47, + 16, 15, 16, 23, 26, 37, 47, 64, + 23, 20, 23, 26, 39, 51, 64, 64, + 27, 23, 26, 37, 51, 64, 64, 64, + 31, 23, 35, 47, 64, 64, 64, 64, + 30, 30, 47, 64, 64, 64, 64, 64], + [17, 15, 17, 21, 20, 26, 38, 48, + 15, 19, 18, 17, 20, 26, 35, 43, + 17, 18, 20, 22, 26, 30, 46, 53, + 21, 17, 22, 28, 30, 39, 53, 64, + 20, 20, 26, 30, 39, 48, 64, 64, + 26, 26, 30, 39, 48, 63, 64, 64, + 38, 35, 46, 53, 64, 64, 64, 64, + 48, 43, 53, 64, 64, 64, 64, 64] + ]}, + 'web_high': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [6, 4, 4, 6, 9, 11, 12, 16, + 4, 5, 5, 6, 8, 10, 12, 12, + 4, 5, 5, 6, 10, 12, 14, 19, + 6, 6, 6, 11, 12, 15, 19, 28, + 9, 8, 10, 12, 16, 20, 27, 31, + 11, 10, 12, 15, 20, 27, 31, 31, + 12, 12, 14, 19, 27, 31, 31, 31, + 16, 12, 19, 28, 31, 31, 31, 31], + [7, 7, 13, 24, 26, 31, 31, 31, + 7, 12, 16, 21, 31, 31, 31, 31, + 13, 16, 17, 31, 31, 31, 31, 31, + 24, 21, 31, 31, 31, 31, 31, 31, + 26, 31, 31, 31, 31, 31, 31, 31, + 31, 31, 31, 31, 31, 31, 31, 31, + 31, 31, 31, 
31, 31, 31, 31, 31, + 31, 31, 31, 31, 31, 31, 31, 31] + ]}, + 'web_very_high': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [2, 2, 2, 2, 3, 4, 5, 6, + 2, 2, 2, 2, 3, 4, 5, 6, + 2, 2, 2, 2, 4, 5, 7, 9, + 2, 2, 2, 4, 5, 7, 9, 12, + 3, 3, 4, 5, 8, 10, 12, 12, + 4, 4, 5, 7, 10, 12, 12, 12, + 5, 5, 7, 9, 12, 12, 12, 12, + 6, 6, 9, 12, 12, 12, 12, 12], + [3, 3, 5, 9, 13, 15, 15, 15, + 3, 4, 6, 11, 14, 12, 12, 12, + 5, 6, 9, 14, 12, 12, 12, 12, + 9, 11, 14, 12, 12, 12, 12, 12, + 13, 14, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12] + ]}, + 'web_maximum': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 2, + 1, 1, 1, 1, 1, 1, 2, 2, + 1, 1, 1, 1, 1, 2, 2, 3, + 1, 1, 1, 1, 2, 2, 3, 3, + 1, 1, 1, 2, 2, 3, 3, 3, + 1, 1, 2, 2, 3, 3, 3, 3], + [1, 1, 1, 2, 2, 3, 3, 3, + 1, 1, 1, 2, 3, 3, 3, 3, + 1, 1, 1, 3, 3, 3, 3, 3, + 2, 2, 3, 3, 3, 3, 3, 3, + 2, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3] + ]}, + 'low': {'subsampling': 2, # "4:1:1" + 'quantization': [ + [18, 14, 14, 21, 30, 35, 34, 17, + 14, 16, 16, 19, 26, 23, 12, 12, + 14, 16, 17, 21, 23, 12, 12, 12, + 21, 19, 21, 23, 12, 12, 12, 12, + 30, 26, 23, 12, 12, 12, 12, 12, + 35, 23, 12, 12, 12, 12, 12, 12, + 34, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12], + [20, 19, 22, 27, 20, 20, 17, 17, + 19, 25, 23, 14, 14, 12, 12, 12, + 22, 23, 14, 14, 12, 12, 12, 12, + 27, 14, 14, 12, 12, 12, 12, 12, + 20, 14, 12, 12, 12, 12, 12, 12, + 20, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12] + ]}, + 'medium': {'subsampling': 2, # "4:1:1" + 'quantization': [ + [12, 8, 8, 12, 17, 21, 24, 17, + 8, 9, 9, 11, 15, 19, 12, 12, + 8, 9, 10, 12, 19, 12, 12, 12, + 12, 11, 12, 21, 12, 12, 12, 12, + 17, 15, 19, 12, 12, 12, 12, 12, + 21, 19, 12, 12, 12, 12, 12, 12, + 24, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12], + [13, 11, 13, 16, 20, 20, 17, 17, + 11, 14, 14, 14, 14, 12, 12, 12, + 13, 14, 14, 14, 12, 12, 12, 12, + 16, 14, 14, 12, 12, 12, 12, 12, + 20, 14, 12, 12, 12, 12, 12, 12, + 20, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12] + ]}, + 'high': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [6, 4, 4, 6, 9, 11, 12, 16, + 4, 5, 5, 6, 8, 10, 12, 12, + 4, 5, 5, 6, 10, 12, 12, 12, + 6, 6, 6, 11, 12, 12, 12, 12, + 9, 8, 10, 12, 12, 12, 12, 12, + 11, 10, 12, 12, 12, 12, 12, 12, + 12, 12, 12, 12, 12, 12, 12, 12, + 16, 12, 12, 12, 12, 12, 12, 12], + [7, 7, 13, 24, 20, 20, 17, 17, + 7, 12, 16, 14, 14, 12, 12, 12, + 13, 16, 14, 14, 12, 12, 12, 12, + 24, 14, 14, 12, 12, 12, 12, 12, + 20, 14, 12, 12, 12, 12, 12, 12, + 20, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12, + 17, 12, 12, 12, 12, 12, 12, 12] + ]}, + 'maximum': {'subsampling': 0, # "4:4:4" + 'quantization': [ + [2, 2, 2, 2, 3, 4, 5, 6, + 2, 2, 2, 2, 3, 4, 5, 6, + 2, 2, 2, 2, 4, 5, 7, 9, + 2, 2, 2, 4, 5, 7, 9, 12, + 3, 3, 4, 5, 8, 10, 12, 12, + 4, 4, 5, 7, 10, 12, 12, 12, + 5, 5, 7, 9, 12, 12, 12, 12, + 6, 6, 9, 12, 12, 12, 12, 12], + [3, 3, 5, 9, 13, 15, 15, 15, + 3, 4, 6, 10, 14, 12, 12, 12, + 5, 6, 9, 14, 12, 12, 12, 12, + 9, 10, 14, 12, 12, 12, 12, 12, + 13, 14, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12, + 15, 12, 12, 12, 12, 12, 12, 12] + ]}, +} diff --git a/Lib/site-packages/PIL/McIdasImagePlugin.py 
b/Lib/site-packages/PIL/McIdasImagePlugin.py new file mode 100644 index 0000000..b753603 --- /dev/null +++ b/Lib/site-packages/PIL/McIdasImagePlugin.py @@ -0,0 +1,74 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Basic McIdas support for PIL +# +# History: +# 1997-05-05 fl Created (8-bit images only) +# 2009-03-08 fl Added 16/32-bit support. +# +# Thanks to Richard Jones and Craig Swank for specs and samples. +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. +# + +import struct +from PIL import Image, ImageFile + +__version__ = "0.2" + + +def _accept(s): + return s[:8] == b"\x00\x00\x00\x00\x00\x00\x00\x04" + + +## +# Image plugin for McIdas area images. + +class McIdasImageFile(ImageFile.ImageFile): + + format = "MCIDAS" + format_description = "McIdas area file" + + def _open(self): + + # parse area file directory + s = self.fp.read(256) + if not _accept(s) or len(s) != 256: + raise SyntaxError("not an McIdas area file") + + self.area_descriptor_raw = s + self.area_descriptor = w = [0] + list(struct.unpack("!64i", s)) + + # get mode + if w[11] == 1: + mode = rawmode = "L" + elif w[11] == 2: + # FIXME: add memory map support + mode = "I" + rawmode = "I;16B" + elif w[11] == 4: + # FIXME: add memory map support + mode = "I" + rawmode = "I;32B" + else: + raise SyntaxError("unsupported McIdas format") + + self.mode = mode + self.size = w[10], w[9] + + offset = w[34] + w[15] + stride = w[15] + w[10]*w[11]*w[14] + + self.tile = [("raw", (0, 0) + self.size, offset, (rawmode, stride, 1))] + +# -------------------------------------------------------------------- +# registry + +Image.register_open(McIdasImageFile.format, McIdasImageFile, _accept) + +# no default extension diff --git a/Lib/site-packages/PIL/MicImagePlugin.py b/Lib/site-packages/PIL/MicImagePlugin.py new file mode 100644 index 0000000..3c91244 --- /dev/null +++ b/Lib/site-packages/PIL/MicImagePlugin.py @@ -0,0 +1,103 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Microsoft Image Composer support for PIL +# +# Notes: +# uses TiffImagePlugin.py to read the actual image streams +# +# History: +# 97-01-20 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. +# + + +from PIL import Image, TiffImagePlugin +from PIL.OleFileIO import MAGIC, OleFileIO + +__version__ = "0.1" + + +# +# -------------------------------------------------------------------- + + +def _accept(prefix): + return prefix[:8] == MAGIC + + +## +# Image plugin for Microsoft's Image Composer file format. + +class MicImageFile(TiffImagePlugin.TiffImageFile): + + format = "MIC" + format_description = "Microsoft Image Composer" + + def _open(self): + + # read the OLE directory and see if this is a likely + # to be a Microsoft Image Composer file + + try: + self.ole = OleFileIO(self.fp) + except IOError: + raise SyntaxError("not an MIC file; invalid OLE file") + + # find ACI subfiles with Image members (maybe not the + # best way to identify MIC files, but what the... ;-) + + self.images = [] + for path in self.ole.listdir(): + if path[1:] and path[0][-4:] == ".ACI" and path[1] == "Image": + self.images.append(path) + + # if we didn't find any images, this is probably not + # an MIC file. 
+ if not self.images: + raise SyntaxError("not an MIC file; no image entries") + + self.__fp = self.fp + self.frame = 0 + + if len(self.images) > 1: + self.category = Image.CONTAINER + + self.seek(0) + + @property + def n_frames(self): + return len(self.images) + + @property + def is_animated(self): + return len(self.images) > 1 + + def seek(self, frame): + + try: + filename = self.images[frame] + except IndexError: + raise EOFError("no such frame") + + self.fp = self.ole.openstream(filename) + + TiffImagePlugin.TiffImageFile._open(self) + + self.frame = frame + + def tell(self): + + return self.frame + +# +# -------------------------------------------------------------------- + +Image.register_open(MicImageFile.format, MicImageFile, _accept) + +Image.register_extension(MicImageFile.format, ".mic") diff --git a/Lib/site-packages/PIL/MpegImagePlugin.py b/Lib/site-packages/PIL/MpegImagePlugin.py new file mode 100644 index 0000000..6671b86 --- /dev/null +++ b/Lib/site-packages/PIL/MpegImagePlugin.py @@ -0,0 +1,86 @@ +# +# The Python Imaging Library. +# $Id$ +# +# MPEG file handling +# +# History: +# 95-09-09 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1995. +# +# See the README file for information on usage and redistribution. +# + + +from PIL import Image, ImageFile +from PIL._binary import i8 + +__version__ = "0.1" + + +# +# Bitstream parser + +class BitStream(object): + + def __init__(self, fp): + self.fp = fp + self.bits = 0 + self.bitbuffer = 0 + + def next(self): + return i8(self.fp.read(1)) + + def peek(self, bits): + while self.bits < bits: + c = self.next() + if c < 0: + self.bits = 0 + continue + self.bitbuffer = (self.bitbuffer << 8) + c + self.bits += 8 + return self.bitbuffer >> (self.bits - bits) & (1 << bits) - 1 + + def skip(self, bits): + while self.bits < bits: + self.bitbuffer = (self.bitbuffer << 8) + i8(self.fp.read(1)) + self.bits += 8 + self.bits = self.bits - bits + + def read(self, bits): + v = self.peek(bits) + self.bits = self.bits - bits + return v + + +## +# Image plugin for MPEG streams. This plugin can identify a stream, +# but it cannot read it. + +class MpegImageFile(ImageFile.ImageFile): + + format = "MPEG" + format_description = "MPEG" + + def _open(self): + + s = BitStream(self.fp) + + if s.read(32) != 0x1B3: + raise SyntaxError("not an MPEG file") + + self.mode = "RGB" + self.size = s.read(12), s.read(12) + + +# -------------------------------------------------------------------- +# Registry stuff + +Image.register_open(MpegImageFile.format, MpegImageFile) + +Image.register_extension(MpegImageFile.format, ".mpg") +Image.register_extension(MpegImageFile.format, ".mpeg") + +Image.register_mime(MpegImageFile.format, "video/mpeg") diff --git a/Lib/site-packages/PIL/MpoImagePlugin.py b/Lib/site-packages/PIL/MpoImagePlugin.py new file mode 100644 index 0000000..1d26021 --- /dev/null +++ b/Lib/site-packages/PIL/MpoImagePlugin.py @@ -0,0 +1,99 @@ +# +# The Python Imaging Library. +# $Id$ +# +# MPO file handling +# +# See "Multi-Picture Format" (CIPA DC-007-Translation 2009, Standard of the +# Camera & Imaging Products Association) +# +# The multi-picture object combines multiple JPEG images (with a modified EXIF +# data format) into a single file. While it can theoretically be used much like +# a GIF animation, it is commonly used to represent 3D photographs and is (as +# of this writing) the most commonly used format by 3D cameras. 
+# +# History: +# 2014-03-13 Feneric Created +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image, JpegImagePlugin + +__version__ = "0.1" + + +def _accept(prefix): + return JpegImagePlugin._accept(prefix) + + +def _save(im, fp, filename): + # Note that we can only save the current frame at present + return JpegImagePlugin._save(im, fp, filename) + + +## +# Image plugin for MPO images. + +class MpoImageFile(JpegImagePlugin.JpegImageFile): + + format = "MPO" + format_description = "MPO (CIPA DC-007)" + + def _open(self): + self.fp.seek(0) # prep the fp in order to pass the JPEG test + JpegImagePlugin.JpegImageFile._open(self) + self.mpinfo = self._getmp() + self.__framecount = self.mpinfo[0xB001] + self.__mpoffsets = [mpent['DataOffset'] + self.info['mpoffset'] + for mpent in self.mpinfo[0xB002]] + self.__mpoffsets[0] = 0 + # Note that the following assertion will only be invalid if something + # gets broken within JpegImagePlugin. + assert self.__framecount == len(self.__mpoffsets) + del self.info['mpoffset'] # no longer needed + self.__fp = self.fp # FIXME: hack + self.__fp.seek(self.__mpoffsets[0]) # get ready to read first frame + self.__frame = 0 + self.offset = 0 + # for now we can only handle reading and individual frame extraction + self.readonly = 1 + + def load_seek(self, pos): + self.__fp.seek(pos) + + @property + def n_frames(self): + return self.__framecount + + @property + def is_animated(self): + return self.__framecount > 1 + + def seek(self, frame): + if frame < 0 or frame >= self.__framecount: + raise EOFError("no more images in MPO file") + else: + self.fp = self.__fp + self.offset = self.__mpoffsets[frame] + self.tile = [ + ("jpeg", (0, 0) + self.size, self.offset, (self.mode, "")) + ] + self.__frame = frame + + def tell(self): + return self.__frame + + +# -------------------------------------------------------------------q- +# Registry stuff + +# Note that since MPO shares a factory with JPEG, we do not need to do a +# separate registration for it here. +# Image.register_open(MpoImageFile.format, +# JpegImagePlugin.jpeg_factory, _accept) +Image.register_save(MpoImageFile.format, _save) + +Image.register_extension(MpoImageFile.format, ".mpo") + +Image.register_mime(MpoImageFile.format, "image/mpo") diff --git a/Lib/site-packages/PIL/MspImagePlugin.py b/Lib/site-packages/PIL/MspImagePlugin.py new file mode 100644 index 0000000..85f8e76 --- /dev/null +++ b/Lib/site-packages/PIL/MspImagePlugin.py @@ -0,0 +1,104 @@ +# +# The Python Imaging Library. +# $Id$ +# +# MSP file handling +# +# This is the format used by the Paint program in Windows 1 and 2. +# +# History: +# 95-09-05 fl Created +# 97-01-03 fl Read/write MSP images +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1995-97. +# +# See the README file for information on usage and redistribution. +# + + +from PIL import Image, ImageFile, _binary + +__version__ = "0.1" + + +# +# read MSP files + +i16 = _binary.i16le + + +def _accept(prefix): + return prefix[:4] in [b"DanM", b"LinS"] + + +## +# Image plugin for Windows MSP images. This plugin supports both +# uncompressed (Windows 1.0). 
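The header check in MspImageFile._open below treats the 32-byte header as sixteen little-endian 16-bit words and XORs them together; a valid file yields zero, because the stored checksum word cancels out the others. A standalone version of the same test (Python 3; the msp_checksum_ok helper and the 'image.msp' path are illustrative):

    import struct

    def msp_checksum_ok(path):
        # XOR the sixteen little-endian words of the 32-byte MSP header;
        # a valid file yields zero, as checked in MspImageFile._open.
        with open(path, 'rb') as fp:
            header = fp.read(32)
        if len(header) != 32 or header[:4] not in (b'DanM', b'LinS'):
            return False
        checksum = 0
        for word in struct.unpack('<16H', header):
            checksum ^= word
        return checksum == 0

    # msp_checksum_ok('image.msp')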
+ +class MspImageFile(ImageFile.ImageFile): + + format = "MSP" + format_description = "Windows Paint" + + def _open(self): + + # Header + s = self.fp.read(32) + if s[:4] not in [b"DanM", b"LinS"]: + raise SyntaxError("not an MSP file") + + # Header checksum + checksum = 0 + for i in range(0, 32, 2): + checksum = checksum ^ i16(s[i:i+2]) + if checksum != 0: + raise SyntaxError("bad MSP checksum") + + self.mode = "1" + self.size = i16(s[4:]), i16(s[6:]) + + if s[:4] == b"DanM": + self.tile = [("raw", (0, 0)+self.size, 32, ("1", 0, 1))] + else: + self.tile = [("msp", (0, 0)+self.size, 32+2*self.size[1], None)] + +# +# write MSP files (uncompressed only) + +o16 = _binary.o16le + + +def _save(im, fp, filename): + + if im.mode != "1": + raise IOError("cannot write mode %s as MSP" % im.mode) + + # create MSP header + header = [0] * 16 + + header[0], header[1] = i16(b"Da"), i16(b"nM") # version 1 + header[2], header[3] = im.size + header[4], header[5] = 1, 1 + header[6], header[7] = 1, 1 + header[8], header[9] = im.size + + checksum = 0 + for h in header: + checksum = checksum ^ h + header[12] = checksum # FIXME: is this the right field? + + # header + for h in header: + fp.write(o16(h)) + + # image body + ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 32, ("1", 0, 1))]) + +# +# registry + +Image.register_open(MspImageFile.format, MspImageFile, _accept) +Image.register_save(MspImageFile.format, _save) + +Image.register_extension(MspImageFile.format, ".msp") diff --git a/Lib/site-packages/PIL/OleFileIO-README.md b/Lib/site-packages/PIL/OleFileIO-README.md new file mode 100644 index 0000000..0962a5a --- /dev/null +++ b/Lib/site-packages/PIL/OleFileIO-README.md @@ -0,0 +1,180 @@ +olefile (formerly OleFileIO_PL) +=============================== + +[olefile](http://www.decalage.info/olefile) is a Python package to parse, read and write +[Microsoft OLE2 files](http://en.wikipedia.org/wiki/Compound_File_Binary_Format) +(also called Structured Storage, Compound File Binary Format or Compound Document File Format), +such as Microsoft Office 97-2003 documents, vbaProject.bin in MS Office 2007+ files, Image Composer +and FlashPix files, Outlook messages, StickyNotes, several Microscopy file formats, McAfee antivirus quarantine files, +etc. + + +**Quick links:** [Home page](http://www.decalage.info/olefile) - +[Download/Install](https://bitbucket.org/decalage/olefileio_pl/wiki/Install) - +[Documentation](https://bitbucket.org/decalage/olefileio_pl/wiki) - +[Report Issues/Suggestions/Questions](https://bitbucket.org/decalage/olefileio_pl/issues?status=new&status=open) - +[Contact the author](http://decalage.info/contact) - +[Repository](https://bitbucket.org/decalage/olefileio_pl) - +[Updates on Twitter](https://twitter.com/decalage2) + + +News +---- + +Follow all updates and news on Twitter: + +- **2015-01-25 v0.42**: improved handling of special characters in stream/storage names on Python 2.x (using UTF-8 + instead of Latin-1), fixed bug in listdir with empty storages. +- 2014-11-25 v0.41: OleFileIO.open and isOleFile now support OLE files stored in byte strings, fixed installer for + python 3, added support for Jython (Niko Ehrenfeuchter) +- 2014-10-01 v0.40: renamed OleFileIO_PL to olefile, added initial write support for streams >4K, updated doc and + license, improved the setup script. +- 2014-07-27 v0.31: fixed support for large files with 4K sectors, thanks to Niko Ehrenfeuchter, Martijn Berger and + Dave Jones. Added test scripts from Pillow (by hugovk). 
Fixed setup for Python 3 (Martin Panter) +- 2014-02-04 v0.30: now compatible with Python 3.x, thanks to Martin Panter who did most of the hard work. +- 2013-07-24 v0.26: added methods to parse stream/storage timestamps, improved listdir to include storages, fixed + parsing of direntry timestamps +- 2013-05-27 v0.25: improved metadata extraction, properties parsing and exception handling, fixed + [issue #12](https://bitbucket.org/decalage/olefileio_pl/issue/12/error-when-converting-timestamps-in-ole) +- 2013-05-07 v0.24: new features to extract metadata (get\_metadata method and OleMetadata class), improved + getproperties to convert timestamps to Python datetime +- 2012-10-09: published [python-oletools](http://www.decalage.info/python/oletools), a package of analysis tools based + on OleFileIO_PL +- 2012-09-11 v0.23: added support for file-like objects, fixed [issue #8](https://bitbucket.org/decalage/olefileio_pl/issue/8/bug-with-file-object) +- 2012-02-17 v0.22: fixed issues #7 (bug in getproperties) and #2 (added close method) +- 2011-10-20: code hosted on bitbucket to ease contributions and bug tracking +- 2010-01-24 v0.21: fixed support for big-endian CPUs, such as PowerPC Macs. +- 2009-12-11 v0.20: small bugfix in OleFileIO.open when filename is not plain str. +- 2009-12-10 v0.19: fixed support for 64 bits platforms (thanks to Ben G. and Martijn for reporting the bug) +- see changelog in source code for more info. + +Download/Install +---------------- + +If you have pip or setuptools installed (pip is included in Python 2.7.9+), you may simply run **pip install olefile** +or **easy_install olefile** for the first installation. + +To update olefile, run **pip install -U olefile**. + +Otherwise, see https://bitbucket.org/decalage/olefileio_pl/wiki/Install + +Features +-------- + +- Parse, read and write any OLE file such as Microsoft Office 97-2003 legacy document formats (Word .doc, Excel .xls, + PowerPoint .ppt, Visio .vsd, Project .mpp), Image Composer and FlashPix files, Outlook messages, StickyNotes, + Zeiss AxioVision ZVI files, Olympus FluoView OIB files, etc +- List all the streams and storages contained in an OLE file +- Open streams as files +- Parse and read property streams, containing metadata of the file +- Portable, pure Python module, no dependency + +olefile can be used as an independent package or with PIL/Pillow. + +olefile is mostly meant for developers. If you are looking for tools to analyze OLE files or to extract data (especially +for security purposes such as malware analysis and forensics), then please also check my +[python-oletools](http://www.decalage.info/python/oletools), which are built upon olefile and provide a higher-level interface. + + +History +------- + +olefile is based on the OleFileIO module from [PIL](http://www.pythonware.com/products/pil/index.htm), the excellent +Python Imaging Library, created and maintained by Fredrik Lundh. The olefile API is still compatible with PIL, but +since 2005 I have improved the internal implementation significantly, with new features, bugfixes and a more robust +design. From 2005 to 2014 the project was called OleFileIO_PL, and in 2014 I changed its name to olefile to celebrate +its 9 years and its new write features. + +As far as I know, olefile is the most complete and robust Python implementation to read MS OLE2 files, portable on +several operating systems. 
(please tell me if you know other similar Python modules) + +Since 2014 olefile/OleFileIO_PL has been integrated into [Pillow](http://python-imaging.github.io/), the friendly fork +of PIL. olefile will continue to be improved as a separate project, and new versions will be merged into Pillow +regularly. + + +Main improvements over the original version of OleFileIO in PIL: +---------------------------------------------------------------- + +- Compatible with Python 3.x and 2.6+ +- Many bug fixes +- Support for files larger than 6.8MB +- Support for 64 bits platforms and big-endian CPUs +- Robust: many checks to detect malformed files +- Runtime option to choose if malformed files should be parsed or raise exceptions +- Improved API +- Metadata extraction, stream/storage timestamps (e.g. for document forensics) +- Can open file-like objects +- Added setup.py and install.bat to ease installation +- More convenient slash-based syntax for stream paths +- Write features + +Documentation +------------- + +Please see the [online documentation](https://bitbucket.org/decalage/olefileio_pl/wiki) for more information, +especially the [OLE overview](https://bitbucket.org/decalage/olefileio_pl/wiki/OLE_Overview) and the +[API page](https://bitbucket.org/decalage/olefileio_pl/wiki/API) which describe how to use olefile in Python applications. +A copy of the same documentation is also provided in the doc subfolder of the olefile package. + + +## Real-life examples ## + +A real-life example: [using OleFileIO_PL for malware analysis and forensics](http://blog.gregback.net/2011/03/using-remnux-for-forensic-puzzle-6/). + +See also [this paper](https://computer-forensics.sans.org/community/papers/gcfa/grow-forensic-tools-taxonomy-python-libraries-helpful-forensic-analysis_6879) about python tools for forensics, which features olefile. + + +License +------- + +olefile (formerly OleFileIO_PL) is copyright (c) 2005-2015 Philippe Lagadec +([http://www.decalage.info](http://www.decalage.info)) + +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ + +---------- + +olefile is based on source code from the OleFileIO module of the Python Imaging Library (PIL) published by Fredrik +Lundh under the following license: + +The Python Imaging Library (PIL) is + +- Copyright (c) 1997-2005 by Secret Labs AB +- Copyright (c) 1995-2005 by Fredrik Lundh + +By obtaining, using, and/or copying this software and/or its associated documentation, you agree that you have read, +understood, and will comply with the following terms and conditions: + +Permission to use, copy, modify, and distribute this software and its associated documentation for any purpose and +without fee is hereby granted, provided that the above copyright notice appears in all copies, and that both that +copyright notice and this permission notice appear in supporting documentation, and that the name of Secret Labs AB or +the author not be used in advertising or publicity pertaining to distribution of the software without specific, written +prior permission. + +SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR BE LIABLE FOR ANY SPECIAL, INDIRECT OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF +CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS +SOFTWARE. diff --git a/Lib/site-packages/PIL/OleFileIO.py b/Lib/site-packages/PIL/OleFileIO.py new file mode 100644 index 0000000..4cf106d --- /dev/null +++ b/Lib/site-packages/PIL/OleFileIO.py @@ -0,0 +1,2305 @@ +#!/usr/bin/env python + +# olefile (formerly OleFileIO_PL) version 0.42 2015-01-25 +# +# Module to read/write Microsoft OLE2 files (also called Structured Storage or +# Microsoft Compound Document File Format), such as Microsoft Office 97-2003 +# documents, Image Composer and FlashPix files, Outlook messages, ... +# This version is compatible with Python 2.6+ and 3.x +# +# Project website: http://www.decalage.info/olefile +# +# olefile is copyright (c) 2005-2015 Philippe Lagadec (http://www.decalage.info) +# +# olefile is based on the OleFileIO module from the PIL library v1.1.6 +# See: http://www.pythonware.com/products/pil/index.htm +# +# The Python Imaging Library (PIL) is +# Copyright (c) 1997-2005 by Secret Labs AB +# Copyright (c) 1995-2005 by Fredrik Lundh +# +# See source code and LICENSE.txt for information on usage and redistribution. + + +# Since OleFileIO_PL v0.30, only Python 2.6+ and 3.x is supported +# This import enables print() as a function rather than a keyword +# (main requirement to be compatible with Python 3.x) +# The comment on the line below should be printed on Python 2.5 or older: +from __future__ import print_function # This version of olefile requires Python 2.6+ or 3.x. + + +__author__ = "Philippe Lagadec" +__date__ = "2015-01-25" +__version__ = '0.42b' + +#--- LICENSE ------------------------------------------------------------------ + +# olefile (formerly OleFileIO_PL) is copyright (c) 2005-2015 Philippe Lagadec +# (http://www.decalage.info) +# +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without modification, +# are permitted provided that the following conditions are met: +# +# * Redistributions of source code must retain the above copyright notice, this +# list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above copyright notice, +# this list of conditions and the following disclaimer in the documentation +# and/or other materials provided with the distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# ---------- +# PIL License: +# +# olefile is based on source code from the OleFileIO module of the Python +# Imaging Library (PIL) published by Fredrik Lundh under the following license: + +# The Python Imaging Library (PIL) is +# Copyright (c) 1997-2005 by Secret Labs AB +# Copyright (c) 1995-2005 by Fredrik Lundh +# +# By obtaining, using, and/or copying this software and/or its associated +# documentation, you agree that you have read, understood, and will comply with +# the following terms and conditions: +# +# Permission to use, copy, modify, and distribute this software and its +# associated documentation for any purpose and without fee is hereby granted, +# provided that the above copyright notice appears in all copies, and that both +# that copyright notice and this permission notice appear in supporting +# documentation, and that the name of Secret Labs AB or the author(s) not be used +# in advertising or publicity pertaining to distribution of the software +# without specific, written prior permission. +# +# SECRET LABS AB AND THE AUTHORS DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS +# SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. +# IN NO EVENT SHALL SECRET LABS AB OR THE AUTHORS BE LIABLE FOR ANY SPECIAL, +# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +# OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +# PERFORMANCE OF THIS SOFTWARE. 
+ +#----------------------------------------------------------------------------- +# CHANGELOG: (only olefile/OleFileIO_PL changes compared to PIL 1.1.6) +# 2005-05-11 v0.10 PL: - a few fixes for Python 2.4 compatibility +# (all changes flagged with [PL]) +# 2006-02-22 v0.11 PL: - a few fixes for some Office 2003 documents which raise +# exceptions in _OleStream.__init__() +# 2006-06-09 v0.12 PL: - fixes for files above 6.8MB (DIFAT in loadfat) +# - added some constants +# - added header values checks +# - added some docstrings +# - getsect: bugfix in case sectors >512 bytes +# - getsect: added conformity checks +# - DEBUG_MODE constant to activate debug display +# 2007-09-04 v0.13 PL: - improved/translated (lots of) comments +# - updated license +# - converted tabs to 4 spaces +# 2007-11-19 v0.14 PL: - added OleFileIO._raise_defect() to adapt sensitivity +# - improved _unicode() to use Python 2.x unicode support +# - fixed bug in _OleDirectoryEntry +# 2007-11-25 v0.15 PL: - added safety checks to detect FAT loops +# - fixed _OleStream which didn't check stream size +# - added/improved many docstrings and comments +# - moved helper functions _unicode and _clsid out of +# OleFileIO class +# - improved OleFileIO._find() to add Unix path syntax +# - OleFileIO._find() is now case-insensitive +# - added get_type() and get_rootentry_name() +# - rewritten loaddirectory and _OleDirectoryEntry +# 2007-11-27 v0.16 PL: - added _OleDirectoryEntry.kids_dict +# - added detection of duplicate filenames in storages +# - added detection of duplicate references to streams +# - added get_size() and exists() to _OleDirectoryEntry +# - added isOleFile to check header before parsing +# - added __all__ list to control public keywords in pydoc +# 2007-12-04 v0.17 PL: - added _load_direntry to fix a bug in loaddirectory +# - improved _unicode(), added workarounds for Python <2.3 +# - added set_debug_mode and -d option to set debug mode +# - fixed bugs in OleFileIO.open and _OleDirectoryEntry +# - added safety check in main for large or binary +# properties +# - allow size>0 for storages for some implementations +# 2007-12-05 v0.18 PL: - fixed several bugs in handling of FAT, MiniFAT and +# streams +# - added option '-c' in main to check all streams +# 2009-12-10 v0.19 PL: - bugfix for 32 bit arrays on 64 bits platforms +# (thanks to Ben G. 
and Martijn for reporting the bug) +# 2009-12-11 v0.20 PL: - bugfix in OleFileIO.open when filename is not plain str +# 2010-01-22 v0.21 PL: - added support for big-endian CPUs such as PowerPC Macs +# 2012-02-16 v0.22 PL: - fixed bug in getproperties, patch by chuckleberryfinn +# (https://bitbucket.org/decalage/olefileio_pl/issue/7) +# - added close method to OleFileIO (fixed issue #2) +# 2012-07-25 v0.23 PL: - added support for file-like objects (patch by mete0r_kr) +# 2013-05-05 v0.24 PL: - getproperties: added conversion from filetime to python +# datetime +# - main: displays properties with date format +# - new class OleMetadata to parse standard properties +# - added get_metadata method +# 2013-05-07 v0.24 PL: - a few improvements in OleMetadata +# 2013-05-24 v0.25 PL: - getproperties: option to not convert some timestamps +# - OleMetaData: total_edit_time is now a number of seconds, +# not a timestamp +# - getproperties: added support for VT_BOOL, VT_INT, V_UINT +# - getproperties: filter out null chars from strings +# - getproperties: raise non-fatal defects instead of +# exceptions when properties cannot be parsed properly +# 2013-05-27 PL: - getproperties: improved exception handling +# - _raise_defect: added option to set exception type +# - all non-fatal issues are now recorded, and displayed +# when run as a script +# 2013-07-11 v0.26 PL: - added methods to get modification and creation times +# of a directory entry or a storage/stream +# - fixed parsing of direntry timestamps +# 2013-07-24 PL: - new options in listdir to list storages and/or streams +# 2014-02-04 v0.30 PL: - upgraded code to support Python 3.x by Martin Panter +# - several fixes for Python 2.6 (xrange, MAGIC) +# - reused i32 from Pillow's _binary +# 2014-07-18 v0.31 - preliminary support for 4K sectors +# 2014-07-27 v0.31 PL: - a few improvements in OleFileIO.open (header parsing) +# - Fixed loadfat for large files with 4K sectors (issue #3) +# 2014-07-30 v0.32 PL: - added write_sect to write sectors to disk +# - added write_mode option to OleFileIO.__init__ and open +# 2014-07-31 PL: - fixed padding in write_sect for Python 3, added checks +# - added write_stream to write a stream to disk +# 2014-09-26 v0.40 PL: - renamed OleFileIO_PL to olefile +# 2014-11-09 NE: - added support for Jython (Niko Ehrenfeuchter) +# 2014-11-13 v0.41 PL: - improved isOleFile and OleFileIO.open to support OLE +# data in a string buffer and file-like objects. +# 2014-11-21 PL: - updated comments according to Pillow's commits +# 2015-01-24 v0.42 PL: - changed the default path name encoding from Latin-1 +# to UTF-8 on Python 2.x (Unicode on Python 3.x) +# - added path_encoding option to override the default +# - fixed a bug in _list when a storage is empty + +#----------------------------------------------------------------------------- +# TODO (for version 1.0): +# + get rid of print statements, to simplify Python 2.x and 3.x support +# + add is_stream and is_storage +# + remove leading and trailing slashes where a path is used +# + add functions path_list2str and path_str2list +# + fix how all the methods handle unicode str and/or bytes as arguments +# + add path attrib to _OleDirEntry, set it once and for all in init or +# append_kids (then listdir/_list can be simplified) +# - TESTS with Linux, MacOSX, Python 1.5.2, various files, PIL, ... 
+# - add underscore to each private method, to avoid their display in +# pydoc/epydoc documentation - Remove it for classes to be documented +# - replace all raised exceptions with _raise_defect (at least in OleFileIO) +# - merge code from _OleStream and OleFileIO.getsect to read sectors +# (maybe add a class for FAT and MiniFAT ?) +# - add method to check all streams (follow sectors chains without storing all +# stream in memory, and report anomalies) +# - use _OleDirectoryEntry.kids_dict to improve _find and _list ? +# - fix Unicode names handling (find some way to stay compatible with Py1.5.2) +# => if possible avoid converting names to Latin-1 +# - review DIFAT code: fix handling of DIFSECT blocks in FAT (not stop) +# - rewrite OleFileIO.getproperties +# - improve docstrings to show more sample uses +# - see also original notes and FIXME below +# - remove all obsolete FIXMEs +# - OleMetadata: fix version attrib according to +# http://msdn.microsoft.com/en-us/library/dd945671%28v=office.12%29.aspx + +# IDEAS: +# - in OleFileIO._open and _OleStream, use size=None instead of 0x7FFFFFFF for +# streams with unknown size +# - use arrays of int instead of long integers for FAT/MiniFAT, to improve +# performance and reduce memory usage ? (possible issue with values >2^31) +# - provide tests with unittest (may need write support to create samples) +# - move all debug code (and maybe dump methods) to a separate module, with +# a class which inherits OleFileIO ? +# - fix docstrings to follow epydoc format +# - add support for big endian byte order ? +# - create a simple OLE explorer with wxPython + +# FUTURE EVOLUTIONS to add write support: +# see issue #6 on Bitbucket: +# https://bitbucket.org/decalage/olefileio_pl/issue/6/improve-olefileio_pl-to-write-ole-files + +#----------------------------------------------------------------------------- +# NOTES from PIL 1.1.6: + +# History: +# 1997-01-20 fl Created +# 1997-01-22 fl Fixed 64-bit portability quirk +# 2003-09-09 fl Fixed typo in OleFileIO.loadfat (noted by Daniel Haertle) +# 2004-02-29 fl Changed long hex constants to signed integers +# +# Notes: +# FIXME: sort out sign problem (eliminate long hex constants) +# FIXME: change filename to use "a/b/c" instead of ["a", "b", "c"] +# FIXME: provide a glob mechanism function (using fnmatchcase) +# +# Literature: +# +# "FlashPix Format Specification, Appendix A", Kodak and Microsoft, +# September 1996. 
+# +# Quotes: +# +# "If this document and functionality of the Software conflict, +# the actual functionality of the Software represents the correct +# functionality" -- Microsoft, in the OLE format specification + +#------------------------------------------------------------------------------ + + +import io +import sys +import struct +import array +import os.path +import datetime + +#=== COMPATIBILITY WORKAROUNDS ================================================ + +# [PL] Define explicitly the public API to avoid private objects in pydoc: +#TODO: add more +# __all__ = ['OleFileIO', 'isOleFile', 'MAGIC'] + +# For Python 3.x, need to redefine long as int: +if str is not bytes: + long = int + +# Need to make sure we use xrange both on Python 2 and 3.x: +try: + # on Python 2 we need xrange: + iterrange = xrange +except: + # no xrange, for Python 3 it was renamed as range: + iterrange = range + +# [PL] workaround to fix an issue with array item size on 64 bits systems: +if array.array('L').itemsize == 4: + # on 32 bits platforms, long integers in an array are 32 bits: + UINT32 = 'L' +elif array.array('I').itemsize == 4: + # on 64 bits platforms, integers in an array are 32 bits: + UINT32 = 'I' +elif array.array('i').itemsize == 4: + # On 64 bit Jython, signed integers ('i') are the only way to store our 32 + # bit values in an array in a *somewhat* reasonable way, as the otherwise + # perfectly suited 'H' (unsigned int, 32 bits) results in a completely + # unusable behaviour. This is most likely caused by the fact that Java + # doesn't have unsigned values, and thus Jython's "array" implementation, + # which is based on "jarray", doesn't have them either. + # NOTE: to trick Jython into converting the values it would normally + # interpret as "signed" into "unsigned", a binary-and operation with + # 0xFFFFFFFF can be used. This way it is possible to use the same comparing + # operations on all platforms / implementations. The corresponding code + # lines are flagged with a 'JYTHON-WORKAROUND' tag below. + UINT32 = 'i' +else: + raise ValueError('Need to fix a bug with 32 bit arrays, please contact author...') + + +# [PL] These workarounds were inspired from the Path module +# (see http://www.jorendorff.com/articles/python/path/) +try: + basestring +except NameError: + basestring = str + +# [PL] Experimental setting: if True, OLE filenames will be kept in Unicode +# if False (default PIL behaviour), all filenames are converted to Latin-1. +KEEP_UNICODE_NAMES = True + +if sys.version_info[0] < 3: + # On Python 2.x, the default encoding for path names is UTF-8: + DEFAULT_PATH_ENCODING = 'utf-8' +else: + # On Python 3.x, the default encoding for path names is Unicode (None): + DEFAULT_PATH_ENCODING = None + + +#=== DEBUGGING =============================================================== + +#TODO: replace this by proper logging + +# [PL] DEBUG display mode: False by default, use set_debug_mode() or "-d" on +# command line to change it. +DEBUG_MODE = False + + +def debug_print(msg): + print(msg) + + +def debug_pass(msg): + pass + + +debug = debug_pass + + +def set_debug_mode(debug_mode): + """ + Set debug mode on or off, to control display of debugging messages. 
+ :param mode: True or False + """ + global DEBUG_MODE, debug + DEBUG_MODE = debug_mode + if debug_mode: + debug = debug_print + else: + debug = debug_pass + + +#=== CONSTANTS =============================================================== + +# magic bytes that should be at the beginning of every OLE file: +MAGIC = b'\xD0\xCF\x11\xE0\xA1\xB1\x1A\xE1' + +# [PL]: added constants for Sector IDs (from AAF specifications) +MAXREGSECT = 0xFFFFFFFA # (-6) maximum SECT +DIFSECT = 0xFFFFFFFC # (-4) denotes a DIFAT sector in a FAT +FATSECT = 0xFFFFFFFD # (-3) denotes a FAT sector in a FAT +ENDOFCHAIN = 0xFFFFFFFE # (-2) end of a virtual stream chain +FREESECT = 0xFFFFFFFF # (-1) unallocated sector + +# [PL]: added constants for Directory Entry IDs (from AAF specifications) +MAXREGSID = 0xFFFFFFFA # (-6) maximum directory entry ID +NOSTREAM = 0xFFFFFFFF # (-1) unallocated directory entry + +# [PL] object types in storage (from AAF specifications) +STGTY_EMPTY = 0 # empty directory entry (according to OpenOffice.org doc) +STGTY_STORAGE = 1 # element is a storage object +STGTY_STREAM = 2 # element is a stream object +STGTY_LOCKBYTES = 3 # element is an ILockBytes object +STGTY_PROPERTY = 4 # element is an IPropertyStorage object +STGTY_ROOT = 5 # element is a root storage + + +# +# -------------------------------------------------------------------- +# property types + +VT_EMPTY = 0; VT_NULL = 1; VT_I2 = 2; VT_I4 = 3; VT_R4 = 4; VT_R8 = 5; VT_CY = 6; +VT_DATE = 7; VT_BSTR = 8; VT_DISPATCH = 9; VT_ERROR = 10; VT_BOOL = 11; +VT_VARIANT = 12; VT_UNKNOWN = 13; VT_DECIMAL = 14; VT_I1 = 16; VT_UI1 = 17; +VT_UI2 = 18; VT_UI4 = 19; VT_I8 = 20; VT_UI8 = 21; VT_INT = 22; VT_UINT = 23; +VT_VOID = 24; VT_HRESULT = 25; VT_PTR = 26; VT_SAFEARRAY = 27; VT_CARRAY = 28; +VT_USERDEFINED = 29; VT_LPSTR = 30; VT_LPWSTR = 31; VT_FILETIME = 64; +VT_BLOB = 65; VT_STREAM = 66; VT_STORAGE = 67; VT_STREAMED_OBJECT = 68; +VT_STORED_OBJECT = 69; VT_BLOB_OBJECT = 70; VT_CF = 71; VT_CLSID = 72; +VT_VECTOR = 0x1000; + +# map property id to name (for debugging purposes) + +VT = {} +for keyword, var in list(vars().items()): + if keyword[:3] == "VT_": + VT[var] = keyword + +# +# -------------------------------------------------------------------- +# Some common document types (root.clsid fields) + +WORD_CLSID = "00020900-0000-0000-C000-000000000046" +#TODO: check Excel, PPT, ... + +# [PL]: Defect levels to classify parsing errors - see OleFileIO._raise_defect() +DEFECT_UNSURE = 10 # a case which looks weird, but not sure it's a defect +DEFECT_POTENTIAL = 20 # a potential defect +DEFECT_INCORRECT = 30 # an error according to specifications, but parsing + # can go on +DEFECT_FATAL = 40 # an error which cannot be ignored, parsing is + # impossible + +# Minimal size of an empty OLE file, with 512-bytes sectors = 1536 bytes +# (this is used in isOleFile and OleFile.open) +MINIMAL_OLEFILE_SIZE = 1536 + +# [PL] add useful constants to __all__: +# for key in list(vars().keys()): +# if key.startswith('STGTY_') or key.startswith('DEFECT_'): +# __all__.append(key) + + +#=== FUNCTIONS =============================================================== + +def isOleFile(filename): + """ + Test if a file is an OLE container (according to the magic bytes in its header). + + :param filename: string-like or file-like object, OLE file to parse + + - if filename is a string smaller than 1536 bytes, it is the path + of the file to open. 
(bytes or unicode string) + - if filename is a string longer than 1535 bytes, it is parsed + as the content of an OLE file in memory. (bytes type only) + - if filename is a file-like object (with read and seek methods), + it is parsed as-is. + + :returns: True if OLE, False otherwise. + """ + # check if filename is a string-like or file-like object: + if hasattr(filename, 'read'): + # file-like object: use it directly + header = filename.read(len(MAGIC)) + # just in case, seek back to start of file: + filename.seek(0) + elif isinstance(filename, bytes) and len(filename) >= MINIMAL_OLEFILE_SIZE: + # filename is a bytes string containing the OLE file to be parsed: + header = filename[:len(MAGIC)] + else: + # string-like object: filename of file on disk + header = open(filename, 'rb').read(len(MAGIC)) + if header == MAGIC: + return True + else: + return False + + +if bytes is str: + # version for Python 2.x + def i8(c): + return ord(c) +else: + # version for Python 3.x + def i8(c): + return c if c.__class__ is int else c[0] + + +#TODO: replace i16 and i32 with more readable struct.unpack equivalent? + +def i16(c, o = 0): + """ + Converts a 2-bytes (16 bits) string to an integer. + + c: string containing bytes to convert + o: offset of bytes to convert in string + """ + return struct.unpack(" len(fat): + raise IOError('malformed OLE document, stream too large') + # optimization(?): data is first a list of strings, and join() is called + # at the end to concatenate all in one string. + # (this may not be really useful with recent Python versions) + data = [] + # if size is zero, then first sector index should be ENDOFCHAIN: + if size == 0 and sect != ENDOFCHAIN: + debug('size == 0 and sect != ENDOFCHAIN:') + raise IOError('incorrect OLE sector index for empty stream') + # [PL] A fixed-length for loop is used instead of an undefined while + # loop to avoid DoS attacks: + for i in range(nb_sectors): + # Sector index may be ENDOFCHAIN, but only if size was unknown + if sect == ENDOFCHAIN: + if unknown_size: + break + else: + # else this means that the stream is smaller than declared: + debug('sect=ENDOFCHAIN before expected size') + raise IOError('incomplete OLE stream') + # sector index should be within FAT: + if sect < 0 or sect >= len(fat): + debug('sect=%d (%X) / len(fat)=%d' % (sect, sect, len(fat))) + debug('i=%d / nb_sectors=%d' % (i, nb_sectors)) +## tmp_data = b"".join(data) +## f = open('test_debug.bin', 'wb') +## f.write(tmp_data) +## f.close() +## debug('data read so far: %d bytes' % len(tmp_data)) + raise IOError('incorrect OLE FAT, sector index out of range') + #TODO: merge this code with OleFileIO.getsect() ? + #TODO: check if this works with 4K sectors: + try: + fp.seek(offset + sectorsize * sect) + except: + debug('sect=%d, seek=%d, filesize=%d' % + (sect, offset+sectorsize*sect, filesize)) + raise IOError('OLE sector index out of range') + sector_data = fp.read(sectorsize) + # [PL] check if there was enough data: + # Note: if sector is the last of the file, sometimes it is not a + # complete sector (of 512 or 4K), so we may read less than + # sectorsize. 
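+            # (For instance, with 512-byte sectors a 1600-byte file ends only
+            #  64 bytes into its last sector, so a short read is tolerated for
+            #  that final sector; a short read anywhere else means the
+            #  container is truncated or corrupted.)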
+ if len(sector_data) != sectorsize and sect != (len(fat)-1): + debug('sect=%d / len(fat)=%d, seek=%d / filesize=%d, len read=%d' % + (sect, len(fat), offset+sectorsize*sect, filesize, len(sector_data))) + debug('seek+len(read)=%d' % (offset+sectorsize*sect+len(sector_data))) + raise IOError('incomplete OLE sector') + data.append(sector_data) + # jump to next sector in the FAT: + try: + sect = fat[sect] & 0xFFFFFFFF # JYTHON-WORKAROUND + except IndexError: + # [PL] if pointer is out of the FAT an exception is raised + raise IOError('incorrect OLE FAT, sector index out of range') + # [PL] Last sector should be a "end of chain" marker: + if sect != ENDOFCHAIN: + raise IOError('incorrect last sector index in OLE stream') + data = b"".join(data) + # Data is truncated to the actual stream size: + if len(data) >= size: + data = data[:size] + # actual stream size is stored for future use: + self.size = size + elif unknown_size: + # actual stream size was not known, now we know the size of read + # data: + self.size = len(data) + else: + # read data is less than expected: + debug('len(data)=%d, size=%d' % (len(data), size)) + raise IOError('OLE stream size is less than declared') + # when all data is read in memory, BytesIO constructor is called + io.BytesIO.__init__(self, data) + # Then the _OleStream object can be used as a read-only file object. + + +#--- _OleDirectoryEntry ------------------------------------------------------- + +class _OleDirectoryEntry(object): + + """ + OLE2 Directory Entry + """ + # [PL] parsing code moved from OleFileIO.loaddirectory + + # struct to parse directory entries: + # <: little-endian byte order, standard sizes + # (note: this should guarantee that Q returns a 64 bits int) + # 64s: string containing entry name in unicode (max 31 chars) + null char + # H: uint16, number of bytes used in name buffer, including null = (len+1)*2 + # B: uint8, dir entry type (between 0 and 5) + # B: uint8, color: 0=black, 1=red + # I: uint32, index of left child node in the red-black tree, NOSTREAM if none + # I: uint32, index of right child node in the red-black tree, NOSTREAM if none + # I: uint32, index of child root node if it is a storage, else NOSTREAM + # 16s: CLSID, unique identifier (only used if it is a storage) + # I: uint32, user flags + # Q (was 8s): uint64, creation timestamp or zero + # Q (was 8s): uint64, modification timestamp or zero + # I: uint32, SID of first sector if stream or ministream, SID of 1st sector + # of stream containing ministreams if root entry, 0 otherwise + # I: uint32, total stream size in bytes if stream (low 32 bits), 0 otherwise + # I: uint32, total stream size in bytes if stream (high 32 bits), 0 otherwise + STRUCT_DIRENTRY = '<64sHBBIII16sIQQIII' + # size of a directory entry: 128 bytes + DIRENTRY_SIZE = 128 + assert struct.calcsize(STRUCT_DIRENTRY) == DIRENTRY_SIZE + + def __init__(self, entry, sid, olefile): + """ + Constructor for an _OleDirectoryEntry object. + Parses a 128-bytes entry from the OLE Directory stream. 
+ + :param entry : string (must be 128 bytes long) + :param sid : index of this directory entry in the OLE file directory + :param olefile: OleFileIO containing this directory entry + """ + self.sid = sid + # ref to olefile is stored for future use + self.olefile = olefile + # kids is a list of children entries, if this entry is a storage: + # (list of _OleDirectoryEntry objects) + self.kids = [] + # kids_dict is a dictionary of children entries, indexed by their + # name in lowercase: used to quickly find an entry, and to detect + # duplicates + self.kids_dict = {} + # flag used to detect if the entry is referenced more than once in + # directory: + self.used = False + # decode DirEntry + ( + name, + namelength, + self.entry_type, + self.color, + self.sid_left, + self.sid_right, + self.sid_child, + clsid, + self.dwUserFlags, + self.createTime, + self.modifyTime, + self.isectStart, + sizeLow, + sizeHigh + ) = struct.unpack(_OleDirectoryEntry.STRUCT_DIRENTRY, entry) + if self.entry_type not in [STGTY_ROOT, STGTY_STORAGE, STGTY_STREAM, STGTY_EMPTY]: + olefile.raise_defect(DEFECT_INCORRECT, 'unhandled OLE storage type') + # only first directory entry can (and should) be root: + if self.entry_type == STGTY_ROOT and sid != 0: + olefile.raise_defect(DEFECT_INCORRECT, 'duplicate OLE root entry') + if sid == 0 and self.entry_type != STGTY_ROOT: + olefile.raise_defect(DEFECT_INCORRECT, 'incorrect OLE root entry') + #debug (struct.unpack(fmt_entry, entry[:len_entry])) + # name should be at most 31 unicode characters + null character, + # so 64 bytes in total (31*2 + 2): + if namelength > 64: + olefile.raise_defect(DEFECT_INCORRECT, 'incorrect DirEntry name length') + # if exception not raised, namelength is set to the maximum value: + namelength = 64 + # only characters without ending null char are kept: + name = name[:(namelength-2)] + #TODO: check if the name is actually followed by a null unicode character ([MS-CFB] 2.6.1) + #TODO: check if the name does not contain forbidden characters: + # [MS-CFB] 2.6.1: "The following characters are illegal and MUST NOT be part of the name: '/', '\', ':', '!'." 
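+        # For example, a stream named 'WordDocument' (12 characters) is stored
+        # as 24 bytes of UTF-16LE followed by a 2-byte null terminator, so its
+        # namelength field is (12+1)*2 = 26 and name[:26-2] keeps the 24
+        # meaningful bytes that are decoded below.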
+ # name is converted from UTF-16LE to the path encoding specified in the OleFileIO: + self.name = olefile._decode_utf16_str(name) + + debug('DirEntry SID=%d: %s' % (self.sid, repr(self.name))) + debug(' - type: %d' % self.entry_type) + debug(' - sect: %d' % self.isectStart) + debug(' - SID left: %d, right: %d, child: %d' % (self.sid_left, + self.sid_right, self.sid_child)) + + # sizeHigh is only used for 4K sectors, it should be zero for 512 bytes + # sectors, BUT apparently some implementations set it as 0xFFFFFFFF, 1 + # or some other value so it cannot be raised as a defect in general: + if olefile.sectorsize == 512: + if sizeHigh != 0 and sizeHigh != 0xFFFFFFFF: + debug('sectorsize=%d, sizeLow=%d, sizeHigh=%d (%X)' % + (olefile.sectorsize, sizeLow, sizeHigh, sizeHigh)) + olefile.raise_defect(DEFECT_UNSURE, 'incorrect OLE stream size') + self.size = sizeLow + else: + self.size = sizeLow + (long(sizeHigh) << 32) + debug(' - size: %d (sizeLow=%d, sizeHigh=%d)' % (self.size, sizeLow, sizeHigh)) + + self.clsid = _clsid(clsid) + # a storage should have a null size, BUT some implementations such as + # Word 8 for Mac seem to allow non-null values => Potential defect: + if self.entry_type == STGTY_STORAGE and self.size != 0: + olefile.raise_defect(DEFECT_POTENTIAL, 'OLE storage with size>0') + # check if stream is not already referenced elsewhere: + if self.entry_type in (STGTY_ROOT, STGTY_STREAM) and self.size > 0: + if self.size < olefile.minisectorcutoff \ + and self.entry_type == STGTY_STREAM: # only streams can be in MiniFAT + # ministream object + minifat = True + else: + minifat = False + olefile._check_duplicate_stream(self.isectStart, minifat) + + def build_storage_tree(self): + """ + Read and build the red-black tree attached to this _OleDirectoryEntry + object, if it is a storage. + Note that this method builds a tree of all subentries, so it should + only be called for the root object once. + """ + debug('build_storage_tree: SID=%d - %s - sid_child=%d' + % (self.sid, repr(self.name), self.sid_child)) + if self.sid_child != NOSTREAM: + # if child SID is not NOSTREAM, then this entry is a storage. + # Let's walk through the tree of children to fill the kids list: + self.append_kids(self.sid_child) + + # Note from OpenOffice documentation: the safest way is to + # recreate the tree because some implementations may store broken + # red-black trees... + + # in the OLE file, entries are sorted on (length, name). + # for convenience, we sort them on name instead: + # (see rich comparison methods in this class) + self.kids.sort() + + def append_kids(self, child_sid): + """ + Walk through red-black tree of children of this directory entry to add + all of them to the kids list. (recursive method) + + :param child_sid : index of child directory entry to use, or None when called + first time for the root. (only used during recursion) + """ + # [PL] this method was added to use simple recursion instead of a complex + # algorithm. 
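+        # The walk below is an in-order traversal: the left subtree is visited
+        # first, then the current entry is appended, then the right subtree,
+        # so kids come out in the tree's comparison order; build_storage_tree()
+        # re-sorts them by name afterwards in any case.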
+ # if this is not a storage or a leaf of the tree, nothing to do: + if child_sid == NOSTREAM: + return + # check if child SID is in the proper range: + if child_sid < 0 or child_sid >= len(self.olefile.direntries): + self.olefile.raise_defect(DEFECT_FATAL, 'OLE DirEntry index out of range') + # get child direntry: + child = self.olefile._load_direntry(child_sid) #direntries[child_sid] + debug('append_kids: child_sid=%d - %s - sid_left=%d, sid_right=%d, sid_child=%d' + % (child.sid, repr(child.name), child.sid_left, child.sid_right, child.sid_child)) + # the directory entries are organized as a red-black tree. + # (cf. Wikipedia for details) + # First walk through left side of the tree: + self.append_kids(child.sid_left) + # Check if its name is not already used (case-insensitive): + name_lower = child.name.lower() + if name_lower in self.kids_dict: + self.olefile.raise_defect(DEFECT_INCORRECT, + "Duplicate filename in OLE storage") + # Then the child_sid _OleDirectoryEntry object is appended to the + # kids list and dictionary: + self.kids.append(child) + self.kids_dict[name_lower] = child + # Check if kid was not already referenced in a storage: + if child.used: + self.olefile.raise_defect(DEFECT_INCORRECT, + 'OLE Entry referenced more than once') + child.used = True + # Finally walk through right side of the tree: + self.append_kids(child.sid_right) + # Afterwards build kid's own tree if it's also a storage: + child.build_storage_tree() + + def __eq__(self, other): + "Compare entries by name" + return self.name == other.name + + def __lt__(self, other): + "Compare entries by name" + return self.name < other.name + + def __ne__(self, other): + return not self.__eq__(other) + + def __le__(self, other): + return self.__eq__(other) or self.__lt__(other) + + # Reflected __lt__() and __le__() will be used for __gt__() and __ge__() + + #TODO: replace by the same function as MS implementation ? + # (order by name length first, then case-insensitive order) + + def dump(self, tab = 0): + "Dump this entry, and all its subentries (for debug purposes only)" + TYPES = ["(invalid)", "(storage)", "(stream)", "(lockbytes)", + "(property)", "(root)"] + print(" "*tab + repr(self.name), TYPES[self.entry_type], end=' ') + if self.entry_type in (STGTY_STREAM, STGTY_ROOT): + print(self.size, "bytes", end=' ') + print() + if self.entry_type in (STGTY_STORAGE, STGTY_ROOT) and self.clsid: + print(" "*tab + "{%s}" % self.clsid) + + for kid in self.kids: + kid.dump(tab + 2) + + def getmtime(self): + """ + Return modification time of a directory entry. + + :returns: None if modification time is null, a python datetime object + otherwise (UTC timezone) + + new in version 0.26 + """ + if self.modifyTime == 0: + return None + return filetime2datetime(self.modifyTime) + + def getctime(self): + """ + Return creation time of a directory entry. + + :returns: None if modification time is null, a python datetime object + otherwise (UTC timezone) + + new in version 0.26 + """ + if self.createTime == 0: + return None + return filetime2datetime(self.createTime) + + +#--- OleFileIO ---------------------------------------------------------------- + +class OleFileIO(object): + """ + OLE container object + + This class encapsulates the interface to an OLE 2 structured + storage file. Use the :py:meth:`~PIL.OleFileIO.OleFileIO.listdir` and + :py:meth:`~PIL.OleFileIO.OleFileIO.openstream` methods to + access the contents of this file. + + Object names are given as a list of strings, one for each subentry + level. 
The root entry should be omitted. For example, the following + code extracts all image streams from a Microsoft Image Composer file:: + + ole = OleFileIO("fan.mic") + + for entry in ole.listdir(): + if entry[1:2] == "Image": + fin = ole.openstream(entry) + fout = open(entry[0:1], "wb") + while True: + s = fin.read(8192) + if not s: + break + fout.write(s) + + You can use the viewer application provided with the Python Imaging + Library to view the resulting files (which happens to be standard + TIFF files). + """ + + def __init__(self, filename=None, raise_defects=DEFECT_FATAL, + write_mode=False, debug=False, path_encoding=DEFAULT_PATH_ENCODING): + """ + Constructor for the OleFileIO class. + + :param filename: file to open. + + - if filename is a string smaller than 1536 bytes, it is the path + of the file to open. (bytes or unicode string) + - if filename is a string longer than 1535 bytes, it is parsed + as the content of an OLE file in memory. (bytes type only) + - if filename is a file-like object (with read, seek and tell methods), + it is parsed as-is. + + :param raise_defects: minimal level for defects to be raised as exceptions. + (use DEFECT_FATAL for a typical application, DEFECT_INCORRECT for a + security-oriented application, see source code for details) + + :param write_mode: bool, if True the file is opened in read/write mode instead + of read-only by default. + + :param debug: bool, set debug mode + + :param path_encoding: None or str, name of the codec to use for path + names (streams and storages), or None for Unicode. + Unicode by default on Python 3+, UTF-8 on Python 2.x. + (new in olefile 0.42, was hardcoded to Latin-1 until olefile v0.41) + """ + set_debug_mode(debug) + # minimal level for defects to be raised as exceptions: + self._raise_defects_level = raise_defects + # list of defects/issues not raised as exceptions: + # tuples of (exception type, message) + self.parsing_issues = [] + self.write_mode = write_mode + self.path_encoding = path_encoding + self._filesize = None + self.fp = None + if filename: + self.open(filename, write_mode=write_mode) + + def raise_defect(self, defect_level, message, exception_type=IOError): + """ + This method should be called for any defect found during file parsing. + It may raise an IOError exception according to the minimal level chosen + for the OleFileIO object. + + :param defect_level: defect level, possible values are: + + - DEFECT_UNSURE : a case which looks weird, but not sure it's a defect + - DEFECT_POTENTIAL : a potential defect + - DEFECT_INCORRECT : an error according to specifications, but parsing can go on + - DEFECT_FATAL : an error which cannot be ignored, parsing is impossible + + :param message: string describing the defect, used with raised exception. + :param exception_type: exception class to be raised, IOError by default + """ + # added by [PL] + if defect_level >= self._raise_defects_level: + raise exception_type(message) + else: + # just record the issue, no exception raised: + self.parsing_issues.append((exception_type, message)) + + def _decode_utf16_str(self, utf16_str, errors='replace'): + """ + Decode a string encoded in UTF-16 LE format, as found in the OLE + directory or in property streams. Return a string encoded + according to the path_encoding specified for the OleFileIO object. 
+ + :param utf16_str: bytes string encoded in UTF-16 LE format + :param errors: str, see python documentation for str.decode() + :return: str, encoded according to path_encoding + """ + unicode_str = utf16_str.decode('UTF-16LE', errors) + if self.path_encoding: + # an encoding has been specified for path names: + return unicode_str.encode(self.path_encoding, errors) + else: + # path_encoding=None, return the Unicode string as-is: + return unicode_str + + def open(self, filename, write_mode=False): + """ + Open an OLE2 file in read-only or read/write mode. + Read and parse the header, FAT and directory. + + :param filename: string-like or file-like object, OLE file to parse + + - if filename is a string smaller than 1536 bytes, it is the path + of the file to open. (bytes or unicode string) + - if filename is a string longer than 1535 bytes, it is parsed + as the content of an OLE file in memory. (bytes type only) + - if filename is a file-like object (with read, seek and tell methods), + it is parsed as-is. + + :param write_mode: bool, if True the file is opened in read/write mode instead + of read-only by default. (ignored if filename is not a path) + """ + self.write_mode = write_mode + # [PL] check if filename is a string-like or file-like object: + # (it is better to check for a read() method) + if hasattr(filename, 'read'): + #TODO: also check seek and tell methods? + # file-like object: use it directly + self.fp = filename + elif isinstance(filename, bytes) and len(filename) >= MINIMAL_OLEFILE_SIZE: + # filename is a bytes string containing the OLE file to be parsed: + # convert it to BytesIO + self.fp = io.BytesIO(filename) + else: + # string-like object: filename of file on disk + if self.write_mode: + # open file in mode 'read with update, binary' + # According to https://docs.python.org/2/library/functions.html#open + # 'w' would truncate the file, 'a' may only append on some Unixes + mode = 'r+b' + else: + # read-only mode by default + mode = 'rb' + self.fp = open(filename, mode) + # obtain the filesize by using seek and tell, which should work on most + # file-like objects: + #TODO: do it above, using getsize with filename when possible? 
+ #TODO: fix code to fail with clear exception when filesize cannot be obtained + filesize = 0 + self.fp.seek(0, os.SEEK_END) + try: + filesize = self.fp.tell() + finally: + self.fp.seek(0) + self._filesize = filesize + + # lists of streams in FAT and MiniFAT, to detect duplicate references + # (list of indexes of first sectors of each stream) + self._used_streams_fat = [] + self._used_streams_minifat = [] + + header = self.fp.read(512) + + if len(header) != 512 or header[:8] != MAGIC: + self.raise_defect(DEFECT_FATAL, "not an OLE2 structured storage file") + + # [PL] header structure according to AAF specifications: + ##Header + ##struct StructuredStorageHeader { // [offset from start (bytes), length (bytes)] + ##BYTE _abSig[8]; // [00H,08] {0xd0, 0xcf, 0x11, 0xe0, 0xa1, 0xb1, + ## // 0x1a, 0xe1} for current version + ##CLSID _clsid; // [08H,16] reserved must be zero (WriteClassStg/ + ## // GetClassFile uses root directory class id) + ##USHORT _uMinorVersion; // [18H,02] minor version of the format: 33 is + ## // written by reference implementation + ##USHORT _uDllVersion; // [1AH,02] major version of the dll/format: 3 for + ## // 512-byte sectors, 4 for 4 KB sectors + ##USHORT _uByteOrder; // [1CH,02] 0xFFFE: indicates Intel byte-ordering + ##USHORT _uSectorShift; // [1EH,02] size of sectors in power-of-two; + ## // typically 9 indicating 512-byte sectors + ##USHORT _uMiniSectorShift; // [20H,02] size of mini-sectors in power-of-two; + ## // typically 6 indicating 64-byte mini-sectors + ##USHORT _usReserved; // [22H,02] reserved, must be zero + ##ULONG _ulReserved1; // [24H,04] reserved, must be zero + ##FSINDEX _csectDir; // [28H,04] must be zero for 512-byte sectors, + ## // number of SECTs in directory chain for 4 KB + ## // sectors + ##FSINDEX _csectFat; // [2CH,04] number of SECTs in the FAT chain + ##SECT _sectDirStart; // [30H,04] first SECT in the directory chain + ##DFSIGNATURE _signature; // [34H,04] signature used for transactions; must + ## // be zero. The reference implementation + ## // does not support transactions + ##ULONG _ulMiniSectorCutoff; // [38H,04] maximum size for a mini stream; + ## // typically 4096 bytes + ##SECT _sectMiniFatStart; // [3CH,04] first SECT in the MiniFAT chain + ##FSINDEX _csectMiniFat; // [40H,04] number of SECTs in the MiniFAT chain + ##SECT _sectDifStart; // [44H,04] first SECT in the DIFAT chain + ##FSINDEX _csectDif; // [48H,04] number of SECTs in the DIFAT chain + ##SECT _sectFat[109]; // [4CH,436] the SECTs of first 109 FAT sectors + ##}; + + # [PL] header decoding: + # '<' indicates little-endian byte ordering for Intel (cf. 
struct module help) + fmt_header = '<8s16sHHHHHHLLLLLLLLLL' + header_size = struct.calcsize(fmt_header) + debug("fmt_header size = %d, +FAT = %d" % (header_size, header_size + 109*4)) + header1 = header[:header_size] + ( + self.Sig, + self.clsid, + self.MinorVersion, + self.DllVersion, + self.ByteOrder, + self.SectorShift, + self.MiniSectorShift, + self.Reserved, self.Reserved1, + self.csectDir, + self.csectFat, + self.sectDirStart, + self.signature, + self.MiniSectorCutoff, + self.MiniFatStart, + self.csectMiniFat, + self.sectDifStart, + self.csectDif + ) = struct.unpack(fmt_header, header1) + debug(struct.unpack(fmt_header, header1)) + + if self.Sig != MAGIC: + # OLE signature should always be present + self.raise_defect(DEFECT_FATAL, "incorrect OLE signature") + if self.clsid != bytearray(16): + # according to AAF specs, CLSID should always be zero + self.raise_defect(DEFECT_INCORRECT, "incorrect CLSID in OLE header") + debug("MinorVersion = %d" % self.MinorVersion) + debug("DllVersion = %d" % self.DllVersion) + if self.DllVersion not in [3, 4]: + # version 3: usual format, 512 bytes per sector + # version 4: large format, 4K per sector + self.raise_defect(DEFECT_INCORRECT, "incorrect DllVersion in OLE header") + debug("ByteOrder = %X" % self.ByteOrder) + if self.ByteOrder != 0xFFFE: + # For now only common little-endian documents are handled correctly + self.raise_defect(DEFECT_FATAL, "incorrect ByteOrder in OLE header") + # TODO: add big-endian support for documents created on Mac ? + # But according to [MS-CFB] ? v20140502, ByteOrder MUST be 0xFFFE. + self.SectorSize = 2**self.SectorShift + debug("SectorSize = %d" % self.SectorSize) + if self.SectorSize not in [512, 4096]: + self.raise_defect(DEFECT_INCORRECT, "incorrect SectorSize in OLE header") + if (self.DllVersion == 3 and self.SectorSize != 512) \ + or (self.DllVersion == 4 and self.SectorSize != 4096): + self.raise_defect(DEFECT_INCORRECT, "SectorSize does not match DllVersion in OLE header") + self.MiniSectorSize = 2**self.MiniSectorShift + debug("MiniSectorSize = %d" % self.MiniSectorSize) + if self.MiniSectorSize not in [64]: + self.raise_defect(DEFECT_INCORRECT, "incorrect MiniSectorSize in OLE header") + if self.Reserved != 0 or self.Reserved1 != 0: + self.raise_defect(DEFECT_INCORRECT, "incorrect OLE header (non-null reserved bytes)") + debug("csectDir = %d" % self.csectDir) + # Number of directory sectors (only allowed if DllVersion != 3) + if self.SectorSize == 512 and self.csectDir != 0: + self.raise_defect(DEFECT_INCORRECT, "incorrect csectDir in OLE header") + debug("csectFat = %d" % self.csectFat) + # csectFat = number of FAT sectors in the file + debug("sectDirStart = %X" % self.sectDirStart) + # sectDirStart = 1st sector containing the directory + debug("signature = %d" % self.signature) + # Signature should be zero, BUT some implementations do not follow this + # rule => only a potential defect: + # (according to MS-CFB, may be != 0 for applications supporting file + # transactions) + if self.signature != 0: + self.raise_defect(DEFECT_POTENTIAL, "incorrect OLE header (signature>0)") + debug("MiniSectorCutoff = %d" % self.MiniSectorCutoff) + # MS-CFB: This integer field MUST be set to 0x00001000. This field + # specifies the maximum size of a user-defined data stream allocated + # from the mini FAT and mini stream, and that cutoff is 4096 bytes. + # Any user-defined data stream larger than or equal to this cutoff size + # must be allocated as normal sectors from the FAT. 
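+        # (Concretely: a 4000-byte stream is stored in 64-byte mini sectors
+        #  inside the MiniStream, while a 5000-byte stream occupies regular
+        #  FAT sectors; see the size test in _open() below.)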
+ if self.MiniSectorCutoff != 0x1000: + self.raise_defect(DEFECT_INCORRECT, "incorrect MiniSectorCutoff in OLE header") + debug("MiniFatStart = %X" % self.MiniFatStart) + debug("csectMiniFat = %d" % self.csectMiniFat) + debug("sectDifStart = %X" % self.sectDifStart) + debug("csectDif = %d" % self.csectDif) + + # calculate the number of sectors in the file + # (-1 because header doesn't count) + self.nb_sect = ((filesize + self.SectorSize-1) // self.SectorSize) - 1 + debug("Number of sectors in the file: %d" % self.nb_sect) + #TODO: change this test, because an OLE file MAY contain other data + # after the last sector. + + # file clsid + self.clsid = _clsid(header[8:24]) + + #TODO: remove redundant attributes, and fix the code which uses them? + self.sectorsize = self.SectorSize #1 << i16(header, 30) + self.minisectorsize = self.MiniSectorSize #1 << i16(header, 32) + self.minisectorcutoff = self.MiniSectorCutoff # i32(header, 56) + + # check known streams for duplicate references (these are always in FAT, + # never in MiniFAT): + self._check_duplicate_stream(self.sectDirStart) + # check MiniFAT only if it is not empty: + if self.csectMiniFat: + self._check_duplicate_stream(self.MiniFatStart) + # check DIFAT only if it is not empty: + if self.csectDif: + self._check_duplicate_stream(self.sectDifStart) + + # Load file allocation tables + self.loadfat(header) + # Load directory. This sets both the direntries list (ordered by sid) + # and the root (ordered by hierarchy) members. + self.loaddirectory(self.sectDirStart)#i32(header, 48)) + self.ministream = None + self.minifatsect = self.MiniFatStart #i32(header, 60) + + def close(self): + """ + close the OLE file, to release the file object + """ + self.fp.close() + + def _check_duplicate_stream(self, first_sect, minifat=False): + """ + Checks if a stream has not been already referenced elsewhere. + This method should only be called once for each known stream, and only + if stream size is not null. + + :param first_sect: int, index of first sector of the stream in FAT + :param minifat: bool, if True, stream is located in the MiniFAT, else in the FAT + """ + if minifat: + debug('_check_duplicate_stream: sect=%d in MiniFAT' % first_sect) + used_streams = self._used_streams_minifat + else: + debug('_check_duplicate_stream: sect=%d in FAT' % first_sect) + # some values can be safely ignored (not a real stream): + if first_sect in (DIFSECT, FATSECT, ENDOFCHAIN, FREESECT): + return + used_streams = self._used_streams_fat + #TODO: would it be more efficient using a dict or hash values, instead + # of a list of long ? + if first_sect in used_streams: + self.raise_defect(DEFECT_INCORRECT, 'Stream referenced twice') + else: + used_streams.append(first_sect) + + def dumpfat(self, fat, firstindex=0): + "Displays a part of FAT in human-readable form for debugging purpose" + # [PL] added only for debug + if not DEBUG_MODE: + return + # dictionary to convert special FAT values in human-readable strings + VPL = 8 # values per line (8+1 * 8+1 = 81) + fatnames = { + FREESECT: "..free..", + ENDOFCHAIN: "[ END. 
]", + FATSECT: "FATSECT ", + DIFSECT: "DIFSECT " + } + nbsect = len(fat) + nlines = (nbsect+VPL-1)//VPL + print("index", end=" ") + for i in range(VPL): + print("%8X" % i, end=" ") + print() + for l in range(nlines): + index = l*VPL + print("%8X:" % (firstindex+index), end=" ") + for i in range(index, index+VPL): + if i >= nbsect: + break + sect = fat[i] + aux = sect & 0xFFFFFFFF # JYTHON-WORKAROUND + if aux in fatnames: + name = fatnames[aux] + else: + if sect == i+1: + name = " --->" + else: + name = "%8X" % sect + print(name, end=" ") + print() + + def dumpsect(self, sector, firstindex=0): + "Displays a sector in a human-readable form, for debugging purpose." + if not DEBUG_MODE: + return + VPL = 8 # number of values per line (8+1 * 8+1 = 81) + tab = array.array(UINT32, sector) + if sys.byteorder == 'big': + tab.byteswap() + nbsect = len(tab) + nlines = (nbsect+VPL-1)//VPL + print("index", end=" ") + for i in range(VPL): + print("%8X" % i, end=" ") + print() + for l in range(nlines): + index = l*VPL + print("%8X:" % (firstindex+index), end=" ") + for i in range(index, index+VPL): + if i >= nbsect: + break + sect = tab[i] + name = "%8X" % sect + print(name, end=" ") + print() + + def sect2array(self, sect): + """ + convert a sector to an array of 32 bits unsigned integers, + swapping bytes on big endian CPUs such as PowerPC (old Macs) + """ + a = array.array(UINT32, sect) + # if CPU is big endian, swap bytes: + if sys.byteorder == 'big': + a.byteswap() + return a + + def loadfat_sect(self, sect): + """ + Adds the indexes of the given sector to the FAT + + :param sect: string containing the first FAT sector, or array of long integers + :returns: index of last FAT sector. + """ + # a FAT sector is an array of ulong integers. + if isinstance(sect, array.array): + # if sect is already an array it is directly used + fat1 = sect + else: + # if it's a raw sector, it is parsed in an array + fat1 = self.sect2array(sect) + self.dumpsect(sect) + # The FAT is a sector chain starting at the first index of itself. + for isect in fat1: + isect = isect & 0xFFFFFFFF # JYTHON-WORKAROUND + debug("isect = %X" % isect) + if isect == ENDOFCHAIN or isect == FREESECT: + # the end of the sector chain has been reached + debug("found end of sector chain") + break + # read the FAT sector + s = self.getsect(isect) + # parse it as an array of 32 bits integers, and add it to the + # global FAT array + nextfat = self.sect2array(s) + self.fat = self.fat + nextfat + return isect + + def loadfat(self, header): + """ + Load the FAT table. + """ + # The 1st sector of the file contains sector numbers for the first 109 + # FAT sectors, right after the header which is 76 bytes long. + # (always 109, whatever the sector size: 512 bytes = 76+4*109) + # Additional sectors are described by DIF blocks + + sect = header[76:512] + debug("len(sect)=%d, so %d integers" % (len(sect), len(sect)//4)) + #fat = [] + # [PL] FAT is an array of 32 bits unsigned ints, it's more effective + # to use an array than a list in Python. 
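+        # (Each 512-byte FAT sector holds 128 entries, so the 109 header
+        #  entries alone cover 109*128 = 13952 sectors, about 6.8 MiB of file;
+        #  beyond that, the DIFAT handled further down supplies the additional
+        #  FAT sector numbers.)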
+ # It's initialized as empty first: + self.fat = array.array(UINT32) + self.loadfat_sect(sect) + #self.dumpfat(self.fat) +## for i in range(0, len(sect), 4): +## ix = i32(sect, i) +## # [PL] if ix == -2 or ix == -1: # ix == 0xFFFFFFFE or ix == 0xFFFFFFFF: +## if ix == 0xFFFFFFFE or ix == 0xFFFFFFFF: +## break +## s = self.getsect(ix) +## #fat = fat + [i32(s, i) for i in range(0, len(s), 4)] +## fat = fat + array.array(UINT32, s) + if self.csectDif != 0: + # [PL] There's a DIFAT because file is larger than 6.8MB + # some checks just in case: + if self.csectFat <= 109: + # there must be at least 109 blocks in header and the rest in + # DIFAT, so number of sectors must be >109. + self.raise_defect(DEFECT_INCORRECT, 'incorrect DIFAT, not enough sectors') + if self.sectDifStart >= self.nb_sect: + # initial DIFAT block index must be valid + self.raise_defect(DEFECT_FATAL, 'incorrect DIFAT, first index out of range') + debug("DIFAT analysis...") + # We compute the necessary number of DIFAT sectors : + # Number of pointers per DIFAT sector = (sectorsize/4)-1 + # (-1 because the last pointer is the next DIFAT sector number) + nb_difat_sectors = (self.sectorsize//4)-1 + # (if 512 bytes: each DIFAT sector = 127 pointers + 1 towards next DIFAT sector) + nb_difat = (self.csectFat-109 + nb_difat_sectors-1)//nb_difat_sectors + debug("nb_difat = %d" % nb_difat) + if self.csectDif != nb_difat: + raise IOError('incorrect DIFAT') + isect_difat = self.sectDifStart + for i in iterrange(nb_difat): + debug("DIFAT block %d, sector %X" % (i, isect_difat)) + #TODO: check if corresponding FAT SID = DIFSECT + sector_difat = self.getsect(isect_difat) + difat = self.sect2array(sector_difat) + self.dumpsect(sector_difat) + self.loadfat_sect(difat[:nb_difat_sectors]) + # last DIFAT pointer is next DIFAT sector: + isect_difat = difat[nb_difat_sectors] + debug("next DIFAT sector: %X" % isect_difat) + # checks: + if isect_difat not in [ENDOFCHAIN, FREESECT]: + # last DIFAT pointer value must be ENDOFCHAIN or FREESECT + raise IOError('incorrect end of DIFAT') +## if len(self.fat) != self.csectFat: +## # FAT should contain csectFat blocks +## print("FAT length: %d instead of %d" % (len(self.fat), self.csectFat)) +## raise IOError('incorrect DIFAT') + # since FAT is read from fixed-size sectors, it may contain more values + # than the actual number of sectors in the file. + # Keep only the relevant sector indexes: + if len(self.fat) > self.nb_sect: + debug('len(fat)=%d, shrunk to nb_sect=%d' % (len(self.fat), self.nb_sect)) + self.fat = self.fat[:self.nb_sect] + debug('\nFAT:') + self.dumpfat(self.fat) + + def loadminifat(self): + """ + Load the MiniFAT table. + """ + # MiniFAT is stored in a standard sub-stream, pointed to by a header + # field. + # NOTE: there are two sizes to take into account for this stream: + # 1) Stream size is calculated according to the number of sectors + # declared in the OLE header. This allocated stream may be more than + # needed to store the actual sector indexes. 
+ # (self.csectMiniFat is the number of sectors of size self.SectorSize) + stream_size = self.csectMiniFat * self.SectorSize + # 2) Actually used size is calculated by dividing the MiniStream size + # (given by root entry size) by the size of mini sectors, *4 for + # 32 bits indexes: + nb_minisectors = (self.root.size + self.MiniSectorSize-1) // self.MiniSectorSize + used_size = nb_minisectors * 4 + debug('loadminifat(): minifatsect=%d, nb FAT sectors=%d, used_size=%d, stream_size=%d, nb MiniSectors=%d' % + (self.minifatsect, self.csectMiniFat, used_size, stream_size, nb_minisectors)) + if used_size > stream_size: + # This is not really a problem, but may indicate a wrong implementation: + self.raise_defect(DEFECT_INCORRECT, 'OLE MiniStream is larger than MiniFAT') + # In any case, first read stream_size: + s = self._open(self.minifatsect, stream_size, force_FAT=True).read() + # [PL] Old code replaced by an array: + # self.minifat = [i32(s, i) for i in range(0, len(s), 4)] + self.minifat = self.sect2array(s) + # Then shrink the array to used size, to avoid indexes out of MiniStream: + debug('MiniFAT shrunk from %d to %d sectors' % (len(self.minifat), nb_minisectors)) + self.minifat = self.minifat[:nb_minisectors] + debug('loadminifat(): len=%d' % len(self.minifat)) + debug('\nMiniFAT:') + self.dumpfat(self.minifat) + + def getsect(self, sect): + """ + Read given sector from file on disk. + + :param sect: int, sector index + :returns: a string containing the sector data. + """ + # From [MS-CFB]: A sector number can be converted into a byte offset + # into the file by using the following formula: + # (sector number + 1) x Sector Size. + # This implies that sector #0 of the file begins at byte offset Sector + # Size, not at 0. + + # [PL] the original code in PIL was wrong when sectors are 4KB instead of + # 512 bytes: + # self.fp.seek(512 + self.sectorsize * sect) + # [PL]: added safety checks: + # print("getsect(%X)" % sect) + try: + self.fp.seek(self.sectorsize * (sect+1)) + except: + debug('getsect(): sect=%X, seek=%d, filesize=%d' % + (sect, self.sectorsize*(sect+1), self._filesize)) + self.raise_defect(DEFECT_FATAL, 'OLE sector index out of range') + sector = self.fp.read(self.sectorsize) + if len(sector) != self.sectorsize: + debug('getsect(): sect=%X, read=%d, sectorsize=%d' % + (sect, len(sector), self.sectorsize)) + self.raise_defect(DEFECT_FATAL, 'incomplete OLE sector') + return sector + + def write_sect(self, sect, data, padding=b'\x00'): + """ + Write given sector to file on disk. + + :param sect: int, sector index + :param data: bytes, sector data + :param padding: single byte, padding character if data < sector size + """ + if not isinstance(data, bytes): + raise TypeError("write_sect: data must be a bytes string") + if not isinstance(padding, bytes) or len(padding) != 1: + raise TypeError("write_sect: padding must be a bytes string of 1 char") + #TODO: we could allow padding=None for no padding at all + try: + self.fp.seek(self.sectorsize * (sect+1)) + except: + debug('write_sect(): sect=%X, seek=%d, filesize=%d' % + (sect, self.sectorsize*(sect+1), self._filesize)) + self.raise_defect(DEFECT_FATAL, 'OLE sector index out of range') + if len(data) < self.sectorsize: + # add padding + data += padding * (self.sectorsize - len(data)) + elif len(data) < self.sectorsize: + raise ValueError("Data is larger than sector size") + self.fp.write(data) + + def loaddirectory(self, sect): + """ + Load the directory. + + :param sect: sector index of directory stream. 
+ """ + # The directory is stored in a standard + # substream, independent of its size. + + # open directory stream as a read-only file: + # (stream size is not known in advance) + self.directory_fp = self._open(sect) + + # [PL] to detect malformed documents and avoid DoS attacks, the maximum + # number of directory entries can be calculated: + max_entries = self.directory_fp.size // 128 + debug('loaddirectory: size=%d, max_entries=%d' % + (self.directory_fp.size, max_entries)) + + # Create list of directory entries + # self.direntries = [] + # We start with a list of "None" object + self.direntries = [None] * max_entries +## for sid in iterrange(max_entries): +## entry = fp.read(128) +## if not entry: +## break +## self.direntries.append(_OleDirectoryEntry(entry, sid, self)) + # load root entry: + root_entry = self._load_direntry(0) + # Root entry is the first entry: + self.root = self.direntries[0] + # read and build all storage trees, starting from the root: + self.root.build_storage_tree() + + def _load_direntry(self, sid): + """ + Load a directory entry from the directory. + This method should only be called once for each storage/stream when + loading the directory. + + :param sid: index of storage/stream in the directory. + :returns: a _OleDirectoryEntry object + + :exception IOError: if the entry has always been referenced. + """ + # check if SID is OK: + if sid < 0 or sid >= len(self.direntries): + self.raise_defect(DEFECT_FATAL, "OLE directory index out of range") + # check if entry was already referenced: + if self.direntries[sid] is not None: + self.raise_defect(DEFECT_INCORRECT, + "double reference for OLE stream/storage") + # if exception not raised, return the object + return self.direntries[sid] + self.directory_fp.seek(sid * 128) + entry = self.directory_fp.read(128) + self.direntries[sid] = _OleDirectoryEntry(entry, sid, self) + return self.direntries[sid] + + def dumpdirectory(self): + """ + Dump directory (for debugging only) + """ + self.root.dump() + + def _open(self, start, size = 0x7FFFFFFF, force_FAT=False): + """ + Open a stream, either in FAT or MiniFAT according to its size. + (openstream helper) + + :param start: index of first sector + :param size: size of stream (or nothing if size is unknown) + :param force_FAT: if False (default), stream will be opened in FAT or MiniFAT + according to size. If True, it will always be opened in FAT. 
+ """ + debug('OleFileIO.open(): sect=%d, size=%d, force_FAT=%s' % + (start, size, str(force_FAT))) + # stream size is compared to the MiniSectorCutoff threshold: + if size < self.minisectorcutoff and not force_FAT: + # ministream object + if not self.ministream: + # load MiniFAT if it wasn't already done: + self.loadminifat() + # The first sector index of the miniFAT stream is stored in the + # root directory entry: + size_ministream = self.root.size + debug('Opening MiniStream: sect=%d, size=%d' % + (self.root.isectStart, size_ministream)) + self.ministream = self._open(self.root.isectStart, + size_ministream, force_FAT=True) + return _OleStream(fp=self.ministream, sect=start, size=size, + offset=0, sectorsize=self.minisectorsize, + fat=self.minifat, filesize=self.ministream.size) + else: + # standard stream + return _OleStream(fp=self.fp, sect=start, size=size, + offset=self.sectorsize, + sectorsize=self.sectorsize, fat=self.fat, + filesize=self._filesize) + + def _list(self, files, prefix, node, streams=True, storages=False): + """ + listdir helper + + :param files: list of files to fill in + :param prefix: current location in storage tree (list of names) + :param node: current node (_OleDirectoryEntry object) + :param streams: bool, include streams if True (True by default) - new in v0.26 + :param storages: bool, include storages if True (False by default) - new in v0.26 + (note: the root storage is never included) + """ + prefix = prefix + [node.name] + for entry in node.kids: + if entry.entry_type == STGTY_STORAGE: + # this is a storage + if storages: + # add it to the list + files.append(prefix[1:] + [entry.name]) + # check its kids + self._list(files, prefix, entry, streams, storages) + elif entry.entry_type == STGTY_STREAM: + # this is a stream + if streams: + # add it to the list + files.append(prefix[1:] + [entry.name]) + else: + self.raise_defect(DEFECT_INCORRECT, 'The directory tree contains an entry which is not a stream nor a storage.') + + def listdir(self, streams=True, storages=False): + """ + Return a list of streams and/or storages stored in this file + + :param streams: bool, include streams if True (True by default) - new in v0.26 + :param storages: bool, include storages if True (False by default) - new in v0.26 + (note: the root storage is never included) + :returns: list of stream and/or storage paths + """ + files = [] + self._list(files, [], self.root, streams, storages) + return files + + def _find(self, filename): + """ + Returns directory entry of given filename. (openstream helper) + Note: this method is case-insensitive. + + :param filename: path of stream in storage tree (except root entry), either: + + - a string using Unix path syntax, for example: + 'storage_1/storage_1.2/stream' + - or a list of storage filenames, path to the desired stream/storage. + Example: ['storage_1', 'storage_1.2', 'stream'] + + :returns: sid of requested filename + :exception IOError: if file not found + """ + + # if filename is a string instead of a list, split it on slashes to + # convert to a list: + if isinstance(filename, basestring): + filename = filename.split('/') + # walk across storage tree, following given path: + node = self.root + for name in filename: + for kid in node.kids: + if kid.name.lower() == name.lower(): + break + else: + raise IOError("file not found") + node = kid + return node.sid + + def openstream(self, filename): + """ + Open a stream as a read-only file object (BytesIO). + Note: filename is case-insensitive. 
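+
+        A minimal usage sketch (the stream name is only an example; actual
+        names depend on the application that created the file)::
+
+            ole = OleFileIO('document.doc')
+            if ole.exists('WordDocument'):
+                data = ole.openstream('WordDocument').read()
+            ole.close()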
+
+        :param filename: path of stream in storage tree (except root entry), either:
+
+            - a string using Unix path syntax, for example:
+              'storage_1/storage_1.2/stream'
+            - or a list of storage filenames, path to the desired stream/storage.
+              Example: ['storage_1', 'storage_1.2', 'stream']
+
+        :returns: file object (read-only)
+        :exception IOError: if filename not found, or if this is not a stream.
+        """
+        sid = self._find(filename)
+        entry = self.direntries[sid]
+        if entry.entry_type != STGTY_STREAM:
+            raise IOError("this file is not a stream")
+        return self._open(entry.isectStart, entry.size)
+
+    def write_stream(self, stream_name, data):
+        """
+        Write a stream to disk. For now, it is only possible to replace an
+        existing stream by data of the same size.
+
+        :param stream_name: path of stream in storage tree (except root entry), either:
+
+            - a string using Unix path syntax, for example:
+              'storage_1/storage_1.2/stream'
+            - or a list of storage filenames, path to the desired stream/storage.
+              Example: ['storage_1', 'storage_1.2', 'stream']
+
+        :param data: bytes, data to be written, must be the same size as the original
+            stream.
+        """
+        if not isinstance(data, bytes):
+            raise TypeError("write_stream: data must be a bytes string")
+        sid = self._find(stream_name)
+        entry = self.direntries[sid]
+        if entry.entry_type != STGTY_STREAM:
+            raise IOError("this is not a stream")
+        size = entry.size
+        if size != len(data):
+            raise ValueError("write_stream: data must be the same size as the existing stream")
+        if size < self.minisectorcutoff:
+            raise NotImplementedError("Writing a stream in MiniFAT is not implemented yet")
+        sect = entry.isectStart
+        # number of sectors to write
+        nb_sectors = (size + (self.sectorsize-1)) // self.sectorsize
+        debug('nb_sectors = %d' % nb_sectors)
+        for i in range(nb_sectors):
+            # try:
+            #     self.fp.seek(offset + self.sectorsize * sect)
+            # except:
+            #     debug('sect=%d, seek=%d' %
+            #           (sect, offset+self.sectorsize*sect))
+            #     raise IOError('OLE sector index out of range')
+            # extract one sector from data, the last one being smaller:
+            if i < (nb_sectors-1):
+                data_sector = data[i*self.sectorsize:(i+1)*self.sectorsize]
+                #TODO: comment this if it works
+                assert(len(data_sector) == self.sectorsize)
+            else:
+                data_sector = data[i*self.sectorsize:]
+                # TODO: comment this if it works
+                debug('write_stream: size=%d sectorsize=%d data_sector=%d size%%sectorsize=%d'
+                      % (size, self.sectorsize, len(data_sector), size % self.sectorsize))
+                assert(len(data_sector) % self.sectorsize == size % self.sectorsize)
+            self.write_sect(sect, data_sector)
+#            self.fp.write(data_sector)
+            # jump to next sector in the FAT:
+            try:
+                sect = self.fat[sect]
+            except IndexError:
+                # [PL] if pointer is out of the FAT an exception is raised
+                raise IOError('incorrect OLE FAT, sector index out of range')
+        # [PL] Last sector should be a "end of chain" marker:
+        if sect != ENDOFCHAIN:
+            raise IOError('incorrect last sector index in OLE stream')
+
+    def get_type(self, filename):
+        """
+        Test if given filename exists as a stream or a storage in the OLE
+        container, and return its type.
+
+        :param filename: path of stream in storage tree. (see openstream for syntax)
+        :returns: False if object does not exist, its entry type (>0) otherwise:
+
+            - STGTY_STREAM: a stream
+            - STGTY_STORAGE: a storage
+            - STGTY_ROOT: the root entry
+        """
+        try:
+            sid = self._find(filename)
+            entry = self.direntries[sid]
+            return entry.entry_type
+        except:
+            return False
+
+    def getmtime(self, filename):
+        """
+        Return modification time of a stream/storage.
+
+        :param filename: path of stream/storage in storage tree. (see openstream for
+            syntax)
+        :returns: None if modification time is null, a python datetime object
+            otherwise (UTC timezone)
+
+        new in version 0.26
+        """
+        sid = self._find(filename)
+        entry = self.direntries[sid]
+        return entry.getmtime()
+
+    def getctime(self, filename):
+        """
+        Return creation time of a stream/storage.
+
+        :param filename: path of stream/storage in storage tree. (see openstream for
+            syntax)
+        :returns: None if creation time is null, a python datetime object
+            otherwise (UTC timezone)
+
+        new in version 0.26
+        """
+        sid = self._find(filename)
+        entry = self.direntries[sid]
+        return entry.getctime()
+
+    def exists(self, filename):
+        """
+        Test if given filename exists as a stream or a storage in the OLE
+        container.
+        Note: filename is case-insensitive.
+
+        :param filename: path of stream in storage tree. (see openstream for syntax)
+        :returns: True if object exists, else False.
+        """
+        try:
+            sid = self._find(filename)
+            return True
+        except:
+            return False
+
+    def get_size(self, filename):
+        """
+        Return size of a stream in the OLE container, in bytes.
+
+        :param filename: path of stream in storage tree (see openstream for syntax)
+        :returns: size in bytes (long integer)
+        :exception IOError: if file not found
+        :exception TypeError: if this is not a stream.
+        """
+        sid = self._find(filename)
+        entry = self.direntries[sid]
+        if entry.entry_type != STGTY_STREAM:
+            #TODO: Should it return zero instead of raising an exception ?
+            raise TypeError('object is not an OLE stream')
+        return entry.size
+
+    def get_rootentry_name(self):
+        """
+        Return root entry name. Should usually be 'Root Entry' or 'R' in most
+        implementations.
+        """
+        return self.root.name
+
+    def getproperties(self, filename, convert_time=False, no_conversion=None):
+        """
+        Return properties described in substream.
+
+        :param filename: path of stream in storage tree (see openstream for syntax)
+        :param convert_time: bool, if True timestamps will be converted to Python datetime
+        :param no_conversion: None or list of int, timestamps not to be converted
+            (for example total editing time is not a real timestamp)
+
+        :returns: a dictionary of values indexed by id (integer)
+        """
+        # REFERENCE: [MS-OLEPS] https://msdn.microsoft.com/en-us/library/dd942421.aspx
+        # make sure no_conversion is a list, just to simplify code below:
+        if no_conversion is None:
+            no_conversion = []
+        # stream path as a string to report exceptions:
+        streampath = filename
+        if not isinstance(streampath, str):
+            streampath = '/'.join(streampath)
+
+        fp = self.openstream(filename)
+
+        data = {}
+
+        try:
+            # header
+            s = fp.read(28)
+            clsid = _clsid(s[8:24])
+
+            # format id
+            s = fp.read(20)
+            fmtid = _clsid(s[:16])
+            fp.seek(i32(s, 16))
+
+            # get section
+            s = b"****" + fp.read(i32(fp.read(4))-4)
+            # number of properties:
+            num_props = i32(s, 4)
+        except BaseException as exc:
+            # catch exception while parsing property header, and only raise
+            # a DEFECT_INCORRECT then return an empty dict, because this is not
+            # a fatal error when parsing the whole file
+            msg = 'Error while parsing properties header in stream %s: %s' % (
+                repr(streampath), exc)
+            self.raise_defect(DEFECT_INCORRECT, msg, type(exc))
+            return data
+
+        for i in range(num_props):
+            try:
+                id = 0  # just in case of an exception
+                id = i32(s, 8+i*8)
+                offset = i32(s, 12+i*8)
+                type = i32(s, offset)
+
+                debug('property id=%d: type=%d offset=%X' % (id, type, offset))
+
+                # test for common types first (should perhaps use
+                # a dictionary instead?)
+
+                if type == VT_I2:  # 16-bit signed integer
+                    value = i16(s, offset+4)
+                    if value >= 32768:
+                        value = value - 65536
+                elif type == VT_UI2:  # 2-byte unsigned integer
+                    value = i16(s, offset+4)
+                elif type in (VT_I4, VT_INT, VT_ERROR):
+                    # VT_I4: 32-bit signed integer
+                    # VT_ERROR: HRESULT, similar to 32-bit signed integer,
+                    # see http://msdn.microsoft.com/en-us/library/cc230330.aspx
+                    value = i32(s, offset+4)
+                elif type in (VT_UI4, VT_UINT):  # 4-byte unsigned integer
+                    value = i32(s, offset+4)  # FIXME
+                elif type in (VT_BSTR, VT_LPSTR):
+                    # CodePageString, see http://msdn.microsoft.com/en-us/library/dd942354.aspx
+                    # size is a 32 bits integer, including the null terminator, and
+                    # possibly trailing or embedded null chars
+                    #TODO: if codepage is unicode, the string should be converted as such
+                    count = i32(s, offset+4)
+                    value = s[offset+8:offset+8+count-1]
+                    # remove all null chars:
+                    value = value.replace(b'\x00', b'')
+                elif type == VT_BLOB:
+                    # binary large object (BLOB)
+                    # see http://msdn.microsoft.com/en-us/library/dd942282.aspx
+                    count = i32(s, offset+4)
+                    value = s[offset+8:offset+8+count]
+                elif type == VT_LPWSTR:
+                    # UnicodeString
+                    # see http://msdn.microsoft.com/en-us/library/dd942313.aspx
+                    # "the string should NOT contain embedded or additional trailing
+                    # null characters."
+                    count = i32(s, offset+4)
+                    value = self._decode_utf16_str(s[offset+8:offset+8+count*2])
+                elif type == VT_FILETIME:
+                    value = long(i32(s, offset+4)) + (long(i32(s, offset+8)) << 32)
+                    # FILETIME is a 64-bit int: "number of 100ns periods
+                    # since Jan 1,1601".
+                    if convert_time and id not in no_conversion:
+                        debug('Converting property #%d to python datetime, value=%d=%fs'
+                              % (id, value, float(value) / 10000000))
+                        # convert FILETIME to Python datetime.datetime
+                        # inspired from http://code.activestate.com/recipes/511425-filetime-to-datetime/
+                        _FILETIME_null_date = datetime.datetime(1601, 1, 1, 0, 0, 0)
+                        debug('timedelta days=%d' % (value//(10*1000000*3600*24)))
+                        value = _FILETIME_null_date + datetime.timedelta(microseconds=value//10)
+                    else:
+                        # legacy code kept for backward compatibility: returns a
+                        # number of seconds since Jan 1,1601
+                        value = value // 10000000  # seconds
+                elif type == VT_UI1:  # 1-byte unsigned integer
+                    value = i8(s[offset+4])
+                elif type == VT_CLSID:
+                    value = _clsid(s[offset+4:offset+20])
+                elif type == VT_CF:
+                    # PropertyIdentifier or ClipboardData??
+                    # see http://msdn.microsoft.com/en-us/library/dd941945.aspx
+                    count = i32(s, offset+4)
+                    value = s[offset+8:offset+8+count]
+                elif type == VT_BOOL:
+                    # VARIANT_BOOL, 16 bits bool, 0x0000=False, 0xFFFF=True
+                    # see http://msdn.microsoft.com/en-us/library/cc237864.aspx
+                    value = bool(i16(s, offset+4))
+                else:
+                    value = None  # everything else yields "None"
+                    debug('property id=%d: type=%d not implemented in parser yet' % (id, type))
+
+                # missing: VT_EMPTY, VT_NULL, VT_R4, VT_R8, VT_CY, VT_DATE,
+                # VT_DECIMAL, VT_I1, VT_I8, VT_UI8,
+                # see http://msdn.microsoft.com/en-us/library/dd942033.aspx
+
+                # FIXME: add support for VT_VECTOR
+                # VT_VECTOR is a 32 uint giving the number of items, followed by
+                # the items in sequence. The VT_VECTOR value is combined with the
+                # type of items, e.g. VT_VECTOR|VT_BSTR
+                # see http://msdn.microsoft.com/en-us/library/dd942011.aspx
+
+                # print("%08x" % id, repr(value), end=" ")
+                # print("(%s)" % VT[i32(s, offset) & 0xFFF])
+
+                data[id] = value
+            except BaseException as exc:
+                # catch exception while parsing each property, and only raise
+                # a DEFECT_INCORRECT, because parsing can go on
+                msg = 'Error while parsing property id %d in stream %s: %s' % (
+                    id, repr(streampath), exc)
+                self.raise_defect(DEFECT_INCORRECT, msg, type(exc))
+
+        return data
+
+    def get_metadata(self):
+        """
+        Parse standard properties streams, return an OleMetadata object
+        containing all the available metadata.
+        (also stored in the metadata attribute of the OleFileIO object)
+
+        new in version 0.25
+        """
+        self.metadata = OleMetadata()
+        self.metadata.parse_properties(self)
+        return self.metadata
+
+#
+# --------------------------------------------------------------------
+# This script can be used to dump the directory of any OLE2 structured
+# storage file.
+
+if __name__ == "__main__":
+
+    # [PL] display quick usage info if launched from command-line
+    if len(sys.argv) <= 1:
+        print('olefile version %s %s - %s' % (__version__, __date__, __author__))
+        print(
+"""
+Launched from the command line, this script parses OLE files and prints info.
+
+Usage: olefile.py [-d] [-c] <file> [file2 ...]
+ +Options: +-d : debug mode (displays a lot of debug information, for developers only) +-c : check all streams (for debugging purposes) + +For more information, see http://www.decalage.info/olefile +""") + sys.exit() + + check_streams = False + for filename in sys.argv[1:]: + # try: + # OPTIONS: + if filename == '-d': + # option to switch debug mode on: + set_debug_mode(True) + continue + if filename == '-c': + # option to switch check streams mode on: + check_streams = True + continue + + ole = OleFileIO(filename)#, raise_defects=DEFECT_INCORRECT) + print("-" * 68) + print(filename) + print("-" * 68) + ole.dumpdirectory() + for streamname in ole.listdir(): + if streamname[-1][0] == "\005": + print(streamname, ": properties") + props = ole.getproperties(streamname, convert_time=True) + props = sorted(props.items()) + for k, v in props: + # [PL]: avoid to display too large or binary values: + if isinstance(v, (basestring, bytes)): + if len(v) > 50: + v = v[:50] + if isinstance(v, bytes): + # quick and dirty binary check: + for c in (1, 2, 3, 4, 5, 6, 7, 11, 12, 14, 15, 16, 17, 18, 19, 20, + 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31): + if c in bytearray(v): + v = '(binary data)' + break + print(" ", k, v) + + if check_streams: + # Read all streams to check if there are errors: + print('\nChecking streams...') + for streamname in ole.listdir(): + # print name using repr() to convert binary chars to \xNN: + print('-', repr('/'.join(streamname)), '-', end=' ') + st_type = ole.get_type(streamname) + if st_type == STGTY_STREAM: + print('size %d' % ole.get_size(streamname)) + # just try to read stream in memory: + ole.openstream(streamname) + else: + print('NOT a stream : type=%d' % st_type) + print() + +# for streamname in ole.listdir(): +# # print name using repr() to convert binary chars to \xNN: +# print('-', repr('/'.join(streamname)),'-', end=' ') +# print(ole.getmtime(streamname)) +# print() + + print('Modification/Creation times of all directory entries:') + for entry in ole.direntries: + if entry is not None: + print('- %s: mtime=%s ctime=%s' % (entry.name, + entry.getmtime(), entry.getctime())) + print() + + # parse and display metadata: + meta = ole.get_metadata() + meta.dump() + print() + # [PL] Test a few new methods: + root = ole.get_rootentry_name() + print('Root entry name: "%s"' % root) + if ole.exists('worddocument'): + print("This is a Word document.") + print("type of stream 'WordDocument':", ole.get_type('worddocument')) + print("size :", ole.get_size('worddocument')) + if ole.exists('macros/vba'): + print("This document may contain VBA macros.") + + # print parsing issues: + print('\nNon-fatal issues raised during parsing:') + if ole.parsing_issues: + for exctype, msg in ole.parsing_issues: + print('- %s: %s' % (exctype.__name__, msg)) + else: + print('None') +## except IOError as v: +## print("***", "cannot read", file, "-", v) + +# this code was developed while listening to The Wedding Present "Sea Monsters" diff --git a/Lib/site-packages/PIL/PSDraw.py b/Lib/site-packages/PIL/PSDraw.py new file mode 100644 index 0000000..d4e7b18 --- /dev/null +++ b/Lib/site-packages/PIL/PSDraw.py @@ -0,0 +1,235 @@ +# +# The Python Imaging Library +# $Id$ +# +# simple postscript graphics interface +# +# History: +# 1996-04-20 fl Created +# 1999-01-10 fl Added gsave/grestore to image method +# 2005-05-04 fl Fixed floating point issue in image (from Eric Etheridge) +# +# Copyright (c) 1997-2005 by Secret Labs AB. All rights reserved. +# Copyright (c) 1996 by Fredrik Lundh. 
+# +# See the README file for information on usage and redistribution. +# + +from PIL import EpsImagePlugin +import sys + +## +# Simple Postscript graphics interface. + + +class PSDraw(object): + """ + Sets up printing to the given file. If **file** is omitted, + :py:attr:`sys.stdout` is assumed. + """ + + def __init__(self, fp=None): + if not fp: + fp = sys.stdout + self.fp = fp + + def _fp_write(self, to_write): + if bytes is str or self.fp == sys.stdout: + self.fp.write(to_write) + else: + self.fp.write(bytes(to_write, 'UTF-8')) + + def begin_document(self, id=None): + """Set up printing of a document. (Write Postscript DSC header.)""" + # FIXME: incomplete + self._fp_write("%!PS-Adobe-3.0\n" + "save\n" + "/showpage { } def\n" + "%%EndComments\n" + "%%BeginDocument\n") + # self._fp_write(ERROR_PS) # debugging! + self._fp_write(EDROFF_PS) + self._fp_write(VDI_PS) + self._fp_write("%%EndProlog\n") + self.isofont = {} + + def end_document(self): + """Ends printing. (Write Postscript DSC footer.)""" + self._fp_write("%%EndDocument\n" + "restore showpage\n" + "%%End\n") + if hasattr(self.fp, "flush"): + self.fp.flush() + + def setfont(self, font, size): + """ + Selects which font to use. + + :param font: A Postscript font name + :param size: Size in points. + """ + if font not in self.isofont: + # reencode font + self._fp_write("/PSDraw-%s ISOLatin1Encoding /%s E\n" % + (font, font)) + self.isofont[font] = 1 + # rough + self._fp_write("/F0 %d /PSDraw-%s F\n" % (size, font)) + + def line(self, xy0, xy1): + """ + Draws a line between the two points. Coordinates are given in + Postscript point coordinates (72 points per inch, (0, 0) is the lower + left corner of the page). + """ + xy = xy0 + xy1 + self._fp_write("%d %d %d %d Vl\n" % xy) + + def rectangle(self, box): + """ + Draws a rectangle. + + :param box: A 4-tuple of integers whose order and function is currently + undocumented. + + Hint: the tuple is passed into this format string: + + .. code-block:: python + + %d %d M %d %d 0 Vr\n + """ + self._fp_write("%d %d M %d %d 0 Vr\n" % box) + + def text(self, xy, text): + """ + Draws text at the given position. You must use + :py:meth:`~PIL.PSDraw.PSDraw.setfont` before calling this method. + """ + text = "\\(".join(text.split("(")) + text = "\\)".join(text.split(")")) + xy = xy + (text,) + self._fp_write("%d %d M (%s) S\n" % xy) + + def image(self, box, im, dpi=None): + """Draw a PIL image, centered in the given box.""" + # default resolution depends on mode + if not dpi: + if im.mode == "1": + dpi = 200 # fax + else: + dpi = 100 # greyscale + # image size (on paper) + x = float(im.size[0] * 72) / dpi + y = float(im.size[1] * 72) / dpi + # max allowed size + xmax = float(box[2] - box[0]) + ymax = float(box[3] - box[1]) + if x > xmax: + y = y * xmax / x + x = xmax + if y > ymax: + x = x * ymax / y + y = ymax + dx = (xmax - x) / 2 + box[0] + dy = (ymax - y) / 2 + box[1] + self._fp_write("gsave\n%f %f translate\n" % (dx, dy)) + if (x, y) != im.size: + # EpsImagePlugin._save prints the image at (0,0,xsize,ysize) + sx = x / im.size[0] + sy = y / im.size[1] + self._fp_write("%f %f scale\n" % (sx, sy)) + EpsImagePlugin._save(im, self.fp, None, 0) + self._fp_write("\ngrestore\n") + +# -------------------------------------------------------------------- +# Postscript driver + +# +# EDROFF.PS -- Postscript driver for Edroff 2 +# +# History: +# 94-01-25 fl: created (edroff 2.04) +# +# Copyright (c) Fredrik Lundh 1994. 
+# + +EDROFF_PS = """\ +/S { show } bind def +/P { moveto show } bind def +/M { moveto } bind def +/X { 0 rmoveto } bind def +/Y { 0 exch rmoveto } bind def +/E { findfont + dup maxlength dict begin + { + 1 index /FID ne { def } { pop pop } ifelse + } forall + /Encoding exch def + dup /FontName exch def + currentdict end definefont pop +} bind def +/F { findfont exch scalefont dup setfont + [ exch /setfont cvx ] cvx bind def +} bind def +""" + +# +# VDI.PS -- Postscript driver for VDI meta commands +# +# History: +# 94-01-25 fl: created (edroff 2.04) +# +# Copyright (c) Fredrik Lundh 1994. +# + +VDI_PS = """\ +/Vm { moveto } bind def +/Va { newpath arcn stroke } bind def +/Vl { moveto lineto stroke } bind def +/Vc { newpath 0 360 arc closepath } bind def +/Vr { exch dup 0 rlineto + exch dup neg 0 exch rlineto + exch neg 0 rlineto + 0 exch rlineto + 100 div setgray fill 0 setgray } bind def +/Tm matrix def +/Ve { Tm currentmatrix pop + translate scale newpath 0 0 .5 0 360 arc closepath + Tm setmatrix +} bind def +/Vf { currentgray exch setgray fill setgray } bind def +""" + +# +# ERROR.PS -- Error handler +# +# History: +# 89-11-21 fl: created (pslist 1.10) +# + +ERROR_PS = """\ +/landscape false def +/errorBUF 200 string def +/errorNL { currentpoint 10 sub exch pop 72 exch moveto } def +errordict begin /handleerror { + initmatrix /Courier findfont 10 scalefont setfont + newpath 72 720 moveto $error begin /newerror false def + (PostScript Error) show errorNL errorNL + (Error: ) show + /errorname load errorBUF cvs show errorNL errorNL + (Command: ) show + /command load dup type /stringtype ne { errorBUF cvs } if show + errorNL errorNL + (VMstatus: ) show + vmstatus errorBUF cvs show ( bytes available, ) show + errorBUF cvs show ( bytes used at level ) show + errorBUF cvs show errorNL errorNL + (Operand stargck: ) show errorNL /ostargck load { + dup type /stringtype ne { errorBUF cvs } if 72 0 rmoveto show errorNL + } forall errorNL + (Execution stargck: ) show errorNL /estargck load { + dup type /stringtype ne { errorBUF cvs } if 72 0 rmoveto show errorNL + } forall + end showpage +} def end +""" diff --git a/Lib/site-packages/PIL/PaletteFile.py b/Lib/site-packages/PIL/PaletteFile.py new file mode 100644 index 0000000..ef50fee --- /dev/null +++ b/Lib/site-packages/PIL/PaletteFile.py @@ -0,0 +1,55 @@ +# +# Python Imaging Library +# $Id$ +# +# stuff to read simple, teragon-style palette files +# +# History: +# 97-08-23 fl Created +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. +# + +from PIL._binary import o8 + + +## +# File handler for Teragon-style palette files. + +class PaletteFile(object): + + rawmode = "RGB" + + def __init__(self, fp): + + self.palette = [(i, i, i) for i in range(256)] + + while True: + + s = fp.readline() + + if not s: + break + if s[0:1] == b"#": + continue + if len(s) > 100: + raise SyntaxError("bad palette file") + + v = [int(x) for x in s.split()] + try: + [i, r, g, b] = v + except ValueError: + [i, r] = v + g = b = r + + if 0 <= i <= 255: + self.palette[i] = o8(r) + o8(g) + o8(b) + + self.palette = b"".join(self.palette) + + def getpalette(self): + + return self.palette, self.rawmode diff --git a/Lib/site-packages/PIL/PalmImagePlugin.py b/Lib/site-packages/PIL/PalmImagePlugin.py new file mode 100644 index 0000000..4f415ff --- /dev/null +++ b/Lib/site-packages/PIL/PalmImagePlugin.py @@ -0,0 +1,241 @@ +# +# The Python Imaging Library. 
+# $Id$ +# + +## +# Image plugin for Palm pixmap images (output only). +## + +from PIL import Image, ImageFile, _binary + +__version__ = "1.0" + +_Palm8BitColormapValues = ( + (255, 255, 255), (255, 204, 255), (255, 153, 255), (255, 102, 255), + (255, 51, 255), (255, 0, 255), (255, 255, 204), (255, 204, 204), + (255, 153, 204), (255, 102, 204), (255, 51, 204), (255, 0, 204), + (255, 255, 153), (255, 204, 153), (255, 153, 153), (255, 102, 153), + (255, 51, 153), (255, 0, 153), (204, 255, 255), (204, 204, 255), + (204, 153, 255), (204, 102, 255), (204, 51, 255), (204, 0, 255), + (204, 255, 204), (204, 204, 204), (204, 153, 204), (204, 102, 204), + (204, 51, 204), (204, 0, 204), (204, 255, 153), (204, 204, 153), + (204, 153, 153), (204, 102, 153), (204, 51, 153), (204, 0, 153), + (153, 255, 255), (153, 204, 255), (153, 153, 255), (153, 102, 255), + (153, 51, 255), (153, 0, 255), (153, 255, 204), (153, 204, 204), + (153, 153, 204), (153, 102, 204), (153, 51, 204), (153, 0, 204), + (153, 255, 153), (153, 204, 153), (153, 153, 153), (153, 102, 153), + (153, 51, 153), (153, 0, 153), (102, 255, 255), (102, 204, 255), + (102, 153, 255), (102, 102, 255), (102, 51, 255), (102, 0, 255), + (102, 255, 204), (102, 204, 204), (102, 153, 204), (102, 102, 204), + (102, 51, 204), (102, 0, 204), (102, 255, 153), (102, 204, 153), + (102, 153, 153), (102, 102, 153), (102, 51, 153), (102, 0, 153), + (51, 255, 255), (51, 204, 255), (51, 153, 255), (51, 102, 255), + (51, 51, 255), (51, 0, 255), (51, 255, 204), (51, 204, 204), + (51, 153, 204), (51, 102, 204), (51, 51, 204), (51, 0, 204), + (51, 255, 153), (51, 204, 153), (51, 153, 153), (51, 102, 153), + (51, 51, 153), (51, 0, 153), (0, 255, 255), (0, 204, 255), + (0, 153, 255), (0, 102, 255), (0, 51, 255), (0, 0, 255), + (0, 255, 204), (0, 204, 204), (0, 153, 204), (0, 102, 204), + (0, 51, 204), (0, 0, 204), (0, 255, 153), (0, 204, 153), + (0, 153, 153), (0, 102, 153), (0, 51, 153), (0, 0, 153), + (255, 255, 102), (255, 204, 102), (255, 153, 102), (255, 102, 102), + (255, 51, 102), (255, 0, 102), (255, 255, 51), (255, 204, 51), + (255, 153, 51), (255, 102, 51), (255, 51, 51), (255, 0, 51), + (255, 255, 0), (255, 204, 0), (255, 153, 0), (255, 102, 0), + (255, 51, 0), (255, 0, 0), (204, 255, 102), (204, 204, 102), + (204, 153, 102), (204, 102, 102), (204, 51, 102), (204, 0, 102), + (204, 255, 51), (204, 204, 51), (204, 153, 51), (204, 102, 51), + (204, 51, 51), (204, 0, 51), (204, 255, 0), (204, 204, 0), + (204, 153, 0), (204, 102, 0), (204, 51, 0), (204, 0, 0), + (153, 255, 102), (153, 204, 102), (153, 153, 102), (153, 102, 102), + (153, 51, 102), (153, 0, 102), (153, 255, 51), (153, 204, 51), + (153, 153, 51), (153, 102, 51), (153, 51, 51), (153, 0, 51), + (153, 255, 0), (153, 204, 0), (153, 153, 0), (153, 102, 0), + (153, 51, 0), (153, 0, 0), (102, 255, 102), (102, 204, 102), + (102, 153, 102), (102, 102, 102), (102, 51, 102), (102, 0, 102), + (102, 255, 51), (102, 204, 51), (102, 153, 51), (102, 102, 51), + (102, 51, 51), (102, 0, 51), (102, 255, 0), (102, 204, 0), + (102, 153, 0), (102, 102, 0), (102, 51, 0), (102, 0, 0), + (51, 255, 102), (51, 204, 102), (51, 153, 102), (51, 102, 102), + (51, 51, 102), (51, 0, 102), (51, 255, 51), (51, 204, 51), + (51, 153, 51), (51, 102, 51), (51, 51, 51), (51, 0, 51), + (51, 255, 0), (51, 204, 0), (51, 153, 0), (51, 102, 0), + (51, 51, 0), (51, 0, 0), (0, 255, 102), (0, 204, 102), + (0, 153, 102), (0, 102, 102), (0, 51, 102), (0, 0, 102), + (0, 255, 51), (0, 204, 51), (0, 153, 51), (0, 102, 51), + (0, 51, 51), (0, 0, 51), 
(0, 255, 0), (0, 204, 0), + (0, 153, 0), (0, 102, 0), (0, 51, 0), (17, 17, 17), + (34, 34, 34), (68, 68, 68), (85, 85, 85), (119, 119, 119), + (136, 136, 136), (170, 170, 170), (187, 187, 187), (221, 221, 221), + (238, 238, 238), (192, 192, 192), (128, 0, 0), (128, 0, 128), + (0, 128, 0), (0, 128, 128), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0), + (0, 0, 0), (0, 0, 0), (0, 0, 0), (0, 0, 0)) + + +# so build a prototype image to be used for palette resampling +def build_prototype_image(): + image = Image.new("L", (1, len(_Palm8BitColormapValues),)) + image.putdata(list(range(len(_Palm8BitColormapValues)))) + palettedata = () + for i in range(len(_Palm8BitColormapValues)): + palettedata = palettedata + _Palm8BitColormapValues[i] + for i in range(256 - len(_Palm8BitColormapValues)): + palettedata = palettedata + (0, 0, 0) + image.putpalette(palettedata) + return image + +Palm8BitColormapImage = build_prototype_image() + +# OK, we now have in Palm8BitColormapImage, +# a "P"-mode image with the right palette +# +# -------------------------------------------------------------------- + +_FLAGS = { + "custom-colormap": 0x4000, + "is-compressed": 0x8000, + "has-transparent": 0x2000, + } + +_COMPRESSION_TYPES = { + "none": 0xFF, + "rle": 0x01, + "scanline": 0x00, + } + +o8 = _binary.o8 +o16b = _binary.o16be + + +# +# -------------------------------------------------------------------- + +## +# (Internal) Image save plugin for the Palm format. + +def _save(im, fp, filename, check=0): + + if im.mode == "P": + + # we assume this is a color Palm image with the standard colormap, + # unless the "info" dict has a "custom-colormap" field + + rawmode = "P" + bpp = 8 + version = 1 + + elif (im.mode == "L" and + "bpp" in im.encoderinfo and + im.encoderinfo["bpp"] in (1, 2, 4)): + + # this is 8-bit grayscale, so we shift it to get the high-order bits, + # and invert it because + # Palm does greyscale from white (0) to black (1) + bpp = im.encoderinfo["bpp"] + im = im.point( + lambda x, shift=8-bpp, maxval=(1 << bpp)-1: maxval - (x >> shift)) + # we ignore the palette here + im.mode = "P" + rawmode = "P;" + str(bpp) + version = 1 + + elif im.mode == "L" and "bpp" in im.info and im.info["bpp"] in (1, 2, 4): + + # here we assume that even though the inherent mode is 8-bit grayscale, + # only the lower bpp bits are significant. + # We invert them to match the Palm. 
+ bpp = im.info["bpp"] + im = im.point(lambda x, maxval=(1 << bpp)-1: maxval - (x & maxval)) + # we ignore the palette here + im.mode = "P" + rawmode = "P;" + str(bpp) + version = 1 + + elif im.mode == "1": + + # monochrome -- write it inverted, as is the Palm standard + rawmode = "1;I" + bpp = 1 + version = 0 + + else: + + raise IOError("cannot write mode %s as Palm" % im.mode) + + if check: + return check + + # + # make sure image data is available + im.load() + + # write header + + cols = im.size[0] + rows = im.size[1] + + rowbytes = int((cols + (16//bpp - 1)) / (16 // bpp)) * 2 + transparent_index = 0 + compression_type = _COMPRESSION_TYPES["none"] + + flags = 0 + if im.mode == "P" and "custom-colormap" in im.info: + flags = flags & _FLAGS["custom-colormap"] + colormapsize = 4 * 256 + 2 + colormapmode = im.palette.mode + colormap = im.getdata().getpalette() + else: + colormapsize = 0 + + if "offset" in im.info: + offset = (rowbytes * rows + 16 + 3 + colormapsize) // 4 + else: + offset = 0 + + fp.write(o16b(cols) + o16b(rows) + o16b(rowbytes) + o16b(flags)) + fp.write(o8(bpp)) + fp.write(o8(version)) + fp.write(o16b(offset)) + fp.write(o8(transparent_index)) + fp.write(o8(compression_type)) + fp.write(o16b(0)) # reserved by Palm + + # now write colormap if necessary + + if colormapsize > 0: + fp.write(o16b(256)) + for i in range(256): + fp.write(o8(i)) + if colormapmode == 'RGB': + fp.write( + o8(colormap[3 * i]) + + o8(colormap[3 * i + 1]) + + o8(colormap[3 * i + 2])) + elif colormapmode == 'RGBA': + fp.write( + o8(colormap[4 * i]) + + o8(colormap[4 * i + 1]) + + o8(colormap[4 * i + 2])) + + # now convert data to raw form + ImageFile._save( + im, fp, [("raw", (0, 0)+im.size, 0, (rawmode, rowbytes, 1))]) + + if hasattr(fp, "flush"): + fp.flush() + + +# +# -------------------------------------------------------------------- + +Image.register_save("Palm", _save) + +Image.register_extension("Palm", ".palm") + +Image.register_mime("Palm", "image/palm") diff --git a/Lib/site-packages/PIL/PcdImagePlugin.py b/Lib/site-packages/PIL/PcdImagePlugin.py new file mode 100644 index 0000000..b53635a --- /dev/null +++ b/Lib/site-packages/PIL/PcdImagePlugin.py @@ -0,0 +1,59 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PCD file handling +# +# History: +# 96-05-10 fl Created +# 96-05-27 fl Added draft mode (128x192, 256x384) +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# + + +from PIL import Image, ImageFile, _binary + +__version__ = "0.1" + +i8 = _binary.i8 + + +## +# Image plugin for PhotoCD images. This plugin only reads the 768x512 +# image from the file; higher resolutions are encoded in a proprietary +# encoding. + +class PcdImageFile(ImageFile.ImageFile): + + format = "PCD" + format_description = "Kodak PhotoCD" + + def _open(self): + + # rough + self.fp.seek(2048) + s = self.fp.read(2048) + + if s[:4] != b"PCD_": + raise SyntaxError("not a PCD file") + + orientation = i8(s[1538]) & 3 + if orientation == 1: + self.tile_post_rotate = 90 # hack + elif orientation == 3: + self.tile_post_rotate = -90 + + self.mode = "RGB" + self.size = 768, 512 # FIXME: not correct for rotated images! 
+ self.tile = [("pcd", (0, 0)+self.size, 96*2048, None)] + +# +# registry + +Image.register_open(PcdImageFile.format, PcdImageFile) + +Image.register_extension(PcdImageFile.format, ".pcd") diff --git a/Lib/site-packages/PIL/PcfFontFile.py b/Lib/site-packages/PIL/PcfFontFile.py new file mode 100644 index 0000000..c200690 --- /dev/null +++ b/Lib/site-packages/PIL/PcfFontFile.py @@ -0,0 +1,252 @@ +# +# THIS IS WORK IN PROGRESS +# +# The Python Imaging Library +# $Id$ +# +# portable compiled font file parser +# +# history: +# 1997-08-19 fl created +# 2003-09-13 fl fixed loading of unicode fonts +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1997-2003 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image +from PIL import FontFile +from PIL import _binary + +# -------------------------------------------------------------------- +# declarations + +PCF_MAGIC = 0x70636601 # "\x01fcp" + +PCF_PROPERTIES = (1 << 0) +PCF_ACCELERATORS = (1 << 1) +PCF_METRICS = (1 << 2) +PCF_BITMAPS = (1 << 3) +PCF_INK_METRICS = (1 << 4) +PCF_BDF_ENCODINGS = (1 << 5) +PCF_SWIDTHS = (1 << 6) +PCF_GLYPH_NAMES = (1 << 7) +PCF_BDF_ACCELERATORS = (1 << 8) + +BYTES_PER_ROW = [ + lambda bits: ((bits+7) >> 3), + lambda bits: ((bits+15) >> 3) & ~1, + lambda bits: ((bits+31) >> 3) & ~3, + lambda bits: ((bits+63) >> 3) & ~7, +] + +i8 = _binary.i8 +l16 = _binary.i16le +l32 = _binary.i32le +b16 = _binary.i16be +b32 = _binary.i32be + + +def sz(s, o): + return s[o:s.index(b"\0", o)] + + +## +# Font file plugin for the X11 PCF format. + +class PcfFontFile(FontFile.FontFile): + + name = "name" + + def __init__(self, fp): + + magic = l32(fp.read(4)) + if magic != PCF_MAGIC: + raise SyntaxError("not a PCF file") + + FontFile.FontFile.__init__(self) + + count = l32(fp.read(4)) + self.toc = {} + for i in range(count): + type = l32(fp.read(4)) + self.toc[type] = l32(fp.read(4)), l32(fp.read(4)), l32(fp.read(4)) + + self.fp = fp + + self.info = self._load_properties() + + metrics = self._load_metrics() + bitmaps = self._load_bitmaps(metrics) + encoding = self._load_encoding() + + # + # create glyph structure + + for ch in range(256): + ix = encoding[ch] + if ix is not None: + x, y, l, r, w, a, d, f = metrics[ix] + glyph = (w, 0), (l, d-y, x+l, d), (0, 0, x, y), bitmaps[ix] + self.glyph[ch] = glyph + + def _getformat(self, tag): + + format, size, offset = self.toc[tag] + + fp = self.fp + fp.seek(offset) + + format = l32(fp.read(4)) + + if format & 4: + i16, i32 = b16, b32 + else: + i16, i32 = l16, l32 + + return fp, format, i16, i32 + + def _load_properties(self): + + # + # font properties + + properties = {} + + fp, format, i16, i32 = self._getformat(PCF_PROPERTIES) + + nprops = i32(fp.read(4)) + + # read property description + p = [] + for i in range(nprops): + p.append((i32(fp.read(4)), i8(fp.read(1)), i32(fp.read(4)))) + if nprops & 3: + fp.seek(4 - (nprops & 3), 1) # pad + + data = fp.read(i32(fp.read(4))) + + for k, s, v in p: + k = sz(data, k) + if s: + v = sz(data, v) + properties[k] = v + + return properties + + def _load_metrics(self): + + # + # font metrics + + metrics = [] + + fp, format, i16, i32 = self._getformat(PCF_METRICS) + + append = metrics.append + + if (format & 0xff00) == 0x100: + + # "compressed" metrics + for i in range(i16(fp.read(2))): + left = i8(fp.read(1)) - 128 + right = i8(fp.read(1)) - 128 + width = i8(fp.read(1)) - 128 + ascent = i8(fp.read(1)) - 128 + descent = i8(fp.read(1)) - 128 + xsize = right - left + ysize = ascent + 
descent + append( + (xsize, ysize, left, right, width, + ascent, descent, 0) + ) + + else: + + # "jumbo" metrics + for i in range(i32(fp.read(4))): + left = i16(fp.read(2)) + right = i16(fp.read(2)) + width = i16(fp.read(2)) + ascent = i16(fp.read(2)) + descent = i16(fp.read(2)) + attributes = i16(fp.read(2)) + xsize = right - left + ysize = ascent + descent + append( + (xsize, ysize, left, right, width, + ascent, descent, attributes) + ) + + return metrics + + def _load_bitmaps(self, metrics): + + # + # bitmap data + + bitmaps = [] + + fp, format, i16, i32 = self._getformat(PCF_BITMAPS) + + nbitmaps = i32(fp.read(4)) + + if nbitmaps != len(metrics): + raise IOError("Wrong number of bitmaps") + + offsets = [] + for i in range(nbitmaps): + offsets.append(i32(fp.read(4))) + + bitmapSizes = [] + for i in range(4): + bitmapSizes.append(i32(fp.read(4))) + + # byteorder = format & 4 # non-zero => MSB + bitorder = format & 8 # non-zero => MSB + padindex = format & 3 + + bitmapsize = bitmapSizes[padindex] + offsets.append(bitmapsize) + + data = fp.read(bitmapsize) + + pad = BYTES_PER_ROW[padindex] + mode = "1;R" + if bitorder: + mode = "1" + + for i in range(nbitmaps): + x, y, l, r, w, a, d, f = metrics[i] + b, e = offsets[i], offsets[i+1] + bitmaps.append( + Image.frombytes("1", (x, y), data[b:e], "raw", mode, pad(x)) + ) + + return bitmaps + + def _load_encoding(self): + + # map character code to bitmap index + encoding = [None] * 256 + + fp, format, i16, i32 = self._getformat(PCF_BDF_ENCODINGS) + + firstCol, lastCol = i16(fp.read(2)), i16(fp.read(2)) + firstRow, lastRow = i16(fp.read(2)), i16(fp.read(2)) + + default = i16(fp.read(2)) + + nencoding = (lastCol - firstCol + 1) * (lastRow - firstRow + 1) + + for i in range(nencoding): + encodingOffset = i16(fp.read(2)) + if encodingOffset != 0xFFFF: + try: + encoding[i+firstCol] = encodingOffset + except IndexError: + break # only load ISO-8859-1 glyphs + + return encoding diff --git a/Lib/site-packages/PIL/PcxImagePlugin.py b/Lib/site-packages/PIL/PcxImagePlugin.py new file mode 100644 index 0000000..9440d53 --- /dev/null +++ b/Lib/site-packages/PIL/PcxImagePlugin.py @@ -0,0 +1,187 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PCX file handling +# +# This format was originally used by ZSoft's popular PaintBrush +# program for the IBM PC. It is also supported by many MS-DOS and +# Windows applications, including the Windows PaintBrush program in +# Windows 3. +# +# history: +# 1995-09-01 fl Created +# 1996-05-20 fl Fixed RGB support +# 1997-01-03 fl Fixed 2-bit and 4-bit support +# 1999-02-03 fl Fixed 8-bit support (broken in 1.0b1) +# 1999-02-07 fl Added write support +# 2002-06-09 fl Made 2-bit and 4-bit support a bit more robust +# 2002-07-30 fl Seek from to current position, not beginning of file +# 2003-06-03 fl Extract DPI settings (info["dpi"]) +# +# Copyright (c) 1997-2003 by Secret Labs AB. +# Copyright (c) 1995-2003 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +from __future__ import print_function + +import logging +from PIL import Image, ImageFile, ImagePalette, _binary + +logger = logging.getLogger(__name__) + +i8 = _binary.i8 +i16 = _binary.i16le +o8 = _binary.o8 + +__version__ = "0.6" + + +def _accept(prefix): + return i8(prefix[0]) == 10 and i8(prefix[1]) in [0, 2, 3, 5] + + +## +# Image plugin for Paintbrush images. 
+ +class PcxImageFile(ImageFile.ImageFile): + + format = "PCX" + format_description = "Paintbrush" + + def _open(self): + + # header + s = self.fp.read(128) + if not _accept(s): + raise SyntaxError("not a PCX file") + + # image + bbox = i16(s, 4), i16(s, 6), i16(s, 8)+1, i16(s, 10)+1 + if bbox[2] <= bbox[0] or bbox[3] <= bbox[1]: + raise SyntaxError("bad PCX image size") + logger.debug("BBox: %s %s %s %s", *bbox) + + # format + version = i8(s[1]) + bits = i8(s[3]) + planes = i8(s[65]) + stride = i16(s, 66) + logger.debug("PCX version %s, bits %s, planes %s, stride %s", + version, bits, planes, stride) + + self.info["dpi"] = i16(s, 12), i16(s, 14) + + if bits == 1 and planes == 1: + mode = rawmode = "1" + + elif bits == 1 and planes in (2, 4): + mode = "P" + rawmode = "P;%dL" % planes + self.palette = ImagePalette.raw("RGB", s[16:64]) + + elif version == 5 and bits == 8 and planes == 1: + mode = rawmode = "L" + # FIXME: hey, this doesn't work with the incremental loader !!! + self.fp.seek(-769, 2) + s = self.fp.read(769) + if len(s) == 769 and i8(s[0]) == 12: + # check if the palette is linear greyscale + for i in range(256): + if s[i*3+1:i*3+4] != o8(i)*3: + mode = rawmode = "P" + break + if mode == "P": + self.palette = ImagePalette.raw("RGB", s[1:]) + self.fp.seek(128) + + elif version == 5 and bits == 8 and planes == 3: + mode = "RGB" + rawmode = "RGB;L" + + else: + raise IOError("unknown PCX mode") + + self.mode = mode + self.size = bbox[2]-bbox[0], bbox[3]-bbox[1] + + bbox = (0, 0) + self.size + logger.debug("size: %sx%s", *self.size) + + self.tile = [("pcx", bbox, self.fp.tell(), (rawmode, planes * stride))] + +# -------------------------------------------------------------------- +# save PCX files + +SAVE = { + # mode: (version, bits, planes, raw mode) + "1": (2, 1, 1, "1"), + "L": (5, 8, 1, "L"), + "P": (5, 8, 1, "P"), + "RGB": (5, 8, 3, "RGB;L"), +} + +o16 = _binary.o16le + + +def _save(im, fp, filename, check=0): + + try: + version, bits, planes, rawmode = SAVE[im.mode] + except KeyError: + raise ValueError("Cannot save %s images as PCX" % im.mode) + + if check: + return check + + # bytes per plane + stride = (im.size[0] * bits + 7) // 8 + # stride should be even + stride += stride % 2 + # Stride needs to be kept in sync with the PcxEncode.c version. + # Ideally it should be passed in in the state, but the bytes value + # gets overwritten. + + logger.debug("PcxImagePlugin._save: xwidth: %d, bits: %d, stride: %d", + im.size[0], bits, stride) + + # under windows, we could determine the current screen size with + # "Image.core.display_mode()[1]", but I think that's overkill... 
+ + screen = im.size + + dpi = 100, 100 + + # PCX header + fp.write( + o8(10) + o8(version) + o8(1) + o8(bits) + o16(0) + + o16(0) + o16(im.size[0]-1) + o16(im.size[1]-1) + o16(dpi[0]) + + o16(dpi[1]) + b"\0"*24 + b"\xFF"*24 + b"\0" + o8(planes) + + o16(stride) + o16(1) + o16(screen[0]) + o16(screen[1]) + + b"\0"*54 + ) + + assert fp.tell() == 128 + + ImageFile._save(im, fp, [("pcx", (0, 0)+im.size, 0, + (rawmode, bits*planes))]) + + if im.mode == "P": + # colour palette + fp.write(o8(12)) + fp.write(im.im.getpalette("RGB", "RGB")) # 768 bytes + elif im.mode == "L": + # greyscale palette + fp.write(o8(12)) + for i in range(256): + fp.write(o8(i)*3) + +# -------------------------------------------------------------------- +# registry + +Image.register_open(PcxImageFile.format, PcxImageFile, _accept) +Image.register_save(PcxImageFile.format, _save) + +Image.register_extension(PcxImageFile.format, ".pcx") diff --git a/Lib/site-packages/PIL/PdfImagePlugin.py b/Lib/site-packages/PIL/PdfImagePlugin.py new file mode 100644 index 0000000..7decf0e --- /dev/null +++ b/Lib/site-packages/PIL/PdfImagePlugin.py @@ -0,0 +1,258 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PDF (Acrobat) file handling +# +# History: +# 1996-07-16 fl Created +# 1997-01-18 fl Fixed header +# 2004-02-21 fl Fixes for 1/L/CMYK images, etc. +# 2004-02-24 fl Fixes for 1 and P images. +# +# Copyright (c) 1997-2004 by Secret Labs AB. All rights reserved. +# Copyright (c) 1996-1997 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +## +# Image plugin for PDF images (output only). +## + +from PIL import Image, ImageFile +from PIL._binary import i8 +import io + +__version__ = "0.4" + + +# +# -------------------------------------------------------------------- + +# object ids: +# 1. catalogue +# 2. pages +# 3. image +# 4. page +# 5. page contents + +def _obj(fp, obj, **dict): + fp.write("%d 0 obj\n" % obj) + if dict: + fp.write("<<\n") + for k, v in dict.items(): + if v is not None: + fp.write("/%s %s\n" % (k, v)) + fp.write(">>\n") + + +def _endobj(fp): + fp.write("endobj\n") + + +def _save_all(im, fp, filename): + _save(im, fp, filename, save_all=True) + + +## +# (Internal) Image save plugin for the PDF format. + +def _save(im, fp, filename, save_all=False): + resolution = im.encoderinfo.get("resolution", 72.0) + + # + # make sure image data is available + im.load() + + xref = [0] + + class TextWriter(object): + def __init__(self, fp): + self.fp = fp + + def __getattr__(self, name): + return getattr(self.fp, name) + + def write(self, value): + self.fp.write(value.encode('latin-1')) + + fp = TextWriter(fp) + + fp.write("%PDF-1.2\n") + fp.write("% created by PIL PDF driver " + __version__ + "\n") + + # FIXME: Should replace ASCIIHexDecode with RunLengthDecode (packbits) + # or LZWDecode (tiff/lzw compression). Note that PDF 1.2 also supports + # Flatedecode (zip compression). 
+ + bits = 8 + params = None + + if im.mode == "1": + filter = "/ASCIIHexDecode" + colorspace = "/DeviceGray" + procset = "/ImageB" # grayscale + bits = 1 + elif im.mode == "L": + filter = "/DCTDecode" + # params = "<< /Predictor 15 /Columns %d >>" % (width-2) + colorspace = "/DeviceGray" + procset = "/ImageB" # grayscale + elif im.mode == "P": + filter = "/ASCIIHexDecode" + colorspace = "[ /Indexed /DeviceRGB 255 <" + palette = im.im.getpalette("RGB") + for i in range(256): + r = i8(palette[i*3]) + g = i8(palette[i*3+1]) + b = i8(palette[i*3+2]) + colorspace += "%02x%02x%02x " % (r, g, b) + colorspace += "> ]" + procset = "/ImageI" # indexed color + elif im.mode == "RGB": + filter = "/DCTDecode" + colorspace = "/DeviceRGB" + procset = "/ImageC" # color images + elif im.mode == "CMYK": + filter = "/DCTDecode" + colorspace = "/DeviceCMYK" + procset = "/ImageC" # color images + else: + raise ValueError("cannot save mode %s" % im.mode) + + # + # catalogue + + xref.append(fp.tell()) + _obj( + fp, 1, + Type="/Catalog", + Pages="2 0 R") + _endobj(fp) + + # + # pages + numberOfPages = 1 + if save_all: + try: + numberOfPages = im.n_frames + except AttributeError: + # Image format does not have n_frames. It is a single frame image + pass + pages = [str(pageNumber*3+4)+" 0 R" + for pageNumber in range(0, numberOfPages)] + + xref.append(fp.tell()) + _obj( + fp, 2, + Type="/Pages", + Count=len(pages), + Kids="["+"\n".join(pages)+"]") + _endobj(fp) + + for pageNumber in range(0, numberOfPages): + im.seek(pageNumber) + + # + # image + + op = io.BytesIO() + + if filter == "/ASCIIHexDecode": + if bits == 1: + # FIXME: the hex encoder doesn't support packed 1-bit + # images; do things the hard way... + data = im.tobytes("raw", "1") + im = Image.new("L", (len(data), 1), None) + im.putdata(data) + ImageFile._save(im, op, [("hex", (0, 0)+im.size, 0, im.mode)]) + elif filter == "/DCTDecode": + Image.SAVE["JPEG"](im, op, filename) + elif filter == "/FlateDecode": + ImageFile._save(im, op, [("zip", (0, 0)+im.size, 0, im.mode)]) + elif filter == "/RunLengthDecode": + ImageFile._save(im, op, [("packbits", (0, 0)+im.size, 0, im.mode)]) + else: + raise ValueError("unsupported PDF filter (%s)" % filter) + + # + # Get image characteristics + + width, height = im.size + + xref.append(fp.tell()) + _obj( + fp, pageNumber*3+3, + Type="/XObject", + Subtype="/Image", + Width=width, # * 72.0 / resolution, + Height=height, # * 72.0 / resolution, + Length=len(op.getvalue()), + Filter=filter, + BitsPerComponent=bits, + DecodeParams=params, + ColorSpace=colorspace) + + fp.write("stream\n") + fp.fp.write(op.getvalue()) + fp.write("\nendstream\n") + + _endobj(fp) + + # + # page + + xref.append(fp.tell()) + _obj(fp, pageNumber*3+4) + fp.write( + "<<\n/Type /Page\n/Parent 2 0 R\n" + "/Resources <<\n/ProcSet [ /PDF %s ]\n" + "/XObject << /image %d 0 R >>\n>>\n" + "/MediaBox [ 0 0 %d %d ]\n/Contents %d 0 R\n>>\n" % ( + procset, + pageNumber*3+3, + int(width * 72.0 / resolution), + int(height * 72.0 / resolution), + pageNumber*3+5)) + _endobj(fp) + + # + # page contents + + op = TextWriter(io.BytesIO()) + + op.write( + "q %d 0 0 %d 0 0 cm /image Do Q\n" % ( + int(width * 72.0 / resolution), + int(height * 72.0 / resolution))) + + xref.append(fp.tell()) + _obj(fp, pageNumber*3+5, Length=len(op.fp.getvalue())) + + fp.write("stream\n") + fp.fp.write(op.fp.getvalue()) + fp.write("\nendstream\n") + + _endobj(fp) + + # + # trailer + startxref = fp.tell() + fp.write("xref\n0 %d\n0000000000 65535 f \n" % len(xref)) + for x in xref[1:]: + 
fp.write("%010d 00000 n \n" % x) + fp.write("trailer\n<<\n/Size %d\n/Root 1 0 R\n>>\n" % len(xref)) + fp.write("startxref\n%d\n%%%%EOF\n" % startxref) + if hasattr(fp, "flush"): + fp.flush() + +# +# -------------------------------------------------------------------- + +Image.register_save("PDF", _save) +Image.register_save_all("PDF", _save_all) + +Image.register_extension("PDF", ".pdf") + +Image.register_mime("PDF", "application/pdf") diff --git a/Lib/site-packages/PIL/PixarImagePlugin.py b/Lib/site-packages/PIL/PixarImagePlugin.py new file mode 100644 index 0000000..db2ee55 --- /dev/null +++ b/Lib/site-packages/PIL/PixarImagePlugin.py @@ -0,0 +1,68 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PIXAR raster support for PIL +# +# history: +# 97-01-29 fl Created +# +# notes: +# This is incomplete; it is based on a few samples created with +# Photoshop 2.5 and 3.0, and a summary description provided by +# Greg Coats . Hopefully, "L" and +# "RGBA" support will be added in future versions. +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1997. +# +# See the README file for information on usage and redistribution. +# + +from PIL import Image, ImageFile, _binary + +__version__ = "0.1" + +# +# helpers + +i16 = _binary.i16le + + +## +# Image plugin for PIXAR raster images. + +class PixarImageFile(ImageFile.ImageFile): + + format = "PIXAR" + format_description = "PIXAR raster image" + + def _open(self): + + # assuming a 4-byte magic label (FIXME: add "_accept" hook) + s = self.fp.read(4) + if s != b"\200\350\000\000": + raise SyntaxError("not a PIXAR file") + + # read rest of header + s = s + self.fp.read(508) + + self.size = i16(s[418:420]), i16(s[416:418]) + + # get channel/depth descriptions + mode = i16(s[424:426]), i16(s[426:428]) + + if mode == (14, 2): + self.mode = "RGB" + # FIXME: to be continued... + + # create tile descriptor (assuming "dumped") + self.tile = [("raw", (0, 0)+self.size, 1024, (self.mode, 0, 1))] + +# +# -------------------------------------------------------------------- + +Image.register_open(PixarImageFile.format, PixarImageFile) + +# +# FIXME: what's the standard extension? diff --git a/Lib/site-packages/PIL/PngImagePlugin.py b/Lib/site-packages/PIL/PngImagePlugin.py new file mode 100644 index 0000000..d677882 --- /dev/null +++ b/Lib/site-packages/PIL/PngImagePlugin.py @@ -0,0 +1,809 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PNG support code +# +# See "PNG (Portable Network Graphics) Specification, version 1.0; +# W3C Recommendation", 1996-10-01, Thomas Boutell (ed.). 
+# +# history: +# 1996-05-06 fl Created (couldn't resist it) +# 1996-12-14 fl Upgraded, added read and verify support (0.2) +# 1996-12-15 fl Separate PNG stream parser +# 1996-12-29 fl Added write support, added getchunks +# 1996-12-30 fl Eliminated circular references in decoder (0.3) +# 1998-07-12 fl Read/write 16-bit images as mode I (0.4) +# 2001-02-08 fl Added transparency support (from Zircon) (0.5) +# 2001-04-16 fl Don't close data source in "open" method (0.6) +# 2004-02-24 fl Don't even pretend to support interlaced files (0.7) +# 2004-08-31 fl Do basic sanity check on chunk identifiers (0.8) +# 2004-09-20 fl Added PngInfo chunk container +# 2004-12-18 fl Added DPI read support (based on code by Niki Spahiev) +# 2008-08-13 fl Added tRNS support for RGB images +# 2009-03-06 fl Support for preserving ICC profiles (by Florian Hoech) +# 2009-03-08 fl Added zTXT support (from Lowell Alleman) +# 2009-03-29 fl Read interlaced PNG files (from Conrado Porto Lopes Gouvua) +# +# Copyright (c) 1997-2009 by Secret Labs AB +# Copyright (c) 1996 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from __future__ import print_function + +import logging +import re +import zlib + +from PIL import Image, ImageFile, ImagePalette, _binary + +__version__ = "0.9" + +logger = logging.getLogger(__name__) + +i8 = _binary.i8 +i16 = _binary.i16be +i32 = _binary.i32be + +is_cid = re.compile(b"\w\w\w\w").match + + +_MAGIC = b"\211PNG\r\n\032\n" + + +_MODES = { + # supported bits/color combinations, and corresponding modes/rawmodes + (1, 0): ("1", "1"), + (2, 0): ("L", "L;2"), + (4, 0): ("L", "L;4"), + (8, 0): ("L", "L"), + (16, 0): ("I", "I;16B"), + (8, 2): ("RGB", "RGB"), + (16, 2): ("RGB", "RGB;16B"), + (1, 3): ("P", "P;1"), + (2, 3): ("P", "P;2"), + (4, 3): ("P", "P;4"), + (8, 3): ("P", "P"), + (8, 4): ("LA", "LA"), + (16, 4): ("RGBA", "LA;16B"), # LA;16B->LA not yet available + (8, 6): ("RGBA", "RGBA"), + (16, 6): ("RGBA", "RGBA;16B"), +} + + +_simple_palette = re.compile(b'^\xff+\x00\xff*$') +_null_palette = re.compile(b'^\x00*$') + +# Maximum decompressed size for a iTXt or zTXt chunk. +# Eliminates decompression bombs where compressed chunks can expand 1000x +MAX_TEXT_CHUNK = ImageFile.SAFEBLOCK +# Set the maximum total text chunk size. +MAX_TEXT_MEMORY = 64 * MAX_TEXT_CHUNK + + +def _safe_zlib_decompress(s): + dobj = zlib.decompressobj() + plaintext = dobj.decompress(s, MAX_TEXT_CHUNK) + if dobj.unconsumed_tail: + raise ValueError("Decompressed Data Too Large") + return plaintext + + +# -------------------------------------------------------------------- +# Support classes. Suitable for PNG and related formats like MNG etc. + +class ChunkStream(object): + + def __init__(self, fp): + + self.fp = fp + self.queue = [] + + if not hasattr(Image.core, "crc32"): + self.crc = self.crc_skip + + def read(self): + "Fetch a new chunk. Returns header information." 
+ + if self.queue: + cid, pos, length = self.queue[-1] + del self.queue[-1] + self.fp.seek(pos) + else: + s = self.fp.read(8) + cid = s[4:] + pos = self.fp.tell() + length = i32(s) + + if not is_cid(cid): + raise SyntaxError("broken PNG file (chunk %s)" % repr(cid)) + + return cid, pos, length + + def close(self): + self.queue = self.crc = self.fp = None + + def push(self, cid, pos, length): + + self.queue.append((cid, pos, length)) + + def call(self, cid, pos, length): + "Call the appropriate chunk handler" + + logger.debug("STREAM %s %s %s", cid, pos, length) + return getattr(self, "chunk_" + cid.decode('ascii'))(pos, length) + + def crc(self, cid, data): + "Read and verify checksum" + + crc1 = Image.core.crc32(data, Image.core.crc32(cid)) + crc2 = i16(self.fp.read(2)), i16(self.fp.read(2)) + if crc1 != crc2: + raise SyntaxError("broken PNG file" + "(bad header checksum in %s)" % cid) + + def crc_skip(self, cid, data): + "Read checksum. Used if the C module is not present" + + self.fp.read(4) + + def verify(self, endchunk=b"IEND"): + + # Simple approach; just calculate checksum for all remaining + # blocks. Must be called directly after open. + + cids = [] + + while True: + cid, pos, length = self.read() + if cid == endchunk: + break + self.crc(cid, ImageFile._safe_read(self.fp, length)) + cids.append(cid) + + return cids + + +class iTXt(str): + """ + Subclass of string to allow iTXt chunks to look like strings while + keeping their extra information + + """ + @staticmethod + def __new__(cls, text, lang, tkey): + """ + :param value: value for this key + :param lang: language code + :param tkey: UTF-8 version of the key name + """ + + self = str.__new__(cls, text) + self.lang = lang + self.tkey = tkey + return self + + +class PngInfo(object): + """ + PNG chunk container (for use with save(pnginfo=)) + + """ + + def __init__(self): + self.chunks = [] + + def add(self, cid, data): + """Appends an arbitrary chunk. Use with caution. + + :param cid: a byte string, 4 bytes long. + :param data: a byte string of the encoded data + + """ + + self.chunks.append((cid, data)) + + def add_itxt(self, key, value, lang="", tkey="", zip=False): + """Appends an iTXt chunk. + + :param key: latin-1 encodable text key name + :param value: value for this key + :param lang: language code + :param tkey: UTF-8 version of the key name + :param zip: compression flag + + """ + + if not isinstance(key, bytes): + key = key.encode("latin-1", "strict") + if not isinstance(value, bytes): + value = value.encode("utf-8", "strict") + if not isinstance(lang, bytes): + lang = lang.encode("utf-8", "strict") + if not isinstance(tkey, bytes): + tkey = tkey.encode("utf-8", "strict") + + if zip: + self.add(b"iTXt", key + b"\0\x01\0" + lang + b"\0" + tkey + b"\0" + + zlib.compress(value)) + else: + self.add(b"iTXt", key + b"\0\0\0" + lang + b"\0" + tkey + b"\0" + + value) + + def add_text(self, key, value, zip=0): + """Appends a text chunk. 
+ + :param key: latin-1 encodable text key name + :param value: value for this key, text or an + :py:class:`PIL.PngImagePlugin.iTXt` instance + :param zip: compression flag + + """ + if isinstance(value, iTXt): + return self.add_itxt(key, value, value.lang, value.tkey, bool(zip)) + + # The tEXt chunk stores latin-1 text + if not isinstance(value, bytes): + try: + value = value.encode('latin-1', 'strict') + except UnicodeError: + return self.add_itxt(key, value, zip=bool(zip)) + + if not isinstance(key, bytes): + key = key.encode('latin-1', 'strict') + + if zip: + self.add(b"zTXt", key + b"\0\0" + zlib.compress(value)) + else: + self.add(b"tEXt", key + b"\0" + value) + + +# -------------------------------------------------------------------- +# PNG image stream (IHDR/IEND) + +class PngStream(ChunkStream): + + def __init__(self, fp): + + ChunkStream.__init__(self, fp) + + # local copies of Image attributes + self.im_info = {} + self.im_text = {} + self.im_size = (0, 0) + self.im_mode = None + self.im_tile = None + self.im_palette = None + + self.text_memory = 0 + + def check_text_memory(self, chunklen): + self.text_memory += chunklen + if self.text_memory > MAX_TEXT_MEMORY: + raise ValueError("Too much memory used in text chunks: %s>MAX_TEXT_MEMORY" % + self.text_memory) + + def chunk_iCCP(self, pos, length): + + # ICC profile + s = ImageFile._safe_read(self.fp, length) + # according to PNG spec, the iCCP chunk contains: + # Profile name 1-79 bytes (character string) + # Null separator 1 byte (null character) + # Compression method 1 byte (0) + # Compressed profile n bytes (zlib with deflate compression) + i = s.find(b"\0") + logger.debug("iCCP profile name %s", s[:i]) + logger.debug("Compression method %s", i8(s[i])) + comp_method = i8(s[i]) + if comp_method != 0: + raise SyntaxError("Unknown compression method %s in iCCP chunk" % + comp_method) + try: + icc_profile = _safe_zlib_decompress(s[i+2:]) + except zlib.error: + icc_profile = None # FIXME + self.im_info["icc_profile"] = icc_profile + return s + + def chunk_IHDR(self, pos, length): + + # image header + s = ImageFile._safe_read(self.fp, length) + self.im_size = i32(s), i32(s[4:]) + try: + self.im_mode, self.im_rawmode = _MODES[(i8(s[8]), i8(s[9]))] + except: + pass + if i8(s[12]): + self.im_info["interlace"] = 1 + if i8(s[11]): + raise SyntaxError("unknown filter category") + return s + + def chunk_IDAT(self, pos, length): + + # image data + self.im_tile = [("zip", (0, 0)+self.im_size, pos, self.im_rawmode)] + self.im_idat = length + raise EOFError + + def chunk_IEND(self, pos, length): + + # end of PNG image + raise EOFError + + def chunk_PLTE(self, pos, length): + + # palette + s = ImageFile._safe_read(self.fp, length) + if self.im_mode == "P": + self.im_palette = "RGB", s + return s + + def chunk_tRNS(self, pos, length): + + # transparency + s = ImageFile._safe_read(self.fp, length) + if self.im_mode == "P": + if _simple_palette.match(s): + i = s.find(b"\0") + if i >= 0: + self.im_info["transparency"] = i + elif _null_palette.match(s): + self.im_info["transparency"] = 0 + else: + self.im_info["transparency"] = s + elif self.im_mode == "L": + self.im_info["transparency"] = i16(s) + elif self.im_mode == "RGB": + self.im_info["transparency"] = i16(s), i16(s[2:]), i16(s[4:]) + return s + + def chunk_gAMA(self, pos, length): + + # gamma setting + s = ImageFile._safe_read(self.fp, length) + self.im_info["gamma"] = i32(s) / 100000.0 + return s + + def chunk_pHYs(self, pos, length): + + # pixels per unit + s = 
ImageFile._safe_read(self.fp, length) + px, py = i32(s), i32(s[4:]) + unit = i8(s[8]) + if unit == 1: # meter + dpi = int(px * 0.0254 + 0.5), int(py * 0.0254 + 0.5) + self.im_info["dpi"] = dpi + elif unit == 0: + self.im_info["aspect"] = px, py + return s + + def chunk_tEXt(self, pos, length): + + # text + s = ImageFile._safe_read(self.fp, length) + try: + k, v = s.split(b"\0", 1) + except ValueError: + # fallback for broken tEXt tags + k = s + v = b"" + if k: + if bytes is not str: + k = k.decode('latin-1', 'strict') + v = v.decode('latin-1', 'replace') + + self.im_info[k] = self.im_text[k] = v + self.check_text_memory(len(v)) + + return s + + def chunk_zTXt(self, pos, length): + + # compressed text + s = ImageFile._safe_read(self.fp, length) + try: + k, v = s.split(b"\0", 1) + except ValueError: + k = s + v = b"" + if v: + comp_method = i8(v[0]) + else: + comp_method = 0 + if comp_method != 0: + raise SyntaxError("Unknown compression method %s in zTXt chunk" % + comp_method) + try: + v = _safe_zlib_decompress(v[1:]) + except zlib.error: + v = b"" + + if k: + if bytes is not str: + k = k.decode('latin-1', 'strict') + v = v.decode('latin-1', 'replace') + + self.im_info[k] = self.im_text[k] = v + self.check_text_memory(len(v)) + + return s + + def chunk_iTXt(self, pos, length): + + # international text + r = s = ImageFile._safe_read(self.fp, length) + try: + k, r = r.split(b"\0", 1) + except ValueError: + return s + if len(r) < 2: + return s + cf, cm, r = i8(r[0]), i8(r[1]), r[2:] + try: + lang, tk, v = r.split(b"\0", 2) + except ValueError: + return s + if cf != 0: + if cm == 0: + try: + v = _safe_zlib_decompress(v) + except zlib.error: + return s + else: + return s + if bytes is not str: + try: + k = k.decode("latin-1", "strict") + lang = lang.decode("utf-8", "strict") + tk = tk.decode("utf-8", "strict") + v = v.decode("utf-8", "strict") + except UnicodeError: + return s + + self.im_info[k] = self.im_text[k] = iTXt(v, lang, tk) + self.check_text_memory(len(v)) + + return s + + +# -------------------------------------------------------------------- +# PNG reader + +def _accept(prefix): + return prefix[:8] == _MAGIC + + +## +# Image plugin for PNG images. + +class PngImageFile(ImageFile.ImageFile): + + format = "PNG" + format_description = "Portable network graphics" + + def _open(self): + + if self.fp.read(8) != _MAGIC: + raise SyntaxError("not a PNG file") + + # + # Parse headers up to the first IDAT chunk + + self.png = PngStream(self.fp) + + while True: + + # + # get next chunk + + cid, pos, length = self.png.read() + + try: + s = self.png.call(cid, pos, length) + except EOFError: + break + except AttributeError: + logger.debug("%s %s %s (unknown)", cid, pos, length) + s = ImageFile._safe_read(self.fp, length) + + self.png.crc(cid, s) + + # + # Copy relevant attributes from the PngStream. An alternative + # would be to let the PngStream class modify these attributes + # directly, but that introduces circular references which are + # difficult to break if things go wrong in the decoder... 
+ # (believe me, I've tried ;-) + + self.mode = self.png.im_mode + self.size = self.png.im_size + self.info = self.png.im_info + self.text = self.png.im_text # experimental + self.tile = self.png.im_tile + + if self.png.im_palette: + rawmode, data = self.png.im_palette + self.palette = ImagePalette.raw(rawmode, data) + + self.__idat = length # used by load_read() + + def verify(self): + "Verify PNG file" + + if self.fp is None: + raise RuntimeError("verify must be called directly after open") + + # back up to beginning of IDAT block + self.fp.seek(self.tile[0][2] - 8) + + self.png.verify() + self.png.close() + + self.fp = None + + def load_prepare(self): + "internal: prepare to read PNG file" + + if self.info.get("interlace"): + self.decoderconfig = self.decoderconfig + (1,) + + ImageFile.ImageFile.load_prepare(self) + + def load_read(self, read_bytes): + "internal: read more image data" + + while self.__idat == 0: + # end of chunk, skip forward to next one + + self.fp.read(4) # CRC + + cid, pos, length = self.png.read() + + if cid not in [b"IDAT", b"DDAT"]: + self.png.push(cid, pos, length) + return b"" + + self.__idat = length # empty chunks are allowed + + # read more data from this chunk + if read_bytes <= 0: + read_bytes = self.__idat + else: + read_bytes = min(read_bytes, self.__idat) + + self.__idat = self.__idat - read_bytes + + return self.fp.read(read_bytes) + + def load_end(self): + "internal: finished reading image data" + + self.png.close() + self.png = None + + +# -------------------------------------------------------------------- +# PNG writer + +o8 = _binary.o8 +o16 = _binary.o16be +o32 = _binary.o32be + +_OUTMODES = { + # supported PIL modes, and corresponding rawmodes/bits/color combinations + "1": ("1", b'\x01\x00'), + "L;1": ("L;1", b'\x01\x00'), + "L;2": ("L;2", b'\x02\x00'), + "L;4": ("L;4", b'\x04\x00'), + "L": ("L", b'\x08\x00'), + "LA": ("LA", b'\x08\x04'), + "I": ("I;16B", b'\x10\x00'), + "P;1": ("P;1", b'\x01\x03'), + "P;2": ("P;2", b'\x02\x03'), + "P;4": ("P;4", b'\x04\x03'), + "P": ("P", b'\x08\x03'), + "RGB": ("RGB", b'\x08\x02'), + "RGBA": ("RGBA", b'\x08\x06'), +} + + +def putchunk(fp, cid, *data): + "Write a PNG chunk (including CRC field)" + + data = b"".join(data) + + fp.write(o32(len(data)) + cid) + fp.write(data) + hi, lo = Image.core.crc32(data, Image.core.crc32(cid)) + fp.write(o16(hi) + o16(lo)) + + +class _idat(object): + # wrap output from the encoder in IDAT chunks + + def __init__(self, fp, chunk): + self.fp = fp + self.chunk = chunk + + def write(self, data): + self.chunk(self.fp, b"IDAT", data) + + +def _save(im, fp, filename, chunk=putchunk, check=0): + # save an image to disk (called by the save method) + + mode = im.mode + + if mode == "P": + + # + # attempt to minimize storage requirements for palette images + if "bits" in im.encoderinfo: + # number of bits specified by user + colors = 1 << im.encoderinfo["bits"] + else: + # check palette contents + if im.palette: + colors = max(min(len(im.palette.getdata()[1])//3, 256), 2) + else: + colors = 256 + + if colors <= 2: + bits = 1 + elif colors <= 4: + bits = 2 + elif colors <= 16: + bits = 4 + else: + bits = 8 + if bits != 8: + mode = "%s;%d" % (mode, bits) + + # encoder options + if "dictionary" in im.encoderinfo: + dictionary = im.encoderinfo["dictionary"] + else: + dictionary = b"" + + im.encoderconfig = ("optimize" in im.encoderinfo, + im.encoderinfo.get("compress_level", -1), + im.encoderinfo.get("compress_type", -1), + dictionary) + + # get the corresponding PNG mode + try: + rawmode, 
mode = _OUTMODES[mode] + except KeyError: + raise IOError("cannot write mode %s as PNG" % mode) + + if check: + return check + + # + # write minimal PNG file + + fp.write(_MAGIC) + + chunk(fp, b"IHDR", + o32(im.size[0]), o32(im.size[1]), # 0: size + mode, # 8: depth/type + b'\0', # 10: compression + b'\0', # 11: filter category + b'\0') # 12: interlace flag + + if im.mode == "P": + palette_byte_number = (2 ** bits) * 3 + palette_bytes = im.im.getpalette("RGB")[:palette_byte_number] + while len(palette_bytes) < palette_byte_number: + palette_bytes += b'\0' + chunk(fp, b"PLTE", palette_bytes) + + transparency = im.encoderinfo.get('transparency', + im.info.get('transparency', None)) + + if transparency or transparency == 0: + if im.mode == "P": + # limit to actual palette size + alpha_bytes = 2**bits + if isinstance(transparency, bytes): + chunk(fp, b"tRNS", transparency[:alpha_bytes]) + else: + transparency = max(0, min(255, transparency)) + alpha = b'\xFF' * transparency + b'\0' + chunk(fp, b"tRNS", alpha[:alpha_bytes]) + elif im.mode == "L": + transparency = max(0, min(65535, transparency)) + chunk(fp, b"tRNS", o16(transparency)) + elif im.mode == "RGB": + red, green, blue = transparency + chunk(fp, b"tRNS", o16(red) + o16(green) + o16(blue)) + else: + if "transparency" in im.encoderinfo: + # don't bother with transparency if it's an RGBA + # and it's in the info dict. It's probably just stale. + raise IOError("cannot use transparency for this mode") + else: + if im.mode == "P" and im.im.getpalettemode() == "RGBA": + alpha = im.im.getpalette("RGBA", "A") + alpha_bytes = 2**bits + chunk(fp, b"tRNS", alpha[:alpha_bytes]) + + dpi = im.encoderinfo.get("dpi") + if dpi: + chunk(fp, b"pHYs", + o32(int(dpi[0] / 0.0254 + 0.5)), + o32(int(dpi[1] / 0.0254 + 0.5)), + b'\x01') + + info = im.encoderinfo.get("pnginfo") + if info: + for cid, data in info.chunks: + chunk(fp, cid, data) + + # ICC profile writing support -- 2008-06-06 Florian Hoech + if im.info.get("icc_profile"): + # ICC profile + # according to PNG spec, the iCCP chunk contains: + # Profile name 1-79 bytes (character string) + # Null separator 1 byte (null character) + # Compression method 1 byte (0) + # Compressed profile n bytes (zlib with deflate compression) + name = b"ICC Profile" + data = name + b"\0\0" + zlib.compress(im.info["icc_profile"]) + chunk(fp, b"iCCP", data) + + ImageFile._save(im, _idat(fp, chunk), + [("zip", (0, 0)+im.size, 0, rawmode)]) + + chunk(fp, b"IEND", b"") + + if hasattr(fp, "flush"): + fp.flush() + + +# -------------------------------------------------------------------- +# PNG chunk converter + +def getchunks(im, **params): + """Return a list of PNG chunks representing this image.""" + + class collector(object): + data = [] + + def write(self, data): + pass + + def append(self, chunk): + self.data.append(chunk) + + def append(fp, cid, *data): + data = b"".join(data) + hi, lo = Image.core.crc32(data, Image.core.crc32(cid)) + crc = o16(hi) + o16(lo) + fp.append((cid, data, crc)) + + fp = collector() + + try: + im.encoderinfo = params + _save(im, fp, None, append) + finally: + del im.encoderinfo + + return fp.data + + +# -------------------------------------------------------------------- +# Registry + +Image.register_open(PngImageFile.format, PngImageFile, _accept) +Image.register_save(PngImageFile.format, _save) + +Image.register_extension(PngImageFile.format, ".png") + +Image.register_mime(PngImageFile.format, "image/png") diff --git a/Lib/site-packages/PIL/PpmImagePlugin.py 
b/Lib/site-packages/PIL/PpmImagePlugin.py new file mode 100644 index 0000000..68073ca --- /dev/null +++ b/Lib/site-packages/PIL/PpmImagePlugin.py @@ -0,0 +1,174 @@ +# +# The Python Imaging Library. +# $Id$ +# +# PPM support for PIL +# +# History: +# 96-03-24 fl Created +# 98-03-06 fl Write RGBA images (as RGB, that is) +# +# Copyright (c) Secret Labs AB 1997-98. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. +# + + +import string + +from PIL import Image, ImageFile + +__version__ = "0.2" + +# +# -------------------------------------------------------------------- + +b_whitespace = string.whitespace +try: + import locale + locale_lang, locale_enc = locale.getlocale() + if locale_enc is None: + locale_lang, locale_enc = locale.getdefaultlocale() + b_whitespace = b_whitespace.decode(locale_enc) +except: + pass +b_whitespace = b_whitespace.encode('ascii', 'ignore') + +MODES = { + # standard + b"P4": "1", + b"P5": "L", + b"P6": "RGB", + # extensions + b"P0CMYK": "CMYK", + # PIL extensions (for test purposes only) + b"PyP": "P", + b"PyRGBA": "RGBA", + b"PyCMYK": "CMYK" +} + + +def _accept(prefix): + return prefix[0:1] == b"P" and prefix[1] in b"0456y" + + +## +# Image plugin for PBM, PGM, and PPM images. + +class PpmImageFile(ImageFile.ImageFile): + + format = "PPM" + format_description = "Pbmplus image" + + def _token(self, s=b""): + while True: # read until next whitespace + c = self.fp.read(1) + if not c or c in b_whitespace: + break + if c > b'\x79': + raise ValueError("Expected ASCII value, found binary") + s = s + c + if (len(s) > 9): + raise ValueError("Expected int, got > 9 digits") + return s + + def _open(self): + + # check magic + s = self.fp.read(1) + if s != b"P": + raise SyntaxError("not a PPM file") + mode = MODES[self._token(s)] + + if mode == "1": + self.mode = "1" + rawmode = "1;I" + else: + self.mode = rawmode = mode + + for ix in range(3): + while True: + while True: + s = self.fp.read(1) + if s not in b_whitespace: + break + if s == b"": + raise ValueError("File does not extend beyond magic number") + if s != b"#": + break + s = self.fp.readline() + s = int(self._token(s)) + if ix == 0: + xsize = s + elif ix == 1: + ysize = s + if mode == "1": + break + elif ix == 2: + # maxgrey + if s > 255: + if not mode == 'L': + raise ValueError("Too many colors for band: %s" % s) + if s < 2**16: + self.mode = 'I' + rawmode = 'I;16B' + else: + self.mode = 'I' + rawmode = 'I;32B' + + self.size = xsize, ysize + self.tile = [("raw", + (0, 0, xsize, ysize), + self.fp.tell(), + (rawmode, 0, 1))] + + # ALTERNATIVE: load via builtin debug function + # self.im = Image.core.open_ppm(self.filename) + # self.mode = self.im.mode + # self.size = self.im.size + + +# +# -------------------------------------------------------------------- + +def _save(im, fp, filename): + if im.mode == "1": + rawmode, head = "1;I", b"P4" + elif im.mode == "L": + rawmode, head = "L", b"P5" + elif im.mode == "I": + if im.getextrema()[1] < 2**16: + rawmode, head = "I;16B", b"P5" + else: + rawmode, head = "I;32B", b"P5" + elif im.mode == "RGB": + rawmode, head = "RGB", b"P6" + elif im.mode == "RGBA": + rawmode, head = "RGB", b"P6" + else: + raise IOError("cannot write mode %s as PPM" % im.mode) + fp.write(head + ("\n%d %d\n" % im.size).encode('ascii')) + if head == b"P6": + fp.write(b"255\n") + if head == b"P5": + if rawmode == "L": + fp.write(b"255\n") + elif rawmode == "I;16B": + fp.write(b"65535\n") + elif rawmode == "I;32B": + fp.write(b"2147483648\n") + 
ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 0, (rawmode, 0, 1))]) + + # ALTERNATIVE: save via builtin debug function + # im._dump(filename) + +# +# -------------------------------------------------------------------- + +Image.register_open(PpmImageFile.format, PpmImageFile, _accept) +Image.register_save(PpmImageFile.format, _save) + +Image.register_extension(PpmImageFile.format, ".pbm") +Image.register_extension(PpmImageFile.format, ".pgm") +Image.register_extension(PpmImageFile.format, ".ppm") diff --git a/Lib/site-packages/PIL/PsdImagePlugin.py b/Lib/site-packages/PIL/PsdImagePlugin.py new file mode 100644 index 0000000..d06e320 --- /dev/null +++ b/Lib/site-packages/PIL/PsdImagePlugin.py @@ -0,0 +1,312 @@ +# +# The Python Imaging Library +# $Id$ +# +# Adobe PSD 2.5/3.0 file handling +# +# History: +# 1995-09-01 fl Created +# 1997-01-03 fl Read most PSD images +# 1997-01-18 fl Fixed P and CMYK support +# 2001-10-21 fl Added seek/tell support (for layers) +# +# Copyright (c) 1997-2001 by Secret Labs AB. +# Copyright (c) 1995-2001 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +__version__ = "0.4" + +from PIL import Image, ImageFile, ImagePalette, _binary + +MODES = { + # (photoshop mode, bits) -> (pil mode, required channels) + (0, 1): ("1", 1), + (0, 8): ("L", 1), + (1, 8): ("L", 1), + (2, 8): ("P", 1), + (3, 8): ("RGB", 3), + (4, 8): ("CMYK", 4), + (7, 8): ("L", 1), # FIXME: multilayer + (8, 8): ("L", 1), # duotone + (9, 8): ("LAB", 3) +} + +# +# helpers + +i8 = _binary.i8 +i16 = _binary.i16be +i32 = _binary.i32be + + +# --------------------------------------------------------------------. +# read PSD images + +def _accept(prefix): + return prefix[:4] == b"8BPS" + + +## +# Image plugin for Photoshop images. 
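The MODES table above maps a (photoshop mode, bit depth) pair from the PSD header to a PIL mode and the minimum number of channels that mode needs; the reader below rejects files that advertise fewer channels than that. A minimal sketch of the lookup, using made-up header values (psd_mode, psd_bits and psd_channels are assumptions for illustration, not values taken from this patch):

# Hedged sketch: resolve a PIL mode the way PsdImageFile._open does,
# with hypothetical header fields for an 8-bit RGB file.
MODES = {(3, 8): ("RGB", 3)}                # excerpt of the table above
psd_mode, psd_bits, psd_channels = 3, 8, 3  # invented header values

mode, channels = MODES[(psd_mode, psd_bits)]
if channels > psd_channels:
    raise IOError("not enough channels")
print(mode)                                 # -> RGB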
+ +class PsdImageFile(ImageFile.ImageFile): + + format = "PSD" + format_description = "Adobe Photoshop" + + def _open(self): + + read = self.fp.read + + # + # header + + s = read(26) + if s[:4] != b"8BPS" or i16(s[4:]) != 1: + raise SyntaxError("not a PSD file") + + psd_bits = i16(s[22:]) + psd_channels = i16(s[12:]) + psd_mode = i16(s[24:]) + + mode, channels = MODES[(psd_mode, psd_bits)] + + if channels > psd_channels: + raise IOError("not enough channels") + + self.mode = mode + self.size = i32(s[18:]), i32(s[14:]) + + # + # color mode data + + size = i32(read(4)) + if size: + data = read(size) + if mode == "P" and size == 768: + self.palette = ImagePalette.raw("RGB;L", data) + + # + # image resources + + self.resources = [] + + size = i32(read(4)) + if size: + # load resources + end = self.fp.tell() + size + while self.fp.tell() < end: + signature = read(4) + id = i16(read(2)) + name = read(i8(read(1))) + if not (len(name) & 1): + read(1) # padding + data = read(i32(read(4))) + if (len(data) & 1): + read(1) # padding + self.resources.append((id, name, data)) + if id == 1039: # ICC profile + self.info["icc_profile"] = data + + # + # layer and mask information + + self.layers = [] + + size = i32(read(4)) + if size: + end = self.fp.tell() + size + size = i32(read(4)) + if size: + self.layers = _layerinfo(self.fp) + self.fp.seek(end) + + # + # image descriptor + + self.tile = _maketile(self.fp, mode, (0, 0) + self.size, channels) + + # keep the file open + self._fp = self.fp + self.frame = 0 + + @property + def n_frames(self): + return len(self.layers) + + @property + def is_animated(self): + return len(self.layers) > 1 + + def seek(self, layer): + # seek to given layer (1..max) + if layer == self.frame: + return + try: + if layer <= 0: + raise IndexError + name, mode, bbox, tile = self.layers[layer-1] + self.mode = mode + self.tile = tile + self.frame = layer + self.fp = self._fp + return name, bbox + except IndexError: + raise EOFError("no such layer") + + def tell(self): + # return layer number (0=image, 1..max=layers) + return self.frame + + def load_prepare(self): + # create image memory if necessary + if not self.im or\ + self.im.mode != self.mode or self.im.size != self.size: + self.im = Image.core.fill(self.mode, self.size, 0) + # create palette (optional) + if self.mode == "P": + Image.Image.load(self) + + +def _layerinfo(file): + # read layerinfo block + layers = [] + read = file.read + for i in range(abs(i16(read(2)))): + + # bounding box + y0 = i32(read(4)) + x0 = i32(read(4)) + y1 = i32(read(4)) + x1 = i32(read(4)) + + # image info + info = [] + mode = [] + types = list(range(i16(read(2)))) + if len(types) > 4: + continue + + for i in types: + type = i16(read(2)) + + if type == 65535: + m = "A" + else: + m = "RGBA"[type] + + mode.append(m) + size = i32(read(4)) + info.append((m, size)) + + # figure out the image mode + mode.sort() + if mode == ["R"]: + mode = "L" + elif mode == ["B", "G", "R"]: + mode = "RGB" + elif mode == ["A", "B", "G", "R"]: + mode = "RGBA" + else: + mode = None # unknown + + # skip over blend flags and extra information + filler = read(12) + name = "" + size = i32(read(4)) + combined = 0 + if size: + length = i32(read(4)) + if length: + mask_y = i32(read(4)) + mask_x = i32(read(4)) + mask_h = i32(read(4)) - mask_y + mask_w = i32(read(4)) - mask_x + file.seek(length - 16, 1) + combined += length + 4 + + length = i32(read(4)) + if length: + file.seek(length, 1) + combined += length + 4 + + length = i8(read(1)) + if length: + # Don't know the proper 
encoding, + # Latin-1 should be a good guess + name = read(length).decode('latin-1', 'replace') + combined += length + 1 + + file.seek(size - combined, 1) + layers.append((name, mode, (x0, y0, x1, y1))) + + # get tiles + i = 0 + for name, mode, bbox in layers: + tile = [] + for m in mode: + t = _maketile(file, m, bbox, 1) + if t: + tile.extend(t) + layers[i] = name, mode, bbox, tile + i += 1 + + return layers + + +def _maketile(file, mode, bbox, channels): + + tile = None + read = file.read + + compression = i16(read(2)) + + xsize = bbox[2] - bbox[0] + ysize = bbox[3] - bbox[1] + + offset = file.tell() + + if compression == 0: + # + # raw compression + tile = [] + for channel in range(channels): + layer = mode[channel] + if mode == "CMYK": + layer += ";I" + tile.append(("raw", bbox, offset, layer)) + offset = offset + xsize*ysize + + elif compression == 1: + # + # packbits compression + i = 0 + tile = [] + bytecount = read(channels * ysize * 2) + offset = file.tell() + for channel in range(channels): + layer = mode[channel] + if mode == "CMYK": + layer += ";I" + tile.append( + ("packbits", bbox, offset, layer) + ) + for y in range(ysize): + offset = offset + i16(bytecount[i:i+2]) + i += 2 + + file.seek(offset) + + if offset & 1: + read(1) # padding + + return tile + +# -------------------------------------------------------------------- +# registry + +Image.register_open(PsdImageFile.format, PsdImageFile, _accept) + +Image.register_extension(PsdImageFile.format, ".psd") diff --git a/Lib/site-packages/PIL/PyAccess.py b/Lib/site-packages/PIL/PyAccess.py new file mode 100644 index 0000000..faa868c --- /dev/null +++ b/Lib/site-packages/PIL/PyAccess.py @@ -0,0 +1,317 @@ +# +# The Python Imaging Library +# Pillow fork +# +# Python implementation of the PixelAccess Object +# +# Copyright (c) 1997-2009 by Secret Labs AB. All rights reserved. +# Copyright (c) 1995-2009 by Fredrik Lundh. +# Copyright (c) 2013 Eric Soroos +# +# See the README file for information on usage and redistribution +# + +# Notes: +# +# * Implements the pixel access object following Access. +# * Does not implement the line functions, as they don't appear to be used +# * Taking only the tuple form, which is used from python. +# * Fill.c uses the integer form, but it's still going to use the old +# Access.c implementation. +# + +from __future__ import print_function + +import logging +import sys + +from cffi import FFI + + +logger = logging.getLogger(__name__) + + +defs = """ +struct Pixel_RGBA { + unsigned char r,g,b,a; +}; +struct Pixel_I16 { + unsigned char l,r; +}; +""" +ffi = FFI() +ffi.cdef(defs) + + +class PyAccess(object): + + def __init__(self, img, readonly=False): + vals = dict(img.im.unsafe_ptrs) + self.readonly = readonly + self.image8 = ffi.cast('unsigned char **', vals['image8']) + self.image32 = ffi.cast('int **', vals['image32']) + self.image = ffi.cast('unsigned char **', vals['image']) + self.xsize = vals['xsize'] + self.ysize = vals['ysize'] + + # Debugging is polluting test traces, only useful here + # when hacking on PyAccess + # logger.debug("%s", vals) + self._post_init() + + def _post_init(self): + pass + + def __setitem__(self, xy, color): + """ + Modifies the pixel at x,y. The color is given as a single + numerical value for single band images, and a tuple for + multi-band images + + :param xy: The pixel coordinate, given as (x, y). + :param value: The pixel value. 
+ """ + if self.readonly: + raise ValueError('Attempt to putpixel a read only image') + (x, y) = self.check_xy(xy) + return self.set_pixel(x, y, color) + + def __getitem__(self, xy): + """ + Returns the pixel at x,y. The pixel is returned as a single + value for single band images or a tuple for multiple band + images + + :param xy: The pixel coordinate, given as (x, y). + :returns: a pixel value for single band images, a tuple of + pixel values for multiband images. + """ + + (x, y) = self.check_xy(xy) + return self.get_pixel(x, y) + + putpixel = __setitem__ + getpixel = __getitem__ + + def check_xy(self, xy): + (x, y) = xy + if not (0 <= x < self.xsize and 0 <= y < self.ysize): + raise ValueError('pixel location out of range') + return xy + + +class _PyAccess32_2(PyAccess): + """ PA, LA, stored in first and last bytes of a 32 bit word """ + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32) + + def get_pixel(self, x, y): + pixel = self.pixels[y][x] + return (pixel.r, pixel.a) + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + # tuple + pixel.r = min(color[0], 255) + pixel.a = min(color[1], 255) + + +class _PyAccess32_3(PyAccess): + """ RGB and friends, stored in the first three bytes of a 32 bit word """ + + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32) + + def get_pixel(self, x, y): + pixel = self.pixels[y][x] + return (pixel.r, pixel.g, pixel.b) + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + # tuple + pixel.r = min(color[0], 255) + pixel.g = min(color[1], 255) + pixel.b = min(color[2], 255) + + +class _PyAccess32_4(PyAccess): + """ RGBA etc, all 4 bytes of a 32 bit word """ + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast("struct Pixel_RGBA **", self.image32) + + def get_pixel(self, x, y): + pixel = self.pixels[y][x] + return (pixel.r, pixel.g, pixel.b, pixel.a) + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + # tuple + pixel.r = min(color[0], 255) + pixel.g = min(color[1], 255) + pixel.b = min(color[2], 255) + pixel.a = min(color[3], 255) + + +class _PyAccess8(PyAccess): + """ 1, L, P, 8 bit images stored as uint8 """ + def _post_init(self, *args, **kwargs): + self.pixels = self.image8 + + def get_pixel(self, x, y): + return self.pixels[y][x] + + def set_pixel(self, x, y, color): + try: + # integer + self.pixels[y][x] = min(color, 255) + except: + # tuple + self.pixels[y][x] = min(color[0], 255) + + +class _PyAccessI16_N(PyAccess): + """ I;16 access, native bitendian without conversion """ + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast('unsigned short **', self.image) + + def get_pixel(self, x, y): + return self.pixels[y][x] + + def set_pixel(self, x, y, color): + try: + # integer + self.pixels[y][x] = min(color, 65535) + except: + # tuple + self.pixels[y][x] = min(color[0], 65535) + + +class _PyAccessI16_L(PyAccess): + """ I;16L access, with conversion """ + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast('struct Pixel_I16 **', self.image) + + def get_pixel(self, x, y): + pixel = self.pixels[y][x] + return pixel.l + pixel.r * 256 + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + try: + color = min(color, 65535) + except TypeError: + color = min(color[0], 65535) + + pixel.l = color & 0xFF + pixel.r = color >> 8 + + +class _PyAccessI16_B(PyAccess): + """ I;16B access, with conversion """ + def _post_init(self, *args, **kwargs): + self.pixels = 
ffi.cast('struct Pixel_I16 **', self.image) + + def get_pixel(self, x, y): + pixel = self.pixels[y][x] + return pixel.l * 256 + pixel.r + + def set_pixel(self, x, y, color): + pixel = self.pixels[y][x] + try: + color = min(color, 65535) + except: + color = min(color[0], 65535) + + pixel.l = color >> 8 + pixel.r = color & 0xFF + + +class _PyAccessI32_N(PyAccess): + """ Signed Int32 access, native endian """ + def _post_init(self, *args, **kwargs): + self.pixels = self.image32 + + def get_pixel(self, x, y): + return self.pixels[y][x] + + def set_pixel(self, x, y, color): + self.pixels[y][x] = color + + +class _PyAccessI32_Swap(PyAccess): + """ I;32L/B access, with byteswapping conversion """ + def _post_init(self, *args, **kwargs): + self.pixels = self.image32 + + def reverse(self, i): + orig = ffi.new('int *', i) + chars = ffi.cast('unsigned char *', orig) + chars[0], chars[1], chars[2], chars[3] = chars[3], chars[2], \ + chars[1], chars[0] + return ffi.cast('int *', chars)[0] + + def get_pixel(self, x, y): + return self.reverse(self.pixels[y][x]) + + def set_pixel(self, x, y, color): + self.pixels[y][x] = self.reverse(color) + + +class _PyAccessF(PyAccess): + """ 32 bit float access """ + def _post_init(self, *args, **kwargs): + self.pixels = ffi.cast('float **', self.image32) + + def get_pixel(self, x, y): + return self.pixels[y][x] + + def set_pixel(self, x, y, color): + try: + # not a tuple + self.pixels[y][x] = color + except: + # tuple + self.pixels[y][x] = color[0] + + +mode_map = {'1': _PyAccess8, + 'L': _PyAccess8, + 'P': _PyAccess8, + 'LA': _PyAccess32_2, + 'PA': _PyAccess32_2, + 'RGB': _PyAccess32_3, + 'LAB': _PyAccess32_3, + 'HSV': _PyAccess32_3, + 'YCbCr': _PyAccess32_3, + 'RGBA': _PyAccess32_4, + 'RGBa': _PyAccess32_4, + 'RGBX': _PyAccess32_4, + 'CMYK': _PyAccess32_4, + 'F': _PyAccessF, + 'I': _PyAccessI32_N, + } + +if sys.byteorder == 'little': + mode_map['I;16'] = _PyAccessI16_N + mode_map['I;16L'] = _PyAccessI16_N + mode_map['I;16B'] = _PyAccessI16_B + + mode_map['I;32L'] = _PyAccessI32_N + mode_map['I;32B'] = _PyAccessI32_Swap +else: + mode_map['I;16'] = _PyAccessI16_L + mode_map['I;16L'] = _PyAccessI16_L + mode_map['I;16B'] = _PyAccessI16_N + + mode_map['I;32L'] = _PyAccessI32_Swap + mode_map['I;32B'] = _PyAccessI32_N + + +def new(img, readonly=False): + access_type = mode_map.get(img.mode, None) + if not access_type: + logger.debug("PyAccess Not Implemented: %s", img.mode) + return None + return access_type(img, readonly) + +# End of file diff --git a/Lib/site-packages/PIL/SgiImagePlugin.py b/Lib/site-packages/PIL/SgiImagePlugin.py new file mode 100644 index 0000000..f890c7e --- /dev/null +++ b/Lib/site-packages/PIL/SgiImagePlugin.py @@ -0,0 +1,89 @@ +# +# The Python Imaging Library. +# $Id$ +# +# SGI image file handling +# +# See "The SGI Image File Format (Draft version 0.97)", Paul Haeberli. +# +# +# History: +# 1995-09-10 fl Created +# +# Copyright (c) 2008 by Karsten Hiddemann. +# Copyright (c) 1997 by Secret Labs AB. +# Copyright (c) 1995 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + + +from PIL import Image, ImageFile, _binary + +__version__ = "0.2" + +i8 = _binary.i8 +i16 = _binary.i16be + + +def _accept(prefix): + return len(prefix) >= 2 and i16(prefix) == 474 + + +## +# Image plugin for SGI images. 
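Before the SGI reader below, a quick usage sketch of the PyAccess machinery that closed out PyAccess.py above: mode_map/new() select an access subclass by image mode and return None for modes they do not implement, and when the cffi build is active Image.load() can hand back such an object, so pixels read and write as tuples for multi-band modes and plain integers for single-band ones. The sketch assumes only that Pillow is importable as PIL; the 2x2 test image is invented for illustration.

# Hedged sketch of the pixel-access semantics implemented above; im.load()
# may return a PyAccess instance when cffi is available, or the C accessor
# otherwise -- the indexing behaviour is the same either way.
from PIL import Image

im = Image.new("RGB", (2, 2))
px = im.load()
px[0, 0] = (10, 20, 30)    # __setitem__ -> set_pixel, tuple for RGB
print(px[0, 0])            # __getitem__ -> (10, 20, 30)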
+ +class SgiImageFile(ImageFile.ImageFile): + + format = "SGI" + format_description = "SGI Image File Format" + + def _open(self): + + # HEAD + s = self.fp.read(512) + if i16(s) != 474: + raise ValueError("Not an SGI image file") + + # relevant header entries + compression = i8(s[2]) + + # bytes, dimension, zsize + layout = i8(s[3]), i16(s[4:]), i16(s[10:]) + + # determine mode from bytes/zsize + if layout == (1, 2, 1) or layout == (1, 1, 1): + self.mode = "L" + elif layout == (1, 3, 3): + self.mode = "RGB" + elif layout == (1, 3, 4): + self.mode = "RGBA" + else: + raise ValueError("Unsupported SGI image mode") + + # size + self.size = i16(s[6:]), i16(s[8:]) + + # decoder info + if compression == 0: + offset = 512 + pagesize = self.size[0]*self.size[1]*layout[0] + self.tile = [] + for layer in self.mode: + self.tile.append( + ("raw", (0, 0)+self.size, offset, (layer, 0, -1))) + offset = offset + pagesize + elif compression == 1: + raise ValueError("SGI RLE encoding not supported") + +# +# registry + +Image.register_open(SgiImageFile.format, SgiImageFile, _accept) + +Image.register_extension(SgiImageFile.format, ".bw") +Image.register_extension(SgiImageFile.format, ".rgb") +Image.register_extension(SgiImageFile.format, ".rgba") +Image.register_extension(SgiImageFile.format, ".sgi") + +# End of file diff --git a/Lib/site-packages/PIL/SpiderImagePlugin.py b/Lib/site-packages/PIL/SpiderImagePlugin.py new file mode 100644 index 0000000..d545789 --- /dev/null +++ b/Lib/site-packages/PIL/SpiderImagePlugin.py @@ -0,0 +1,322 @@ +# +# The Python Imaging Library. +# +# SPIDER image file handling +# +# History: +# 2004-08-02 Created BB +# 2006-03-02 added save method +# 2006-03-13 added support for stack images +# +# Copyright (c) 2004 by Health Research Inc. (HRI) RENSSELAER, NY 12144. +# Copyright (c) 2004 by William Baxter. +# Copyright (c) 2004 by Secret Labs AB. +# Copyright (c) 2004 by Fredrik Lundh. +# + +## +# Image plugin for the Spider image format. This format is is used +# by the SPIDER software, in processing image data from electron +# microscopy and tomography. +## + +# +# SpiderImagePlugin.py +# +# The Spider image format is used by SPIDER software, in processing +# image data from electron microscopy and tomography. +# +# Spider home page: +# http://spider.wadsworth.org/spider_doc/spider/docs/spider.html +# +# Details about the Spider image format: +# http://spider.wadsworth.org/spider_doc/spider/docs/image_doc.html +# + +from __future__ import print_function + +from PIL import Image, ImageFile +import os +import struct +import sys + + +def isInt(f): + try: + i = int(f) + if f-i == 0: + return 1 + else: + return 0 + except ValueError: + return 0 + except OverflowError: + return 0 + +iforms = [1, 3, -11, -12, -21, -22] + + +# There is no magic number to identify Spider files, so just check a +# series of header locations to see if they have reasonable values. +# Returns no.of bytes in the header, if it is a valid Spider header, +# otherwise returns 0 + +def isSpiderHeader(t): + h = (99,) + t # add 1 value so can use spider header index start=1 + # header values 1,2,5,12,13,22,23 should be integers + for i in [1, 2, 5, 12, 13, 22, 23]: + if not isInt(h[i]): + return 0 + # check iform + iform = int(h[5]) + if iform not in iforms: + return 0 + # check other header values + labrec = int(h[13]) # no. records in file header + labbyt = int(h[22]) # total no. 
of bytes in header + lenbyt = int(h[23]) # record length in bytes + # print "labrec = %d, labbyt = %d, lenbyt = %d" % (labrec,labbyt,lenbyt) + if labbyt != (labrec * lenbyt): + return 0 + # looks like a valid header + return labbyt + + +def isSpiderImage(filename): + fp = open(filename, 'rb') + f = fp.read(92) # read 23 * 4 bytes + fp.close() + t = struct.unpack('>23f', f) # try big-endian first + hdrlen = isSpiderHeader(t) + if hdrlen == 0: + t = struct.unpack('<23f', f) # little-endian + hdrlen = isSpiderHeader(t) + return hdrlen + + +class SpiderImageFile(ImageFile.ImageFile): + + format = "SPIDER" + format_description = "Spider 2D image" + + def _open(self): + # check header + n = 27 * 4 # read 27 float values + f = self.fp.read(n) + + try: + self.bigendian = 1 + t = struct.unpack('>27f', f) # try big-endian first + hdrlen = isSpiderHeader(t) + if hdrlen == 0: + self.bigendian = 0 + t = struct.unpack('<27f', f) # little-endian + hdrlen = isSpiderHeader(t) + if hdrlen == 0: + raise SyntaxError("not a valid Spider file") + except struct.error: + raise SyntaxError("not a valid Spider file") + + h = (99,) + t # add 1 value : spider header index starts at 1 + iform = int(h[5]) + if iform != 1: + raise SyntaxError("not a Spider 2D image") + + self.size = int(h[12]), int(h[2]) # size in pixels (width, height) + self.istack = int(h[24]) + self.imgnumber = int(h[27]) + + if self.istack == 0 and self.imgnumber == 0: + # stk=0, img=0: a regular 2D image + offset = hdrlen + self._nimages = 1 + elif self.istack > 0 and self.imgnumber == 0: + # stk>0, img=0: Opening the stack for the first time + self.imgbytes = int(h[12]) * int(h[2]) * 4 + self.hdrlen = hdrlen + self._nimages = int(h[26]) + # Point to the first image in the stack + offset = hdrlen * 2 + self.imgnumber = 1 + elif self.istack == 0 and self.imgnumber > 0: + # stk=0, img>0: an image within the stack + offset = hdrlen + self.stkoffset + self.istack = 2 # So Image knows it's still a stack + else: + raise SyntaxError("inconsistent stack header values") + + if self.bigendian: + self.rawmode = "F;32BF" + else: + self.rawmode = "F;32F" + self.mode = "F" + + self.tile = [ + ("raw", (0, 0) + self.size, offset, + (self.rawmode, 0, 1))] + self.__fp = self.fp # FIXME: hack + + @property + def n_frames(self): + return self._nimages + + @property + def is_animated(self): + return self._nimages > 1 + + # 1st image index is zero (although SPIDER imgnumber starts at 1) + def tell(self): + if self.imgnumber < 1: + return 0 + else: + return self.imgnumber - 1 + + def seek(self, frame): + if self.istack == 0: + return + if frame >= self._nimages: + raise EOFError("attempt to seek past end of file") + self.stkoffset = self.hdrlen + frame * (self.hdrlen + self.imgbytes) + self.fp = self.__fp + self.fp.seek(self.stkoffset) + self._open() + + # returns a byte image after rescaling to 0..255 + def convert2byte(self, depth=255): + (minimum, maximum) = self.getextrema() + m = 1 + if maximum != minimum: + m = depth / (maximum-minimum) + b = -m * minimum + return self.point(lambda i, m=m, b=b: i * m + b).convert("L") + + # returns a ImageTk.PhotoImage object, after rescaling to 0..255 + def tkPhotoImage(self): + from PIL import ImageTk + return ImageTk.PhotoImage(self.convert2byte(), palette=256) + + +# -------------------------------------------------------------------- +# Image series + +# given a list of filenames, return a list of images +def loadImageSeries(filelist=None): + " create a list of Image.images for use in montage " + if filelist is None or 
len(filelist) < 1: + return + + imglist = [] + for img in filelist: + if not os.path.exists(img): + print("unable to find %s" % img) + continue + try: + im = Image.open(img).convert2byte() + except: + if not isSpiderImage(img): + print(img + " is not a Spider image file") + continue + im.info['filename'] = img + imglist.append(im) + return imglist + + +# -------------------------------------------------------------------- +# For saving images in Spider format + +def makeSpiderHeader(im): + nsam, nrow = im.size + lenbyt = nsam * 4 # There are labrec records in the header + labrec = 1024 / lenbyt + if 1024 % lenbyt != 0: + labrec += 1 + labbyt = labrec * lenbyt + hdr = [] + nvalues = int(labbyt / 4) + for i in range(nvalues): + hdr.append(0.0) + + if len(hdr) < 23: + return [] + + # NB these are Fortran indices + hdr[1] = 1.0 # nslice (=1 for an image) + hdr[2] = float(nrow) # number of rows per slice + hdr[5] = 1.0 # iform for 2D image + hdr[12] = float(nsam) # number of pixels per line + hdr[13] = float(labrec) # number of records in file header + hdr[22] = float(labbyt) # total number of bytes in header + hdr[23] = float(lenbyt) # record length in bytes + + # adjust for Fortran indexing + hdr = hdr[1:] + hdr.append(0.0) + # pack binary data into a string + hdrstr = [] + for v in hdr: + hdrstr.append(struct.pack('f', v)) + return hdrstr + + +def _save(im, fp, filename): + if im.mode[0] != "F": + im = im.convert('F') + + hdr = makeSpiderHeader(im) + if len(hdr) < 256: + raise IOError("Error creating Spider header") + + # write the SPIDER header + try: + fp = open(filename, 'wb') + except: + raise IOError("Unable to open %s for writing" % filename) + fp.writelines(hdr) + + rawmode = "F;32NF" # 32-bit native floating point + ImageFile._save(im, fp, [("raw", (0, 0)+im.size, 0, (rawmode, 0, 1))]) + + fp.close() + + +def _save_spider(im, fp, filename): + # get the filename extension and register it with Image + ext = os.path.splitext(filename)[1] + Image.register_extension("SPIDER", ext) + _save(im, fp, filename) + +# -------------------------------------------------------------------- + +Image.register_open(SpiderImageFile.format, SpiderImageFile) +Image.register_save(SpiderImageFile.format, _save_spider) + +if __name__ == "__main__": + + if not sys.argv[1:]: + print("Syntax: python SpiderImagePlugin.py Spiderimage [outfile]") + sys.exit() + + filename = sys.argv[1] + if not isSpiderImage(filename): + print("input image must be in Spider format") + sys.exit() + + outfile = "" + if len(sys.argv[1:]) > 1: + outfile = sys.argv[2] + + im = Image.open(filename) + print("image: " + str(im)) + print("format: " + str(im.format)) + print("size: " + str(im.size)) + print("mode: " + str(im.mode)) + print("max, min: ", end=' ') + print(im.getextrema()) + + if outfile != "": + # perform some image operation + im = im.transpose(Image.FLIP_LEFT_RIGHT) + print( + "saving a flipped version of %s as %s " % + (os.path.basename(filename), outfile)) + im.save(outfile, "SPIDER") diff --git a/Lib/site-packages/PIL/SunImagePlugin.py b/Lib/site-packages/PIL/SunImagePlugin.py new file mode 100644 index 0000000..af63144 --- /dev/null +++ b/Lib/site-packages/PIL/SunImagePlugin.py @@ -0,0 +1,81 @@ +# +# The Python Imaging Library. 
+# $Id$ +# +# Sun image file handling +# +# History: +# 1995-09-10 fl Created +# 1996-05-28 fl Fixed 32-bit alignment +# 1998-12-29 fl Import ImagePalette module +# 2001-12-18 fl Fixed palette loading (from Jean-Claude Rimbault) +# +# Copyright (c) 1997-2001 by Secret Labs AB +# Copyright (c) 1995-1996 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + + +from PIL import Image, ImageFile, ImagePalette, _binary + +__version__ = "0.3" + +i32 = _binary.i32be + + +def _accept(prefix): + return len(prefix) >= 4 and i32(prefix) == 0x59a66a95 + + +## +# Image plugin for Sun raster files. + +class SunImageFile(ImageFile.ImageFile): + + format = "SUN" + format_description = "Sun Raster File" + + def _open(self): + + # HEAD + s = self.fp.read(32) + if i32(s) != 0x59a66a95: + raise SyntaxError("not an SUN raster file") + + offset = 32 + + self.size = i32(s[4:8]), i32(s[8:12]) + + depth = i32(s[12:16]) + if depth == 1: + self.mode, rawmode = "1", "1;I" + elif depth == 8: + self.mode = rawmode = "L" + elif depth == 24: + self.mode, rawmode = "RGB", "BGR" + else: + raise SyntaxError("unsupported mode") + + compression = i32(s[20:24]) + + if i32(s[24:28]) != 0: + length = i32(s[28:32]) + offset = offset + length + self.palette = ImagePalette.raw("RGB;L", self.fp.read(length)) + if self.mode == "L": + self.mode = rawmode = "P" + + stride = (((self.size[0] * depth + 7) // 8) + 3) & (~3) + + if compression == 1: + self.tile = [("raw", (0, 0)+self.size, offset, (rawmode, stride))] + elif compression == 2: + self.tile = [("sun_rle", (0, 0)+self.size, offset, rawmode)] + +# +# registry + +Image.register_open(SunImageFile.format, SunImageFile, _accept) + +Image.register_extension(SunImageFile.format, ".ras") diff --git a/Lib/site-packages/PIL/TarIO.py b/Lib/site-packages/PIL/TarIO.py new file mode 100644 index 0000000..4e5115b --- /dev/null +++ b/Lib/site-packages/PIL/TarIO.py @@ -0,0 +1,57 @@ +# +# The Python Imaging Library. +# $Id$ +# +# read files from within a tar file +# +# History: +# 95-06-18 fl Created +# 96-05-28 fl Open files in binary mode +# +# Copyright (c) Secret Labs AB 1997. +# Copyright (c) Fredrik Lundh 1995-96. +# +# See the README file for information on usage and redistribution. +# + +from PIL import ContainerIO + + +## +# A file object that provides read access to a given member of a TAR +# file. + +class TarIO(ContainerIO.ContainerIO): + + ## + # Create file object. + # + # @param tarfile Name of TAR file. + # @param file Name of member file. + + def __init__(self, tarfile, file): + + fh = open(tarfile, "rb") + + while True: + + s = fh.read(512) + if len(s) != 512: + raise IOError("unexpected end of tar file") + + name = s[:100].decode('utf-8') + i = name.find('\0') + if i == 0: + raise IOError("cannot find subfile") + if i > 0: + name = name[:i] + + size = int(s[124:135], 8) + + if file == name: + break + + fh.seek((size + 511) & (~511), 1) + + # Open region + ContainerIO.ContainerIO.__init__(self, fh, fh.tell(), size) diff --git a/Lib/site-packages/PIL/TgaImagePlugin.py b/Lib/site-packages/PIL/TgaImagePlugin.py new file mode 100644 index 0000000..a75ce29 --- /dev/null +++ b/Lib/site-packages/PIL/TgaImagePlugin.py @@ -0,0 +1,198 @@ +# +# The Python Imaging Library. 
+# $Id$ +# +# TGA file handling +# +# History: +# 95-09-01 fl created (reads 24-bit files only) +# 97-01-04 fl support more TGA versions, including compressed images +# 98-07-04 fl fixed orientation and alpha layer bugs +# 98-09-11 fl fixed orientation for runlength decoder +# +# Copyright (c) Secret Labs AB 1997-98. +# Copyright (c) Fredrik Lundh 1995-97. +# +# See the README file for information on usage and redistribution. +# + + +from PIL import Image, ImageFile, ImagePalette, _binary + +__version__ = "0.3" + + +# +# -------------------------------------------------------------------- +# Read RGA file + +i8 = _binary.i8 +i16 = _binary.i16le + + +MODES = { + # map imagetype/depth to rawmode + (1, 8): "P", + (3, 1): "1", + (3, 8): "L", + (2, 16): "BGR;5", + (2, 24): "BGR", + (2, 32): "BGRA", +} + + +## +# Image plugin for Targa files. + +class TgaImageFile(ImageFile.ImageFile): + + format = "TGA" + format_description = "Targa" + + def _open(self): + + # process header + s = self.fp.read(18) + + idlen = i8(s[0]) + + colormaptype = i8(s[1]) + imagetype = i8(s[2]) + + depth = i8(s[16]) + + flags = i8(s[17]) + + self.size = i16(s[12:]), i16(s[14:]) + + # validate header fields + if colormaptype not in (0, 1) or\ + self.size[0] <= 0 or self.size[1] <= 0 or\ + depth not in (1, 8, 16, 24, 32): + raise SyntaxError("not a TGA file") + + # image mode + if imagetype in (3, 11): + self.mode = "L" + if depth == 1: + self.mode = "1" # ??? + elif imagetype in (1, 9): + self.mode = "P" + elif imagetype in (2, 10): + self.mode = "RGB" + if depth == 32: + self.mode = "RGBA" + else: + raise SyntaxError("unknown TGA mode") + + # orientation + orientation = flags & 0x30 + if orientation == 0x20: + orientation = 1 + elif not orientation: + orientation = -1 + else: + raise SyntaxError("unknown TGA orientation") + + self.info["orientation"] = orientation + + if imagetype & 8: + self.info["compression"] = "tga_rle" + + if idlen: + self.info["id_section"] = self.fp.read(idlen) + + if colormaptype: + # read palette + start, size, mapdepth = i16(s[3:]), i16(s[5:]), i16(s[7:]) + if mapdepth == 16: + self.palette = ImagePalette.raw( + "BGR;16", b"\0"*2*start + self.fp.read(2*size)) + elif mapdepth == 24: + self.palette = ImagePalette.raw( + "BGR", b"\0"*3*start + self.fp.read(3*size)) + elif mapdepth == 32: + self.palette = ImagePalette.raw( + "BGRA", b"\0"*4*start + self.fp.read(4*size)) + + # setup tile descriptor + try: + rawmode = MODES[(imagetype & 7, depth)] + if imagetype & 8: + # compressed + self.tile = [("tga_rle", (0, 0)+self.size, + self.fp.tell(), (rawmode, orientation, depth))] + else: + self.tile = [("raw", (0, 0)+self.size, + self.fp.tell(), (rawmode, 0, orientation))] + except KeyError: + pass # cannot decode + +# +# -------------------------------------------------------------------- +# Write TGA file + +o8 = _binary.o8 +o16 = _binary.o16le +o32 = _binary.o32le + +SAVE = { + "1": ("1", 1, 0, 3), + "L": ("L", 8, 0, 3), + "P": ("P", 8, 1, 1), + "RGB": ("BGR", 24, 0, 2), + "RGBA": ("BGRA", 32, 0, 2), +} + + +def _save(im, fp, filename, check=0): + + try: + rawmode, bits, colormaptype, imagetype = SAVE[im.mode] + except KeyError: + raise IOError("cannot write mode %s as TGA" % im.mode) + + if check: + return check + + if colormaptype: + colormapfirst, colormaplength, colormapentry = 0, 256, 24 + else: + colormapfirst, colormaplength, colormapentry = 0, 0, 0 + + if im.mode == "RGBA": + flags = 8 + else: + flags = 0 + + orientation = im.info.get("orientation", -1) + if orientation > 0: + flags = flags | 
0x20 + + fp.write(b"\000" + + o8(colormaptype) + + o8(imagetype) + + o16(colormapfirst) + + o16(colormaplength) + + o8(colormapentry) + + o16(0) + + o16(0) + + o16(im.size[0]) + + o16(im.size[1]) + + o8(bits) + + o8(flags)) + + if colormaptype: + fp.write(im.im.getpalette("RGB", "BGR")) + + ImageFile._save( + im, fp, [("raw", (0, 0) + im.size, 0, (rawmode, 0, orientation))]) + +# +# -------------------------------------------------------------------- +# Registry + +Image.register_open(TgaImageFile.format, TgaImageFile) +Image.register_save(TgaImageFile.format, _save) + +Image.register_extension(TgaImageFile.format, ".tga") diff --git a/Lib/site-packages/PIL/TiffImagePlugin.py b/Lib/site-packages/PIL/TiffImagePlugin.py new file mode 100644 index 0000000..096be6f --- /dev/null +++ b/Lib/site-packages/PIL/TiffImagePlugin.py @@ -0,0 +1,1476 @@ +# +# The Python Imaging Library. +# $Id$ +# +# TIFF file handling +# +# TIFF is a flexible, if somewhat aged, image file format originally +# defined by Aldus. Although TIFF supports a wide variety of pixel +# layouts and compression methods, the name doesn't really stand for +# "thousands of incompatible file formats," it just feels that way. +# +# To read TIFF data from a stream, the stream must be seekable. For +# progressive decoding, make sure to use TIFF files where the tag +# directory is placed first in the file. +# +# History: +# 1995-09-01 fl Created +# 1996-05-04 fl Handle JPEGTABLES tag +# 1996-05-18 fl Fixed COLORMAP support +# 1997-01-05 fl Fixed PREDICTOR support +# 1997-08-27 fl Added support for rational tags (from Perry Stoll) +# 1998-01-10 fl Fixed seek/tell (from Jan Blom) +# 1998-07-15 fl Use private names for internal variables +# 1999-06-13 fl Rewritten for PIL 1.0 (1.0) +# 2000-10-11 fl Additional fixes for Python 2.0 (1.1) +# 2001-04-17 fl Fixed rewind support (seek to frame 0) (1.2) +# 2001-05-12 fl Added write support for more tags (from Greg Couch) (1.3) +# 2001-12-18 fl Added workaround for broken Matrox library +# 2002-01-18 fl Don't mess up if photometric tag is missing (D. Alan Stewart) +# 2003-05-19 fl Check FILLORDER tag +# 2003-09-26 fl Added RGBa support +# 2004-02-24 fl Added DPI support; fixed rational write support +# 2005-02-07 fl Added workaround for broken Corel Draw 10 files +# 2006-01-09 fl Added support for float/double tags (from Russell Nelson) +# +# Copyright (c) 1997-2006 by Secret Labs AB. All rights reserved. +# Copyright (c) 1995-1997 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +from __future__ import division, print_function + +from PIL import Image, ImageFile +from PIL import ImagePalette +from PIL import _binary +from PIL import TiffTags + +import collections +from fractions import Fraction +from numbers import Number, Rational + +import io +import itertools +import os +import struct +import sys +import warnings + +from .TiffTags import TYPES, TagInfo + + +__version__ = "1.3.5" +DEBUG = False # Needs to be merged with the new logging approach. + +# Set these to true to force use of libtiff for reading or writing. 
+READ_LIBTIFF = False +WRITE_LIBTIFF = False +IFD_LEGACY_API = True + +II = b"II" # little-endian (Intel style) +MM = b"MM" # big-endian (Motorola style) + +i8 = _binary.i8 +o8 = _binary.o8 + +# +# -------------------------------------------------------------------- +# Read TIFF files + +# a few tag names, just to make the code below a bit more readable +IMAGEWIDTH = 256 +IMAGELENGTH = 257 +BITSPERSAMPLE = 258 +COMPRESSION = 259 +PHOTOMETRIC_INTERPRETATION = 262 +FILLORDER = 266 +IMAGEDESCRIPTION = 270 +STRIPOFFSETS = 273 +SAMPLESPERPIXEL = 277 +ROWSPERSTRIP = 278 +STRIPBYTECOUNTS = 279 +X_RESOLUTION = 282 +Y_RESOLUTION = 283 +PLANAR_CONFIGURATION = 284 +RESOLUTION_UNIT = 296 +SOFTWARE = 305 +DATE_TIME = 306 +ARTIST = 315 +PREDICTOR = 317 +COLORMAP = 320 +TILEOFFSETS = 324 +EXTRASAMPLES = 338 +SAMPLEFORMAT = 339 +JPEGTABLES = 347 +COPYRIGHT = 33432 +IPTC_NAA_CHUNK = 33723 # newsphoto properties +PHOTOSHOP_CHUNK = 34377 # photoshop properties +ICCPROFILE = 34675 +EXIFIFD = 34665 +XMP = 700 + +# https://github.com/imagej/ImageJA/blob/master/src/main/java/ij/io/TiffDecoder.java +IMAGEJ_META_DATA_BYTE_COUNTS = 50838 +IMAGEJ_META_DATA = 50839 + +COMPRESSION_INFO = { + # Compression => pil compression name + 1: "raw", + 2: "tiff_ccitt", + 3: "group3", + 4: "group4", + 5: "tiff_lzw", + 6: "tiff_jpeg", # obsolete + 7: "jpeg", + 8: "tiff_adobe_deflate", + 32771: "tiff_raw_16", # 16-bit padding + 32773: "packbits", + 32809: "tiff_thunderscan", + 32946: "tiff_deflate", + 34676: "tiff_sgilog", + 34677: "tiff_sgilog24", +} + +COMPRESSION_INFO_REV = dict([(v, k) for (k, v) in COMPRESSION_INFO.items()]) + +OPEN_INFO = { + # (ByteOrder, PhotoInterpretation, SampleFormat, FillOrder, BitsPerSample, + # ExtraSamples) => mode, rawmode + (II, 0, (1,), 1, (1,), ()): ("1", "1;I"), + (MM, 0, (1,), 1, (1,), ()): ("1", "1;I"), + (II, 0, (1,), 2, (1,), ()): ("1", "1;IR"), + (MM, 0, (1,), 2, (1,), ()): ("1", "1;IR"), + (II, 0, (1,), 1, (8,), ()): ("L", "L;I"), + (MM, 0, (1,), 1, (8,), ()): ("L", "L;I"), + (II, 0, (1,), 2, (8,), ()): ("L", "L;IR"), + (MM, 0, (1,), 2, (8,), ()): ("L", "L;IR"), + (II, 0, (3,), 1, (32,), ()): ("F", "F;32F"), + (MM, 0, (3,), 1, (32,), ()): ("F", "F;32BF"), + (II, 1, (1,), 1, (1,), ()): ("1", "1"), + (MM, 1, (1,), 1, (1,), ()): ("1", "1"), + (II, 1, (1,), 1, (4,), ()): ("L", "L;4"), + # ? 
+ (II, 1, (1,), 2, (1,), ()): ("1", "1;R"), + (MM, 1, (1,), 2, (1,), ()): ("1", "1;R"), + (II, 1, (1,), 1, (8,), ()): ("L", "L"), + (MM, 1, (1,), 1, (8,), ()): ("L", "L"), + (II, 1, (1,), 1, (8, 8), (2,)): ("LA", "LA"), + (MM, 1, (1,), 1, (8, 8), (2,)): ("LA", "LA"), + (II, 1, (1,), 2, (8,), ()): ("L", "L;R"), + (MM, 1, (1,), 2, (8,), ()): ("L", "L;R"), + (II, 1, (1,), 1, (12,), ()): ("I;16", "I;12"), + (II, 1, (1,), 1, (16,), ()): ("I;16", "I;16"), + (MM, 1, (1,), 1, (16,), ()): ("I;16B", "I;16B"), + (II, 1, (2,), 1, (16,), ()): ("I;16S", "I;16S"), + (MM, 1, (2,), 1, (16,), ()): ("I;16BS", "I;16BS"), + (II, 1, (1,), 1, (32,), ()): ("I", "I;32N"), + (II, 1, (2,), 1, (32,), ()): ("I", "I;32S"), + (MM, 1, (2,), 1, (32,), ()): ("I;32BS", "I;32BS"), + (II, 1, (3,), 1, (32,), ()): ("F", "F;32F"), + (MM, 1, (3,), 1, (32,), ()): ("F", "F;32BF"), + (II, 2, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"), + (MM, 2, (1,), 1, (8, 8, 8), ()): ("RGB", "RGB"), + (II, 2, (1,), 2, (8, 8, 8), ()): ("RGB", "RGB;R"), + (MM, 2, (1,), 2, (8, 8, 8), ()): ("RGB", "RGB;R"), + (II, 2, (1,), 1, (8, 8, 8, 8), ()): ("RGBA", "RGBA"), # missing ExtraSamples + (MM, 2, (1,), 1, (8, 8, 8, 8), ()): ("RGBA", "RGBA"), # missing ExtraSamples + (II, 2, (1,), 1, (8, 8, 8, 8), (0,)): ("RGBX", "RGBX"), + (MM, 2, (1,), 1, (8, 8, 8, 8), (0,)): ("RGBX", "RGBX"), + (II, 2, (1,), 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBa"), + (MM, 2, (1,), 1, (8, 8, 8, 8), (1,)): ("RGBA", "RGBa"), + (II, 2, (1,), 1, (8, 8, 8, 8), (2,)): ("RGBA", "RGBA"), + (MM, 2, (1,), 1, (8, 8, 8, 8), (2,)): ("RGBA", "RGBA"), + (II, 2, (1,), 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"), # Corel Draw 10 + (MM, 2, (1,), 1, (8, 8, 8, 8), (999,)): ("RGBA", "RGBA"), # Corel Draw 10 + (II, 3, (1,), 1, (1,), ()): ("P", "P;1"), + (MM, 3, (1,), 1, (1,), ()): ("P", "P;1"), + (II, 3, (1,), 2, (1,), ()): ("P", "P;1R"), + (MM, 3, (1,), 2, (1,), ()): ("P", "P;1R"), + (II, 3, (1,), 1, (2,), ()): ("P", "P;2"), + (MM, 3, (1,), 1, (2,), ()): ("P", "P;2"), + (II, 3, (1,), 2, (2,), ()): ("P", "P;2R"), + (MM, 3, (1,), 2, (2,), ()): ("P", "P;2R"), + (II, 3, (1,), 1, (4,), ()): ("P", "P;4"), + (MM, 3, (1,), 1, (4,), ()): ("P", "P;4"), + (II, 3, (1,), 2, (4,), ()): ("P", "P;4R"), + (MM, 3, (1,), 2, (4,), ()): ("P", "P;4R"), + (II, 3, (1,), 1, (8,), ()): ("P", "P"), + (MM, 3, (1,), 1, (8,), ()): ("P", "P"), + (II, 3, (1,), 1, (8, 8), (2,)): ("PA", "PA"), + (MM, 3, (1,), 1, (8, 8), (2,)): ("PA", "PA"), + (II, 3, (1,), 2, (8,), ()): ("P", "P;R"), + (MM, 3, (1,), 2, (8,), ()): ("P", "P;R"), + (II, 5, (1,), 1, (8, 8, 8, 8), ()): ("CMYK", "CMYK"), + (MM, 5, (1,), 1, (8, 8, 8, 8), ()): ("CMYK", "CMYK"), + (II, 6, (1,), 1, (8, 8, 8), ()): ("YCbCr", "YCbCr"), + (MM, 6, (1,), 1, (8, 8, 8), ()): ("YCbCr", "YCbCr"), + (II, 8, (1,), 1, (8, 8, 8), ()): ("LAB", "LAB"), + (MM, 8, (1,), 1, (8, 8, 8), ()): ("LAB", "LAB"), +} + +PREFIXES = [b"MM\000\052", b"II\052\000", b"II\xBC\000"] + + +def _accept(prefix): + return prefix[:4] in PREFIXES + + +def _limit_rational(val, max_val): + inv = abs(val) > 1 + n_d = IFDRational(1 / val if inv else val).limit_rational(max_val) + return n_d[::-1] if inv else n_d + +## +# Wrapper for TIFF IFDs. + +_load_dispatch = {} +_write_dispatch = {} + +class IFDRational(Rational): + """ Implements a rational class where 0/0 is a legal value to match + the in the wild use of exif rationals. + + e.g., DigitalZoomRatio - 0.00/0.00 indicates that no digital zoom was used + """ + + """ If the denominator is 0, store this as a float('nan'), otherwise store + as a fractions.Fraction(). 
Delegate as appropriate + + """ + + __slots__ = ('_numerator', '_denominator', '_val') + + def __init__(self, value, denominator=1): + """ + :param value: either an integer numerator, a + float/rational/other number, or an IFDRational + :param denominator: Optional integer denominator + """ + self._denominator = denominator + self._numerator = value + self._val = float(1) + + if type(value) == Fraction: + self._numerator = value.numerator + self._denominator = value.denominator + self._val = value + + if type(value) == IFDRational: + self._denominator = value.denominator + self._numerator = value.numerator + self._val = value._val + return + + if denominator == 0: + self._val = float('nan') + return + + + elif denominator == 1: + if sys.hexversion < 0x2070000 and type(value) == float: + # python 2.6 is different. + self._val = Fraction.from_float(value) + else: + self._val = Fraction(value) + else: + self._val = Fraction(value, denominator) + + @property + def numerator(a): + return a._numerator + + @property + def denominator(a): + return a._denominator + + + def limit_rational(self, max_denominator): + """ + + :param max_denominator: Integer, the maximum denominator value + :returns: Tuple of (numerator, denominator) + """ + + if self.denominator == 0: + return (self.numerator, self.denominator) + + f = self._val.limit_denominator(max_denominator) + return (f.numerator, f.denominator) + + def __repr__(self): + return str(float(self._val)) + + def __hash__(self): + return self._val.__hash__() + + def __eq__(self,other): + return self._val == other + + def _delegate(op): + def delegate(self, *args): + return getattr(self._val,op)(*args) + return delegate + + """ a = ['add','radd', 'sub', 'rsub','div', 'rdiv', 'mul', 'rmul', + 'truediv', 'rtruediv', 'floordiv', + 'rfloordiv','mod','rmod', 'pow','rpow', 'pos', 'neg', + 'abs', 'trunc', 'lt', 'gt', 'le', 'ge', 'nonzero', + 'ceil', 'floor', 'round'] + print "\n".join("__%s__ = _delegate('__%s__')" % (s,s) for s in a) + """ + + __add__ = _delegate('__add__') + __radd__ = _delegate('__radd__') + __sub__ = _delegate('__sub__') + __rsub__ = _delegate('__rsub__') + __div__ = _delegate('__div__') + __rdiv__ = _delegate('__rdiv__') + __mul__ = _delegate('__mul__') + __rmul__ = _delegate('__rmul__') + __truediv__ = _delegate('__truediv__') + __rtruediv__ = _delegate('__rtruediv__') + __floordiv__ = _delegate('__floordiv__') + __rfloordiv__ = _delegate('__rfloordiv__') + __mod__ = _delegate('__mod__') + __rmod__ = _delegate('__rmod__') + __pow__ = _delegate('__pow__') + __rpow__ = _delegate('__rpow__') + __pos__ = _delegate('__pos__') + __neg__ = _delegate('__neg__') + __abs__ = _delegate('__abs__') + __trunc__ = _delegate('__trunc__') + __lt__ = _delegate('__lt__') + __gt__ = _delegate('__gt__') + __le__ = _delegate('__le__') + __ge__ = _delegate('__ge__') + __nonzero__ = _delegate('__nonzero__') + __ceil__ = _delegate('__ceil__') + __floor__ = _delegate('__floor__') + __round__ = _delegate('__round__') + + + +class ImageFileDirectory_v2(collections.MutableMapping): + """This class represents a TIFF tag directory. To speed things up, we + don't decode tags unless they're asked for. + + Exposes a dictionary interface of the tags in the directory:: + + ifd = ImageFileDirectory_v2() + ifd[key] = 'Some Data' + ifd.tagtype[key] = 2 + print(ifd[key]) + 'Some Data' + + Individual values are returned as the strings or numbers, sequences are + returned as tuples of the values. 
+ + The tiff metadata type of each item is stored in a dictionary of + tag types in + `~PIL.TiffImagePlugin.ImageFileDirectory_v2.tagtype`. The types + are read from a tiff file, guessed from the type added, or added + manually. + + Data Structures: + + * self.tagtype = {} + + * Key: numerical tiff tag number + * Value: integer corresponding to the data type from `~PIL.TiffTags.TYPES` + + .. versionadded:: 3.0.0 + """ + """ + Documentation: + + 'internal' data structures: + * self._tags_v2 = {} Key: numerical tiff tag number + Value: decoded data, as tuple for multiple values + * self._tagdata = {} Key: numerical tiff tag number + Value: undecoded byte string from file + * self._tags_v1 = {} Key: numerical tiff tag number + Value: decoded data in the v1 format + + Tags will be found in the private attributes self._tagdata, and in + self._tags_v2 once decoded. + + Self.legacy_api is a value for internal use, and shouldn't be + changed from outside code. In cooperation with the + ImageFileDirectory_v1 class, if legacy_api is true, then decoded + tags will be populated into both _tags_v1 and _tags_v2. _Tags_v2 + will be used if this IFD is used in the TIFF save routine. Tags + should be read from tags_v1 if legacy_api == true. + + """ + + def __init__(self, ifh=b"II\052\0\0\0\0\0", prefix=None): + """Initialize an ImageFileDirectory. + + To construct an ImageFileDirectory from a real file, pass the 8-byte + magic header to the constructor. To only set the endianness, pass it + as the 'prefix' keyword argument. + + :param ifh: One of the accepted magic headers (cf. PREFIXES); also sets + endianness. + :param prefix: Override the endianness of the file. + """ + if ifh[:4] not in PREFIXES: + raise SyntaxError("not a TIFF file (header %r not valid)" % ifh) + self._prefix = prefix if prefix is not None else ifh[:2] + if self._prefix == MM: + self._endian = ">" + elif self._prefix == II: + self._endian = "<" + else: + raise SyntaxError("not a TIFF IFD") + self.reset() + self.next, = self._unpack("L", ifh[4:]) + self._legacy_api = False + + prefix = property(lambda self: self._prefix) + offset = property(lambda self: self._offset) + legacy_api = property(lambda self: self._legacy_api) + + @legacy_api.setter + def legacy_api(self, value): + raise Exception("Not allowing setting of legacy api") + + def reset(self): + self._tags_v1 = {} # will remain empty if legacy_api is false + self._tags_v2 = {} # main tag storage + self._tagdata = {} + self.tagtype = {} # added 2008-06-05 by Florian Hoech + self._next = None + self._offset = None + + def __str__(self): + return str(dict(self)) + + def as_dict(self): + """Return a dictionary of the image's tags. + + use `dict(ifd)` instead. + + .. deprecated:: 3.0.0 + """ + # FIXME Deprecate: use dict(self) + return dict(self) + + def named(self): + """ + :returns: dict of name|key: value + + Returns the complete tag dictionary, with named tags where possible. 
+ """ + return dict((TiffTags.lookup(code).name, value) + for code, value in self.items()) + + def __len__(self): + return len(set(self._tagdata) | set(self._tags_v2)) + + def __getitem__(self, tag): + if tag not in self._tags_v2: # unpack on the fly + data = self._tagdata[tag] + typ = self.tagtype[tag] + size, handler = self._load_dispatch[typ] + self[tag] = handler(self, data, self.legacy_api) # check type + val = self._tags_v2[tag] + if self.legacy_api and not isinstance(val, (tuple, bytes)): + val = val, + return val + + def __contains__(self, tag): + return tag in self._tags_v2 or tag in self._tagdata + + if bytes is str: + def has_key(self, tag): + return tag in self + + def __setitem__(self, tag, value): + self._setitem(tag, value, self.legacy_api) + + def _setitem(self, tag, value, legacy_api): + basetypes = (Number, bytes, str) + if bytes is str: + basetypes += unicode, + + info = TiffTags.lookup(tag) + values = [value] if isinstance(value, basetypes) else value + + if tag not in self.tagtype: + if info.type: + self.tagtype[tag] = info.type + else: + self.tagtype[tag] = 7 + if all(isinstance(v, IFDRational) for v in values): + self.tagtype[tag] = 5 + elif all(isinstance(v, int) for v in values): + if all(v < 2 ** 16 for v in values): + self.tagtype[tag] = 3 + else: + self.tagtype[tag] = 4 + elif all(isinstance(v, float) for v in values): + self.tagtype[tag] = 12 + else: + if bytes is str: + # Never treat data as binary by default on Python 2. + self.tagtype[tag] = 2 + else: + if all(isinstance(v, str) for v in values): + self.tagtype[tag] = 2 + + if self.tagtype[tag] == 7 and bytes is not str: + values = [value.encode("ascii", 'replace') if isinstance(value, str) else value + for value in values] + + values = tuple(info.cvt_enum(value) for value in values) + + dest = self._tags_v1 if legacy_api else self._tags_v2 + + if info.length == 1: + if legacy_api and self.tagtype[tag] in [5, 10]: + values = values, + dest[tag], = values + else: + dest[tag] = values + + def __delitem__(self, tag): + self._tags_v2.pop(tag, None) + self._tags_v1.pop(tag, None) + self._tagdata.pop(tag, None) + + def __iter__(self): + return iter(set(self._tagdata) | set(self._tags_v2)) + + def _unpack(self, fmt, data): + return struct.unpack(self._endian + fmt, data) + + def _pack(self, fmt, *values): + return struct.pack(self._endian + fmt, *values) + + def _register_loader(idx, size): + def decorator(func): + from PIL.TiffTags import TYPES + if func.__name__.startswith("load_"): + TYPES[idx] = func.__name__[5:].replace("_", " ") + _load_dispatch[idx] = size, func + return func + return decorator + + def _register_writer(idx): + def decorator(func): + _write_dispatch[idx] = func + return func + return decorator + + def _register_basic(idx_fmt_name): + from PIL.TiffTags import TYPES + idx, fmt, name = idx_fmt_name + TYPES[idx] = name + size = struct.calcsize("=" + fmt) + _load_dispatch[idx] = size, lambda self, data, legacy_api=True: ( + self._unpack("{0}{1}".format(len(data) // size, fmt), data)) + _write_dispatch[idx] = lambda self, *values: ( + b"".join(self._pack(fmt, value) for value in values)) + + list(map(_register_basic, + [(3, "H", "short"), (4, "L", "long"), + (6, "b", "signed byte"), (8, "h", "signed short"), + (9, "l", "signed long"), (11, "f", "float"), (12, "d", "double")])) + + @_register_loader(1, 1) # Basic type, except for the legacy API. 
+ def load_byte(self, data, legacy_api=True): + return (data if legacy_api else + tuple(map(ord, data) if bytes is str else data)) + + @_register_writer(1) # Basic type, except for the legacy API. + def write_byte(self, data): + return data + + @_register_loader(2, 1) + def load_string(self, data, legacy_api=True): + if data.endswith(b"\0"): + data = data[:-1] + return data.decode("latin-1", "replace") + + @_register_writer(2) + def write_string(self, value): + # remerge of https://github.com/python-pillow/Pillow/pull/1416 + if sys.version_info[0] == 2: + value = value.decode('ascii', 'replace') + return b"" + value.encode('ascii', 'replace') + b"\0" + + @_register_loader(5, 8) + def load_rational(self, data, legacy_api=True): + vals = self._unpack("{0}L".format(len(data) // 4), data) + combine = lambda a, b: (a, b) if legacy_api else IFDRational(a, b) + return tuple(combine(num, denom) + for num, denom in zip(vals[::2], vals[1::2])) + + @_register_writer(5) + def write_rational(self, *values): + return b"".join(self._pack("2L", *_limit_rational(frac, 2 ** 31)) + for frac in values) + + @_register_loader(7, 1) + def load_undefined(self, data, legacy_api=True): + return data + + @_register_writer(7) + def write_undefined(self, value): + return value + + @_register_loader(10, 8) + def load_signed_rational(self, data, legacy_api=True): + vals = self._unpack("{0}l".format(len(data) // 4), data) + combine = lambda a, b: (a, b) if legacy_api else IFDRational(a, b) + return tuple(combine(num, denom) + for num, denom in zip(vals[::2], vals[1::2])) + + @_register_writer(10) + def write_signed_rational(self, *values): + return b"".join(self._pack("2L", *_limit_rational(frac, 2 ** 30)) + for frac in values) + + def _ensure_read(self, fp, size): + ret = fp.read(size) + if len(ret) != size: + raise IOError("Corrupt EXIF data. " + + "Expecting to read %d bytes but only got %d. " % + (size, len(ret))) + return ret + + def load(self, fp): + + self.reset() + self._offset = fp.tell() + + try: + for i in range(self._unpack("H", self._ensure_read(fp, 2))[0]): + tag, typ, count, data = self._unpack("HHL4s", self._ensure_read(fp, 12)) + if DEBUG: + tagname = TiffTags.lookup(tag).name + typname = TYPES.get(typ, "unknown") + print("tag: %s (%d) - type: %s (%d)" % + (tagname, tag, typname, typ), end=" ") + + try: + unit_size, handler = self._load_dispatch[typ] + except KeyError: + if DEBUG: + print("- unsupported type", typ) + continue # ignore unsupported type + size = count * unit_size + if size > 4: + here = fp.tell() + offset, = self._unpack("L", data) + if DEBUG: + print("Tag Location: %s - Data Location: %s" % + (here, offset), end=" ") + fp.seek(offset) + data = ImageFile._safe_read(fp, size) + fp.seek(here) + else: + data = data[:size] + + if len(data) != size: + warnings.warn("Possibly corrupt EXIF data. " + "Expecting to read %d bytes but only got %d. " + "Skipping tag %s" % (size, len(data), tag)) + continue + + self._tagdata[tag] = data + self.tagtype[tag] = typ + + if DEBUG: + if size > 32: + print("- value: " % size) + else: + print("- value:", self[tag]) + + self.next, = self._unpack("L", self._ensure_read(fp, 4)) + except IOError as msg: + warnings.warn(str(msg)) + return + + def save(self, fp): + + if fp.tell() == 0: # skip TIFF header on subsequent pages + # tiff header -- PIL always starts the first IFD at offset 8 + fp.write(self._prefix + self._pack("HL", 42, 8)) + + # FIXME What about tagdata? 
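+        # On-disk layout written below (TIFF 6.0 IFD): a 2-byte entry count,
+        # then one 12-byte entry per tag (tag:2, type:2, count:4,
+        # value-or-offset:4), a 4-byte "next IFD" offset, and finally any
+        # values that did not fit into the 4-byte value field.
+        #
+        # Minimal sketch of driving this writer directly (tag 270 is
+        # ImageDescription, ASCII; the text is arbitrary):
+        #
+        #     >>> from io import BytesIO
+        #     >>> from PIL.TiffImagePlugin import ImageFileDirectory_v2
+        #     >>> ifd = ImageFileDirectory_v2()
+        #     >>> ifd[270] = "example"
+        #     >>> end_of_data = ifd.save(BytesIO())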
+ fp.write(self._pack("H", len(self._tags_v2))) + + entries = [] + offset = fp.tell() + len(self._tags_v2) * 12 + 4 + stripoffsets = None + + # pass 1: convert tags to binary format + # always write tags in ascending order + for tag, value in sorted(self._tags_v2.items()): + if tag == STRIPOFFSETS: + stripoffsets = len(entries) + typ = self.tagtype.get(tag) + if DEBUG: + print("Tag %s, Type: %s, Value: %s" % (tag, typ, value)) + values = value if isinstance(value, tuple) else (value,) + data = self._write_dispatch[typ](self, *values) + if DEBUG: + tagname = TiffTags.lookup(tag).name + typname = TYPES.get(typ, "unknown") + print("save: %s (%d) - type: %s (%d)" % + (tagname, tag, typname, typ), end=" ") + if len(data) >= 16: + print("- value: " % len(data)) + else: + print("- value:", values) + + # count is sum of lengths for string and arbitrary data + count = len(data) if typ in [2, 7] else len(values) + # figure out if data fits into the entry + if len(data) <= 4: + entries.append((tag, typ, count, data.ljust(4, b"\0"), b"")) + else: + entries.append((tag, typ, count, self._pack("L", offset), data)) + offset += (len(data) + 1) // 2 * 2 # pad to word + + # update strip offset data to point beyond auxiliary data + if stripoffsets is not None: + tag, typ, count, value, data = entries[stripoffsets] + if data: + raise NotImplementedError( + "multistrip support not yet implemented") + value = self._pack("L", self._unpack("L", value)[0] + offset) + entries[stripoffsets] = tag, typ, count, value, data + + # pass 2: write entries to file + for tag, typ, count, value, data in entries: + if DEBUG > 1: + print(tag, typ, count, repr(value), repr(data)) + fp.write(self._pack("HHL4s", tag, typ, count, value)) + + # -- overwrite here for multi-page -- + fp.write(b"\0\0\0\0") # end of entries + + # pass 3: write auxiliary data to file + for tag, typ, count, value, data in entries: + fp.write(data) + if len(data) & 1: + fp.write(b"\0") + + return offset + +ImageFileDirectory_v2._load_dispatch = _load_dispatch +ImageFileDirectory_v2._write_dispatch = _write_dispatch +for idx, name in TYPES.items(): + name = name.replace(" ", "_") + setattr(ImageFileDirectory_v2, "load_" + name, _load_dispatch[idx][1]) + setattr(ImageFileDirectory_v2, "write_" + name, _write_dispatch[idx]) +del _load_dispatch, _write_dispatch, idx, name + + +# Legacy ImageFileDirectory support. +class ImageFileDirectory_v1(ImageFileDirectory_v2): + """This class represents the **legacy** interface to a TIFF tag directory. + + Exposes a dictionary interface of the tags in the directory:: + + ifd = ImageFileDirectory_v1() + ifd[key] = 'Some Data' + ifd.tagtype[key] = 2 + print ifd[key] + ('Some Data',) + + Also contains a dictionary of tag types as read from the tiff image file, + `~PIL.TiffImagePlugin.ImageFileDirectory_v1.tagtype`. + + Values are returned as a tuple. + + .. deprecated:: 3.0.0 + """ + def __init__(self, *args, **kwargs): + ImageFileDirectory_v2.__init__(self, *args, **kwargs) + self._legacy_api = True + + tags = property(lambda self: self._tags_v1) + tagdata = property(lambda self: self._tagdata) + + @classmethod + def from_v2(cls, original): + """ Returns an + :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1` + instance with the same data as is contained in the original + :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2` + instance. 
+ + :returns: :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1` + + """ + + ifd = cls(prefix=original.prefix) + ifd._tagdata = original._tagdata + ifd.tagtype = original.tagtype + ifd.next = original.next # an indicator for multipage tiffs + return ifd + + def to_v2(self): + """ Returns an + :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2` + instance with the same data as is contained in the original + :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v1` + instance. + + :returns: :py:class:`~PIL.TiffImagePlugin.ImageFileDirectory_v2` + + """ + + ifd = ImageFileDirectory_v2(prefix=self.prefix) + ifd._tagdata = dict(self._tagdata) + ifd.tagtype = dict(self.tagtype) + ifd._tags_v2 = dict(self._tags_v2) + return ifd + + def __contains__(self, tag): + return tag in self._tags_v1 or tag in self._tagdata + + def __len__(self): + return len(set(self._tagdata) | set(self._tags_v1)) + + def __iter__(self): + return iter(set(self._tagdata) | set(self._tags_v1)) + + def __setitem__(self, tag, value): + for legacy_api in (False, True): + self._setitem(tag, value, legacy_api) + + def __getitem__(self, tag): + if tag not in self._tags_v1: # unpack on the fly + data = self._tagdata[tag] + typ = self.tagtype[tag] + size, handler = self._load_dispatch[typ] + for legacy in (False, True): + self._setitem(tag, handler(self, data, legacy), legacy) + val = self._tags_v1[tag] + if not isinstance(val, (tuple, bytes)): + val = val, + return val + + +# undone -- switch this pointer when IFD_LEGACY_API == False +ImageFileDirectory = ImageFileDirectory_v1 + + +## +# Image plugin for TIFF files. + +class TiffImageFile(ImageFile.ImageFile): + + format = "TIFF" + format_description = "Adobe TIFF" + + def _open(self): + "Open the first image in a TIFF file" + + # Header + ifh = self.fp.read(8) + + # image file directory (tag dictionary) + self.tag_v2 = ImageFileDirectory_v2(ifh) + + # legacy tag/ifd entries will be filled in later + self.tag = self.ifd = None + + # setup frame pointers + self.__first = self.__next = self.tag_v2.next + self.__frame = -1 + self.__fp = self.fp + self._frame_pos = [] + self._n_frames = None + self._is_animated = None + + if DEBUG: + print("*** TiffImageFile._open ***") + print("- __first:", self.__first) + print("- ifh: ", ifh) + + # and load the first frame + self._seek(0) + + @property + def n_frames(self): + if self._n_frames is None: + current = self.tell() + try: + while True: + self._seek(self.tell() + 1) + except EOFError: + self._n_frames = self.tell() + 1 + self.seek(current) + return self._n_frames + + @property + def is_animated(self): + if self._is_animated is None: + current = self.tell() + + try: + self.seek(1) + self._is_animated = True + except EOFError: + self._is_animated = False + + self.seek(current) + return self._is_animated + + def seek(self, frame): + "Select a given frame as current image" + self._seek(max(frame, 0)) # Questionable backwards compatibility. + # Create a new core image object on second and + # subsequent frames in the image. Image may be + # different size/mode. 
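+        # Typical multi-frame usage of this method (a sketch; the file name
+        # is hypothetical):
+        #
+        #     >>> from PIL import Image
+        #     >>> im = Image.open("multipage.tif")
+        #     >>> for i in range(im.n_frames):
+        #     ...     im.seek(i)
+        #     ...     im.load()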
+ Image._decompression_bomb_check(self.size) + self.im = Image.core.new(self.mode, self.size) + + def _seek(self, frame): + self.fp = self.__fp + while len(self._frame_pos) <= frame: + if not self.__next: + raise EOFError("no more images in TIFF file") + if DEBUG: + print("Seeking to frame %s, on frame %s, " + "__next %s, location: %s" % + (frame, self.__frame, self.__next, self.fp.tell())) + # reset python3 buffered io handle in case fp + # was passed to libtiff, invalidating the buffer + self.fp.tell() + self.fp.seek(self.__next) + self._frame_pos.append(self.__next) + if DEBUG: + print("Loading tags, location: %s" % self.fp.tell()) + self.tag_v2.load(self.fp) + self.__next = self.tag_v2.next + self.__frame += 1 + self.fp.seek(self._frame_pos[frame]) + self.tag_v2.load(self.fp) + # fill the legacy tag/ifd entries + self.tag = self.ifd = ImageFileDirectory_v1.from_v2(self.tag_v2) + self.__frame = frame + self._setup() + + def tell(self): + "Return the current frame number" + return self.__frame + + def _decoder(self, rawmode, layer, tile=None): + "Setup decoder contexts" + + args = None + if rawmode == "RGB" and self._planar_configuration == 2: + rawmode = rawmode[layer] + compression = self._compression + if compression == "raw": + args = (rawmode, 0, 1) + elif compression == "jpeg": + args = rawmode, "" + if JPEGTABLES in self.tag_v2: + # Hack to handle abbreviated JPEG headers + # FIXME This will fail with more than one value + self.tile_prefix, = self.tag_v2[JPEGTABLES] + elif compression == "packbits": + args = rawmode + elif compression == "tiff_lzw": + args = rawmode + if PREDICTOR in self.tag_v2: + # Section 14: Differencing Predictor + self.decoderconfig = (self.tag_v2[PREDICTOR],) + + if ICCPROFILE in self.tag_v2: + self.info['icc_profile'] = self.tag_v2[ICCPROFILE] + + return args + + def _load_libtiff(self): + """ Overload method triggered when we detect a compressed tiff + Calls out to libtiff """ + + pixel = Image.Image.load(self) + + if self.tile is None: + raise IOError("cannot load this image") + if not self.tile: + return pixel + + self.load_prepare() + + if not len(self.tile) == 1: + raise IOError("Not exactly one tile") + + # (self._compression, (extents tuple), + # 0, (rawmode, self._compression, fp)) + extents = self.tile[0][1] + args = self.tile[0][3] + (self.tag_v2.offset,) + decoder = Image._getdecoder(self.mode, 'libtiff', args, + self.decoderconfig) + try: + decoder.setimage(self.im, extents) + except ValueError: + raise IOError("Couldn't set the image") + + if hasattr(self.fp, "getvalue"): + # We've got a stringio like thing passed in. Yay for all in memory. + # The decoder needs the entire file in one shot, so there's not + # a lot we can do here other than give it the entire file. + # unless we could do something like get the address of the + # underlying string for stringio. + # + # Rearranging for supporting byteio items, since they have a fileno + # that returns an IOError if there's no underlying fp. Easier to + # deal with here by reordering. + if DEBUG: + print("have getvalue. just sending in a string from getvalue") + n, err = decoder.decode(self.fp.getvalue()) + elif hasattr(self.fp, "fileno"): + # we've got a actual file on disk, pass in the fp. + if DEBUG: + print("have fileno, calling fileno version of the decoder.") + self.fp.seek(0) + # 4 bytes, otherwise the trace might error out + n, err = decoder.decode(b"fpfp") + else: + # we have something else. + if DEBUG: + print("don't have fileno or getvalue. 
just reading") + # UNDONE -- so much for that buffer size thing. + n, err = decoder.decode(self.fp.read()) + + self.tile = [] + self.readonly = 0 + # libtiff closed the fp in a, we need to close self.fp, if possible + if hasattr(self.fp, 'close'): + if not self.__next: + self.fp.close() + self.fp = None # might be shared + + if err < 0: + raise IOError(err) + + self.load_end() + + return Image.Image.load(self) + + def _setup(self): + "Setup this image object based on current tags" + + if 0xBC01 in self.tag_v2: + raise IOError("Windows Media Photo files not yet supported") + + # extract relevant tags + self._compression = COMPRESSION_INFO[self.tag_v2.get(COMPRESSION, 1)] + self._planar_configuration = self.tag_v2.get(PLANAR_CONFIGURATION, 1) + + # photometric is a required tag, but not everyone is reading + # the specification + photo = self.tag_v2.get(PHOTOMETRIC_INTERPRETATION, 0) + + fillorder = self.tag_v2.get(FILLORDER, 1) + + if DEBUG: + print("*** Summary ***") + print("- compression:", self._compression) + print("- photometric_interpretation:", photo) + print("- planar_configuration:", self._planar_configuration) + print("- fill_order:", fillorder) + + # size + xsize = self.tag_v2.get(IMAGEWIDTH) + ysize = self.tag_v2.get(IMAGELENGTH) + self.size = xsize, ysize + + if DEBUG: + print("- size:", self.size) + + format = self.tag_v2.get(SAMPLEFORMAT, (1,)) + if len(format) > 1 and max(format) == min(format) == 1: + # SAMPLEFORMAT is properly per band, so an RGB image will + # be (1,1,1). But, we don't support per band pixel types, + # and anything more than one band is a uint8. So, just + # take the first element. Revisit this if adding support + # for more exotic images. + format = (1,) + + # mode: check photometric interpretation and bits per pixel + key = ( + self.tag_v2.prefix, photo, format, fillorder, + self.tag_v2.get(BITSPERSAMPLE, (1,)), + self.tag_v2.get(EXTRASAMPLES, ()) + ) + if DEBUG: + print("format key:", key) + try: + self.mode, rawmode = OPEN_INFO[key] + except KeyError: + if DEBUG: + print("- unsupported format") + raise SyntaxError("unknown pixel mode") + + if DEBUG: + print("- raw mode:", rawmode) + print("- pil mode:", self.mode) + + self.info["compression"] = self._compression + + xres = self.tag_v2.get(X_RESOLUTION,1) + yres = self.tag_v2.get(Y_RESOLUTION,1) + + if xres and yres: + resunit = self.tag_v2.get(RESOLUTION_UNIT, 1) + if resunit == 2: # dots per inch + self.info["dpi"] = xres, yres + elif resunit == 3: # dots per centimeter. convert to dpi + self.info["dpi"] = xres * 2.54, yres * 2.54 + else: # No absolute unit of measurement + self.info["resolution"] = xres, yres + + # build tile descriptors + x = y = l = 0 + self.tile = [] + if STRIPOFFSETS in self.tag_v2: + # striped image + offsets = self.tag_v2[STRIPOFFSETS] + h = self.tag_v2.get(ROWSPERSTRIP, ysize) + w = self.size[0] + if READ_LIBTIFF or self._compression in ["tiff_ccitt", "group3", + "group4", "tiff_jpeg", + "tiff_adobe_deflate", + "tiff_thunderscan", + "tiff_deflate", + "tiff_sgilog", + "tiff_sgilog24", + "tiff_raw_16"]: + # if DEBUG: + # print "Activating g4 compression for whole file" + + # Decoder expects entire file as one tile. + # There's a buffer size limit in load (64k) + # so large g4 images will fail if we use that + # function. + # + # Setup the one tile for the whole image, then + # replace the existing load function with our + # _load_libtiff function. 
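+                # Each entry appended to self.tile is a standard PIL tile
+                # descriptor: (decoder_name, (x0, y0, x1, y1), offset, args).
+                # In this branch a single whole-image tile is used, e.g.
+                # ("group4", (0, 0, w, ysize), 0, (rawmode, "group4", fp)).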
+ + self.load = self._load_libtiff + + # To be nice on memory footprint, if there's a + # file descriptor, use that instead of reading + # into a string in python. + + # libtiff closes the file descriptor, so pass in a dup. + try: + fp = hasattr(self.fp, "fileno") and \ + os.dup(self.fp.fileno()) + # flush the file descriptor, prevents error on pypy 2.4+ + # should also eliminate the need for fp.tell for py3 + # in _seek + if hasattr(self.fp, "flush"): + self.fp.flush() + except IOError: + # io.BytesIO have a fileno, but returns an IOError if + # it doesn't use a file descriptor. + fp = False + + # libtiff handles the fillmode for us, so 1;IR should + # actually be 1;I. Including the R double reverses the + # bits, so stripes of the image are reversed. See + # https://github.com/python-pillow/Pillow/issues/279 + if fillorder == 2: + key = ( + self.tag_v2.prefix, photo, format, 1, + self.tag_v2.get(BITSPERSAMPLE, (1,)), + self.tag_v2.get(EXTRASAMPLES, ()) + ) + if DEBUG: + print("format key:", key) + # this should always work, since all the + # fillorder==2 modes have a corresponding + # fillorder=1 mode + self.mode, rawmode = OPEN_INFO[key] + # libtiff always returns the bytes in native order. + # we're expecting image byte order. So, if the rawmode + # contains I;16, we need to convert from native to image + # byte order. + if self.mode in ('I;16B', 'I;16') and 'I;16' in rawmode: + rawmode = 'I;16N' + + # Offset in the tile tuple is 0, we go from 0,0 to + # w,h, and we only do this once -- eds + a = (rawmode, self._compression, fp) + self.tile.append( + (self._compression, + (0, 0, w, ysize), + 0, a)) + a = None + + else: + for i in range(len(offsets)): + a = self._decoder(rawmode, l, i) + self.tile.append( + (self._compression, + (0, min(y, ysize), w, min(y+h, ysize)), + offsets[i], a)) + if DEBUG: + print("tiles: ", self.tile) + y = y + h + if y >= self.size[1]: + x = y = 0 + l += 1 + a = None + elif TILEOFFSETS in self.tag_v2: + # tiled image + w = self.tag_v2.get(322) + h = self.tag_v2.get(323) + a = None + for o in self.tag_v2[TILEOFFSETS]: + if not a: + a = self._decoder(rawmode, l) + # FIXME: this doesn't work if the image size + # is not a multiple of the tile size... 
+ self.tile.append( + (self._compression, + (x, y, x+w, y+h), + o, a)) + x = x + w + if x >= self.size[0]: + x, y = 0, y + h + if y >= self.size[1]: + x = y = 0 + l += 1 + a = None + else: + if DEBUG: + print("- unsupported data organization") + raise SyntaxError("unknown data organization") + + # fixup palette descriptor + + if self.mode == "P": + palette = [o8(b // 256) for b in self.tag_v2[COLORMAP]] + self.palette = ImagePalette.raw("RGB;L", b"".join(palette)) +# +# -------------------------------------------------------------------- +# Write TIFF files + +# little endian is default except for image modes with +# explicit big endian byte-order + +SAVE_INFO = { + # mode => rawmode, byteorder, photometrics, + # sampleformat, bitspersample, extra + "1": ("1", II, 1, 1, (1,), None), + "L": ("L", II, 1, 1, (8,), None), + "LA": ("LA", II, 1, 1, (8, 8), 2), + "P": ("P", II, 3, 1, (8,), None), + "PA": ("PA", II, 3, 1, (8, 8), 2), + "I": ("I;32S", II, 1, 2, (32,), None), + "I;16": ("I;16", II, 1, 1, (16,), None), + "I;16S": ("I;16S", II, 1, 2, (16,), None), + "F": ("F;32F", II, 1, 3, (32,), None), + "RGB": ("RGB", II, 2, 1, (8, 8, 8), None), + "RGBX": ("RGBX", II, 2, 1, (8, 8, 8, 8), 0), + "RGBA": ("RGBA", II, 2, 1, (8, 8, 8, 8), 2), + "CMYK": ("CMYK", II, 5, 1, (8, 8, 8, 8), None), + "YCbCr": ("YCbCr", II, 6, 1, (8, 8, 8), None), + "LAB": ("LAB", II, 8, 1, (8, 8, 8), None), + + "I;32BS": ("I;32BS", MM, 1, 2, (32,), None), + "I;16B": ("I;16B", MM, 1, 1, (16,), None), + "I;16BS": ("I;16BS", MM, 1, 2, (16,), None), + "F;32BF": ("F;32BF", MM, 1, 3, (32,), None), +} + + +def _save(im, fp, filename): + + try: + rawmode, prefix, photo, format, bits, extra = SAVE_INFO[im.mode] + except KeyError: + raise IOError("cannot write mode %s as TIFF" % im.mode) + + ifd = ImageFileDirectory_v2(prefix=prefix) + + compression = im.encoderinfo.get('compression', + im.info.get('compression', 'raw')) + + libtiff = WRITE_LIBTIFF or compression != 'raw' + + # required for color libtiff images + ifd[PLANAR_CONFIGURATION] = getattr(im, '_planar_configuration', 1) + + ifd[IMAGEWIDTH] = im.size[0] + ifd[IMAGELENGTH] = im.size[1] + + # write any arbitrary tags passed in as an ImageFileDirectory + info = im.encoderinfo.get("tiffinfo", {}) + if DEBUG: + print("Tiffinfo Keys: %s" % list(info)) + if isinstance(info, ImageFileDirectory_v1): + info = info.to_v2() + for key in info: + ifd[key] = info.get(key) + try: + ifd.tagtype[key] = info.tagtype[key] + except: + pass # might not be an IFD, Might not have populated type + + # additions written by Greg Couch, gregc@cgl.ucsf.edu + # inspired by image-sig posting from Kevin Cazabon, kcazabon@home.com + if hasattr(im, 'tag_v2'): + # preserve tags from original TIFF image file + for key in (RESOLUTION_UNIT, X_RESOLUTION, Y_RESOLUTION, + IPTC_NAA_CHUNK, PHOTOSHOP_CHUNK, XMP): + if key in im.tag_v2: + ifd[key] = im.tag_v2[key] + ifd.tagtype[key] = im.tag_v2.tagtype.get(key, None) + + # preserve ICC profile (should also work when saving other formats + # which support profiles as TIFF) -- 2008-06-06 Florian Hoech + if "icc_profile" in im.info: + ifd[ICCPROFILE] = im.info["icc_profile"] + + for key, name in [(IMAGEDESCRIPTION, "description"), + (X_RESOLUTION, "resolution"), + (Y_RESOLUTION, "resolution"), + (X_RESOLUTION, "x_resolution"), + (Y_RESOLUTION, "y_resolution"), + (RESOLUTION_UNIT, "resolution_unit"), + (SOFTWARE, "software"), + (DATE_TIME, "date_time"), + (ARTIST, "artist"), + (COPYRIGHT, "copyright")]: + name_with_spaces = name.replace("_", " ") + if "_" in name and 
name_with_spaces in im.encoderinfo: + warnings.warn("%r is deprecated; use %r instead" % + (name_with_spaces, name), DeprecationWarning) + ifd[key] = im.encoderinfo[name.replace("_", " ")] + if name in im.encoderinfo: + ifd[key] = im.encoderinfo[name] + + dpi = im.encoderinfo.get("dpi") + if dpi: + ifd[RESOLUTION_UNIT] = 2 + ifd[X_RESOLUTION] = dpi[0] + ifd[Y_RESOLUTION] = dpi[1] + + if bits != (1,): + ifd[BITSPERSAMPLE] = bits + if len(bits) != 1: + ifd[SAMPLESPERPIXEL] = len(bits) + if extra is not None: + ifd[EXTRASAMPLES] = extra + if format != 1: + ifd[SAMPLEFORMAT] = format + + ifd[PHOTOMETRIC_INTERPRETATION] = photo + + if im.mode == "P": + lut = im.im.getpalette("RGB", "RGB;L") + ifd[COLORMAP] = tuple(i8(v) * 256 for v in lut) + + # data orientation + stride = len(bits) * ((im.size[0]*bits[0]+7)//8) + ifd[ROWSPERSTRIP] = im.size[1] + ifd[STRIPBYTECOUNTS] = stride * im.size[1] + ifd[STRIPOFFSETS] = 0 # this is adjusted by IFD writer + # no compression by default: + ifd[COMPRESSION] = COMPRESSION_INFO_REV.get(compression, 1) + + if libtiff: + if DEBUG: + print("Saving using libtiff encoder") + print("Items: %s" % sorted(ifd.items())) + _fp = 0 + if hasattr(fp, "fileno"): + try: + fp.seek(0) + _fp = os.dup(fp.fileno()) + except io.UnsupportedOperation: + pass + + # STRIPOFFSETS and STRIPBYTECOUNTS are added by the library + # based on the data in the strip. + blocklist = [STRIPOFFSETS, STRIPBYTECOUNTS] + atts = {} + # bits per sample is a single short in the tiff directory, not a list. + atts[BITSPERSAMPLE] = bits[0] + # Merge the ones that we have with (optional) more bits from + # the original file, e.g x,y resolution so that we can + # save(load('')) == original file. + legacy_ifd = {} + if hasattr(im, 'tag'): + legacy_ifd = im.tag.to_v2() + for tag, value in itertools.chain(ifd.items(), + getattr(im, 'tag_v2', {}).items(), + legacy_ifd.items()): + # Libtiff can only process certain core items without adding + # them to the custom dictionary. It will segfault if it attempts + # to add a custom tag without the dictionary entry + # + # UNDONE -- add code for the custom dictionary + if tag not in TiffTags.LIBTIFF_CORE: continue + if tag not in atts and tag not in blocklist: + if isinstance(value, unicode if bytes is str else str): + atts[tag] = value.encode('ascii', 'replace') + b"\0" + elif isinstance(value, IFDRational): + atts[tag] = float(value) + else: + atts[tag] = value + + if DEBUG: + print("Converted items: %s" % sorted(atts.items())) + + # libtiff always expects the bytes in native order. + # we're storing image byte order. So, if the rawmode + # contains I;16, we need to convert from native to image + # byte order. 
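+        # Illustrative call that reaches this libtiff branch (a sketch; the
+        # output name and compression choice are arbitrary -- any value other
+        # than "raw" selects the libtiff encoder above):
+        #
+        #     >>> from PIL import Image
+        #     >>> Image.new("RGB", (32, 32)).save("out.tif",
+        #     ...                                 compression="tiff_deflate")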
+ if im.mode in ('I;16B', 'I;16'): + rawmode = 'I;16N' + + a = (rawmode, compression, _fp, filename, atts) + # print(im.mode, compression, a, im.encoderconfig) + e = Image._getencoder(im.mode, 'libtiff', a, im.encoderconfig) + e.setimage(im.im, (0, 0)+im.size) + while True: + # undone, change to self.decodermaxblock: + l, s, d = e.encode(16*1024) + if not _fp: + fp.write(d) + if s: + break + if s < 0: + raise IOError("encoder error %d when writing image file" % s) + + else: + offset = ifd.save(fp) + + ImageFile._save(im, fp, [ + ("raw", (0, 0)+im.size, offset, (rawmode, stride, 1)) + ]) + + # -- helper for multi-page save -- + if "_debug_multipage" in im.encoderinfo: + # just to access o32 and o16 (using correct byte order) + im._debug_multipage = ifd + +# +# -------------------------------------------------------------------- +# Register + +Image.register_open(TiffImageFile.format, TiffImageFile, _accept) +Image.register_save(TiffImageFile.format, _save) + +Image.register_extension(TiffImageFile.format, ".tif") +Image.register_extension(TiffImageFile.format, ".tiff") + +Image.register_mime(TiffImageFile.format, "image/tiff") diff --git a/Lib/site-packages/PIL/TiffTags.py b/Lib/site-packages/PIL/TiffTags.py new file mode 100644 index 0000000..07d594e --- /dev/null +++ b/Lib/site-packages/PIL/TiffTags.py @@ -0,0 +1,402 @@ +# +# The Python Imaging Library. +# $Id$ +# +# TIFF tags +# +# This module provides clear-text names for various well-known +# TIFF tags. the TIFF codec works just fine without it. +# +# Copyright (c) Secret Labs AB 1999. +# +# See the README file for information on usage and redistribution. +# + +## +# This module provides constants and clear-text names for various +# well-known TIFF tags. +## + +from collections import namedtuple + + +class TagInfo(namedtuple("_TagInfo", "value name type length enum")): + __slots__ = [] + + def __new__(cls, value=None, name="unknown", type=None, length=0, enum=None): + return super(TagInfo, cls).__new__( + cls, value, name, type, length, enum or {}) + + def cvt_enum(self, value): + return self.enum.get(value, value) + +def lookup(tag): + """ + :param tag: Integer tag number + :returns: Taginfo namedtuple, From the TAGS_V2 info if possible, + otherwise just populating the value and name from TAGS. + If the tag is not recognized, "unknown" is returned for the name + + """ + + return TAGS_V2.get(tag, TagInfo(tag, TAGS.get(tag, 'unknown'))) + + +## +# Map tag numbers to tag info. 
+# +# id: (Name, Type, Length, enum_values) +# + +ASCII = 2 +SHORT = 3 +LONG = 4 +RATIONAL = 5 + +TAGS_V2 = { + + 254: ("NewSubfileType", LONG, 1), + 255: ("SubfileType", SHORT, 1), + 256: ("ImageWidth", LONG, 1), + 257: ("ImageLength", LONG, 1), + 258: ("BitsPerSample", SHORT, 0), + 259: ("Compression", SHORT, 1, + {"Uncompressed": 1, "CCITT 1d": 2, "Group 3 Fax": 3, "Group 4 Fax": 4, + "LZW": 5, "JPEG": 6, "PackBits": 32773}), + + 262: ("PhotometricInterpretation", SHORT, 1, + {"WhiteIsZero": 0, "BlackIsZero": 1, "RGB": 2, "RBG Palette": 3, + "Transparency Mask": 4, "CMYK": 5, "YCbCr": 6, "CieLAB": 8, + "CFA": 32803, # TIFF/EP, Adobe DNG + "LinearRaw": 32892}), # Adobe DNG + 263: ("Thresholding", SHORT, 1), + 264: ("CellWidth", SHORT, 1), + 265: ("CellHeight", SHORT, 1), + 266: ("FillOrder", SHORT, 1), + 269: ("DocumentName", ASCII, 1), + + 270: ("ImageDescription", ASCII, 1), + 271: ("Make", ASCII, 1), + 272: ("Model", ASCII, 1), + 273: ("StripOffsets", LONG, 0), + 274: ("Orientation", SHORT, 1), + 277: ("SamplesPerPixel", SHORT, 1), + 278: ("RowsPerStrip", LONG, 1), + 279: ("StripByteCounts", LONG, 0), + + 280: ("MinSampleValue", LONG, 0), + 281: ("MaxSampleValue", SHORT, 0), + 282: ("XResolution", RATIONAL, 1), + 283: ("YResolution", RATIONAL, 1), + 284: ("PlanarConfiguration", SHORT, 1, {"Contigous": 1, "Separate": 2}), + 285: ("PageName", ASCII, 1), + 286: ("XPosition", RATIONAL, 1), + 287: ("YPosition", RATIONAL, 1), + 288: ("FreeOffsets", LONG, 1), + 289: ("FreeByteCounts", LONG, 1), + + 290: ("GrayResponseUnit", SHORT, 1), + 291: ("GrayResponseCurve", SHORT, 0), + 292: ("T4Options", LONG, 1), + 293: ("T6Options", LONG, 1), + 296: ("ResolutionUnit", SHORT, 1, {"inch": 1, "cm": 2}), + 297: ("PageNumber", SHORT, 2), + + 301: ("TransferFunction", SHORT, 0), + 305: ("Software", ASCII, 1), + 306: ("DateTime", ASCII, 1), + + 315: ("Artist", ASCII, 1), + 316: ("HostComputer", ASCII, 1), + 317: ("Predictor", SHORT, 1), + 318: ("WhitePoint", RATIONAL, 2), + 319: ("PrimaryChromaticies", SHORT, 6), + + 320: ("ColorMap", SHORT, 0), + 321: ("HalftoneHints", SHORT, 2), + 322: ("TileWidth", LONG, 1), + 323: ("TileLength", LONG, 1), + 324: ("TileOffsets", LONG, 0), + 325: ("TileByteCounts", LONG, 0), + + 332: ("InkSet", SHORT, 1), + 333: ("InkNames", ASCII, 1), + 334: ("NumberOfInks", SHORT, 1), + 336: ("DotRange", SHORT, 0), + 337: ("TargetPrinter", ASCII, 1), + 338: ("ExtraSamples", SHORT, 0), + 339: ("SampleFormat", SHORT, 0), + + 340: ("SMinSampleValue", 12, 0), + 341: ("SMaxSampleValue", 12, 0), + 342: ("TransferRange", SHORT, 6), + + # obsolete JPEG tags + 512: ("JPEGProc", SHORT, 1), + 513: ("JPEGInterchangeFormat", LONG, 1), + 514: ("JPEGInterchangeFormatLength", LONG, 1), + 515: ("JPEGRestartInterval", SHORT, 1), + 517: ("JPEGLosslessPredictors", SHORT, 0), + 518: ("JPEGPointTransforms", SHORT, 0), + 519: ("JPEGQTables", LONG, 0), + 520: ("JPEGDCTables", LONG, 0), + 521: ("JPEGACTables", LONG, 0), + + 529: ("YCbCrCoefficients", RATIONAL, 3), + 530: ("YCbCrSubSampling", SHORT, 2), + 531: ("YCbCrPositioning", SHORT, 1), + 532: ("ReferenceBlackWhite", LONG, 0), + + 33432: ("Copyright", ASCII, 1), + + # FIXME add more tags here + 34665: ("ExifIFD", SHORT, 1), + 34675: ('ICCProfile', 7, 0), + 34853: ('GPSInfoIFD', 1, 1), + + # MPInfo + 45056: ("MPFVersion", 7, 1), + 45057: ("NumberOfImages", LONG, 1), + 45058: ("MPEntry", 7, 1), + 45059: ("ImageUIDList", 7, 0), + 45060: ("TotalFrames", LONG, 1), + 45313: ("MPIndividualNum", LONG, 1), + 45569: ("PanOrientation", LONG, 1), + 45570: 
("PanOverlap_H", RATIONAL, 1), + 45571: ("PanOverlap_V", RATIONAL, 1), + 45572: ("BaseViewpointNum", LONG, 1), + 45573: ("ConvergenceAngle", 10, 1), + 45574: ("BaselineLength", RATIONAL, 1), + 45575: ("VerticalDivergence", 10, 1), + 45576: ("AxisDistance_X", 10, 1), + 45577: ("AxisDistance_Y", 10, 1), + 45578: ("AxisDistance_Z", 10, 1), + 45579: ("YawAngle", 10, 1), + 45580: ("PitchAngle", 10, 1), + 45581: ("RollAngle", 10, 1), + + 50741: ("MakerNoteSafety", SHORT, 1, {"Unsafe": 0, "Safe": 1}), + 50780: ("BestQualityScale", RATIONAL, 1), + 50838: ("ImageJMetaDataByteCounts", LONG, 1), + 50839: ("ImageJMetaData", 7, 1) +} + +# Legacy Tags structure +# these tags aren't included above, but were in the previous versions +TAGS = {347: 'JPEGTables', + 700: 'XMP', + + # Additional Exif Info + 33434: 'ExposureTime', + 33437: 'FNumber', + 33723: 'IptcNaaInfo', + 34377: 'PhotoshopInfo', + 34850: 'ExposureProgram', + 34852: 'SpectralSensitivity', + 34855: 'ISOSpeedRatings', + 34856: 'OECF', + 34864: 'SensitivityType', + 34865: 'StandardOutputSensitivity', + 34866: 'RecommendedExposureIndex', + 34867: 'ISOSpeed', + 34868: 'ISOSpeedLatitudeyyy', + 34869: 'ISOSpeedLatitudezzz', + 36864: 'ExifVersion', + 36867: 'DateTimeOriginal', + 36868: 'DateTImeDigitized', + 37121: 'ComponentsConfiguration', + 37122: 'CompressedBitsPerPixel', + 37377: 'ShutterSpeedValue', + 37378: 'ApertureValue', + 37379: 'BrightnessValue', + 37380: 'ExposureBiasValue', + 37381: 'MaxApertureValue', + 37382: 'SubjectDistance', + 37383: 'MeteringMode', + 37384: 'LightSource', + 37385: 'Flash', + 37386: 'FocalLength', + 37396: 'SubjectArea', + 37500: 'MakerNote', + 37510: 'UserComment', + 37520: 'SubSec', + 37521: 'SubSecTimeOriginal', + 37522: 'SubsecTimeDigitized', + 40960: 'FlashPixVersion', + 40961: 'ColorSpace', + 40962: 'PixelXDimension', + 40963: 'PixelYDimension', + 40964: 'RelatedSoundFile', + 40965: 'InteroperabilityIFD', + 41483: 'FlashEnergy', + 41484: 'SpatialFrequencyResponse', + 41486: 'FocalPlaneXResolution', + 41487: 'FocalPlaneYResolution', + 41488: 'FocalPlaneResolutionUnit', + 41492: 'SubjectLocation', + 41493: 'ExposureIndex', + 41495: 'SensingMethod', + 41728: 'FileSource', + 41729: 'SceneType', + 41730: 'CFAPattern', + 41985: 'CustomRendered', + 41986: 'ExposureMode', + 41987: 'WhiteBalance', + 41988: 'DigitalZoomRatio', + 41989: 'FocalLengthIn35mmFilm', + 41990: 'SceneCaptureType', + 41991: 'GainControl', + 41992: 'Contrast', + 41993: 'Saturation', + 41994: 'Sharpness', + 41995: 'DeviceSettingDescription', + 41996: 'SubjectDistanceRange', + 42016: 'ImageUniqueID', + 42032: 'CameraOwnerName', + 42033: 'BodySerialNumber', + 42034: 'LensSpecification', + 42035: 'LensMake', + 42036: 'LensModel', + 42037: 'LensSerialNumber', + 42240: 'Gamma', + + # Adobe DNG + 50706: 'DNGVersion', + 50707: 'DNGBackwardVersion', + 50708: 'UniqueCameraModel', + 50709: 'LocalizedCameraModel', + 50710: 'CFAPlaneColor', + 50711: 'CFALayout', + 50712: 'LinearizationTable', + 50713: 'BlackLevelRepeatDim', + 50714: 'BlackLevel', + 50715: 'BlackLevelDeltaH', + 50716: 'BlackLevelDeltaV', + 50717: 'WhiteLevel', + 50718: 'DefaultScale', + 50719: 'DefaultCropOrigin', + 50720: 'DefaultCropSize', + 50721: 'ColorMatrix1', + 50722: 'ColorMatrix2', + 50723: 'CameraCalibration1', + 50724: 'CameraCalibration2', + 50725: 'ReductionMatrix1', + 50726: 'ReductionMatrix2', + 50727: 'AnalogBalance', + 50728: 'AsShotNeutral', + 50729: 'AsShotWhiteXY', + 50730: 'BaselineExposure', + 50731: 'BaselineNoise', + 50732: 'BaselineSharpness', + 50733: 
'BayerGreenSplit', + 50734: 'LinearResponseLimit', + 50735: 'CameraSerialNumber', + 50736: 'LensInfo', + 50737: 'ChromaBlurRadius', + 50738: 'AntiAliasStrength', + 50740: 'DNGPrivateData', + 50778: 'CalibrationIlluminant1', + 50779: 'CalibrationIlluminant2', + } + + +def _populate(): + for k, v in TAGS_V2.items(): + # Populate legacy structure. + TAGS[k] = v[0] + if len(v) == 4: + for sk, sv in v[3].items(): + TAGS[(k, sv)] = sk + + TAGS_V2[k] = TagInfo(k, *v) + +_populate() +## +# Map type numbers to type names -- defined in ImageFileDirectory. + +TYPES = {} + +# was: +# TYPES = { +# 1: "byte", +# 2: "ascii", +# 3: "short", +# 4: "long", +# 5: "rational", +# 6: "signed byte", +# 7: "undefined", +# 8: "signed short", +# 9: "signed long", +# 10: "signed rational", +# 11: "float", +# 12: "double", +# } + +# +# These tags are handled by default in libtiff, without +# adding to the custom dictionary. From tif_dir.c, searching for +# case TIFFTAG in the _TIFFVSetField function: +# Line: item. +# 148: case TIFFTAG_SUBFILETYPE: +# 151: case TIFFTAG_IMAGEWIDTH: +# 154: case TIFFTAG_IMAGELENGTH: +# 157: case TIFFTAG_BITSPERSAMPLE: +# 181: case TIFFTAG_COMPRESSION: +# 202: case TIFFTAG_PHOTOMETRIC: +# 205: case TIFFTAG_THRESHHOLDING: +# 208: case TIFFTAG_FILLORDER: +# 214: case TIFFTAG_ORIENTATION: +# 221: case TIFFTAG_SAMPLESPERPIXEL: +# 228: case TIFFTAG_ROWSPERSTRIP: +# 238: case TIFFTAG_MINSAMPLEVALUE: +# 241: case TIFFTAG_MAXSAMPLEVALUE: +# 244: case TIFFTAG_SMINSAMPLEVALUE: +# 247: case TIFFTAG_SMAXSAMPLEVALUE: +# 250: case TIFFTAG_XRESOLUTION: +# 256: case TIFFTAG_YRESOLUTION: +# 262: case TIFFTAG_PLANARCONFIG: +# 268: case TIFFTAG_XPOSITION: +# 271: case TIFFTAG_YPOSITION: +# 274: case TIFFTAG_RESOLUTIONUNIT: +# 280: case TIFFTAG_PAGENUMBER: +# 284: case TIFFTAG_HALFTONEHINTS: +# 288: case TIFFTAG_COLORMAP: +# 294: case TIFFTAG_EXTRASAMPLES: +# 298: case TIFFTAG_MATTEING: +# 305: case TIFFTAG_TILEWIDTH: +# 316: case TIFFTAG_TILELENGTH: +# 327: case TIFFTAG_TILEDEPTH: +# 333: case TIFFTAG_DATATYPE: +# 344: case TIFFTAG_SAMPLEFORMAT: +# 361: case TIFFTAG_IMAGEDEPTH: +# 364: case TIFFTAG_SUBIFD: +# 376: case TIFFTAG_YCBCRPOSITIONING: +# 379: case TIFFTAG_YCBCRSUBSAMPLING: +# 383: case TIFFTAG_TRANSFERFUNCTION: +# 389: case TIFFTAG_REFERENCEBLACKWHITE: +# 393: case TIFFTAG_INKNAMES: + +# some of these are not in our TAGS_V2 dict and were included from tiff.h + +LIBTIFF_CORE = set ([255, 256, 257, 258, 259, 262, 263, 266, 274, 277, + 278, 280, 281, 340, 341, 282, 283, 284, 286, 287, + 296, 297, 321, 320, 338, 32995, 322, 323, 32998, + 32996, 339, 32997, 330, 531, 530, 301, 532, 333, + # as above + 269 # this has been in our tests forever, and works + ]) + +LIBTIFF_CORE.remove(320) # Array of short, crashes +LIBTIFF_CORE.remove(301) # Array of short, crashes +LIBTIFF_CORE.remove(532) # Array of long, crashes + +LIBTIFF_CORE.remove(255) # We don't have support for subfiletypes +LIBTIFF_CORE.remove(322) # We don't have support for tiled images in libtiff +LIBTIFF_CORE.remove(323) # Tiled images +LIBTIFF_CORE.remove(333) # Ink Names either + +# Note to advanced users: There may be combinations of these +# parameters and values that when added properly, will work and +# produce valid tiff images that may work in your application. +# It is safe to add and remove tags from this set from Pillow's point +# of view so long as you test against libtiff. 
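+# Illustrative usage of the helpers above (a sketch, not part of the
+# original module documentation):
+#
+#     >>> from PIL import TiffTags
+#     >>> info = TiffTags.lookup(282)            # XResolution
+#     >>> info.name, info.type, info.length
+#     ('XResolution', 5, 1)
+#     >>> TiffTags.lookup(259).cvt_enum("LZW")   # enum name -> wire value
+#     5
+#     >>> 282 in TiffTags.LIBTIFF_CORE           # safe to hand to libtiff
+#     True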
diff --git a/Lib/site-packages/PIL/WalImageFile.py b/Lib/site-packages/PIL/WalImageFile.py new file mode 100644 index 0000000..0cbd1ca --- /dev/null +++ b/Lib/site-packages/PIL/WalImageFile.py @@ -0,0 +1,128 @@ +# encoding: utf-8 +# +# The Python Imaging Library. +# $Id$ +# +# WAL file handling +# +# History: +# 2003-04-23 fl created +# +# Copyright (c) 2003 by Fredrik Lundh. +# +# See the README file for information on usage and redistribution. +# + +# NOTE: This format cannot be automatically recognized, so the reader +# is not registered for use with Image.open(). To open a WAL file, use +# the WalImageFile.open() function instead. + +# This reader is based on the specification available from: +# http://www.flipcode.com/archives/Quake_2_BSP_File_Format.shtml +# and has been tested with a few sample files found using google. + +from __future__ import print_function + +from PIL import Image, _binary + +try: + import builtins +except ImportError: + import __builtin__ + builtins = __builtin__ + +i32 = _binary.i32le + + +## +# Load texture from a Quake2 WAL texture file. +#
+# By default, a Quake2 standard palette is attached to the texture. +# To override the palette, use the putpalette method. +# +# @param filename WAL file name, or an opened file handle. +# @return An image instance. + +def open(filename): + # FIXME: modify to return a WalImageFile instance instead of + # plain Image object ? + + if hasattr(filename, "read"): + fp = filename + else: + fp = builtins.open(filename, "rb") + + # read header fields + header = fp.read(32+24+32+12) + size = i32(header, 32), i32(header, 36) + offset = i32(header, 40) + + # load pixel data + fp.seek(offset) + + im = Image.frombytes("P", size, fp.read(size[0] * size[1])) + im.putpalette(quake2palette) + + im.format = "WAL" + im.format_description = "Quake2 Texture" + + # strings are null-terminated + im.info["name"] = header[:32].split(b"\0", 1)[0] + next_name = header[56:56+32].split(b"\0", 1)[0] + if next_name: + im.info["next_name"] = next_name + + return im + + +quake2palette = ( + # default palette taken from piffo 0.93 by Hans Häggström + b"\x01\x01\x01\x0b\x0b\x0b\x12\x12\x12\x17\x17\x17\x1b\x1b\x1b\x1e" + b"\x1e\x1e\x22\x22\x22\x26\x26\x26\x29\x29\x29\x2c\x2c\x2c\x2f\x2f" + b"\x2f\x32\x32\x32\x35\x35\x35\x37\x37\x37\x3a\x3a\x3a\x3c\x3c\x3c" + b"\x24\x1e\x13\x22\x1c\x12\x20\x1b\x12\x1f\x1a\x10\x1d\x19\x10\x1b" + b"\x17\x0f\x1a\x16\x0f\x18\x14\x0d\x17\x13\x0d\x16\x12\x0d\x14\x10" + b"\x0b\x13\x0f\x0b\x10\x0d\x0a\x0f\x0b\x0a\x0d\x0b\x07\x0b\x0a\x07" + b"\x23\x23\x26\x22\x22\x25\x22\x20\x23\x21\x1f\x22\x20\x1e\x20\x1f" + b"\x1d\x1e\x1d\x1b\x1c\x1b\x1a\x1a\x1a\x19\x19\x18\x17\x17\x17\x16" + b"\x16\x14\x14\x14\x13\x13\x13\x10\x10\x10\x0f\x0f\x0f\x0d\x0d\x0d" + b"\x2d\x28\x20\x29\x24\x1c\x27\x22\x1a\x25\x1f\x17\x38\x2e\x1e\x31" + b"\x29\x1a\x2c\x25\x17\x26\x20\x14\x3c\x30\x14\x37\x2c\x13\x33\x28" + b"\x12\x2d\x24\x10\x28\x1f\x0f\x22\x1a\x0b\x1b\x14\x0a\x13\x0f\x07" + b"\x31\x1a\x16\x30\x17\x13\x2e\x16\x10\x2c\x14\x0d\x2a\x12\x0b\x27" + b"\x0f\x0a\x25\x0f\x07\x21\x0d\x01\x1e\x0b\x01\x1c\x0b\x01\x1a\x0b" + b"\x01\x18\x0a\x01\x16\x0a\x01\x13\x0a\x01\x10\x07\x01\x0d\x07\x01" + b"\x29\x23\x1e\x27\x21\x1c\x26\x20\x1b\x25\x1f\x1a\x23\x1d\x19\x21" + b"\x1c\x18\x20\x1b\x17\x1e\x19\x16\x1c\x18\x14\x1b\x17\x13\x19\x14" + b"\x10\x17\x13\x0f\x14\x10\x0d\x12\x0f\x0b\x0f\x0b\x0a\x0b\x0a\x07" + b"\x26\x1a\x0f\x23\x19\x0f\x20\x17\x0f\x1c\x16\x0f\x19\x13\x0d\x14" + b"\x10\x0b\x10\x0d\x0a\x0b\x0a\x07\x33\x22\x1f\x35\x29\x26\x37\x2f" + b"\x2d\x39\x35\x34\x37\x39\x3a\x33\x37\x39\x30\x34\x36\x2b\x31\x34" + b"\x27\x2e\x31\x22\x2b\x2f\x1d\x28\x2c\x17\x25\x2a\x0f\x20\x26\x0d" + b"\x1e\x25\x0b\x1c\x22\x0a\x1b\x20\x07\x19\x1e\x07\x17\x1b\x07\x14" + b"\x18\x01\x12\x16\x01\x0f\x12\x01\x0b\x0d\x01\x07\x0a\x01\x01\x01" + b"\x2c\x21\x21\x2a\x1f\x1f\x29\x1d\x1d\x27\x1c\x1c\x26\x1a\x1a\x24" + b"\x18\x18\x22\x17\x17\x21\x16\x16\x1e\x13\x13\x1b\x12\x12\x18\x10" + b"\x10\x16\x0d\x0d\x12\x0b\x0b\x0d\x0a\x0a\x0a\x07\x07\x01\x01\x01" + b"\x2e\x30\x29\x2d\x2e\x27\x2b\x2c\x26\x2a\x2a\x24\x28\x29\x23\x27" + b"\x27\x21\x26\x26\x1f\x24\x24\x1d\x22\x22\x1c\x1f\x1f\x1a\x1c\x1c" + b"\x18\x19\x19\x16\x17\x17\x13\x13\x13\x10\x0f\x0f\x0d\x0b\x0b\x0a" + b"\x30\x1e\x1b\x2d\x1c\x19\x2c\x1a\x17\x2a\x19\x14\x28\x17\x13\x26" + b"\x16\x10\x24\x13\x0f\x21\x12\x0d\x1f\x10\x0b\x1c\x0f\x0a\x19\x0d" + b"\x0a\x16\x0b\x07\x12\x0a\x07\x0f\x07\x01\x0a\x01\x01\x01\x01\x01" + b"\x28\x29\x38\x26\x27\x36\x25\x26\x34\x24\x24\x31\x22\x22\x2f\x20" + b"\x21\x2d\x1e\x1f\x2a\x1d\x1d\x27\x1b\x1b\x25\x19\x19\x21\x17\x17" + b"\x1e\x14\x14\x1b\x13\x12\x17\x10\x0f\x13\x0d\x0b\x0f\x0a\x07\x07" + 
b"\x2f\x32\x29\x2d\x30\x26\x2b\x2e\x24\x29\x2c\x21\x27\x2a\x1e\x25" + b"\x28\x1c\x23\x26\x1a\x21\x25\x18\x1e\x22\x14\x1b\x1f\x10\x19\x1c" + b"\x0d\x17\x1a\x0a\x13\x17\x07\x10\x13\x01\x0d\x0f\x01\x0a\x0b\x01" + b"\x01\x3f\x01\x13\x3c\x0b\x1b\x39\x10\x20\x35\x14\x23\x31\x17\x23" + b"\x2d\x18\x23\x29\x18\x3f\x3f\x3f\x3f\x3f\x39\x3f\x3f\x31\x3f\x3f" + b"\x2a\x3f\x3f\x20\x3f\x3f\x14\x3f\x3c\x12\x3f\x39\x0f\x3f\x35\x0b" + b"\x3f\x32\x07\x3f\x2d\x01\x3d\x2a\x01\x3b\x26\x01\x39\x21\x01\x37" + b"\x1d\x01\x34\x1a\x01\x32\x16\x01\x2f\x12\x01\x2d\x0f\x01\x2a\x0b" + b"\x01\x27\x07\x01\x23\x01\x01\x1d\x01\x01\x17\x01\x01\x10\x01\x01" + b"\x3d\x01\x01\x19\x19\x3f\x3f\x01\x01\x01\x01\x3f\x16\x16\x13\x10" + b"\x10\x0f\x0d\x0d\x0b\x3c\x2e\x2a\x36\x27\x20\x30\x21\x18\x29\x1b" + b"\x10\x3c\x39\x37\x37\x32\x2f\x31\x2c\x28\x2b\x26\x21\x30\x22\x20" +) diff --git a/Lib/site-packages/PIL/WebPImagePlugin.py b/Lib/site-packages/PIL/WebPImagePlugin.py new file mode 100644 index 0000000..6837b53 --- /dev/null +++ b/Lib/site-packages/PIL/WebPImagePlugin.py @@ -0,0 +1,80 @@ +from PIL import Image +from PIL import ImageFile +from io import BytesIO +from PIL import _webp + + +_VALID_WEBP_MODES = { + "RGB": True, + "RGBA": True, + } + +_VP8_MODES_BY_IDENTIFIER = { + b"VP8 ": "RGB", + b"VP8X": "RGBA", + b"VP8L": "RGBA", # lossless + } + + +def _accept(prefix): + is_riff_file_format = prefix[:4] == b"RIFF" + is_webp_file = prefix[8:12] == b"WEBP" + is_valid_vp8_mode = prefix[12:16] in _VP8_MODES_BY_IDENTIFIER + + return is_riff_file_format and is_webp_file and is_valid_vp8_mode + + +class WebPImageFile(ImageFile.ImageFile): + + format = "WEBP" + format_description = "WebP image" + + def _open(self): + data, width, height, self.mode, icc_profile, exif = \ + _webp.WebPDecode(self.fp.read()) + + if icc_profile: + self.info["icc_profile"] = icc_profile + if exif: + self.info["exif"] = exif + + self.size = width, height + self.fp = BytesIO(data) + self.tile = [("raw", (0, 0) + self.size, 0, self.mode)] + + def _getexif(self): + from PIL.JpegImagePlugin import _getexif + return _getexif(self) + + +def _save(im, fp, filename): + image_mode = im.mode + if im.mode not in _VALID_WEBP_MODES: + raise IOError("cannot write mode %s as WEBP" % image_mode) + + lossless = im.encoderinfo.get("lossless", False) + quality = im.encoderinfo.get("quality", 80) + icc_profile = im.encoderinfo.get("icc_profile", "") + exif = im.encoderinfo.get("exif", "") + + data = _webp.WebPEncode( + im.tobytes(), + im.size[0], + im.size[1], + lossless, + float(quality), + im.mode, + icc_profile, + exif + ) + if data is None: + raise IOError("cannot write file as WEBP (encoder returned None)") + + fp.write(data) + + +Image.register_open(WebPImageFile.format, WebPImageFile, _accept) +Image.register_save(WebPImageFile.format, _save) + +Image.register_extension(WebPImageFile.format, ".webp") +Image.register_mime(WebPImageFile.format, "image/webp") diff --git a/Lib/site-packages/PIL/WmfImagePlugin.py b/Lib/site-packages/PIL/WmfImagePlugin.py new file mode 100644 index 0000000..3163210 --- /dev/null +++ b/Lib/site-packages/PIL/WmfImagePlugin.py @@ -0,0 +1,173 @@ +# +# The Python Imaging Library +# $Id$ +# +# WMF stub codec +# +# history: +# 1996-12-14 fl Created +# 2004-02-22 fl Turned into a stub driver +# 2004-02-23 fl Added EMF support +# +# Copyright (c) Secret Labs AB 1997-2004. All rights reserved. +# Copyright (c) Fredrik Lundh 1996. +# +# See the README file for information on usage and redistribution. 
+# + +from PIL import Image, ImageFile, _binary + +__version__ = "0.2" + +_handler = None + +if str != bytes: + long = int + + +## +# Install application-specific WMF image handler. +# +# @param handler Handler object. + +def register_handler(handler): + global _handler + _handler = handler + +if hasattr(Image.core, "drawwmf"): + # install default handler (windows only) + + class WmfHandler(object): + + def open(self, im): + im.mode = "RGB" + self.bbox = im.info["wmf_bbox"] + + def load(self, im): + im.fp.seek(0) # rewind + return Image.frombytes( + "RGB", im.size, + Image.core.drawwmf(im.fp.read(), im.size, self.bbox), + "raw", "BGR", (im.size[0]*3 + 3) & -4, -1 + ) + + register_handler(WmfHandler()) + +# -------------------------------------------------------------------- + +word = _binary.i16le + + +def short(c, o=0): + v = word(c, o) + if v >= 32768: + v -= 65536 + return v + +dword = _binary.i32le + + +# +# -------------------------------------------------------------------- +# Read WMF file + +def _accept(prefix): + return ( + prefix[:6] == b"\xd7\xcd\xc6\x9a\x00\x00" or + prefix[:4] == b"\x01\x00\x00\x00" + ) + + +## +# Image plugin for Windows metafiles. + +class WmfStubImageFile(ImageFile.StubImageFile): + + format = "WMF" + format_description = "Windows Metafile" + + def _open(self): + + # check placable header + s = self.fp.read(80) + + if s[:6] == b"\xd7\xcd\xc6\x9a\x00\x00": + + # placeable windows metafile + + # get units per inch + inch = word(s, 14) + + # get bounding box + x0 = short(s, 6) + y0 = short(s, 8) + x1 = short(s, 10) + y1 = short(s, 12) + + # normalize size to 72 dots per inch + size = (x1 - x0) * 72 // inch, (y1 - y0) * 72 // inch + + self.info["wmf_bbox"] = x0, y0, x1, y1 + + self.info["dpi"] = 72 + + # print self.mode, self.size, self.info + + # sanity check (standard metafile header) + if s[22:26] != b"\x01\x00\t\x00": + raise SyntaxError("Unsupported WMF file format") + + elif dword(s) == 1 and s[40:44] == b" EMF": + # enhanced metafile + + # get bounding box + x0 = dword(s, 8) + y0 = dword(s, 12) + x1 = dword(s, 16) + y1 = dword(s, 20) + + # get frame (in 0.01 millimeter units) + frame = dword(s, 24), dword(s, 28), dword(s, 32), dword(s, 36) + + # normalize size to 72 dots per inch + size = x1 - x0, y1 - y0 + + # calculate dots per inch from bbox and frame + xdpi = 2540 * (x1 - y0) // (frame[2] - frame[0]) + ydpi = 2540 * (y1 - y0) // (frame[3] - frame[1]) + + self.info["wmf_bbox"] = x0, y0, x1, y1 + + if xdpi == ydpi: + self.info["dpi"] = xdpi + else: + self.info["dpi"] = xdpi, ydpi + + else: + raise SyntaxError("Unsupported file format") + + self.mode = "RGB" + self.size = size + + loader = self._load() + if loader: + loader.open(self) + + def _load(self): + return _handler + + +def _save(im, fp, filename): + if _handler is None or not hasattr("_handler", "save"): + raise IOError("WMF save handler not installed") + _handler.save(im, fp, filename) + +# +# -------------------------------------------------------------------- +# Registry stuff + +Image.register_open(WmfStubImageFile.format, WmfStubImageFile, _accept) +Image.register_save(WmfStubImageFile.format, _save) + +Image.register_extension(WmfStubImageFile.format, ".wmf") +Image.register_extension(WmfStubImageFile.format, ".emf") diff --git a/Lib/site-packages/PIL/XVThumbImagePlugin.py b/Lib/site-packages/PIL/XVThumbImagePlugin.py new file mode 100644 index 0000000..311e65d --- /dev/null +++ b/Lib/site-packages/PIL/XVThumbImagePlugin.py @@ -0,0 +1,75 @@ +# +# The Python Imaging Library. 
+# $Id$ +# +# XV Thumbnail file handler by Charles E. "Gene" Cash +# (gcash@magicnet.net) +# +# see xvcolor.c and xvbrowse.c in the sources to John Bradley's XV, +# available from ftp://ftp.cis.upenn.edu/pub/xv/ +# +# history: +# 98-08-15 cec created (b/w only) +# 98-12-09 cec added color palette +# 98-12-28 fl added to PIL (with only a few very minor modifications) +# +# To do: +# FIXME: make save work (this requires quantization support) +# + +from PIL import Image, ImageFile, ImagePalette, _binary + +__version__ = "0.1" + +o8 = _binary.o8 + +# standard color palette for thumbnails (RGB332) +PALETTE = b"" +for r in range(8): + for g in range(8): + for b in range(4): + PALETTE = PALETTE + (o8((r*255)//7)+o8((g*255)//7)+o8((b*255)//3)) + + +## +# Image plugin for XV thumbnail images. + +class XVThumbImageFile(ImageFile.ImageFile): + + format = "XVThumb" + format_description = "XV thumbnail image" + + def _open(self): + + # check magic + s = self.fp.read(6) + if s != b"P7 332": + raise SyntaxError("not an XV thumbnail file") + + # Skip to beginning of next line + self.fp.readline() + + # skip info comments + while True: + s = self.fp.readline() + if not s: + raise SyntaxError("Unexpected EOF reading XV thumbnail file") + if s[0] != b'#': + break + + # parse header line (already read) + s = s.strip().split() + + self.mode = "P" + self.size = int(s[0:1]), int(s[1:2]) + + self.palette = ImagePalette.raw("RGB", PALETTE) + + self.tile = [ + ("raw", (0, 0)+self.size, + self.fp.tell(), (self.mode, 0, 1) + )] + +# -------------------------------------------------------------------- + +Image.register_open(XVThumbImageFile.format, XVThumbImageFile) diff --git a/Lib/site-packages/PIL/XbmImagePlugin.py b/Lib/site-packages/PIL/XbmImagePlugin.py new file mode 100644 index 0000000..bca8828 --- /dev/null +++ b/Lib/site-packages/PIL/XbmImagePlugin.py @@ -0,0 +1,96 @@ +# +# The Python Imaging Library. +# $Id$ +# +# XBM File handling +# +# History: +# 1995-09-08 fl Created +# 1996-11-01 fl Added save support +# 1997-07-07 fl Made header parser more tolerant +# 1997-07-22 fl Fixed yet another parser bug +# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.4) +# 2001-05-13 fl Added hotspot handling (based on code from Bernhard Herzog) +# 2004-02-24 fl Allow some whitespace before first #define +# +# Copyright (c) 1997-2004 by Secret Labs AB +# Copyright (c) 1996-1997 by Fredrik Lundh +# +# See the README file for information on usage and redistribution. +# + +import re +from PIL import Image, ImageFile + +__version__ = "0.6" + +# XBM header +xbm_head = re.compile( + b"\s*#define[ \t]+.*_width[ \t]+(?P[0-9]+)[\r\n]+" + b"#define[ \t]+.*_height[ \t]+(?P[0-9]+)[\r\n]+" + b"(?P" + b"#define[ \t]+[^_]*_x_hot[ \t]+(?P[0-9]+)[\r\n]+" + b"#define[ \t]+[^_]*_y_hot[ \t]+(?P[0-9]+)[\r\n]+" + b")?" + b"[\\000-\\377]*_bits\\[\\]" +) + + +def _accept(prefix): + return prefix.lstrip()[:7] == b"#define" + + +## +# Image plugin for X11 bitmaps. 
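+# For reference, a minimal file accepted by the header pattern above and by
+# _accept(); the "im_" prefix, size and bit values are only illustrative:
+#
+#     #define im_width 8
+#     #define im_height 8
+#     static char im_bits[] = {
+#         0x00, 0xff, 0x00, 0xff, 0x00, 0xff, 0x00, 0xff };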
+ +class XbmImageFile(ImageFile.ImageFile): + + format = "XBM" + format_description = "X11 Bitmap" + + def _open(self): + + m = xbm_head.match(self.fp.read(512)) + + if m: + + xsize = int(m.group("width")) + ysize = int(m.group("height")) + + if m.group("hotspot"): + self.info["hotspot"] = ( + int(m.group("xhot")), int(m.group("yhot")) + ) + + self.mode = "1" + self.size = xsize, ysize + + self.tile = [("xbm", (0, 0)+self.size, m.end(), None)] + + +def _save(im, fp, filename): + + if im.mode != "1": + raise IOError("cannot write mode %s as XBM" % im.mode) + + fp.write(("#define im_width %d\n" % im.size[0]).encode('ascii')) + fp.write(("#define im_height %d\n" % im.size[1]).encode('ascii')) + + hotspot = im.encoderinfo.get("hotspot") + if hotspot: + fp.write(("#define im_x_hot %d\n" % hotspot[0]).encode('ascii')) + fp.write(("#define im_y_hot %d\n" % hotspot[1]).encode('ascii')) + + fp.write(b"static char im_bits[] = {\n") + + ImageFile._save(im, fp, [("xbm", (0, 0)+im.size, 0, None)]) + + fp.write(b"};\n") + + +Image.register_open(XbmImageFile.format, XbmImageFile, _accept) +Image.register_save(XbmImageFile.format, _save) + +Image.register_extension(XbmImageFile.format, ".xbm") + +Image.register_mime(XbmImageFile.format, "image/xbm") diff --git a/Lib/site-packages/PIL/XpmImagePlugin.py b/Lib/site-packages/PIL/XpmImagePlugin.py new file mode 100644 index 0000000..556adb8 --- /dev/null +++ b/Lib/site-packages/PIL/XpmImagePlugin.py @@ -0,0 +1,130 @@ +# +# The Python Imaging Library. +# $Id$ +# +# XPM File handling +# +# History: +# 1996-12-29 fl Created +# 2001-02-17 fl Use 're' instead of 'regex' (Python 2.1) (0.7) +# +# Copyright (c) Secret Labs AB 1997-2001. +# Copyright (c) Fredrik Lundh 1996-2001. +# +# See the README file for information on usage and redistribution. +# + + +import re +from PIL import Image, ImageFile, ImagePalette +from PIL._binary import i8, o8 + +__version__ = "0.2" + +# XPM header +xpm_head = re.compile(b"\"([0-9]*) ([0-9]*) ([0-9]*) ([0-9]*)") + + +def _accept(prefix): + return prefix[:9] == b"/* XPM */" + + +## +# Image plugin for X11 pixel maps. 
+ +class XpmImageFile(ImageFile.ImageFile): + + format = "XPM" + format_description = "X11 Pixel Map" + + def _open(self): + + if not _accept(self.fp.read(9)): + raise SyntaxError("not an XPM file") + + # skip forward to next string + while True: + s = self.fp.readline() + if not s: + raise SyntaxError("broken XPM file") + m = xpm_head.match(s) + if m: + break + + self.size = int(m.group(1)), int(m.group(2)) + + pal = int(m.group(3)) + bpp = int(m.group(4)) + + if pal > 256 or bpp != 1: + raise ValueError("cannot read this XPM file") + + # + # load palette description + + palette = [b"\0\0\0"] * 256 + + for i in range(pal): + + s = self.fp.readline() + if s[-2:] == b'\r\n': + s = s[:-2] + elif s[-1:] in b'\r\n': + s = s[:-1] + + c = i8(s[1]) + s = s[2:-2].split() + + for i in range(0, len(s), 2): + + if s[i] == b"c": + + # process colour key + rgb = s[i+1] + if rgb == b"None": + self.info["transparency"] = c + elif rgb[0:1] == b"#": + # FIXME: handle colour names (see ImagePalette.py) + rgb = int(rgb[1:], 16) + palette[c] = (o8((rgb >> 16) & 255) + + o8((rgb >> 8) & 255) + + o8(rgb & 255)) + else: + # unknown colour + raise ValueError("cannot read this XPM file") + break + + else: + + # missing colour key + raise ValueError("cannot read this XPM file") + + self.mode = "P" + self.palette = ImagePalette.raw("RGB", b"".join(palette)) + + self.tile = [("raw", (0, 0)+self.size, self.fp.tell(), ("P", 0, 1))] + + def load_read(self, bytes): + + # + # load all image data in one chunk + + xsize, ysize = self.size + + s = [None] * ysize + + for i in range(ysize): + s[i] = self.fp.readline()[1:xsize+1].ljust(xsize) + + self.fp = None + + return b"".join(s) + +# +# Registry + +Image.register_open(XpmImageFile.format, XpmImageFile, _accept) + +Image.register_extension(XpmImageFile.format, ".xpm") + +Image.register_mime(XpmImageFile.format, "image/xpm") diff --git a/Lib/site-packages/PIL/__init__.py b/Lib/site-packages/PIL/__init__.py new file mode 100644 index 0000000..e251cfa --- /dev/null +++ b/Lib/site-packages/PIL/__init__.py @@ -0,0 +1,58 @@ +# +# The Python Imaging Library. +# $Id$ +# +# package placeholder +# +# Copyright (c) 1999 by Secret Labs AB. +# +# See the README file for information on usage and redistribution. +# + +# ;-) + +VERSION = '1.1.7' # PIL version +PILLOW_VERSION = '3.1.0' # Pillow + +_plugins = ['BmpImagePlugin', + 'BufrStubImagePlugin', + 'CurImagePlugin', + 'DcxImagePlugin', + 'EpsImagePlugin', + 'FitsStubImagePlugin', + 'FliImagePlugin', + 'FpxImagePlugin', + 'GbrImagePlugin', + 'GifImagePlugin', + 'GribStubImagePlugin', + 'Hdf5StubImagePlugin', + 'IcnsImagePlugin', + 'IcoImagePlugin', + 'ImImagePlugin', + 'ImtImagePlugin', + 'IptcImagePlugin', + 'JpegImagePlugin', + 'Jpeg2KImagePlugin', + 'McIdasImagePlugin', + 'MicImagePlugin', + 'MpegImagePlugin', + 'MpoImagePlugin', + 'MspImagePlugin', + 'PalmImagePlugin', + 'PcdImagePlugin', + 'PcxImagePlugin', + 'PdfImagePlugin', + 'PixarImagePlugin', + 'PngImagePlugin', + 'PpmImagePlugin', + 'PsdImagePlugin', + 'SgiImagePlugin', + 'SpiderImagePlugin', + 'SunImagePlugin', + 'TgaImagePlugin', + 'TiffImagePlugin', + 'WebPImagePlugin', + 'WmfImagePlugin', + 'XbmImagePlugin', + 'XpmImagePlugin', + 'XVThumbImagePlugin'] diff --git a/Lib/site-packages/PIL/_binary.py b/Lib/site-packages/PIL/_binary.py new file mode 100644 index 0000000..2f5e8ff --- /dev/null +++ b/Lib/site-packages/PIL/_binary.py @@ -0,0 +1,76 @@ +# +# The Python Imaging Library. +# $Id$ +# +# Binary input/output support routines. 
+# +# Copyright (c) 1997-2003 by Secret Labs AB +# Copyright (c) 1995-2003 by Fredrik Lundh +# Copyright (c) 2012 by Brian Crowell +# +# See the README file for information on usage and redistribution. +# + +from struct import unpack, pack + +if bytes is str: + def i8(c): + return ord(c) + + def o8(i): + return chr(i & 255) +else: + def i8(c): + return c if c.__class__ is int else c[0] + + def o8(i): + return bytes((i & 255,)) + + +# Input, le = little endian, be = big endian +# TODO: replace with more readable struct.unpack equivalent +def i16le(c, o=0): + """ + Converts a 2-bytes (16 bits) string to an integer. + + c: string containing bytes to convert + o: offset of bytes to convert in string + """ + return unpack("H", c[o:o+2])[0] + + +def i32be(c, o=0): + return unpack(">I", c[o:o+4])[0] + + +# Output, le = little endian, be = big endian +def o16le(i): + return pack("H", i) + + +def o32be(i): + return pack(">I", i) + +# End of file diff --git a/Lib/site-packages/PIL/_util.py b/Lib/site-packages/PIL/_util.py new file mode 100644 index 0000000..51c6f68 --- /dev/null +++ b/Lib/site-packages/PIL/_util.py @@ -0,0 +1,27 @@ +import os + +if bytes is str: + def isStringType(t): + return isinstance(t, basestring) + + def isPath(f): + return isinstance(f, basestring) +else: + def isStringType(t): + return isinstance(t, str) + + def isPath(f): + return isinstance(f, (bytes, str)) + + +# Checks if an object is a string, and that it points to a directory. +def isDirectory(f): + return isPath(f) and os.path.isdir(f) + + +class deferred_error(object): + def __init__(self, ex): + self.ex = ex + + def __getattr__(self, elt): + raise self.ex diff --git a/Lib/site-packages/PIL/features.py b/Lib/site-packages/PIL/features.py new file mode 100644 index 0000000..fd87f09 --- /dev/null +++ b/Lib/site-packages/PIL/features.py @@ -0,0 +1,67 @@ +from PIL import Image + +modules = { + "pil": "PIL._imaging", + "tkinter": "PIL._imagingtk", + "freetype2": "PIL._imagingft", + "littlecms2": "PIL._imagingcms", + "webp": "PIL._webp", + "transp_webp": ("WEBP", "WebPDecoderBuggyAlpha") +} + + +def check_module(feature): + if feature not in modules: + raise ValueError("Unknown module %s" % feature) + + module = modules[feature] + + method_to_call = None + if type(module) is tuple: + module, method_to_call = module + + try: + imported_module = __import__(module) + except ImportError: + # If a method is being checked, None means that + # rather than the method failing, the module required for the method + # failed to be imported first + return None if method_to_call else False + + if method_to_call: + method = getattr(imported_module, method_to_call) + return method() is True + else: + return True + + +def get_supported_modules(): + supported_modules = [] + for feature in modules: + if check_module(feature): + supported_modules.append(feature) + return supported_modules + +codecs = { + "jpg": "jpeg", + "jpg_2000": "jpeg2k", + "zlib": "zip", + "libtiff": "libtiff" +} + + +def check_codec(feature): + if feature not in codecs: + raise ValueError("Unknown codec %s" % feature) + + codec = codecs[feature] + + return codec + "_encoder" in dir(Image.core) + + +def get_supported_codecs(): + supported_codecs = [] + for feature in codecs: + if check_codec(feature): + supported_codecs.append(feature) + return supported_codecs diff --git a/Lib/site-packages/Pillow-3.1.0.dist-info/DESCRIPTION.rst b/Lib/site-packages/Pillow-3.1.0.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..de15aa9 --- /dev/null +++ 
b/Lib/site-packages/Pillow-3.1.0.dist-info/DESCRIPTION.rst @@ -0,0 +1,62 @@ +Pillow +====== + +Python Imaging Library (Fork) +----------------------------- + +Pillow is the friendly PIL fork by `Alex Clark and Contributors `_. PIL is the Python Imaging Library by Fredrik Lundh and Contributors. + +.. image:: https://zenodo.org/badge/17549/python-pillow/Pillow.svg + :target: https://zenodo.org/badge/latestdoi/17549/python-pillow/Pillow + +.. image:: https://readthedocs.org/projects/pillow/badge/?version=latest + :target: http://pillow.readthedocs.org/?badge=latest + :alt: Documentation Status + +.. image:: https://img.shields.io/travis/python-pillow/Pillow/master.svg?label=Linux%20build + :target: https://travis-ci.org/python-pillow/Pillow + :alt: Travis CI build status (Linux) + +.. image:: https://img.shields.io/travis/python-pillow/pillow-wheels/latest.svg?label=OS%20X%20build + :target: https://travis-ci.org/python-pillow/pillow-wheels + :alt: Travis CI build status (OS X) + +.. image:: https://img.shields.io/appveyor/ci/python-pillow/Pillow/master.svg?label=Windows%20build + :target: https://ci.appveyor.com/project/python-pillow/Pillow + :alt: AppVeyor CI build status (Windows) + +.. image:: https://img.shields.io/pypi/v/pillow.svg + :target: https://pypi.python.org/pypi/Pillow/ + :alt: Latest PyPI version + +.. image:: https://img.shields.io/pypi/dm/pillow.svg + :target: https://pypi.python.org/pypi/Pillow/ + :alt: Number of PyPI downloads + +.. image:: https://coveralls.io/repos/python-pillow/Pillow/badge.svg?branch=master&service=github + :target: https://coveralls.io/r/python-pillow/Pillow?branch=master + :alt: Code coverage + +.. image:: https://landscape.io/github/python-pillow/Pillow/master/landscape.svg + :target: https://landscape.io/github/python-pillow/Pillow/master + :alt: Code health + + +More Information +---------------- + +- `Documentation `_ + + - `Installation `_ + - `Handbook `_ + +- `Contribute `_ + + - `Issues `_ + - `Pull requests `_ + +- `Changelog `_ + + - `Pre-fork `_ + + diff --git a/Lib/site-packages/Pillow-3.1.0.dist-info/METADATA b/Lib/site-packages/Pillow-3.1.0.dist-info/METADATA new file mode 100644 index 0000000..156952d --- /dev/null +++ b/Lib/site-packages/Pillow-3.1.0.dist-info/METADATA @@ -0,0 +1,89 @@ +Metadata-Version: 2.0 +Name: Pillow +Version: 3.1.0 +Summary: Python Imaging Library (Fork) +Home-page: http://python-pillow.github.io/ +Author: Alex Clark (Fork Author) +Author-email: aclark@aclark.net +License: Standard PIL License +Keywords: Imaging +Platform: UNKNOWN +Classifier: Development Status :: 6 - Mature +Classifier: Topic :: Multimedia :: Graphics +Classifier: Topic :: Multimedia :: Graphics :: Capture :: Digital Camera +Classifier: Topic :: Multimedia :: Graphics :: Capture :: Scanners +Classifier: Topic :: Multimedia :: Graphics :: Capture :: Screen Capture +Classifier: Topic :: Multimedia :: Graphics :: Graphics Conversion +Classifier: Topic :: Multimedia :: Graphics :: Viewers +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.2 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy + +Pillow +====== + +Python Imaging Library (Fork) 
+----------------------------- + +Pillow is the friendly PIL fork by `Alex Clark and Contributors `_. PIL is the Python Imaging Library by Fredrik Lundh and Contributors. + +.. image:: https://zenodo.org/badge/17549/python-pillow/Pillow.svg + :target: https://zenodo.org/badge/latestdoi/17549/python-pillow/Pillow + +.. image:: https://readthedocs.org/projects/pillow/badge/?version=latest + :target: http://pillow.readthedocs.org/?badge=latest + :alt: Documentation Status + +.. image:: https://img.shields.io/travis/python-pillow/Pillow/master.svg?label=Linux%20build + :target: https://travis-ci.org/python-pillow/Pillow + :alt: Travis CI build status (Linux) + +.. image:: https://img.shields.io/travis/python-pillow/pillow-wheels/latest.svg?label=OS%20X%20build + :target: https://travis-ci.org/python-pillow/pillow-wheels + :alt: Travis CI build status (OS X) + +.. image:: https://img.shields.io/appveyor/ci/python-pillow/Pillow/master.svg?label=Windows%20build + :target: https://ci.appveyor.com/project/python-pillow/Pillow + :alt: AppVeyor CI build status (Windows) + +.. image:: https://img.shields.io/pypi/v/pillow.svg + :target: https://pypi.python.org/pypi/Pillow/ + :alt: Latest PyPI version + +.. image:: https://img.shields.io/pypi/dm/pillow.svg + :target: https://pypi.python.org/pypi/Pillow/ + :alt: Number of PyPI downloads + +.. image:: https://coveralls.io/repos/python-pillow/Pillow/badge.svg?branch=master&service=github + :target: https://coveralls.io/r/python-pillow/Pillow?branch=master + :alt: Code coverage + +.. image:: https://landscape.io/github/python-pillow/Pillow/master/landscape.svg + :target: https://landscape.io/github/python-pillow/Pillow/master + :alt: Code health + + +More Information +---------------- + +- `Documentation `_ + + - `Installation `_ + - `Handbook `_ + +- `Contribute `_ + + - `Issues `_ + - `Pull requests `_ + +- `Changelog `_ + + - `Pre-fork `_ + + diff --git a/Lib/site-packages/Pillow-3.1.0.dist-info/RECORD b/Lib/site-packages/Pillow-3.1.0.dist-info/RECORD new file mode 100644 index 0000000..c1bf6e9 --- /dev/null +++ b/Lib/site-packages/Pillow-3.1.0.dist-info/RECORD @@ -0,0 +1,213 @@ +PIL/BdfFontFile.py,sha256=OLGZa7gH5ncvWae6x2SWPbfTHRiXaLvH_o7Rknyq7Jg,3343 +PIL/BmpImagePlugin.py,sha256=6SUplqZ01NYS2n0Bug4qsZIaCpGlUHo68sIP4ORUkIQ,12563 +PIL/BufrStubImagePlugin.py,sha256=bnQDTRwjOe-uDVkmO8KuW6-DsN2-eNL7zWh7aCdeOeY,1504 +PIL/ContainerIO.py,sha256=UpxILbI9Ln0kL5rBQtPucOiYTo0XftTJPEFQToM3vzg,2605 +PIL/CurImagePlugin.py,sha256=lBBCMDSN3ShsmXM_lSYukqUq-VGS4pOGtFEoHiWGokg,2041 +PIL/DcxImagePlugin.py,sha256=pqM5S2HZbqZMfqk3EjGJuCwun7gW45WCK6xCKHKPd68,2002 +PIL/EpsImagePlugin.py,sha256=Dw1Tgj764ar82EeElKN0UC4dE5YN_QIt1exa9zQ5Xjs,12558 +PIL/ExifTags.py,sha256=klIomCR-gXvHbeRRPmFg-PFJWEWw3DO_ZFPjmnoiPiA,5092 +PIL/FitsStubImagePlugin.py,sha256=bOyEulT-U_VQkkS9vErugqldfjVAUzgcZufrO3XS7yc,1656 +PIL/FliImagePlugin.py,sha256=pm4TiJjYD2hdIzdF_erSqMGovQo1CmhIkYbwROeNVW0,4824 +PIL/FontFile.py,sha256=YObcDjI_-T4q7kxf-JLJuhVxyC7zklFy0N6b3Ji5lGA,2793 +PIL/FpxImagePlugin.py,sha256=ccwZ9yZKdgyxv2voOAuXKJ77hzc5M2FO731HCbg_ZVs,6316 +PIL/GbrImagePlugin.py,sha256=0vx5Nn7K2SuSNDOZlMr7UL5NI-Xry1SMtOgsWh3YsDY,1642 +PIL/GdImageFile.py,sha256=sgFVwaYV5FZDza1U4VzVh5ru2K4XMdQ7slAhB8F-ebc,2182 +PIL/GifImagePlugin.py,sha256=3BTNEbUsDxT-ug1V0IhfwLHQISd0eKdzDEykTKsHplc,21396 +PIL/GimpGradientFile.py,sha256=K71Bj58GMicSwxywhEfQA2rkjBSkfi595R-loWn4vGY,3347 +PIL/GimpPaletteFile.py,sha256=NyYbhFn0ZenrzDZHyq76xjw1j6PslhLJTZX81AXO9dI,1341 
+PIL/GribStubImagePlugin.py,sha256=BrMgUAZs1bfvNcgphqlDG0Bbvd5PY-pHEn_1zGmVHSo,1505 +PIL/Hdf5StubImagePlugin.py,sha256=AthIydrNqzKj_68Pa29b08fKKMu089A_Ef_G49aLLbo,1549 +PIL/IcnsImagePlugin.py,sha256=pT-H9tJPhcWVuIdue1ZcxDcwg6y0DJJaYsQj9uFeEsM,10668 +PIL/IcoImagePlugin.py,sha256=_OuXCz4lUqPSPaOwWm1GGj7BbOGTW5InzM-8XW1bl2Q,9214 +PIL/ImImagePlugin.py,sha256=FTiPg7uAgT2NS9ngb5X1dZA41ZQM3tkjbQWzpIF6LaE,10309 +PIL/Image.py,sha256=alwmJpdVerUZjWGA221X8diXO8G8_e8ky6FGP5yvbQ4,81497 +PIL/ImageChops.py,sha256=jZCVJwGjIpbe4tKudbxpsLVvel4FvujQcd3HbFdtLCM,6184 +PIL/ImageCms.py,sha256=U8Y8bglfFkVBjV1NO0HdE7GGUk3FcyjpFj_PSiPbTbs,37106 +PIL/ImageColor.py,sha256=3CAnIw4NwIZ5W24_fPV4FTTd-0aU1GDMuoKN5VyU_z8,7981 +PIL/ImageDraw.py,sha256=0hqwmBlM-8SulsNQdQ2Xy73Zmq9-P-fwdEnQBB-a2FM,13052 +PIL/ImageDraw2.py,sha256=-2J8X4_1edRkR09DJlhVY0BuP6N3vA0QPhELz-Lnh8Q,3231 +PIL/ImageEnhance.py,sha256=-oS6xX6PHULHa-lJk5fQeemTk-d-sqI8rsv2odTrXEc,3185 +PIL/ImageFile.py,sha256=3Vi77376I3BXIF-a-0SHk_49zarAkvuew9wvjfMFwU8,15697 +PIL/ImageFilter.py,sha256=EZxAfoxda-K4RAVNCfwht0AaH090I8Uh3PdHwJT15rM,6618 +PIL/ImageFont.py,sha256=t1nxWTskEaDxsQPOtbKBG7KbS00J-4AUrHqP-Ae5zs8,19491 +PIL/ImageGrab.py,sha256=UxoINSDDLzjotN-eiHOrHFRvqQidiGgO9PCwC4X92sk,1570 +PIL/ImageMath.py,sha256=KF_7l0uVUexavD5PaE9en6A24yvJhHVR5wb8KN0mpfc,7462 +PIL/ImageMode.py,sha256=BvfsmKlPmT1attWhs_SkjfdDRL5G2ZPy82S1QZVF8OE,1306 +PIL/ImageMorph.py,sha256=5ELcbNnP9reATXf3bp3-5lSoMManm4-HcUG8l7trWZs,8308 +PIL/ImageOps.py,sha256=gGT5MoU-VAX5FNVIVnJhf9miuEZBrTdWzc2rmyYDlxA,13895 +PIL/ImagePalette.py,sha256=pnrnyGg8S6UjRUCGt3KbuPqvZwSkJlR9PFJnrE1sR1I,6738 +PIL/ImagePath.py,sha256=30TyaVd4FNYg08dq_H2lPTkbFFRYBG1RpZsmjscxpyQ,1239 +PIL/ImageQt.py,sha256=4TNax3OTNvtom3qpHLC6IqSRoFdZ6bOpdGtWWFMv1Xg,5658 +PIL/ImageSequence.py,sha256=FcwPuCTIC2v37YGK5-eeebe7Jj5sOx6a0ePOCx8g06k,947 +PIL/ImageShow.py,sha256=zFofzMjn4cL2L0ShXHaBmiJUZWt3ArXWVkKeD3K59Sk,4795 +PIL/ImageStat.py,sha256=7tfTNkkBFduqOHspgu-qVckndwomvJIsToV27yNFqX0,3847 +PIL/ImageTk.py,sha256=Oujs08p7jta7H9a0UZYw_MMDLqSGncjqguqLABgUMDM,9108 +PIL/ImageTransform.py,sha256=rksN-m5SPqnTuINfQt_4Pkm_Qbfef78Lx-qK1CteeE0,2876 +PIL/ImageWin.py,sha256=vQjPXEEbmS0Xv8aIWQwOM2ihPObeeINJ-DckOftR8wQ,7473 +PIL/ImtImagePlugin.py,sha256=foSoxpK5kjotwyF9fxSNTGwbHBVgIMXNfrapCrOARcg,2237 +PIL/IptcImagePlugin.py,sha256=40Vd6aehGS_oWlvQl0NffVYQi9RniRCX7y01pKh1Lt8,6977 +PIL/Jpeg2KImagePlugin.py,sha256=OAkVUMLPT4I_1QlTcW7scju3WsCqr0BilBdn7C2aXoM,7866 +PIL/JpegImagePlugin.py,sha256=DyAQ1xEb8RUdXduNTRqT7c9Noo_rsBXVcGdVWQMBKOU,25929 +PIL/JpegPresets.py,sha256=m9LT6PesbIJuIO5E3SN_Zfnyv22-lKAIMgF7ZP95ukw,12368 +PIL/McIdasImagePlugin.py,sha256=uzz8ANinPXungYt8c38gE938yTx6uyW1ZaczCx6zjCs,1769 +PIL/MicImagePlugin.py,sha256=CYiHPyfGsp46k1i3jvWKmGL9Z1PHSvORvKvjdVWGVao,2365 +PIL/MpegImagePlugin.py,sha256=34yMKXGAg9tL_FxrGTu0g1gMI6ZX-p75rvUkugx9rPw,1880 +PIL/MpoImagePlugin.py,sha256=fPCgu5BXjjgef6yvJKN1md-HNy-90kOSQ8OzjgfkGu0,3022 +PIL/MspImagePlugin.py,sha256=lJRcxXQx7M-Hl3KTuA-SUuTh6COYz34c529CjTq-D7E,2263 +PIL/OleFileIO-README.md,sha256=oqsCKCKkHaXzDCfE1NklWQ9CXxeJCHhMey-_8jZbCMM,10084 +PIL/OleFileIO.py,sha256=gjkWRTrgE9VN7BKeLI2dVAbK7nQTQys-pu1TPSbBAqM,102741 +PIL/PSDraw.py,sha256=kf_m9DtTQhea7VmZXwnI_WdeGInsRG_c1biN18ManyE,6855 +PIL/PaletteFile.py,sha256=WyvvsHKi7afbjiOoFmx4yF8uusrMTz5_bXF86-68SWI,1113 +PIL/PalmImagePlugin.py,sha256=31oLjnm66aIwNQHVNeeuMKvC4KsVh-zgsuRdDmWeMrk,9267 +PIL/PcdImagePlugin.py,sha256=GIG6PN2Rd96xHk1eqt0wUFfha_HW2XI0oHI32GpHLPw,1303 +PIL/PcfFontFile.py,sha256=FE67pIhqGQ2PYOQ_4ImeOcJNcGr2Onxi9Eb_BYiqvr0,6194 
+PIL/PcxImagePlugin.py,sha256=6W_CaLtY9BMai4FKs6HWHphAFDTh_IhjhYaAB7J_yAE,5335 +PIL/PdfImagePlugin.py,sha256=GX-U5Y9ExhuX1_xMMbivEmzBDJCbTaMcEfMKolRclH4,6752 +PIL/PixarImagePlugin.py,sha256=EGkq-e53fx17ZxBKYPDf4zL_j-KQ6f6trLu_SQtEi30,1608 +PIL/PngImagePlugin.py,sha256=ePAEtZCI8j8odI8IMeX9PiYXUJ224C9FM_iZ3Wgp2m4,23409 +PIL/PpmImagePlugin.py,sha256=lRip9Z7vQ3qfq7WKLII3g2uxP7w_3HRhoiVUue3nz_Q,4769 +PIL/PsdImagePlugin.py,sha256=YRqZC0Krrhoclu9XWU9IPwxjL3Wc_9tBYBXc9J7L4N8,7601 +PIL/PyAccess.py,sha256=V-8Tis3-v09svd7iafyLgFYZhEkyIVBKT51Gb6Z3da0,8719 +PIL/SgiImagePlugin.py,sha256=adiXiaA7EOHQ3w6oO0ySjQvBZHLqz1jefZ7Liji-N5Y,2181 +PIL/SpiderImagePlugin.py,sha256=0vSgCfPk4sFk5hBH1x9Evsgd3U19DKlpIwGFf4PXZRg,9463 +PIL/SunImagePlugin.py,sha256=CSgKe1FOxEDg19fed2UoVnfsgcoJ8Sa3DYEJQSi9P2E,1972 +PIL/TarIO.py,sha256=5TS3r1DEi4pCHCTW2Xqcwh1ff0oTgcm2bQRdZZx1Ftc,1223 +PIL/TgaImagePlugin.py,sha256=N_JH5yf8Ka-II2PD3DK7oIDlW-jE7b6e2fKGGagC05w,4991 +PIL/TiffImagePlugin.py,sha256=xXYCwvK4-qn-pyvUdnKQqyVVXOHLUSCHuUU_waZQq30,53267 +PIL/TiffTags.py,sha256=U6ttmdvxCk3-kvXjsnR14l7fTmoIvO1pcxPJXX9HROw,12662 +PIL/WalImageFile.py,sha256=a2eLShfAmGC58EcEPIjahdo-iCerf9EYtGRmm3ZgmXQ,5398 +PIL/WebPImagePlugin.py,sha256=JSTb5X05ns6OnZarFRUYTHWm16BiEnTY4IRWGUBeevo,2017 +PIL/WmfImagePlugin.py,sha256=iikUDdzOPZWWpm5I2mPnEe9Ao1GwbREvq5KHjA_8-40,4147 +PIL/XVThumbImagePlugin.py,sha256=dtIQczQpywrRPR-a4eVVMeOlRAUhRyYwgTPuudRRZ3k,1859 +PIL/XbmImagePlugin.py,sha256=TkLzvPcmUrmubEnk4EYHxgB02aGNzu555AveFcJMKME,2505 +PIL/XpmImagePlugin.py,sha256=E7sxq_QI7b4fm2rLrnkIiDsvH5LmyJn_-6QWlRgzGI8,3130 +PIL/__init__.py,sha256=7ZEGNGFVg-aqjgb8uV8r0P9cfWhOIIaCzsQPUdsX1yI,1554 +PIL/_binary.py,sha256=io-M3LN24r6l4JEb3famSD-VsbO9NKNdX6paf8gNVm4,1408 +PIL/_imaging.cp35-win32.pyd,sha256=yoy7X_25dQsGk9ILEboj_Aoesgkny93p5SnlmbDd5C4,1132032 +PIL/_imagingcms.cp35-win32.pyd,sha256=MFuijYobXmqRT3aNVJyRcL-l8CwqVBYOnipKZmASuS0,171520 +PIL/_imagingft.cp35-win32.pyd,sha256=hrH7V6qukOWlc41P17kUqTZBteBdij1Fy_aQu8fxEek,419328 +PIL/_imagingmath.cp35-win32.pyd,sha256=HCdoRPcy8SgTnCnNHc-7uI6pdvRRGAKudkZVuqXalvU,17920 +PIL/_imagingmorph.cp35-win32.pyd,sha256=CtqQ4JrjjDa2BZyZtfDD0F1TUCsTvmLFcyOBhMSkRco,10752 +PIL/_imagingtk.cp35-win32.pyd,sha256=sPU3qU1ncuhTl6gKIZygfi8rm11QaNMEYR67_nKqA3s,10752 +PIL/_util.py,sha256=b3m5R7BosKxQ1NJud7vuxdmFouSfSYoaJ2P7WqlGtcE,553 +PIL/_webp.cp35-win32.pyd,sha256=WvS4GH4hAuP8_PgzYtv5J7xa7mrNBa4CyzlsZLUN5MY,326656 +PIL/features.py,sha256=Sst8D0wPI4ONak8tQHNilZ-vdDAtiKoRprK264-PZ-0,1613 +../../Scripts/createfontdatachunk.py,sha256=vL6M9fd4NowdB91Y0Mp7QwKwREreAy3Akr4SPhyT3ag,547 +../../Scripts/enhancer.py,sha256=duWmYjo2__GpiYQTjHeeZR5f0i6fK7udIkEq9L-1ej0,1497 +../../Scripts/explode.py,sha256=VwCANn7yubleSj0HXgF1CiuryiVkE3sqFgwg6nymLcs,2428 +../../Scripts/gifmaker.py,sha256=XM_OCPWYc3TqnB-GwF38wMClk_q_aQPaaTxZFlLNoBE,631 +../../Scripts/painter.py,sha256=xy9JE1l0RTCjjc9aM56NmXaz-tLR5NYMKPxAUnXuK0A,2097 +../../Scripts/pilconvert.py,sha256=4MdeEuxPZ1BX5QxdL1it8k05fKB0jWp2xBYsMO-2o_U,2348 +../../Scripts/pildriver.py,sha256=hjWv83cl4XcTX3PhZsqZTkeckLOnVOVoEPtcQCHWulY,15564 +../../Scripts/pilfile.py,sha256=v8sZYbiPl7qladMcQEkvvaQRRG-hznWHT_LPe5S89o4,2656 +../../Scripts/pilfont.py,sha256=g56o2yfPoXaOChQwzESzXO93UydKMk3Ug-pGqpbmZvo,1017 +../../Scripts/pilprint.py,sha256=fn2p7bhqhIDxY6_hNC97GUtd0QgacexXni0xNDKEcc0,2527 +../../Scripts/player.py,sha256=5ZmYQXo_LTPhvwXXd0WALfWHYlFCwFMX8OxgwgpHccU,2163 +../../Scripts/thresholder.py,sha256=1wOG_xiVpBQJy4sqihQuAquhxFk-N6B6ICdZ6N1mv2U,1784 
+../../Scripts/viewer.py,sha256=R9xolyX6ofsdrETvZXiLY0E357Sv1CwZH3cS9wvSL1M,957 +Pillow-3.1.0.dist-info/DESCRIPTION.rst,sha256=RpLAvBLkrwCx2qGlF1Zx8x_fe3o_bV-vl7DPgEdT0yI,2456 +Pillow-3.1.0.dist-info/METADATA,sha256=kqWN4ck0qpCAXM9IzZAFLoMcNqG1Dr1S8Q2pBdxKFhk,3631 +Pillow-3.1.0.dist-info/RECORD,, +Pillow-3.1.0.dist-info/WHEEL,sha256=c3SRX9YjUgBRRFoaDDirVICw0lM0GonS9hcBq3MYzIc,101 +Pillow-3.1.0.dist-info/metadata.json,sha256=Uaat4I9gqHgF5oUmPZdz19Pj3OSUu_3vId812K4UUJ4,1241 +Pillow-3.1.0.dist-info/top_level.txt,sha256=riZqrk-hyZqh5f1Z0Zwii3dKfxEsByhu9cU9IODF-NY,4 +Pillow-3.1.0.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +PIL/__pycache__/ImageGrab.cpython-35.pyc,, +PIL/__pycache__/EpsImagePlugin.cpython-35.pyc,, +PIL/__pycache__/ImageTk.cpython-35.pyc,, +PIL/__pycache__/BmpImagePlugin.cpython-35.pyc,, +PIL/__pycache__/ImageDraw2.cpython-35.pyc,, +../../Scripts/__pycache__/pilconvert.cpython-35.pyc,, +../../Scripts/__pycache__/thresholder.cpython-35.pyc,, +PIL/__pycache__/ImageSequence.cpython-35.pyc,, +PIL/__pycache__/PngImagePlugin.cpython-35.pyc,, +../../Scripts/__pycache__/gifmaker.cpython-35.pyc,, +PIL/__pycache__/XVThumbImagePlugin.cpython-35.pyc,, +PIL/__pycache__/TgaImagePlugin.cpython-35.pyc,, +PIL/__pycache__/FitsStubImagePlugin.cpython-35.pyc,, +PIL/__pycache__/ImageDraw.cpython-35.pyc,, +PIL/__pycache__/CurImagePlugin.cpython-35.pyc,, +PIL/__pycache__/ImagePalette.cpython-35.pyc,, +PIL/__pycache__/PalmImagePlugin.cpython-35.pyc,, +PIL/__pycache__/GbrImagePlugin.cpython-35.pyc,, +PIL/__pycache__/ImageQt.cpython-35.pyc,, +../../Scripts/__pycache__/viewer.cpython-35.pyc,, +PIL/__pycache__/MpegImagePlugin.cpython-35.pyc,, +PIL/__pycache__/_binary.cpython-35.pyc,, +PIL/__pycache__/MspImagePlugin.cpython-35.pyc,, +PIL/__pycache__/McIdasImagePlugin.cpython-35.pyc,, +PIL/__pycache__/ImageCms.cpython-35.pyc,, +PIL/__pycache__/TiffImagePlugin.cpython-35.pyc,, +PIL/__pycache__/XpmImagePlugin.cpython-35.pyc,, +PIL/__pycache__/ImageTransform.cpython-35.pyc,, +PIL/__pycache__/TiffTags.cpython-35.pyc,, +PIL/__pycache__/PyAccess.cpython-35.pyc,, +PIL/__pycache__/Jpeg2KImagePlugin.cpython-35.pyc,, +PIL/__pycache__/WmfImagePlugin.cpython-35.pyc,, +PIL/__pycache__/ImageEnhance.cpython-35.pyc,, +PIL/__pycache__/WebPImagePlugin.cpython-35.pyc,, +PIL/__pycache__/PaletteFile.cpython-35.pyc,, +../../Scripts/__pycache__/painter.cpython-35.pyc,, +../../Scripts/__pycache__/player.cpython-35.pyc,, +../../Scripts/__pycache__/pilfont.cpython-35.pyc,, +../../Scripts/__pycache__/explode.cpython-35.pyc,, +../../Scripts/__pycache__/createfontdatachunk.cpython-35.pyc,, +PIL/__pycache__/FliImagePlugin.cpython-35.pyc,, +PIL/__pycache__/BdfFontFile.cpython-35.pyc,, +PIL/__pycache__/__init__.cpython-35.pyc,, +PIL/__pycache__/FontFile.cpython-35.pyc,, +PIL/__pycache__/features.cpython-35.pyc,, +PIL/__pycache__/IcoImagePlugin.cpython-35.pyc,, +PIL/__pycache__/ImImagePlugin.cpython-35.pyc,, +PIL/__pycache__/GimpGradientFile.cpython-35.pyc,, +PIL/__pycache__/PcdImagePlugin.cpython-35.pyc,, +PIL/__pycache__/ImageChops.cpython-35.pyc,, +PIL/__pycache__/PsdImagePlugin.cpython-35.pyc,, +PIL/__pycache__/ImageMath.cpython-35.pyc,, +PIL/__pycache__/FpxImagePlugin.cpython-35.pyc,, +PIL/__pycache__/Hdf5StubImagePlugin.cpython-35.pyc,, +PIL/__pycache__/MpoImagePlugin.cpython-35.pyc,, +PIL/__pycache__/ImagePath.cpython-35.pyc,, +PIL/__pycache__/GribStubImagePlugin.cpython-35.pyc,, +PIL/__pycache__/WalImageFile.cpython-35.pyc,, +PIL/__pycache__/IcnsImagePlugin.cpython-35.pyc,, 
+PIL/__pycache__/GdImageFile.cpython-35.pyc,, +PIL/__pycache__/ImageFont.cpython-35.pyc,, +../../Scripts/__pycache__/pilfile.cpython-35.pyc,, +PIL/__pycache__/ExifTags.cpython-35.pyc,, +PIL/__pycache__/BufrStubImagePlugin.cpython-35.pyc,, +PIL/__pycache__/PcxImagePlugin.cpython-35.pyc,, +PIL/__pycache__/Image.cpython-35.pyc,, +PIL/__pycache__/ImageShow.cpython-35.pyc,, +PIL/__pycache__/ImageMorph.cpython-35.pyc,, +../../Scripts/__pycache__/pildriver.cpython-35.pyc,, +PIL/__pycache__/SunImagePlugin.cpython-35.pyc,, +PIL/__pycache__/ImageFilter.cpython-35.pyc,, +PIL/__pycache__/GimpPaletteFile.cpython-35.pyc,, +PIL/__pycache__/SpiderImagePlugin.cpython-35.pyc,, +PIL/__pycache__/IptcImagePlugin.cpython-35.pyc,, +PIL/__pycache__/ImageOps.cpython-35.pyc,, +PIL/__pycache__/TarIO.cpython-35.pyc,, +PIL/__pycache__/GifImagePlugin.cpython-35.pyc,, +PIL/__pycache__/ContainerIO.cpython-35.pyc,, +PIL/__pycache__/ImageMode.cpython-35.pyc,, +PIL/__pycache__/DcxImagePlugin.cpython-35.pyc,, +PIL/__pycache__/JpegImagePlugin.cpython-35.pyc,, +PIL/__pycache__/OleFileIO.cpython-35.pyc,, +PIL/__pycache__/ImageFile.cpython-35.pyc,, +PIL/__pycache__/XbmImagePlugin.cpython-35.pyc,, +PIL/__pycache__/PixarImagePlugin.cpython-35.pyc,, +PIL/__pycache__/PpmImagePlugin.cpython-35.pyc,, +PIL/__pycache__/ImtImagePlugin.cpython-35.pyc,, +PIL/__pycache__/JpegPresets.cpython-35.pyc,, +PIL/__pycache__/SgiImagePlugin.cpython-35.pyc,, +PIL/__pycache__/PcfFontFile.cpython-35.pyc,, +../../Scripts/__pycache__/enhancer.cpython-35.pyc,, +PIL/__pycache__/ImageStat.cpython-35.pyc,, +PIL/__pycache__/_util.cpython-35.pyc,, +PIL/__pycache__/MicImagePlugin.cpython-35.pyc,, +PIL/__pycache__/ImageColor.cpython-35.pyc,, +PIL/__pycache__/ImageWin.cpython-35.pyc,, +PIL/__pycache__/PdfImagePlugin.cpython-35.pyc,, +PIL/__pycache__/PSDraw.cpython-35.pyc,, +../../Scripts/__pycache__/pilprint.cpython-35.pyc,, diff --git a/Lib/site-packages/Pillow-3.1.0.dist-info/WHEEL b/Lib/site-packages/Pillow-3.1.0.dist-info/WHEEL new file mode 100644 index 0000000..754b5c7 --- /dev/null +++ b/Lib/site-packages/Pillow-3.1.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.26.0) +Root-Is-Purelib: false +Tag: cp35-none-win32 + diff --git a/Lib/site-packages/Pillow-3.1.0.dist-info/metadata.json b/Lib/site-packages/Pillow-3.1.0.dist-info/metadata.json new file mode 100644 index 0000000..2fab441 --- /dev/null +++ b/Lib/site-packages/Pillow-3.1.0.dist-info/metadata.json @@ -0,0 +1 @@ +{"generator": "bdist_wheel (0.26.0)", "summary": "Python Imaging Library (Fork)", "classifiers": ["Development Status :: 6 - Mature", "Topic :: Multimedia :: Graphics", "Topic :: Multimedia :: Graphics :: Capture :: Digital Camera", "Topic :: Multimedia :: Graphics :: Capture :: Scanners", "Topic :: Multimedia :: Graphics :: Capture :: Screen Capture", "Topic :: Multimedia :: Graphics :: Graphics Conversion", "Topic :: Multimedia :: Graphics :: Viewers", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy"], "extensions": {"python.details": {"project_urls": {"Home": "http://python-pillow.github.io/"}, "contacts": [{"email": "aclark@aclark.net", "name": "Alex Clark (Fork Author)", "role": "author"}], 
"document_names": {"description": "DESCRIPTION.rst"}}}, "keywords": ["Imaging"], "license": "Standard PIL License", "metadata_version": "2.0", "name": "Pillow", "version": "3.1.0"} \ No newline at end of file diff --git a/Lib/site-packages/Pillow-3.1.0.dist-info/top_level.txt b/Lib/site-packages/Pillow-3.1.0.dist-info/top_level.txt new file mode 100644 index 0000000..b338169 --- /dev/null +++ b/Lib/site-packages/Pillow-3.1.0.dist-info/top_level.txt @@ -0,0 +1 @@ +PIL diff --git a/Lib/site-packages/Pillow-3.1.0.dist-info/zip-safe b/Lib/site-packages/Pillow-3.1.0.dist-info/zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/Lib/site-packages/Pillow-3.1.0.dist-info/zip-safe @@ -0,0 +1 @@ + diff --git a/Lib/site-packages/SQLAlchemy-1.0.11.dist-info/DESCRIPTION.rst b/Lib/site-packages/SQLAlchemy-1.0.11.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..479eaf5 --- /dev/null +++ b/Lib/site-packages/SQLAlchemy-1.0.11.dist-info/DESCRIPTION.rst @@ -0,0 +1,137 @@ +SQLAlchemy +========== + +The Python SQL Toolkit and Object Relational Mapper + +Introduction +------------- + +SQLAlchemy is the Python SQL toolkit and Object Relational Mapper +that gives application developers the full power and +flexibility of SQL. SQLAlchemy provides a full suite +of well known enterprise-level persistence patterns, +designed for efficient and high-performing database +access, adapted into a simple and Pythonic domain +language. + +Major SQLAlchemy features include: + +* An industrial strength ORM, built + from the core on the identity map, unit of work, + and data mapper patterns. These patterns + allow transparent persistence of objects + using a declarative configuration system. + Domain models + can be constructed and manipulated naturally, + and changes are synchronized with the + current transaction automatically. +* A relationally-oriented query system, exposing + the full range of SQL's capabilities + explicitly, including joins, subqueries, + correlation, and most everything else, + in terms of the object model. + Writing queries with the ORM uses the same + techniques of relational composition you use + when writing SQL. While you can drop into + literal SQL at any time, it's virtually never + needed. +* A comprehensive and flexible system + of eager loading for related collections and objects. + Collections are cached within a session, + and can be loaded on individual access, all + at once using joins, or by query per collection + across the full result set. +* A Core SQL construction system and DBAPI + interaction layer. The SQLAlchemy Core is + separate from the ORM and is a full database + abstraction layer in its own right, and includes + an extensible Python-based SQL expression + language, schema metadata, connection pooling, + type coercion, and custom types. +* All primary and foreign key constraints are + assumed to be composite and natural. Surrogate + integer primary keys are of course still the + norm, but SQLAlchemy never assumes or hardcodes + to this model. +* Database introspection and generation. Database + schemas can be "reflected" in one step into + Python structures representing database metadata; + those same structures can then generate + CREATE statements right back out - all within + the Core, independent of the ORM. 
+ +SQLAlchemy's philosophy: + +* SQL databases behave less and less like object + collections the more size and performance start to + matter; object collections behave less and less like + tables and rows the more abstraction starts to matter. + SQLAlchemy aims to accommodate both of these + principles. +* An ORM doesn't need to hide the "R". A relational + database provides rich, set-based functionality + that should be fully exposed. SQLAlchemy's + ORM provides an open-ended set of patterns + that allow a developer to construct a custom + mediation layer between a domain model and + a relational schema, turning the so-called + "object relational impedance" issue into + a distant memory. +* The developer, in all cases, makes all decisions + regarding the design, structure, and naming conventions + of both the object model as well as the relational + schema. SQLAlchemy only provides the means + to automate the execution of these decisions. +* With SQLAlchemy, there's no such thing as + "the ORM generated a bad query" - you + retain full control over the structure of + queries, including how joins are organized, + how subqueries and correlation is used, what + columns are requested. Everything SQLAlchemy + does is ultimately the result of a developer- + initiated decision. +* Don't use an ORM if the problem doesn't need one. + SQLAlchemy consists of a Core and separate ORM + component. The Core offers a full SQL expression + language that allows Pythonic construction + of SQL constructs that render directly to SQL + strings for a target database, returning + result sets that are essentially enhanced DBAPI + cursors. +* Transactions should be the norm. With SQLAlchemy's + ORM, nothing goes to permanent storage until + commit() is called. SQLAlchemy encourages applications + to create a consistent means of delineating + the start and end of a series of operations. +* Never render a literal value in a SQL statement. + Bound parameters are used to the greatest degree + possible, allowing query optimizers to cache + query plans effectively and making SQL injection + attacks a non-issue. + +Documentation +------------- + +Latest documentation is at: + +http://www.sqlalchemy.org/docs/ + +Installation / Requirements +--------------------------- + +Full documentation for installation is at +`Installation `_. + +Getting Help / Development / Bug reporting +------------------------------------------ + +Please refer to the `SQLAlchemy Community Guide `_. + +License +------- + +SQLAlchemy is distributed under the `MIT license +`_. 
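As a quick orientation for the SQLAlchemy 1.0.11 bundled by this patch, the declarative mapping and commit-time unit of work described above look roughly like this in practice. This is an illustrative sketch only, assuming an in-memory SQLite database; it is not part of the packaged files::

    from sqlalchemy import Column, Integer, String, create_engine
    from sqlalchemy.ext.declarative import declarative_base
    from sqlalchemy.orm import sessionmaker

    Base = declarative_base()

    class User(Base):
        __tablename__ = "users"
        id = Column(Integer, primary_key=True)
        name = Column(String(50))

    # emit CREATE TABLE statements from the declarative metadata
    engine = create_engine("sqlite:///:memory:")
    Base.metadata.create_all(engine)

    Session = sessionmaker(bind=engine)
    session = Session()
    session.add(User(name="j"))   # pending in the unit of work
    session.commit()              # nothing reaches storage before this call

    # the filter compiles to "name = ?" with a bound parameter, not a literal
    print(session.query(User).filter(User.name == "j").one().id)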
+ + + diff --git a/Lib/site-packages/SQLAlchemy-1.0.11.dist-info/INSTALLER b/Lib/site-packages/SQLAlchemy-1.0.11.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/Lib/site-packages/SQLAlchemy-1.0.11.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Lib/site-packages/SQLAlchemy-1.0.11.dist-info/METADATA b/Lib/site-packages/SQLAlchemy-1.0.11.dist-info/METADATA new file mode 100644 index 0000000..01a8c13 --- /dev/null +++ b/Lib/site-packages/SQLAlchemy-1.0.11.dist-info/METADATA @@ -0,0 +1,157 @@ +Metadata-Version: 2.0 +Name: SQLAlchemy +Version: 1.0.11 +Summary: Database Abstraction Library +Home-page: http://www.sqlalchemy.org +Author: Mike Bayer +Author-email: mike_mp@zzzcomputing.com +License: MIT License +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: Jython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Database :: Front-Ends +Classifier: Operating System :: OS Independent + +SQLAlchemy +========== + +The Python SQL Toolkit and Object Relational Mapper + +Introduction +------------- + +SQLAlchemy is the Python SQL toolkit and Object Relational Mapper +that gives application developers the full power and +flexibility of SQL. SQLAlchemy provides a full suite +of well known enterprise-level persistence patterns, +designed for efficient and high-performing database +access, adapted into a simple and Pythonic domain +language. + +Major SQLAlchemy features include: + +* An industrial strength ORM, built + from the core on the identity map, unit of work, + and data mapper patterns. These patterns + allow transparent persistence of objects + using a declarative configuration system. + Domain models + can be constructed and manipulated naturally, + and changes are synchronized with the + current transaction automatically. +* A relationally-oriented query system, exposing + the full range of SQL's capabilities + explicitly, including joins, subqueries, + correlation, and most everything else, + in terms of the object model. + Writing queries with the ORM uses the same + techniques of relational composition you use + when writing SQL. While you can drop into + literal SQL at any time, it's virtually never + needed. +* A comprehensive and flexible system + of eager loading for related collections and objects. + Collections are cached within a session, + and can be loaded on individual access, all + at once using joins, or by query per collection + across the full result set. +* A Core SQL construction system and DBAPI + interaction layer. The SQLAlchemy Core is + separate from the ORM and is a full database + abstraction layer in its own right, and includes + an extensible Python-based SQL expression + language, schema metadata, connection pooling, + type coercion, and custom types. +* All primary and foreign key constraints are + assumed to be composite and natural. Surrogate + integer primary keys are of course still the + norm, but SQLAlchemy never assumes or hardcodes + to this model. +* Database introspection and generation. 
Database + schemas can be "reflected" in one step into + Python structures representing database metadata; + those same structures can then generate + CREATE statements right back out - all within + the Core, independent of the ORM. + +SQLAlchemy's philosophy: + +* SQL databases behave less and less like object + collections the more size and performance start to + matter; object collections behave less and less like + tables and rows the more abstraction starts to matter. + SQLAlchemy aims to accommodate both of these + principles. +* An ORM doesn't need to hide the "R". A relational + database provides rich, set-based functionality + that should be fully exposed. SQLAlchemy's + ORM provides an open-ended set of patterns + that allow a developer to construct a custom + mediation layer between a domain model and + a relational schema, turning the so-called + "object relational impedance" issue into + a distant memory. +* The developer, in all cases, makes all decisions + regarding the design, structure, and naming conventions + of both the object model as well as the relational + schema. SQLAlchemy only provides the means + to automate the execution of these decisions. +* With SQLAlchemy, there's no such thing as + "the ORM generated a bad query" - you + retain full control over the structure of + queries, including how joins are organized, + how subqueries and correlation is used, what + columns are requested. Everything SQLAlchemy + does is ultimately the result of a developer- + initiated decision. +* Don't use an ORM if the problem doesn't need one. + SQLAlchemy consists of a Core and separate ORM + component. The Core offers a full SQL expression + language that allows Pythonic construction + of SQL constructs that render directly to SQL + strings for a target database, returning + result sets that are essentially enhanced DBAPI + cursors. +* Transactions should be the norm. With SQLAlchemy's + ORM, nothing goes to permanent storage until + commit() is called. SQLAlchemy encourages applications + to create a consistent means of delineating + the start and end of a series of operations. +* Never render a literal value in a SQL statement. + Bound parameters are used to the greatest degree + possible, allowing query optimizers to cache + query plans effectively and making SQL injection + attacks a non-issue. + +Documentation +------------- + +Latest documentation is at: + +http://www.sqlalchemy.org/docs/ + +Installation / Requirements +--------------------------- + +Full documentation for installation is at +`Installation `_. + +Getting Help / Development / Bug reporting +------------------------------------------ + +Please refer to the `SQLAlchemy Community Guide `_. + +License +------- + +SQLAlchemy is distributed under the `MIT license +`_. 
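The Core expression language and bound-parameter behaviour mentioned above can likewise be sketched against the bundled SQLAlchemy 1.0.11; again this is illustrative only, assuming an in-memory SQLite database, and is not part of the packaged files::

    from sqlalchemy import (MetaData, Table, Column, Integer, String,
                            create_engine, select)

    metadata = MetaData()
    users = Table(
        "users", metadata,
        Column("id", Integer, primary_key=True),
        Column("name", String(50)),
    )

    engine = create_engine("sqlite:///:memory:")
    metadata.create_all(engine)   # generate CREATE statements from Core metadata

    conn = engine.connect()
    conn.execute(users.insert(), [{"name": "alice"}, {"name": "bob"}])

    # the comparison compiles to "name = ?" with a bound parameter;
    # no literal value is rendered into the SQL string
    stmt = select([users.c.id]).where(users.c.name == "alice")
    print(conn.execute(stmt).fetchall())
    conn.close()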
+ + + diff --git a/Lib/site-packages/SQLAlchemy-1.0.11.dist-info/RECORD b/Lib/site-packages/SQLAlchemy-1.0.11.dist-info/RECORD new file mode 100644 index 0000000..a4f5a62 --- /dev/null +++ b/Lib/site-packages/SQLAlchemy-1.0.11.dist-info/RECORD @@ -0,0 +1,376 @@ +SQLAlchemy-1.0.11.dist-info/DESCRIPTION.rst,sha256=ZN8fj2owI_rw0Emr3_RXqoNfTFkThjiZy7xcCzg1W_g,5013 +SQLAlchemy-1.0.11.dist-info/METADATA,sha256=Crgm54wY6B6PslLIEkEzLjTm8Ckq5U8tOkMvXqMHMug,5786 +SQLAlchemy-1.0.11.dist-info/RECORD,, +SQLAlchemy-1.0.11.dist-info/WHEEL,sha256=c3SRX9YjUgBRRFoaDDirVICw0lM0GonS9hcBq3MYzIc,101 +SQLAlchemy-1.0.11.dist-info/metadata.json,sha256=bxQh1fZIglF1s4D0gfL3TgClhBclguM0ETRyLoh2rq0,965 +SQLAlchemy-1.0.11.dist-info/top_level.txt,sha256=rp-ZgB7D8G11ivXON5VGPjupT1voYmWqkciDt5Uaw_Q,11 +sqlalchemy/__init__.py,sha256=M40gbPSqQVcL7UfmrMDsIYIV42vHYrzHIGBeo3DiM48,2112 +sqlalchemy/cprocessors.cp35-win32.pyd,sha256=Y5zC26JJcucJ5E7mQ7B177gPGpvDiF3uCcgriReNmWI,13824 +sqlalchemy/cresultproxy.cp35-win32.pyd,sha256=KNlgu9t6h9-NpKr35tu8gygoqnGpHkuBxH-F1y1H3D4,14848 +sqlalchemy/cutils.cp35-win32.pyd,sha256=v6GnJqD9gxARarCKd4n7ImXbRkdT_0pqwsExjgSKg04,9728 +sqlalchemy/events.py,sha256=QNEykcVcLybSAH3ftboKeNEjXObYWnMEy-zMgrgob2M,43944 +sqlalchemy/exc.py,sha256=0E_mahOMkrn6IQISLvd2U110HgQ44-69bU08zW2DIg4,11706 +sqlalchemy/inspection.py,sha256=h4KxFcYGuuswWwnIQQSx4RS4d9IFkq6LsOdNsixsHdU,3093 +sqlalchemy/interfaces.py,sha256=d_F0UA9-u8iJs8DFy9PfOgAPoC7o50Bty066UCvNA68,10967 +sqlalchemy/log.py,sha256=KwffVfiwJoOEDAcQ1mvZyS20tUMT2f9Vgj-ls-oEUF0,6712 +sqlalchemy/pool.py,sha256=flyzNmLboIiH83YvEPGeEeSwQBNbSAWEYnaOeHorpfM,47220 +sqlalchemy/processors.py,sha256=wT0XsPFdmuz6cmH6nD7InHjsl3n4s1GusnH3y1fI-1s,5220 +sqlalchemy/schema.py,sha256=LHD_UoXLNPzF89uiMikxBHWv4aXnZb9jMyam11PXJ_I,1182 +sqlalchemy/types.py,sha256=eShwFVzOg2c5Rkq9OnnwS0qHm9Hbt-oPDQhxqIPbvr4,1650 +sqlalchemy/connectors/__init__.py,sha256=Lg9ZZt-BWkaXg3QmyZee7Uj6a1BAmiM02hizV2gnwSU,278 +sqlalchemy/connectors/mxodbc.py,sha256=kWfnRl1D9z7i1XMQYOwUP0nWbLUjjziZG2U5gsmQN7M,5348 +sqlalchemy/connectors/pyodbc.py,sha256=XazMwfLV0nahi26VUzkWiMx2WHewmFmQZzm-AdCCDxw,6264 +sqlalchemy/connectors/zxJDBC.py,sha256=4Dr9O0_RLLyaZXNA6yhzmOPBQmFptujmdsXpU8yy72Y,1868 +sqlalchemy/databases/__init__.py,sha256=cuSat4I-Yo2roZCyjdkZNbfTTWP6ea7qLM9xaHvcXHw,817 +sqlalchemy/dialects/__init__.py,sha256=FscDjR09siGn7NmN3_kZcliMBUBioyTG7WsMuX9Qhe4,1012 +sqlalchemy/dialects/postgres.py,sha256=jKE_8HuiD9gZqzkinmWVq0yAkbdXwEuQI5RhGs4Lo8g,614 +sqlalchemy/dialects/firebird/__init__.py,sha256=5v5hZJE2SbiWf5NofE6eOq9tX9arZwa9izhyfB4qcm8,664 +sqlalchemy/dialects/firebird/base.py,sha256=mMP4Im_M_lbEiDDxacaPkmXlzlZSBvRozUb557LVsXo,28170 +sqlalchemy/dialects/firebird/fdb.py,sha256=n4y6abR2RNXb1CZplMOPoXQ8rypqSp0Qe0sTB9RWF7o,4325 +sqlalchemy/dialects/firebird/kinterbasdb.py,sha256=Y8OtrdK7fvJifS0h3elP-HGRJv7cwRRyTpSZwVsUsQM,6299 +sqlalchemy/dialects/mssql/__init__.py,sha256=bVIJKPW3-508qdDmfLN36ewTtodO8osfxoo1btBqFMo,1081 +sqlalchemy/dialects/mssql/adodbapi.py,sha256=zx10ag2Dyeopx8norbO4mwMPk4U6bn85F5Ic8rM6cPk,2493 +sqlalchemy/dialects/mssql/base.py,sha256=rshP1qBFwHohF8ASLVZ8cI_5WcfYNF0EAF0HnbSY_gk,66857 +sqlalchemy/dialects/mssql/information_schema.py,sha256=Gyn6fkuQbCkikUMh7_mbDQJ6oY8gWidHKjSEcyZ5G6M,6418 +sqlalchemy/dialects/mssql/mxodbc.py,sha256=gY1--8yXDjufWrRef18sS5yJ_JY9irxHUcUGRnDSTqs,3856 +sqlalchemy/dialects/mssql/pymssql.py,sha256=kPh6yvtOYdLTSd00dq9RoXCpbpSuurw59oQp9qamnDQ,3079 +sqlalchemy/dialects/mssql/pyodbc.py,sha256=Uaj9BEWVYCYDxTQkIQrXtbh8SzStT3npc6trjWwUot0,9473 
+sqlalchemy/dialects/mssql/zxjdbc.py,sha256=Mv_anNbSwq8m7gHvcD1ZCb_y-uW5YdU_PdhJhzG6JZw,2282 +sqlalchemy/dialects/mysql/__init__.py,sha256=qj5QLexRiBSAnOMU9X53C4LARrkHEZpcRqVOlZvKT3Y,1171 +sqlalchemy/dialects/mysql/base.py,sha256=J4j8BzMO24CEVkHzU6GxIKtGKPMGNv-y5XsHHM427bU,122869 +sqlalchemy/dialects/mysql/cymysql.py,sha256=-qEAOiLwQNDcfqzWVjmNID2mMMBMg3fBjvzniisTmAs,2349 +sqlalchemy/dialects/mysql/gaerdbms.py,sha256=D3f-OkMDOWUOyjAe7_NO2q7Op0AhR9ShmpokfGgMRrQ,3387 +sqlalchemy/dialects/mysql/mysqlconnector.py,sha256=dtrevmZGtRLKmX5Bbp3iHGSmM-OP4TJUbCdOwAvzpEM,5323 +sqlalchemy/dialects/mysql/mysqldb.py,sha256=gBGwlNLBJom-pBJFQe86MDzuksAq3h3r2FdrLjpxET0,6564 +sqlalchemy/dialects/mysql/oursql.py,sha256=k1rHmaH3bFJBAaslV-0_iD7vR6Tqx5mD10isridL2vg,8124 +sqlalchemy/dialects/mysql/pymysql.py,sha256=jZBZb0yRFF_hcEz_24F0w0UqXSFkWufbn1GfDLkAsOY,1504 +sqlalchemy/dialects/mysql/pyodbc.py,sha256=j_MyuwshHpPCKu630uy8aqd4BdkpNs_xJ4qf4G-0pO8,2665 +sqlalchemy/dialects/mysql/zxjdbc.py,sha256=3akagUfwdWAPy1_d8p23aPRKIwdmYnWh5DUFm5cK9B8,3942 +sqlalchemy/dialects/oracle/__init__.py,sha256=qumZv9m7G6IiDCq6szXK1ahlfP7sUAL6faaXReiOSVg,797 +sqlalchemy/dialects/oracle/base.py,sha256=yuYVq-8HcVBVkx9Raq8uPcLzV_A60gEhL0O7K8FEbzI,55961 +sqlalchemy/dialects/oracle/cx_oracle.py,sha256=8fcI5501uoPYU5YuOKIJZpmFMOUMlsbc1hVcCCBwTiI,38635 +sqlalchemy/dialects/oracle/zxjdbc.py,sha256=zjB0v19z-5T7SvalVEanxTo-j3Dq8th9zd2MOGxT7qs,8173 +sqlalchemy/dialects/postgresql/__init__.py,sha256=GLsL3NKT6P312QMqu_b4bYJKl5qCCtBbznluQ9Pb8iE,1299 +sqlalchemy/dialects/postgresql/base.py,sha256=rU5tQbBUvaxmoib1RP7SNV_8Y0zXeqCTL_Qn3NQbp7M,104213 +sqlalchemy/dialects/postgresql/constraints.py,sha256=om7sWJ4DjbJ5BUYPMCCwAXK96sRVNmMxaMTo38lCBUI,3119 +sqlalchemy/dialects/postgresql/hstore.py,sha256=hdbaz6wMtrxNekju9JC0H4xoJ7BIhYuxhGYQ_AjvI5A,11402 +sqlalchemy/dialects/postgresql/json.py,sha256=svWb9fdnZJA1ptWARFXc50U_d-jbeN3FTtUmjIcBxTc,12215 +sqlalchemy/dialects/postgresql/pg8000.py,sha256=wp1FU3UYym8SzVXGsUnbwin-mGkDjU1mGZutkB39cPs,8375 +sqlalchemy/dialects/postgresql/psycopg2.py,sha256=HO3mb_szeqXsdF3CK6NoEtQ1zNDa0QU9NSbq7GFAfhI,26953 +sqlalchemy/dialects/postgresql/psycopg2cffi.py,sha256=cRNFG7m5nR_QvW02H-1ZI2B2jEgc5AvnlDlWHkb_vzk,1651 +sqlalchemy/dialects/postgresql/pypostgresql.py,sha256=KRbcqpIMFC5gsjnC_X_n9G4Q7xOFxzr8Q6NMcXir2B4,2655 +sqlalchemy/dialects/postgresql/ranges.py,sha256=cVsWdEE5hUjJP0MKfj-NhQcGfiZ2rZSIV6dFB_KLS-g,4814 +sqlalchemy/dialects/postgresql/zxjdbc.py,sha256=J7ZodrbBIrjqiDhdQjNM8TUUQg3G62kVambDExTVqXk,1397 +sqlalchemy/dialects/sqlite/__init__.py,sha256=0NTrYcRltnSrmi3F4qB_-53tB3TPHN9emOEuKhEYomA,736 +sqlalchemy/dialects/sqlite/base.py,sha256=ANR41MU9VKzm4fwB15AbfHqZTmjcEsHGiZ98g5do17o,54601 +sqlalchemy/dialects/sqlite/pysqlcipher.py,sha256=FhRmV0uBCvypkuogKZj-JSq1cu6grc5xCvmFQMC_MWc,4129 +sqlalchemy/dialects/sqlite/pysqlite.py,sha256=cGw-caWqkxKNcOyXe3gOqdB9hFa5I7Kl-qe9OF1q83k,14980 +sqlalchemy/dialects/sybase/__init__.py,sha256=M8n3bvMgJS-eNrmyc85uhaaZdqYco1ghM_NRmGuXYY4,894 +sqlalchemy/dialects/sybase/base.py,sha256=1EvlVu5rCBvC0078WtlSPAT2AIG__74eC6OA4njjP3c,28853 +sqlalchemy/dialects/sybase/mxodbc.py,sha256=D0g0atm1jvEUsYumr4V8q8dnOlvbn_tBLiXrVmSX8mU,901 +sqlalchemy/dialects/sybase/pyodbc.py,sha256=7KXVvJp63PrwLzjrYlYcNw_IzKaUkyRVq3rkNpQbM0o,2102 +sqlalchemy/dialects/sybase/pysybase.py,sha256=cALv3kzbNMsSIV1fK3EqiiLGYkGI3Izo5L5Gw-0Z89k,3208 +sqlalchemy/engine/__init__.py,sha256=XSBTTLWRq7WEOUT-uDRJJWV-u__aDUegBP2n4_iFTas,18803 +sqlalchemy/engine/base.py,sha256=TiCZhWMAUMkHoijwv2J3LdzdNcFne2yu5DXPW5Eq1ag,79380 
+sqlalchemy/engine/default.py,sha256=sOZCXA_LuR8IP0gNAkZAblZYrtnZXZpxy5J5OqW1-gA,36456 +sqlalchemy/engine/interfaces.py,sha256=kCQK0wZDURfX2YqFfrsHUju2VVZ21ohPF5y3SNXPGdE,35908 +sqlalchemy/engine/reflection.py,sha256=wr5nPGGz3Mp2MTcUQUVNVvF31K_cBcUTaGLlNqRJ-rs,28590 +sqlalchemy/engine/result.py,sha256=kViAxMhqLXZ0_mTknLSON_97XPbrWWiAJErN2r1w3Jg,44027 +sqlalchemy/engine/strategies.py,sha256=NItHu9fdyN8tYP8T66WzcUo0rGcI-S6EtP8FvokTYgY,8929 +sqlalchemy/engine/threadlocal.py,sha256=Aauq8b1A91uHNqhSxFo6OBb-UjDfPUa7aJ75bcnfHQY,4191 +sqlalchemy/engine/url.py,sha256=ER2Fc7wTitbqBdz3Q6FTOpWzAgsVuY8vBICzkYhE39o,8228 +sqlalchemy/engine/util.py,sha256=DPpYVkyxvKCP6ppaTEvKfDCchsopkLj4jI8-mkkiJHk,2338 +sqlalchemy/event/__init__.py,sha256=NcxkLIhLM30onamaG8isJiMaHjK0Le-2r7wFyKggsuY,419 +sqlalchemy/event/api.py,sha256=lkN9lEyrcq8MzvlVjr7OfFSGCseO_Rw5BplNmct83Hc,5990 +sqlalchemy/event/attr.py,sha256=6GTYyex8lU1oEZmN-K2I8eSqnyB_QYvp70_YNXgW0u8,12053 +sqlalchemy/event/base.py,sha256=JVpy1I1a_hY9_6_fBucz2-0VzcnEy0P8DNi7M0Ct8dA,9540 +sqlalchemy/event/legacy.py,sha256=cGDLt2RKH8i2E-Sd5k8XGScrOcs9C5k2NP7QhQ0BrkM,5814 +sqlalchemy/event/registry.py,sha256=wirqs8yipuy2E4hU-gE2jIbw8ENTEci_9mAbWX9eIDc,7786 +sqlalchemy/ext/__init__.py,sha256=sJxTW-jdNPtKQ9HC5JebRqh-JKgthlAPpWDTO4NtKjI,322 +sqlalchemy/ext/associationproxy.py,sha256=nt1NTvm4Xi-b33cSdMJQqi7zd-yUoYCZUgS2ET2vIMw,33422 +sqlalchemy/ext/automap.py,sha256=yWQPWei4M5ONGaPmfnBBPaUDwH5CGhkBh725yk6-m7U,41566 +sqlalchemy/ext/baked.py,sha256=uSPkSctXjFvEn3yWXc4bxX2pFWbxKPa26Jez9AI2yRE,16967 +sqlalchemy/ext/compiler.py,sha256=q5Id4cBzAqlkK6p4gp-NDDBZVblWlqkcTCjso9EG_7k,15770 +sqlalchemy/ext/horizontal_shard.py,sha256=_9c5GPASVZA-INAQSW7y6eVAQ5QfBkaj36rvmTNzJpE,4814 +sqlalchemy/ext/hybrid.py,sha256=bzmlcR4S23Dw8CrodLfizKbE7H8B-PVyqrDJMzWJbJg,27989 +sqlalchemy/ext/instrumentation.py,sha256=HRgNiuYJ90_uSKC1iDwsEl8_KXscMQkEb9KeElk-yLE,14856 +sqlalchemy/ext/mutable.py,sha256=rh8UkY4WIjXg76qUY60nZbHce3NKen2-y9pFfH9IHNE,24707 +sqlalchemy/ext/orderinglist.py,sha256=vWllRHexh48ErM6u3_eSkQRandbmLVeggbGQQiRmT8Q,13816 +sqlalchemy/ext/serializer.py,sha256=gsbXpfpehng_0K2KXTqYg_r6N_3MtKOG6ErcCBBSP3w,5586 +sqlalchemy/ext/declarative/__init__.py,sha256=337iGKWkkMk6h-SFOSXSZ_PygyMPDR-0rHEeJ9pm0uU,756 +sqlalchemy/ext/declarative/api.py,sha256=dwkoCZohWfai8VHioWr2DzCY1XaONt8V-SFL0rd502k,23045 +sqlalchemy/ext/declarative/base.py,sha256=_JQiE3H-8Qa9VIZ6as-JahPhJOG7wWCs9JfzrqLX4Cs,25231 +sqlalchemy/ext/declarative/clsregistry.py,sha256=t1jPn_Dy4gCcOZUXVYLb6frl_gbS1n1orPhwUkehklE,10817 +sqlalchemy/orm/__init__.py,sha256=UayZ0K9WCW3xD4reL4JizJnr7kRzFes5hsOaYLnDgA4,8033 +sqlalchemy/orm/attributes.py,sha256=rzm2wEmZZXuJaX3b4qeHT3IVm9_0cq4DUFJ69ceYoD8,56510 +sqlalchemy/orm/base.py,sha256=HbnLUQb9fOfVaklHNko06DOF5TkMS8j8b2lUAJngcRo,14634 +sqlalchemy/orm/collections.py,sha256=PLbWDah2hBxnXwX5IF4AFrft8XaLxv25Ca7x0kyzPEw,53549 +sqlalchemy/orm/dependency.py,sha256=4-zz6xKTim8NeHm4-lU38nlDgaxquhIjHghhJcoWQns,46192 +sqlalchemy/orm/deprecated_interfaces.py,sha256=kHzvQItq0AnP2Fn4YbqGL6uGvl6jULN2ab4cg6tvPMk,18254 +sqlalchemy/orm/descriptor_props.py,sha256=2URbR4o5BbXN_tbijibWr53rPwrqHiTRKyMPnva8FZw,25141 +sqlalchemy/orm/dynamic.py,sha256=4-2pbvdZf5QhbVlLXxWkZ0spSdpKJdUxdCvZCmX3mlA,13283 +sqlalchemy/orm/evaluator.py,sha256=wM9CF_3NMWxwnBJA6B2j_AvIYgYvjCY5za7iR3Y7Wg4,4731 +sqlalchemy/orm/events.py,sha256=TQHUlbbC32v2Sx8il33F0Jp-hgzCWtOhBnwKxde8hwg,69410 +sqlalchemy/orm/exc.py,sha256=h7HoKmrbbf9vjDBCCgBZlVSyrpGVg_XKpdjyK0Xrj_A,5439 
+sqlalchemy/orm/identity.py,sha256=92X-7hgMvMgHNzpXjURMNgu4WE9vbkvSHVWeAiwvY0g,9021 +sqlalchemy/orm/instrumentation.py,sha256=dyk1T-m55UzacZisb3zxuR4XQlCOR6Gv8HgYN4ezN_I,17510 +sqlalchemy/orm/interfaces.py,sha256=Ap3XG5FDO8ovFjlX9MFOWf4MhqseHp9HWv5sCWJHHbw,21552 +sqlalchemy/orm/loading.py,sha256=ey8xRaBaaDWmJdd8Pve-KOyA9kdtz2wD5f_bWMCef-M,22878 +sqlalchemy/orm/mapper.py,sha256=CLhiWLnwj7kQ-Pn9NI-bDZaYA_pvsxCxzIdhzQJbtPA,115074 +sqlalchemy/orm/path_registry.py,sha256=QpKihick_30zwajJx7GrGQoPs-uwXW-_itgMJuhN9tg,8370 +sqlalchemy/orm/persistence.py,sha256=t9RbO5asV7iiu5XSYEqYYGhOHJVkG2uy3onEubfO-yU,51028 +sqlalchemy/orm/properties.py,sha256=4XYGXw63cwHEnsXMsY1R5EkTzV-3f2ExghziJY1j8II,10426 +sqlalchemy/orm/query.py,sha256=dVLQdvqkOtwhwFugBbdGbJfvaZS1qxeSJevsZMksn50,147616 +sqlalchemy/orm/relationships.py,sha256=Fm_hO6EZKUTMEij7q24-Dth3OeF1a41OsoAMNLTDpiE,116992 +sqlalchemy/orm/scoping.py,sha256=LkBBItVsFBCgplfi9sDsUe_-7BgYMsz9caz9YzB44OU,6101 +sqlalchemy/orm/session.py,sha256=2_ycC2AT0jo7kDIdIE3uzKuHHn33Cfs_dGupxnBsozo,110229 +sqlalchemy/orm/state.py,sha256=5lRgxQBQs1Et6mLej9Q3hoeq91ZR2uo46_wd6e9x82o,22968 +sqlalchemy/orm/strategies.py,sha256=Poc1zyd0WJAGdPUJstH5V59UbAK1UktODVFEbYfeoAw,58527 +sqlalchemy/orm/strategy_options.py,sha256=zIYfL_jNNrXJLVH8ZTHL_Jox8QLsWd2Bhd-FRQq4rKQ,34134 +sqlalchemy/orm/sync.py,sha256=tAbZEbs9FC4LU1uT6CEn3r7DwhZ_RFrjwEuJCha_BAI,5451 +sqlalchemy/orm/unitofwork.py,sha256=1P1czilDopkzcSniWtEjkbfhlirqJ8X0eNQ8VRwvup4,23343 +sqlalchemy/orm/util.py,sha256=Zal9r7_TuEZdqzodotw0bhMwhJy-hyiFpWqF_01zfvM,38092 +sqlalchemy/sql/__init__.py,sha256=asHQiL6DZv0OS-Cxyh5kmG9FJIS7Kn2Z91X48fpcDQ4,1737 +sqlalchemy/sql/annotation.py,sha256=PxI7KVQptJGXSELeYdA2_N8-Eq974jbKjHZcdiP4Xzc,6136 +sqlalchemy/sql/base.py,sha256=8UgyT2_zjmHT_QTfQAhPIBarZ8EnxWIneB7hQQ9S3hA,21583 +sqlalchemy/sql/compiler.py,sha256=6hytTU0apUcTwHJIruJnS2-sebfFXMAFB3Ymp0JFTpQ,100608 +sqlalchemy/sql/crud.py,sha256=zhYa7yLFtM62TfxT51tLyZFRkx2vMfymycrYbfPNd3c,19807 +sqlalchemy/sql/ddl.py,sha256=i4eIF5XpU12CIK-lvOnuA-OsZ7mWbpFMgJajW1o-sJ8,37540 +sqlalchemy/sql/default_comparator.py,sha256=O8w-adlUVgr2qNwOR4yhMHQgnle77Nw9nWydKeNewIY,10409 +sqlalchemy/sql/dml.py,sha256=xDjOiZg6zjej8r2FwzRDka204-15ssRaeHBPrZNoesQ,33330 +sqlalchemy/sql/elements.py,sha256=-RzQgRbPtKMboWMdWXgz0t-zQrNm2EsfeaaidsqK1Rw,133000 +sqlalchemy/sql/expression.py,sha256=L4xYKyUEvgIt1BSaemH0rdjAHzj8qmDXy7EmFBegmlc,5833 +sqlalchemy/sql/functions.py,sha256=eLh335rOopJKdaDTamsVXHY0cn4IktDH7V998byG8mg,18533 +sqlalchemy/sql/naming.py,sha256=AmmhVKerDyFhr2RCe6PrUBJVWzQjj1F0SqN3wcXCqsU,4662 +sqlalchemy/sql/operators.py,sha256=O5EfUeSOpwE4m5ATaF9hcbCYi5L3wjWS_xtw47X7NmA,22922 +sqlalchemy/sql/schema.py,sha256=6Kh5LFggQIbJ0IwFWw0Ol-LL6_RW9hiXV-6MrhohExA,145511 +sqlalchemy/sql/selectable.py,sha256=rGjq1AgVHEV4XL-2jFA-IzMtJj8w6s-Nu8yrfxbQT8g,118995 +sqlalchemy/sql/sqltypes.py,sha256=tmMofzhdnbd_r0mTqxaO8KNG3EeVrlm5-US_xLZ7YIo,55153 +sqlalchemy/sql/type_api.py,sha256=4uPWA50KacSm8jPG4vpWN0rNz03WMuZaBVrcYtgDbOs,42846 +sqlalchemy/sql/util.py,sha256=zhU71mlBig0BK_6skDTEqeu0Y4-htcsHa6zD-nLnI9U,20382 +sqlalchemy/sql/visitors.py,sha256=gNr75-b69DI0aNLVAuZrQepG-8XuR0ZIo2xeV2QMpDc,10271 +sqlalchemy/testing/__init__.py,sha256=GWflHblEPo47aTzO_KT1TBV240ddxltx0GKkqFP1DN8,1095 +sqlalchemy/testing/assertions.py,sha256=x16Wsm9aYjF6t0Ul2q3mumwXXVcAC2nhZu_H-huIIXc,16112 +sqlalchemy/testing/assertsql.py,sha256=q6YeMpRu-qM8cjWXnMTklh-PmY591hcNZ4nnQ2jDmHE,12335 +sqlalchemy/testing/config.py,sha256=fXewFShS5Xln7iTmelymWJL8gqzK1zn9gIlal-X6yEs,2469 
+sqlalchemy/testing/distutils_run.py,sha256=tkURrZRwgFiSwseKm1iJRkSjKf2Rtsb3pOXRWtACTHI,247 +sqlalchemy/testing/engines.py,sha256=26NJkqj2-twuH4x2brQuRgu1bY6MFVDaHfV0yI4K-vE,9359 +sqlalchemy/testing/entities.py,sha256=G4Nm0heDeQjykrOqZXfaQMhGPL5jhSu2t5jS3QHLdak,2992 +sqlalchemy/testing/exclusions.py,sha256=0mixd8HQUE_riBKTutPwpsmJfaC2nPs0k0qKIfFM1nw,12570 +sqlalchemy/testing/fixtures.py,sha256=rtHrlcoRovOLJGrufNNUlliIp-JKHcYr_Zf_AJT0CR4,10721 +sqlalchemy/testing/mock.py,sha256=RPIcH7SoUMQtvTdhA9Q9gGn8wm1PgswlDCFxHam9HkE,630 +sqlalchemy/testing/pickleable.py,sha256=6jDvT2Oz-_WK-WgDnApVEv2D-kUMogAHxdiqHGYEVlc,2641 +sqlalchemy/testing/profiling.py,sha256=R5jaHRlgBrAImDEAP6jcqbqee8CIkncYRDsBRTQjbpA,8241 +sqlalchemy/testing/provision.py,sha256=Ew05q2lY4RrETwVwxy848IfrFkOfqyIFJbM7l-L4Fho,5162 +sqlalchemy/testing/replay_fixture.py,sha256=iAxg7XsFkKSCcJnrNPQNJfjMxOgeBAa-ShOkywWPJ4w,5429 +sqlalchemy/testing/requirements.py,sha256=hs04VOe0EDxvUQrvZ4Xd5O3607OsboKP6rey4H33GTk,19949 +sqlalchemy/testing/runner.py,sha256=nUJ-9c6Cs8moGtdsWuyg7HVas0dPX-Ztw4bW0R83kno,1607 +sqlalchemy/testing/schema.py,sha256=xz2YJtwMqMXUPsMvZUAV4ht8DqLlmABxMGrJKQ0yrOs,3446 +sqlalchemy/testing/util.py,sha256=axUu-q9uWos2uqyt99JMh4mySxa4FzF9I2rkCElO72E,7529 +sqlalchemy/testing/warnings.py,sha256=8U-jDrleV5OQl-rVg63wVM35BDCvZ_2MYekucVr0HMw,987 +sqlalchemy/testing/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +sqlalchemy/testing/plugin/bootstrap.py,sha256=Iw8R-d1gqoz_NKFtPyGfdX56QPcQHny_9Lvwov65aVY,1634 +sqlalchemy/testing/plugin/noseplugin.py,sha256=yyJvrGjzMLmXRtB4gKLa03ePnx4pe1IQCa_em4BZUf8,2847 +sqlalchemy/testing/plugin/plugin_base.py,sha256=pxpKlzjKkW9K2j9N6kNhNBRarNqaBUSivFJUS-H3HCM,17120 +sqlalchemy/testing/plugin/pytestplugin.py,sha256=RITZp-hYvzUjWRouSTJw0GyWg9TGMPXSu-f5_bbpDWs,5836 +sqlalchemy/testing/suite/__init__.py,sha256=wqCTrb28i5FwhQZOyXVlnz3mA94iQOUBio7lszkFq-g,471 +sqlalchemy/testing/suite/test_ddl.py,sha256=Baw0ou9nKdADmrRuXgWzF1FZx0rvkkw3JHc6yw5BN0M,1838 +sqlalchemy/testing/suite/test_dialect.py,sha256=ORQPXUt53XtO-5ENlWgs8BpsSdPBDjyMRl4W2UjXLI4,1165 +sqlalchemy/testing/suite/test_insert.py,sha256=nP0mgVpsVs72MHMADmihB1oXLbFBpsYsLGO3BlQ7RLU,8132 +sqlalchemy/testing/suite/test_reflection.py,sha256=eQtmFlLOdZgPVqjowEsLyGzPFAYGrbkhSyuaNVIhdog,24364 +sqlalchemy/testing/suite/test_results.py,sha256=oAcO1tD0I7c9ErMeSvSZBZfz1IBDMJHJTf64Y1pBodk,6685 +sqlalchemy/testing/suite/test_select.py,sha256=u0wAz1g-GrAFdZpG4zwSrVckVtjULvjlbd0Z1U1jHAA,5729 +sqlalchemy/testing/suite/test_sequence.py,sha256=i7tWJnVqfZDTopHs8i4NEDZnhsxjDoOQW0khixKIAnU,3806 +sqlalchemy/testing/suite/test_types.py,sha256=UKa-ZPdpz16mVKvT-9ISRAfqdrqiKaE7IA-_phQQuxo,17088 +sqlalchemy/testing/suite/test_update_delete.py,sha256=r5p467r-EUsjEcWGfUE0VPIfN4LLXZpLRnnyBLyyjl4,1582 +sqlalchemy/util/__init__.py,sha256=MOYfa64R327-OdRkmmkvjOKBMFJnQNUkrPbNMEzXDew,2520 +sqlalchemy/util/_collections.py,sha256=_O32JK3YRvSqIVT7jhgWqmrrcD6tvii_Agp6W279R1Q,27842 +sqlalchemy/util/compat.py,sha256=LPVxDxsA9NEEd3AurOKIUDL9DeOJOWQFW2RMGwhXTmU,6843 +sqlalchemy/util/deprecations.py,sha256=lC0bKFW_4wGUNY0gMRP5QNr8CpNBahfvcGpMUBlVtvI,4403 +sqlalchemy/util/langhelpers.py,sha256=PkiNRgnDPiwTSnoP-PW5NwZjZoxgyXGpD7iLp8BBtbE,41173 +sqlalchemy/util/queue.py,sha256=OscBM8q0-5tho_8oYLZjTv1-cxcW8f__iCLCHDZHo18,6548 +sqlalchemy/util/topological.py,sha256=Zqw6fj4N6M558OEApeYCWQY_zYXfgaUlyJGiruvENGE,2794 +c:\users\j\oml\platform\win32\Lib\site-packages\SQLAlchemy-1.0.11.dist-info\INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 
+sqlalchemy/engine/__pycache__/threadlocal.cpython-35.pyc,, +sqlalchemy/dialects/oracle/__pycache__/base.cpython-35.pyc,, +sqlalchemy/util/__pycache__/deprecations.cpython-35.pyc,, +sqlalchemy/testing/suite/__pycache__/test_types.cpython-35.pyc,, +sqlalchemy/util/__pycache__/topological.cpython-35.pyc,, +sqlalchemy/ext/__pycache__/associationproxy.cpython-35.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/__pycache__/processors.cpython-35.pyc,, +sqlalchemy/ext/declarative/__pycache__/base.cpython-35.pyc,, +sqlalchemy/dialects/sybase/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/sql/__pycache__/visitors.cpython-35.pyc,, +sqlalchemy/dialects/mysql/__pycache__/gaerdbms.cpython-35.pyc,, +sqlalchemy/sql/__pycache__/annotation.cpython-35.pyc,, +sqlalchemy/testing/__pycache__/entities.cpython-35.pyc,, +sqlalchemy/sql/__pycache__/selectable.cpython-35.pyc,, +sqlalchemy/connectors/__pycache__/pyodbc.cpython-35.pyc,, +sqlalchemy/testing/__pycache__/warnings.cpython-35.pyc,, +sqlalchemy/ext/__pycache__/serializer.cpython-35.pyc,, +sqlalchemy/ext/declarative/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/util/__pycache__/langhelpers.cpython-35.pyc,, +sqlalchemy/dialects/sybase/__pycache__/pysybase.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/exc.cpython-35.pyc,, +sqlalchemy/ext/__pycache__/mutable.cpython-35.pyc,, +sqlalchemy/sql/__pycache__/type_api.cpython-35.pyc,, +sqlalchemy/engine/__pycache__/url.cpython-35.pyc,, +sqlalchemy/dialects/mysql/__pycache__/base.cpython-35.pyc,, +sqlalchemy/engine/__pycache__/base.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/deprecated_interfaces.cpython-35.pyc,, +sqlalchemy/ext/__pycache__/instrumentation.cpython-35.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/psycopg2cffi.cpython-35.pyc,, +sqlalchemy/testing/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/dialects/mssql/__pycache__/information_schema.cpython-35.pyc,, +sqlalchemy/testing/__pycache__/assertions.cpython-35.pyc,, +sqlalchemy/testing/plugin/__pycache__/bootstrap.cpython-35.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/json.cpython-35.pyc,, +sqlalchemy/dialects/mssql/__pycache__/base.cpython-35.pyc,, +sqlalchemy/connectors/__pycache__/zxJDBC.cpython-35.pyc,, +sqlalchemy/event/__pycache__/legacy.cpython-35.pyc,, +sqlalchemy/databases/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/relationships.cpython-35.pyc,, +sqlalchemy/testing/__pycache__/exclusions.cpython-35.pyc,, +sqlalchemy/testing/plugin/__pycache__/pytestplugin.cpython-35.pyc,, +sqlalchemy/dialects/mssql/__pycache__/zxjdbc.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/evaluator.cpython-35.pyc,, +sqlalchemy/__pycache__/types.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/identity.cpython-35.pyc,, +sqlalchemy/sql/__pycache__/schema.cpython-35.pyc,, +sqlalchemy/dialects/mssql/__pycache__/pymssql.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/dependency.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/query.cpython-35.pyc,, +sqlalchemy/connectors/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/__pycache__/pool.cpython-35.pyc,, +sqlalchemy/ext/__pycache__/orderinglist.cpython-35.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/hstore.cpython-35.pyc,, +sqlalchemy/engine/__pycache__/default.cpython-35.pyc,, +sqlalchemy/ext/__pycache__/automap.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/attributes.cpython-35.pyc,, +sqlalchemy/testing/__pycache__/replay_fixture.cpython-35.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/zxjdbc.cpython-35.pyc,, 
+sqlalchemy/ext/declarative/__pycache__/api.cpython-35.pyc,, +sqlalchemy/testing/__pycache__/provision.cpython-35.pyc,, +sqlalchemy/ext/__pycache__/horizontal_shard.cpython-35.pyc,, +sqlalchemy/testing/suite/__pycache__/test_update_delete.cpython-35.pyc,, +sqlalchemy/testing/plugin/__pycache__/noseplugin.cpython-35.pyc,, +sqlalchemy/ext/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/__pycache__/inspection.cpython-35.pyc,, +sqlalchemy/testing/__pycache__/profiling.cpython-35.pyc,, +sqlalchemy/engine/__pycache__/result.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/interfaces.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/loading.cpython-35.pyc,, +sqlalchemy/testing/__pycache__/distutils_run.cpython-35.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/pypostgresql.cpython-35.pyc,, +sqlalchemy/connectors/__pycache__/mxodbc.cpython-35.pyc,, +sqlalchemy/dialects/oracle/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/testing/__pycache__/config.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/base.cpython-35.pyc,, +sqlalchemy/sql/__pycache__/ddl.cpython-35.pyc,, +sqlalchemy/sql/__pycache__/default_comparator.cpython-35.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/state.cpython-35.pyc,, +sqlalchemy/util/__pycache__/queue.cpython-35.pyc,, +sqlalchemy/testing/suite/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/sql/__pycache__/operators.cpython-35.pyc,, +sqlalchemy/testing/suite/__pycache__/test_sequence.cpython-35.pyc,, +sqlalchemy/testing/__pycache__/util.cpython-35.pyc,, +sqlalchemy/sql/__pycache__/dml.cpython-35.pyc,, +sqlalchemy/engine/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/sql/__pycache__/expression.cpython-35.pyc,, +sqlalchemy/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/path_registry.cpython-35.pyc,, +sqlalchemy/dialects/firebird/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/unitofwork.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/events.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/strategy_options.cpython-35.pyc,, +sqlalchemy/testing/suite/__pycache__/test_dialect.cpython-35.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/pysqlite.cpython-35.pyc,, +sqlalchemy/sql/__pycache__/base.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/collections.cpython-35.pyc,, +sqlalchemy/event/__pycache__/registry.cpython-35.pyc,, +sqlalchemy/dialects/mssql/__pycache__/adodbapi.cpython-35.pyc,, +sqlalchemy/testing/__pycache__/assertsql.cpython-35.pyc,, +sqlalchemy/dialects/mysql/__pycache__/pyodbc.cpython-35.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/base.cpython-35.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/pg8000.cpython-35.pyc,, +sqlalchemy/dialects/mysql/__pycache__/mysqlconnector.cpython-35.pyc,, +sqlalchemy/dialects/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/dialects/mysql/__pycache__/oursql.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/properties.cpython-35.pyc,, +sqlalchemy/dialects/sybase/__pycache__/base.cpython-35.pyc,, +sqlalchemy/dialects/firebird/__pycache__/base.cpython-35.pyc,, +sqlalchemy/event/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/instrumentation.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/mapper.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/dynamic.cpython-35.pyc,, +sqlalchemy/dialects/mysql/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/sync.cpython-35.pyc,, +sqlalchemy/testing/plugin/__pycache__/plugin_base.cpython-35.pyc,, 
+sqlalchemy/testing/__pycache__/mock.cpython-35.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/ranges.cpython-35.pyc,, +sqlalchemy/event/__pycache__/base.cpython-35.pyc,, +sqlalchemy/dialects/mssql/__pycache__/pyodbc.cpython-35.pyc,, +sqlalchemy/__pycache__/interfaces.cpython-35.pyc,, +sqlalchemy/dialects/oracle/__pycache__/zxjdbc.cpython-35.pyc,, +sqlalchemy/dialects/mssql/__pycache__/mxodbc.cpython-35.pyc,, +sqlalchemy/sql/__pycache__/naming.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/session.cpython-35.pyc,, +sqlalchemy/testing/__pycache__/requirements.cpython-35.pyc,, +sqlalchemy/dialects/firebird/__pycache__/kinterbasdb.cpython-35.pyc,, +sqlalchemy/__pycache__/events.cpython-35.pyc,, +sqlalchemy/dialects/mysql/__pycache__/zxjdbc.cpython-35.pyc,, +sqlalchemy/__pycache__/exc.cpython-35.pyc,, +sqlalchemy/event/__pycache__/api.cpython-35.pyc,, +sqlalchemy/testing/suite/__pycache__/test_results.cpython-35.pyc,, +sqlalchemy/sql/__pycache__/elements.cpython-35.pyc,, +sqlalchemy/ext/__pycache__/hybrid.cpython-35.pyc,, +sqlalchemy/ext/declarative/__pycache__/clsregistry.cpython-35.pyc,, +sqlalchemy/engine/__pycache__/reflection.cpython-35.pyc,, +sqlalchemy/event/__pycache__/attr.cpython-35.pyc,, +sqlalchemy/ext/__pycache__/baked.cpython-35.pyc,, +sqlalchemy/testing/__pycache__/pickleable.cpython-35.pyc,, +sqlalchemy/sql/__pycache__/functions.cpython-35.pyc,, +sqlalchemy/engine/__pycache__/interfaces.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/descriptor_props.cpython-35.pyc,, +sqlalchemy/sql/__pycache__/sqltypes.cpython-35.pyc,, +sqlalchemy/testing/__pycache__/schema.cpython-35.pyc,, +sqlalchemy/dialects/mysql/__pycache__/mysqldb.cpython-35.pyc,, +sqlalchemy/dialects/mssql/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/util/__pycache__/compat.cpython-35.pyc,, +sqlalchemy/sql/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/constraints.cpython-35.pyc,, +sqlalchemy/dialects/mysql/__pycache__/pymysql.cpython-35.pyc,, +sqlalchemy/dialects/oracle/__pycache__/cx_oracle.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/persistence.cpython-35.pyc,, +sqlalchemy/testing/suite/__pycache__/test_insert.cpython-35.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/pysqlcipher.cpython-35.pyc,, +sqlalchemy/testing/suite/__pycache__/test_select.cpython-35.pyc,, +sqlalchemy/testing/__pycache__/runner.cpython-35.pyc,, +sqlalchemy/sql/__pycache__/crud.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/scoping.cpython-35.pyc,, +sqlalchemy/util/__pycache__/_collections.cpython-35.pyc,, +sqlalchemy/dialects/mysql/__pycache__/cymysql.cpython-35.pyc,, +sqlalchemy/testing/plugin/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/psycopg2.cpython-35.pyc,, +sqlalchemy/dialects/__pycache__/postgres.cpython-35.pyc,, +sqlalchemy/util/__pycache__/__init__.cpython-35.pyc,, +sqlalchemy/sql/__pycache__/util.cpython-35.pyc,, +sqlalchemy/ext/__pycache__/compiler.cpython-35.pyc,, +sqlalchemy/engine/__pycache__/strategies.cpython-35.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/base.cpython-35.pyc,, +sqlalchemy/orm/__pycache__/util.cpython-35.pyc,, +sqlalchemy/testing/suite/__pycache__/test_reflection.cpython-35.pyc,, +sqlalchemy/testing/suite/__pycache__/test_ddl.cpython-35.pyc,, +sqlalchemy/engine/__pycache__/util.cpython-35.pyc,, +sqlalchemy/__pycache__/schema.cpython-35.pyc,, +sqlalchemy/dialects/firebird/__pycache__/fdb.cpython-35.pyc,, +sqlalchemy/sql/__pycache__/compiler.cpython-35.pyc,, +sqlalchemy/testing/__pycache__/engines.cpython-35.pyc,, 
+sqlalchemy/orm/__pycache__/strategies.cpython-35.pyc,, +sqlalchemy/dialects/sybase/__pycache__/mxodbc.cpython-35.pyc,, +sqlalchemy/__pycache__/log.cpython-35.pyc,, +sqlalchemy/testing/__pycache__/fixtures.cpython-35.pyc,, +sqlalchemy/dialects/sybase/__pycache__/pyodbc.cpython-35.pyc,, diff --git a/Lib/site-packages/SQLAlchemy-1.0.11.dist-info/WHEEL b/Lib/site-packages/SQLAlchemy-1.0.11.dist-info/WHEEL new file mode 100644 index 0000000..754b5c7 --- /dev/null +++ b/Lib/site-packages/SQLAlchemy-1.0.11.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.26.0) +Root-Is-Purelib: false +Tag: cp35-none-win32 + diff --git a/Lib/site-packages/SQLAlchemy-1.0.11.dist-info/metadata.json b/Lib/site-packages/SQLAlchemy-1.0.11.dist-info/metadata.json new file mode 100644 index 0000000..c44cfb9 --- /dev/null +++ b/Lib/site-packages/SQLAlchemy-1.0.11.dist-info/metadata.json @@ -0,0 +1 @@ +{"generator": "bdist_wheel (0.26.0)", "summary": "Database Abstraction Library", "classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: Jython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Database :: Front-Ends", "Operating System :: OS Independent"], "extensions": {"python.details": {"project_urls": {"Home": "http://www.sqlalchemy.org"}, "contacts": [{"email": "mike_mp@zzzcomputing.com", "name": "Mike Bayer", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}}}, "license": "MIT License", "metadata_version": "2.0", "name": "SQLAlchemy", "version": "1.0.11", "test_requires": [{"requires": ["mock", "pytest (>=2.5.2)", "pytest-xdist"]}]} \ No newline at end of file diff --git a/Lib/site-packages/SQLAlchemy-1.0.11.dist-info/top_level.txt b/Lib/site-packages/SQLAlchemy-1.0.11.dist-info/top_level.txt new file mode 100644 index 0000000..39fb2be --- /dev/null +++ b/Lib/site-packages/SQLAlchemy-1.0.11.dist-info/top_level.txt @@ -0,0 +1 @@ +sqlalchemy diff --git a/Lib/site-packages/_markerlib/__init__.py b/Lib/site-packages/_markerlib/__init__.py new file mode 100644 index 0000000..e2b237b --- /dev/null +++ b/Lib/site-packages/_markerlib/__init__.py @@ -0,0 +1,16 @@ +try: + import ast + from _markerlib.markers import default_environment, compile, interpret +except ImportError: + if 'ast' in globals(): + raise + def default_environment(): + return {} + def compile(marker): + def marker_fn(environment=None, override=None): + # 'empty markers are True' heuristic won't install extra deps. + return not marker.strip() + marker_fn.__doc__ = marker + return marker_fn + def interpret(marker, environment=None, override=None): + return compile(marker)() diff --git a/Lib/site-packages/_markerlib/markers.py b/Lib/site-packages/_markerlib/markers.py new file mode 100644 index 0000000..fa83706 --- /dev/null +++ b/Lib/site-packages/_markerlib/markers.py @@ -0,0 +1,119 @@ +# -*- coding: utf-8 -*- +"""Interpret PEP 345 environment markers. + +EXPR [in|==|!=|not in] EXPR [or|and] ... 
+ +where EXPR belongs to any of those: + + python_version = '%s.%s' % (sys.version_info[0], sys.version_info[1]) + python_full_version = sys.version.split()[0] + os.name = os.name + sys.platform = sys.platform + platform.version = platform.version() + platform.machine = platform.machine() + platform.python_implementation = platform.python_implementation() + a free string, like '2.6', or 'win32' +""" + +__all__ = ['default_environment', 'compile', 'interpret'] + +import ast +import os +import platform +import sys +import weakref + +_builtin_compile = compile + +try: + from platform import python_implementation +except ImportError: + if os.name == "java": + # Jython 2.5 has ast module, but not platform.python_implementation() function. + def python_implementation(): + return "Jython" + else: + raise + + +# restricted set of variables +_VARS = {'sys.platform': sys.platform, + 'python_version': '%s.%s' % sys.version_info[:2], + # FIXME parsing sys.platform is not reliable, but there is no other + # way to get e.g. 2.7.2+, and the PEP is defined with sys.version + 'python_full_version': sys.version.split(' ', 1)[0], + 'os.name': os.name, + 'platform.version': platform.version(), + 'platform.machine': platform.machine(), + 'platform.python_implementation': python_implementation(), + 'extra': None # wheel extension + } + +for var in list(_VARS.keys()): + if '.' in var: + _VARS[var.replace('.', '_')] = _VARS[var] + +def default_environment(): + """Return copy of default PEP 385 globals dictionary.""" + return dict(_VARS) + +class ASTWhitelist(ast.NodeTransformer): + def __init__(self, statement): + self.statement = statement # for error messages + + ALLOWED = (ast.Compare, ast.BoolOp, ast.Attribute, ast.Name, ast.Load, ast.Str) + # Bool operations + ALLOWED += (ast.And, ast.Or) + # Comparison operations + ALLOWED += (ast.Eq, ast.Gt, ast.GtE, ast.In, ast.Is, ast.IsNot, ast.Lt, ast.LtE, ast.NotEq, ast.NotIn) + + def visit(self, node): + """Ensure statement only contains allowed nodes.""" + if not isinstance(node, self.ALLOWED): + raise SyntaxError('Not allowed in environment markers.\n%s\n%s' % + (self.statement, + (' ' * node.col_offset) + '^')) + return ast.NodeTransformer.visit(self, node) + + def visit_Attribute(self, node): + """Flatten one level of attribute access.""" + new_node = ast.Name("%s.%s" % (node.value.id, node.attr), node.ctx) + return ast.copy_location(new_node, node) + +def parse_marker(marker): + tree = ast.parse(marker, mode='eval') + new_tree = ASTWhitelist(marker).generic_visit(tree) + return new_tree + +def compile_marker(parsed_marker): + return _builtin_compile(parsed_marker, '', 'eval', + dont_inherit=True) + +_cache = weakref.WeakValueDictionary() + +def compile(marker): + """Return compiled marker as a function accepting an environment dict.""" + try: + return _cache[marker] + except KeyError: + pass + if not marker.strip(): + def marker_fn(environment=None, override=None): + """""" + return True + else: + compiled_marker = compile_marker(parse_marker(marker)) + def marker_fn(environment=None, override=None): + """override updates environment""" + if override is None: + override = {} + if environment is None: + environment = default_environment() + environment.update(override) + return eval(compiled_marker, environment) + marker_fn.__doc__ = marker + _cache[marker] = marker_fn + return _cache[marker] + +def interpret(marker, environment=None): + return compile(marker)(environment) diff --git a/Lib/site-packages/cffi-1.5.0.dist-info/DESCRIPTION.rst 
b/Lib/site-packages/cffi-1.5.0.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..ef4aa7b --- /dev/null +++ b/Lib/site-packages/cffi-1.5.0.dist-info/DESCRIPTION.rst @@ -0,0 +1,13 @@ + +CFFI +==== + +Foreign Function Interface for Python calling C code. +Please see the `Documentation `_. + +Contact +------- + +`Mailing list `_ + + diff --git a/Lib/site-packages/cffi-1.5.0.dist-info/INSTALLER b/Lib/site-packages/cffi-1.5.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/Lib/site-packages/cffi-1.5.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Lib/site-packages/cffi-1.5.0.dist-info/METADATA b/Lib/site-packages/cffi-1.5.0.dist-info/METADATA new file mode 100644 index 0000000..8f7299a --- /dev/null +++ b/Lib/site-packages/cffi-1.5.0.dist-info/METADATA @@ -0,0 +1,34 @@ +Metadata-Version: 2.0 +Name: cffi +Version: 1.5.0 +Summary: Foreign Function Interface for Python calling C code. +Home-page: http://cffi.readthedocs.org +Author: Armin Rigo, Maciej Fijalkowski +Author-email: python-cffi@googlegroups.com +License: MIT +Platform: UNKNOWN +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.2 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Requires-Dist: pycparser + + +CFFI +==== + +Foreign Function Interface for Python calling C code. +Please see the `Documentation `_. + +Contact +------- + +`Mailing list `_ + + diff --git a/Lib/site-packages/cffi-1.5.0.dist-info/RECORD b/Lib/site-packages/cffi-1.5.0.dist-info/RECORD new file mode 100644 index 0000000..79819cb --- /dev/null +++ b/Lib/site-packages/cffi-1.5.0.dist-info/RECORD @@ -0,0 +1,42 @@ +_cffi_backend.cp35-win32.pyd,sha256=xpzIGi8h1YIIEBm7Ma9MsQ5DilTIil38dNBJrPJZoVw,120832 +cffi/api.py,sha256=Ul_KyMObyooHfIeGkCOagtXnp6wl7VEQRwdg7BjMCko,35050 +cffi/backend_ctypes.py,sha256=L5uklowUxOBVFvv9tzYHch1oH62oYmJBe5DB8pbTTyY,40130 +cffi/cffi_opcode.py,sha256=yl7UL7jQbU38WDeN_DRlTUvfuIsR_Z8Oa_ne_BJtw44,5477 +cffi/commontypes.py,sha256=iOD1UdO_teZ8b1YLdTl-2K261gzJF9DGZQ9KS2q7cHo,2531 +cffi/cparser.py,sha256=OPBAd1baXXenr2kJRztXdnp96efU8toiENWQYhk_IAY,36624 +cffi/ffiplatform.py,sha256=YZJrqT8T18114nKdgkA-MMTzwPkWV8QSw9rnsLB4fYY,3729 +cffi/gc_weakref.py,sha256=ijoJUjCI0I4BTfyhxE9RxvAFnbbKWMVc4U0os99mf4M,642 +cffi/lock.py,sha256=l9TTdwMIMpi6jDkJGnQgE9cvTIR7CAntIJr8EGHt3pY,747 +cffi/model.py,sha256=dRhR8Y_seBprLciRQKgUNNvAndNLu3ohBq7NWJ-KWxk,21110 +cffi/parse_c_type.h,sha256=BBca7ODJCzlsK_1c4HT5MFhy1wdUbyHYvKuvCvxaQZ8,5835 +cffi/recompiler.py,sha256=n_JP9spM2rOxexCJ49rjxzQlwl2wa85LSbCUN4teejc,59939 +cffi/setuptools_ext.py,sha256=P5AQdnfwTdbHwvMQaIWIYKOW8zvzUZeLnSOxu7r-ufk,6158 +cffi/vengine_cpy.py,sha256=6uz_cMSdBUmkRAyoug5K7g_i0QuiWgACyLb1NUx4QO4,41320 +cffi/vengine_gen.py,sha256=Jy40OZrSkX2xyJulXkKWutXjOzD6Nvwd5p8g_r-aVHk,26583 +cffi/verifier.py,sha256=2KvPQhU6hQKaoqdBZ-LCWyNlJF7ez9RAlfX91iUgpR8,11468 +cffi/_cffi_include.h,sha256=duTU1R4itHdrimTy7A6K1mY4Pr6MlNwaFu_sgeNLYr8,9911 +cffi/_embedding.h,sha256=AMVPMkx8tP-Yx1VkCa6qZK1suZ5KSOxHRRSzQVafOSM,17249 +cffi/__init__.py,sha256=rDGrtZnUofH3JfCslrfKTkKb7XFyTA0HN80HyBP9hkE,483 
+cffi-1.5.0.dist-info/DESCRIPTION.rst,sha256=9ijQLbcqTWNF-iV0RznFiBeBCNrjArA0P-eutKUPw98,220 +cffi-1.5.0.dist-info/entry_points.txt,sha256=Q9f5C9IpjYxo0d2PK9eUcnkgxHc9pHWwjEMaANPKNCI,76 +cffi-1.5.0.dist-info/METADATA,sha256=tyDq6VJIaTyoohrUUwAwtVKGR_hDQBijVbV-FdX4v8o,1039 +cffi-1.5.0.dist-info/metadata.json,sha256=VRO8FOdatB4lBO1vcJpo2qdNuFAklUf18AnS5ThkmAU,1070 +cffi-1.5.0.dist-info/RECORD,, +cffi-1.5.0.dist-info/top_level.txt,sha256=rE7WR3rZfNKxWI9-jn6hsHCAl7MDkB-FmuQbxWjFehQ,19 +cffi-1.5.0.dist-info/WHEEL,sha256=6wnUzulUPtamWQw9fduxmONJSMzqWZJWY62pN0Z3sA8,101 +c:\users\j\oml\platform\win32\Lib\site-packages\cffi-1.5.0.dist-info\INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +cffi/__pycache__/commontypes.cpython-35.pyc,, +cffi/__pycache__/verifier.cpython-35.pyc,, +cffi/__pycache__/setuptools_ext.cpython-35.pyc,, +cffi/__pycache__/model.cpython-35.pyc,, +cffi/__pycache__/vengine_cpy.cpython-35.pyc,, +cffi/__pycache__/backend_ctypes.cpython-35.pyc,, +cffi/__pycache__/lock.cpython-35.pyc,, +cffi/__pycache__/recompiler.cpython-35.pyc,, +cffi/__pycache__/ffiplatform.cpython-35.pyc,, +cffi/__pycache__/vengine_gen.cpython-35.pyc,, +cffi/__pycache__/cparser.cpython-35.pyc,, +cffi/__pycache__/api.cpython-35.pyc,, +cffi/__pycache__/__init__.cpython-35.pyc,, +cffi/__pycache__/cffi_opcode.cpython-35.pyc,, +cffi/__pycache__/gc_weakref.cpython-35.pyc,, diff --git a/Lib/site-packages/cffi-1.5.0.dist-info/WHEEL b/Lib/site-packages/cffi-1.5.0.dist-info/WHEEL new file mode 100644 index 0000000..184f1bd --- /dev/null +++ b/Lib/site-packages/cffi-1.5.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.24.0) +Root-Is-Purelib: false +Tag: cp35-none-win32 + diff --git a/Lib/site-packages/cffi-1.5.0.dist-info/entry_points.txt b/Lib/site-packages/cffi-1.5.0.dist-info/entry_points.txt new file mode 100644 index 0000000..eee7e0f --- /dev/null +++ b/Lib/site-packages/cffi-1.5.0.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[distutils.setup_keywords] +cffi_modules = cffi.setuptools_ext:cffi_modules + diff --git a/Lib/site-packages/cffi-1.5.0.dist-info/metadata.json b/Lib/site-packages/cffi-1.5.0.dist-info/metadata.json new file mode 100644 index 0000000..aa0ad11 --- /dev/null +++ b/Lib/site-packages/cffi-1.5.0.dist-info/metadata.json @@ -0,0 +1 @@ +{"generator": "bdist_wheel (0.24.0)", "summary": "Foreign Function Interface for Python calling C code.", "run_requires": [{"requires": ["pycparser"]}], "name": "cffi", "extras": [], "license": "MIT", "metadata_version": "2.0", "version": "1.5.0", "classifiers": ["Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy"], "extensions": {"python.details": {"contacts": [{"role": "author", "name": "Armin Rigo, Maciej Fijalkowski", "email": "python-cffi@googlegroups.com"}], "project_urls": {"Home": "http://cffi.readthedocs.org"}, "document_names": {"description": "DESCRIPTION.rst"}}, "python.exports": {"distutils.setup_keywords": {"cffi_modules": "cffi.setuptools_ext:cffi_modules"}}}} \ No newline at end of file diff --git a/Lib/site-packages/cffi-1.5.0.dist-info/top_level.txt b/Lib/site-packages/cffi-1.5.0.dist-info/top_level.txt new file mode 100644 index 
0000000..f645779 --- /dev/null +++ b/Lib/site-packages/cffi-1.5.0.dist-info/top_level.txt @@ -0,0 +1,2 @@ +_cffi_backend +cffi diff --git a/Lib/site-packages/cffi/__init__.py b/Lib/site-packages/cffi/__init__.py new file mode 100644 index 0000000..40d602b --- /dev/null +++ b/Lib/site-packages/cffi/__init__.py @@ -0,0 +1,13 @@ +__all__ = ['FFI', 'VerificationError', 'VerificationMissing', 'CDefError', + 'FFIError'] + +from .api import FFI, CDefError, FFIError +from .ffiplatform import VerificationError, VerificationMissing + +__version__ = "1.5.0" +__version_info__ = (1, 5, 0) + +# The verifier module file names are based on the CRC32 of a string that +# contains the following version number. It may be older than __version__ +# if nothing is clearly incompatible. +__version_verifier_modules__ = "0.8.6" diff --git a/Lib/site-packages/cffi/_cffi_include.h b/Lib/site-packages/cffi/_cffi_include.h new file mode 100644 index 0000000..6e90dd8 --- /dev/null +++ b/Lib/site-packages/cffi/_cffi_include.h @@ -0,0 +1,236 @@ +#define _CFFI_ +#include <Python.h> +#ifdef __cplusplus +extern "C" { +#endif +#include <stddef.h> +#include "parse_c_type.h" + +/* this block of #ifs should be kept exactly identical between + c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py + and cffi/_cffi_include.h */ +#if defined(_MSC_VER) +# include <malloc.h> /* for alloca() */ +# if _MSC_VER < 1600 /* MSVC < 2010 */ + typedef __int8 int8_t; + typedef __int16 int16_t; + typedef __int32 int32_t; + typedef __int64 int64_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + typedef unsigned __int64 uint64_t; + typedef __int8 int_least8_t; + typedef __int16 int_least16_t; + typedef __int32 int_least32_t; + typedef __int64 int_least64_t; + typedef unsigned __int8 uint_least8_t; + typedef unsigned __int16 uint_least16_t; + typedef unsigned __int32 uint_least32_t; + typedef unsigned __int64 uint_least64_t; + typedef __int8 int_fast8_t; + typedef __int16 int_fast16_t; + typedef __int32 int_fast32_t; + typedef __int64 int_fast64_t; + typedef unsigned __int8 uint_fast8_t; + typedef unsigned __int16 uint_fast16_t; + typedef unsigned __int32 uint_fast32_t; + typedef unsigned __int64 uint_fast64_t; + typedef __int64 intmax_t; + typedef unsigned __int64 uintmax_t; +# else +# include <stdint.h> +# endif +# if _MSC_VER < 1800 /* MSVC < 2013 */ + typedef unsigned char _Bool; +# endif +#else +# include <stdint.h> +# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux) +# include <alloca.h> +# endif +#endif + +#ifdef __GNUC__ +# define _CFFI_UNUSED_FN __attribute__((unused)) +#else +# define _CFFI_UNUSED_FN /* nothing */ +#endif + +/********** CPython-specific section **********/ +#ifndef PYPY_VERSION + + +#if PY_MAJOR_VERSION >= 3 +# define PyInt_FromLong PyLong_FromLong +#endif + +#define _cffi_from_c_double PyFloat_FromDouble +#define _cffi_from_c_float PyFloat_FromDouble +#define _cffi_from_c_long PyInt_FromLong +#define _cffi_from_c_ulong PyLong_FromUnsignedLong +#define _cffi_from_c_longlong PyLong_FromLongLong +#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong + +#define _cffi_to_c_double PyFloat_AsDouble +#define _cffi_to_c_float PyFloat_AsDouble + +#define _cffi_from_c_int(x, type) \ + (((type)-1) > 0 ? /* unsigned */ \ + (sizeof(type) < sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + sizeof(type) == sizeof(long) ? \ + PyLong_FromUnsignedLong((unsigned long)x) : \ + PyLong_FromUnsignedLongLong((unsigned long long)x)) : \ + (sizeof(type) <= sizeof(long) ?
\ + PyInt_FromLong((long)x) : \ + PyLong_FromLongLong((long long)x))) + +#define _cffi_to_c_int(o, type) \ + ((type)( \ + sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \ + : (type)_cffi_to_c_i8(o)) : \ + sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \ + : (type)_cffi_to_c_i16(o)) : \ + sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \ + : (type)_cffi_to_c_i32(o)) : \ + sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o) \ + : (type)_cffi_to_c_i64(o)) : \ + (Py_FatalError("unsupported size for type " #type), (type)0))) + +#define _cffi_to_c_i8 \ + ((int(*)(PyObject *))_cffi_exports[1]) +#define _cffi_to_c_u8 \ + ((int(*)(PyObject *))_cffi_exports[2]) +#define _cffi_to_c_i16 \ + ((int(*)(PyObject *))_cffi_exports[3]) +#define _cffi_to_c_u16 \ + ((int(*)(PyObject *))_cffi_exports[4]) +#define _cffi_to_c_i32 \ + ((int(*)(PyObject *))_cffi_exports[5]) +#define _cffi_to_c_u32 \ + ((unsigned int(*)(PyObject *))_cffi_exports[6]) +#define _cffi_to_c_i64 \ + ((long long(*)(PyObject *))_cffi_exports[7]) +#define _cffi_to_c_u64 \ + ((unsigned long long(*)(PyObject *))_cffi_exports[8]) +#define _cffi_to_c_char \ + ((int(*)(PyObject *))_cffi_exports[9]) +#define _cffi_from_c_pointer \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[10]) +#define _cffi_to_c_pointer \ + ((char *(*)(PyObject *, CTypeDescrObject *))_cffi_exports[11]) +#define _cffi_get_struct_layout \ + not used any more +#define _cffi_restore_errno \ + ((void(*)(void))_cffi_exports[13]) +#define _cffi_save_errno \ + ((void(*)(void))_cffi_exports[14]) +#define _cffi_from_c_char \ + ((PyObject *(*)(char))_cffi_exports[15]) +#define _cffi_from_c_deref \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[16]) +#define _cffi_to_c \ + ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[17]) +#define _cffi_from_c_struct \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[18]) +#define _cffi_to_c_wchar_t \ + ((wchar_t(*)(PyObject *))_cffi_exports[19]) +#define _cffi_from_c_wchar_t \ + ((PyObject *(*)(wchar_t))_cffi_exports[20]) +#define _cffi_to_c_long_double \ + ((long double(*)(PyObject *))_cffi_exports[21]) +#define _cffi_to_c__Bool \ + ((_Bool(*)(PyObject *))_cffi_exports[22]) +#define _cffi_prepare_pointer_call_argument \ + ((Py_ssize_t(*)(CTypeDescrObject *, PyObject *, char **))_cffi_exports[23]) +#define _cffi_convert_array_from_object \ + ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[24]) +#define _CFFI_CPIDX 25 +#define _cffi_call_python \ + ((void(*)(struct _cffi_externpy_s *, char *))_cffi_exports[_CFFI_CPIDX]) +#define _CFFI_NUM_EXPORTS 26 + +typedef struct _ctypedescr CTypeDescrObject; + +static void *_cffi_exports[_CFFI_NUM_EXPORTS]; + +#define _cffi_type(index) ( \ + assert((((uintptr_t)_cffi_types[index]) & 1) == 0), \ + (CTypeDescrObject *)_cffi_types[index]) + +static PyObject *_cffi_init(const char *module_name, Py_ssize_t version, + const struct _cffi_type_context_s *ctx) +{ + PyObject *module, *o_arg, *new_module; + void *raw[] = { + (void *)module_name, + (void *)version, + (void *)_cffi_exports, + (void *)ctx, + }; + + module = PyImport_ImportModule("_cffi_backend"); + if (module == NULL) + goto failure; + + o_arg = PyLong_FromVoidPtr((void *)raw); + if (o_arg == NULL) + goto failure; + + new_module = PyObject_CallMethod( + module, (char *)"_init_cffi_1_0_external_module", (char *)"O", o_arg); + + Py_DECREF(o_arg); + Py_DECREF(module); + return new_module; + + failure: + Py_XDECREF(module); + return NULL; +} + 
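/* Illustrative sketch only; it is not part of the upstream _cffi_include.h
   added by this patch. Assuming a hypothetical C function `int add(int, int)`
   declared through ffi.cdef(), it shows roughly how the conversion macros
   above are used by the glue code that cffi emits for an out-of-line module:
   arguments arrive as PyObject pointers, are narrowed with _cffi_to_c_int(),
   and the C result is boxed again with _cffi_from_c_int(), all through the
   _cffi_exports[] table that _cffi_backend fills in while _cffi_init() runs. */

extern int add(int x, int y);           /* hypothetical cdef'ed function */

static PyObject *_cffi_f_add(PyObject *self, PyObject *args)
{
    PyObject *arg0, *arg1;
    int x0, x1, result;

    (void)self;                         /* module object, unused here */
    if (!PyArg_ParseTuple(args, "OO:add", &arg0, &arg1))
        return NULL;

    /* convert the Python arguments to C ints, with overflow checking
       delegated to the _cffi_exports[] converters */
    x0 = _cffi_to_c_int(arg0, int);
    if (x0 == (int)-1 && PyErr_Occurred())
        return NULL;
    x1 = _cffi_to_c_int(arg1, int);
    if (x1 == (int)-1 && PyErr_Occurred())
        return NULL;

    result = add(x0, x1);

    /* box the C result as a Python integer of the matching width */
    return _cffi_from_c_int(result, int);
}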
+_CFFI_UNUSED_FN +static PyObject **_cffi_unpack_args(PyObject *args_tuple, Py_ssize_t expected, + const char *fnname) +{ + if (PyTuple_GET_SIZE(args_tuple) != expected) { + PyErr_Format(PyExc_TypeError, + "%.150s() takes exactly %zd arguments (%zd given)", + fnname, expected, PyTuple_GET_SIZE(args_tuple)); + return NULL; + } + return &PyTuple_GET_ITEM(args_tuple, 0); /* pointer to the first item, + the others follow */ +} + +/********** end CPython-specific section **********/ +#else +_CFFI_UNUSED_FN +static void (*_cffi_call_python_org)(struct _cffi_externpy_s *, char *); +# define _cffi_call_python _cffi_call_python_org +#endif + + +#define _cffi_array_len(array) (sizeof(array) / sizeof((array)[0])) + +#define _cffi_prim_int(size, sign) \ + ((size) == 1 ? ((sign) ? _CFFI_PRIM_INT8 : _CFFI_PRIM_UINT8) : \ + (size) == 2 ? ((sign) ? _CFFI_PRIM_INT16 : _CFFI_PRIM_UINT16) : \ + (size) == 4 ? ((sign) ? _CFFI_PRIM_INT32 : _CFFI_PRIM_UINT32) : \ + (size) == 8 ? ((sign) ? _CFFI_PRIM_INT64 : _CFFI_PRIM_UINT64) : \ + _CFFI__UNKNOWN_PRIM) + +#define _cffi_prim_float(size) \ + ((size) == sizeof(float) ? _CFFI_PRIM_FLOAT : \ + (size) == sizeof(double) ? _CFFI_PRIM_DOUBLE : \ + (size) == sizeof(long double) ? _CFFI__UNKNOWN_LONG_DOUBLE : \ + _CFFI__UNKNOWN_FLOAT_PRIM) + +#define _cffi_check_int(got, got_nonpos, expected) \ + ((got_nonpos) == (expected <= 0) && \ + (got) == (unsigned long long)expected) + +#ifdef __cplusplus +} +#endif diff --git a/Lib/site-packages/cffi/_embedding.h b/Lib/site-packages/cffi/_embedding.h new file mode 100644 index 0000000..aefe9d4 --- /dev/null +++ b/Lib/site-packages/cffi/_embedding.h @@ -0,0 +1,517 @@ + +/***** Support code for embedding *****/ + +#if defined(_MSC_VER) +# define CFFI_DLLEXPORT __declspec(dllexport) +#elif defined(__GNUC__) +# define CFFI_DLLEXPORT __attribute__((visibility("default"))) +#else +# define CFFI_DLLEXPORT /* nothing */ +#endif + + +/* There are two global variables of type _cffi_call_python_fnptr: + + * _cffi_call_python, which we declare just below, is the one called + by ``extern "Python"`` implementations. + + * _cffi_call_python_org, which on CPython is actually part of the + _cffi_exports[] array, is the function pointer copied from + _cffi_backend. + + After initialization is complete, both are equal. However, the + first one remains equal to &_cffi_start_and_call_python until the + very end of initialization, when we are (or should be) sure that + concurrent threads also see a completely initialized world, and + only then is it changed. 
+*/ +#undef _cffi_call_python +typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *); +static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *); +static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python; + + +#ifndef _MSC_VER + /* --- Assuming a GCC not infinitely old --- */ +# define cffi_compare_and_swap(l,o,n) __sync_bool_compare_and_swap(l,o,n) +# define cffi_write_barrier() __sync_synchronize() +# if !defined(__amd64__) && !defined(__x86_64__) && \ + !defined(__i386__) && !defined(__i386) +# define cffi_read_barrier() __sync_synchronize() +# else +# define cffi_read_barrier() (void)0 +# endif +#else + /* --- Windows threads version --- */ +# include +# define cffi_compare_and_swap(l,o,n) \ + (InterlockedCompareExchangePointer(l,n,o) == (o)) +# define cffi_write_barrier() InterlockedCompareExchange(&_cffi_dummy,0,0) +# define cffi_read_barrier() (void)0 +static volatile LONG _cffi_dummy; +#endif + +#ifdef WITH_THREAD +# ifndef _MSC_VER +# include + static pthread_mutex_t _cffi_embed_startup_lock; +# else + static CRITICAL_SECTION _cffi_embed_startup_lock; +# endif + static char _cffi_embed_startup_lock_ready = 0; +#endif + +static void _cffi_acquire_reentrant_mutex(void) +{ + static void *volatile lock = NULL; + + while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) { + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: pthread_mutex_init() should be very fast, and + this is only run at start-up anyway. */ + } + +#ifdef WITH_THREAD + if (!_cffi_embed_startup_lock_ready) { +# ifndef _MSC_VER + pthread_mutexattr_t attr; + pthread_mutexattr_init(&attr); + pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE); + pthread_mutex_init(&_cffi_embed_startup_lock, &attr); +# else + InitializeCriticalSection(&_cffi_embed_startup_lock); +# endif + _cffi_embed_startup_lock_ready = 1; + } +#endif + + while (!cffi_compare_and_swap(&lock, (void *)1, NULL)) + ; + +#ifndef _MSC_VER + pthread_mutex_lock(&_cffi_embed_startup_lock); +#else + EnterCriticalSection(&_cffi_embed_startup_lock); +#endif +} + +static void _cffi_release_reentrant_mutex(void) +{ +#ifndef _MSC_VER + pthread_mutex_unlock(&_cffi_embed_startup_lock); +#else + LeaveCriticalSection(&_cffi_embed_startup_lock); +#endif +} + + +/********** CPython-specific section **********/ +#ifndef PYPY_VERSION + + +#define _cffi_call_python_org _cffi_exports[_CFFI_CPIDX] + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void); /* forward */ + +static void _cffi_py_initialize(void) +{ + /* XXX use initsigs=0, which "skips initialization registration of + signal handlers, which might be useful when Python is + embedded" according to the Python docs. But review and think + if it should be a user-controllable setting. + + XXX we should also give a way to write errors to a buffer + instead of to stderr. + + XXX if importing 'site' fails, CPython (any version) calls + exit(). Should we try to work around this behavior here? + */ + Py_InitializeEx(0); +} + +static int _cffi_initialize_python(void) +{ + /* This initializes Python, imports _cffi_backend, and then the + present .dll/.so is set up as a CPython C extension module. + */ + int result; + PyGILState_STATE state; + PyObject *pycode=NULL, *global_dict=NULL, *x; + +#if PY_MAJOR_VERSION >= 3 + /* see comments in _cffi_carefully_make_gil() about the + Python2/Python3 difference + */ +#else + /* Acquire the GIL. We have no threadstate here. 
If Python is + already initialized, it is possible that there is already one + existing for this thread, but it is not made current now. + */ + PyEval_AcquireLock(); + + _cffi_py_initialize(); + + /* The Py_InitializeEx() sometimes made a threadstate for us, but + not always. Indeed Py_InitializeEx() could be called and do + nothing. So do we have a threadstate, or not? We don't know, + but we can replace it with NULL in all cases. + */ + (void)PyThreadState_Swap(NULL); + + /* Now we can release the GIL and re-acquire immediately using the + logic of PyGILState(), which handles making or installing the + correct threadstate. + */ + PyEval_ReleaseLock(); +#endif + state = PyGILState_Ensure(); + + /* Call the initxxx() function from the present module. It will + create and initialize us as a CPython extension module, instead + of letting the startup Python code do it---it might reimport + the same .dll/.so and get maybe confused on some platforms. + It might also have troubles locating the .dll/.so again for all + I know. + */ + (void)_CFFI_PYTHON_STARTUP_FUNC(); + if (PyErr_Occurred()) + goto error; + + /* Now run the Python code provided to ffi.embedding_init_code(). + */ + pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE, + "", + Py_file_input); + if (pycode == NULL) + goto error; + global_dict = PyDict_New(); + if (global_dict == NULL) + goto error; + if (PyDict_SetItemString(global_dict, "__builtins__", + PyThreadState_GET()->interp->builtins) < 0) + goto error; + x = PyEval_EvalCode( +#if PY_MAJOR_VERSION < 3 + (PyCodeObject *) +#endif + pycode, global_dict, global_dict); + if (x == NULL) + goto error; + Py_DECREF(x); + + /* Done! Now if we've been called from + _cffi_start_and_call_python() in an ``extern "Python"``, we can + only hope that the Python code did correctly set up the + corresponding @ffi.def_extern() function. Otherwise, the + general logic of ``extern "Python"`` functions (inside the + _cffi_backend module) will find that the reference is still + missing and print an error. + */ + result = 0; + done: + Py_XDECREF(pycode); + Py_XDECREF(global_dict); + PyGILState_Release(state); + return result; + + error:; + { + /* Print as much information as potentially useful. + Debugging load-time failures with embedding is not fun + */ + PyObject *exception, *v, *tb, *f, *modules, *mod; + PyErr_Fetch(&exception, &v, &tb); + if (exception != NULL) { + PyErr_NormalizeException(&exception, &v, &tb); + PyErr_Display(exception, v, tb); + } + Py_XDECREF(exception); + Py_XDECREF(v); + Py_XDECREF(tb); + + f = PySys_GetObject((char *)"stderr"); + if (f != NULL && f != Py_None) { + PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME + "\ncompiled with cffi version: 1.5.0" + "\n_cffi_backend module: ", f); + modules = PyImport_GetModuleDict(); + mod = PyDict_GetItemString(modules, "_cffi_backend"); + if (mod == NULL) { + PyFile_WriteString("not loaded", f); + } + else { + v = PyObject_GetAttrString(mod, "__file__"); + PyFile_WriteObject(v, f, 0); + Py_XDECREF(v); + } + PyFile_WriteString("\nsys.path: ", f); + PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0); + PyFile_WriteString("\n\n", f); + } + } + result = -1; + goto done; +} + +PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */ + +static int _cffi_carefully_make_gil(void) +{ + /* This does the basic initialization of Python. It can be called + completely concurrently from unrelated threads. It assumes + that we don't hold the GIL before (if it exists), and we don't + hold it afterwards. 
+ + What it really does is completely different in Python 2 and + Python 3. + + Python 2 + ======== + + Initialize the GIL, without initializing the rest of Python, + by calling PyEval_InitThreads(). + + PyEval_InitThreads() must not be called concurrently at all. + So we use a global variable as a simple spin lock. This global + variable must be from 'libpythonX.Y.so', not from this + cffi-based extension module, because it must be shared from + different cffi-based extension modules. We choose + _PyParser_TokenNames[0] as a completely arbitrary pointer value + that is never written to. The default is to point to the + string "ENDMARKER". We change it temporarily to point to the + next character in that string. (Yes, I know it's REALLY + obscure.) + + Python 3 + ======== + + In Python 3, PyEval_InitThreads() cannot be called before + Py_InitializeEx() any more. So this function calls + Py_InitializeEx() first. It uses the same obscure logic to + make sure we never call it concurrently. + + Arguably, this is less good on the spinlock, because + Py_InitializeEx() takes much longer to run than + PyEval_InitThreads(). But I didn't find a way around it. + */ + +#ifdef WITH_THREAD + char *volatile *lock = (char *volatile *)_PyParser_TokenNames; + char *old_value; + + while (1) { /* spin loop */ + old_value = *lock; + if (old_value[0] == 'E') { + assert(old_value[1] == 'N'); + if (cffi_compare_and_swap(lock, old_value, old_value + 1)) + break; + } + else { + assert(old_value[0] == 'N'); + /* should ideally do a spin loop instruction here, but + hard to do it portably and doesn't really matter I + think: PyEval_InitThreads() should be very fast, and + this is only run at start-up anyway. */ + } + } +#endif + +#if PY_MAJOR_VERSION >= 3 + /* Python 3: call Py_InitializeEx() */ + { + PyGILState_STATE state = PyGILState_UNLOCKED; + if (!Py_IsInitialized()) + _cffi_py_initialize(); + else + state = PyGILState_Ensure(); + + PyEval_InitThreads(); + PyGILState_Release(state); + } +#else + /* Python 2: call PyEval_InitThreads() */ +# ifdef WITH_THREAD + if (!PyEval_ThreadsInitialized()) { + PyEval_InitThreads(); /* makes the GIL */ + PyEval_ReleaseLock(); /* then release it */ + } + /* else: there is already a GIL, but we still needed to do the + spinlock dance to make sure that we see it as fully ready */ +# endif +#endif + +#ifdef WITH_THREAD + /* release the lock */ + while (!cffi_compare_and_swap(lock, old_value + 1, old_value)) + ; +#endif + + return 0; +} + +/********** end CPython-specific section **********/ + + +#else + + +/********** PyPy-specific section **********/ + +PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */ + +static struct _cffi_pypy_init_s { + const char *name; + void (*func)(const void *[]); + const char *code; +} _cffi_pypy_init = { + _CFFI_MODULE_NAME, + _CFFI_PYTHON_STARTUP_FUNC, + _CFFI_PYTHON_STARTUP_CODE, +}; + +extern int pypy_carefully_make_gil(const char *); +extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *); + +static int _cffi_carefully_make_gil(void) +{ + return pypy_carefully_make_gil(_CFFI_MODULE_NAME); +} + +static int _cffi_initialize_python(void) +{ + return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init); +} + +/********** end PyPy-specific section **********/ + + +#endif + + +#ifdef __GNUC__ +__attribute__((noinline)) +#endif +static _cffi_call_python_fnptr _cffi_start_python(void) +{ + /* Delicate logic to initialize Python. This function can be + called multiple times concurrently, e.g. 
when the process calls + its first ``extern "Python"`` functions in multiple threads at + once. It can also be called recursively, in which case we must + ignore it. We also have to consider what occurs if several + different cffi-based extensions reach this code in parallel + threads---it is a different copy of the code, then, and we + can't have any shared global variable unless it comes from + 'libpythonX.Y.so'. + + Idea: + + * _cffi_carefully_make_gil(): "carefully" call + PyEval_InitThreads() (possibly with Py_InitializeEx() first). + + * then we use a (local) custom lock to make sure that a call to this + cffi-based extension will wait if another call to the *same* + extension is running the initialization in another thread. + It is reentrant, so that a recursive call will not block, but + only one from a different thread. + + * then we grab the GIL and (Python 2) we call Py_InitializeEx(). + At this point, concurrent calls to Py_InitializeEx() are not + possible: we have the GIL. + + * do the rest of the specific initialization, which may + temporarily release the GIL but not the custom lock. + Only release the custom lock when we are done. + */ + static char called = 0; + + if (_cffi_carefully_make_gil() != 0) + return NULL; + + _cffi_acquire_reentrant_mutex(); + + /* Here the GIL exists, but we don't have it. We're only protected + from concurrency by the reentrant mutex. */ + + /* This file only initializes the embedded module once, the first + time this is called, even if there are subinterpreters. */ + if (!called) { + called = 1; /* invoke _cffi_initialize_python() only once, + but don't set '_cffi_call_python' right now, + otherwise concurrent threads won't call + this function at all (we need them to wait) */ + if (_cffi_initialize_python() == 0) { + /* now initialization is finished. Switch to the fast-path. */ + + /* We would like nobody to see the new value of + '_cffi_call_python' without also seeing the rest of the + data initialized. However, this is not possible. But + the new value of '_cffi_call_python' is the function + 'cffi_call_python()' from _cffi_backend. So: */ + cffi_write_barrier(); + /* ^^^ we put a write barrier here, and a corresponding + read barrier at the start of cffi_call_python(). This + ensures that after that read barrier, we see everything + done here before the write barrier. + */ + + assert(_cffi_call_python_org != NULL); + _cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org; + } + else { + /* initialization failed. Reset this to NULL, even if it was + already set to some other value. Future calls to + _cffi_start_python() are still forced to occur, and will + always return NULL from now on. */ + _cffi_call_python_org = NULL; + } + } + + _cffi_release_reentrant_mutex(); + + return (_cffi_call_python_fnptr)_cffi_call_python_org; +} + +static +void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args) +{ + _cffi_call_python_fnptr fnptr; + int current_err = errno; +#ifdef _MSC_VER + int current_lasterr = GetLastError(); +#endif + fnptr = _cffi_start_python(); + if (fnptr == NULL) { + fprintf(stderr, "function %s() called, but initialization code " + "failed. Returning 0.\n", externpy->name); + memset(args, 0, externpy->size_of_result); + } +#ifdef _MSC_VER + SetLastError(current_lasterr); +#endif + errno = current_err; + + if (fnptr != NULL) + fnptr(externpy, args); +} + + +/* The cffi_start_python() function makes sure Python is initialized + and our cffi module is set up. 
It can be called manually from the + user C code. The same effect is obtained automatically from any + dll-exported ``extern "Python"`` function. This function returns + -1 if initialization failed, 0 if all is OK. */ +_CFFI_UNUSED_FN +static int cffi_start_python(void) +{ + if (_cffi_call_python == &_cffi_start_and_call_python) { + if (_cffi_start_python() == NULL) + return -1; + } + cffi_read_barrier(); + return 0; +} + +#undef cffi_compare_and_swap +#undef cffi_write_barrier +#undef cffi_read_barrier diff --git a/Lib/site-packages/cffi/api.py b/Lib/site-packages/cffi/api.py new file mode 100644 index 0000000..b98b86c --- /dev/null +++ b/Lib/site-packages/cffi/api.py @@ -0,0 +1,827 @@ +import sys, types +from .lock import allocate_lock + +try: + callable +except NameError: + # Python 3.1 + from collections import Callable + callable = lambda x: isinstance(x, Callable) + +try: + basestring +except NameError: + # Python 3.x + basestring = str + + +class FFIError(Exception): + pass + +class CDefError(Exception): + def __str__(self): + try: + line = 'line %d: ' % (self.args[1].coord.line,) + except (AttributeError, TypeError, IndexError): + line = '' + return '%s%s' % (line, self.args[0]) + + +class FFI(object): + r''' + The main top-level class that you instantiate once, or once per module. + + Example usage: + + ffi = FFI() + ffi.cdef(""" + int printf(const char *, ...); + """) + + C = ffi.dlopen(None) # standard library + -or- + C = ffi.verify() # use a C compiler: verify the decl above is right + + C.printf("hello, %s!\n", ffi.new("char[]", "world")) + ''' + + def __init__(self, backend=None): + """Create an FFI instance. The 'backend' argument is used to + select a non-default backend, mostly for tests. + """ + from . import cparser, model + if backend is None: + # You need PyPy (>= 2.0 beta), or a CPython (>= 2.6) with + # _cffi_backend.so compiled. + import _cffi_backend as backend + from . import __version__ + assert backend.__version__ == __version__, \ + "version mismatch, %s != %s" % (backend.__version__, __version__) + # (If you insist you can also try to pass the option + # 'backend=backend_ctypes.CTypesBackend()', but don't + # rely on it! It's probably not going to work well.) + + self._backend = backend + self._lock = allocate_lock() + self._parser = cparser.Parser() + self._cached_btypes = {} + self._parsed_types = types.ModuleType('parsed_types').__dict__ + self._new_types = types.ModuleType('new_types').__dict__ + self._function_caches = [] + self._libraries = [] + self._cdefsources = [] + self._included_ffis = [] + self._windows_unicode = None + self._init_once_cache = {} + self._cdef_version = None + self._embedding = None + if hasattr(backend, 'set_ffi'): + backend.set_ffi(self) + for name in backend.__dict__: + if name.startswith('RTLD_'): + setattr(self, name, getattr(backend, name)) + # + with self._lock: + self.BVoidP = self._get_cached_btype(model.voidp_type) + self.BCharA = self._get_cached_btype(model.char_array_type) + if isinstance(backend, types.ModuleType): + # _cffi_backend: attach these constants to the class + if not hasattr(FFI, 'NULL'): + FFI.NULL = self.cast(self.BVoidP, 0) + FFI.CData, FFI.CType = backend._get_types() + else: + # ctypes backend: attach these constants to the instance + self.NULL = self.cast(self.BVoidP, 0) + self.CData, self.CType = backend._get_types() + + def cdef(self, csource, override=False, packed=False): + """Parse the given C source. This registers all declared functions, + types, and global variables. 
The functions and global variables can + then be accessed via either 'ffi.dlopen()' or 'ffi.verify()'. + The types can be used in 'ffi.new()' and other functions. + If 'packed' is specified as True, all structs declared inside this + cdef are packed, i.e. laid out without any field alignment at all. + """ + self._cdef(csource, override=override, packed=packed) + + def embedding_api(self, csource, packed=False): + self._cdef(csource, packed=packed, dllexport=True) + if self._embedding is None: + self._embedding = '' + + def _cdef(self, csource, override=False, **options): + if not isinstance(csource, str): # unicode, on Python 2 + if not isinstance(csource, basestring): + raise TypeError("cdef() argument must be a string") + csource = csource.encode('ascii') + with self._lock: + self._cdef_version = object() + self._parser.parse(csource, override=override, **options) + self._cdefsources.append(csource) + if override: + for cache in self._function_caches: + cache.clear() + finishlist = self._parser._recomplete + if finishlist: + self._parser._recomplete = [] + for tp in finishlist: + tp.finish_backend_type(self, finishlist) + + def dlopen(self, name, flags=0): + """Load and return a dynamic library identified by 'name'. + The standard C library can be loaded by passing None. + Note that functions and types declared by 'ffi.cdef()' are not + linked to a particular library, just like C headers; in the + library we only look for the actual (untyped) symbols. + """ + assert isinstance(name, basestring) or name is None + with self._lock: + lib, function_cache = _make_ffi_library(self, name, flags) + self._function_caches.append(function_cache) + self._libraries.append(lib) + return lib + + def _typeof_locked(self, cdecl): + # call me with the lock! + key = cdecl + if key in self._parsed_types: + return self._parsed_types[key] + # + if not isinstance(cdecl, str): # unicode, on Python 2 + cdecl = cdecl.encode('ascii') + # + type = self._parser.parse_type(cdecl) + really_a_function_type = type.is_raw_function + if really_a_function_type: + type = type.as_function_pointer() + btype = self._get_cached_btype(type) + result = btype, really_a_function_type + self._parsed_types[key] = result + return result + + def _typeof(self, cdecl, consider_function_as_funcptr=False): + # string -> ctype object + try: + result = self._parsed_types[cdecl] + except KeyError: + with self._lock: + result = self._typeof_locked(cdecl) + # + btype, really_a_function_type = result + if really_a_function_type and not consider_function_as_funcptr: + raise CDefError("the type %r is a function type, not a " + "pointer-to-function type" % (cdecl,)) + return btype + + def typeof(self, cdecl): + """Parse the C type given as a string and return the + corresponding object. + It can also be used on 'cdata' instance to get its C type. + """ + if isinstance(cdecl, basestring): + return self._typeof(cdecl) + if isinstance(cdecl, self.CData): + return self._backend.typeof(cdecl) + if isinstance(cdecl, types.BuiltinFunctionType): + res = _builtin_function_type(cdecl) + if res is not None: + return res + if (isinstance(cdecl, types.FunctionType) + and hasattr(cdecl, '_cffi_base_type')): + with self._lock: + return self._get_cached_btype(cdecl._cffi_base_type) + raise TypeError(type(cdecl)) + + def sizeof(self, cdecl): + """Return the size in bytes of the argument. It can be a + string naming a C type, or a 'cdata' instance. 
+ """ + if isinstance(cdecl, basestring): + BType = self._typeof(cdecl) + return self._backend.sizeof(BType) + else: + return self._backend.sizeof(cdecl) + + def alignof(self, cdecl): + """Return the natural alignment size in bytes of the C type + given as a string. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.alignof(cdecl) + + def offsetof(self, cdecl, *fields_or_indexes): + """Return the offset of the named field inside the given + structure or array, which must be given as a C type name. + You can give several field names in case of nested structures. + You can also give numeric values which correspond to array + items, in case of an array type. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._typeoffsetof(cdecl, *fields_or_indexes)[1] + + def new(self, cdecl, init=None): + """Allocate an instance according to the specified C type and + return a pointer to it. The specified C type must be either a + pointer or an array: ``new('X *')`` allocates an X and returns + a pointer to it, whereas ``new('X[n]')`` allocates an array of + n X'es and returns an array referencing it (which works + mostly like a pointer, like in C). You can also use + ``new('X[]', n)`` to allocate an array of a non-constant + length n. + + The memory is initialized following the rules of declaring a + global variable in C: by default it is zero-initialized, but + an explicit initializer can be given which can be used to + fill all or part of the memory. + + When the returned object goes out of scope, the memory + is freed. In other words the returned object has + ownership of the value of type 'cdecl' that it points to. This + means that the raw data can be used as long as this object is + kept alive, but must not be used for a longer time. Be careful + about that when copying the pointer to the memory somewhere + else, e.g. into another structure. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.newp(cdecl, init) + + def new_allocator(self, alloc=None, free=None, + should_clear_after_alloc=True): + """Return a new allocator, i.e. a function that behaves like ffi.new() + but uses the provided low-level 'alloc' and 'free' functions. + + 'alloc' is called with the size as argument. If it returns NULL, a + MemoryError is raised. 'free' is called with the result of 'alloc' + as argument. Both can be either Python function or directly C + functions. If 'free' is None, then no free function is called. + If both 'alloc' and 'free' are None, the default is used. + + If 'should_clear_after_alloc' is set to False, then the memory + returned by 'alloc' is assumed to be already cleared (or you are + fine with garbage); otherwise CFFI will clear it. + """ + compiled_ffi = self._backend.FFI() + allocator = compiled_ffi.new_allocator(alloc, free, + should_clear_after_alloc) + def allocate(cdecl, init=None): + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return allocator(cdecl, init) + return allocate + + def cast(self, cdecl, source): + """Similar to a C cast: returns an instance of the named C + type initialized with the given 'source'. The source is + casted between integers or pointers of any type. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.cast(cdecl, source) + + def string(self, cdata, maxlen=-1): + """Return a Python string (or unicode string) from the 'cdata'. 
+ If 'cdata' is a pointer or array of characters or bytes, returns + the null-terminated string. The returned string extends until + the first null character, or at most 'maxlen' characters. If + 'cdata' is an array then 'maxlen' defaults to its length. + + If 'cdata' is a pointer or array of wchar_t, returns a unicode + string following the same rules. + + If 'cdata' is a single character or byte or a wchar_t, returns + it as a string or unicode string. + + If 'cdata' is an enum, returns the value of the enumerator as a + string, or 'NUMBER' if the value is out of range. + """ + return self._backend.string(cdata, maxlen) + + def buffer(self, cdata, size=-1): + """Return a read-write buffer object that references the raw C data + pointed to by the given 'cdata'. The 'cdata' must be a pointer or + an array. Can be passed to functions expecting a buffer, or directly + manipulated with: + + buf[:] get a copy of it in a regular string, or + buf[idx] as a single character + buf[:] = ... + buf[idx] = ... change the content + """ + return self._backend.buffer(cdata, size) + + def from_buffer(self, python_buffer): + """Return a that points to the data of the + given Python object, which must support the buffer interface. + Note that this is not meant to be used on the built-in types str, + unicode, or bytearray (you can build 'char[]' arrays explicitly) + but only on objects containing large quantities of raw data + in some other format, like 'array.array' or numpy arrays. + """ + return self._backend.from_buffer(self.BCharA, python_buffer) + + def memmove(self, dest, src, n): + """ffi.memmove(dest, src, n) copies n bytes of memory from src to dest. + + Like the C function memmove(), the memory areas may overlap; + apart from that it behaves like the C function memcpy(). + + 'src' can be any cdata ptr or array, or any Python buffer object. + 'dest' can be any cdata ptr or array, or a writable Python buffer + object. The size to copy, 'n', is always measured in bytes. + + Unlike other methods, this one supports all Python buffer including + byte strings and bytearrays---but it still does not support + non-contiguous buffers. + """ + return self._backend.memmove(dest, src, n) + + def callback(self, cdecl, python_callable=None, error=None, onerror=None): + """Return a callback object or a decorator making such a + callback object. 'cdecl' must name a C function pointer type. + The callback invokes the specified 'python_callable' (which may + be provided either directly or via a decorator). Important: the + callback object must be manually kept alive for as long as the + callback may be invoked from the C level. + """ + def callback_decorator_wrap(python_callable): + if not callable(python_callable): + raise TypeError("the 'python_callable' argument " + "is not callable") + return self._backend.callback(cdecl, python_callable, + error, onerror) + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl, consider_function_as_funcptr=True) + if python_callable is None: + return callback_decorator_wrap # decorator mode + else: + return callback_decorator_wrap(python_callable) # direct mode + + def getctype(self, cdecl, replace_with=''): + """Return a string giving the C type 'cdecl', which may be itself + a string or a object. If 'replace_with' is given, it gives + extra text to append (or insert for more complicated C types), like + a variable name, or '*' to get actually the C type 'pointer-to-cdecl'. 
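# ---------------------------------------------------------------------------
# Editor's illustrative sketch -- not part of the patched cffi sources.
# Exercises ffi.string(), ffi.buffer(), ffi.memmove() and ffi.callback() as
# documented above.  The qsort() prototype and the use of the standard C
# library via dlopen(None) are POSIX-flavoured assumptions of the example.
# ---------------------------------------------------------------------------
import cffi

ffi_demo = cffi.FFI()
buf = ffi_demo.new("char[]", b"hello")
assert ffi_demo.string(buf) == b"hello"           # stops at the first NUL
ffi_demo.memmove(buf, b"HELLO", 5)                # src may be a Python buffer
assert ffi_demo.buffer(buf, 5)[:] == b"HELLO"

ffi_demo.cdef("void qsort(void *, size_t, size_t,"
              " int(*)(const void *, const void *));")
libc = ffi_demo.dlopen(None)

@ffi_demo.callback("int(const void *, const void *)")
def compare_ints(a, b):       # must be kept alive while C may still call it
    return ffi_demo.cast("int *", a)[0] - ffi_demo.cast("int *", b)[0]

values = ffi_demo.new("int[]", [3, 1, 2])
libc.qsort(values, 3, ffi_demo.sizeof("int"), compare_ints)
assert list(values) == [1, 2, 3]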
+ """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + replace_with = replace_with.strip() + if (replace_with.startswith('*') + and '&[' in self._backend.getcname(cdecl, '&')): + replace_with = '(%s)' % replace_with + elif replace_with and not replace_with[0] in '[(': + replace_with = ' ' + replace_with + return self._backend.getcname(cdecl, replace_with) + + def gc(self, cdata, destructor): + """Return a new cdata object that points to the same + data. Later, when this new cdata object is garbage-collected, + 'destructor(old_cdata_object)' will be called. + """ + try: + gcp = self._backend.gcp + except AttributeError: + pass + else: + return gcp(cdata, destructor) + # + with self._lock: + try: + gc_weakrefs = self.gc_weakrefs + except AttributeError: + from .gc_weakref import GcWeakrefs + gc_weakrefs = self.gc_weakrefs = GcWeakrefs(self) + return gc_weakrefs.build(cdata, destructor) + + def _get_cached_btype(self, type): + assert self._lock.acquire(False) is False + # call me with the lock! + try: + BType = self._cached_btypes[type] + except KeyError: + finishlist = [] + BType = type.get_cached_btype(self, finishlist) + for type in finishlist: + type.finish_backend_type(self, finishlist) + return BType + + def verify(self, source='', tmpdir=None, **kwargs): + """Verify that the current ffi signatures compile on this + machine, and return a dynamic library object. The dynamic + library can be used to call functions and access global + variables declared in this 'ffi'. The library is compiled + by the C compiler: it gives you C-level API compatibility + (including calling macros). This is unlike 'ffi.dlopen()', + which requires binary compatibility in the signatures. + """ + from .verifier import Verifier, _caller_dir_pycache + # + # If set_unicode(True) was called, insert the UNICODE and + # _UNICODE macro declarations + if self._windows_unicode: + self._apply_windows_unicode(kwargs) + # + # Set the tmpdir here, and not in Verifier.__init__: it picks + # up the caller's directory, which we want to be the caller of + # ffi.verify(), as opposed to the caller of Veritier(). + tmpdir = tmpdir or _caller_dir_pycache() + # + # Make a Verifier() and use it to load the library. + self.verifier = Verifier(self, source, tmpdir, **kwargs) + lib = self.verifier.load_library() + # + # Save the loaded library for keep-alive purposes, even + # if the caller doesn't keep it alive itself (it should). + self._libraries.append(lib) + return lib + + def _get_errno(self): + return self._backend.get_errno() + def _set_errno(self, errno): + self._backend.set_errno(errno) + errno = property(_get_errno, _set_errno, None, + "the value of 'errno' from/to the C calls") + + def getwinerror(self, code=-1): + return self._backend.getwinerror(code) + + def _pointer_to(self, ctype): + from . import model + with self._lock: + return model.pointer_cache(self, ctype) + + def addressof(self, cdata, *fields_or_indexes): + """Return the address of a . + If 'fields_or_indexes' are given, returns the address of that + field or array item in the structure or array, recursively in + case of nested structures. 
+ """ + ctype = self._backend.typeof(cdata) + if fields_or_indexes: + ctype, offset = self._typeoffsetof(ctype, *fields_or_indexes) + else: + if ctype.kind == "pointer": + raise TypeError("addressof(pointer)") + offset = 0 + ctypeptr = self._pointer_to(ctype) + return self._backend.rawaddressof(ctypeptr, cdata, offset) + + def _typeoffsetof(self, ctype, field_or_index, *fields_or_indexes): + ctype, offset = self._backend.typeoffsetof(ctype, field_or_index) + for field1 in fields_or_indexes: + ctype, offset1 = self._backend.typeoffsetof(ctype, field1, 1) + offset += offset1 + return ctype, offset + + def include(self, ffi_to_include): + """Includes the typedefs, structs, unions and enums defined + in another FFI instance. Usage is similar to a #include in C, + where a part of the program might include types defined in + another part for its own usage. Note that the include() + method has no effect on functions, constants and global + variables, which must anyway be accessed directly from the + lib object returned by the original FFI instance. + """ + if not isinstance(ffi_to_include, FFI): + raise TypeError("ffi.include() expects an argument that is also of" + " type cffi.FFI, not %r" % ( + type(ffi_to_include).__name__,)) + if ffi_to_include is self: + raise ValueError("self.include(self)") + with ffi_to_include._lock: + with self._lock: + self._parser.include(ffi_to_include._parser) + self._cdefsources.append('[') + self._cdefsources.extend(ffi_to_include._cdefsources) + self._cdefsources.append(']') + self._included_ffis.append(ffi_to_include) + + def new_handle(self, x): + return self._backend.newp_handle(self.BVoidP, x) + + def from_handle(self, x): + return self._backend.from_handle(x) + + def set_unicode(self, enabled_flag): + """Windows: if 'enabled_flag' is True, enable the UNICODE and + _UNICODE defines in C, and declare the types like TCHAR and LPTCSTR + to be (pointers to) wchar_t. If 'enabled_flag' is False, + declare these types to be (pointers to) plain 8-bit characters. + This is mostly for backward compatibility; you usually want True. 
+ """ + if self._windows_unicode is not None: + raise ValueError("set_unicode() can only be called once") + enabled_flag = bool(enabled_flag) + if enabled_flag: + self.cdef("typedef wchar_t TBYTE;" + "typedef wchar_t TCHAR;" + "typedef const wchar_t *LPCTSTR;" + "typedef const wchar_t *PCTSTR;" + "typedef wchar_t *LPTSTR;" + "typedef wchar_t *PTSTR;" + "typedef TBYTE *PTBYTE;" + "typedef TCHAR *PTCHAR;") + else: + self.cdef("typedef char TBYTE;" + "typedef char TCHAR;" + "typedef const char *LPCTSTR;" + "typedef const char *PCTSTR;" + "typedef char *LPTSTR;" + "typedef char *PTSTR;" + "typedef TBYTE *PTBYTE;" + "typedef TCHAR *PTCHAR;") + self._windows_unicode = enabled_flag + + def _apply_windows_unicode(self, kwds): + defmacros = kwds.get('define_macros', ()) + if not isinstance(defmacros, (list, tuple)): + raise TypeError("'define_macros' must be a list or tuple") + defmacros = list(defmacros) + [('UNICODE', '1'), + ('_UNICODE', '1')] + kwds['define_macros'] = defmacros + + def _apply_embedding_fix(self, kwds): + # must include an argument like "-lpython2.7" for the compiler + def ensure(key, value): + lst = kwds.setdefault(key, []) + if value not in lst: + lst.append(value) + # + if '__pypy__' in sys.builtin_module_names: + if hasattr(sys, 'prefix'): + import os + ensure('library_dirs', os.path.join(sys.prefix, 'bin')) + pythonlib = "pypy-c" + else: + if sys.platform == "win32": + template = "python%d%d" + if sys.flags.debug: + template = template + '_d' + else: + template = "python%d.%d" + pythonlib = (template % + (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) + if hasattr(sys, 'abiflags'): + pythonlib += sys.abiflags + ensure('libraries', pythonlib) + if sys.platform == "win32": + ensure('extra_link_args', '/MANIFEST') + + def set_source(self, module_name, source, source_extension='.c', **kwds): + if hasattr(self, '_assigned_source'): + raise ValueError("set_source() cannot be called several times " + "per ffi object") + if not isinstance(module_name, basestring): + raise TypeError("'module_name' must be a string") + self._assigned_source = (str(module_name), source, + source_extension, kwds) + + def distutils_extension(self, tmpdir='build', verbose=True): + from distutils.dir_util import mkpath + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + if hasattr(self, 'verifier'): # fallback, 'tmpdir' ignored + return self.verifier.get_extension() + raise ValueError("set_source() must be called before" + " distutils_extension()") + module_name, source, source_extension, kwds = self._assigned_source + if source is None: + raise TypeError("distutils_extension() is only for C extension " + "modules, not for dlopen()-style pure Python " + "modules") + mkpath(tmpdir) + ext, updated = recompile(self, module_name, + source, tmpdir=tmpdir, extradir=tmpdir, + source_extension=source_extension, + call_c_compiler=False, **kwds) + if verbose: + if updated: + sys.stderr.write("regenerated: %r\n" % (ext.sources[0],)) + else: + sys.stderr.write("not modified: %r\n" % (ext.sources[0],)) + return ext + + def emit_c_code(self, filename): + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before emit_c_code()") + module_name, source, source_extension, kwds = self._assigned_source + if source is None: + raise TypeError("emit_c_code() is only for C extension modules, " + "not for dlopen()-style pure Python modules") + recompile(self, module_name, source, + c_file=filename, 
call_c_compiler=False, **kwds) + + def emit_python_code(self, filename): + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before emit_c_code()") + module_name, source, source_extension, kwds = self._assigned_source + if source is not None: + raise TypeError("emit_python_code() is only for dlopen()-style " + "pure Python modules, not for C extension modules") + recompile(self, module_name, source, + c_file=filename, call_c_compiler=False, **kwds) + + def compile(self, tmpdir='.', verbose=0, target=None): + """The 'target' argument gives the final file name of the + compiled DLL. Use '*' to force distutils' choice, suitable for + regular CPython C API modules. Use a file name ending in '.*' + to ask for the system's default extension for dynamic libraries + (.so/.dll/.dylib). + + The default is '*' when building a non-embedded C API extension, + and (module_name + '.*') when building an embedded library. + """ + from .recompiler import recompile + # + if not hasattr(self, '_assigned_source'): + raise ValueError("set_source() must be called before compile()") + module_name, source, source_extension, kwds = self._assigned_source + return recompile(self, module_name, source, tmpdir=tmpdir, + target=target, source_extension=source_extension, + compiler_verbose=verbose, **kwds) + + def init_once(self, func, tag): + # Read _init_once_cache[tag], which is either (False, lock) if + # we're calling the function now in some thread, or (True, result). + # Don't call setdefault() in most cases, to avoid allocating and + # immediately freeing a lock; but still use setdefaut() to avoid + # races. + try: + x = self._init_once_cache[tag] + except KeyError: + x = self._init_once_cache.setdefault(tag, (False, allocate_lock())) + # Common case: we got (True, result), so we return the result. + if x[0]: + return x[1] + # Else, it's a lock. Acquire it to serialize the following tests. + with x[1]: + # Read again from _init_once_cache the current status. + x = self._init_once_cache[tag] + if x[0]: + return x[1] + # Call the function and store the result back. + result = func() + self._init_once_cache[tag] = (True, result) + return result + + def embedding_init_code(self, pysource): + if self._embedding: + raise ValueError("embedding_init_code() can only be called once") + # fix 'pysource' before it gets dumped into the C file: + # - remove empty lines at the beginning, so it starts at "line 1" + # - dedent, if all non-empty lines are indented + # - check for SyntaxErrors + import re + match = re.match(r'\s*\n', pysource) + if match: + pysource = pysource[match.end():] + lines = pysource.splitlines() or [''] + prefix = re.match(r'\s*', lines[0]).group() + for i in range(1, len(lines)): + line = lines[i] + if line.rstrip(): + while not line.startswith(prefix): + prefix = prefix[:-1] + i = len(prefix) + lines = [line[i:]+'\n' for line in lines] + pysource = ''.join(lines) + # + compile(pysource, "cffi_init", "exec") + # + self._embedding = pysource + + +def _load_backend_lib(backend, name, flags): + if name is None: + if sys.platform != "win32": + return backend.load_library(None, flags) + name = "c" # Windows: load_library(None) fails, but this works + # (backward compatibility hack only) + try: + if '.' 
not in name and '/' not in name: + raise OSError("library not found: %r" % (name,)) + return backend.load_library(name, flags) + except OSError: + import ctypes.util + path = ctypes.util.find_library(name) + if path is None: + raise # propagate the original OSError + return backend.load_library(path, flags) + +def _make_ffi_library(ffi, libname, flags): + import os + backend = ffi._backend + backendlib = _load_backend_lib(backend, libname, flags) + # + def accessor_function(name): + key = 'function ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + try: + value = backendlib.load_function(BType, name) + except KeyError as e: + raise AttributeError('%s: %s' % (name, e)) + library.__dict__[name] = value + # + def accessor_variable(name): + key = 'variable ' + name + tp, _ = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + read_variable = backendlib.read_variable + write_variable = backendlib.write_variable + setattr(FFILibrary, name, property( + lambda self: read_variable(BType, name), + lambda self, value: write_variable(BType, name, value))) + # + def accessor_constant(name): + raise NotImplementedError("non-integer constant '%s' cannot be " + "accessed from a dlopen() library" % (name,)) + # + def accessor_int_constant(name): + library.__dict__[name] = ffi._parser._int_constants[name] + # + accessors = {} + accessors_version = [False] + # + def update_accessors(): + if accessors_version[0] is ffi._cdef_version: + return + # + from . import model + for key, (tp, _) in ffi._parser._declarations.items(): + if not isinstance(tp, model.EnumType): + tag, name = key.split(' ', 1) + if tag == 'function': + accessors[name] = accessor_function + elif tag == 'variable': + accessors[name] = accessor_variable + elif tag == 'constant': + accessors[name] = accessor_constant + else: + for i, enumname in enumerate(tp.enumerators): + def accessor_enum(name, tp=tp, i=i): + tp.check_not_partial() + library.__dict__[name] = tp.enumvalues[i] + accessors[enumname] = accessor_enum + for name in ffi._parser._int_constants: + accessors.setdefault(name, accessor_int_constant) + accessors_version[0] = ffi._cdef_version + # + def make_accessor(name): + with ffi._lock: + if name in library.__dict__ or name in FFILibrary.__dict__: + return # added by another thread while waiting for the lock + if name not in accessors: + update_accessors() + if name not in accessors: + raise AttributeError(name) + accessors[name](name) + # + class FFILibrary(object): + def __getattr__(self, name): + make_accessor(name) + return getattr(self, name) + def __setattr__(self, name, value): + try: + property = getattr(self.__class__, name) + except AttributeError: + make_accessor(name) + setattr(self, name, value) + else: + property.__set__(self, value) + def __dir__(self): + with ffi._lock: + update_accessors() + return accessors.keys() + # + if libname is not None: + try: + if not isinstance(libname, str): # unicode, on Python 2 + libname = libname.encode('utf-8') + FFILibrary.__name__ = 'FFILibrary_%s' % libname + except UnicodeError: + pass + library = FFILibrary() + return library, library.__dict__ + +def _builtin_function_type(func): + # a hack to make at least ffi.typeof(builtin_function) work, + # if the builtin function was obtained by 'vengine_cpy'. 
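# ---------------------------------------------------------------------------
# Editor's illustrative sketch -- not part of the patched cffi sources.
# Two small demonstrations: ffi.init_once() caching a setup function per
# tag, and the lazy FFILibrary accessors above exposing declared functions
# and integer constants as attributes of the dlopen() result.  Using the
# standard C library through dlopen(None) is a POSIX-flavoured assumption.
# ---------------------------------------------------------------------------
import cffi

ffi_demo = cffi.FFI()
calls = []

def expensive_setup():
    calls.append(1)
    return "resource"                            # stand-in for a real handle

first = ffi_demo.init_once(expensive_setup, "db")
second = ffi_demo.init_once(expensive_setup, "db")   # cached: not run again
assert first == second == "resource" and len(calls) == 1

ffi_demo.cdef("""
    #define SEEK_END 2
    int atoi(const char *);
""")
libc = ffi_demo.dlopen(None)
assert libc.SEEK_END == 2        # served by accessor_int_constant()
assert libc.atoi(b"42") == 42    # served by accessor_function()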
+ import sys + try: + module = sys.modules[func.__module__] + ffi = module._cffi_original_ffi + types_of_builtin_funcs = module._cffi_types_of_builtin_funcs + tp = types_of_builtin_funcs[func] + except (KeyError, AttributeError, TypeError): + return None + else: + with ffi._lock: + return ffi._get_cached_btype(tp) diff --git a/Lib/site-packages/cffi/backend_ctypes.py b/Lib/site-packages/cffi/backend_ctypes.py new file mode 100644 index 0000000..b061cda --- /dev/null +++ b/Lib/site-packages/cffi/backend_ctypes.py @@ -0,0 +1,1068 @@ +import ctypes, ctypes.util, operator, sys +from . import model + +if sys.version_info < (3,): + bytechr = chr +else: + unicode = str + long = int + xrange = range + bytechr = lambda num: bytes([num]) + +class CTypesType(type): + pass + +class CTypesData(object): + __metaclass__ = CTypesType + __slots__ = ['__weakref__'] + __name__ = '' + + def __init__(self, *args): + raise TypeError("cannot instantiate %r" % (self.__class__,)) + + @classmethod + def _newp(cls, init): + raise TypeError("expected a pointer or array ctype, got '%s'" + % (cls._get_c_name(),)) + + @staticmethod + def _to_ctypes(value): + raise TypeError + + @classmethod + def _arg_to_ctypes(cls, *value): + try: + ctype = cls._ctype + except AttributeError: + raise TypeError("cannot create an instance of %r" % (cls,)) + if value: + res = cls._to_ctypes(*value) + if not isinstance(res, ctype): + res = cls._ctype(res) + else: + res = cls._ctype() + return res + + @classmethod + def _create_ctype_obj(cls, init): + if init is None: + return cls._arg_to_ctypes() + else: + return cls._arg_to_ctypes(init) + + @staticmethod + def _from_ctypes(ctypes_value): + raise TypeError + + @classmethod + def _get_c_name(cls, replace_with=''): + return cls._reftypename.replace(' &', replace_with) + + @classmethod + def _fix_class(cls): + cls.__name__ = 'CData<%s>' % (cls._get_c_name(),) + cls.__qualname__ = 'CData<%s>' % (cls._get_c_name(),) + cls.__module__ = 'ffi' + + def _get_own_repr(self): + raise NotImplementedError + + def _addr_repr(self, address): + if address == 0: + return 'NULL' + else: + if address < 0: + address += 1 << (8*ctypes.sizeof(ctypes.c_void_p)) + return '0x%x' % address + + def __repr__(self, c_name=None): + own = self._get_own_repr() + return '' % (c_name or self._get_c_name(), own) + + def _convert_to_address(self, BClass): + if BClass is None: + raise TypeError("cannot convert %r to an address" % ( + self._get_c_name(),)) + else: + raise TypeError("cannot convert %r to %r" % ( + self._get_c_name(), BClass._get_c_name())) + + @classmethod + def _get_size(cls): + return ctypes.sizeof(cls._ctype) + + def _get_size_of_instance(self): + return ctypes.sizeof(self._ctype) + + @classmethod + def _cast_from(cls, source): + raise TypeError("cannot cast to %r" % (cls._get_c_name(),)) + + def _cast_to_integer(self): + return self._convert_to_address(None) + + @classmethod + def _alignment(cls): + return ctypes.alignment(cls._ctype) + + def __iter__(self): + raise TypeError("cdata %r does not support iteration" % ( + self._get_c_name()),) + + def _make_cmp(name): + cmpfunc = getattr(operator, name) + def cmp(self, other): + if isinstance(other, CTypesData): + return cmpfunc(self._convert_to_address(None), + other._convert_to_address(None)) + else: + return NotImplemented + cmp.func_name = name + return cmp + + __eq__ = _make_cmp('__eq__') + __ne__ = _make_cmp('__ne__') + __lt__ = _make_cmp('__lt__') + __le__ = _make_cmp('__le__') + __gt__ = _make_cmp('__gt__') + __ge__ = _make_cmp('__ge__') + + def 
__hash__(self): + return hash(type(self)) ^ hash(self._convert_to_address(None)) + + def _to_string(self, maxlen): + raise TypeError("string(): %r" % (self,)) + + +class CTypesGenericPrimitive(CTypesData): + __slots__ = [] + + def __eq__(self, other): + return self is other + + def __ne__(self, other): + return self is not other + + def __hash__(self): + return object.__hash__(self) + + def _get_own_repr(self): + return repr(self._from_ctypes(self._value)) + + +class CTypesGenericArray(CTypesData): + __slots__ = [] + + @classmethod + def _newp(cls, init): + return cls(init) + + def __iter__(self): + for i in xrange(len(self)): + yield self[i] + + def _get_own_repr(self): + return self._addr_repr(ctypes.addressof(self._blob)) + + +class CTypesGenericPtr(CTypesData): + __slots__ = ['_address', '_as_ctype_ptr'] + _automatic_casts = False + kind = "pointer" + + @classmethod + def _newp(cls, init): + return cls(init) + + @classmethod + def _cast_from(cls, source): + if source is None: + address = 0 + elif isinstance(source, CTypesData): + address = source._cast_to_integer() + elif isinstance(source, (int, long)): + address = source + else: + raise TypeError("bad type for cast to %r: %r" % + (cls, type(source).__name__)) + return cls._new_pointer_at(address) + + @classmethod + def _new_pointer_at(cls, address): + self = cls.__new__(cls) + self._address = address + self._as_ctype_ptr = ctypes.cast(address, cls._ctype) + return self + + def _get_own_repr(self): + try: + return self._addr_repr(self._address) + except AttributeError: + return '???' + + def _cast_to_integer(self): + return self._address + + def __nonzero__(self): + return bool(self._address) + + def __bool__(self): + return bool(self._address) + + @classmethod + def _to_ctypes(cls, value): + if not isinstance(value, CTypesData): + raise TypeError("unexpected %s object" % type(value).__name__) + address = value._convert_to_address(cls) + return ctypes.cast(address, cls._ctype) + + @classmethod + def _from_ctypes(cls, ctypes_ptr): + address = ctypes.cast(ctypes_ptr, ctypes.c_void_p).value or 0 + return cls._new_pointer_at(address) + + @classmethod + def _initialize(cls, ctypes_ptr, value): + if value: + ctypes_ptr.contents = cls._to_ctypes(value).contents + + def _convert_to_address(self, BClass): + if (BClass in (self.__class__, None) or BClass._automatic_casts + or self._automatic_casts): + return self._address + else: + return CTypesData._convert_to_address(self, BClass) + + +class CTypesBaseStructOrUnion(CTypesData): + __slots__ = ['_blob'] + + @classmethod + def _create_ctype_obj(cls, init): + # may be overridden + raise TypeError("cannot instantiate opaque type %s" % (cls,)) + + def _get_own_repr(self): + return self._addr_repr(ctypes.addressof(self._blob)) + + @classmethod + def _offsetof(cls, fieldname): + return getattr(cls._ctype, fieldname).offset + + def _convert_to_address(self, BClass): + if getattr(BClass, '_BItem', None) is self.__class__: + return ctypes.addressof(self._blob) + else: + return CTypesData._convert_to_address(self, BClass) + + @classmethod + def _from_ctypes(cls, ctypes_struct_or_union): + self = cls.__new__(cls) + self._blob = ctypes_struct_or_union + return self + + @classmethod + def _to_ctypes(cls, value): + return value._blob + + def __repr__(self, c_name=None): + return CTypesData.__repr__(self, c_name or self._get_c_name(' &')) + + +class CTypesBackend(object): + + PRIMITIVE_TYPES = { + 'char': ctypes.c_char, + 'short': ctypes.c_short, + 'int': ctypes.c_int, + 'long': ctypes.c_long, + 'long long': 
ctypes.c_longlong, + 'signed char': ctypes.c_byte, + 'unsigned char': ctypes.c_ubyte, + 'unsigned short': ctypes.c_ushort, + 'unsigned int': ctypes.c_uint, + 'unsigned long': ctypes.c_ulong, + 'unsigned long long': ctypes.c_ulonglong, + 'float': ctypes.c_float, + 'double': ctypes.c_double, + '_Bool': ctypes.c_bool, + } + + for _name in ['unsigned long long', 'unsigned long', + 'unsigned int', 'unsigned short', 'unsigned char']: + _size = ctypes.sizeof(PRIMITIVE_TYPES[_name]) + PRIMITIVE_TYPES['uint%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_void_p): + PRIMITIVE_TYPES['uintptr_t'] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_size_t): + PRIMITIVE_TYPES['size_t'] = PRIMITIVE_TYPES[_name] + + for _name in ['long long', 'long', 'int', 'short', 'signed char']: + _size = ctypes.sizeof(PRIMITIVE_TYPES[_name]) + PRIMITIVE_TYPES['int%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_void_p): + PRIMITIVE_TYPES['intptr_t'] = PRIMITIVE_TYPES[_name] + PRIMITIVE_TYPES['ptrdiff_t'] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_size_t): + PRIMITIVE_TYPES['ssize_t'] = PRIMITIVE_TYPES[_name] + + + def __init__(self): + self.RTLD_LAZY = 0 # not supported anyway by ctypes + self.RTLD_NOW = 0 + self.RTLD_GLOBAL = ctypes.RTLD_GLOBAL + self.RTLD_LOCAL = ctypes.RTLD_LOCAL + + def set_ffi(self, ffi): + self.ffi = ffi + + def _get_types(self): + return CTypesData, CTypesType + + def load_library(self, path, flags=0): + cdll = ctypes.CDLL(path, flags) + return CTypesLibrary(self, cdll) + + def new_void_type(self): + class CTypesVoid(CTypesData): + __slots__ = [] + _reftypename = 'void &' + @staticmethod + def _from_ctypes(novalue): + return None + @staticmethod + def _to_ctypes(novalue): + if novalue is not None: + raise TypeError("None expected, got %s object" % + (type(novalue).__name__,)) + return None + CTypesVoid._fix_class() + return CTypesVoid + + def new_primitive_type(self, name): + if name == 'wchar_t': + raise NotImplementedError(name) + ctype = self.PRIMITIVE_TYPES[name] + if name == 'char': + kind = 'char' + elif name in ('float', 'double'): + kind = 'float' + else: + if name in ('signed char', 'unsigned char'): + kind = 'byte' + elif name == '_Bool': + kind = 'bool' + else: + kind = 'int' + is_signed = (ctype(-1).value == -1) + # + def _cast_source_to_int(source): + if isinstance(source, (int, long, float)): + source = int(source) + elif isinstance(source, CTypesData): + source = source._cast_to_integer() + elif isinstance(source, bytes): + source = ord(source) + elif source is None: + source = 0 + else: + raise TypeError("bad type for cast to %r: %r" % + (CTypesPrimitive, type(source).__name__)) + return source + # + kind1 = kind + class CTypesPrimitive(CTypesGenericPrimitive): + __slots__ = ['_value'] + _ctype = ctype + _reftypename = '%s &' % name + kind = kind1 + + def __init__(self, value): + self._value = value + + @staticmethod + def _create_ctype_obj(init): + if init is None: + return ctype() + return ctype(CTypesPrimitive._to_ctypes(init)) + + if kind == 'int' or kind == 'byte': + @classmethod + def _cast_from(cls, source): + source = _cast_source_to_int(source) + source = ctype(source).value # cast within range + return cls(source) + def __int__(self): + return self._value + + if kind == 'bool': + @classmethod + def _cast_from(cls, source): + if not isinstance(source, (int, long, float)): + source = _cast_source_to_int(source) + return cls(bool(source)) + def __int__(self): + return self._value 
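# ---------------------------------------------------------------------------
# Editor's illustrative sketch -- not part of the patched cffi sources.
# The pure-ctypes backend implemented in this file can be selected
# explicitly (for example when the compiled _cffi_backend module is not
# available); simple ABI-mode usage then behaves the same way.
# ---------------------------------------------------------------------------
import cffi
from cffi.backend_ctypes import CTypesBackend

ffi_demo = cffi.FFI(backend=CTypesBackend())
x = ffi_demo.new("int *", 41)
x[0] += 1
assert x[0] == 42
assert ffi_demo.sizeof("unsigned long long") == 8   # on the usual platforms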
+ + if kind == 'char': + @classmethod + def _cast_from(cls, source): + source = _cast_source_to_int(source) + source = bytechr(source & 0xFF) + return cls(source) + def __int__(self): + return ord(self._value) + + if kind == 'float': + @classmethod + def _cast_from(cls, source): + if isinstance(source, float): + pass + elif isinstance(source, CTypesGenericPrimitive): + if hasattr(source, '__float__'): + source = float(source) + else: + source = int(source) + else: + source = _cast_source_to_int(source) + source = ctype(source).value # fix precision + return cls(source) + def __int__(self): + return int(self._value) + def __float__(self): + return self._value + + _cast_to_integer = __int__ + + if kind == 'int' or kind == 'byte' or kind == 'bool': + @staticmethod + def _to_ctypes(x): + if not isinstance(x, (int, long)): + if isinstance(x, CTypesData): + x = int(x) + else: + raise TypeError("integer expected, got %s" % + type(x).__name__) + if ctype(x).value != x: + if not is_signed and x < 0: + raise OverflowError("%s: negative integer" % name) + else: + raise OverflowError("%s: integer out of bounds" + % name) + return x + + if kind == 'char': + @staticmethod + def _to_ctypes(x): + if isinstance(x, bytes) and len(x) == 1: + return x + if isinstance(x, CTypesPrimitive): # > + return x._value + raise TypeError("character expected, got %s" % + type(x).__name__) + + if kind == 'float': + @staticmethod + def _to_ctypes(x): + if not isinstance(x, (int, long, float, CTypesData)): + raise TypeError("float expected, got %s" % + type(x).__name__) + return ctype(x).value + + @staticmethod + def _from_ctypes(value): + return getattr(value, 'value', value) + + @staticmethod + def _initialize(blob, init): + blob.value = CTypesPrimitive._to_ctypes(init) + + if kind == 'char': + def _to_string(self, maxlen): + return self._value + if kind == 'byte': + def _to_string(self, maxlen): + return chr(self._value & 0xff) + # + CTypesPrimitive._fix_class() + return CTypesPrimitive + + def new_pointer_type(self, BItem): + getbtype = self.ffi._get_cached_btype + if BItem is getbtype(model.PrimitiveType('char')): + kind = 'charp' + elif BItem in (getbtype(model.PrimitiveType('signed char')), + getbtype(model.PrimitiveType('unsigned char'))): + kind = 'bytep' + elif BItem is getbtype(model.void_type): + kind = 'voidp' + else: + kind = 'generic' + # + class CTypesPtr(CTypesGenericPtr): + __slots__ = ['_own'] + if kind == 'charp': + __slots__ += ['__as_strbuf'] + _BItem = BItem + if hasattr(BItem, '_ctype'): + _ctype = ctypes.POINTER(BItem._ctype) + _bitem_size = ctypes.sizeof(BItem._ctype) + else: + _ctype = ctypes.c_void_p + if issubclass(BItem, CTypesGenericArray): + _reftypename = BItem._get_c_name('(* &)') + else: + _reftypename = BItem._get_c_name(' * &') + + def __init__(self, init): + ctypeobj = BItem._create_ctype_obj(init) + if kind == 'charp': + self.__as_strbuf = ctypes.create_string_buffer( + ctypeobj.value + b'\x00') + self._as_ctype_ptr = ctypes.cast( + self.__as_strbuf, self._ctype) + else: + self._as_ctype_ptr = ctypes.pointer(ctypeobj) + self._address = ctypes.cast(self._as_ctype_ptr, + ctypes.c_void_p).value + self._own = True + + def __add__(self, other): + if isinstance(other, (int, long)): + return self._new_pointer_at(self._address + + other * self._bitem_size) + else: + return NotImplemented + + def __sub__(self, other): + if isinstance(other, (int, long)): + return self._new_pointer_at(self._address - + other * self._bitem_size) + elif type(self) is type(other): + return (self._address - 
other._address) // self._bitem_size + else: + return NotImplemented + + def __getitem__(self, index): + if getattr(self, '_own', False) and index != 0: + raise IndexError + return BItem._from_ctypes(self._as_ctype_ptr[index]) + + def __setitem__(self, index, value): + self._as_ctype_ptr[index] = BItem._to_ctypes(value) + + if kind == 'charp' or kind == 'voidp': + @classmethod + def _arg_to_ctypes(cls, *value): + if value and isinstance(value[0], bytes): + return ctypes.c_char_p(value[0]) + else: + return super(CTypesPtr, cls)._arg_to_ctypes(*value) + + if kind == 'charp' or kind == 'bytep': + def _to_string(self, maxlen): + if maxlen < 0: + maxlen = sys.maxsize + p = ctypes.cast(self._as_ctype_ptr, + ctypes.POINTER(ctypes.c_char)) + n = 0 + while n < maxlen and p[n] != b'\x00': + n += 1 + return b''.join([p[i] for i in range(n)]) + + def _get_own_repr(self): + if getattr(self, '_own', False): + return 'owning %d bytes' % ( + ctypes.sizeof(self._as_ctype_ptr.contents),) + return super(CTypesPtr, self)._get_own_repr() + # + if (BItem is self.ffi._get_cached_btype(model.void_type) or + BItem is self.ffi._get_cached_btype(model.PrimitiveType('char'))): + CTypesPtr._automatic_casts = True + # + CTypesPtr._fix_class() + return CTypesPtr + + def new_array_type(self, CTypesPtr, length): + if length is None: + brackets = ' &[]' + else: + brackets = ' &[%d]' % length + BItem = CTypesPtr._BItem + getbtype = self.ffi._get_cached_btype + if BItem is getbtype(model.PrimitiveType('char')): + kind = 'char' + elif BItem in (getbtype(model.PrimitiveType('signed char')), + getbtype(model.PrimitiveType('unsigned char'))): + kind = 'byte' + else: + kind = 'generic' + # + class CTypesArray(CTypesGenericArray): + __slots__ = ['_blob', '_own'] + if length is not None: + _ctype = BItem._ctype * length + else: + __slots__.append('_ctype') + _reftypename = BItem._get_c_name(brackets) + _declared_length = length + _CTPtr = CTypesPtr + + def __init__(self, init): + if length is None: + if isinstance(init, (int, long)): + len1 = init + init = None + elif kind == 'char' and isinstance(init, bytes): + len1 = len(init) + 1 # extra null + else: + init = tuple(init) + len1 = len(init) + self._ctype = BItem._ctype * len1 + self._blob = self._ctype() + self._own = True + if init is not None: + self._initialize(self._blob, init) + + @staticmethod + def _initialize(blob, init): + if isinstance(init, bytes): + init = [init[i:i+1] for i in range(len(init))] + else: + init = tuple(init) + if len(init) > len(blob): + raise IndexError("too many initializers") + addr = ctypes.cast(blob, ctypes.c_void_p).value + PTR = ctypes.POINTER(BItem._ctype) + itemsize = ctypes.sizeof(BItem._ctype) + for i, value in enumerate(init): + p = ctypes.cast(addr + i * itemsize, PTR) + BItem._initialize(p.contents, value) + + def __len__(self): + return len(self._blob) + + def __getitem__(self, index): + if not (0 <= index < len(self._blob)): + raise IndexError + return BItem._from_ctypes(self._blob[index]) + + def __setitem__(self, index, value): + if not (0 <= index < len(self._blob)): + raise IndexError + self._blob[index] = BItem._to_ctypes(value) + + if kind == 'char' or kind == 'byte': + def _to_string(self, maxlen): + if maxlen < 0: + maxlen = len(self._blob) + p = ctypes.cast(self._blob, + ctypes.POINTER(ctypes.c_char)) + n = 0 + while n < maxlen and p[n] != b'\x00': + n += 1 + return b''.join([p[i] for i in range(n)]) + + def _get_own_repr(self): + if getattr(self, '_own', False): + return 'owning %d bytes' % (ctypes.sizeof(self._blob),) + 
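# ---------------------------------------------------------------------------
# Editor's illustrative sketch -- not part of the patched cffi sources.
# Pointer arithmetic and array indexing as implemented by CTypesPtr and
# CTypesArray above, exercised through the public API (either backend).
# ---------------------------------------------------------------------------
import cffi

ffi_demo = cffi.FFI()
a = ffi_demo.new("int[4]", [10, 20, 30, 40])
p = ffi_demo.cast("int *", a)
assert p[2] == 30
assert (p + 3)[0] == 40            # __add__ scales by sizeof(int)
assert (p + 3) - p == 3            # __sub__ yields an element count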
return super(CTypesArray, self)._get_own_repr() + + def _convert_to_address(self, BClass): + if BClass in (CTypesPtr, None) or BClass._automatic_casts: + return ctypes.addressof(self._blob) + else: + return CTypesData._convert_to_address(self, BClass) + + @staticmethod + def _from_ctypes(ctypes_array): + self = CTypesArray.__new__(CTypesArray) + self._blob = ctypes_array + return self + + @staticmethod + def _arg_to_ctypes(value): + return CTypesPtr._arg_to_ctypes(value) + + def __add__(self, other): + if isinstance(other, (int, long)): + return CTypesPtr._new_pointer_at( + ctypes.addressof(self._blob) + + other * ctypes.sizeof(BItem._ctype)) + else: + return NotImplemented + + @classmethod + def _cast_from(cls, source): + raise NotImplementedError("casting to %r" % ( + cls._get_c_name(),)) + # + CTypesArray._fix_class() + return CTypesArray + + def _new_struct_or_union(self, kind, name, base_ctypes_class): + # + class struct_or_union(base_ctypes_class): + pass + struct_or_union.__name__ = '%s_%s' % (kind, name) + kind1 = kind + # + class CTypesStructOrUnion(CTypesBaseStructOrUnion): + __slots__ = ['_blob'] + _ctype = struct_or_union + _reftypename = '%s &' % (name,) + _kind = kind = kind1 + # + CTypesStructOrUnion._fix_class() + return CTypesStructOrUnion + + def new_struct_type(self, name): + return self._new_struct_or_union('struct', name, ctypes.Structure) + + def new_union_type(self, name): + return self._new_struct_or_union('union', name, ctypes.Union) + + def complete_struct_or_union(self, CTypesStructOrUnion, fields, tp, + totalsize=-1, totalalignment=-1, sflags=0): + if totalsize >= 0 or totalalignment >= 0: + raise NotImplementedError("the ctypes backend of CFFI does not support " + "structures completed by verify(); please " + "compile and install the _cffi_backend module.") + struct_or_union = CTypesStructOrUnion._ctype + fnames = [fname for (fname, BField, bitsize) in fields] + btypes = [BField for (fname, BField, bitsize) in fields] + bitfields = [bitsize for (fname, BField, bitsize) in fields] + # + bfield_types = {} + cfields = [] + for (fname, BField, bitsize) in fields: + if bitsize < 0: + cfields.append((fname, BField._ctype)) + bfield_types[fname] = BField + else: + cfields.append((fname, BField._ctype, bitsize)) + bfield_types[fname] = Ellipsis + if sflags & 8: + struct_or_union._pack_ = 1 + struct_or_union._fields_ = cfields + CTypesStructOrUnion._bfield_types = bfield_types + # + @staticmethod + def _create_ctype_obj(init): + result = struct_or_union() + if init is not None: + initialize(result, init) + return result + CTypesStructOrUnion._create_ctype_obj = _create_ctype_obj + # + def initialize(blob, init): + if is_union: + if len(init) > 1: + raise ValueError("union initializer: %d items given, but " + "only one supported (use a dict if needed)" + % (len(init),)) + if not isinstance(init, dict): + if isinstance(init, (bytes, unicode)): + raise TypeError("union initializer: got a str") + init = tuple(init) + if len(init) > len(fnames): + raise ValueError("too many values for %s initializer" % + CTypesStructOrUnion._get_c_name()) + init = dict(zip(fnames, init)) + addr = ctypes.addressof(blob) + for fname, value in init.items(): + BField, bitsize = name2fieldtype[fname] + assert bitsize < 0, \ + "not implemented: initializer with bit fields" + offset = CTypesStructOrUnion._offsetof(fname) + PTR = ctypes.POINTER(BField._ctype) + p = ctypes.cast(addr + offset, PTR) + BField._initialize(p.contents, value) + is_union = CTypesStructOrUnion._kind == 'union' + 
name2fieldtype = dict(zip(fnames, zip(btypes, bitfields))) + # + for fname, BField, bitsize in fields: + if fname == '': + raise NotImplementedError("nested anonymous structs/unions") + if hasattr(CTypesStructOrUnion, fname): + raise ValueError("the field name %r conflicts in " + "the ctypes backend" % fname) + if bitsize < 0: + def getter(self, fname=fname, BField=BField, + offset=CTypesStructOrUnion._offsetof(fname), + PTR=ctypes.POINTER(BField._ctype)): + addr = ctypes.addressof(self._blob) + p = ctypes.cast(addr + offset, PTR) + return BField._from_ctypes(p.contents) + def setter(self, value, fname=fname, BField=BField): + setattr(self._blob, fname, BField._to_ctypes(value)) + # + if issubclass(BField, CTypesGenericArray): + setter = None + if BField._declared_length == 0: + def getter(self, fname=fname, BFieldPtr=BField._CTPtr, + offset=CTypesStructOrUnion._offsetof(fname), + PTR=ctypes.POINTER(BField._ctype)): + addr = ctypes.addressof(self._blob) + p = ctypes.cast(addr + offset, PTR) + return BFieldPtr._from_ctypes(p) + # + else: + def getter(self, fname=fname, BField=BField): + return BField._from_ctypes(getattr(self._blob, fname)) + def setter(self, value, fname=fname, BField=BField): + # xxx obscure workaround + value = BField._to_ctypes(value) + oldvalue = getattr(self._blob, fname) + setattr(self._blob, fname, value) + if value != getattr(self._blob, fname): + setattr(self._blob, fname, oldvalue) + raise OverflowError("value too large for bitfield") + setattr(CTypesStructOrUnion, fname, property(getter, setter)) + # + CTypesPtr = self.ffi._get_cached_btype(model.PointerType(tp)) + for fname in fnames: + if hasattr(CTypesPtr, fname): + raise ValueError("the field name %r conflicts in " + "the ctypes backend" % fname) + def getter(self, fname=fname): + return getattr(self[0], fname) + def setter(self, value, fname=fname): + setattr(self[0], fname, value) + setattr(CTypesPtr, fname, property(getter, setter)) + + def new_function_type(self, BArgs, BResult, has_varargs): + nameargs = [BArg._get_c_name() for BArg in BArgs] + if has_varargs: + nameargs.append('...') + nameargs = ', '.join(nameargs) + # + class CTypesFunctionPtr(CTypesGenericPtr): + __slots__ = ['_own_callback', '_name'] + _ctype = ctypes.CFUNCTYPE(getattr(BResult, '_ctype', None), + *[BArg._ctype for BArg in BArgs], + use_errno=True) + _reftypename = BResult._get_c_name('(* &)(%s)' % (nameargs,)) + + def __init__(self, init, error=None): + # create a callback to the Python callable init() + import traceback + assert not has_varargs, "varargs not supported for callbacks" + if getattr(BResult, '_ctype', None) is not None: + error = BResult._from_ctypes( + BResult._create_ctype_obj(error)) + else: + error = None + def callback(*args): + args2 = [] + for arg, BArg in zip(args, BArgs): + args2.append(BArg._from_ctypes(arg)) + try: + res2 = init(*args2) + res2 = BResult._to_ctypes(res2) + except: + traceback.print_exc() + res2 = error + if issubclass(BResult, CTypesGenericPtr): + if res2: + res2 = ctypes.cast(res2, ctypes.c_void_p).value + # .value: http://bugs.python.org/issue1574593 + else: + res2 = None + #print repr(res2) + return res2 + if issubclass(BResult, CTypesGenericPtr): + # The only pointers callbacks can return are void*s: + # http://bugs.python.org/issue5710 + callback_ctype = ctypes.CFUNCTYPE( + ctypes.c_void_p, + *[BArg._ctype for BArg in BArgs], + use_errno=True) + else: + callback_ctype = CTypesFunctionPtr._ctype + self._as_ctype_ptr = callback_ctype(callback) + self._address = 
ctypes.cast(self._as_ctype_ptr, + ctypes.c_void_p).value + self._own_callback = init + + @staticmethod + def _initialize(ctypes_ptr, value): + if value: + raise NotImplementedError("ctypes backend: not supported: " + "initializers for function pointers") + + def __repr__(self): + c_name = getattr(self, '_name', None) + if c_name: + i = self._reftypename.index('(* &)') + if self._reftypename[i-1] not in ' )*': + c_name = ' ' + c_name + c_name = self._reftypename.replace('(* &)', c_name) + return CTypesData.__repr__(self, c_name) + + def _get_own_repr(self): + if getattr(self, '_own_callback', None) is not None: + return 'calling %r' % (self._own_callback,) + return super(CTypesFunctionPtr, self)._get_own_repr() + + def __call__(self, *args): + if has_varargs: + assert len(args) >= len(BArgs) + extraargs = args[len(BArgs):] + args = args[:len(BArgs)] + else: + assert len(args) == len(BArgs) + ctypes_args = [] + for arg, BArg in zip(args, BArgs): + ctypes_args.append(BArg._arg_to_ctypes(arg)) + if has_varargs: + for i, arg in enumerate(extraargs): + if arg is None: + ctypes_args.append(ctypes.c_void_p(0)) # NULL + continue + if not isinstance(arg, CTypesData): + raise TypeError( + "argument %d passed in the variadic part " + "needs to be a cdata object (got %s)" % + (1 + len(BArgs) + i, type(arg).__name__)) + ctypes_args.append(arg._arg_to_ctypes(arg)) + result = self._as_ctype_ptr(*ctypes_args) + return BResult._from_ctypes(result) + # + CTypesFunctionPtr._fix_class() + return CTypesFunctionPtr + + def new_enum_type(self, name, enumerators, enumvalues, CTypesInt): + assert isinstance(name, str) + reverse_mapping = dict(zip(reversed(enumvalues), + reversed(enumerators))) + # + class CTypesEnum(CTypesInt): + __slots__ = [] + _reftypename = '%s &' % name + + def _get_own_repr(self): + value = self._value + try: + return '%d: %s' % (value, reverse_mapping[value]) + except KeyError: + return str(value) + + def _to_string(self, maxlen): + value = self._value + try: + return reverse_mapping[value] + except KeyError: + return str(value) + # + CTypesEnum._fix_class() + return CTypesEnum + + def get_errno(self): + return ctypes.get_errno() + + def set_errno(self, value): + ctypes.set_errno(value) + + def string(self, b, maxlen=-1): + return b._to_string(maxlen) + + def buffer(self, bptr, size=-1): + raise NotImplementedError("buffer() with ctypes backend") + + def sizeof(self, cdata_or_BType): + if isinstance(cdata_or_BType, CTypesData): + return cdata_or_BType._get_size_of_instance() + else: + assert issubclass(cdata_or_BType, CTypesData) + return cdata_or_BType._get_size() + + def alignof(self, BType): + assert issubclass(BType, CTypesData) + return BType._alignment() + + def newp(self, BType, source): + if not issubclass(BType, CTypesData): + raise TypeError + return BType._newp(source) + + def cast(self, BType, source): + return BType._cast_from(source) + + def callback(self, BType, source, error, onerror): + assert onerror is None # XXX not implemented + return BType(source, error) + + typeof = type + + def getcname(self, BType, replace_with): + return BType._get_c_name(replace_with) + + def typeoffsetof(self, BType, fieldname, num=0): + if isinstance(fieldname, str): + if num == 0 and issubclass(BType, CTypesGenericPtr): + BType = BType._BItem + if not issubclass(BType, CTypesBaseStructOrUnion): + raise TypeError("expected a struct or union ctype") + BField = BType._bfield_types[fieldname] + if BField is Ellipsis: + raise TypeError("not supported for bitfields") + return (BField, 
BType._offsetof(fieldname)) + elif isinstance(fieldname, (int, long)): + if issubclass(BType, CTypesGenericArray): + BType = BType._CTPtr + if not issubclass(BType, CTypesGenericPtr): + raise TypeError("expected an array or ptr ctype") + BItem = BType._BItem + offset = BItem._get_size() * fieldname + if offset > sys.maxsize: + raise OverflowError + return (BItem, offset) + else: + raise TypeError(type(fieldname)) + + def rawaddressof(self, BTypePtr, cdata, offset=None): + if isinstance(cdata, CTypesBaseStructOrUnion): + ptr = ctypes.pointer(type(cdata)._to_ctypes(cdata)) + elif isinstance(cdata, CTypesGenericPtr): + if offset is None or not issubclass(type(cdata)._BItem, + CTypesBaseStructOrUnion): + raise TypeError("unexpected cdata type") + ptr = type(cdata)._to_ctypes(cdata) + elif isinstance(cdata, CTypesGenericArray): + ptr = type(cdata)._to_ctypes(cdata) + else: + raise TypeError("expected a ") + if offset: + ptr = ctypes.cast( + ctypes.c_void_p( + ctypes.cast(ptr, ctypes.c_void_p).value + offset), + type(ptr)) + return BTypePtr._from_ctypes(ptr) + + +class CTypesLibrary(object): + + def __init__(self, backend, cdll): + self.backend = backend + self.cdll = cdll + + def load_function(self, BType, name): + c_func = getattr(self.cdll, name) + funcobj = BType._from_ctypes(c_func) + funcobj._name = name + return funcobj + + def read_variable(self, BType, name): + try: + ctypes_obj = BType._ctype.in_dll(self.cdll, name) + except AttributeError as e: + raise NotImplementedError(e) + return BType._from_ctypes(ctypes_obj) + + def write_variable(self, BType, name, value): + new_ctypes_obj = BType._to_ctypes(value) + ctypes_obj = BType._ctype.in_dll(self.cdll, name) + ctypes.memmove(ctypes.addressof(ctypes_obj), + ctypes.addressof(new_ctypes_obj), + ctypes.sizeof(BType._ctype)) diff --git a/Lib/site-packages/cffi/cffi_opcode.py b/Lib/site-packages/cffi/cffi_opcode.py new file mode 100644 index 0000000..dc40030 --- /dev/null +++ b/Lib/site-packages/cffi/cffi_opcode.py @@ -0,0 +1,179 @@ + +class CffiOp(object): + def __init__(self, op, arg): + self.op = op + self.arg = arg + + def as_c_expr(self): + if self.op is None: + assert isinstance(self.arg, str) + return '(_cffi_opcode_t)(%s)' % (self.arg,) + classname = CLASS_NAME[self.op] + return '_CFFI_OP(_CFFI_OP_%s, %s)' % (classname, self.arg) + + def as_python_bytes(self): + if self.op is None and self.arg.isdigit(): + value = int(self.arg) # non-negative: '-' not in self.arg + if value >= 2**31: + raise OverflowError("cannot emit %r: limited to 2**31-1" + % (self.arg,)) + return format_four_bytes(value) + if isinstance(self.arg, str): + from .ffiplatform import VerificationError + raise VerificationError("cannot emit to Python: %r" % (self.arg,)) + return format_four_bytes((self.arg << 8) | self.op) + + def __str__(self): + classname = CLASS_NAME.get(self.op, self.op) + return '(%s %s)' % (classname, self.arg) + +def format_four_bytes(num): + return '\\x%02X\\x%02X\\x%02X\\x%02X' % ( + (num >> 24) & 0xFF, + (num >> 16) & 0xFF, + (num >> 8) & 0xFF, + (num ) & 0xFF) + +OP_PRIMITIVE = 1 +OP_POINTER = 3 +OP_ARRAY = 5 +OP_OPEN_ARRAY = 7 +OP_STRUCT_UNION = 9 +OP_ENUM = 11 +OP_FUNCTION = 13 +OP_FUNCTION_END = 15 +OP_NOOP = 17 +OP_BITFIELD = 19 +OP_TYPENAME = 21 +OP_CPYTHON_BLTN_V = 23 # varargs +OP_CPYTHON_BLTN_N = 25 # noargs +OP_CPYTHON_BLTN_O = 27 # O (i.e. 
a single arg) +OP_CONSTANT = 29 +OP_CONSTANT_INT = 31 +OP_GLOBAL_VAR = 33 +OP_DLOPEN_FUNC = 35 +OP_DLOPEN_CONST = 37 +OP_GLOBAL_VAR_F = 39 +OP_EXTERN_PYTHON = 41 + +PRIM_VOID = 0 +PRIM_BOOL = 1 +PRIM_CHAR = 2 +PRIM_SCHAR = 3 +PRIM_UCHAR = 4 +PRIM_SHORT = 5 +PRIM_USHORT = 6 +PRIM_INT = 7 +PRIM_UINT = 8 +PRIM_LONG = 9 +PRIM_ULONG = 10 +PRIM_LONGLONG = 11 +PRIM_ULONGLONG = 12 +PRIM_FLOAT = 13 +PRIM_DOUBLE = 14 +PRIM_LONGDOUBLE = 15 + +PRIM_WCHAR = 16 +PRIM_INT8 = 17 +PRIM_UINT8 = 18 +PRIM_INT16 = 19 +PRIM_UINT16 = 20 +PRIM_INT32 = 21 +PRIM_UINT32 = 22 +PRIM_INT64 = 23 +PRIM_UINT64 = 24 +PRIM_INTPTR = 25 +PRIM_UINTPTR = 26 +PRIM_PTRDIFF = 27 +PRIM_SIZE = 28 +PRIM_SSIZE = 29 +PRIM_INT_LEAST8 = 30 +PRIM_UINT_LEAST8 = 31 +PRIM_INT_LEAST16 = 32 +PRIM_UINT_LEAST16 = 33 +PRIM_INT_LEAST32 = 34 +PRIM_UINT_LEAST32 = 35 +PRIM_INT_LEAST64 = 36 +PRIM_UINT_LEAST64 = 37 +PRIM_INT_FAST8 = 38 +PRIM_UINT_FAST8 = 39 +PRIM_INT_FAST16 = 40 +PRIM_UINT_FAST16 = 41 +PRIM_INT_FAST32 = 42 +PRIM_UINT_FAST32 = 43 +PRIM_INT_FAST64 = 44 +PRIM_UINT_FAST64 = 45 +PRIM_INTMAX = 46 +PRIM_UINTMAX = 47 + +_NUM_PRIM = 48 +_UNKNOWN_PRIM = -1 +_UNKNOWN_FLOAT_PRIM = -2 +_UNKNOWN_LONG_DOUBLE = -3 + +_IO_FILE_STRUCT = -1 + +PRIMITIVE_TO_INDEX = { + 'char': PRIM_CHAR, + 'short': PRIM_SHORT, + 'int': PRIM_INT, + 'long': PRIM_LONG, + 'long long': PRIM_LONGLONG, + 'signed char': PRIM_SCHAR, + 'unsigned char': PRIM_UCHAR, + 'unsigned short': PRIM_USHORT, + 'unsigned int': PRIM_UINT, + 'unsigned long': PRIM_ULONG, + 'unsigned long long': PRIM_ULONGLONG, + 'float': PRIM_FLOAT, + 'double': PRIM_DOUBLE, + 'long double': PRIM_LONGDOUBLE, + '_Bool': PRIM_BOOL, + 'wchar_t': PRIM_WCHAR, + 'int8_t': PRIM_INT8, + 'uint8_t': PRIM_UINT8, + 'int16_t': PRIM_INT16, + 'uint16_t': PRIM_UINT16, + 'int32_t': PRIM_INT32, + 'uint32_t': PRIM_UINT32, + 'int64_t': PRIM_INT64, + 'uint64_t': PRIM_UINT64, + 'intptr_t': PRIM_INTPTR, + 'uintptr_t': PRIM_UINTPTR, + 'ptrdiff_t': PRIM_PTRDIFF, + 'size_t': PRIM_SIZE, + 'ssize_t': PRIM_SSIZE, + 'int_least8_t': PRIM_INT_LEAST8, + 'uint_least8_t': PRIM_UINT_LEAST8, + 'int_least16_t': PRIM_INT_LEAST16, + 'uint_least16_t': PRIM_UINT_LEAST16, + 'int_least32_t': PRIM_INT_LEAST32, + 'uint_least32_t': PRIM_UINT_LEAST32, + 'int_least64_t': PRIM_INT_LEAST64, + 'uint_least64_t': PRIM_UINT_LEAST64, + 'int_fast8_t': PRIM_INT_FAST8, + 'uint_fast8_t': PRIM_UINT_FAST8, + 'int_fast16_t': PRIM_INT_FAST16, + 'uint_fast16_t': PRIM_UINT_FAST16, + 'int_fast32_t': PRIM_INT_FAST32, + 'uint_fast32_t': PRIM_UINT_FAST32, + 'int_fast64_t': PRIM_INT_FAST64, + 'uint_fast64_t': PRIM_UINT_FAST64, + 'intmax_t': PRIM_INTMAX, + 'uintmax_t': PRIM_UINTMAX, + } + +F_UNION = 0x01 +F_CHECK_FIELDS = 0x02 +F_PACKED = 0x04 +F_EXTERNAL = 0x08 +F_OPAQUE = 0x10 + +G_FLAGS = dict([('_CFFI_' + _key, globals()[_key]) + for _key in ['F_UNION', 'F_CHECK_FIELDS', 'F_PACKED', + 'F_EXTERNAL', 'F_OPAQUE']]) + +CLASS_NAME = {} +for _name, _value in list(globals().items()): + if _name.startswith('OP_') and isinstance(_value, int): + CLASS_NAME[_value] = _name[3:] diff --git a/Lib/site-packages/cffi/commontypes.py b/Lib/site-packages/cffi/commontypes.py new file mode 100644 index 0000000..3d11aae --- /dev/null +++ b/Lib/site-packages/cffi/commontypes.py @@ -0,0 +1,76 @@ +import sys +from . 
import api, model + + +COMMON_TYPES = {} + +try: + # fetch "bool" and all simple Windows types + from _cffi_backend import _get_common_types + _get_common_types(COMMON_TYPES) +except ImportError: + pass + +COMMON_TYPES['FILE'] = model.unknown_type('FILE', '_IO_FILE') +COMMON_TYPES['bool'] = '_Bool' # in case we got ImportError above + +for _type in model.PrimitiveType.ALL_PRIMITIVE_TYPES: + if _type.endswith('_t'): + COMMON_TYPES[_type] = _type +del _type + +_CACHE = {} + +def resolve_common_type(parser, commontype): + try: + return _CACHE[commontype] + except KeyError: + cdecl = COMMON_TYPES.get(commontype, commontype) + if not isinstance(cdecl, str): + result, quals = cdecl, 0 # cdecl is already a BaseType + elif cdecl in model.PrimitiveType.ALL_PRIMITIVE_TYPES: + result, quals = model.PrimitiveType(cdecl), 0 + elif cdecl == 'set-unicode-needed': + raise api.FFIError("The Windows type %r is only available after " + "you call ffi.set_unicode()" % (commontype,)) + else: + if commontype == cdecl: + raise api.FFIError("Unsupported type: %r. Please file a bug " + "if you think it should be." % (commontype,)) + result, quals = parser.parse_type_and_quals(cdecl) # recursive + + assert isinstance(result, model.BaseTypeByIdentity) + _CACHE[commontype] = result, quals + return result, quals + + +# ____________________________________________________________ +# extra types for Windows (most of them are in commontypes.c) + + +def win_common_types(): + return { + "UNICODE_STRING": model.StructType( + "_UNICODE_STRING", + ["Length", + "MaximumLength", + "Buffer"], + [model.PrimitiveType("unsigned short"), + model.PrimitiveType("unsigned short"), + model.PointerType(model.PrimitiveType("wchar_t"))], + [-1, -1, -1]), + "PUNICODE_STRING": "UNICODE_STRING *", + "PCUNICODE_STRING": "const UNICODE_STRING *", + + "TBYTE": "set-unicode-needed", + "TCHAR": "set-unicode-needed", + "LPCTSTR": "set-unicode-needed", + "PCTSTR": "set-unicode-needed", + "LPTSTR": "set-unicode-needed", + "PTSTR": "set-unicode-needed", + "PTBYTE": "set-unicode-needed", + "PTCHAR": "set-unicode-needed", + } + +if sys.platform == 'win32': + COMMON_TYPES.update(win_common_types()) diff --git a/Lib/site-packages/cffi/cparser.py b/Lib/site-packages/cffi/cparser.py new file mode 100644 index 0000000..baa0e98 --- /dev/null +++ b/Lib/site-packages/cffi/cparser.py @@ -0,0 +1,839 @@ +from . import api, model +from .commontypes import COMMON_TYPES, resolve_common_type +try: + from . 
import _pycparser as pycparser +except ImportError: + import pycparser +import weakref, re, sys + +try: + if sys.version_info < (3,): + import thread as _thread + else: + import _thread + lock = _thread.allocate_lock() +except ImportError: + lock = None + +_r_comment = re.compile(r"/\*.*?\*/|//([^\n\\]|\\.)*?$", + re.DOTALL | re.MULTILINE) +_r_define = re.compile(r"^\s*#\s*define\s+([A-Za-z_][A-Za-z_0-9]*)" + r"\b((?:[^\n\\]|\\.)*?)$", + re.DOTALL | re.MULTILINE) +_r_partial_enum = re.compile(r"=\s*\.\.\.\s*[,}]|\.\.\.\s*\}") +_r_enum_dotdotdot = re.compile(r"__dotdotdot\d+__$") +_r_partial_array = re.compile(r"\[\s*\.\.\.\s*\]") +_r_words = re.compile(r"\w+|\S") +_parser_cache = None +_r_int_literal = re.compile(r"-?0?x?[0-9a-f]+[lu]*$", re.IGNORECASE) +_r_stdcall1 = re.compile(r"\b(__stdcall|WINAPI)\b") +_r_stdcall2 = re.compile(r"[(]\s*(__stdcall|WINAPI)\b") +_r_cdecl = re.compile(r"\b__cdecl\b") +_r_extern_python = re.compile(r'\bextern\s*"Python"\s*.') +_r_star_const_space = re.compile( # matches "* const " + r"[*]\s*((const|volatile|restrict)\b\s*)+") + +def _get_parser(): + global _parser_cache + if _parser_cache is None: + _parser_cache = pycparser.CParser() + return _parser_cache + +def _workaround_for_old_pycparser(csource): + # Workaround for a pycparser issue (fixed between pycparser 2.10 and + # 2.14): "char*const***" gives us a wrong syntax tree, the same as + # for "char***(*const)". This means we can't tell the difference + # afterwards. But "char(*const(***))" gives us the right syntax + # tree. The issue only occurs if there are several stars in + # sequence with no parenthesis inbetween, just possibly qualifiers. + # Attempt to fix it by adding some parentheses in the source: each + # time we see "* const" or "* const *", we add an opening + # parenthesis before each star---the hard part is figuring out where + # to close them. + parts = [] + while True: + match = _r_star_const_space.search(csource) + if not match: + break + #print repr(''.join(parts)+csource), '=>', + parts.append(csource[:match.start()]) + parts.append('('); closing = ')' + parts.append(match.group()) # e.g. "* const " + endpos = match.end() + if csource.startswith('*', endpos): + parts.append('('); closing += ')' + level = 0 + i = endpos + while i < len(csource): + c = csource[i] + if c == '(': + level += 1 + elif c == ')': + if level == 0: + break + level -= 1 + elif c in ',;=': + if level == 0: + break + i += 1 + csource = csource[endpos:i] + closing + csource[i:] + #print repr(''.join(parts)+csource) + parts.append(csource) + return ''.join(parts) + +def _preprocess_extern_python(csource): + # input: `extern "Python" int foo(int);` or + # `extern "Python" { int foo(int); }` + # output: + # void __cffi_extern_python_start; + # int foo(int); + # void __cffi_extern_python_stop; + parts = [] + while True: + match = _r_extern_python.search(csource) + if not match: + break + endpos = match.end() - 1 + #print + #print ''.join(parts)+csource + #print '=>' + parts.append(csource[:match.start()]) + parts.append('void __cffi_extern_python_start; ') + if csource[endpos] == '{': + # grouping variant + closing = csource.find('}', endpos) + if closing < 0: + raise api.CDefError("'extern \"Python\" {': no '}' found") + if csource.find('{', endpos + 1, closing) >= 0: + raise NotImplementedError("cannot use { } inside a block " + "'extern \"Python\" { ... 
}'") + parts.append(csource[endpos+1:closing]) + csource = csource[closing+1:] + else: + # non-grouping variant + semicolon = csource.find(';', endpos) + if semicolon < 0: + raise api.CDefError("'extern \"Python\": no ';' found") + parts.append(csource[endpos:semicolon+1]) + csource = csource[semicolon+1:] + parts.append(' void __cffi_extern_python_stop;') + #print ''.join(parts)+csource + #print + parts.append(csource) + return ''.join(parts) + +def _preprocess(csource): + # Remove comments. NOTE: this only work because the cdef() section + # should not contain any string literal! + csource = _r_comment.sub(' ', csource) + # Remove the "#define FOO x" lines + macros = {} + for match in _r_define.finditer(csource): + macroname, macrovalue = match.groups() + macrovalue = macrovalue.replace('\\\n', '').strip() + macros[macroname] = macrovalue + csource = _r_define.sub('', csource) + # + if pycparser.__version__ < '2.14': + csource = _workaround_for_old_pycparser(csource) + # + # BIG HACK: replace WINAPI or __stdcall with "volatile const". + # It doesn't make sense for the return type of a function to be + # "volatile volatile const", so we abuse it to detect __stdcall... + # Hack number 2 is that "int(volatile *fptr)();" is not valid C + # syntax, so we place the "volatile" before the opening parenthesis. + csource = _r_stdcall2.sub(' volatile volatile const(', csource) + csource = _r_stdcall1.sub(' volatile volatile const ', csource) + csource = _r_cdecl.sub(' ', csource) + # + # Replace `extern "Python"` with start/end markers + csource = _preprocess_extern_python(csource) + # + # Replace "[...]" with "[__dotdotdotarray__]" + csource = _r_partial_array.sub('[__dotdotdotarray__]', csource) + # + # Replace "...}" with "__dotdotdotNUM__}". This construction should + # occur only at the end of enums; at the end of structs we have "...;}" + # and at the end of vararg functions "...);". Also replace "=...[,}]" + # with ",__dotdotdotNUM__[,}]": this occurs in the enums too, when + # giving an unknown value. + matches = list(_r_partial_enum.finditer(csource)) + for number, match in enumerate(reversed(matches)): + p = match.start() + if csource[p] == '=': + p2 = csource.find('...', p, match.end()) + assert p2 > p + csource = '%s,__dotdotdot%d__ %s' % (csource[:p], number, + csource[p2+3:]) + else: + assert csource[p:p+3] == '...' + csource = '%s __dotdotdot%d__ %s' % (csource[:p], number, + csource[p+3:]) + # Replace all remaining "..." with the same name, "__dotdotdot__", + # which is declared with a typedef for the purpose of C parsing. + return csource.replace('...', ' __dotdotdot__ '), macros + +def _common_type_names(csource): + # Look in the source for what looks like usages of types from the + # list of common types. A "usage" is approximated here as the + # appearance of the word, minus a "definition" of the type, which + # is the last word in a "typedef" statement. Approximative only + # but should be fine for all the common types. 
+ look_for_words = set(COMMON_TYPES) + look_for_words.add(';') + look_for_words.add(',') + look_for_words.add('(') + look_for_words.add(')') + look_for_words.add('typedef') + words_used = set() + is_typedef = False + paren = 0 + previous_word = '' + for word in _r_words.findall(csource): + if word in look_for_words: + if word == ';': + if is_typedef: + words_used.discard(previous_word) + look_for_words.discard(previous_word) + is_typedef = False + elif word == 'typedef': + is_typedef = True + paren = 0 + elif word == '(': + paren += 1 + elif word == ')': + paren -= 1 + elif word == ',': + if is_typedef and paren == 0: + words_used.discard(previous_word) + look_for_words.discard(previous_word) + else: # word in COMMON_TYPES + words_used.add(word) + previous_word = word + return words_used + + +class Parser(object): + + def __init__(self): + self._declarations = {} + self._included_declarations = set() + self._anonymous_counter = 0 + self._structnode2type = weakref.WeakKeyDictionary() + self._options = None + self._int_constants = {} + self._recomplete = [] + self._uses_new_feature = None + + def _parse(self, csource): + csource, macros = _preprocess(csource) + # XXX: for more efficiency we would need to poke into the + # internals of CParser... the following registers the + # typedefs, because their presence or absence influences the + # parsing itself (but what they are typedef'ed to plays no role) + ctn = _common_type_names(csource) + typenames = [] + for name in sorted(self._declarations): + if name.startswith('typedef '): + name = name[8:] + typenames.append(name) + ctn.discard(name) + typenames += sorted(ctn) + # + csourcelines = ['typedef int %s;' % typename for typename in typenames] + csourcelines.append('typedef int __dotdotdot__;') + csourcelines.append(csource) + csource = '\n'.join(csourcelines) + if lock is not None: + lock.acquire() # pycparser is not thread-safe... 
+ try: + ast = _get_parser().parse(csource) + except pycparser.c_parser.ParseError as e: + self.convert_pycparser_error(e, csource) + finally: + if lock is not None: + lock.release() + # csource will be used to find buggy source text + return ast, macros, csource + + def _convert_pycparser_error(self, e, csource): + # xxx look for ":NUM:" at the start of str(e) and try to interpret + # it as a line number + line = None + msg = str(e) + if msg.startswith(':') and ':' in msg[1:]: + linenum = msg[1:msg.find(':',1)] + if linenum.isdigit(): + linenum = int(linenum, 10) + csourcelines = csource.splitlines() + if 1 <= linenum <= len(csourcelines): + line = csourcelines[linenum-1] + return line + + def convert_pycparser_error(self, e, csource): + line = self._convert_pycparser_error(e, csource) + + msg = str(e) + if line: + msg = 'cannot parse "%s"\n%s' % (line.strip(), msg) + else: + msg = 'parse error\n%s' % (msg,) + raise api.CDefError(msg) + + def parse(self, csource, override=False, packed=False, dllexport=False): + prev_options = self._options + try: + self._options = {'override': override, + 'packed': packed, + 'dllexport': dllexport} + self._internal_parse(csource) + finally: + self._options = prev_options + + def _internal_parse(self, csource): + ast, macros, csource = self._parse(csource) + # add the macros + self._process_macros(macros) + # find the first "__dotdotdot__" and use that as a separator + # between the repeated typedefs and the real csource + iterator = iter(ast.ext) + for decl in iterator: + if decl.name == '__dotdotdot__': + break + # + try: + self._inside_extern_python = False + for decl in iterator: + if isinstance(decl, pycparser.c_ast.Decl): + self._parse_decl(decl) + elif isinstance(decl, pycparser.c_ast.Typedef): + if not decl.name: + raise api.CDefError("typedef does not declare any name", + decl) + quals = 0 + if (isinstance(decl.type.type, pycparser.c_ast.IdentifierType) + and decl.type.type.names[-1] == '__dotdotdot__'): + realtype = self._get_unknown_type(decl) + elif (isinstance(decl.type, pycparser.c_ast.PtrDecl) and + isinstance(decl.type.type, pycparser.c_ast.TypeDecl) and + isinstance(decl.type.type.type, + pycparser.c_ast.IdentifierType) and + decl.type.type.type.names == ['__dotdotdot__']): + realtype = model.unknown_ptr_type(decl.name) + else: + realtype, quals = self._get_type_and_quals( + decl.type, name=decl.name) + self._declare('typedef ' + decl.name, realtype, quals=quals) + else: + raise api.CDefError("unrecognized construct", decl) + except api.FFIError as e: + msg = self._convert_pycparser_error(e, csource) + if msg: + e.args = (e.args[0] + "\n *** Err: %s" % msg,) + raise + + def _add_constants(self, key, val): + if key in self._int_constants: + if self._int_constants[key] == val: + return # ignore identical double declarations + raise api.FFIError( + "multiple declarations of constant: %s" % (key,)) + self._int_constants[key] = val + + def _add_integer_constant(self, name, int_str): + int_str = int_str.lower().rstrip("ul") + neg = int_str.startswith('-') + if neg: + int_str = int_str[1:] + # "010" is not valid oct in py3 + if (int_str.startswith("0") and int_str != '0' + and not int_str.startswith("0x")): + int_str = "0o" + int_str[1:] + pyvalue = int(int_str, 0) + if neg: + pyvalue = -pyvalue + self._add_constants(name, pyvalue) + self._declare('macro ' + name, pyvalue) + + def _process_macros(self, macros): + for key, value in macros.items(): + value = value.strip() + if _r_int_literal.match(value): + self._add_integer_constant(key, value) + 
elif value == '...': + self._declare('macro ' + key, value) + else: + raise api.CDefError( + 'only supports one of the following syntax:\n' + ' #define %s ... (literally dot-dot-dot)\n' + ' #define %s NUMBER (with NUMBER an integer' + ' constant, decimal/hex/octal)\n' + 'got:\n' + ' #define %s %s' + % (key, key, key, value)) + + def _declare_function(self, tp, quals, decl): + tp = self._get_type_pointer(tp, quals) + if self._options['dllexport']: + tag = 'dllexport_python ' + elif self._inside_extern_python: + tag = 'extern_python ' + else: + tag = 'function ' + self._declare(tag + decl.name, tp) + + def _parse_decl(self, decl): + node = decl.type + if isinstance(node, pycparser.c_ast.FuncDecl): + tp, quals = self._get_type_and_quals(node, name=decl.name) + assert isinstance(tp, model.RawFunctionType) + self._declare_function(tp, quals, decl) + else: + if isinstance(node, pycparser.c_ast.Struct): + self._get_struct_union_enum_type('struct', node) + elif isinstance(node, pycparser.c_ast.Union): + self._get_struct_union_enum_type('union', node) + elif isinstance(node, pycparser.c_ast.Enum): + self._get_struct_union_enum_type('enum', node) + elif not decl.name: + raise api.CDefError("construct does not declare any variable", + decl) + # + if decl.name: + tp, quals = self._get_type_and_quals(node, + partial_length_ok=True) + if tp.is_raw_function: + self._declare_function(tp, quals, decl) + elif (tp.is_integer_type() and + hasattr(decl, 'init') and + hasattr(decl.init, 'value') and + _r_int_literal.match(decl.init.value)): + self._add_integer_constant(decl.name, decl.init.value) + elif (tp.is_integer_type() and + isinstance(decl.init, pycparser.c_ast.UnaryOp) and + decl.init.op == '-' and + hasattr(decl.init.expr, 'value') and + _r_int_literal.match(decl.init.expr.value)): + self._add_integer_constant(decl.name, + '-' + decl.init.expr.value) + elif (tp is model.void_type and + decl.name.startswith('__cffi_extern_python_')): + # hack: `extern "Python"` in the C source is replaced + # with "void __cffi_extern_python_start;" and + # "void __cffi_extern_python_stop;" + self._inside_extern_python = not self._inside_extern_python + assert self._inside_extern_python == ( + decl.name == '__cffi_extern_python_start') + else: + if self._inside_extern_python: + raise api.CDefError( + "cannot declare constants or " + "variables with 'extern \"Python\"'") + if (quals & model.Q_CONST) and not tp.is_array_type: + self._declare('constant ' + decl.name, tp, quals=quals) + else: + self._declare('variable ' + decl.name, tp, quals=quals) + + def parse_type(self, cdecl): + return self.parse_type_and_quals(cdecl)[0] + + def parse_type_and_quals(self, cdecl): + ast, macros = self._parse('void __dummy(\n%s\n);' % cdecl)[:2] + assert not macros + exprnode = ast.ext[-1].type.args.params[0] + if isinstance(exprnode, pycparser.c_ast.ID): + raise api.CDefError("unknown identifier '%s'" % (exprnode.name,)) + return self._get_type_and_quals(exprnode.type) + + def _declare(self, name, obj, included=False, quals=0): + if name in self._declarations: + prevobj, prevquals = self._declarations[name] + if prevobj is obj and prevquals == quals: + return + if not self._options['override']: + raise api.FFIError( + "multiple declarations of %s (for interactive usage, " + "try cdef(xx, override=True))" % (name,)) + assert '__dotdotdot__' not in name.split() + self._declarations[name] = (obj, quals) + if included: + self._included_declarations.add(obj) + + def _extract_quals(self, type): + quals = 0 + if isinstance(type, 
(pycparser.c_ast.TypeDecl, + pycparser.c_ast.PtrDecl)): + if 'const' in type.quals: + quals |= model.Q_CONST + if 'volatile' in type.quals: + quals |= model.Q_VOLATILE + if 'restrict' in type.quals: + quals |= model.Q_RESTRICT + return quals + + def _get_type_pointer(self, type, quals, declname=None): + if isinstance(type, model.RawFunctionType): + return type.as_function_pointer() + if (isinstance(type, model.StructOrUnionOrEnum) and + type.name.startswith('$') and type.name[1:].isdigit() and + type.forcename is None and declname is not None): + return model.NamedPointerType(type, declname, quals) + return model.PointerType(type, quals) + + def _get_type_and_quals(self, typenode, name=None, partial_length_ok=False): + # first, dereference typedefs, if we have it already parsed, we're good + if (isinstance(typenode, pycparser.c_ast.TypeDecl) and + isinstance(typenode.type, pycparser.c_ast.IdentifierType) and + len(typenode.type.names) == 1 and + ('typedef ' + typenode.type.names[0]) in self._declarations): + tp, quals = self._declarations['typedef ' + typenode.type.names[0]] + quals |= self._extract_quals(typenode) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.ArrayDecl): + # array type + if typenode.dim is None: + length = None + else: + length = self._parse_constant( + typenode.dim, partial_length_ok=partial_length_ok) + tp, quals = self._get_type_and_quals(typenode.type, + partial_length_ok=partial_length_ok) + return model.ArrayType(tp, length), quals + # + if isinstance(typenode, pycparser.c_ast.PtrDecl): + # pointer type + itemtype, itemquals = self._get_type_and_quals(typenode.type) + tp = self._get_type_pointer(itemtype, itemquals, declname=name) + quals = self._extract_quals(typenode) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.TypeDecl): + quals = self._extract_quals(typenode) + type = typenode.type + if isinstance(type, pycparser.c_ast.IdentifierType): + # assume a primitive type. 
get it from .names, but reduce + # synonyms to a single chosen combination + names = list(type.names) + if names != ['signed', 'char']: # keep this unmodified + prefixes = {} + while names: + name = names[0] + if name in ('short', 'long', 'signed', 'unsigned'): + prefixes[name] = prefixes.get(name, 0) + 1 + del names[0] + else: + break + # ignore the 'signed' prefix below, and reorder the others + newnames = [] + for prefix in ('unsigned', 'short', 'long'): + for i in range(prefixes.get(prefix, 0)): + newnames.append(prefix) + if not names: + names = ['int'] # implicitly + if names == ['int']: # but kill it if 'short' or 'long' + if 'short' in prefixes or 'long' in prefixes: + names = [] + names = newnames + names + ident = ' '.join(names) + if ident == 'void': + return model.void_type, quals + if ident == '__dotdotdot__': + raise api.FFIError(':%d: bad usage of "..."' % + typenode.coord.line) + tp0, quals0 = resolve_common_type(self, ident) + return tp0, (quals | quals0) + # + if isinstance(type, pycparser.c_ast.Struct): + # 'struct foobar' + tp = self._get_struct_union_enum_type('struct', type, name) + return tp, quals + # + if isinstance(type, pycparser.c_ast.Union): + # 'union foobar' + tp = self._get_struct_union_enum_type('union', type, name) + return tp, quals + # + if isinstance(type, pycparser.c_ast.Enum): + # 'enum foobar' + tp = self._get_struct_union_enum_type('enum', type, name) + return tp, quals + # + if isinstance(typenode, pycparser.c_ast.FuncDecl): + # a function type + return self._parse_function_type(typenode, name), 0 + # + # nested anonymous structs or unions end up here + if isinstance(typenode, pycparser.c_ast.Struct): + return self._get_struct_union_enum_type('struct', typenode, name, + nested=True), 0 + if isinstance(typenode, pycparser.c_ast.Union): + return self._get_struct_union_enum_type('union', typenode, name, + nested=True), 0 + # + raise api.FFIError(":%d: bad or unsupported type declaration" % + typenode.coord.line) + + def _parse_function_type(self, typenode, funcname=None): + params = list(getattr(typenode.args, 'params', [])) + for i, arg in enumerate(params): + if not hasattr(arg, 'type'): + raise api.CDefError("%s arg %d: unknown type '%s'" + " (if you meant to use the old C syntax of giving" + " untyped arguments, it is not supported)" + % (funcname or 'in expression', i + 1, + getattr(arg, 'name', '?'))) + ellipsis = ( + len(params) > 0 and + isinstance(params[-1].type, pycparser.c_ast.TypeDecl) and + isinstance(params[-1].type.type, + pycparser.c_ast.IdentifierType) and + params[-1].type.type.names == ['__dotdotdot__']) + if ellipsis: + params.pop() + if not params: + raise api.CDefError( + "%s: a function with only '(...)' as argument" + " is not correct C" % (funcname or 'in expression')) + args = [self._as_func_arg(*self._get_type_and_quals(argdeclnode.type)) + for argdeclnode in params] + if not ellipsis and args == [model.void_type]: + args = [] + result, quals = self._get_type_and_quals(typenode.type) + # the 'quals' on the result type are ignored. HACK: we absure them + # to detect __stdcall functions: we textually replace "__stdcall" + # with "volatile volatile const" above. 
+ abi = None + if hasattr(typenode.type, 'quals'): # else, probable syntax error anyway + if typenode.type.quals[-3:] == ['volatile', 'volatile', 'const']: + abi = '__stdcall' + return model.RawFunctionType(tuple(args), result, ellipsis, abi) + + def _as_func_arg(self, type, quals): + if isinstance(type, model.ArrayType): + return model.PointerType(type.item, quals) + elif isinstance(type, model.RawFunctionType): + return type.as_function_pointer() + else: + return type + + def _get_struct_union_enum_type(self, kind, type, name=None, nested=False): + # First, a level of caching on the exact 'type' node of the AST. + # This is obscure, but needed because pycparser "unrolls" declarations + # such as "typedef struct { } foo_t, *foo_p" and we end up with + # an AST that is not a tree, but a DAG, with the "type" node of the + # two branches foo_t and foo_p of the trees being the same node. + # It's a bit silly but detecting "DAG-ness" in the AST tree seems + # to be the only way to distinguish this case from two independent + # structs. See test_struct_with_two_usages. + try: + return self._structnode2type[type] + except KeyError: + pass + # + # Note that this must handle parsing "struct foo" any number of + # times and always return the same StructType object. Additionally, + # one of these times (not necessarily the first), the fields of + # the struct can be specified with "struct foo { ...fields... }". + # If no name is given, then we have to create a new anonymous struct + # with no caching; in this case, the fields are either specified + # right now or never. + # + force_name = name + name = type.name + # + # get the type or create it if needed + if name is None: + # 'force_name' is used to guess a more readable name for + # anonymous structs, for the common case "typedef struct { } foo". + if force_name is not None: + explicit_name = '$%s' % force_name + else: + self._anonymous_counter += 1 + explicit_name = '$%d' % self._anonymous_counter + tp = None + else: + explicit_name = name + key = '%s %s' % (kind, name) + tp, _ = self._declarations.get(key, (None, None)) + # + if tp is None: + if kind == 'struct': + tp = model.StructType(explicit_name, None, None, None) + elif kind == 'union': + tp = model.UnionType(explicit_name, None, None, None) + elif kind == 'enum': + if explicit_name == '__dotdotdot__': + raise CDefError("Enums cannot be declared with ...") + tp = self._build_enum_type(explicit_name, type.values) + else: + raise AssertionError("kind = %r" % (kind,)) + if name is not None: + self._declare(key, tp) + else: + if kind == 'enum' and type.values is not None: + raise NotImplementedError( + "enum %s: the '{}' declaration should appear on the first " + "time the enum is mentioned, not later" % explicit_name) + if not tp.forcename: + tp.force_the_name(force_name) + if tp.forcename and '$' in tp.name: + self._declare('anonymous %s' % tp.forcename, tp) + # + self._structnode2type[type] = tp + # + # enums: done here + if kind == 'enum': + return tp + # + # is there a 'type.decls'? If yes, then this is the place in the + # C sources that declare the fields. If no, then just return the + # existing type, possibly still incomplete. 
+ if type.decls is None: + return tp + # + if tp.fldnames is not None: + raise api.CDefError("duplicate declaration of struct %s" % name) + fldnames = [] + fldtypes = [] + fldbitsize = [] + fldquals = [] + for decl in type.decls: + if (isinstance(decl.type, pycparser.c_ast.IdentifierType) and + ''.join(decl.type.names) == '__dotdotdot__'): + # XXX pycparser is inconsistent: 'names' should be a list + # of strings, but is sometimes just one string. Use + # str.join() as a way to cope with both. + self._make_partial(tp, nested) + continue + if decl.bitsize is None: + bitsize = -1 + else: + bitsize = self._parse_constant(decl.bitsize) + self._partial_length = False + type, fqual = self._get_type_and_quals(decl.type, + partial_length_ok=True) + if self._partial_length: + self._make_partial(tp, nested) + if isinstance(type, model.StructType) and type.partial: + self._make_partial(tp, nested) + fldnames.append(decl.name or '') + fldtypes.append(type) + fldbitsize.append(bitsize) + fldquals.append(fqual) + tp.fldnames = tuple(fldnames) + tp.fldtypes = tuple(fldtypes) + tp.fldbitsize = tuple(fldbitsize) + tp.fldquals = tuple(fldquals) + if fldbitsize != [-1] * len(fldbitsize): + if isinstance(tp, model.StructType) and tp.partial: + raise NotImplementedError("%s: using both bitfields and '...;'" + % (tp,)) + tp.packed = self._options['packed'] + if tp.completed: # must be re-completed: it is not opaque any more + tp.completed = 0 + self._recomplete.append(tp) + return tp + + def _make_partial(self, tp, nested): + if not isinstance(tp, model.StructOrUnion): + raise api.CDefError("%s cannot be partial" % (tp,)) + if not tp.has_c_name() and not nested: + raise NotImplementedError("%s is partial but has no C name" %(tp,)) + tp.partial = True + + def _parse_constant(self, exprnode, partial_length_ok=False): + # for now, limited to expressions that are an immediate number + # or positive/negative number + if isinstance(exprnode, pycparser.c_ast.Constant): + s = exprnode.value + if s.startswith('0'): + if s.startswith('0x') or s.startswith('0X'): + return int(s, 16) + return int(s, 8) + elif '1' <= s[0] <= '9': + return int(s, 10) + elif s[0] == "'" and s[-1] == "'" and ( + len(s) == 3 or (len(s) == 4 and s[1] == "\\")): + return ord(s[-2]) + else: + raise api.CDefError("invalid constant %r" % (s,)) + # + if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and + exprnode.op == '+'): + return self._parse_constant(exprnode.expr) + # + if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and + exprnode.op == '-'): + return -self._parse_constant(exprnode.expr) + # load previously defined int constant + if (isinstance(exprnode, pycparser.c_ast.ID) and + exprnode.name in self._int_constants): + return self._int_constants[exprnode.name] + # + if partial_length_ok: + if (isinstance(exprnode, pycparser.c_ast.ID) and + exprnode.name == '__dotdotdotarray__'): + self._partial_length = True + return '...' 
+ # + raise api.FFIError(":%d: unsupported expression: expected a " + "simple numeric constant" % exprnode.coord.line) + + def _build_enum_type(self, explicit_name, decls): + if decls is not None: + partial = False + enumerators = [] + enumvalues = [] + nextenumvalue = 0 + for enum in decls.enumerators: + if _r_enum_dotdotdot.match(enum.name): + partial = True + continue + if enum.value is not None: + nextenumvalue = self._parse_constant(enum.value) + enumerators.append(enum.name) + enumvalues.append(nextenumvalue) + self._add_constants(enum.name, nextenumvalue) + nextenumvalue += 1 + enumerators = tuple(enumerators) + enumvalues = tuple(enumvalues) + tp = model.EnumType(explicit_name, enumerators, enumvalues) + tp.partial = partial + else: # opaque enum + tp = model.EnumType(explicit_name, (), ()) + return tp + + def include(self, other): + for name, (tp, quals) in other._declarations.items(): + if name.startswith('anonymous $enum_$'): + continue # fix for test_anonymous_enum_include + kind = name.split(' ', 1)[0] + if kind in ('struct', 'union', 'enum', 'anonymous', 'typedef'): + self._declare(name, tp, included=True, quals=quals) + for k, v in other._int_constants.items(): + self._add_constants(k, v) + + def _get_unknown_type(self, decl): + typenames = decl.type.type.names + assert typenames[-1] == '__dotdotdot__' + if len(typenames) == 1: + return model.unknown_type(decl.name) + + if (typenames[:-1] == ['float'] or + typenames[:-1] == ['double']): + # not for 'long double' so far + result = model.UnknownFloatType(decl.name) + else: + for t in typenames[:-1]: + if t not in ['int', 'short', 'long', 'signed', + 'unsigned', 'char']: + raise api.FFIError(':%d: bad usage of "..."' % + decl.coord.line) + result = model.UnknownIntegerType(decl.name) + + if self._uses_new_feature is None: + self._uses_new_feature = "'typedef %s... 
%s'" % ( + ' '.join(typenames[:-1]), decl.name) + + return result diff --git a/Lib/site-packages/cffi/ffiplatform.py b/Lib/site-packages/cffi/ffiplatform.py new file mode 100644 index 0000000..d2daa8e --- /dev/null +++ b/Lib/site-packages/cffi/ffiplatform.py @@ -0,0 +1,121 @@ +import sys, os + + +class VerificationError(Exception): + """ An error raised when verification fails + """ + +class VerificationMissing(Exception): + """ An error raised when incomplete structures are passed into + cdef, but no verification has been done + """ + + +LIST_OF_FILE_NAMES = ['sources', 'include_dirs', 'library_dirs', + 'extra_objects', 'depends'] + +def get_extension(srcfilename, modname, sources=(), **kwds): + from distutils.core import Extension + allsources = [srcfilename] + for src in sources: + allsources.append(os.path.normpath(src)) + return Extension(name=modname, sources=allsources, **kwds) + +def compile(tmpdir, ext, compiler_verbose=0): + """Compile a C extension module using distutils.""" + + saved_environ = os.environ.copy() + try: + outputfilename = _build(tmpdir, ext, compiler_verbose) + outputfilename = os.path.abspath(outputfilename) + finally: + # workaround for a distutils bugs where some env vars can + # become longer and longer every time it is used + for key, value in saved_environ.items(): + if os.environ.get(key) != value: + os.environ[key] = value + return outputfilename + +def _build(tmpdir, ext, compiler_verbose=0): + # XXX compact but horrible :-( + from distutils.core import Distribution + import distutils.errors, distutils.log + # + dist = Distribution({'ext_modules': [ext]}) + dist.parse_config_files() + options = dist.get_option_dict('build_ext') + options['force'] = ('ffiplatform', True) + options['build_lib'] = ('ffiplatform', tmpdir) + options['build_temp'] = ('ffiplatform', tmpdir) + # + try: + old_level = distutils.log.set_threshold(0) or 0 + try: + distutils.log.set_verbosity(compiler_verbose) + dist.run_command('build_ext') + cmd_obj = dist.get_command_obj('build_ext') + [soname] = cmd_obj.get_outputs() + finally: + distutils.log.set_threshold(old_level) + except (distutils.errors.CompileError, + distutils.errors.LinkError) as e: + raise VerificationError('%s: %s' % (e.__class__.__name__, e)) + # + return soname + +try: + from os.path import samefile +except ImportError: + def samefile(f1, f2): + return os.path.abspath(f1) == os.path.abspath(f2) + +def maybe_relative_path(path): + if not os.path.isabs(path): + return path # already relative + dir = path + names = [] + while True: + prevdir = dir + dir, name = os.path.split(prevdir) + if dir == prevdir or not dir: + return path # failed to make it relative + names.append(name) + try: + if samefile(dir, os.curdir): + names.reverse() + return os.path.join(*names) + except OSError: + pass + +# ____________________________________________________________ + +try: + int_or_long = (int, long) + import cStringIO +except NameError: + int_or_long = int # Python 3 + import io as cStringIO + +def _flatten(x, f): + if isinstance(x, str): + f.write('%ds%s' % (len(x), x)) + elif isinstance(x, dict): + keys = sorted(x.keys()) + f.write('%dd' % len(keys)) + for key in keys: + _flatten(key, f) + _flatten(x[key], f) + elif isinstance(x, (list, tuple)): + f.write('%dl' % len(x)) + for value in x: + _flatten(value, f) + elif isinstance(x, int_or_long): + f.write('%di' % (x,)) + else: + raise TypeError( + "the keywords to verify() contains unsupported object %r" % (x,)) + +def flatten(x): + f = cStringIO.StringIO() + _flatten(x, f) + 
return f.getvalue() diff --git a/Lib/site-packages/cffi/gc_weakref.py b/Lib/site-packages/cffi/gc_weakref.py new file mode 100644 index 0000000..d0ffb23 --- /dev/null +++ b/Lib/site-packages/cffi/gc_weakref.py @@ -0,0 +1,22 @@ +from weakref import ref + + +class GcWeakrefs(object): + def __init__(self, ffi): + self.ffi = ffi + self.data = {} + + def build(self, cdata, destructor): + # make a new cdata of the same type as the original one + new_cdata = self.ffi.cast(self.ffi._backend.typeof(cdata), cdata) + # + def remove(key): + # careful, this function is not protected by any lock + old_key = self.data.pop(index) + assert old_key is key + destructor(cdata) + # + key = ref(new_cdata, remove) + index = object() + self.data[index] = key + return new_cdata diff --git a/Lib/site-packages/cffi/lock.py b/Lib/site-packages/cffi/lock.py new file mode 100644 index 0000000..db91b71 --- /dev/null +++ b/Lib/site-packages/cffi/lock.py @@ -0,0 +1,30 @@ +import sys + +if sys.version_info < (3,): + try: + from thread import allocate_lock + except ImportError: + from dummy_thread import allocate_lock +else: + try: + from _thread import allocate_lock + except ImportError: + from _dummy_thread import allocate_lock + + +##import sys +##l1 = allocate_lock + +##class allocate_lock(object): +## def __init__(self): +## self._real = l1() +## def __enter__(self): +## for i in range(4, 0, -1): +## print sys._getframe(i).f_code +## print +## return self._real.__enter__() +## def __exit__(self, *args): +## return self._real.__exit__(*args) +## def acquire(self, f): +## assert f is False +## return self._real.acquire(f) diff --git a/Lib/site-packages/cffi/model.py b/Lib/site-packages/cffi/model.py new file mode 100644 index 0000000..5783034 --- /dev/null +++ b/Lib/site-packages/cffi/model.py @@ -0,0 +1,602 @@ +import types, sys +import weakref + +from .lock import allocate_lock + + +# type qualifiers +Q_CONST = 0x01 +Q_RESTRICT = 0x02 +Q_VOLATILE = 0x04 + +def qualify(quals, replace_with): + if quals & Q_CONST: + replace_with = ' const ' + replace_with.lstrip() + if quals & Q_VOLATILE: + replace_with = ' volatile ' + replace_with.lstrip() + if quals & Q_RESTRICT: + # It seems that __restrict is supported by gcc and msvc. + # If you hit some different compiler, add a #define in + # _cffi_include.h for it (and in its copies, documented there) + replace_with = ' __restrict ' + replace_with.lstrip() + return replace_with + + +class BaseTypeByIdentity(object): + is_array_type = False + is_raw_function = False + + def get_c_name(self, replace_with='', context='a C file', quals=0): + result = self.c_name_with_marker + assert result.count('&') == 1 + # some logic duplication with ffi.getctype()... 
:-( + replace_with = replace_with.strip() + if replace_with: + if replace_with.startswith('*') and '&[' in result: + replace_with = '(%s)' % replace_with + elif not replace_with[0] in '[(': + replace_with = ' ' + replace_with + replace_with = qualify(quals, replace_with) + result = result.replace('&', replace_with) + if '$' in result: + from .ffiplatform import VerificationError + raise VerificationError( + "cannot generate '%s' in %s: unknown type name" + % (self._get_c_name(), context)) + return result + + def _get_c_name(self): + return self.c_name_with_marker.replace('&', '') + + def has_c_name(self): + return '$' not in self._get_c_name() + + def is_integer_type(self): + return False + + def get_cached_btype(self, ffi, finishlist, can_delay=False): + try: + BType = ffi._cached_btypes[self] + except KeyError: + BType = self.build_backend_type(ffi, finishlist) + BType2 = ffi._cached_btypes.setdefault(self, BType) + assert BType2 is BType + return BType + + def __repr__(self): + return '<%s>' % (self._get_c_name(),) + + def _get_items(self): + return [(name, getattr(self, name)) for name in self._attrs_] + + +class BaseType(BaseTypeByIdentity): + + def __eq__(self, other): + return (self.__class__ == other.__class__ and + self._get_items() == other._get_items()) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash((self.__class__, tuple(self._get_items()))) + + +class VoidType(BaseType): + _attrs_ = () + + def __init__(self): + self.c_name_with_marker = 'void&' + + def build_backend_type(self, ffi, finishlist): + return global_cache(self, ffi, 'new_void_type') + +void_type = VoidType() + + +class BasePrimitiveType(BaseType): + pass + + +class PrimitiveType(BasePrimitiveType): + _attrs_ = ('name',) + + ALL_PRIMITIVE_TYPES = { + 'char': 'c', + 'short': 'i', + 'int': 'i', + 'long': 'i', + 'long long': 'i', + 'signed char': 'i', + 'unsigned char': 'i', + 'unsigned short': 'i', + 'unsigned int': 'i', + 'unsigned long': 'i', + 'unsigned long long': 'i', + 'float': 'f', + 'double': 'f', + 'long double': 'f', + '_Bool': 'i', + # the following types are not primitive in the C sense + 'wchar_t': 'c', + 'int8_t': 'i', + 'uint8_t': 'i', + 'int16_t': 'i', + 'uint16_t': 'i', + 'int32_t': 'i', + 'uint32_t': 'i', + 'int64_t': 'i', + 'uint64_t': 'i', + 'int_least8_t': 'i', + 'uint_least8_t': 'i', + 'int_least16_t': 'i', + 'uint_least16_t': 'i', + 'int_least32_t': 'i', + 'uint_least32_t': 'i', + 'int_least64_t': 'i', + 'uint_least64_t': 'i', + 'int_fast8_t': 'i', + 'uint_fast8_t': 'i', + 'int_fast16_t': 'i', + 'uint_fast16_t': 'i', + 'int_fast32_t': 'i', + 'uint_fast32_t': 'i', + 'int_fast64_t': 'i', + 'uint_fast64_t': 'i', + 'intptr_t': 'i', + 'uintptr_t': 'i', + 'intmax_t': 'i', + 'uintmax_t': 'i', + 'ptrdiff_t': 'i', + 'size_t': 'i', + 'ssize_t': 'i', + } + + def __init__(self, name): + assert name in self.ALL_PRIMITIVE_TYPES + self.name = name + self.c_name_with_marker = name + '&' + + def is_char_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'c' + def is_integer_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'i' + def is_float_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'f' + + def build_backend_type(self, ffi, finishlist): + return global_cache(self, ffi, 'new_primitive_type', self.name) + + +class UnknownIntegerType(BasePrimitiveType): + _attrs_ = ('name',) + + def __init__(self, name): + self.name = name + self.c_name_with_marker = name + '&' + + def is_integer_type(self): + return True + + def 
build_backend_type(self, ffi, finishlist): + raise NotImplementedError("integer type '%s' can only be used after " + "compilation" % self.name) + +class UnknownFloatType(BasePrimitiveType): + _attrs_ = ('name', ) + + def __init__(self, name): + self.name = name + self.c_name_with_marker = name + '&' + + def build_backend_type(self, ffi, finishlist): + raise NotImplementedError("float type '%s' can only be used after " + "compilation" % self.name) + + +class BaseFunctionType(BaseType): + _attrs_ = ('args', 'result', 'ellipsis', 'abi') + + def __init__(self, args, result, ellipsis, abi=None): + self.args = args + self.result = result + self.ellipsis = ellipsis + self.abi = abi + # + reprargs = [arg._get_c_name() for arg in self.args] + if self.ellipsis: + reprargs.append('...') + reprargs = reprargs or ['void'] + replace_with = self._base_pattern % (', '.join(reprargs),) + if abi is not None: + replace_with = replace_with[:1] + abi + ' ' + replace_with[1:] + self.c_name_with_marker = ( + self.result.c_name_with_marker.replace('&', replace_with)) + + +class RawFunctionType(BaseFunctionType): + # Corresponds to a C type like 'int(int)', which is the C type of + # a function, but not a pointer-to-function. The backend has no + # notion of such a type; it's used temporarily by parsing. + _base_pattern = '(&)(%s)' + is_raw_function = True + + def build_backend_type(self, ffi, finishlist): + from . import api + raise api.CDefError("cannot render the type %r: it is a function " + "type, not a pointer-to-function type" % (self,)) + + def as_function_pointer(self): + return FunctionPtrType(self.args, self.result, self.ellipsis, self.abi) + + +class FunctionPtrType(BaseFunctionType): + _base_pattern = '(*&)(%s)' + + def build_backend_type(self, ffi, finishlist): + result = self.result.get_cached_btype(ffi, finishlist) + args = [] + for tp in self.args: + args.append(tp.get_cached_btype(ffi, finishlist)) + abi_args = () + if self.abi == "__stdcall": + if not self.ellipsis: # __stdcall ignored for variadic funcs + try: + abi_args = (ffi._backend.FFI_STDCALL,) + except AttributeError: + pass + return global_cache(self, ffi, 'new_function_type', + tuple(args), result, self.ellipsis, *abi_args) + + def as_raw_function(self): + return RawFunctionType(self.args, self.result, self.ellipsis, self.abi) + + +class PointerType(BaseType): + _attrs_ = ('totype', 'quals') + + def __init__(self, totype, quals=0): + self.totype = totype + self.quals = quals + extra = qualify(quals, " *&") + if totype.is_array_type: + extra = "(%s)" % (extra.lstrip(),) + self.c_name_with_marker = totype.c_name_with_marker.replace('&', extra) + + def build_backend_type(self, ffi, finishlist): + BItem = self.totype.get_cached_btype(ffi, finishlist, can_delay=True) + return global_cache(self, ffi, 'new_pointer_type', BItem) + +voidp_type = PointerType(void_type) + +def ConstPointerType(totype): + return PointerType(totype, Q_CONST) + +const_voidp_type = ConstPointerType(void_type) + + +class NamedPointerType(PointerType): + _attrs_ = ('totype', 'name') + + def __init__(self, totype, name, quals=0): + PointerType.__init__(self, totype, quals) + self.name = name + self.c_name_with_marker = name + '&' + + +class ArrayType(BaseType): + _attrs_ = ('item', 'length') + is_array_type = True + + def __init__(self, item, length): + self.item = item + self.length = length + # + if length is None: + brackets = '&[]' + elif length == '...': + brackets = '&[/*...*/]' + else: + brackets = '&[%s]' % length + self.c_name_with_marker = ( + 
self.item.c_name_with_marker.replace('&', brackets)) + + def resolve_length(self, newlength): + return ArrayType(self.item, newlength) + + def build_backend_type(self, ffi, finishlist): + if self.length == '...': + from . import api + raise api.CDefError("cannot render the type %r: unknown length" % + (self,)) + self.item.get_cached_btype(ffi, finishlist) # force the item BType + BPtrItem = PointerType(self.item).get_cached_btype(ffi, finishlist) + return global_cache(self, ffi, 'new_array_type', BPtrItem, self.length) + +char_array_type = ArrayType(PrimitiveType('char'), None) + + +class StructOrUnionOrEnum(BaseTypeByIdentity): + _attrs_ = ('name',) + forcename = None + + def build_c_name_with_marker(self): + name = self.forcename or '%s %s' % (self.kind, self.name) + self.c_name_with_marker = name + '&' + + def force_the_name(self, forcename): + self.forcename = forcename + self.build_c_name_with_marker() + + def get_official_name(self): + assert self.c_name_with_marker.endswith('&') + return self.c_name_with_marker[:-1] + + +class StructOrUnion(StructOrUnionOrEnum): + fixedlayout = None + completed = 0 + partial = False + packed = False + + def __init__(self, name, fldnames, fldtypes, fldbitsize, fldquals=None): + self.name = name + self.fldnames = fldnames + self.fldtypes = fldtypes + self.fldbitsize = fldbitsize + self.fldquals = fldquals + self.build_c_name_with_marker() + + def has_anonymous_struct_fields(self): + if self.fldtypes is None: + return False + for name, type in zip(self.fldnames, self.fldtypes): + if name == '' and isinstance(type, StructOrUnion): + return True + return False + + def enumfields(self): + fldquals = self.fldquals + if fldquals is None: + fldquals = (0,) * len(self.fldnames) + for name, type, bitsize, quals in zip(self.fldnames, self.fldtypes, + self.fldbitsize, fldquals): + if name == '' and isinstance(type, StructOrUnion): + # nested anonymous struct/union + for result in type.enumfields(): + yield result + else: + yield (name, type, bitsize, quals) + + def force_flatten(self): + # force the struct or union to have a declaration that lists + # directly all fields returned by enumfields(), flattening + # nested anonymous structs/unions. 
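+        # E.g. "struct s { int a; struct { int b; }; };" is flattened so
+        # that the outer struct's fldnames become ('a', 'b').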
+ names = [] + types = [] + bitsizes = [] + fldquals = [] + for name, type, bitsize, quals in self.enumfields(): + names.append(name) + types.append(type) + bitsizes.append(bitsize) + fldquals.append(quals) + self.fldnames = tuple(names) + self.fldtypes = tuple(types) + self.fldbitsize = tuple(bitsizes) + self.fldquals = tuple(fldquals) + + def get_cached_btype(self, ffi, finishlist, can_delay=False): + BType = StructOrUnionOrEnum.get_cached_btype(self, ffi, finishlist, + can_delay) + if not can_delay: + self.finish_backend_type(ffi, finishlist) + return BType + + def finish_backend_type(self, ffi, finishlist): + if self.completed: + if self.completed != 2: + raise NotImplementedError("recursive structure declaration " + "for '%s'" % (self.name,)) + return + BType = ffi._cached_btypes[self] + # + self.completed = 1 + # + if self.fldtypes is None: + pass # not completing it: it's an opaque struct + # + elif self.fixedlayout is None: + fldtypes = [tp.get_cached_btype(ffi, finishlist) + for tp in self.fldtypes] + lst = list(zip(self.fldnames, fldtypes, self.fldbitsize)) + sflags = 0 + if self.packed: + sflags = 8 # SF_PACKED + ffi._backend.complete_struct_or_union(BType, lst, self, + -1, -1, sflags) + # + else: + fldtypes = [] + fieldofs, fieldsize, totalsize, totalalignment = self.fixedlayout + for i in range(len(self.fldnames)): + fsize = fieldsize[i] + ftype = self.fldtypes[i] + # + if isinstance(ftype, ArrayType) and ftype.length == '...': + # fix the length to match the total size + BItemType = ftype.item.get_cached_btype(ffi, finishlist) + nlen, nrest = divmod(fsize, ffi.sizeof(BItemType)) + if nrest != 0: + self._verification_error( + "field '%s.%s' has a bogus size?" % ( + self.name, self.fldnames[i] or '{}')) + ftype = ftype.resolve_length(nlen) + self.fldtypes = (self.fldtypes[:i] + (ftype,) + + self.fldtypes[i+1:]) + # + BFieldType = ftype.get_cached_btype(ffi, finishlist) + if isinstance(ftype, ArrayType) and ftype.length is None: + assert fsize == 0 + else: + bitemsize = ffi.sizeof(BFieldType) + if bitemsize != fsize: + self._verification_error( + "field '%s.%s' is declared as %d bytes, but is " + "really %d bytes" % (self.name, + self.fldnames[i] or '{}', + bitemsize, fsize)) + fldtypes.append(BFieldType) + # + lst = list(zip(self.fldnames, fldtypes, self.fldbitsize, fieldofs)) + ffi._backend.complete_struct_or_union(BType, lst, self, + totalsize, totalalignment) + self.completed = 2 + + def _verification_error(self, msg): + from .ffiplatform import VerificationError + raise VerificationError(msg) + + def check_not_partial(self): + if self.partial and self.fixedlayout is None: + from . 
import ffiplatform + raise ffiplatform.VerificationMissing(self._get_c_name()) + + def build_backend_type(self, ffi, finishlist): + self.check_not_partial() + finishlist.append(self) + # + return global_cache(self, ffi, 'new_%s_type' % self.kind, + self.get_official_name(), key=self) + + +class StructType(StructOrUnion): + kind = 'struct' + + +class UnionType(StructOrUnion): + kind = 'union' + + +class EnumType(StructOrUnionOrEnum): + kind = 'enum' + partial = False + partial_resolved = False + + def __init__(self, name, enumerators, enumvalues, baseinttype=None): + self.name = name + self.enumerators = enumerators + self.enumvalues = enumvalues + self.baseinttype = baseinttype + self.build_c_name_with_marker() + + def force_the_name(self, forcename): + StructOrUnionOrEnum.force_the_name(self, forcename) + if self.forcename is None: + name = self.get_official_name() + self.forcename = '$' + name.replace(' ', '_') + + def check_not_partial(self): + if self.partial and not self.partial_resolved: + from . import ffiplatform + raise ffiplatform.VerificationMissing(self._get_c_name()) + + def build_backend_type(self, ffi, finishlist): + self.check_not_partial() + base_btype = self.build_baseinttype(ffi, finishlist) + return global_cache(self, ffi, 'new_enum_type', + self.get_official_name(), + self.enumerators, self.enumvalues, + base_btype, key=self) + + def build_baseinttype(self, ffi, finishlist): + if self.baseinttype is not None: + return self.baseinttype.get_cached_btype(ffi, finishlist) + # + from . import api + if self.enumvalues: + smallest_value = min(self.enumvalues) + largest_value = max(self.enumvalues) + else: + import warnings + warnings.warn("%r has no values explicitly defined; next version " + "will refuse to guess which integer type it is " + "meant to be (unsigned/signed, int/long)" + % self._get_c_name()) + smallest_value = largest_value = 0 + if smallest_value < 0: # needs a signed type + sign = 1 + candidate1 = PrimitiveType("int") + candidate2 = PrimitiveType("long") + else: + sign = 0 + candidate1 = PrimitiveType("unsigned int") + candidate2 = PrimitiveType("unsigned long") + btype1 = candidate1.get_cached_btype(ffi, finishlist) + btype2 = candidate2.get_cached_btype(ffi, finishlist) + size1 = ffi.sizeof(btype1) + size2 = ffi.sizeof(btype2) + if (smallest_value >= ((-1) << (8*size1-1)) and + largest_value < (1 << (8*size1-sign))): + return btype1 + if (smallest_value >= ((-1) << (8*size2-1)) and + largest_value < (1 << (8*size2-sign))): + return btype2 + raise api.CDefError("%s values don't all fit into either 'long' " + "or 'unsigned long'" % self._get_c_name()) + +def unknown_type(name, structname=None): + if structname is None: + structname = '$%s' % name + tp = StructType(structname, None, None, None) + tp.force_the_name(name) + tp.origin = "unknown_type" + return tp + +def unknown_ptr_type(name, structname=None): + if structname is None: + structname = '$$%s' % name + tp = StructType(structname, None, None, None) + return NamedPointerType(tp, name) + + +global_lock = allocate_lock() + +def global_cache(srctype, ffi, funcname, *args, **kwds): + key = kwds.pop('key', (funcname, args)) + assert not kwds + try: + return ffi._backend.__typecache[key] + except KeyError: + pass + except AttributeError: + # initialize the __typecache attribute, either at the module level + # if ffi._backend is a module, or at the class level if ffi._backend + # is some instance. 
+ if isinstance(ffi._backend, types.ModuleType): + ffi._backend.__typecache = weakref.WeakValueDictionary() + else: + type(ffi._backend).__typecache = weakref.WeakValueDictionary() + try: + res = getattr(ffi._backend, funcname)(*args) + except NotImplementedError as e: + raise NotImplementedError("%s: %r: %s" % (funcname, srctype, e)) + # note that setdefault() on WeakValueDictionary is not atomic + # and contains a rare bug (http://bugs.python.org/issue19542); + # we have to use a lock and do it ourselves + cache = ffi._backend.__typecache + with global_lock: + res1 = cache.get(key) + if res1 is None: + cache[key] = res + return res + else: + return res1 + +def pointer_cache(ffi, BType): + return global_cache('?', ffi, 'new_pointer_type', BType) + +def attach_exception_info(e, name): + if e.args and type(e.args[0]) is str: + e.args = ('%s: %s' % (name, e.args[0]),) + e.args[1:] diff --git a/Lib/site-packages/cffi/parse_c_type.h b/Lib/site-packages/cffi/parse_c_type.h new file mode 100644 index 0000000..a01d89e --- /dev/null +++ b/Lib/site-packages/cffi/parse_c_type.h @@ -0,0 +1,177 @@ + +/* This part is from file 'cffi/parse_c_type.h'. It is copied at the + beginning of C sources generated by CFFI's ffi.set_source(). */ + +typedef void *_cffi_opcode_t; + +#define _CFFI_OP(opcode, arg) (_cffi_opcode_t)(opcode | (((uintptr_t)(arg)) << 8)) +#define _CFFI_GETOP(cffi_opcode) ((unsigned char)(uintptr_t)cffi_opcode) +#define _CFFI_GETARG(cffi_opcode) (((intptr_t)cffi_opcode) >> 8) + +#define _CFFI_OP_PRIMITIVE 1 +#define _CFFI_OP_POINTER 3 +#define _CFFI_OP_ARRAY 5 +#define _CFFI_OP_OPEN_ARRAY 7 +#define _CFFI_OP_STRUCT_UNION 9 +#define _CFFI_OP_ENUM 11 +#define _CFFI_OP_FUNCTION 13 +#define _CFFI_OP_FUNCTION_END 15 +#define _CFFI_OP_NOOP 17 +#define _CFFI_OP_BITFIELD 19 +#define _CFFI_OP_TYPENAME 21 +#define _CFFI_OP_CPYTHON_BLTN_V 23 // varargs +#define _CFFI_OP_CPYTHON_BLTN_N 25 // noargs +#define _CFFI_OP_CPYTHON_BLTN_O 27 // O (i.e. 
a single arg) +#define _CFFI_OP_CONSTANT 29 +#define _CFFI_OP_CONSTANT_INT 31 +#define _CFFI_OP_GLOBAL_VAR 33 +#define _CFFI_OP_DLOPEN_FUNC 35 +#define _CFFI_OP_DLOPEN_CONST 37 +#define _CFFI_OP_GLOBAL_VAR_F 39 +#define _CFFI_OP_EXTERN_PYTHON 41 + +#define _CFFI_PRIM_VOID 0 +#define _CFFI_PRIM_BOOL 1 +#define _CFFI_PRIM_CHAR 2 +#define _CFFI_PRIM_SCHAR 3 +#define _CFFI_PRIM_UCHAR 4 +#define _CFFI_PRIM_SHORT 5 +#define _CFFI_PRIM_USHORT 6 +#define _CFFI_PRIM_INT 7 +#define _CFFI_PRIM_UINT 8 +#define _CFFI_PRIM_LONG 9 +#define _CFFI_PRIM_ULONG 10 +#define _CFFI_PRIM_LONGLONG 11 +#define _CFFI_PRIM_ULONGLONG 12 +#define _CFFI_PRIM_FLOAT 13 +#define _CFFI_PRIM_DOUBLE 14 +#define _CFFI_PRIM_LONGDOUBLE 15 + +#define _CFFI_PRIM_WCHAR 16 +#define _CFFI_PRIM_INT8 17 +#define _CFFI_PRIM_UINT8 18 +#define _CFFI_PRIM_INT16 19 +#define _CFFI_PRIM_UINT16 20 +#define _CFFI_PRIM_INT32 21 +#define _CFFI_PRIM_UINT32 22 +#define _CFFI_PRIM_INT64 23 +#define _CFFI_PRIM_UINT64 24 +#define _CFFI_PRIM_INTPTR 25 +#define _CFFI_PRIM_UINTPTR 26 +#define _CFFI_PRIM_PTRDIFF 27 +#define _CFFI_PRIM_SIZE 28 +#define _CFFI_PRIM_SSIZE 29 +#define _CFFI_PRIM_INT_LEAST8 30 +#define _CFFI_PRIM_UINT_LEAST8 31 +#define _CFFI_PRIM_INT_LEAST16 32 +#define _CFFI_PRIM_UINT_LEAST16 33 +#define _CFFI_PRIM_INT_LEAST32 34 +#define _CFFI_PRIM_UINT_LEAST32 35 +#define _CFFI_PRIM_INT_LEAST64 36 +#define _CFFI_PRIM_UINT_LEAST64 37 +#define _CFFI_PRIM_INT_FAST8 38 +#define _CFFI_PRIM_UINT_FAST8 39 +#define _CFFI_PRIM_INT_FAST16 40 +#define _CFFI_PRIM_UINT_FAST16 41 +#define _CFFI_PRIM_INT_FAST32 42 +#define _CFFI_PRIM_UINT_FAST32 43 +#define _CFFI_PRIM_INT_FAST64 44 +#define _CFFI_PRIM_UINT_FAST64 45 +#define _CFFI_PRIM_INTMAX 46 +#define _CFFI_PRIM_UINTMAX 47 + +#define _CFFI__NUM_PRIM 48 +#define _CFFI__UNKNOWN_PRIM (-1) +#define _CFFI__UNKNOWN_FLOAT_PRIM (-2) +#define _CFFI__UNKNOWN_LONG_DOUBLE (-3) + +#define _CFFI__IO_FILE_STRUCT (-1) + + +struct _cffi_global_s { + const char *name; + void *address; + _cffi_opcode_t type_op; + void *size_or_direct_fn; // OP_GLOBAL_VAR: size, or 0 if unknown + // OP_CPYTHON_BLTN_*: addr of direct function +}; + +struct _cffi_getconst_s { + unsigned long long value; + const struct _cffi_type_context_s *ctx; + int gindex; +}; + +struct _cffi_struct_union_s { + const char *name; + int type_index; // -> _cffi_types, on a OP_STRUCT_UNION + int flags; // _CFFI_F_* flags below + size_t size; + int alignment; + int first_field_index; // -> _cffi_fields array + int num_fields; +}; +#define _CFFI_F_UNION 0x01 // is a union, not a struct +#define _CFFI_F_CHECK_FIELDS 0x02 // complain if fields are not in the + // "standard layout" or if some are missing +#define _CFFI_F_PACKED 0x04 // for CHECK_FIELDS, assume a packed struct +#define _CFFI_F_EXTERNAL 0x08 // in some other ffi.include() +#define _CFFI_F_OPAQUE 0x10 // opaque + +struct _cffi_field_s { + const char *name; + size_t field_offset; + size_t field_size; + _cffi_opcode_t field_type_op; +}; + +struct _cffi_enum_s { + const char *name; + int type_index; // -> _cffi_types, on a OP_ENUM + int type_prim; // _CFFI_PRIM_xxx + const char *enumerators; // comma-delimited string +}; + +struct _cffi_typename_s { + const char *name; + int type_index; /* if opaque, points to a possibly artificial + OP_STRUCT which is itself opaque */ +}; + +struct _cffi_type_context_s { + _cffi_opcode_t *types; + const struct _cffi_global_s *globals; + const struct _cffi_field_s *fields; + const struct _cffi_struct_union_s *struct_unions; + const struct _cffi_enum_s *enums; + const 
struct _cffi_typename_s *typenames; + int num_globals; + int num_struct_unions; + int num_enums; + int num_typenames; + const char *const *includes; + int num_types; + int flags; /* future extension */ +}; + +struct _cffi_parse_info_s { + const struct _cffi_type_context_s *ctx; + _cffi_opcode_t *output; + unsigned int output_size; + size_t error_location; + const char *error_message; +}; + +struct _cffi_externpy_s { + const char *name; + size_t size_of_result; + void *reserved1, *reserved2; +}; + +#ifdef _CFFI_INTERNAL +static int parse_c_type(struct _cffi_parse_info_s *info, const char *input); +static int search_in_globals(const struct _cffi_type_context_s *ctx, + const char *search, size_t search_len); +static int search_in_struct_unions(const struct _cffi_type_context_s *ctx, + const char *search, size_t search_len); +#endif diff --git a/Lib/site-packages/cffi/recompiler.py b/Lib/site-packages/cffi/recompiler.py new file mode 100644 index 0000000..914d9e7 --- /dev/null +++ b/Lib/site-packages/cffi/recompiler.py @@ -0,0 +1,1485 @@ +import os, sys, io +from . import ffiplatform, model +from .cffi_opcode import * + +VERSION = "0x2601" +VERSION_EMBEDDED = "0x2701" + + +class GlobalExpr: + def __init__(self, name, address, type_op, size=0, check_value=0): + self.name = name + self.address = address + self.type_op = type_op + self.size = size + self.check_value = check_value + + def as_c_expr(self): + return ' { "%s", (void *)%s, %s, (void *)%s },' % ( + self.name, self.address, self.type_op.as_c_expr(), self.size) + + def as_python_expr(self): + return "b'%s%s',%d" % (self.type_op.as_python_bytes(), self.name, + self.check_value) + +class FieldExpr: + def __init__(self, name, field_offset, field_size, fbitsize, field_type_op): + self.name = name + self.field_offset = field_offset + self.field_size = field_size + self.fbitsize = fbitsize + self.field_type_op = field_type_op + + def as_c_expr(self): + spaces = " " * len(self.name) + return (' { "%s", %s,\n' % (self.name, self.field_offset) + + ' %s %s,\n' % (spaces, self.field_size) + + ' %s %s },' % (spaces, self.field_type_op.as_c_expr())) + + def as_python_expr(self): + raise NotImplementedError + + def as_field_python_expr(self): + if self.field_type_op.op == OP_NOOP: + size_expr = '' + elif self.field_type_op.op == OP_BITFIELD: + size_expr = format_four_bytes(self.fbitsize) + else: + raise NotImplementedError + return "b'%s%s%s'" % (self.field_type_op.as_python_bytes(), + size_expr, + self.name) + +class StructUnionExpr: + def __init__(self, name, type_index, flags, size, alignment, comment, + first_field_index, c_fields): + self.name = name + self.type_index = type_index + self.flags = flags + self.size = size + self.alignment = alignment + self.comment = comment + self.first_field_index = first_field_index + self.c_fields = c_fields + + def as_c_expr(self): + return (' { "%s", %d, %s,' % (self.name, self.type_index, self.flags) + + '\n %s, %s, ' % (self.size, self.alignment) + + '%d, %d ' % (self.first_field_index, len(self.c_fields)) + + ('/* %s */ ' % self.comment if self.comment else '') + + '},') + + def as_python_expr(self): + flags = eval(self.flags, G_FLAGS) + fields_expr = [c_field.as_field_python_expr() + for c_field in self.c_fields] + return "(b'%s%s%s',%s)" % ( + format_four_bytes(self.type_index), + format_four_bytes(flags), + self.name, + ','.join(fields_expr)) + +class EnumExpr: + def __init__(self, name, type_index, size, signed, allenums): + self.name = name + self.type_index = type_index + self.size = size + 
self.signed = signed + self.allenums = allenums + + def as_c_expr(self): + return (' { "%s", %d, _cffi_prim_int(%s, %s),\n' + ' "%s" },' % (self.name, self.type_index, + self.size, self.signed, self.allenums)) + + def as_python_expr(self): + prim_index = { + (1, 0): PRIM_UINT8, (1, 1): PRIM_INT8, + (2, 0): PRIM_UINT16, (2, 1): PRIM_INT16, + (4, 0): PRIM_UINT32, (4, 1): PRIM_INT32, + (8, 0): PRIM_UINT64, (8, 1): PRIM_INT64, + }[self.size, self.signed] + return "b'%s%s%s\\x00%s'" % (format_four_bytes(self.type_index), + format_four_bytes(prim_index), + self.name, self.allenums) + +class TypenameExpr: + def __init__(self, name, type_index): + self.name = name + self.type_index = type_index + + def as_c_expr(self): + return ' { "%s", %d },' % (self.name, self.type_index) + + def as_python_expr(self): + return "b'%s%s'" % (format_four_bytes(self.type_index), self.name) + + +# ____________________________________________________________ + + +class Recompiler: + _num_externpy = 0 + + def __init__(self, ffi, module_name, target_is_python=False): + self.ffi = ffi + self.module_name = module_name + self.target_is_python = target_is_python + + def collect_type_table(self): + self._typesdict = {} + self._generate("collecttype") + # + all_decls = sorted(self._typesdict, key=str) + # + # prepare all FUNCTION bytecode sequences first + self.cffi_types = [] + for tp in all_decls: + if tp.is_raw_function: + assert self._typesdict[tp] is None + self._typesdict[tp] = len(self.cffi_types) + self.cffi_types.append(tp) # placeholder + for tp1 in tp.args: + assert isinstance(tp1, (model.VoidType, + model.BasePrimitiveType, + model.PointerType, + model.StructOrUnionOrEnum, + model.FunctionPtrType)) + if self._typesdict[tp1] is None: + self._typesdict[tp1] = len(self.cffi_types) + self.cffi_types.append(tp1) # placeholder + self.cffi_types.append('END') # placeholder + # + # prepare all OTHER bytecode sequences + for tp in all_decls: + if not tp.is_raw_function and self._typesdict[tp] is None: + self._typesdict[tp] = len(self.cffi_types) + self.cffi_types.append(tp) # placeholder + if tp.is_array_type and tp.length is not None: + self.cffi_types.append('LEN') # placeholder + assert None not in self._typesdict.values() + # + # collect all structs and unions and enums + self._struct_unions = {} + self._enums = {} + for tp in all_decls: + if isinstance(tp, model.StructOrUnion): + self._struct_unions[tp] = None + elif isinstance(tp, model.EnumType): + self._enums[tp] = None + for i, tp in enumerate(sorted(self._struct_unions, + key=lambda tp: tp.name)): + self._struct_unions[tp] = i + for i, tp in enumerate(sorted(self._enums, + key=lambda tp: tp.name)): + self._enums[tp] = i + # + # emit all bytecode sequences now + for tp in all_decls: + method = getattr(self, '_emit_bytecode_' + tp.__class__.__name__) + method(tp, self._typesdict[tp]) + # + # consistency check + for op in self.cffi_types: + assert isinstance(op, CffiOp) + self.cffi_types = tuple(self.cffi_types) # don't change any more + + def _do_collect_type(self, tp): + if not isinstance(tp, model.BaseTypeByIdentity): + if isinstance(tp, tuple): + for x in tp: + self._do_collect_type(x) + return + if tp not in self._typesdict: + self._typesdict[tp] = None + if isinstance(tp, model.FunctionPtrType): + self._do_collect_type(tp.as_raw_function()) + elif isinstance(tp, model.StructOrUnion): + if tp.fldtypes is not None and ( + tp not in self.ffi._parser._included_declarations): + for name1, tp1, _, _ in tp.enumfields(): + self._do_collect_type(self._field_type(tp, 
name1, tp1)) + else: + for _, x in tp._get_items(): + self._do_collect_type(x) + + def _generate(self, step_name): + lst = self.ffi._parser._declarations.items() + for name, (tp, quals) in sorted(lst): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_cpy_%s_%s' % (kind, + step_name)) + except AttributeError: + raise ffiplatform.VerificationError( + "not implemented in recompile(): %r" % name) + try: + self._current_quals = quals + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + # ---------- + + ALL_STEPS = ["global", "field", "struct_union", "enum", "typename"] + + def collect_step_tables(self): + # collect the declarations for '_cffi_globals', '_cffi_typenames', etc. + self._lsts = {} + for step_name in self.ALL_STEPS: + self._lsts[step_name] = [] + self._seen_struct_unions = set() + self._generate("ctx") + self._add_missing_struct_unions() + # + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + if step_name != "field": + lst.sort(key=lambda entry: entry.name) + self._lsts[step_name] = tuple(lst) # don't change any more + # + # check for a possible internal inconsistency: _cffi_struct_unions + # should have been generated with exactly self._struct_unions + lst = self._lsts["struct_union"] + for tp, i in self._struct_unions.items(): + assert i < len(lst) + assert lst[i].name == tp.name + assert len(lst) == len(self._struct_unions) + # same with enums + lst = self._lsts["enum"] + for tp, i in self._enums.items(): + assert i < len(lst) + assert lst[i].name == tp.name + assert len(lst) == len(self._enums) + + # ---------- + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def write_source_to_f(self, f, preamble): + if self.target_is_python: + assert preamble is None + self.write_py_source_to_f(f) + else: + assert preamble is not None + self.write_c_source_to_f(f, preamble) + + def _rel_readlines(self, filename): + g = open(os.path.join(os.path.dirname(__file__), filename), 'r') + lines = g.readlines() + g.close() + return lines + + def write_c_source_to_f(self, f, preamble): + self._f = f + prnt = self._prnt + # + # first the '#include' (actually done by inlining the file's content) + lines = self._rel_readlines('_cffi_include.h') + i = lines.index('#include "parse_c_type.h"\n') + lines[i:i+1] = self._rel_readlines('parse_c_type.h') + prnt(''.join(lines)) + # + # if we have ffi._embedding != None, we give it here as a macro + # and include an extra file + base_module_name = self.module_name.split('.')[-1] + if self.ffi._embedding is not None: + prnt('#define _CFFI_MODULE_NAME "%s"' % (self.module_name,)) + prnt('#define _CFFI_PYTHON_STARTUP_CODE %s' % + (self._string_literal(self.ffi._embedding),)) + prnt('#ifdef PYPY_VERSION') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC _cffi_pypyinit_%s' % ( + base_module_name,)) + prnt('#elif PY_MAJOR_VERSION >= 3') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC PyInit_%s' % ( + base_module_name,)) + prnt('#else') + prnt('# define _CFFI_PYTHON_STARTUP_FUNC init%s' % ( + base_module_name,)) + prnt('#endif') + lines = self._rel_readlines('_embedding.h') + prnt(''.join(lines)) + version = VERSION_EMBEDDED + else: + version = VERSION + # + # then paste the C source given by the user, verbatim. 
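An illustrative aside on the dispatch convention used by _generate() above: each declaration key has the form "kind realname", and the step name is appended to select the handler method. A minimal sketch with a hypothetical key:

    kind, realname = "function add".split(' ', 1)        # hypothetical declaration key
    handler = '_generate_cpy_%s_%s' % (kind, "decl")
    assert handler == '_generate_cpy_function_decl'       # step "decl" picks the decl handler
    assert realname == 'add'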
+ prnt('/************************************************************/') + prnt() + prnt(preamble) + prnt() + prnt('/************************************************************/') + prnt() + # + # the declaration of '_cffi_types' + prnt('static void *_cffi_types[] = {') + typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()]) + for i, op in enumerate(self.cffi_types): + comment = '' + if i in typeindex2type: + comment = ' // ' + typeindex2type[i]._get_c_name() + prnt('/* %2d */ %s,%s' % (i, op.as_c_expr(), comment)) + if not self.cffi_types: + prnt(' 0') + prnt('};') + prnt() + # + # call generate_cpy_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. + self._seen_constants = set() + self._generate("decl") + # + # the declaration of '_cffi_globals' and '_cffi_typenames' + nums = {} + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + nums[step_name] = len(lst) + if nums[step_name] > 0: + prnt('static const struct _cffi_%s_s _cffi_%ss[] = {' % ( + step_name, step_name)) + for entry in lst: + prnt(entry.as_c_expr()) + prnt('};') + prnt() + # + # the declaration of '_cffi_includes' + if self.ffi._included_ffis: + prnt('static const char * const _cffi_includes[] = {') + for ffi_to_include in self.ffi._included_ffis: + try: + included_module_name, included_source = ( + ffi_to_include._assigned_source[:2]) + except AttributeError: + raise ffiplatform.VerificationError( + "ffi object %r includes %r, but the latter has not " + "been prepared with set_source()" % ( + self.ffi, ffi_to_include,)) + if included_source is None: + raise ffiplatform.VerificationError( + "not implemented yet: ffi.include() of a Python-based " + "ffi inside a C-based ffi") + prnt(' "%s",' % (included_module_name,)) + prnt(' NULL') + prnt('};') + prnt() + # + # the declaration of '_cffi_type_context' + prnt('static const struct _cffi_type_context_s _cffi_type_context = {') + prnt(' _cffi_types,') + for step_name in self.ALL_STEPS: + if nums[step_name] > 0: + prnt(' _cffi_%ss,' % step_name) + else: + prnt(' NULL, /* no %ss */' % step_name) + for step_name in self.ALL_STEPS: + if step_name != "field": + prnt(' %d, /* num_%ss */' % (nums[step_name], step_name)) + if self.ffi._included_ffis: + prnt(' _cffi_includes,') + else: + prnt(' NULL, /* no includes */') + prnt(' %d, /* num_types */' % (len(self.cffi_types),)) + flags = 0 + if self._num_externpy: + flags |= 1 # set to mean that we use extern "Python" + prnt(' %d, /* flags */' % flags) + prnt('};') + prnt() + # + # the init function + prnt('#ifdef PYPY_VERSION') + prnt('PyMODINIT_FUNC') + prnt('_cffi_pypyinit_%s(const void *p[])' % (base_module_name,)) + prnt('{') + if self._num_externpy: + prnt(' if (((intptr_t)p[0]) >= 0x0A03) {') + prnt(' _cffi_call_python_org = ' + '(void(*)(struct _cffi_externpy_s *, char *))p[1];') + prnt(' }') + prnt(' p[0] = (const void *)%s;' % version) + prnt(' p[1] = &_cffi_type_context;') + prnt('}') + # on Windows, distutils insists on putting init_cffi_xyz in + # 'export_symbols', so instead of fighting it, just give up and + # give it one + prnt('# ifdef _MSC_VER') + prnt(' PyMODINIT_FUNC') + prnt('# if PY_MAJOR_VERSION >= 3') + prnt(' PyInit_%s(void) { return NULL; }' % (base_module_name,)) + prnt('# else') + prnt(' init%s(void) { }' % (base_module_name,)) + prnt('# endif') + prnt('# endif') + prnt('#elif PY_MAJOR_VERSION >= 3') + prnt('PyMODINIT_FUNC') + prnt('PyInit_%s(void)' % (base_module_name,)) + prnt('{') + prnt(' return _cffi_init("%s", %s, 
&_cffi_type_context);' % ( + self.module_name, version)) + prnt('}') + prnt('#else') + prnt('PyMODINIT_FUNC') + prnt('init%s(void)' % (base_module_name,)) + prnt('{') + prnt(' _cffi_init("%s", %s, &_cffi_type_context);' % ( + self.module_name, version)) + prnt('}') + prnt('#endif') + + def _to_py(self, x): + if isinstance(x, str): + return "b'%s'" % (x,) + if isinstance(x, (list, tuple)): + rep = [self._to_py(item) for item in x] + if len(rep) == 1: + rep.append('') + return "(%s)" % (','.join(rep),) + return x.as_python_expr() # Py2: unicode unexpected; Py3: bytes unexp. + + def write_py_source_to_f(self, f): + self._f = f + prnt = self._prnt + # + # header + prnt("# auto-generated file") + prnt("import _cffi_backend") + # + # the 'import' of the included ffis + num_includes = len(self.ffi._included_ffis or ()) + for i in range(num_includes): + ffi_to_include = self.ffi._included_ffis[i] + try: + included_module_name, included_source = ( + ffi_to_include._assigned_source[:2]) + except AttributeError: + raise ffiplatform.VerificationError( + "ffi object %r includes %r, but the latter has not " + "been prepared with set_source()" % ( + self.ffi, ffi_to_include,)) + if included_source is not None: + raise ffiplatform.VerificationError( + "not implemented yet: ffi.include() of a C-based " + "ffi inside a Python-based ffi") + prnt('from %s import ffi as _ffi%d' % (included_module_name, i)) + prnt() + prnt("ffi = _cffi_backend.FFI('%s'," % (self.module_name,)) + prnt(" _version = %s," % (VERSION,)) + # + # the '_types' keyword argument + self.cffi_types = tuple(self.cffi_types) # don't change any more + types_lst = [op.as_python_bytes() for op in self.cffi_types] + prnt(' _types = %s,' % (self._to_py(''.join(types_lst)),)) + typeindex2type = dict([(i, tp) for (tp, i) in self._typesdict.items()]) + # + # the keyword arguments from ALL_STEPS + for step_name in self.ALL_STEPS: + lst = self._lsts[step_name] + if len(lst) > 0 and step_name != "field": + prnt(' _%ss = %s,' % (step_name, self._to_py(lst))) + # + # the '_includes' keyword argument + if num_includes > 0: + prnt(' _includes = (%s,),' % ( + ', '.join(['_ffi%d' % i for i in range(num_includes)]),)) + # + # the footer + prnt(')') + + # ---------- + + def _gettypenum(self, type): + # a KeyError here is a bug. please report it! 
:-) + return self._typesdict[type] + + def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode): + extraarg = '' + if isinstance(tp, model.BasePrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + converter = '_cffi_to_c_int' + extraarg = ', %s' % tp.name + elif isinstance(tp, model.UnknownFloatType): + # don't check with is_float_type(): it may be a 'long + # double' here, and _cffi_to_c_double would loose precision + converter = '(%s)_cffi_to_c_double' % (tp.get_c_name(''),) + else: + converter = '(%s)_cffi_to_c_%s' % (tp.get_c_name(''), + tp.name.replace(' ', '_')) + errvalue = '-1' + # + elif isinstance(tp, model.PointerType): + self._convert_funcarg_to_c_ptr_or_array(tp, fromvar, + tovar, errcode) + return + # + elif isinstance(tp, (model.StructOrUnion, model.EnumType)): + # a struct (not a struct pointer) as a function argument + self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)' + % (tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + return + # + elif isinstance(tp, model.FunctionPtrType): + converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('') + extraarg = ', _cffi_type(%d)' % self._gettypenum(tp) + errvalue = 'NULL' + # + else: + raise NotImplementedError(tp) + # + self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg)) + self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % ( + tovar, tp.get_c_name(''), errvalue)) + self._prnt(' %s;' % errcode) + + def _extra_local_variables(self, tp, localvars): + if isinstance(tp, model.PointerType): + localvars.add('Py_ssize_t datasize') + + def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode): + self._prnt(' datasize = _cffi_prepare_pointer_call_argument(') + self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % ( + self._gettypenum(tp), fromvar, tovar)) + self._prnt(' if (datasize != 0) {') + self._prnt(' if (datasize < 0)') + self._prnt(' %s;' % errcode) + self._prnt(' %s = (%s)alloca((size_t)datasize);' % ( + tovar, tp.get_c_name(''))) + self._prnt(' memset((void *)%s, 0, (size_t)datasize);' % (tovar,)) + self._prnt(' if (_cffi_convert_array_from_object(' + '(char *)%s, _cffi_type(%d), %s) < 0)' % ( + tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + self._prnt(' }') + + def _convert_expr_from_c(self, tp, var, context): + if isinstance(tp, model.BasePrimitiveType): + if tp.is_integer_type(): + return '_cffi_from_c_int(%s, %s)' % (var, tp.name) + elif isinstance(tp, model.UnknownFloatType): + return '_cffi_from_c_double(%s)' % (var,) + elif tp.name != 'long double': + return '_cffi_from_c_%s(%s)' % (tp.name.replace(' ', '_'), var) + else: + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, (model.PointerType, model.FunctionPtrType)): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.ArrayType): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(model.PointerType(tp.item))) + elif isinstance(tp, model.StructType): + if tp.fldnames is None: + raise TypeError("'%s' is used as %s, but is opaque" % ( + tp._get_c_name(), context)) + return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.EnumType): + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + else: + raise NotImplementedError(tp) + + # ---------- + # typedefs + + def _generate_cpy_typedef_collecttype(self, tp, 
name): + self._do_collect_type(tp) + + def _generate_cpy_typedef_decl(self, tp, name): + pass + + def _typedef_ctx(self, tp, name): + type_index = self._typesdict[tp] + self._lsts["typename"].append(TypenameExpr(name, type_index)) + + def _generate_cpy_typedef_ctx(self, tp, name): + self._typedef_ctx(tp, name) + if getattr(tp, "origin", None) == "unknown_type": + self._struct_ctx(tp, tp.name, approxname=None) + elif isinstance(tp, model.NamedPointerType): + self._struct_ctx(tp.totype, tp.totype.name, approxname=tp.name, + named_ptr=tp) + + # ---------- + # function declarations + + def _generate_cpy_function_collecttype(self, tp, name): + self._do_collect_type(tp.as_raw_function()) + if tp.ellipsis and not self.target_is_python: + self._do_collect_type(tp) + + def _generate_cpy_function_decl(self, tp, name): + assert not self.target_is_python + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no CPython wrapper) + self._generate_cpy_constant_decl(tp, name) + return + prnt = self._prnt + numargs = len(tp.args) + if numargs == 0: + argname = 'noarg' + elif numargs == 1: + argname = 'arg0' + else: + argname = 'args' + # + # ------------------------------ + # the 'd' version of the function, only for addressof(lib, 'func') + arguments = [] + call_arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arguments.append(type.get_c_name(' x%d' % i, context)) + call_arguments.append('x%d' % i) + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + if tp.abi: + abi = tp.abi + ' ' + else: + abi = '' + name_and_arguments = '%s_cffi_d_%s(%s)' % (abi, name, repr_arguments) + prnt('static %s' % (tp.result.get_c_name(name_and_arguments),)) + prnt('{') + call_arguments = ', '.join(call_arguments) + result_code = 'return ' + if isinstance(tp.result, model.VoidType): + result_code = '' + prnt(' %s%s(%s);' % (result_code, name, call_arguments)) + prnt('}') + # + prnt('#ifndef PYPY_VERSION') # ------------------------------ + # + prnt('static PyObject *') + prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname)) + prnt('{') + # + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arg = type.get_c_name(' x%d' % i, context) + prnt(' %s;' % arg) + # + localvars = set() + for type in tp.args: + self._extra_local_variables(type, localvars) + for decl in localvars: + prnt(' %s;' % (decl,)) + # + if not isinstance(tp.result, model.VoidType): + result_code = 'result = ' + context = 'result of %s' % name + result_decl = ' %s;' % tp.result.get_c_name(' result', context) + prnt(result_decl) + else: + result_decl = None + result_code = '' + # + if len(tp.args) > 1: + rng = range(len(tp.args)) + for i in rng: + prnt(' PyObject *arg%d;' % i) + prnt(' PyObject **aa;') + prnt() + prnt(' aa = _cffi_unpack_args(args, %d, "%s");' % (len(rng), name)) + prnt(' if (aa == NULL)') + prnt(' return NULL;') + for i in rng: + prnt(' arg%d = aa[%d];' % (i, i)) + prnt() + # + for i, type in enumerate(tp.args): + self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i, + 'return NULL') + prnt() + # + prnt(' Py_BEGIN_ALLOW_THREADS') + prnt(' _cffi_restore_errno();') + call_arguments = ['x%d' % i for i in range(len(tp.args))] + call_arguments = ', '.join(call_arguments) + prnt(' { %s%s(%s); }' % (result_code, name, call_arguments)) + prnt(' _cffi_save_errno();') + 
prnt(' Py_END_ALLOW_THREADS') + prnt() + # + prnt(' (void)self; /* unused */') + if numargs == 0: + prnt(' (void)noarg; /* unused */') + if result_code: + prnt(' return %s;' % + self._convert_expr_from_c(tp.result, 'result', 'result type')) + else: + prnt(' Py_INCREF(Py_None);') + prnt(' return Py_None;') + prnt('}') + # + prnt('#else') # ------------------------------ + # + # the PyPy version: need to replace struct/union arguments with + # pointers, and if the result is a struct/union, insert a first + # arg that is a pointer to the result. + difference = False + arguments = [] + call_arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + indirection = '' + if isinstance(type, model.StructOrUnion): + indirection = '*' + difference = True + arg = type.get_c_name(' %sx%d' % (indirection, i), context) + arguments.append(arg) + call_arguments.append('%sx%d' % (indirection, i)) + tp_result = tp.result + if isinstance(tp_result, model.StructOrUnion): + context = 'result of %s' % name + arg = tp_result.get_c_name(' *result', context) + arguments.insert(0, arg) + tp_result = model.void_type + result_decl = None + result_code = '*result = ' + difference = True + if difference: + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + name_and_arguments = '%s_cffi_f_%s(%s)' % (abi, name, + repr_arguments) + prnt('static %s' % (tp_result.get_c_name(name_and_arguments),)) + prnt('{') + if result_decl: + prnt(result_decl) + call_arguments = ', '.join(call_arguments) + prnt(' { %s%s(%s); }' % (result_code, name, call_arguments)) + if result_decl: + prnt(' return result;') + prnt('}') + else: + prnt('# define _cffi_f_%s _cffi_d_%s' % (name, name)) + # + prnt('#endif') # ------------------------------ + prnt() + + def _generate_cpy_function_ctx(self, tp, name): + if tp.ellipsis and not self.target_is_python: + self._generate_cpy_constant_ctx(tp, name) + return + type_index = self._typesdict[tp.as_raw_function()] + numargs = len(tp.args) + if self.target_is_python: + meth_kind = OP_DLOPEN_FUNC + elif numargs == 0: + meth_kind = OP_CPYTHON_BLTN_N # 'METH_NOARGS' + elif numargs == 1: + meth_kind = OP_CPYTHON_BLTN_O # 'METH_O' + else: + meth_kind = OP_CPYTHON_BLTN_V # 'METH_VARARGS' + self._lsts["global"].append( + GlobalExpr(name, '_cffi_f_%s' % name, + CffiOp(meth_kind, type_index), + size='_cffi_d_%s' % name)) + + # ---------- + # named structs or unions + + def _field_type(self, tp_struct, field_name, tp_field): + if isinstance(tp_field, model.ArrayType): + actual_length = tp_field.length + if actual_length == '...': + ptr_struct_name = tp_struct.get_c_name('*') + actual_length = '_cffi_array_len(((%s)0)->%s)' % ( + ptr_struct_name, field_name) + tp_item = self._field_type(tp_struct, '%s[0]' % field_name, + tp_field.item) + tp_field = model.ArrayType(tp_item, actual_length) + return tp_field + + def _struct_collecttype(self, tp): + self._do_collect_type(tp) + + def _struct_decl(self, tp, cname, approxname): + if tp.fldtypes is None: + return + prnt = self._prnt + checkfuncname = '_cffi_checkfld_%s' % (approxname,) + prnt('_CFFI_UNUSED_FN') + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + try: + if ftype.is_integer_type() or fbitsize >= 0: + # accept all integers, but complain on float or double + prnt(" (void)((p->%s) << 1); /* check that '%s.%s' is " + "an integer */" % (fname, 
cname, fname)) + continue + # only accept exactly the type declared, except that '[]' + # is interpreted as a '*' and so will match any array length. + # (It would also match '*', but that's harder to detect...) + while (isinstance(ftype, model.ArrayType) + and (ftype.length is None or ftype.length == '...')): + ftype = ftype.item + fname = fname + '[0]' + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except ffiplatform.VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + prnt('struct _cffi_align_%s { char x; %s y; };' % (approxname, cname)) + prnt() + + def _struct_ctx(self, tp, cname, approxname, named_ptr=None): + type_index = self._typesdict[tp] + reason_for_not_expanding = None + flags = [] + if isinstance(tp, model.UnionType): + flags.append("_CFFI_F_UNION") + if tp.fldtypes is None: + flags.append("_CFFI_F_OPAQUE") + reason_for_not_expanding = "opaque" + if (tp not in self.ffi._parser._included_declarations and + (named_ptr is None or + named_ptr not in self.ffi._parser._included_declarations)): + if tp.fldtypes is None: + pass # opaque + elif tp.partial or tp.has_anonymous_struct_fields(): + pass # field layout obtained silently from the C compiler + else: + flags.append("_CFFI_F_CHECK_FIELDS") + if tp.packed: + flags.append("_CFFI_F_PACKED") + else: + flags.append("_CFFI_F_EXTERNAL") + reason_for_not_expanding = "external" + flags = '|'.join(flags) or '0' + c_fields = [] + if reason_for_not_expanding is None: + enumfields = list(tp.enumfields()) + for fldname, fldtype, fbitsize, fqual in enumfields: + fldtype = self._field_type(tp, fldname, fldtype) + # cname is None for _add_missing_struct_unions() only + op = OP_NOOP + if fbitsize >= 0: + op = OP_BITFIELD + size = '%d /* bits */' % fbitsize + elif cname is None or ( + isinstance(fldtype, model.ArrayType) and + fldtype.length is None): + size = '(size_t)-1' + else: + size = 'sizeof(((%s)0)->%s)' % ( + tp.get_c_name('*') if named_ptr is None + else named_ptr.name, + fldname) + if cname is None or fbitsize >= 0: + offset = '(size_t)-1' + elif named_ptr is not None: + offset = '((char *)&((%s)0)->%s) - (char *)0' % ( + named_ptr.name, fldname) + else: + offset = 'offsetof(%s, %s)' % (tp.get_c_name(''), fldname) + c_fields.append( + FieldExpr(fldname, offset, size, fbitsize, + CffiOp(op, self._typesdict[fldtype]))) + first_field_index = len(self._lsts["field"]) + self._lsts["field"].extend(c_fields) + # + if cname is None: # unknown name, for _add_missing_struct_unions + size = '(size_t)-2' + align = -2 + comment = "unnamed" + else: + if named_ptr is not None: + size = 'sizeof(*(%s)0)' % (named_ptr.name,) + align = '-1 /* unknown alignment */' + else: + size = 'sizeof(%s)' % (cname,) + align = 'offsetof(struct _cffi_align_%s, y)' % (approxname,) + comment = None + else: + size = '(size_t)-1' + align = -1 + first_field_index = -1 + comment = reason_for_not_expanding + self._lsts["struct_union"].append( + StructUnionExpr(tp.name, type_index, flags, size, align, comment, + first_field_index, c_fields)) + self._seen_struct_unions.add(tp) + + def _add_missing_struct_unions(self): + # not very nice, but some struct declarations might be missing + # because they don't have any known C name. Check that they are + # not partial (we can't complete or verify them!) and emit them + # anonymously. 
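To make the field descriptors built by _struct_ctx() above concrete, this is what the size and offset C expressions come out as for a hypothetical plain field (no bitfield, no named pointer); illustrative only:

    fldname = 'x'                                          # hypothetical 'struct point { int x; ... };'
    size = 'sizeof(((%s)0)->%s)' % ('struct point *', fldname)
    offset = 'offsetof(%s, %s)' % ('struct point', fldname)
    assert size == 'sizeof(((struct point *)0)->x)'
    assert offset == 'offsetof(struct point, x)'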
+ lst = list(self._struct_unions.items()) + lst.sort(key=lambda tp_order: tp_order[1]) + for tp, order in lst: + if tp not in self._seen_struct_unions: + if tp.partial: + raise NotImplementedError("internal inconsistency: %r is " + "partial but was not seen at " + "this point" % (tp,)) + if tp.name.startswith('$') and tp.name[1:].isdigit(): + approxname = tp.name[1:] + elif tp.name == '_IO_FILE' and tp.forcename == 'FILE': + approxname = 'FILE' + self._typedef_ctx(tp, 'FILE') + else: + raise NotImplementedError("internal inconsistency: %r" % + (tp,)) + self._struct_ctx(tp, None, approxname) + + def _generate_cpy_struct_collecttype(self, tp, name): + self._struct_collecttype(tp) + _generate_cpy_union_collecttype = _generate_cpy_struct_collecttype + + def _struct_names(self, tp): + cname = tp.get_c_name('') + if ' ' in cname: + return cname, cname.replace(' ', '_') + else: + return cname, '_' + cname + + def _generate_cpy_struct_decl(self, tp, name): + self._struct_decl(tp, *self._struct_names(tp)) + _generate_cpy_union_decl = _generate_cpy_struct_decl + + def _generate_cpy_struct_ctx(self, tp, name): + self._struct_ctx(tp, *self._struct_names(tp)) + _generate_cpy_union_ctx = _generate_cpy_struct_ctx + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. + + def _generate_cpy_anonymous_collecttype(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_collecttype(tp, name) + else: + self._struct_collecttype(tp) + + def _generate_cpy_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_decl(tp) + else: + self._struct_decl(tp, name, 'typedef_' + name) + + def _generate_cpy_anonymous_ctx(self, tp, name): + if isinstance(tp, model.EnumType): + self._enum_ctx(tp, name) + else: + self._struct_ctx(tp, name, 'typedef_' + name) + + # ---------- + # constants, declared with "static const ..." 
+ + def _generate_cpy_const(self, is_int, name, tp=None, category='const', + check_value=None): + if (category, name) in self._seen_constants: + raise ffiplatform.VerificationError( + "duplicate declaration of %s '%s'" % (category, name)) + self._seen_constants.add((category, name)) + # + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + if is_int: + prnt('static int %s(unsigned long long *o)' % funcname) + prnt('{') + prnt(' int n = (%s) <= 0;' % (name,)) + prnt(' *o = (unsigned long long)((%s) << 0);' + ' /* check that %s is an integer */' % (name, name)) + if check_value is not None: + if check_value > 0: + check_value = '%dU' % (check_value,) + prnt(' if (!_cffi_check_int(*o, n, %s))' % (check_value,)) + prnt(' n |= 2;') + prnt(' return n;') + prnt('}') + else: + assert check_value is None + prnt('static void %s(char *o)' % funcname) + prnt('{') + prnt(' *(%s)o = %s;' % (tp.get_c_name('*'), name)) + prnt('}') + prnt() + + def _generate_cpy_constant_collecttype(self, tp, name): + is_int = tp.is_integer_type() + if not is_int or self.target_is_python: + self._do_collect_type(tp) + + def _generate_cpy_constant_decl(self, tp, name): + is_int = tp.is_integer_type() + self._generate_cpy_const(is_int, name, tp) + + def _generate_cpy_constant_ctx(self, tp, name): + if not self.target_is_python and tp.is_integer_type(): + type_op = CffiOp(OP_CONSTANT_INT, -1) + else: + if self.target_is_python: + const_kind = OP_DLOPEN_CONST + else: + const_kind = OP_CONSTANT + type_index = self._typesdict[tp] + type_op = CffiOp(const_kind, type_index) + self._lsts["global"].append( + GlobalExpr(name, '_cffi_const_%s' % name, type_op)) + + # ---------- + # enums + + def _generate_cpy_enum_collecttype(self, tp, name): + self._do_collect_type(tp) + + def _generate_cpy_enum_decl(self, tp, name=None): + for enumerator in tp.enumerators: + self._generate_cpy_const(True, enumerator) + + def _enum_ctx(self, tp, cname): + type_index = self._typesdict[tp] + type_op = CffiOp(OP_ENUM, -1) + if self.target_is_python: + tp.check_not_partial() + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._lsts["global"].append( + GlobalExpr(enumerator, '_cffi_const_%s' % enumerator, type_op, + check_value=enumvalue)) + # + if cname is not None and '$' not in cname and not self.target_is_python: + size = "sizeof(%s)" % cname + signed = "((%s)-1) <= 0" % cname + else: + basetp = tp.build_baseinttype(self.ffi, []) + size = self.ffi.sizeof(basetp) + signed = int(int(self.ffi.cast(basetp, -1)) < 0) + allenums = ",".join(tp.enumerators) + self._lsts["enum"].append( + EnumExpr(tp.name, type_index, size, signed, allenums)) + + def _generate_cpy_enum_ctx(self, tp, name): + self._enum_ctx(tp, tp._get_c_name()) + + # ---------- + # macros: for now only for integers + + def _generate_cpy_macro_collecttype(self, tp, name): + pass + + def _generate_cpy_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_cpy_const(True, name, check_value=check_value) + + def _generate_cpy_macro_ctx(self, tp, name): + if tp == '...': + if self.target_is_python: + raise ffiplatform.VerificationError( + "cannot use the syntax '...' in '#define %s ...' 
when " + "using the ABI mode" % (name,)) + check_value = None + else: + check_value = tp # an integer + type_op = CffiOp(OP_CONSTANT_INT, -1) + self._lsts["global"].append( + GlobalExpr(name, '_cffi_const_%s' % name, type_op, + check_value=check_value)) + + # ---------- + # global variables + + def _global_type(self, tp, global_name): + if isinstance(tp, model.ArrayType): + actual_length = tp.length + if actual_length == '...': + actual_length = '_cffi_array_len(%s)' % (global_name,) + tp_item = self._global_type(tp.item, '%s[0]' % global_name) + tp = model.ArrayType(tp_item, actual_length) + return tp + + def _generate_cpy_variable_collecttype(self, tp, name): + self._do_collect_type(self._global_type(tp, name)) + + def _generate_cpy_variable_decl(self, tp, name): + prnt = self._prnt + tp = self._global_type(tp, name) + if isinstance(tp, model.ArrayType) and tp.length is None: + tp = tp.item + ampersand = '' + else: + ampersand = '&' + # This code assumes that casts from "tp *" to "void *" is a + # no-op, i.e. a function that returns a "tp *" can be called + # as if it returned a "void *". This should be generally true + # on any modern machine. The only exception to that rule (on + # uncommon architectures, and as far as I can tell) might be + # if 'tp' were a function type, but that is not possible here. + # (If 'tp' is a function _pointer_ type, then casts from "fn_t + # **" to "void *" are again no-ops, as far as I can tell.) + decl = '*_cffi_var_%s(void)' % (name,) + prnt('static ' + tp.get_c_name(decl, quals=self._current_quals)) + prnt('{') + prnt(' return %s(%s);' % (ampersand, name)) + prnt('}') + prnt() + + def _generate_cpy_variable_ctx(self, tp, name): + tp = self._global_type(tp, name) + type_index = self._typesdict[tp] + if self.target_is_python: + op = OP_GLOBAL_VAR + else: + op = OP_GLOBAL_VAR_F + self._lsts["global"].append( + GlobalExpr(name, '_cffi_var_%s' % name, CffiOp(op, type_index))) + + # ---------- + # extern "Python" + + def _generate_cpy_extern_python_collecttype(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + self._do_collect_type(tp) + + def _generate_cpy_dllexport_python_collecttype(self, tp, name): + self._generate_cpy_extern_python_collecttype(tp, name) + + def _generate_cpy_extern_python_decl(self, tp, name, dllexport=False): + prnt = self._prnt + if isinstance(tp.result, model.VoidType): + size_of_result = '0' + else: + context = 'result of %s' % name + size_of_result = '(int)sizeof(%s)' % ( + tp.result.get_c_name('', context),) + prnt('static struct _cffi_externpy_s _cffi_externpy__%s =' % name) + prnt(' { "%s", %s };' % (name, size_of_result)) + prnt() + # + arguments = [] + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + arg = type.get_c_name(' a%d' % i, context) + arguments.append(arg) + # + repr_arguments = ', '.join(arguments) + repr_arguments = repr_arguments or 'void' + name_and_arguments = '%s(%s)' % (name, repr_arguments) + # + def may_need_128_bits(tp): + return (isinstance(tp, model.PrimitiveType) and + tp.name == 'long double') + # + size_of_a = max(len(tp.args)*8, 8) + if may_need_128_bits(tp.result): + size_of_a = max(size_of_a, 16) + if isinstance(tp.result, model.StructOrUnion): + size_of_a = 'sizeof(%s) > %d ? 
sizeof(%s) : %d' % ( + tp.result.get_c_name(''), size_of_a, + tp.result.get_c_name(''), size_of_a) + if dllexport: + tag = 'CFFI_DLLEXPORT' + else: + tag = 'static' + prnt('%s %s' % (tag, tp.result.get_c_name(name_and_arguments))) + prnt('{') + prnt(' char a[%s];' % size_of_a) + prnt(' char *p = a;') + for i, type in enumerate(tp.args): + arg = 'a%d' % i + if (isinstance(type, model.StructOrUnion) or + may_need_128_bits(type)): + arg = '&' + arg + type = model.PointerType(type) + prnt(' *(%s)(p + %d) = %s;' % (type.get_c_name('*'), i*8, arg)) + prnt(' _cffi_call_python(&_cffi_externpy__%s, p);' % name) + if not isinstance(tp.result, model.VoidType): + prnt(' return *(%s)p;' % (tp.result.get_c_name('*'),)) + prnt('}') + prnt() + self._num_externpy += 1 + + def _generate_cpy_dllexport_python_decl(self, tp, name): + self._generate_cpy_extern_python_decl(tp, name, dllexport=True) + + def _generate_cpy_extern_python_ctx(self, tp, name): + if self.target_is_python: + raise ffiplatform.VerificationError( + "cannot use 'extern \"Python\"' in the ABI mode") + if tp.ellipsis: + raise NotImplementedError("a vararg function is extern \"Python\"") + type_index = self._typesdict[tp] + type_op = CffiOp(OP_EXTERN_PYTHON, type_index) + self._lsts["global"].append( + GlobalExpr(name, '&_cffi_externpy__%s' % name, type_op, name)) + + def _generate_cpy_dllexport_python_ctx(self, tp, name): + self._generate_cpy_extern_python_ctx(tp, name) + + def _string_literal(self, s): + def _char_repr(c): + # escape with a '\' the characters '\', '"' or (for trigraphs) '?' + if c in '\\"?': return '\\' + c + if ' ' <= c < '\x7F': return c + if c == '\n': return '\\n' + return '\\%03o' % ord(c) + lines = [] + for line in s.splitlines(True): + lines.append('"%s"' % ''.join([_char_repr(c) for c in line])) + return ' \\\n'.join(lines) + + # ---------- + # emitting the opcodes for individual types + + def _emit_bytecode_VoidType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, PRIM_VOID) + + def _emit_bytecode_PrimitiveType(self, tp, index): + prim_index = PRIMITIVE_TO_INDEX[tp.name] + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, prim_index) + + def _emit_bytecode_UnknownIntegerType(self, tp, index): + s = ('_cffi_prim_int(sizeof(%s), (\n' + ' ((%s)-1) << 0 /* check that %s is an integer type */\n' + ' ) <= 0)' % (tp.name, tp.name, tp.name)) + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s) + + def _emit_bytecode_UnknownFloatType(self, tp, index): + s = ('_cffi_prim_float(sizeof(%s) *\n' + ' (((%s)1) / 2) * 2 /* integer => 0, float => 1 */\n' + ' )' % (tp.name, tp.name)) + self.cffi_types[index] = CffiOp(OP_PRIMITIVE, s) + + def _emit_bytecode_RawFunctionType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_FUNCTION, self._typesdict[tp.result]) + index += 1 + for tp1 in tp.args: + realindex = self._typesdict[tp1] + if index != realindex: + if isinstance(tp1, model.PrimitiveType): + self._emit_bytecode_PrimitiveType(tp1, index) + else: + self.cffi_types[index] = CffiOp(OP_NOOP, realindex) + index += 1 + flags = int(tp.ellipsis) + if tp.abi is not None: + if tp.abi == '__stdcall': + flags |= 2 + else: + raise NotImplementedError("abi=%r" % (tp.abi,)) + self.cffi_types[index] = CffiOp(OP_FUNCTION_END, flags) + + def _emit_bytecode_PointerType(self, tp, index): + self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[tp.totype]) + + _emit_bytecode_ConstPointerType = _emit_bytecode_PointerType + _emit_bytecode_NamedPointerType = _emit_bytecode_PointerType + + def _emit_bytecode_FunctionPtrType(self, tp, 
index): + raw = tp.as_raw_function() + self.cffi_types[index] = CffiOp(OP_POINTER, self._typesdict[raw]) + + def _emit_bytecode_ArrayType(self, tp, index): + item_index = self._typesdict[tp.item] + if tp.length is None: + self.cffi_types[index] = CffiOp(OP_OPEN_ARRAY, item_index) + elif tp.length == '...': + raise ffiplatform.VerificationError( + "type %s badly placed: the '...' array length can only be " + "used on global arrays or on fields of structures" % ( + str(tp).replace('/*...*/', '...'),)) + else: + assert self.cffi_types[index + 1] == 'LEN' + self.cffi_types[index] = CffiOp(OP_ARRAY, item_index) + self.cffi_types[index + 1] = CffiOp(None, str(tp.length)) + + def _emit_bytecode_StructType(self, tp, index): + struct_index = self._struct_unions[tp] + self.cffi_types[index] = CffiOp(OP_STRUCT_UNION, struct_index) + _emit_bytecode_UnionType = _emit_bytecode_StructType + + def _emit_bytecode_EnumType(self, tp, index): + enum_index = self._enums[tp] + self.cffi_types[index] = CffiOp(OP_ENUM, enum_index) + + +if sys.version_info >= (3,): + NativeIO = io.StringIO +else: + class NativeIO(io.BytesIO): + def write(self, s): + if isinstance(s, unicode): + s = s.encode('ascii') + super(NativeIO, self).write(s) + +def _make_c_or_py_source(ffi, module_name, preamble, target_file): + recompiler = Recompiler(ffi, module_name, + target_is_python=(preamble is None)) + recompiler.collect_type_table() + recompiler.collect_step_tables() + f = NativeIO() + recompiler.write_source_to_f(f, preamble) + output = f.getvalue() + try: + with open(target_file, 'r') as f1: + if f1.read(len(output) + 1) != output: + raise IOError + return False # already up-to-date + except IOError: + tmp_file = '%s.~%d' % (target_file, os.getpid()) + with open(tmp_file, 'w') as f1: + f1.write(output) + try: + os.rename(tmp_file, target_file) + except OSError: + os.unlink(target_file) + os.rename(tmp_file, target_file) + return True + +def make_c_source(ffi, module_name, preamble, target_c_file): + assert preamble is not None + return _make_c_or_py_source(ffi, module_name, preamble, target_c_file) + +def make_py_source(ffi, module_name, target_py_file): + return _make_c_or_py_source(ffi, module_name, None, target_py_file) + +def _modname_to_file(outputdir, modname, extension): + parts = modname.split('.') + try: + os.makedirs(os.path.join(outputdir, *parts[:-1])) + except OSError: + pass + parts[-1] += extension + return os.path.join(outputdir, *parts), parts + + +# Aaargh. Distutils is not tested at all for the purpose of compiling +# DLLs that are not extension modules. Here are some hacks to work +# around that, in the _patch_for_*() functions... + +def _patch_meth(patchlist, cls, name, new_meth): + old = getattr(cls, name) + patchlist.append((cls, name, old)) + setattr(cls, name, new_meth) + return old + +def _unpatch_meths(patchlist): + for cls, name, old_meth in reversed(patchlist): + setattr(cls, name, old_meth) + +def _patch_for_embedding(patchlist): + if sys.platform == 'win32': + # we must not remove the manifest when building for embedding! 
+ from distutils.msvc9compiler import MSVCCompiler + _patch_meth(patchlist, MSVCCompiler, '_remove_visual_c_ref', + lambda self, manifest_file: manifest_file) + + if sys.platform == 'darwin': + # we must not make a '-bundle', but a '-dynamiclib' instead + from distutils.ccompiler import CCompiler + def my_link_shared_object(self, *args, **kwds): + if '-bundle' in self.linker_so: + self.linker_so = list(self.linker_so) + i = self.linker_so.index('-bundle') + self.linker_so[i] = '-dynamiclib' + return old_link_shared_object(self, *args, **kwds) + old_link_shared_object = _patch_meth(patchlist, CCompiler, + 'link_shared_object', + my_link_shared_object) + +def _patch_for_target(patchlist, target): + from distutils.command.build_ext import build_ext + # if 'target' is different from '*', we need to patch some internal + # method to just return this 'target' value, instead of having it + # built from module_name + if target.endswith('.*'): + target = target[:-2] + if sys.platform == 'win32': + target += '.dll' + elif sys.platform == 'darwin': + target += '.dylib' + else: + target += '.so' + _patch_meth(patchlist, build_ext, 'get_ext_filename', + lambda self, ext_name: target) + + +def recompile(ffi, module_name, preamble, tmpdir='.', call_c_compiler=True, + c_file=None, source_extension='.c', extradir=None, + compiler_verbose=1, target=None, **kwds): + if not isinstance(module_name, str): + module_name = module_name.encode('ascii') + if ffi._windows_unicode: + ffi._apply_windows_unicode(kwds) + if preamble is not None: + embedding = (ffi._embedding is not None) + if embedding: + ffi._apply_embedding_fix(kwds) + if c_file is None: + c_file, parts = _modname_to_file(tmpdir, module_name, + source_extension) + if extradir: + parts = [extradir] + parts + ext_c_file = os.path.join(*parts) + else: + ext_c_file = c_file + # + if target is None: + if embedding: + target = '%s.*' % module_name + else: + target = '*' + # + ext = ffiplatform.get_extension(ext_c_file, module_name, **kwds) + updated = make_c_source(ffi, module_name, preamble, c_file) + if call_c_compiler: + patchlist = [] + cwd = os.getcwd() + try: + if embedding: + _patch_for_embedding(patchlist) + if target != '*': + _patch_for_target(patchlist, target) + os.chdir(tmpdir) + outputfilename = ffiplatform.compile('.', ext, compiler_verbose) + finally: + os.chdir(cwd) + _unpatch_meths(patchlist) + return outputfilename + else: + return ext, updated + else: + if c_file is None: + c_file, _ = _modname_to_file(tmpdir, module_name, '.py') + updated = make_py_source(ffi, module_name, c_file) + if call_c_compiler: + return c_file + else: + return None, updated + +def _verify(ffi, module_name, preamble, *args, **kwds): + # FOR TESTS ONLY + from testing.udir import udir + import imp + assert module_name not in sys.modules, "module name conflict: %r" % ( + module_name,) + kwds.setdefault('tmpdir', str(udir)) + outputfilename = recompile(ffi, module_name, preamble, *args, **kwds) + module = imp.load_dynamic(module_name, outputfilename) + # + # hack hack hack: copy all *bound methods* from module.ffi back to the + # ffi instance. Then calls like ffi.new() will invoke module.ffi.new(). 
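For orientation, recompile() above is normally reached through the public out-of-line API rather than called directly; a minimal hedged sketch (module name and C body are hypothetical):

    import cffi

    ffi = cffi.FFI()
    ffi.cdef("int add(int, int);")
    # hypothetical module name and preamble
    ffi.set_source("_example", "static int add(int a, int b) { return a + b; }")

    if __name__ == "__main__":
        ffi.compile()    # internally calls recompile() with the preamble given to set_source()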
+ for name in dir(module.ffi): + if not name.startswith('_'): + attr = getattr(module.ffi, name) + if attr is not getattr(ffi, name, object()): + setattr(ffi, name, attr) + def typeof_disabled(*args, **kwds): + raise NotImplementedError + ffi._typeof = typeof_disabled + return module.lib diff --git a/Lib/site-packages/cffi/setuptools_ext.py b/Lib/site-packages/cffi/setuptools_ext.py new file mode 100644 index 0000000..9c6436d --- /dev/null +++ b/Lib/site-packages/cffi/setuptools_ext.py @@ -0,0 +1,161 @@ +import os + +try: + basestring +except NameError: + # Python 3.x + basestring = str + +def error(msg): + from distutils.errors import DistutilsSetupError + raise DistutilsSetupError(msg) + + +def execfile(filename, glob): + # We use execfile() (here rewritten for Python 3) instead of + # __import__() to load the build script. The problem with + # a normal import is that in some packages, the intermediate + # __init__.py files may already try to import the file that + # we are generating. + with open(filename) as f: + src = f.read() + src += '\n' # Python 2.6 compatibility + code = compile(src, filename, 'exec') + exec(code, glob, glob) + + +def add_cffi_module(dist, mod_spec): + from cffi.api import FFI + + if not isinstance(mod_spec, basestring): + error("argument to 'cffi_modules=...' must be a str or a list of str," + " not %r" % (type(mod_spec).__name__,)) + mod_spec = str(mod_spec) + try: + build_file_name, ffi_var_name = mod_spec.split(':') + except ValueError: + error("%r must be of the form 'path/build.py:ffi_variable'" % + (mod_spec,)) + if not os.path.exists(build_file_name): + ext = '' + rewritten = build_file_name.replace('.', '/') + '.py' + if os.path.exists(rewritten): + ext = ' (rewrite cffi_modules to [%r])' % ( + rewritten + ':' + ffi_var_name,) + error("%r does not name an existing file%s" % (build_file_name, ext)) + + mod_vars = {'__name__': '__cffi__', '__file__': build_file_name} + execfile(build_file_name, mod_vars) + + try: + ffi = mod_vars[ffi_var_name] + except KeyError: + error("%r: object %r not found in module" % (mod_spec, + ffi_var_name)) + if not isinstance(ffi, FFI): + ffi = ffi() # maybe it's a function instead of directly an ffi + if not isinstance(ffi, FFI): + error("%r is not an FFI instance (got %r)" % (mod_spec, + type(ffi).__name__)) + if not hasattr(ffi, '_assigned_source'): + error("%r: the set_source() method was not called" % (mod_spec,)) + module_name, source, source_extension, kwds = ffi._assigned_source + if ffi._windows_unicode: + kwds = kwds.copy() + ffi._apply_windows_unicode(kwds) + + if source is None: + _add_py_module(dist, ffi, module_name) + else: + _add_c_module(dist, ffi, module_name, source, source_extension, kwds) + + +def _add_c_module(dist, ffi, module_name, source, source_extension, kwds): + from distutils.core import Extension + from distutils.command.build_ext import build_ext + from distutils.dir_util import mkpath + from distutils import log + from cffi import recompiler + + allsources = ['$PLACEHOLDER'] + allsources.extend(kwds.pop('sources', [])) + ext = Extension(name=module_name, sources=allsources, **kwds) + + def make_mod(tmpdir, pre_run=None): + c_file = os.path.join(tmpdir, module_name + source_extension) + log.info("generating cffi module %r" % c_file) + mkpath(tmpdir) + # a setuptools-only, API-only hook: called with the "ext" and "ffi" + # arguments just before we turn the ffi into C code. To use it, + # subclass the 'distutils.command.build_ext.build_ext' class and + # add a method 'def pre_run(self, ext, ffi)'. 
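Because the pre_run hook is only described in the comment above, here is a hedged sketch of a setup.py that uses it together with the cffi_modules keyword handled at the bottom of this file; all project and file names are hypothetical:

    from setuptools import setup
    from distutils.command.build_ext import build_ext

    class build_ext_with_pre_run(build_ext):
        def pre_run(self, ext, ffi):
            # invoked just before the C source for this cffi module is generated
            print("generating C code for", ext.name)

    setup(
        name="example-pkg",                         # hypothetical project
        setup_requires=["cffi"],
        install_requires=["cffi"],
        cffi_modules=["pkg/build_foo.py:ffi"],      # "path/to/build.py:ffi_variable"
        cmdclass={"build_ext": build_ext_with_pre_run},
    )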
+ if pre_run is not None: + pre_run(ext, ffi) + updated = recompiler.make_c_source(ffi, module_name, source, c_file) + if not updated: + log.info("already up-to-date") + return c_file + + if dist.ext_modules is None: + dist.ext_modules = [] + dist.ext_modules.append(ext) + + base_class = dist.cmdclass.get('build_ext', build_ext) + class build_ext_make_mod(base_class): + def run(self): + if ext.sources[0] == '$PLACEHOLDER': + pre_run = getattr(self, 'pre_run', None) + ext.sources[0] = make_mod(self.build_temp, pre_run) + base_class.run(self) + dist.cmdclass['build_ext'] = build_ext_make_mod + # NB. multiple runs here will create multiple 'build_ext_make_mod' + # classes. Even in this case the 'build_ext' command should be + # run once; but just in case, the logic above does nothing if + # called again. + + +def _add_py_module(dist, ffi, module_name): + from distutils.dir_util import mkpath + from distutils.command.build_py import build_py + from distutils.command.build_ext import build_ext + from distutils import log + from cffi import recompiler + + def generate_mod(py_file): + log.info("generating cffi module %r" % py_file) + mkpath(os.path.dirname(py_file)) + updated = recompiler.make_py_source(ffi, module_name, py_file) + if not updated: + log.info("already up-to-date") + + base_class = dist.cmdclass.get('build_py', build_py) + class build_py_make_mod(base_class): + def run(self): + base_class.run(self) + module_path = module_name.split('.') + module_path[-1] += '.py' + generate_mod(os.path.join(self.build_lib, *module_path)) + dist.cmdclass['build_py'] = build_py_make_mod + + # the following is only for "build_ext -i" + base_class_2 = dist.cmdclass.get('build_ext', build_ext) + class build_ext_make_mod(base_class_2): + def run(self): + base_class_2.run(self) + if self.inplace: + # from get_ext_fullpath() in distutils/command/build_ext.py + module_path = module_name.split('.') + package = '.'.join(module_path[:-1]) + build_py = self.get_finalized_command('build_py') + package_dir = build_py.get_package_dir(package) + file_name = module_path[-1] + '.py' + generate_mod(os.path.join(package_dir, file_name)) + dist.cmdclass['build_ext'] = build_ext_make_mod + +def cffi_modules(dist, attr, value): + assert attr == 'cffi_modules' + if isinstance(value, basestring): + value = [value] + + for cffi_module in value: + add_cffi_module(dist, cffi_module) diff --git a/Lib/site-packages/cffi/vengine_cpy.py b/Lib/site-packages/cffi/vengine_cpy.py new file mode 100644 index 0000000..9f82988 --- /dev/null +++ b/Lib/site-packages/cffi/vengine_cpy.py @@ -0,0 +1,1007 @@ +import sys, imp +from . import model, ffiplatform + + +class VCPythonEngine(object): + _class_key = 'x' + _gen_python_module = True + + def __init__(self, verifier): + self.verifier = verifier + self.ffi = verifier.ffi + self._struct_pending_verification = {} + self._types_of_builtin_functions = {} + + def patch_extension_kwds(self, kwds): + pass + + def find_module(self, module_name, path, so_suffixes): + try: + f, filename, descr = imp.find_module(module_name, path) + except ImportError: + return None + if f is not None: + f.close() + # Note that after a setuptools installation, there are both .py + # and .so files with the same basename. The code here relies on + # imp.find_module() locating the .so in priority. 
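For contrast with the C-extension path, a hedged sketch of an ABI-mode build script (names hypothetical): passing None as the source is what routes add_cffi_module() above into _add_py_module(), and recompile() into make_py_source():

    import cffi

    ffi = cffi.FFI()
    ffi.cdef("int printf(const char *format, ...);")
    ffi.set_source("_hello_abi", None)   # None => ABI mode: a pure-Python module is written

    if __name__ == "__main__":
        ffi.compile()
    # at run time (illustrative):
    #   from _hello_abi import ffi
    #   lib = ffi.dlopen(None)           # symbols of the C library already loaded
    #   lib.printf(b"hello\n")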
+ if descr[0] not in so_suffixes: + return None + return filename + + def collect_types(self): + self._typesdict = {} + self._generate("collecttype") + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def _gettypenum(self, type): + # a KeyError here is a bug. please report it! :-) + return self._typesdict[type] + + def _do_collect_type(self, tp): + if ((not isinstance(tp, model.PrimitiveType) + or tp.name == 'long double') + and tp not in self._typesdict): + num = len(self._typesdict) + self._typesdict[tp] = num + + def write_source_to_f(self): + self.collect_types() + # + # The new module will have a _cffi_setup() function that receives + # objects from the ffi world, and that calls some setup code in + # the module. This setup code is split in several independent + # functions, e.g. one per constant. The functions are "chained" + # by ending in a tail call to each other. + # + # This is further split in two chained lists, depending on if we + # can do it at import-time or if we must wait for _cffi_setup() to + # provide us with the objects. This is needed because we + # need the values of the enum constants in order to build the + # that we may have to pass to _cffi_setup(). + # + # The following two 'chained_list_constants' items contains + # the head of these two chained lists, as a string that gives the + # call to do, if any. + self._chained_list_constants = ['((void)lib,0)', '((void)lib,0)'] + # + prnt = self._prnt + # first paste some standard set of lines that are mostly '#define' + prnt(cffimod_header) + prnt() + # then paste the C source given by the user, verbatim. + prnt(self.verifier.preamble) + prnt() + # + # call generate_cpy_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. + self._generate("decl") + # + # implement the function _cffi_setup_custom() as calling the + # head of the chained list. + self._generate_setup_custom() + prnt() + # + # produce the method table, including the entries for the + # generated Python->C function wrappers, which are done + # by generate_cpy_function_method(). + prnt('static PyMethodDef _cffi_methods[] = {') + self._generate("method") + prnt(' {"_cffi_setup", _cffi_setup, METH_VARARGS, NULL},') + prnt(' {NULL, NULL, 0, NULL} /* Sentinel */') + prnt('};') + prnt() + # + # standard init. + modname = self.verifier.get_module_name() + constants = self._chained_list_constants[False] + prnt('#if PY_MAJOR_VERSION >= 3') + prnt() + prnt('static struct PyModuleDef _cffi_module_def = {') + prnt(' PyModuleDef_HEAD_INIT,') + prnt(' "%s",' % modname) + prnt(' NULL,') + prnt(' -1,') + prnt(' _cffi_methods,') + prnt(' NULL, NULL, NULL, NULL') + prnt('};') + prnt() + prnt('PyMODINIT_FUNC') + prnt('PyInit_%s(void)' % modname) + prnt('{') + prnt(' PyObject *lib;') + prnt(' lib = PyModule_Create(&_cffi_module_def);') + prnt(' if (lib == NULL)') + prnt(' return NULL;') + prnt(' if (%s < 0 || _cffi_init() < 0) {' % (constants,)) + prnt(' Py_DECREF(lib);') + prnt(' return NULL;') + prnt(' }') + prnt(' return lib;') + prnt('}') + prnt() + prnt('#else') + prnt() + prnt('PyMODINIT_FUNC') + prnt('init%s(void)' % modname) + prnt('{') + prnt(' PyObject *lib;') + prnt(' lib = Py_InitModule("%s", _cffi_methods);' % modname) + prnt(' if (lib == NULL)') + prnt(' return;') + prnt(' if (%s < 0 || _cffi_init() < 0)' % (constants,)) + prnt(' return;') + prnt(' return;') + prnt('}') + prnt() + prnt('#endif') + + def load_library(self, flags=None): + # XXX review all usages of 'self' here! 
+ # import it as a new extension module + imp.acquire_lock() + try: + if hasattr(sys, "getdlopenflags"): + previous_flags = sys.getdlopenflags() + try: + if hasattr(sys, "setdlopenflags") and flags is not None: + sys.setdlopenflags(flags) + module = imp.load_dynamic(self.verifier.get_module_name(), + self.verifier.modulefilename) + except ImportError as e: + error = "importing %r: %s" % (self.verifier.modulefilename, e) + raise ffiplatform.VerificationError(error) + finally: + if hasattr(sys, "setdlopenflags"): + sys.setdlopenflags(previous_flags) + finally: + imp.release_lock() + # + # call loading_cpy_struct() to get the struct layout inferred by + # the C compiler + self._load(module, 'loading') + # + # the C code will need the objects. Collect them in + # order in a list. + revmapping = dict([(value, key) + for (key, value) in self._typesdict.items()]) + lst = [revmapping[i] for i in range(len(revmapping))] + lst = list(map(self.ffi._get_cached_btype, lst)) + # + # build the FFILibrary class and instance and call _cffi_setup(). + # this will set up some fields like '_cffi_types', and only then + # it will invoke the chained list of functions that will really + # build (notably) the constant objects, as if they are + # pointers, and store them as attributes on the 'library' object. + class FFILibrary(object): + _cffi_python_module = module + _cffi_ffi = self.ffi + _cffi_dir = [] + def __dir__(self): + return FFILibrary._cffi_dir + list(self.__dict__) + library = FFILibrary() + if module._cffi_setup(lst, ffiplatform.VerificationError, library): + import warnings + warnings.warn("reimporting %r might overwrite older definitions" + % (self.verifier.get_module_name())) + # + # finally, call the loaded_cpy_xxx() functions. This will perform + # the final adjustments, like copying the Python->C wrapper + # functions from the module to the 'library' object, and setting + # up the FFILibrary class with properties for the global C variables. 
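As a usage note, the FFILibrary object assembled here is what the legacy verify() API returns on CPython, where this engine is the default; a minimal hedged sketch with an illustrative C body:

    import cffi

    ffi = cffi.FFI()
    ffi.cdef("int add(int, int);")
    lib = ffi.verify("int add(int a, int b) { return a + b; }")   # compiled and loaded via this engine
    assert lib.add(2, 3) == 5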
+ self._load(module, 'loaded', library=library) + module._cffi_original_ffi = self.ffi + module._cffi_types_of_builtin_funcs = self._types_of_builtin_functions + return library + + def _get_declarations(self): + lst = [(key, tp) for (key, (tp, qual)) in + self.ffi._parser._declarations.items()] + lst.sort() + return lst + + def _generate(self, step_name): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_cpy_%s_%s' % (kind, + step_name)) + except AttributeError: + raise ffiplatform.VerificationError( + "not implemented in verify(): %r" % name) + try: + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _load(self, module, step_name, **kwds): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + method = getattr(self, '_%s_cpy_%s' % (step_name, kind)) + try: + method(tp, realname, module, **kwds) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _generate_nothing(self, tp, name): + pass + + def _loaded_noop(self, tp, name, module, **kwds): + pass + + # ---------- + + def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode): + extraarg = '' + if isinstance(tp, model.PrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + converter = '_cffi_to_c_int' + extraarg = ', %s' % tp.name + else: + converter = '(%s)_cffi_to_c_%s' % (tp.get_c_name(''), + tp.name.replace(' ', '_')) + errvalue = '-1' + # + elif isinstance(tp, model.PointerType): + self._convert_funcarg_to_c_ptr_or_array(tp, fromvar, + tovar, errcode) + return + # + elif isinstance(tp, (model.StructOrUnion, model.EnumType)): + # a struct (not a struct pointer) as a function argument + self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)' + % (tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + return + # + elif isinstance(tp, model.FunctionPtrType): + converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('') + extraarg = ', _cffi_type(%d)' % self._gettypenum(tp) + errvalue = 'NULL' + # + else: + raise NotImplementedError(tp) + # + self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg)) + self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % ( + tovar, tp.get_c_name(''), errvalue)) + self._prnt(' %s;' % errcode) + + def _extra_local_variables(self, tp, localvars): + if isinstance(tp, model.PointerType): + localvars.add('Py_ssize_t datasize') + + def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode): + self._prnt(' datasize = _cffi_prepare_pointer_call_argument(') + self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % ( + self._gettypenum(tp), fromvar, tovar)) + self._prnt(' if (datasize != 0) {') + self._prnt(' if (datasize < 0)') + self._prnt(' %s;' % errcode) + self._prnt(' %s = alloca((size_t)datasize);' % (tovar,)) + self._prnt(' memset((void *)%s, 0, (size_t)datasize);' % (tovar,)) + self._prnt(' if (_cffi_convert_array_from_object(' + '(char *)%s, _cffi_type(%d), %s) < 0)' % ( + tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + self._prnt(' }') + + def _convert_expr_from_c(self, tp, var, context): + if isinstance(tp, model.PrimitiveType): + if tp.is_integer_type(): + return '_cffi_from_c_int(%s, %s)' % (var, tp.name) + elif tp.name != 'long double': + return '_cffi_from_c_%s(%s)' % (tp.name.replace(' ', '_'), var) + else: + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, 
(model.PointerType, model.FunctionPtrType)): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.ArrayType): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(model.PointerType(tp.item))) + elif isinstance(tp, model.StructType): + if tp.fldnames is None: + raise TypeError("'%s' is used as %s, but is opaque" % ( + tp._get_c_name(), context)) + return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.EnumType): + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + else: + raise NotImplementedError(tp) + + # ---------- + # typedefs: generates no code so far + + _generate_cpy_typedef_collecttype = _generate_nothing + _generate_cpy_typedef_decl = _generate_nothing + _generate_cpy_typedef_method = _generate_nothing + _loading_cpy_typedef = _loaded_noop + _loaded_cpy_typedef = _loaded_noop + + # ---------- + # function declarations + + def _generate_cpy_function_collecttype(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + self._do_collect_type(tp) + else: + # don't call _do_collect_type(tp) in this common case, + # otherwise test_autofilled_struct_as_argument fails + for type in tp.args: + self._do_collect_type(type) + self._do_collect_type(tp.result) + + def _generate_cpy_function_decl(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no CPython wrapper) + self._generate_cpy_const(False, name, tp) + return + prnt = self._prnt + numargs = len(tp.args) + if numargs == 0: + argname = 'noarg' + elif numargs == 1: + argname = 'arg0' + else: + argname = 'args' + prnt('static PyObject *') + prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname)) + prnt('{') + # + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + prnt(' %s;' % type.get_c_name(' x%d' % i, context)) + # + localvars = set() + for type in tp.args: + self._extra_local_variables(type, localvars) + for decl in localvars: + prnt(' %s;' % (decl,)) + # + if not isinstance(tp.result, model.VoidType): + result_code = 'result = ' + context = 'result of %s' % name + prnt(' %s;' % tp.result.get_c_name(' result', context)) + else: + result_code = '' + # + if len(tp.args) > 1: + rng = range(len(tp.args)) + for i in rng: + prnt(' PyObject *arg%d;' % i) + prnt() + prnt(' if (!PyArg_ParseTuple(args, "%s:%s", %s))' % ( + 'O' * numargs, name, ', '.join(['&arg%d' % i for i in rng]))) + prnt(' return NULL;') + prnt() + # + for i, type in enumerate(tp.args): + self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i, + 'return NULL') + prnt() + # + prnt(' Py_BEGIN_ALLOW_THREADS') + prnt(' _cffi_restore_errno();') + prnt(' { %s%s(%s); }' % ( + result_code, name, + ', '.join(['x%d' % i for i in range(len(tp.args))]))) + prnt(' _cffi_save_errno();') + prnt(' Py_END_ALLOW_THREADS') + prnt() + # + prnt(' (void)self; /* unused */') + if numargs == 0: + prnt(' (void)noarg; /* unused */') + if result_code: + prnt(' return %s;' % + self._convert_expr_from_c(tp.result, 'result', 'result type')) + else: + prnt(' Py_INCREF(Py_None);') + prnt(' return Py_None;') + prnt('}') + prnt() + + def _generate_cpy_function_method(self, tp, name): + if tp.ellipsis: + return + numargs = len(tp.args) + if numargs == 0: + 
meth = 'METH_NOARGS' + elif numargs == 1: + meth = 'METH_O' + else: + meth = 'METH_VARARGS' + self._prnt(' {"%s", _cffi_f_%s, %s, NULL},' % (name, name, meth)) + + _loading_cpy_function = _loaded_noop + + def _loaded_cpy_function(self, tp, name, module, library): + if tp.ellipsis: + return + func = getattr(module, name) + setattr(library, name, func) + self._types_of_builtin_functions[func] = tp + + # ---------- + # named structs + + _generate_cpy_struct_collecttype = _generate_nothing + def _generate_cpy_struct_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'struct', name) + def _generate_cpy_struct_method(self, tp, name): + self._generate_struct_or_union_method(tp, 'struct', name) + def _loading_cpy_struct(self, tp, name, module): + self._loading_struct_or_union(tp, 'struct', name, module) + def _loaded_cpy_struct(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + _generate_cpy_union_collecttype = _generate_nothing + def _generate_cpy_union_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'union', name) + def _generate_cpy_union_method(self, tp, name): + self._generate_struct_or_union_method(tp, 'union', name) + def _loading_cpy_union(self, tp, name, module): + self._loading_struct_or_union(tp, 'union', name, module) + def _loaded_cpy_union(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_struct_or_union_decl(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + checkfuncname = '_cffi_check_%s_%s' % (prefix, name) + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + cname = ('%s %s' % (prefix, name)).strip() + # + prnt = self._prnt + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if (isinstance(ftype, model.PrimitiveType) + and ftype.is_integer_type()) or fbitsize >= 0: + # accept all integers, but complain on float or double + prnt(' (void)((p->%s) << 1);' % fname) + else: + # only accept exactly the type declared. 
+ try: + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except ffiplatform.VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + prnt('static PyObject *') + prnt('%s(PyObject *self, PyObject *noarg)' % (layoutfuncname,)) + prnt('{') + prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname) + prnt(' static Py_ssize_t nums[] = {') + prnt(' sizeof(%s),' % cname) + prnt(' offsetof(struct _cffi_aligncheck, y),') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + prnt(' offsetof(%s, %s),' % (cname, fname)) + if isinstance(ftype, model.ArrayType) and ftype.length is None: + prnt(' 0, /* %s */' % ftype._get_c_name()) + else: + prnt(' sizeof(((%s *)0)->%s),' % (cname, fname)) + prnt(' -1') + prnt(' };') + prnt(' (void)self; /* unused */') + prnt(' (void)noarg; /* unused */') + prnt(' return _cffi_get_struct_layout(nums);') + prnt(' /* the next line is not executed, but compiled */') + prnt(' %s(0);' % (checkfuncname,)) + prnt('}') + prnt() + + def _generate_struct_or_union_method(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + self._prnt(' {"%s", %s, METH_NOARGS, NULL},' % (layoutfuncname, + layoutfuncname)) + + def _loading_struct_or_union(self, tp, prefix, name, module): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + # + function = getattr(module, layoutfuncname) + layout = function() + if isinstance(tp, model.StructOrUnion) and tp.partial: + # use the function()'s sizes and offsets to guide the + # layout of the struct + totalsize = layout[0] + totalalignment = layout[1] + fieldofs = layout[2::2] + fieldsize = layout[3::2] + tp.force_flatten() + assert len(fieldofs) == len(fieldsize) == len(tp.fldnames) + tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment + else: + cname = ('%s %s' % (prefix, name)).strip() + self._struct_pending_verification[tp] = layout, cname + + def _loaded_struct_or_union(self, tp): + if tp.fldnames is None: + return # nothing to do with opaque structs + self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered + + if tp in self._struct_pending_verification: + # check that the layout sizes and offsets match the real ones + def check(realvalue, expectedvalue, msg): + if realvalue != expectedvalue: + raise ffiplatform.VerificationError( + "%s (we have %d, but C compiler says %d)" + % (msg, expectedvalue, realvalue)) + ffi = self.ffi + BStruct = ffi._get_cached_btype(tp) + layout, cname = self._struct_pending_verification.pop(tp) + check(layout[0], ffi.sizeof(BStruct), "wrong total size") + check(layout[1], ffi.alignof(BStruct), "wrong total alignment") + i = 2 + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + check(layout[i], ffi.offsetof(BStruct, fname), + "wrong offset for field %r" % (fname,)) + if layout[i+1] != 0: + BField = ffi._get_cached_btype(ftype) + check(layout[i+1], ffi.sizeof(BField), + "wrong size for field %r" % (fname,)) + i += 2 + assert i == len(layout) + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. 
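The layout list consumed by _loading_struct_or_union above has a fixed shape: entry 0 is the total size, entry 1 the total alignment, then one (offset, size) pair per non-bitfield field, with a size of 0 standing for an open-ended array; the -1 terminator emitted by the C code is gone by the time the Python side slices the list. A minimal standalone sketch of that decoding, using made-up numbers for a hypothetical struct point { int x; int y; }:

# Standalone sketch of how the layout produced by a generated
# _cffi_layout_* function is interpreted (see _loading_struct_or_union
# above).  The numbers are made up for 'struct point { int x; int y; }'
# on a platform with 4-byte ints; they are only for illustration.
def decode_layout(nums, fieldnames):
    # nums: [total size, total alignment, off0, size0, off1, size1, ...]
    totalsize, totalalignment = nums[0], nums[1]
    fieldofs = nums[2::2]
    fieldsize = nums[3::2]
    assert len(fieldofs) == len(fieldsize) == len(fieldnames)
    return {
        "size": totalsize,
        "alignment": totalalignment,
        "fields": dict(zip(fieldnames, zip(fieldofs, fieldsize))),
    }

if __name__ == "__main__":
    # hypothetical layout for 'struct point { int x; int y; }'
    print(decode_layout([8, 4, 0, 4, 4, 4], ["x", "y"]))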
+ + _generate_cpy_anonymous_collecttype = _generate_nothing + + def _generate_cpy_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_decl(tp, name, '') + else: + self._generate_struct_or_union_decl(tp, '', name) + + def _generate_cpy_anonymous_method(self, tp, name): + if not isinstance(tp, model.EnumType): + self._generate_struct_or_union_method(tp, '', name) + + def _loading_cpy_anonymous(self, tp, name, module): + if isinstance(tp, model.EnumType): + self._loading_cpy_enum(tp, name, module) + else: + self._loading_struct_or_union(tp, '', name, module) + + def _loaded_cpy_anonymous(self, tp, name, module, **kwds): + if isinstance(tp, model.EnumType): + self._loaded_cpy_enum(tp, name, module, **kwds) + else: + self._loaded_struct_or_union(tp) + + # ---------- + # constants, likely declared with '#define' + + def _generate_cpy_const(self, is_int, name, tp=None, category='const', + vartp=None, delayed=True, size_too=False, + check_value=None): + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + prnt('static int %s(PyObject *lib)' % funcname) + prnt('{') + prnt(' PyObject *o;') + prnt(' int res;') + if not is_int: + prnt(' %s;' % (vartp or tp).get_c_name(' i', name)) + else: + assert category == 'const' + # + if check_value is not None: + self._check_int_constant_value(name, check_value) + # + if not is_int: + if category == 'var': + realexpr = '&' + name + else: + realexpr = name + prnt(' i = (%s);' % (realexpr,)) + prnt(' o = %s;' % (self._convert_expr_from_c(tp, 'i', + 'variable type'),)) + assert delayed + else: + prnt(' o = _cffi_from_c_int_const(%s);' % name) + prnt(' if (o == NULL)') + prnt(' return -1;') + if size_too: + prnt(' {') + prnt(' PyObject *o1 = o;') + prnt(' o = Py_BuildValue("On", o1, (Py_ssize_t)sizeof(%s));' + % (name,)) + prnt(' Py_DECREF(o1);') + prnt(' if (o == NULL)') + prnt(' return -1;') + prnt(' }') + prnt(' res = PyObject_SetAttrString(lib, "%s", o);' % name) + prnt(' Py_DECREF(o);') + prnt(' if (res < 0)') + prnt(' return -1;') + prnt(' return %s;' % self._chained_list_constants[delayed]) + self._chained_list_constants[delayed] = funcname + '(lib)' + prnt('}') + prnt() + + def _generate_cpy_constant_collecttype(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + if not is_int: + self._do_collect_type(tp) + + def _generate_cpy_constant_decl(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + self._generate_cpy_const(is_int, name, tp) + + _generate_cpy_constant_method = _generate_nothing + _loading_cpy_constant = _loaded_noop + _loaded_cpy_constant = _loaded_noop + + # ---------- + # enums + + def _check_int_constant_value(self, name, value, err_prefix=''): + prnt = self._prnt + if value <= 0: + prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % ( + name, name, value)) + else: + prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % ( + name, name, value)) + prnt(' char buf[64];') + prnt(' if ((%s) <= 0)' % name) + prnt(' snprintf(buf, 63, "%%ld", (long)(%s));' % name) + prnt(' else') + prnt(' snprintf(buf, 63, "%%lu", (unsigned long)(%s));' % + name) + prnt(' PyErr_Format(_cffi_VerificationError,') + prnt(' "%s%s has the real value %s, not %s",') + prnt(' "%s", "%s", buf, "%d");' % ( + err_prefix, name, value)) + prnt(' return -1;') + prnt(' }') + + def _enum_funcname(self, prefix, name): + # "$enum_$1" => "___D_enum____D_1" + name = name.replace('$', '___D_') + return '_cffi_e_%s_%s' % (prefix, name) + + def 
_generate_cpy_enum_decl(self, tp, name, prefix='enum'): + if tp.partial: + for enumerator in tp.enumerators: + self._generate_cpy_const(True, enumerator, delayed=False) + return + # + funcname = self._enum_funcname(prefix, name) + prnt = self._prnt + prnt('static int %s(PyObject *lib)' % funcname) + prnt('{') + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._check_int_constant_value(enumerator, enumvalue, + "enum %s: " % name) + prnt(' return %s;' % self._chained_list_constants[True]) + self._chained_list_constants[True] = funcname + '(lib)' + prnt('}') + prnt() + + _generate_cpy_enum_collecttype = _generate_nothing + _generate_cpy_enum_method = _generate_nothing + + def _loading_cpy_enum(self, tp, name, module): + if tp.partial: + enumvalues = [getattr(module, enumerator) + for enumerator in tp.enumerators] + tp.enumvalues = tuple(enumvalues) + tp.partial_resolved = True + + def _loaded_cpy_enum(self, tp, name, module, library): + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + setattr(library, enumerator, enumvalue) + + # ---------- + # macros: for now only for integers + + def _generate_cpy_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_cpy_const(True, name, check_value=check_value) + + _generate_cpy_macro_collecttype = _generate_nothing + _generate_cpy_macro_method = _generate_nothing + _loading_cpy_macro = _loaded_noop + _loaded_cpy_macro = _loaded_noop + + # ---------- + # global variables + + def _generate_cpy_variable_collecttype(self, tp, name): + if isinstance(tp, model.ArrayType): + tp_ptr = model.PointerType(tp.item) + else: + tp_ptr = model.PointerType(tp) + self._do_collect_type(tp_ptr) + + def _generate_cpy_variable_decl(self, tp, name): + if isinstance(tp, model.ArrayType): + tp_ptr = model.PointerType(tp.item) + self._generate_cpy_const(False, name, tp, vartp=tp_ptr, + size_too = (tp.length == '...')) + else: + tp_ptr = model.PointerType(tp) + self._generate_cpy_const(False, name, tp_ptr, category='var') + + _generate_cpy_variable_method = _generate_nothing + _loading_cpy_variable = _loaded_noop + + def _loaded_cpy_variable(self, tp, name, module, library): + value = getattr(library, name) + if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the + # sense that "a=..." is forbidden + if tp.length == '...': + assert isinstance(value, tuple) + (value, size) = value + BItemType = self.ffi._get_cached_btype(tp.item) + length, rest = divmod(size, self.ffi.sizeof(BItemType)) + if rest != 0: + raise ffiplatform.VerificationError( + "bad size: %r does not seem to be an array of %s" % + (name, tp.item)) + tp = tp.resolve_length(length) + # 'value' is a <cdata 'type *'> which we have to replace with + # a <cdata 'type[N]'> if the N is actually known + if tp.length is not None: + BArray = self.ffi._get_cached_btype(tp) + value = self.ffi.cast(BArray, value) + setattr(library, name, value) + return + # remove ptr=<cdata 'type *'> from the library instance, and replace + # it by a property on the class, which reads/writes into ptr[0].
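The code that follows implements the comment above: the raw cdata pointer is removed from the library instance and replaced by a property on the FFILibrary class, so every read or write of the attribute goes through ptr[0]. A standalone sketch of the same pattern, with a one-element Python list standing in for the cdata pointer and hypothetical names (FFILibraryDemo, errno_ish, fake_ptr):

# Sketch of the property trick used by _loaded_cpy_variable above.
class FFILibraryDemo(object):
    _cffi_dir = []

    def __dir__(self):
        return FFILibraryDemo._cffi_dir + list(self.__dict__)

def expose_variable(library, name, ptr):
    # 'ptr' plays the role of the <cdata 'int *'>; ptr[0] is the C value.
    def getter(library):
        return ptr[0]
    def setter(library, value):
        ptr[0] = value
    setattr(type(library), name, property(getter, setter))
    type(library)._cffi_dir.append(name)

if __name__ == "__main__":
    lib = FFILibraryDemo()
    fake_ptr = [42]                 # stand-in for the real cdata pointer
    expose_variable(lib, "errno_ish", fake_ptr)
    print(lib.errno_ish)            # 42, read through ptr[0]
    lib.errno_ish = 7               # write goes into ptr[0]
    print(fake_ptr[0])              # 7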
+ ptr = value + delattr(library, name) + def getter(library): + return ptr[0] + def setter(library, value): + ptr[0] = value + setattr(type(library), name, property(getter, setter)) + type(library)._cffi_dir.append(name) + + # ---------- + + def _generate_setup_custom(self): + prnt = self._prnt + prnt('static int _cffi_setup_custom(PyObject *lib)') + prnt('{') + prnt(' return %s;' % self._chained_list_constants[True]) + prnt('}') + +cffimod_header = r''' +#include <Python.h> +#include <stddef.h> + +/* this block of #ifs should be kept exactly identical between + c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py */ +#if defined(_MSC_VER) +# include <malloc.h> /* for alloca() */ +# if _MSC_VER < 1600 /* MSVC < 2010 */ + typedef __int8 int8_t; + typedef __int16 int16_t; + typedef __int32 int32_t; + typedef __int64 int64_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + typedef unsigned __int64 uint64_t; + typedef __int8 int_least8_t; + typedef __int16 int_least16_t; + typedef __int32 int_least32_t; + typedef __int64 int_least64_t; + typedef unsigned __int8 uint_least8_t; + typedef unsigned __int16 uint_least16_t; + typedef unsigned __int32 uint_least32_t; + typedef unsigned __int64 uint_least64_t; + typedef __int8 int_fast8_t; + typedef __int16 int_fast16_t; + typedef __int32 int_fast32_t; + typedef __int64 int_fast64_t; + typedef unsigned __int8 uint_fast8_t; + typedef unsigned __int16 uint_fast16_t; + typedef unsigned __int32 uint_fast32_t; + typedef unsigned __int64 uint_fast64_t; + typedef __int64 intmax_t; + typedef unsigned __int64 uintmax_t; +# else +# include <stdint.h> +# endif +# if _MSC_VER < 1800 /* MSVC < 2013 */ + typedef unsigned char _Bool; +# endif +#else +# include <stdint.h> +# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) +# include <alloca.h> +# endif +#endif + +#if PY_MAJOR_VERSION < 3 +# undef PyCapsule_CheckExact +# undef PyCapsule_GetPointer +# define PyCapsule_CheckExact(capsule) (PyCObject_Check(capsule)) +# define PyCapsule_GetPointer(capsule, name) \ + (PyCObject_AsVoidPtr(capsule)) +#endif + +#if PY_MAJOR_VERSION >= 3 +# define PyInt_FromLong PyLong_FromLong +#endif + +#define _cffi_from_c_double PyFloat_FromDouble +#define _cffi_from_c_float PyFloat_FromDouble +#define _cffi_from_c_long PyInt_FromLong +#define _cffi_from_c_ulong PyLong_FromUnsignedLong +#define _cffi_from_c_longlong PyLong_FromLongLong +#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong + +#define _cffi_to_c_double PyFloat_AsDouble +#define _cffi_to_c_float PyFloat_AsDouble + +#define _cffi_from_c_int_const(x) \ + (((x) > 0) ? \ + ((unsigned long long)(x) <= (unsigned long long)LONG_MAX) ? \ + PyInt_FromLong((long)(x)) : \ + PyLong_FromUnsignedLongLong((unsigned long long)(x)) : \ + ((long long)(x) >= (long long)LONG_MIN) ? \ + PyInt_FromLong((long)(x)) : \ + PyLong_FromLongLong((long long)(x))) + +#define _cffi_from_c_int(x, type) \ + (((type)-1) > 0 ? /* unsigned */ \ + (sizeof(type) < sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + sizeof(type) == sizeof(long) ? \ + PyLong_FromUnsignedLong((unsigned long)x) : \ + PyLong_FromUnsignedLongLong((unsigned long long)x)) : \ + (sizeof(type) <= sizeof(long) ? \ + PyInt_FromLong((long)x) : \ + PyLong_FromLongLong((long long)x))) + +#define _cffi_to_c_int(o, type) \ + ((type)( \ + sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \ + : (type)_cffi_to_c_i8(o)) : \ + sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \ + : (type)_cffi_to_c_i16(o)) : \ + sizeof(type) == 4 ? (((type)-1) > 0 ?
(type)_cffi_to_c_u32(o) \ + : (type)_cffi_to_c_i32(o)) : \ + sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o) \ + : (type)_cffi_to_c_i64(o)) : \ + (Py_FatalError("unsupported size for type " #type), (type)0))) + +#define _cffi_to_c_i8 \ + ((int(*)(PyObject *))_cffi_exports[1]) +#define _cffi_to_c_u8 \ + ((int(*)(PyObject *))_cffi_exports[2]) +#define _cffi_to_c_i16 \ + ((int(*)(PyObject *))_cffi_exports[3]) +#define _cffi_to_c_u16 \ + ((int(*)(PyObject *))_cffi_exports[4]) +#define _cffi_to_c_i32 \ + ((int(*)(PyObject *))_cffi_exports[5]) +#define _cffi_to_c_u32 \ + ((unsigned int(*)(PyObject *))_cffi_exports[6]) +#define _cffi_to_c_i64 \ + ((long long(*)(PyObject *))_cffi_exports[7]) +#define _cffi_to_c_u64 \ + ((unsigned long long(*)(PyObject *))_cffi_exports[8]) +#define _cffi_to_c_char \ + ((int(*)(PyObject *))_cffi_exports[9]) +#define _cffi_from_c_pointer \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[10]) +#define _cffi_to_c_pointer \ + ((char *(*)(PyObject *, CTypeDescrObject *))_cffi_exports[11]) +#define _cffi_get_struct_layout \ + ((PyObject *(*)(Py_ssize_t[]))_cffi_exports[12]) +#define _cffi_restore_errno \ + ((void(*)(void))_cffi_exports[13]) +#define _cffi_save_errno \ + ((void(*)(void))_cffi_exports[14]) +#define _cffi_from_c_char \ + ((PyObject *(*)(char))_cffi_exports[15]) +#define _cffi_from_c_deref \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[16]) +#define _cffi_to_c \ + ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[17]) +#define _cffi_from_c_struct \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[18]) +#define _cffi_to_c_wchar_t \ + ((wchar_t(*)(PyObject *))_cffi_exports[19]) +#define _cffi_from_c_wchar_t \ + ((PyObject *(*)(wchar_t))_cffi_exports[20]) +#define _cffi_to_c_long_double \ + ((long double(*)(PyObject *))_cffi_exports[21]) +#define _cffi_to_c__Bool \ + ((_Bool(*)(PyObject *))_cffi_exports[22]) +#define _cffi_prepare_pointer_call_argument \ + ((Py_ssize_t(*)(CTypeDescrObject *, PyObject *, char **))_cffi_exports[23]) +#define _cffi_convert_array_from_object \ + ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[24]) +#define _CFFI_NUM_EXPORTS 25 + +typedef struct _ctypedescr CTypeDescrObject; + +static void *_cffi_exports[_CFFI_NUM_EXPORTS]; +static PyObject *_cffi_types, *_cffi_VerificationError; + +static int _cffi_setup_custom(PyObject *lib); /* forward */ + +static PyObject *_cffi_setup(PyObject *self, PyObject *args) +{ + PyObject *library; + int was_alive = (_cffi_types != NULL); + (void)self; /* unused */ + if (!PyArg_ParseTuple(args, "OOO", &_cffi_types, &_cffi_VerificationError, + &library)) + return NULL; + Py_INCREF(_cffi_types); + Py_INCREF(_cffi_VerificationError); + if (_cffi_setup_custom(library) < 0) + return NULL; + return PyBool_FromLong(was_alive); +} + +static int _cffi_init(void) +{ + PyObject *module, *c_api_object = NULL; + + module = PyImport_ImportModule("_cffi_backend"); + if (module == NULL) + goto failure; + + c_api_object = PyObject_GetAttrString(module, "_C_API"); + if (c_api_object == NULL) + goto failure; + if (!PyCapsule_CheckExact(c_api_object)) { + PyErr_SetNone(PyExc_ImportError); + goto failure; + } + memcpy(_cffi_exports, PyCapsule_GetPointer(c_api_object, "cffi"), + _CFFI_NUM_EXPORTS * sizeof(void *)); + + Py_DECREF(module); + Py_DECREF(c_api_object); + return 0; + + failure: + Py_XDECREF(module); + Py_XDECREF(c_api_object); + return -1; +} + +#define _cffi_type(num) ((CTypeDescrObject *)PyList_GET_ITEM(_cffi_types, num)) + 
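Both this CPython engine and the generic engine below are driven through cffi's verify() entry point, which writes the generated source, compiles it, and returns the library object built by load_library(). A minimal usage sketch, assuming the cffi package and a working C compiler are available; the declaration and the function name add1 are made up for illustration:

# Hypothetical use of the verify() machinery implemented by these engines.
from cffi import FFI

ffi = FFI()
ffi.cdef("int add1(int x);")            # declaration seen by the verifier
lib = ffi.verify("""
    int add1(int x) { return x + 1; }
""")                                     # compiles and loads a small module
print(lib.add1(41))                      # -> 42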
+/**********/ +''' diff --git a/Lib/site-packages/cffi/vengine_gen.py b/Lib/site-packages/cffi/vengine_gen.py new file mode 100644 index 0000000..9cc3853 --- /dev/null +++ b/Lib/site-packages/cffi/vengine_gen.py @@ -0,0 +1,668 @@ +import sys, os +import types + +from . import model, ffiplatform + + +class VGenericEngine(object): + _class_key = 'g' + _gen_python_module = False + + def __init__(self, verifier): + self.verifier = verifier + self.ffi = verifier.ffi + self.export_symbols = [] + self._struct_pending_verification = {} + + def patch_extension_kwds(self, kwds): + # add 'export_symbols' to the dictionary. Note that we add the + # list before filling it. When we fill it, it will thus also show + # up in kwds['export_symbols']. + kwds.setdefault('export_symbols', self.export_symbols) + + def find_module(self, module_name, path, so_suffixes): + for so_suffix in so_suffixes: + basename = module_name + so_suffix + if path is None: + path = sys.path + for dirname in path: + filename = os.path.join(dirname, basename) + if os.path.isfile(filename): + return filename + + def collect_types(self): + pass # not needed in the generic engine + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def write_source_to_f(self): + prnt = self._prnt + # first paste some standard set of lines that are mostly '#include' + prnt(cffimod_header) + # then paste the C source given by the user, verbatim. + prnt(self.verifier.preamble) + # + # call generate_gen_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. + self._generate('decl') + # + # on Windows, distutils insists on putting init_cffi_xyz in + # 'export_symbols', so instead of fighting it, just give up and + # give it one + if sys.platform == 'win32': + if sys.version_info >= (3,): + prefix = 'PyInit_' + else: + prefix = 'init' + modname = self.verifier.get_module_name() + prnt("void %s%s(void) { }\n" % (prefix, modname)) + + def load_library(self, flags=0): + # import it with the CFFI backend + backend = self.ffi._backend + # needs to make a path that contains '/', on Posix + filename = os.path.join(os.curdir, self.verifier.modulefilename) + module = backend.load_library(filename, flags) + # + # call loading_gen_struct() to get the struct layout inferred by + # the C compiler + self._load(module, 'loading') + + # build the FFILibrary class and instance, this is a module subclass + # because modules are expected to have usually-constant-attributes and + # in PyPy this means the JIT is able to treat attributes as constant, + # which we want. + class FFILibrary(types.ModuleType): + _cffi_generic_module = module + _cffi_ffi = self.ffi + _cffi_dir = [] + def __dir__(self): + return FFILibrary._cffi_dir + library = FFILibrary("") + # + # finally, call the loaded_gen_xxx() functions. This will set + # up the 'library' object. 
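Before the loaded_gen_xxx() pass runs on the next line, note the shape of the FFILibrary object built in load_library above: it is a types.ModuleType subclass because, on PyPy, module attributes are treated as quasi-constant by the JIT, and its __dir__ is backed by the _cffi_dir list that the loading code fills in. A small standalone sketch of that shape, with a dummy attribute FOO:

# Sketch of the ModuleType-based FFILibrary used by the generic engine.
import types

class FFILibraryDemo(types.ModuleType):
    _cffi_dir = []
    def __dir__(self):
        return FFILibraryDemo._cffi_dir

library = FFILibraryDemo("")            # same empty module name as above
setattr(library, "FOO", 42)             # a loaded constant would go here
type(library)._cffi_dir.append("FOO")
print(library.FOO)                      # 42
print(dir(library))                     # ['FOO']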
+ self._load(module, 'loaded', library=library) + return library + + def _get_declarations(self): + lst = [(key, tp) for (key, (tp, qual)) in + self.ffi._parser._declarations.items()] + lst.sort() + return lst + + def _generate(self, step_name): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_gen_%s_%s' % (kind, + step_name)) + except AttributeError: + raise ffiplatform.VerificationError( + "not implemented in verify(): %r" % name) + try: + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _load(self, module, step_name, **kwds): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + method = getattr(self, '_%s_gen_%s' % (step_name, kind)) + try: + method(tp, realname, module, **kwds) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _generate_nothing(self, tp, name): + pass + + def _loaded_noop(self, tp, name, module, **kwds): + pass + + # ---------- + # typedefs: generates no code so far + + _generate_gen_typedef_decl = _generate_nothing + _loading_gen_typedef = _loaded_noop + _loaded_gen_typedef = _loaded_noop + + # ---------- + # function declarations + + def _generate_gen_function_decl(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no _cffi_f_%s wrapper) + self._generate_gen_const(False, name, tp) + return + prnt = self._prnt + numargs = len(tp.args) + argnames = [] + for i, type in enumerate(tp.args): + indirection = '' + if isinstance(type, model.StructOrUnion): + indirection = '*' + argnames.append('%sx%d' % (indirection, i)) + context = 'argument of %s' % name + arglist = [type.get_c_name(' %s' % arg, context) + for type, arg in zip(tp.args, argnames)] + tpresult = tp.result + if isinstance(tpresult, model.StructOrUnion): + arglist.insert(0, tpresult.get_c_name(' *r', context)) + tpresult = model.void_type + arglist = ', '.join(arglist) or 'void' + wrappername = '_cffi_f_%s' % name + self.export_symbols.append(wrappername) + if tp.abi: + abi = tp.abi + ' ' + else: + abi = '' + funcdecl = ' %s%s(%s)' % (abi, wrappername, arglist) + context = 'result of %s' % name + prnt(tpresult.get_c_name(funcdecl, context)) + prnt('{') + # + if isinstance(tp.result, model.StructOrUnion): + result_code = '*r = ' + elif not isinstance(tp.result, model.VoidType): + result_code = 'return ' + else: + result_code = '' + prnt(' %s%s(%s);' % (result_code, name, ', '.join(argnames))) + prnt('}') + prnt() + + _loading_gen_function = _loaded_noop + + def _loaded_gen_function(self, tp, name, module, library): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + newfunction = self._load_constant(False, tp, name, module) + else: + indirections = [] + base_tp = tp + if (any(isinstance(typ, model.StructOrUnion) for typ in tp.args) + or isinstance(tp.result, model.StructOrUnion)): + indirect_args = [] + for i, typ in enumerate(tp.args): + if isinstance(typ, model.StructOrUnion): + typ = model.PointerType(typ) + indirections.append((i, typ)) + indirect_args.append(typ) + indirect_result = tp.result + if isinstance(indirect_result, model.StructOrUnion): + if indirect_result.fldtypes is None: + raise TypeError("'%s' is used as result type, " + "but is opaque" % ( + indirect_result._get_c_name(),)) + indirect_result = 
model.PointerType(indirect_result) + indirect_args.insert(0, indirect_result) + indirections.insert(0, ("result", indirect_result)) + indirect_result = model.void_type + tp = model.FunctionPtrType(tuple(indirect_args), + indirect_result, tp.ellipsis) + BFunc = self.ffi._get_cached_btype(tp) + wrappername = '_cffi_f_%s' % name + newfunction = module.load_function(BFunc, wrappername) + for i, typ in indirections: + newfunction = self._make_struct_wrapper(newfunction, i, typ, + base_tp) + setattr(library, name, newfunction) + type(library)._cffi_dir.append(name) + + def _make_struct_wrapper(self, oldfunc, i, tp, base_tp): + backend = self.ffi._backend + BType = self.ffi._get_cached_btype(tp) + if i == "result": + ffi = self.ffi + def newfunc(*args): + res = ffi.new(BType) + oldfunc(res, *args) + return res[0] + else: + def newfunc(*args): + args = args[:i] + (backend.newp(BType, args[i]),) + args[i+1:] + return oldfunc(*args) + newfunc._cffi_base_type = base_tp + return newfunc + + # ---------- + # named structs + + def _generate_gen_struct_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'struct', name) + + def _loading_gen_struct(self, tp, name, module): + self._loading_struct_or_union(tp, 'struct', name, module) + + def _loaded_gen_struct(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_gen_union_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'union', name) + + def _loading_gen_union(self, tp, name, module): + self._loading_struct_or_union(tp, 'union', name, module) + + def _loaded_gen_union(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_struct_or_union_decl(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + checkfuncname = '_cffi_check_%s_%s' % (prefix, name) + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + cname = ('%s %s' % (prefix, name)).strip() + # + prnt = self._prnt + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + prnt(' (void)p;') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if (isinstance(ftype, model.PrimitiveType) + and ftype.is_integer_type()) or fbitsize >= 0: + # accept all integers, but complain on float or double + prnt(' (void)((p->%s) << 1);' % fname) + else: + # only accept exactly the type declared. 
+ try: + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual), + fname)) + except ffiplatform.VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + self.export_symbols.append(layoutfuncname) + prnt('intptr_t %s(intptr_t i)' % (layoutfuncname,)) + prnt('{') + prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname) + prnt(' static intptr_t nums[] = {') + prnt(' sizeof(%s),' % cname) + prnt(' offsetof(struct _cffi_aligncheck, y),') + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + prnt(' offsetof(%s, %s),' % (cname, fname)) + if isinstance(ftype, model.ArrayType) and ftype.length is None: + prnt(' 0, /* %s */' % ftype._get_c_name()) + else: + prnt(' sizeof(((%s *)0)->%s),' % (cname, fname)) + prnt(' -1') + prnt(' };') + prnt(' return nums[i];') + prnt(' /* the next line is not executed, but compiled */') + prnt(' %s(0);' % (checkfuncname,)) + prnt('}') + prnt() + + def _loading_struct_or_union(self, tp, prefix, name, module): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + # + BFunc = self.ffi._typeof_locked("intptr_t(*)(intptr_t)")[0] + function = module.load_function(BFunc, layoutfuncname) + layout = [] + num = 0 + while True: + x = function(num) + if x < 0: break + layout.append(x) + num += 1 + if isinstance(tp, model.StructOrUnion) and tp.partial: + # use the function()'s sizes and offsets to guide the + # layout of the struct + totalsize = layout[0] + totalalignment = layout[1] + fieldofs = layout[2::2] + fieldsize = layout[3::2] + tp.force_flatten() + assert len(fieldofs) == len(fieldsize) == len(tp.fldnames) + tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment + else: + cname = ('%s %s' % (prefix, name)).strip() + self._struct_pending_verification[tp] = layout, cname + + def _loaded_struct_or_union(self, tp): + if tp.fldnames is None: + return # nothing to do with opaque structs + self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered + + if tp in self._struct_pending_verification: + # check that the layout sizes and offsets match the real ones + def check(realvalue, expectedvalue, msg): + if realvalue != expectedvalue: + raise ffiplatform.VerificationError( + "%s (we have %d, but C compiler says %d)" + % (msg, expectedvalue, realvalue)) + ffi = self.ffi + BStruct = ffi._get_cached_btype(tp) + layout, cname = self._struct_pending_verification.pop(tp) + check(layout[0], ffi.sizeof(BStruct), "wrong total size") + check(layout[1], ffi.alignof(BStruct), "wrong total alignment") + i = 2 + for fname, ftype, fbitsize, fqual in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + check(layout[i], ffi.offsetof(BStruct, fname), + "wrong offset for field %r" % (fname,)) + if layout[i+1] != 0: + BField = ffi._get_cached_btype(ftype) + check(layout[i+1], ffi.sizeof(BField), + "wrong size for field %r" % (fname,)) + i += 2 + assert i == len(layout) + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. 
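Unlike the CPython engine, which receives the whole layout in one call, the generic engine's _loading_struct_or_union above probes the exported _cffi_layout_* function with increasing indices until it returns a negative value. A standalone sketch of that loop, with a plain Python function (fake_layout_func) standing in for module.load_function(...):

# Sketch of the generic engine's layout probing loop.
def read_layout(function):
    layout = []
    num = 0
    while True:
        x = function(num)
        if x < 0:
            break
        layout.append(x)
        num += 1
    return layout

def fake_layout_func(i):
    # made-up numbers for 'struct point { int x; int y; }': size,
    # alignment, then (offset, size) per field, then the -1 terminator
    nums = [8, 4, 0, 4, 4, 4, -1]
    return nums[i]

print(read_layout(fake_layout_func))    # [8, 4, 0, 4, 4, 4]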
+ + def _generate_gen_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_gen_enum_decl(tp, name, '') + else: + self._generate_struct_or_union_decl(tp, '', name) + + def _loading_gen_anonymous(self, tp, name, module): + if isinstance(tp, model.EnumType): + self._loading_gen_enum(tp, name, module, '') + else: + self._loading_struct_or_union(tp, '', name, module) + + def _loaded_gen_anonymous(self, tp, name, module, **kwds): + if isinstance(tp, model.EnumType): + self._loaded_gen_enum(tp, name, module, **kwds) + else: + self._loaded_struct_or_union(tp) + + # ---------- + # constants, likely declared with '#define' + + def _generate_gen_const(self, is_int, name, tp=None, category='const', + check_value=None): + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + self.export_symbols.append(funcname) + if check_value is not None: + assert is_int + assert category == 'const' + prnt('int %s(char *out_error)' % funcname) + prnt('{') + self._check_int_constant_value(name, check_value) + prnt(' return 0;') + prnt('}') + elif is_int: + assert category == 'const' + prnt('int %s(long long *out_value)' % funcname) + prnt('{') + prnt(' *out_value = (long long)(%s);' % (name,)) + prnt(' return (%s) <= 0;' % (name,)) + prnt('}') + else: + assert tp is not None + assert check_value is None + if category == 'var': + ampersand = '&' + else: + ampersand = '' + extra = '' + if category == 'const' and isinstance(tp, model.StructOrUnion): + extra = 'const *' + ampersand = '&' + prnt(tp.get_c_name(' %s%s(void)' % (extra, funcname), name)) + prnt('{') + prnt(' return (%s%s);' % (ampersand, name)) + prnt('}') + prnt() + + def _generate_gen_constant_decl(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + self._generate_gen_const(is_int, name, tp) + + _loading_gen_constant = _loaded_noop + + def _load_constant(self, is_int, tp, name, module, check_value=None): + funcname = '_cffi_const_%s' % name + if check_value is not None: + assert is_int + self._load_known_int_constant(module, funcname) + value = check_value + elif is_int: + BType = self.ffi._typeof_locked("long long*")[0] + BFunc = self.ffi._typeof_locked("int(*)(long long*)")[0] + function = module.load_function(BFunc, funcname) + p = self.ffi.new(BType) + negative = function(p) + value = int(p[0]) + if value < 0 and not negative: + BLongLong = self.ffi._typeof_locked("long long")[0] + value += (1 << (8*self.ffi.sizeof(BLongLong))) + else: + assert check_value is None + fntypeextra = '(*)(void)' + if isinstance(tp, model.StructOrUnion): + fntypeextra = '*' + fntypeextra + BFunc = self.ffi._typeof_locked(tp.get_c_name(fntypeextra, name))[0] + function = module.load_function(BFunc, funcname) + value = function() + if isinstance(tp, model.StructOrUnion): + value = value[0] + return value + + def _loaded_gen_constant(self, tp, name, module, library): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + value = self._load_constant(is_int, tp, name, module) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + + # ---------- + # enums + + def _check_int_constant_value(self, name, value): + prnt = self._prnt + if value <= 0: + prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % ( + name, name, value)) + else: + prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % ( + name, name, value)) + prnt(' char buf[64];') + prnt(' if ((%s) <= 0)' % name) + prnt(' sprintf(buf, "%%ld", (long)(%s));' % name) + prnt(' else') + prnt(' sprintf(buf, "%%lu", 
(unsigned long)(%s));' % + name) + prnt(' sprintf(out_error, "%s has the real value %s, not %s",') + prnt(' "%s", buf, "%d");' % (name[:100], value)) + prnt(' return -1;') + prnt(' }') + + def _load_known_int_constant(self, module, funcname): + BType = self.ffi._typeof_locked("char[]")[0] + BFunc = self.ffi._typeof_locked("int(*)(char*)")[0] + function = module.load_function(BFunc, funcname) + p = self.ffi.new(BType, 256) + if function(p) < 0: + error = self.ffi.string(p) + if sys.version_info >= (3,): + error = str(error, 'utf-8') + raise ffiplatform.VerificationError(error) + + def _enum_funcname(self, prefix, name): + # "$enum_$1" => "___D_enum____D_1" + name = name.replace('$', '___D_') + return '_cffi_e_%s_%s' % (prefix, name) + + def _generate_gen_enum_decl(self, tp, name, prefix='enum'): + if tp.partial: + for enumerator in tp.enumerators: + self._generate_gen_const(True, enumerator) + return + # + funcname = self._enum_funcname(prefix, name) + self.export_symbols.append(funcname) + prnt = self._prnt + prnt('int %s(char *out_error)' % funcname) + prnt('{') + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + self._check_int_constant_value(enumerator, enumvalue) + prnt(' return 0;') + prnt('}') + prnt() + + def _loading_gen_enum(self, tp, name, module, prefix='enum'): + if tp.partial: + enumvalues = [self._load_constant(True, tp, enumerator, module) + for enumerator in tp.enumerators] + tp.enumvalues = tuple(enumvalues) + tp.partial_resolved = True + else: + funcname = self._enum_funcname(prefix, name) + self._load_known_int_constant(module, funcname) + + def _loaded_gen_enum(self, tp, name, module, library): + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + setattr(library, enumerator, enumvalue) + type(library)._cffi_dir.append(enumerator) + + # ---------- + # macros: for now only for integers + + def _generate_gen_macro_decl(self, tp, name): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + self._generate_gen_const(True, name, check_value=check_value) + + _loading_gen_macro = _loaded_noop + + def _loaded_gen_macro(self, tp, name, module, library): + if tp == '...': + check_value = None + else: + check_value = tp # an integer + value = self._load_constant(True, tp, name, module, + check_value=check_value) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + + # ---------- + # global variables + + def _generate_gen_variable_decl(self, tp, name): + if isinstance(tp, model.ArrayType): + if tp.length == '...': + prnt = self._prnt + funcname = '_cffi_sizeof_%s' % (name,) + self.export_symbols.append(funcname) + prnt("size_t %s(void)" % funcname) + prnt("{") + prnt(" return sizeof(%s);" % (name,)) + prnt("}") + tp_ptr = model.PointerType(tp.item) + self._generate_gen_const(False, name, tp_ptr) + else: + tp_ptr = model.PointerType(tp) + self._generate_gen_const(False, name, tp_ptr, category='var') + + _loading_gen_variable = _loaded_noop + + def _loaded_gen_variable(self, tp, name, module, library): + if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the + # sense that "a=..." 
is forbidden + if tp.length == '...': + funcname = '_cffi_sizeof_%s' % (name,) + BFunc = self.ffi._typeof_locked('size_t(*)(void)')[0] + function = module.load_function(BFunc, funcname) + size = function() + BItemType = self.ffi._get_cached_btype(tp.item) + length, rest = divmod(size, self.ffi.sizeof(BItemType)) + if rest != 0: + raise ffiplatform.VerificationError( + "bad size: %r does not seem to be an array of %s" % + (name, tp.item)) + tp = tp.resolve_length(length) + tp_ptr = model.PointerType(tp.item) + value = self._load_constant(False, tp_ptr, name, module) + # 'value' is a <cdata 'type *'> which we have to replace with + # a <cdata 'type[N]'> if the N is actually known + if tp.length is not None: + BArray = self.ffi._get_cached_btype(tp) + value = self.ffi.cast(BArray, value) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + return + # remove ptr=<cdata 'type *'> from the library instance, and replace + # it by a property on the class, which reads/writes into ptr[0]. + funcname = '_cffi_var_%s' % name + BFunc = self.ffi._typeof_locked(tp.get_c_name('*(*)(void)', name))[0] + function = module.load_function(BFunc, funcname) + ptr = function() + def getter(library): + return ptr[0] + def setter(library, value): + ptr[0] = value + setattr(type(library), name, property(getter, setter)) + type(library)._cffi_dir.append(name) + +cffimod_header = r''' +#include <stdio.h> +#include <stddef.h> +#include <stdarg.h> +#include <errno.h> +#include <sys/types.h> /* XXX for ssize_t on some platforms */ + +/* this block of #ifs should be kept exactly identical between + c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py */ +#if defined(_MSC_VER) +# include <malloc.h> /* for alloca() */ +# if _MSC_VER < 1600 /* MSVC < 2010 */ + typedef __int8 int8_t; + typedef __int16 int16_t; + typedef __int32 int32_t; + typedef __int64 int64_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + typedef unsigned __int64 uint64_t; + typedef __int8 int_least8_t; + typedef __int16 int_least16_t; + typedef __int32 int_least32_t; + typedef __int64 int_least64_t; + typedef unsigned __int8 uint_least8_t; + typedef unsigned __int16 uint_least16_t; + typedef unsigned __int32 uint_least32_t; + typedef unsigned __int64 uint_least64_t; + typedef __int8 int_fast8_t; + typedef __int16 int_fast16_t; + typedef __int32 int_fast32_t; + typedef __int64 int_fast64_t; + typedef unsigned __int8 uint_fast8_t; + typedef unsigned __int16 uint_fast16_t; + typedef unsigned __int32 uint_fast32_t; + typedef unsigned __int64 uint_fast64_t; + typedef __int64 intmax_t; + typedef unsigned __int64 uintmax_t; +# else +# include <stdint.h> +# endif +# if _MSC_VER < 1800 /* MSVC < 2013 */ + typedef unsigned char _Bool; +# endif +#else +# include <stdint.h> +# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) +# include <alloca.h> +# endif +#endif +''' diff --git a/Lib/site-packages/cffi/verifier.py b/Lib/site-packages/cffi/verifier.py new file mode 100644 index 0000000..01728ae --- /dev/null +++ b/Lib/site-packages/cffi/verifier.py @@ -0,0 +1,313 @@ +import sys, os, binascii, shutil, io +from . import __version_verifier_modules__ +from .
import ffiplatform + +if sys.version_info >= (3, 3): + import importlib.machinery + def _extension_suffixes(): + return importlib.machinery.EXTENSION_SUFFIXES[:] +else: + import imp + def _extension_suffixes(): + return [suffix for suffix, _, type in imp.get_suffixes() + if type == imp.C_EXTENSION] + + +if sys.version_info >= (3,): + NativeIO = io.StringIO +else: + class NativeIO(io.BytesIO): + def write(self, s): + if isinstance(s, unicode): + s = s.encode('ascii') + super(NativeIO, self).write(s) + +def _hack_at_distutils(): + # Windows-only workaround for some configurations: see + # https://bugs.python.org/issue23246 (Python 2.7 with + # a specific MS compiler suite download) + if sys.platform == "win32": + try: + import setuptools # for side-effects, patches distutils + except ImportError: + pass + + +class Verifier(object): + + def __init__(self, ffi, preamble, tmpdir=None, modulename=None, + ext_package=None, tag='', force_generic_engine=False, + source_extension='.c', flags=None, relative_to=None, **kwds): + if ffi._parser._uses_new_feature: + raise ffiplatform.VerificationError( + "feature not supported with ffi.verify(), but only " + "with ffi.set_source(): %s" % (ffi._parser._uses_new_feature,)) + self.ffi = ffi + self.preamble = preamble + if not modulename: + flattened_kwds = ffiplatform.flatten(kwds) + vengine_class = _locate_engine_class(ffi, force_generic_engine) + self._vengine = vengine_class(self) + self._vengine.patch_extension_kwds(kwds) + self.flags = flags + self.kwds = self.make_relative_to(kwds, relative_to) + # + if modulename: + if tag: + raise TypeError("can't specify both 'modulename' and 'tag'") + else: + key = '\x00'.join([sys.version[:3], __version_verifier_modules__, + preamble, flattened_kwds] + + ffi._cdefsources) + if sys.version_info >= (3,): + key = key.encode('utf-8') + k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff) + k1 = k1.lstrip('0x').rstrip('L') + k2 = hex(binascii.crc32(key[1::2]) & 0xffffffff) + k2 = k2.lstrip('0').rstrip('L') + modulename = '_cffi_%s_%s%s%s' % (tag, self._vengine._class_key, + k1, k2) + suffix = _get_so_suffixes()[0] + self.tmpdir = tmpdir or _caller_dir_pycache() + self.sourcefilename = os.path.join(self.tmpdir, modulename + source_extension) + self.modulefilename = os.path.join(self.tmpdir, modulename + suffix) + self.ext_package = ext_package + self._has_source = False + self._has_module = False + + def write_source(self, file=None): + """Write the C source code. It is produced in 'self.sourcefilename', + which can be tweaked beforehand.""" + with self.ffi._lock: + if self._has_source and file is None: + raise ffiplatform.VerificationError( + "source code already written") + self._write_source(file) + + def compile_module(self): + """Write the C source code (if not done already) and compile it. + This produces a dynamic link library in 'self.modulefilename'.""" + with self.ffi._lock: + if self._has_module: + raise ffiplatform.VerificationError("module already compiled") + if not self._has_source: + self._write_source() + self._compile_module() + + def load_library(self): + """Get a C module from this Verifier instance. + Returns an instance of a FFILibrary class that behaves like the + objects returned by ffi.dlopen(), but that delegates all + operations to the C module. If necessary, the C code is written + and compiled first. 
+ """ + with self.ffi._lock: + if not self._has_module: + self._locate_module() + if not self._has_module: + if not self._has_source: + self._write_source() + self._compile_module() + return self._load_library() + + def get_module_name(self): + basename = os.path.basename(self.modulefilename) + # kill both the .so extension and the other .'s, as introduced + # by Python 3: 'basename.cpython-33m.so' + basename = basename.split('.', 1)[0] + # and the _d added in Python 2 debug builds --- but try to be + # conservative and not kill a legitimate _d + if basename.endswith('_d') and hasattr(sys, 'gettotalrefcount'): + basename = basename[:-2] + return basename + + def get_extension(self): + _hack_at_distutils() # backward compatibility hack + if not self._has_source: + with self.ffi._lock: + if not self._has_source: + self._write_source() + sourcename = ffiplatform.maybe_relative_path(self.sourcefilename) + modname = self.get_module_name() + return ffiplatform.get_extension(sourcename, modname, **self.kwds) + + def generates_python_module(self): + return self._vengine._gen_python_module + + def make_relative_to(self, kwds, relative_to): + if relative_to and os.path.dirname(relative_to): + dirname = os.path.dirname(relative_to) + kwds = kwds.copy() + for key in ffiplatform.LIST_OF_FILE_NAMES: + if key in kwds: + lst = kwds[key] + if not isinstance(lst, (list, tuple)): + raise TypeError("keyword '%s' should be a list or tuple" + % (key,)) + lst = [os.path.join(dirname, fn) for fn in lst] + kwds[key] = lst + return kwds + + # ---------- + + def _locate_module(self): + if not os.path.isfile(self.modulefilename): + if self.ext_package: + try: + pkg = __import__(self.ext_package, None, None, ['__doc__']) + except ImportError: + return # cannot import the package itself, give up + # (e.g. it might be called differently before installation) + path = pkg.__path__ + else: + path = None + filename = self._vengine.find_module(self.get_module_name(), path, + _get_so_suffixes()) + if filename is None: + return + self.modulefilename = filename + self._vengine.collect_types() + self._has_module = True + + def _write_source_to(self, file): + self._vengine._f = file + try: + self._vengine.write_source_to_f() + finally: + del self._vengine._f + + def _write_source(self, file=None): + if file is not None: + self._write_source_to(file) + else: + # Write our source file to an in memory file. 
+ f = NativeIO() + self._write_source_to(f) + source_data = f.getvalue() + + # Determine if this matches the current file + if os.path.exists(self.sourcefilename): + with open(self.sourcefilename, "r") as fp: + needs_written = not (fp.read() == source_data) + else: + needs_written = True + + # Actually write the file out if it doesn't match + if needs_written: + _ensure_dir(self.sourcefilename) + with open(self.sourcefilename, "w") as fp: + fp.write(source_data) + + # Set this flag + self._has_source = True + + def _compile_module(self): + # compile this C source + tmpdir = os.path.dirname(self.sourcefilename) + outputfilename = ffiplatform.compile(tmpdir, self.get_extension()) + try: + same = ffiplatform.samefile(outputfilename, self.modulefilename) + except OSError: + same = False + if not same: + _ensure_dir(self.modulefilename) + shutil.move(outputfilename, self.modulefilename) + self._has_module = True + + def _load_library(self): + assert self._has_module + if self.flags is not None: + return self._vengine.load_library(self.flags) + else: + return self._vengine.load_library() + +# ____________________________________________________________ + +_FORCE_GENERIC_ENGINE = False # for tests + +def _locate_engine_class(ffi, force_generic_engine): + if _FORCE_GENERIC_ENGINE: + force_generic_engine = True + if not force_generic_engine: + if '__pypy__' in sys.builtin_module_names: + force_generic_engine = True + else: + try: + import _cffi_backend + except ImportError: + _cffi_backend = '?' + if ffi._backend is not _cffi_backend: + force_generic_engine = True + if force_generic_engine: + from . import vengine_gen + return vengine_gen.VGenericEngine + else: + from . import vengine_cpy + return vengine_cpy.VCPythonEngine + +# ____________________________________________________________ + +_TMPDIR = None + +def _caller_dir_pycache(): + if _TMPDIR: + return _TMPDIR + result = os.environ.get('CFFI_TMPDIR') + if result: + return result + filename = sys._getframe(2).f_code.co_filename + return os.path.abspath(os.path.join(os.path.dirname(filename), + '__pycache__')) + +def set_tmpdir(dirname): + """Set the temporary directory to use instead of __pycache__.""" + global _TMPDIR + _TMPDIR = dirname + +def cleanup_tmpdir(tmpdir=None, keep_so=False): + """Clean up the temporary directory by removing all files in it + called `_cffi_*.{c,so}` as well as the `build` subdirectory.""" + tmpdir = tmpdir or _caller_dir_pycache() + try: + filelist = os.listdir(tmpdir) + except OSError: + return + if keep_so: + suffix = '.c' # only remove .c files + else: + suffix = _get_so_suffixes()[0].lower() + for fn in filelist: + if fn.lower().startswith('_cffi_') and ( + fn.lower().endswith(suffix) or fn.lower().endswith('.c')): + try: + os.unlink(os.path.join(tmpdir, fn)) + except OSError: + pass + clean_dir = [os.path.join(tmpdir, 'build')] + for dir in clean_dir: + try: + for fn in os.listdir(dir): + fn = os.path.join(dir, fn) + if os.path.isdir(fn): + clean_dir.append(fn) + else: + os.unlink(fn) + except OSError: + pass + +def _get_so_suffixes(): + suffixes = _extension_suffixes() + if not suffixes: + # bah, no C_EXTENSION available. 
Occurs on pypy without cpyext + if sys.platform == 'win32': + suffixes = [".pyd"] + else: + suffixes = [".so"] + + return suffixes + +def _ensure_dir(filename): + try: + os.makedirs(os.path.dirname(filename)) + except OSError: + pass diff --git a/Lib/site-packages/cryptography-1.2.2.dist-info/DESCRIPTION.rst b/Lib/site-packages/cryptography-1.2.2.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..951d7db --- /dev/null +++ b/Lib/site-packages/cryptography-1.2.2.dist-info/DESCRIPTION.rst @@ -0,0 +1,57 @@ +Cryptography +============ + +.. image:: https://img.shields.io/pypi/v/cryptography.svg + :target: https://pypi.python.org/pypi/cryptography/ + :alt: Latest Version + +.. image:: https://readthedocs.org/projects/cryptography/badge/?version=latest + :target: https://cryptography.io + :alt: Latest Docs + +.. image:: https://travis-ci.org/pyca/cryptography.svg?branch=master + :target: https://travis-ci.org/pyca/cryptography + +.. image:: https://codecov.io/github/pyca/cryptography/coverage.svg?branch=master + :target: https://codecov.io/github/pyca/cryptography?branch=master + + +``cryptography`` is a package which provides cryptographic recipes and +primitives to Python developers. Our goal is for it to be your "cryptographic +standard library". It supports Python 2.6-2.7, Python 3.3+, and PyPy 2.6+. + +``cryptography`` includes both high level recipes, and low level interfaces to +common cryptographic algorithms such as symmetric ciphers, message digests and +key derivation functions. For example, to encrypt something with +``cryptography``'s high level symmetric encryption recipe: + +.. code-block:: pycon + + >>> from cryptography.fernet import Fernet + >>> # Put this somewhere safe! + >>> key = Fernet.generate_key() + >>> f = Fernet(key) + >>> token = f.encrypt(b"A really secret message. Not for prying eyes.") + >>> token + '...' + >>> f.decrypt(token) + 'A really secret message. Not for prying eyes.' + +You can find more information in the `documentation`_. + +Discussion +~~~~~~~~~~ + +If you run into bugs, you can file them in our `issue tracker`_. + +We maintain a `cryptography-dev`_ mailing list for development discussion. + +You can also join ``#cryptography-dev`` on Freenode to ask questions or get +involved. + + +.. _`documentation`: https://cryptography.io/ +.. _`issue tracker`: https://github.com/pyca/cryptography/issues +.. _`cryptography-dev`: https://mail.python.org/mailman/listinfo/cryptography-dev + + diff --git a/Lib/site-packages/cryptography-1.2.2.dist-info/INSTALLER b/Lib/site-packages/cryptography-1.2.2.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/Lib/site-packages/cryptography-1.2.2.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Lib/site-packages/cryptography-1.2.2.dist-info/METADATA b/Lib/site-packages/cryptography-1.2.2.dist-info/METADATA new file mode 100644 index 0000000..0974ec0 --- /dev/null +++ b/Lib/site-packages/cryptography-1.2.2.dist-info/METADATA @@ -0,0 +1,92 @@ +Metadata-Version: 2.0 +Name: cryptography +Version: 1.2.2 +Summary: cryptography is a package which provides cryptographic recipes and primitives to Python developers. 
+Home-page: https://github.com/pyca/cryptography +Author: The cryptography developers +Author-email: cryptography-dev@python.org +License: BSD or Apache License, Version 2.0 +Platform: UNKNOWN +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: License :: OSI Approved :: BSD License +Classifier: Natural Language :: English +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: POSIX +Classifier: Operating System :: POSIX :: BSD +Classifier: Operating System :: POSIX :: Linux +Classifier: Operating System :: Microsoft :: Windows +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Security :: Cryptography +Requires-Dist: idna (>=2.0) +Requires-Dist: pyasn1 (>=0.1.8) +Requires-Dist: six (>=1.4.1) +Requires-Dist: setuptools (>=1.0) +Requires-Dist: cffi (>=1.4.1) + +Cryptography +============ + +.. image:: https://img.shields.io/pypi/v/cryptography.svg + :target: https://pypi.python.org/pypi/cryptography/ + :alt: Latest Version + +.. image:: https://readthedocs.org/projects/cryptography/badge/?version=latest + :target: https://cryptography.io + :alt: Latest Docs + +.. image:: https://travis-ci.org/pyca/cryptography.svg?branch=master + :target: https://travis-ci.org/pyca/cryptography + +.. image:: https://codecov.io/github/pyca/cryptography/coverage.svg?branch=master + :target: https://codecov.io/github/pyca/cryptography?branch=master + + +``cryptography`` is a package which provides cryptographic recipes and +primitives to Python developers. Our goal is for it to be your "cryptographic +standard library". It supports Python 2.6-2.7, Python 3.3+, and PyPy 2.6+. + +``cryptography`` includes both high level recipes, and low level interfaces to +common cryptographic algorithms such as symmetric ciphers, message digests and +key derivation functions. For example, to encrypt something with +``cryptography``'s high level symmetric encryption recipe: + +.. code-block:: pycon + + >>> from cryptography.fernet import Fernet + >>> # Put this somewhere safe! + >>> key = Fernet.generate_key() + >>> f = Fernet(key) + >>> token = f.encrypt(b"A really secret message. Not for prying eyes.") + >>> token + '...' + >>> f.decrypt(token) + 'A really secret message. Not for prying eyes.' + +You can find more information in the `documentation`_. + +Discussion +~~~~~~~~~~ + +If you run into bugs, you can file them in our `issue tracker`_. + +We maintain a `cryptography-dev`_ mailing list for development discussion. + +You can also join ``#cryptography-dev`` on Freenode to ask questions or get +involved. + + +.. _`documentation`: https://cryptography.io/ +.. _`issue tracker`: https://github.com/pyca/cryptography/issues +.. 
_`cryptography-dev`: https://mail.python.org/mailman/listinfo/cryptography-dev + + diff --git a/Lib/site-packages/cryptography-1.2.2.dist-info/RECORD b/Lib/site-packages/cryptography-1.2.2.dist-info/RECORD new file mode 100644 index 0000000..8e153e0 --- /dev/null +++ b/Lib/site-packages/cryptography-1.2.2.dist-info/RECORD @@ -0,0 +1,143 @@ +cryptography/exceptions.py,sha256=mFziFAW-ArgLwv3ppycGczqXiZf9_AKvD2h3Zkn9xs0,1198 +cryptography/fernet.py,sha256=S1wID45we0yrimIoMOMYu89sI32rhoCgCnygzG0peSU,4295 +cryptography/utils.py,sha256=GE5x2y3o96iX7HDVgWNzD6rFnzBuCFsMkGcJUCsK6ro,3606 +cryptography/__about__.py,sha256=xQ6xCRDpEId7csrSZpuXRgFKiEXZPUgdtBniuT-Os5s,817 +cryptography/__init__.py,sha256=NiBm3Qj07gEmyagrxoqVFqbhG-elC3kRqrqd76oyk9g,829 +cryptography/hazmat/__init__.py,sha256=0wGw2OF9R7fHX7NWENCmrsYigbXHU2ojgn-N4Rkjs9U,246 +cryptography/hazmat/backends/interfaces.py,sha256=8LVoRMALfVyaK6_XB9L7Pf0ko0tg5VMFKeyxBFdeZM8,9750 +cryptography/hazmat/backends/multibackend.py,sha256=Vmd2EkMvJHSgKnV3BHpNQRMfy5GM6JfNViOqEeeTn6s,15584 +cryptography/hazmat/backends/__init__.py,sha256=oQFGi9nQ6d7IiWDMaoleb2taGLgqQEvXXkgOxSTW5bQ,1325 +cryptography/hazmat/backends/commoncrypto/backend.py,sha256=GjHLdBKBCIg0hoSEuf_PFAaJfcxUHr6ShHh1LJG7NIg,8577 +cryptography/hazmat/backends/commoncrypto/ciphers.py,sha256=E1T9KtsnqeUVk4dl64ESwL9WiACdDUEqTDbc2JgtBaw,7946 +cryptography/hazmat/backends/commoncrypto/hashes.py,sha256=AJl9-ALt2HV3F7GfkMVc7z2Rj_E4msk9juscmSjx5bE,2040 +cryptography/hazmat/backends/commoncrypto/hmac.py,sha256=THOz8zjgFb-fbzOjQvKy7fr0Ri9Qw8fzr2hAe3N9iPo,2188 +cryptography/hazmat/backends/commoncrypto/__init__.py,sha256=aihmGquS6-l9Bcx6W8-nPO7BbpBlFg86IjPMIOKtJDk,341 +cryptography/hazmat/backends/openssl/backend.py,sha256=af9bP-RaYmtz44jm_lCNUfVfQZhHyPyEuTpQxnSB-2I,87022 +cryptography/hazmat/backends/openssl/ciphers.py,sha256=67Paxuvbw8U_AcHvOm17RLIN9iinQgxtT89icnNcUHY,8759 +cryptography/hazmat/backends/openssl/cmac.py,sha256=SeZ7v_2sm4_GJmB0S39DMcCYr16X7bwQIBHAYG6Qkms,2797 +cryptography/hazmat/backends/openssl/dsa.py,sha256=-90bcweQjZmaGonnvz0_EN_I7d6wC5wvMyA_GmiVg1k,8353 +cryptography/hazmat/backends/openssl/ec.py,sha256=mA3VBP7cVBX1lLWwEmMyL3Hn9ptmMVAJYijCCiKGdOM,10288 +cryptography/hazmat/backends/openssl/hashes.py,sha256=6KnIaZAAXz8vbMy1-1oZ_mptSaLyNSA1TFGiXmT7AkQ,2594 +cryptography/hazmat/backends/openssl/hmac.py,sha256=ZXLmKVRRObmlbvmLxruv3etarkLRCTkYh_70nKJlp9w,3067 +cryptography/hazmat/backends/openssl/rsa.py,sha256=IE2F9LM5OmNYBqLOJhLKGwSeRQA3xjaXx60_OmVvI2Q,23427 +cryptography/hazmat/backends/openssl/utils.py,sha256=q2XWHoedrfB6bUqxV4Omr_ebp_R7VvSHJCw5sQN3Iag,737 +cryptography/hazmat/backends/openssl/x509.py,sha256=Qfuy9bT5Oin6F_mDIoKl1JSuA5se0ZssDFHakN6W7CA,39702 +cryptography/hazmat/backends/openssl/__init__.py,sha256=k4DMe228_hTuB2kY3Lwk62JdI3EmCd7VkV01zJm57ps,336 +cryptography/hazmat/bindings/_constant_time.cp35-win32.pyd,sha256=P3yZi4M9PJFBR4Hc7F95rzjmvqalUuv4ifQtgHpdY70,9728 +cryptography/hazmat/bindings/_openssl.cp35-win32.pyd,sha256=1XZ5d6HflQ_D_qtv5GngtX6t6NSi2MrouOQWD8h9kuU,1836544 +cryptography/hazmat/bindings/_padding.cp35-win32.pyd,sha256=VeA5x2A5Ilv-AKhTi7RtrwLEUcSciKhINY5ZD42oQKI,9216 +cryptography/hazmat/bindings/__init__.py,sha256=0wGw2OF9R7fHX7NWENCmrsYigbXHU2ojgn-N4Rkjs9U,246 +cryptography/hazmat/bindings/commoncrypto/binding.py,sha256=cO7kSFGxbBSnszoA1EXQSlxC0vA0EcrfphNdUK61DJ8,410 +cryptography/hazmat/bindings/commoncrypto/__init__.py,sha256=0wGw2OF9R7fHX7NWENCmrsYigbXHU2ojgn-N4Rkjs9U,246 
+cryptography/hazmat/bindings/openssl/binding.py,sha256=5ZcuQiaxVr4PIaM9Z1ByxhKyPXCi_fj_dEZRyeCJLd4,7081 +cryptography/hazmat/bindings/openssl/_conditional.py,sha256=e0cL947ffTyxoKLS3egkuvqgIK4f1V6ewK-bWV-pP3A,12168 +cryptography/hazmat/bindings/openssl/__init__.py,sha256=0wGw2OF9R7fHX7NWENCmrsYigbXHU2ojgn-N4Rkjs9U,246 +cryptography/hazmat/primitives/cmac.py,sha256=qGZcqMN57eGkqYx7Ypd23WOBkMkrwCb5M17QbqrvZW0,2235 +cryptography/hazmat/primitives/constant_time.py,sha256=rsSJc99kyQ2VwNVP9w-0mr80sZnppgWcm9LfQitftF0,798 +cryptography/hazmat/primitives/hashes.py,sha256=Lw9UpEzn5flkQJUK4_psFo7vxsTBBVqRtb5dvZ2F1Ro,3954 +cryptography/hazmat/primitives/hmac.py,sha256=pp6gwio7HR6QWZNhnPEyuG9Zj3-gKoR6UM7fhphSvRo,2353 +cryptography/hazmat/primitives/keywrap.py,sha256=gJp1qggkxvQXlrl262aW4sfpBWWhLQYhNWNYGy0M3_A,3050 +cryptography/hazmat/primitives/padding.py,sha256=ORXdpuspku0CcYr0WrOnKtM6WwxhzLl7XyUG5_mFcMU,3635 +cryptography/hazmat/primitives/serialization.py,sha256=dWaO3RlNcmygbknNIN_VbaSNHZXw6qhvfiTrMqdVdCY,5152 +cryptography/hazmat/primitives/__init__.py,sha256=0wGw2OF9R7fHX7NWENCmrsYigbXHU2ojgn-N4Rkjs9U,246 +cryptography/hazmat/primitives/asymmetric/dh.py,sha256=iEk2dnAdro7SqH9VU9v39rSWQoyJpf2nCAR9J3WmZ60,4183 +cryptography/hazmat/primitives/asymmetric/dsa.py,sha256=_2KFCkIuYRz2Ni1TPcvLxXOJH-D8rXhIJwcAG4R4FoU,6292 +cryptography/hazmat/primitives/asymmetric/ec.py,sha256=sChzpKwmvpmhuYHzgdWEBqoK4bM-wohMz7oceM1CjM4,8632 +cryptography/hazmat/primitives/asymmetric/padding.py,sha256=OR6Nm2KTdVJ-NNdRkBVhIPGj9cJJdF2LEkHfAFLA_EY,1803 +cryptography/hazmat/primitives/asymmetric/rsa.py,sha256=QpJ0q_Fg97cvXMVb5fFcQ9s9c-3U9QIz0GehVO8oOGo,9908 +cryptography/hazmat/primitives/asymmetric/utils.py,sha256=rMjag4WEA_DVwNg2hYMUqy49jlUeCpvype0mP-P5eLk,2029 +cryptography/hazmat/primitives/asymmetric/__init__.py,sha256=WhUn3tGxoLAxGAsZHElJ2aOILXSh55AZi04MBudYmQA,1020 +cryptography/hazmat/primitives/ciphers/algorithms.py,sha256=sBQ3t1H5MQqr1qTRIFtiTVi_XFVS_wdICVGlg3iGxT8,3424 +cryptography/hazmat/primitives/ciphers/base.py,sha256=bcuWG0v2ETvtJeEs1dfk-_Fm_FQKvAPMBlURilhNJRE,6169 +cryptography/hazmat/primitives/ciphers/modes.py,sha256=TiOYjod8RwYATKkyZNFlA6H_o2n1aHMMKHSL8UxKmFM,4682 +cryptography/hazmat/primitives/ciphers/__init__.py,sha256=Kqf2BvvQ--KiFwfZ7oCRSfwrdi5wnERm7ctO7cEQ2Fo,574 +cryptography/hazmat/primitives/interfaces/__init__.py,sha256=dgLoN6PdA_QqowwB_RzSeEqJjIPnAqTUTxio-8CFP-8,884 +cryptography/hazmat/primitives/kdf/concatkdf.py,sha256=WTooWTAkHUCtaMAUouAluNbg71qXxPj_AHLyuRnPDR8,4109 +cryptography/hazmat/primitives/kdf/hkdf.py,sha256=sf1YjTp8wVqvTEiga4Vee9km_ToHc3oVDLS7luATCyI,3675 +cryptography/hazmat/primitives/kdf/pbkdf2.py,sha256=YiFNDI4CbGRH0OQXDMXG8DfYap32vtjvA-Zo-n_oAE4,2185 +cryptography/hazmat/primitives/kdf/x963kdf.py,sha256=yOK2qAlWTHABVIvwT55jl_B6UR5ELyLFlcVtH16HxGc,2363 +cryptography/hazmat/primitives/kdf/__init__.py,sha256=nod5HjPswjZr8wFp6Tsu6en9blHYF3khgXI5R0zIcnM,771 +cryptography/hazmat/primitives/twofactor/hotp.py,sha256=EwMzI0B8GUa3FpGKLryDve4_raIiDIQAsWCyxeUcht8,2516 +cryptography/hazmat/primitives/twofactor/totp.py,sha256=3x9sPZ70VbSWSPfembr57imJkoq6-JgPiKjvmuKjyaA,1532 +cryptography/hazmat/primitives/twofactor/utils.py,sha256=71gX1bJeP9TQa-HbSPzeUUJwVY78ALYQNvuusknUO4U,954 +cryptography/hazmat/primitives/twofactor/__init__.py,sha256=BWrm3DKDoAa281E7U_nzz8v44OmAiXmlIycFcsehwfE,288 +cryptography/x509/base.py,sha256=v5Rg86AtsouOlFrTppCN73FysR2tvT8XPaGO98vIHFA,21759 +cryptography/x509/extensions.py,sha256=o5UytyLnvOeVzddrz6RYJRDslZV5FHpT7ILhw9UcoKQ,33963 
+cryptography/x509/general_name.py,sha256=i3l5jcxarKJGT86iPIWgEL51SNDNgEp7KMbcNBRbyjE,7519 +cryptography/x509/name.py,sha256=I-dCbj2Fv3pAKzszivYfFOdIu0CvrUqmUKOtXxvR-7c,2116 +cryptography/x509/oid.py,sha256=uMuj-GZGUXU9FvyX2ovyF_ysAxbPsHTCAu_deNaeRwQ,11144 +cryptography/x509/__init__.py,sha256=5SsF4IVQJ1FCum_OtsI6oC1dFgCqH8t-AXdqLUEVxco,6727 +cryptography-1.2.2.dist-info/DESCRIPTION.rst,sha256=pVyhl71rmRmivUoJQGAS4Jy93myVUgINRBhxIFzqI6w,1940 +cryptography-1.2.2.dist-info/entry_points.txt,sha256=aG4jSsppK5sGk3sJ88DGQsXxbseci9MN5ZgbY9VKvyk,80 +cryptography-1.2.2.dist-info/METADATA,sha256=wNbUR0HbokIlBCoh9Gjsc4neiHPi_Opf_m7XdD8I_0U,3453 +cryptography-1.2.2.dist-info/metadata.json,sha256=8ftp_O4LSKl1ZaEcOSTUXad92UwGavzFGMunk2SUT_s,1763 +cryptography-1.2.2.dist-info/RECORD,, +cryptography-1.2.2.dist-info/top_level.txt,sha256=QCkYQE4HJBpqIr-aBqbOZ70NlfbejKnDE6ODbNgUwwg,46 +cryptography-1.2.2.dist-info/WHEEL,sha256=6wnUzulUPtamWQw9fduxmONJSMzqWZJWY62pN0Z3sA8,101 +c:\users\j\oml\platform\win32\Lib\site-packages\cryptography-1.2.2.dist-info\INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-35.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-35.pyc,, +cryptography/__pycache__/fernet.cpython-35.pyc,, +cryptography/hazmat/bindings/commoncrypto/__pycache__/__init__.cpython-35.pyc,, +cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-35.pyc,, +cryptography/hazmat/primitives/__pycache__/constant_time.cpython-35.pyc,, +cryptography/hazmat/backends/commoncrypto/__pycache__/hmac.cpython-35.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-35.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/ec.cpython-35.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-35.pyc,, +cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-35.pyc,, +cryptography/x509/__pycache__/general_name.cpython-35.pyc,, +cryptography/x509/__pycache__/__init__.cpython-35.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/ciphers.cpython-35.pyc,, +cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-35.pyc,, +cryptography/hazmat/backends/commoncrypto/__pycache__/__init__.cpython-35.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/rsa.cpython-35.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-35.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-35.pyc,, +cryptography/hazmat/backends/commoncrypto/__pycache__/ciphers.cpython-35.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-35.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-35.pyc,, +cryptography/hazmat/__pycache__/__init__.cpython-35.pyc,, +cryptography/hazmat/backends/commoncrypto/__pycache__/hashes.cpython-35.pyc,, +cryptography/hazmat/bindings/commoncrypto/__pycache__/binding.cpython-35.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/hashes.cpython-35.pyc,, +cryptography/hazmat/primitives/twofactor/__pycache__/utils.cpython-35.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/dsa.cpython-35.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-35.pyc,, +cryptography/hazmat/primitives/__pycache__/cmac.cpython-35.pyc,, +cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-35.pyc,, +cryptography/__pycache__/__about__.cpython-35.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/utils.cpython-35.pyc,, 
+cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-35.pyc,, +cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-35.pyc,, +cryptography/hazmat/primitives/__pycache__/serialization.cpython-35.pyc,, +cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-35.pyc,, +cryptography/hazmat/bindings/__pycache__/__init__.cpython-35.pyc,, +cryptography/hazmat/backends/__pycache__/interfaces.cpython-35.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/hmac.cpython-35.pyc,, +cryptography/hazmat/primitives/__pycache__/hashes.cpython-35.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-35.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-35.pyc,, +cryptography/x509/__pycache__/name.cpython-35.pyc,, +cryptography/hazmat/primitives/__pycache__/hmac.cpython-35.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-35.pyc,, +cryptography/hazmat/primitives/__pycache__/padding.cpython-35.pyc,, +cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-35.pyc,, +cryptography/__pycache__/utils.cpython-35.pyc,, +cryptography/x509/__pycache__/extensions.cpython-35.pyc,, +cryptography/hazmat/backends/commoncrypto/__pycache__/backend.cpython-35.pyc,, +cryptography/x509/__pycache__/base.cpython-35.pyc,, +cryptography/hazmat/backends/__pycache__/__init__.cpython-35.pyc,, +cryptography/__pycache__/exceptions.cpython-35.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/x509.cpython-35.pyc,, +cryptography/hazmat/backends/__pycache__/multibackend.cpython-35.pyc,, +cryptography/__pycache__/__init__.cpython-35.pyc,, +cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-35.pyc,, +cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-35.pyc,, +cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-35.pyc,, +cryptography/x509/__pycache__/oid.cpython-35.pyc,, +cryptography/hazmat/primitives/__pycache__/keywrap.cpython-35.pyc,, +cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-35.pyc,, +cryptography/hazmat/primitives/__pycache__/__init__.cpython-35.pyc,, +cryptography/hazmat/primitives/interfaces/__pycache__/__init__.cpython-35.pyc,, +cryptography/hazmat/backends/openssl/__pycache__/cmac.cpython-35.pyc,, diff --git a/Lib/site-packages/cryptography-1.2.2.dist-info/WHEEL b/Lib/site-packages/cryptography-1.2.2.dist-info/WHEEL new file mode 100644 index 0000000..184f1bd --- /dev/null +++ b/Lib/site-packages/cryptography-1.2.2.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.24.0) +Root-Is-Purelib: false +Tag: cp35-none-win32 + diff --git a/Lib/site-packages/cryptography-1.2.2.dist-info/entry_points.txt b/Lib/site-packages/cryptography-1.2.2.dist-info/entry_points.txt new file mode 100644 index 0000000..bfde650 --- /dev/null +++ b/Lib/site-packages/cryptography-1.2.2.dist-info/entry_points.txt @@ -0,0 +1,3 @@ +[cryptography.backends] +openssl = cryptography.hazmat.backends.openssl:backend + diff --git a/Lib/site-packages/cryptography-1.2.2.dist-info/metadata.json b/Lib/site-packages/cryptography-1.2.2.dist-info/metadata.json new file mode 100644 index 0000000..fd5ae97 --- /dev/null +++ b/Lib/site-packages/cryptography-1.2.2.dist-info/metadata.json @@ -0,0 +1 @@ +{"license": "BSD or Apache License, Version 2.0", "metadata_version": "2.0", "run_requires": [{"requires": ["idna (>=2.0)", "pyasn1 (>=0.1.8)", "six (>=1.4.1)", "setuptools (>=1.0)", "cffi (>=1.4.1)"]}], "name": "cryptography", "extras": [], "generator": "bdist_wheel 
(0.24.0)", "extensions": {"python.exports": {"cryptography.backends": {"openssl": "cryptography.hazmat.backends.openssl:backend"}}, "python.details": {"document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://github.com/pyca/cryptography"}, "contacts": [{"email": "cryptography-dev@python.org", "name": "The cryptography developers", "role": "author"}]}}, "classifiers": ["Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "License :: OSI Approved :: BSD License", "Natural Language :: English", "Operating System :: MacOS :: MacOS X", "Operating System :: POSIX", "Operating System :: POSIX :: BSD", "Operating System :: POSIX :: Linux", "Operating System :: Microsoft :: Windows", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Security :: Cryptography"], "version": "1.2.2", "test_requires": [{"requires": ["pytest", "pretend", "iso8601", "hypothesis", "pyasn1-modules", "cryptography-vectors (==1.2.2)"]}], "summary": "cryptography is a package which provides cryptographic recipes and primitives to Python developers."} \ No newline at end of file diff --git a/Lib/site-packages/cryptography-1.2.2.dist-info/top_level.txt b/Lib/site-packages/cryptography-1.2.2.dist-info/top_level.txt new file mode 100644 index 0000000..e32461e --- /dev/null +++ b/Lib/site-packages/cryptography-1.2.2.dist-info/top_level.txt @@ -0,0 +1,4 @@ +_constant_time +_openssl +_padding +cryptography diff --git a/Lib/site-packages/cryptography/__about__.py b/Lib/site-packages/cryptography/__about__.py new file mode 100644 index 0000000..198ab49 --- /dev/null +++ b/Lib/site-packages/cryptography/__about__.py @@ -0,0 +1,23 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +__all__ = [ + "__title__", "__summary__", "__uri__", "__version__", "__author__", + "__email__", "__license__", "__copyright__", +] + +__title__ = "cryptography" +__summary__ = ("cryptography is a package which provides cryptographic recipes" + " and primitives to Python developers.") +__uri__ = "https://github.com/pyca/cryptography" + +__version__ = "1.2.2" + +__author__ = "The cryptography developers" +__email__ = "cryptography-dev@python.org" + +__license__ = "BSD or Apache License, Version 2.0" +__copyright__ = "Copyright 2013-2016 {0}".format(__author__) diff --git a/Lib/site-packages/cryptography/__init__.py b/Lib/site-packages/cryptography/__init__.py new file mode 100644 index 0000000..940c66b --- /dev/null +++ b/Lib/site-packages/cryptography/__init__.py @@ -0,0 +1,27 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import absolute_import, division, print_function + +import sys +import warnings + +from cryptography.__about__ import ( + __author__, __copyright__, __email__, __license__, __summary__, __title__, + __uri__, __version__ +) + + +__all__ = [ + "__title__", "__summary__", "__uri__", "__version__", "__author__", + "__email__", "__license__", "__copyright__", +] + +if sys.version_info[:2] == (2, 6): + warnings.warn( + "Python 2.6 is no longer supported by the Python core team, please " + "upgrade your Python. A future version of cryptography will drop " + "support for Python 2.6", + DeprecationWarning + ) diff --git a/Lib/site-packages/cryptography/exceptions.py b/Lib/site-packages/cryptography/exceptions.py new file mode 100644 index 0000000..ee43fed --- /dev/null +++ b/Lib/site-packages/cryptography/exceptions.py @@ -0,0 +1,56 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +from enum import Enum + + +class _Reasons(Enum): + BACKEND_MISSING_INTERFACE = 0 + UNSUPPORTED_HASH = 1 + UNSUPPORTED_CIPHER = 2 + UNSUPPORTED_PADDING = 3 + UNSUPPORTED_MGF = 4 + UNSUPPORTED_PUBLIC_KEY_ALGORITHM = 5 + UNSUPPORTED_ELLIPTIC_CURVE = 6 + UNSUPPORTED_SERIALIZATION = 7 + UNSUPPORTED_X509 = 8 + UNSUPPORTED_EXCHANGE_ALGORITHM = 9 + + +class UnsupportedAlgorithm(Exception): + def __init__(self, message, reason=None): + super(UnsupportedAlgorithm, self).__init__(message) + self._reason = reason + + +class AlreadyFinalized(Exception): + pass + + +class AlreadyUpdated(Exception): + pass + + +class NotYetFinalized(Exception): + pass + + +class InvalidTag(Exception): + pass + + +class InvalidSignature(Exception): + pass + + +class InternalError(Exception): + def __init__(self, msg, err_code): + super(InternalError, self).__init__(msg) + self.err_code = err_code + + +class InvalidKey(Exception): + pass diff --git a/Lib/site-packages/cryptography/fernet.py b/Lib/site-packages/cryptography/fernet.py new file mode 100644 index 0000000..6fbe9f2 --- /dev/null +++ b/Lib/site-packages/cryptography/fernet.py @@ -0,0 +1,141 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import base64 +import binascii +import os +import struct +import time + +import six + +from cryptography.exceptions import InvalidSignature +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import hashes, padding +from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes +from cryptography.hazmat.primitives.hmac import HMAC + + +class InvalidToken(Exception): + pass + + +_MAX_CLOCK_SKEW = 60 + + +class Fernet(object): + def __init__(self, key, backend=None): + if backend is None: + backend = default_backend() + + key = base64.urlsafe_b64decode(key) + if len(key) != 32: + raise ValueError( + "Fernet key must be 32 url-safe base64-encoded bytes." 
+ ) + + self._signing_key = key[:16] + self._encryption_key = key[16:] + self._backend = backend + + @classmethod + def generate_key(cls): + return base64.urlsafe_b64encode(os.urandom(32)) + + def encrypt(self, data): + current_time = int(time.time()) + iv = os.urandom(16) + return self._encrypt_from_parts(data, current_time, iv) + + def _encrypt_from_parts(self, data, current_time, iv): + if not isinstance(data, bytes): + raise TypeError("data must be bytes.") + + padder = padding.PKCS7(algorithms.AES.block_size).padder() + padded_data = padder.update(data) + padder.finalize() + encryptor = Cipher( + algorithms.AES(self._encryption_key), modes.CBC(iv), self._backend + ).encryptor() + ciphertext = encryptor.update(padded_data) + encryptor.finalize() + + basic_parts = ( + b"\x80" + struct.pack(">Q", current_time) + iv + ciphertext + ) + + h = HMAC(self._signing_key, hashes.SHA256(), backend=self._backend) + h.update(basic_parts) + hmac = h.finalize() + return base64.urlsafe_b64encode(basic_parts + hmac) + + def decrypt(self, token, ttl=None): + if not isinstance(token, bytes): + raise TypeError("token must be bytes.") + + current_time = int(time.time()) + + try: + data = base64.urlsafe_b64decode(token) + except (TypeError, binascii.Error): + raise InvalidToken + + if not data or six.indexbytes(data, 0) != 0x80: + raise InvalidToken + + try: + timestamp, = struct.unpack(">Q", data[1:9]) + except struct.error: + raise InvalidToken + if ttl is not None: + if timestamp + ttl < current_time: + raise InvalidToken + if current_time + _MAX_CLOCK_SKEW < timestamp: + raise InvalidToken + h = HMAC(self._signing_key, hashes.SHA256(), backend=self._backend) + h.update(data[:-32]) + try: + h.verify(data[-32:]) + except InvalidSignature: + raise InvalidToken + + iv = data[9:25] + ciphertext = data[25:-32] + decryptor = Cipher( + algorithms.AES(self._encryption_key), modes.CBC(iv), self._backend + ).decryptor() + plaintext_padded = decryptor.update(ciphertext) + try: + plaintext_padded += decryptor.finalize() + except ValueError: + raise InvalidToken + unpadder = padding.PKCS7(algorithms.AES.block_size).unpadder() + + unpadded = unpadder.update(plaintext_padded) + try: + unpadded += unpadder.finalize() + except ValueError: + raise InvalidToken + return unpadded + + +class MultiFernet(object): + def __init__(self, fernets): + fernets = list(fernets) + if not fernets: + raise ValueError( + "MultiFernet requires at least one Fernet instance" + ) + self._fernets = fernets + + def encrypt(self, msg): + return self._fernets[0].encrypt(msg) + + def decrypt(self, msg, ttl=None): + for f in self._fernets: + try: + return f.decrypt(msg, ttl) + except InvalidToken: + pass + raise InvalidToken diff --git a/Lib/site-packages/cryptography/hazmat/__init__.py b/Lib/site-packages/cryptography/hazmat/__init__.py new file mode 100644 index 0000000..4b54088 --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/__init__.py @@ -0,0 +1,5 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
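The Fernet implementation patched in above builds each token with a fixed layout: a 0x80 version byte, a big-endian 64-bit timestamp, a 16-byte CBC IV, the PKCS7-padded AES-128-CBC ciphertext, and a trailing 32-byte HMAC-SHA256, all url-safe base64 encoded; decrypt() reverses the same slices and enforces the optional ttl plus a 60-second clock-skew bound. A minimal sketch of that layout using only the public API added by this hunk (illustrative only, not part of the patched files; the sample plaintext is chosen here):

import base64
import struct

from cryptography.fernet import Fernet

key = Fernet.generate_key()
token = Fernet(key).encrypt(b"attack at dawn")     # sample plaintext, chosen for illustration

raw = base64.urlsafe_b64decode(token)
version = raw[0:1]                                 # always b"\x80"
timestamp, = struct.unpack(">Q", raw[1:9])         # encryption time, seconds since epoch
iv = raw[9:25]                                     # 16-byte AES-CBC IV
ciphertext = raw[25:-32]                           # PKCS7-padded AES-128-CBC body
hmac_tag = raw[-32:]                               # HMAC-SHA256 computed over raw[:-32]

assert version == b"\x80" and len(iv) == 16 and len(hmac_tag) == 32

MultiFernet, defined in the same hunk, handles key rotation by trying decrypt() with each wrapped Fernet in order and re-raising InvalidToken only when every key fails.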
+ +from __future__ import absolute_import, division, print_function diff --git a/Lib/site-packages/cryptography/hazmat/backends/__init__.py b/Lib/site-packages/cryptography/hazmat/backends/__init__.py new file mode 100644 index 0000000..256fee3 --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/backends/__init__.py @@ -0,0 +1,42 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import pkg_resources + +from cryptography.hazmat.backends.multibackend import MultiBackend + + +_available_backends_list = None + + +def _available_backends(): + global _available_backends_list + + if _available_backends_list is None: + _available_backends_list = [ + # setuptools 11.3 deprecated support for the require parameter to + # load(), and introduced the new resolve() method instead. + # This can be removed if/when we can assume setuptools>=11.3. At + # some point we may wish to add a warning, to push people along, + # but at present this would result in too many warnings. + ep.resolve() if hasattr(ep, "resolve") else ep.load(require=False) + for ep in pkg_resources.iter_entry_points( + "cryptography.backends" + ) + ] + + return _available_backends_list + +_default_backend = None + + +def default_backend(): + global _default_backend + + if _default_backend is None: + _default_backend = MultiBackend(_available_backends()) + + return _default_backend diff --git a/Lib/site-packages/cryptography/hazmat/backends/commoncrypto/__init__.py b/Lib/site-packages/cryptography/hazmat/backends/commoncrypto/__init__.py new file mode 100644 index 0000000..1d52a25 --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/backends/commoncrypto/__init__.py @@ -0,0 +1,10 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +from cryptography.hazmat.backends.commoncrypto.backend import backend + + +__all__ = ["backend"] diff --git a/Lib/site-packages/cryptography/hazmat/backends/commoncrypto/backend.py b/Lib/site-packages/cryptography/hazmat/backends/commoncrypto/backend.py new file mode 100644 index 0000000..315d67d --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/backends/commoncrypto/backend.py @@ -0,0 +1,245 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
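The backends/__init__.py hunk above discovers backends through the cryptography.backends entry-point group declared in entry_points.txt earlier in this patch, caches the resolved objects, and wraps them in a single MultiBackend on the first call to default_backend(). A minimal sketch of that discovery path, assuming the wheel's OpenSSL backend imports cleanly on the target win32 install (illustrative only, not part of the patched files):

import pkg_resources

from cryptography.hazmat.backends import default_backend

# What _available_backends() iterates over: one entry point per registered
# backend, e.g. "openssl = cryptography.hazmat.backends.openssl:backend".
for ep in pkg_resources.iter_entry_points("cryptography.backends"):
    print(ep.name)                     # "openssl" for the wheel in this patch

# Call sites obtain the cached MultiBackend once and hand it to primitives.
backend = default_backend()
print(backend.name)                    # "multibackend"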
+ +from __future__ import absolute_import, division, print_function + +from collections import namedtuple + +from cryptography import utils +from cryptography.exceptions import InternalError +from cryptography.hazmat.backends.commoncrypto.ciphers import ( + _CipherContext, _GCMCipherContext +) +from cryptography.hazmat.backends.commoncrypto.hashes import _HashContext +from cryptography.hazmat.backends.commoncrypto.hmac import _HMACContext +from cryptography.hazmat.backends.interfaces import ( + CipherBackend, HMACBackend, HashBackend, PBKDF2HMACBackend +) +from cryptography.hazmat.bindings.commoncrypto.binding import Binding +from cryptography.hazmat.primitives.ciphers.algorithms import ( + AES, ARC4, Blowfish, CAST5, TripleDES +) +from cryptography.hazmat.primitives.ciphers.modes import ( + CBC, CFB, CFB8, CTR, ECB, GCM, OFB +) + + +HashMethods = namedtuple( + "HashMethods", ["ctx", "hash_init", "hash_update", "hash_final"] +) + + +@utils.register_interface(CipherBackend) +@utils.register_interface(HashBackend) +@utils.register_interface(HMACBackend) +@utils.register_interface(PBKDF2HMACBackend) +class Backend(object): + """ + CommonCrypto API wrapper. + """ + name = "commoncrypto" + + def __init__(self): + self._binding = Binding() + self._ffi = self._binding.ffi + self._lib = self._binding.lib + + self._cipher_registry = {} + self._register_default_ciphers() + self._hash_mapping = { + "md5": HashMethods( + "CC_MD5_CTX *", self._lib.CC_MD5_Init, + self._lib.CC_MD5_Update, self._lib.CC_MD5_Final + ), + "sha1": HashMethods( + "CC_SHA1_CTX *", self._lib.CC_SHA1_Init, + self._lib.CC_SHA1_Update, self._lib.CC_SHA1_Final + ), + "sha224": HashMethods( + "CC_SHA256_CTX *", self._lib.CC_SHA224_Init, + self._lib.CC_SHA224_Update, self._lib.CC_SHA224_Final + ), + "sha256": HashMethods( + "CC_SHA256_CTX *", self._lib.CC_SHA256_Init, + self._lib.CC_SHA256_Update, self._lib.CC_SHA256_Final + ), + "sha384": HashMethods( + "CC_SHA512_CTX *", self._lib.CC_SHA384_Init, + self._lib.CC_SHA384_Update, self._lib.CC_SHA384_Final + ), + "sha512": HashMethods( + "CC_SHA512_CTX *", self._lib.CC_SHA512_Init, + self._lib.CC_SHA512_Update, self._lib.CC_SHA512_Final + ), + } + + self._supported_hmac_algorithms = { + "md5": self._lib.kCCHmacAlgMD5, + "sha1": self._lib.kCCHmacAlgSHA1, + "sha224": self._lib.kCCHmacAlgSHA224, + "sha256": self._lib.kCCHmacAlgSHA256, + "sha384": self._lib.kCCHmacAlgSHA384, + "sha512": self._lib.kCCHmacAlgSHA512, + } + + self._supported_pbkdf2_hmac_algorithms = { + "sha1": self._lib.kCCPRFHmacAlgSHA1, + "sha224": self._lib.kCCPRFHmacAlgSHA224, + "sha256": self._lib.kCCPRFHmacAlgSHA256, + "sha384": self._lib.kCCPRFHmacAlgSHA384, + "sha512": self._lib.kCCPRFHmacAlgSHA512, + } + + def hash_supported(self, algorithm): + return algorithm.name in self._hash_mapping + + def hmac_supported(self, algorithm): + return algorithm.name in self._supported_hmac_algorithms + + def create_hash_ctx(self, algorithm): + return _HashContext(self, algorithm) + + def create_hmac_ctx(self, key, algorithm): + return _HMACContext(self, key, algorithm) + + def cipher_supported(self, cipher, mode): + return (type(cipher), type(mode)) in self._cipher_registry + + def create_symmetric_encryption_ctx(self, cipher, mode): + if isinstance(mode, GCM): + return _GCMCipherContext( + self, cipher, mode, self._lib.kCCEncrypt + ) + else: + return _CipherContext(self, cipher, mode, self._lib.kCCEncrypt) + + def create_symmetric_decryption_ctx(self, cipher, mode): + if isinstance(mode, GCM): + return _GCMCipherContext( + self, 
cipher, mode, self._lib.kCCDecrypt + ) + else: + return _CipherContext(self, cipher, mode, self._lib.kCCDecrypt) + + def pbkdf2_hmac_supported(self, algorithm): + return algorithm.name in self._supported_pbkdf2_hmac_algorithms + + def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations, + key_material): + alg_enum = self._supported_pbkdf2_hmac_algorithms[algorithm.name] + buf = self._ffi.new("char[]", length) + res = self._lib.CCKeyDerivationPBKDF( + self._lib.kCCPBKDF2, + key_material, + len(key_material), + salt, + len(salt), + alg_enum, + iterations, + buf, + length + ) + self._check_cipher_response(res) + + return self._ffi.buffer(buf)[:] + + def _register_cipher_adapter(self, cipher_cls, cipher_const, mode_cls, + mode_const): + if (cipher_cls, mode_cls) in self._cipher_registry: + raise ValueError("Duplicate registration for: {0} {1}.".format( + cipher_cls, mode_cls) + ) + self._cipher_registry[cipher_cls, mode_cls] = (cipher_const, + mode_const) + + def _register_default_ciphers(self): + for mode_cls, mode_const in [ + (CBC, self._lib.kCCModeCBC), + (ECB, self._lib.kCCModeECB), + (CFB, self._lib.kCCModeCFB), + (CFB8, self._lib.kCCModeCFB8), + (OFB, self._lib.kCCModeOFB), + (CTR, self._lib.kCCModeCTR), + (GCM, self._lib.kCCModeGCM), + ]: + self._register_cipher_adapter( + AES, + self._lib.kCCAlgorithmAES128, + mode_cls, + mode_const + ) + for mode_cls, mode_const in [ + (CBC, self._lib.kCCModeCBC), + (ECB, self._lib.kCCModeECB), + (CFB, self._lib.kCCModeCFB), + (CFB8, self._lib.kCCModeCFB8), + (OFB, self._lib.kCCModeOFB), + ]: + self._register_cipher_adapter( + TripleDES, + self._lib.kCCAlgorithm3DES, + mode_cls, + mode_const + ) + for mode_cls, mode_const in [ + (CBC, self._lib.kCCModeCBC), + (ECB, self._lib.kCCModeECB), + (CFB, self._lib.kCCModeCFB), + (OFB, self._lib.kCCModeOFB) + ]: + self._register_cipher_adapter( + Blowfish, + self._lib.kCCAlgorithmBlowfish, + mode_cls, + mode_const + ) + for mode_cls, mode_const in [ + (CBC, self._lib.kCCModeCBC), + (ECB, self._lib.kCCModeECB), + (CFB, self._lib.kCCModeCFB), + (OFB, self._lib.kCCModeOFB), + (CTR, self._lib.kCCModeCTR) + ]: + self._register_cipher_adapter( + CAST5, + self._lib.kCCAlgorithmCAST, + mode_cls, + mode_const + ) + self._register_cipher_adapter( + ARC4, + self._lib.kCCAlgorithmRC4, + type(None), + self._lib.kCCModeRC4 + ) + + def _check_cipher_response(self, response): + if response == self._lib.kCCSuccess: + return + elif response == self._lib.kCCAlignmentError: + # This error is not currently triggered due to a bug filed as + # rdar://15589470 + raise ValueError( + "The length of the provided data is not a multiple of " + "the block length." + ) + else: + raise InternalError( + "The backend returned an unknown error, consider filing a bug." + " Code: {0}.".format(response), + response + ) + + def _release_cipher_ctx(self, ctx): + """ + Called by the garbage collector and used to safely dereference and + release the context. + """ + if ctx[0] != self._ffi.NULL: + res = self._lib.CCCryptorRelease(ctx[0]) + self._check_cipher_response(res) + ctx[0] = self._ffi.NULL + + +backend = Backend() diff --git a/Lib/site-packages/cryptography/hazmat/backends/commoncrypto/ciphers.py b/Lib/site-packages/cryptography/hazmat/backends/commoncrypto/ciphers.py new file mode 100644 index 0000000..1ce8aec --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/backends/commoncrypto/ciphers.py @@ -0,0 +1,193 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. 
See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.exceptions import ( + InvalidTag, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.primitives import ciphers, constant_time +from cryptography.hazmat.primitives.ciphers import modes +from cryptography.hazmat.primitives.ciphers.modes import ( + CFB, CFB8, CTR, OFB +) + + +@utils.register_interface(ciphers.CipherContext) +class _CipherContext(object): + def __init__(self, backend, cipher, mode, operation): + self._backend = backend + self._cipher = cipher + self._mode = mode + self._operation = operation + # There is a bug in CommonCrypto where block ciphers do not raise + # kCCAlignmentError when finalizing if you supply non-block aligned + # data. To work around this we need to keep track of the block + # alignment ourselves, but only for alg+mode combos that require + # block alignment. OFB, CFB, and CTR make a block cipher algorithm + # into a stream cipher so we don't need to track them (and thus their + # block size is effectively 1 byte just like OpenSSL/CommonCrypto + # treat RC4 and other stream cipher block sizes). + # This bug has been filed as rdar://15589470 + self._bytes_processed = 0 + if (isinstance(cipher, ciphers.BlockCipherAlgorithm) and not + isinstance(mode, (OFB, CFB, CFB8, CTR))): + self._byte_block_size = cipher.block_size // 8 + else: + self._byte_block_size = 1 + + registry = self._backend._cipher_registry + try: + cipher_enum, mode_enum = registry[type(cipher), type(mode)] + except KeyError: + raise UnsupportedAlgorithm( + "cipher {0} in {1} mode is not supported " + "by this backend.".format( + cipher.name, mode.name if mode else mode), + _Reasons.UNSUPPORTED_CIPHER + ) + + ctx = self._backend._ffi.new("CCCryptorRef *") + ctx = self._backend._ffi.gc(ctx, self._backend._release_cipher_ctx) + + if isinstance(mode, modes.ModeWithInitializationVector): + iv_nonce = mode.initialization_vector + elif isinstance(mode, modes.ModeWithNonce): + iv_nonce = mode.nonce + else: + iv_nonce = self._backend._ffi.NULL + + if isinstance(mode, CTR): + mode_option = self._backend._lib.kCCModeOptionCTR_BE + else: + mode_option = 0 + + res = self._backend._lib.CCCryptorCreateWithMode( + operation, + mode_enum, cipher_enum, + self._backend._lib.ccNoPadding, iv_nonce, + cipher.key, len(cipher.key), + self._backend._ffi.NULL, 0, 0, mode_option, ctx) + self._backend._check_cipher_response(res) + + self._ctx = ctx + + def update(self, data): + # Count bytes processed to handle block alignment. + self._bytes_processed += len(data) + buf = self._backend._ffi.new( + "unsigned char[]", len(data) + self._byte_block_size - 1) + outlen = self._backend._ffi.new("size_t *") + res = self._backend._lib.CCCryptorUpdate( + self._ctx[0], data, len(data), buf, + len(data) + self._byte_block_size - 1, outlen) + self._backend._check_cipher_response(res) + return self._backend._ffi.buffer(buf)[:outlen[0]] + + def finalize(self): + # Raise error if block alignment is wrong. + if self._bytes_processed % self._byte_block_size: + raise ValueError( + "The length of the provided data is not a multiple of " + "the block length." 
+ ) + buf = self._backend._ffi.new("unsigned char[]", self._byte_block_size) + outlen = self._backend._ffi.new("size_t *") + res = self._backend._lib.CCCryptorFinal( + self._ctx[0], buf, len(buf), outlen) + self._backend._check_cipher_response(res) + self._backend._release_cipher_ctx(self._ctx) + return self._backend._ffi.buffer(buf)[:outlen[0]] + + +@utils.register_interface(ciphers.AEADCipherContext) +@utils.register_interface(ciphers.AEADEncryptionContext) +class _GCMCipherContext(object): + def __init__(self, backend, cipher, mode, operation): + self._backend = backend + self._cipher = cipher + self._mode = mode + self._operation = operation + self._tag = None + + registry = self._backend._cipher_registry + try: + cipher_enum, mode_enum = registry[type(cipher), type(mode)] + except KeyError: + raise UnsupportedAlgorithm( + "cipher {0} in {1} mode is not supported " + "by this backend.".format( + cipher.name, mode.name if mode else mode), + _Reasons.UNSUPPORTED_CIPHER + ) + + ctx = self._backend._ffi.new("CCCryptorRef *") + ctx = self._backend._ffi.gc(ctx, self._backend._release_cipher_ctx) + + self._ctx = ctx + + res = self._backend._lib.CCCryptorCreateWithMode( + operation, + mode_enum, cipher_enum, + self._backend._lib.ccNoPadding, + self._backend._ffi.NULL, + cipher.key, len(cipher.key), + self._backend._ffi.NULL, 0, 0, 0, self._ctx) + self._backend._check_cipher_response(res) + + res = self._backend._lib.CCCryptorGCMAddIV( + self._ctx[0], + mode.initialization_vector, + len(mode.initialization_vector) + ) + self._backend._check_cipher_response(res) + # CommonCrypto has a bug where calling update without at least one + # call to authenticate_additional_data will result in null byte output + # for ciphertext. The following empty byte string call prevents the + # issue, which is present in at least 10.8 and 10.9. + # Filed as rdar://18314544 + self.authenticate_additional_data(b"") + + def update(self, data): + buf = self._backend._ffi.new("unsigned char[]", len(data)) + args = (self._ctx[0], data, len(data), buf) + if self._operation == self._backend._lib.kCCEncrypt: + res = self._backend._lib.CCCryptorGCMEncrypt(*args) + else: + res = self._backend._lib.CCCryptorGCMDecrypt(*args) + + self._backend._check_cipher_response(res) + return self._backend._ffi.buffer(buf)[:] + + def finalize(self): + # CommonCrypto has a yet another bug where you must make at least one + # call to update. If you pass just AAD and call finalize without a call + # to update you'll get null bytes for tag. The following update call + # prevents this issue, which is present in at least 10.8 and 10.9. 
+ # Filed as rdar://18314580 + self.update(b"") + tag_size = self._cipher.block_size // 8 + tag_buf = self._backend._ffi.new("unsigned char[]", tag_size) + tag_len = self._backend._ffi.new("size_t *", tag_size) + res = self._backend._lib.CCCryptorGCMFinal( + self._ctx[0], tag_buf, tag_len + ) + self._backend._check_cipher_response(res) + self._backend._release_cipher_ctx(self._ctx) + self._tag = self._backend._ffi.buffer(tag_buf)[:] + if (self._operation == self._backend._lib.kCCDecrypt and + not constant_time.bytes_eq( + self._tag[:len(self._mode.tag)], self._mode.tag + )): + raise InvalidTag + return b"" + + def authenticate_additional_data(self, data): + res = self._backend._lib.CCCryptorGCMAddAAD( + self._ctx[0], data, len(data) + ) + self._backend._check_cipher_response(res) + + tag = utils.read_only_property("_tag") diff --git a/Lib/site-packages/cryptography/hazmat/backends/commoncrypto/hashes.py b/Lib/site-packages/cryptography/hazmat/backends/commoncrypto/hashes.py new file mode 100644 index 0000000..a54e983 --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/backends/commoncrypto/hashes.py @@ -0,0 +1,55 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.primitives import hashes + + +@utils.register_interface(hashes.HashContext) +class _HashContext(object): + def __init__(self, backend, algorithm, ctx=None): + self._algorithm = algorithm + self._backend = backend + + if ctx is None: + try: + methods = self._backend._hash_mapping[self.algorithm.name] + except KeyError: + raise UnsupportedAlgorithm( + "{0} is not a supported hash on this backend.".format( + algorithm.name), + _Reasons.UNSUPPORTED_HASH + ) + ctx = self._backend._ffi.new(methods.ctx) + res = methods.hash_init(ctx) + assert res == 1 + + self._ctx = ctx + + algorithm = utils.read_only_property("_algorithm") + + def copy(self): + methods = self._backend._hash_mapping[self.algorithm.name] + new_ctx = self._backend._ffi.new(methods.ctx) + # CommonCrypto has no APIs for copying hashes, so we have to copy the + # underlying struct. + new_ctx[0] = self._ctx[0] + + return _HashContext(self._backend, self.algorithm, ctx=new_ctx) + + def update(self, data): + methods = self._backend._hash_mapping[self.algorithm.name] + res = methods.hash_update(self._ctx, data, len(data)) + assert res == 1 + + def finalize(self): + methods = self._backend._hash_mapping[self.algorithm.name] + buf = self._backend._ffi.new("unsigned char[]", + self.algorithm.digest_size) + res = methods.hash_final(buf, self._ctx) + assert res == 1 + return self._backend._ffi.buffer(buf)[:] diff --git a/Lib/site-packages/cryptography/hazmat/backends/commoncrypto/hmac.py b/Lib/site-packages/cryptography/hazmat/backends/commoncrypto/hmac.py new file mode 100644 index 0000000..ae623d8 --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/backends/commoncrypto/hmac.py @@ -0,0 +1,59 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
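Because CommonCrypto has no API for duplicating a running digest, the _HashContext.copy() patched in above clones the underlying CC_*_CTX struct directly (new_ctx[0] = self._ctx[0]). From the caller's side this surfaces as hashes.Hash.copy(), which forks the accumulated state instead of re-hashing the shared prefix. A minimal sketch, assuming the public hashes.Hash API whose definition is not part of this hunk (illustrative only, not part of the patched files):

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes

h = hashes.Hash(hashes.SHA256(), backend=default_backend())
h.update(b"common prefix ")

fork = h.copy()                  # independent copy of the running digest state
h.update(b"variant one")
fork.update(b"variant two")

assert h.finalize() != fork.finalize()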
+ +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.exceptions import ( + InvalidSignature, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.primitives import constant_time, hashes, interfaces + + +@utils.register_interface(interfaces.MACContext) +@utils.register_interface(hashes.HashContext) +class _HMACContext(object): + def __init__(self, backend, key, algorithm, ctx=None): + self._algorithm = algorithm + self._backend = backend + if ctx is None: + ctx = self._backend._ffi.new("CCHmacContext *") + try: + alg = self._backend._supported_hmac_algorithms[algorithm.name] + except KeyError: + raise UnsupportedAlgorithm( + "{0} is not a supported HMAC hash on this backend.".format( + algorithm.name), + _Reasons.UNSUPPORTED_HASH + ) + + self._backend._lib.CCHmacInit(ctx, alg, key, len(key)) + + self._ctx = ctx + self._key = key + + algorithm = utils.read_only_property("_algorithm") + + def copy(self): + copied_ctx = self._backend._ffi.new("CCHmacContext *") + # CommonCrypto has no APIs for copying HMACs, so we have to copy the + # underlying struct. + copied_ctx[0] = self._ctx[0] + return _HMACContext( + self._backend, self._key, self.algorithm, ctx=copied_ctx + ) + + def update(self, data): + self._backend._lib.CCHmacUpdate(self._ctx, data, len(data)) + + def finalize(self): + buf = self._backend._ffi.new("unsigned char[]", + self.algorithm.digest_size) + self._backend._lib.CCHmacFinal(self._ctx, buf) + return self._backend._ffi.buffer(buf)[:] + + def verify(self, signature): + digest = self.finalize() + if not constant_time.bytes_eq(digest, signature): + raise InvalidSignature("Signature did not match digest.") diff --git a/Lib/site-packages/cryptography/hazmat/backends/interfaces.py b/Lib/site-packages/cryptography/hazmat/backends/interfaces.py new file mode 100644 index 0000000..5b9e6f3 --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/backends/interfaces.py @@ -0,0 +1,359 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc + +import six + + +@six.add_metaclass(abc.ABCMeta) +class CipherBackend(object): + @abc.abstractmethod + def cipher_supported(self, cipher, mode): + """ + Return True if the given cipher and mode are supported. + """ + + @abc.abstractmethod + def create_symmetric_encryption_ctx(self, cipher, mode): + """ + Get a CipherContext that can be used for encryption. + """ + + @abc.abstractmethod + def create_symmetric_decryption_ctx(self, cipher, mode): + """ + Get a CipherContext that can be used for decryption. + """ + + +@six.add_metaclass(abc.ABCMeta) +class HashBackend(object): + @abc.abstractmethod + def hash_supported(self, algorithm): + """ + Return True if the hash algorithm is supported by this backend. + """ + + @abc.abstractmethod + def create_hash_ctx(self, algorithm): + """ + Create a HashContext for calculating a message digest. + """ + + +@six.add_metaclass(abc.ABCMeta) +class HMACBackend(object): + @abc.abstractmethod + def hmac_supported(self, algorithm): + """ + Return True if the hash algorithm is supported for HMAC by this + backend. + """ + + @abc.abstractmethod + def create_hmac_ctx(self, key, algorithm): + """ + Create a MACContext for calculating a message authentication code. 
+ """ + + +@six.add_metaclass(abc.ABCMeta) +class CMACBackend(object): + @abc.abstractmethod + def cmac_algorithm_supported(self, algorithm): + """ + Returns True if the block cipher is supported for CMAC by this backend + """ + + @abc.abstractmethod + def create_cmac_ctx(self, algorithm): + """ + Create a MACContext for calculating a message authentication code. + """ + + +@six.add_metaclass(abc.ABCMeta) +class PBKDF2HMACBackend(object): + @abc.abstractmethod + def pbkdf2_hmac_supported(self, algorithm): + """ + Return True if the hash algorithm is supported for PBKDF2 by this + backend. + """ + + @abc.abstractmethod + def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations, + key_material): + """ + Return length bytes derived from provided PBKDF2 parameters. + """ + + +@six.add_metaclass(abc.ABCMeta) +class RSABackend(object): + @abc.abstractmethod + def generate_rsa_private_key(self, public_exponent, key_size): + """ + Generate an RSAPrivateKey instance with public_exponent and a modulus + of key_size bits. + """ + + @abc.abstractmethod + def rsa_padding_supported(self, padding): + """ + Returns True if the backend supports the given padding options. + """ + + @abc.abstractmethod + def generate_rsa_parameters_supported(self, public_exponent, key_size): + """ + Returns True if the backend supports the given parameters for key + generation. + """ + + @abc.abstractmethod + def load_rsa_private_numbers(self, numbers): + """ + Returns an RSAPrivateKey provider. + """ + + @abc.abstractmethod + def load_rsa_public_numbers(self, numbers): + """ + Returns an RSAPublicKey provider. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DSABackend(object): + @abc.abstractmethod + def generate_dsa_parameters(self, key_size): + """ + Generate a DSAParameters instance with a modulus of key_size bits. + """ + + @abc.abstractmethod + def generate_dsa_private_key(self, parameters): + """ + Generate a DSAPrivateKey instance with parameters as a DSAParameters + object. + """ + + @abc.abstractmethod + def generate_dsa_private_key_and_parameters(self, key_size): + """ + Generate a DSAPrivateKey instance using key size only. + """ + + @abc.abstractmethod + def dsa_hash_supported(self, algorithm): + """ + Return True if the hash algorithm is supported by the backend for DSA. + """ + + @abc.abstractmethod + def dsa_parameters_supported(self, p, q, g): + """ + Return True if the parameters are supported by the backend for DSA. + """ + + @abc.abstractmethod + def load_dsa_private_numbers(self, numbers): + """ + Returns a DSAPrivateKey provider. + """ + + @abc.abstractmethod + def load_dsa_public_numbers(self, numbers): + """ + Returns a DSAPublicKey provider. + """ + + @abc.abstractmethod + def load_dsa_parameter_numbers(self, numbers): + """ + Returns a DSAParameters provider. + """ + + +@six.add_metaclass(abc.ABCMeta) +class EllipticCurveBackend(object): + @abc.abstractmethod + def elliptic_curve_signature_algorithm_supported( + self, signature_algorithm, curve + ): + """ + Returns True if the backend supports the named elliptic curve with the + specified signature algorithm. + """ + + @abc.abstractmethod + def elliptic_curve_supported(self, curve): + """ + Returns True if the backend supports the named elliptic curve. + """ + + @abc.abstractmethod + def generate_elliptic_curve_private_key(self, curve): + """ + Return an object conforming to the EllipticCurvePrivateKey interface. 
+ """ + + @abc.abstractmethod + def load_elliptic_curve_public_numbers(self, numbers): + """ + Return an EllipticCurvePublicKey provider using the given numbers. + """ + + @abc.abstractmethod + def load_elliptic_curve_private_numbers(self, numbers): + """ + Return an EllipticCurvePrivateKey provider using the given numbers. + """ + + @abc.abstractmethod + def elliptic_curve_exchange_algorithm_supported(self, algorithm, curve): + """ + Returns whether the exchange algorithm is supported by this backend. + """ + + +@six.add_metaclass(abc.ABCMeta) +class PEMSerializationBackend(object): + @abc.abstractmethod + def load_pem_private_key(self, data, password): + """ + Loads a private key from PEM encoded data, using the provided password + if the data is encrypted. + """ + + @abc.abstractmethod + def load_pem_public_key(self, data): + """ + Loads a public key from PEM encoded data. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DERSerializationBackend(object): + @abc.abstractmethod + def load_der_private_key(self, data, password): + """ + Loads a private key from DER encoded data. Uses the provided password + if the data is encrypted. + """ + + @abc.abstractmethod + def load_der_public_key(self, data): + """ + Loads a public key from DER encoded data. + """ + + +@six.add_metaclass(abc.ABCMeta) +class X509Backend(object): + @abc.abstractmethod + def load_pem_x509_certificate(self, data): + """ + Load an X.509 certificate from PEM encoded data. + """ + + @abc.abstractmethod + def load_der_x509_certificate(self, data): + """ + Load an X.509 certificate from DER encoded data. + """ + + @abc.abstractmethod + def load_der_x509_csr(self, data): + """ + Load an X.509 CSR from DER encoded data. + """ + + @abc.abstractmethod + def load_pem_x509_csr(self, data): + """ + Load an X.509 CSR from PEM encoded data. + """ + + @abc.abstractmethod + def create_x509_csr(self, builder, private_key, algorithm): + """ + Create and sign an X.509 CSR from a CSR builder object. + """ + + @abc.abstractmethod + def create_x509_certificate(self, builder, private_key, algorithm): + """ + Create and sign an X.509 certificate from a CertificateBuilder object. + """ + + @abc.abstractmethod + def create_x509_crl(self, builder, private_key, algorithm): + """ + Create and sign an X.509 CertificateRevocationList from a + CertificateRevocationListBuilder object. + """ + + @abc.abstractmethod + def create_x509_revoked_certificate(self, builder): + """ + Create a RevokedCertificate object from a RevokedCertificateBuilder + object. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DHBackend(object): + @abc.abstractmethod + def generate_dh_parameters(self, key_size): + """ + Generate a DHParameters instance with a modulus of key_size bits. + """ + + @abc.abstractmethod + def generate_dh_private_key(self, parameters): + """ + Generate a DHPrivateKey instance with parameters as a DHParameters + object. + """ + + @abc.abstractmethod + def generate_dh_private_key_and_parameters(self, key_size): + """ + Generate a DHPrivateKey instance using key size only. + """ + + @abc.abstractmethod + def load_dh_private_numbers(self, numbers): + """ + Returns a DHPrivateKey provider. + """ + + @abc.abstractmethod + def load_dh_public_numbers(self, numbers): + """ + Returns a DHPublicKey provider. + """ + + @abc.abstractmethod + def load_dh_parameter_numbers(self, numbers): + """ + Returns a DHParameters provider. 
+ """ + + @abc.abstractmethod + def dh_exchange_algorithm_supported(self, exchange_algorithm): + """ + Returns whether the exchange algorithm is supported by this backend. + """ + + @abc.abstractmethod + def dh_parameters_supported(self, p, g): + """ + Returns whether the backend supports DH with these parameter values. + """ diff --git a/Lib/site-packages/cryptography/hazmat/backends/multibackend.py b/Lib/site-packages/cryptography/hazmat/backends/multibackend.py new file mode 100644 index 0000000..65f1853 --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/backends/multibackend.py @@ -0,0 +1,404 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.backends.interfaces import ( + CMACBackend, CipherBackend, DERSerializationBackend, DSABackend, + EllipticCurveBackend, HMACBackend, HashBackend, PBKDF2HMACBackend, + PEMSerializationBackend, RSABackend, X509Backend +) + + +@utils.register_interface(CMACBackend) +@utils.register_interface(CipherBackend) +@utils.register_interface(DERSerializationBackend) +@utils.register_interface(HashBackend) +@utils.register_interface(HMACBackend) +@utils.register_interface(PBKDF2HMACBackend) +@utils.register_interface(RSABackend) +@utils.register_interface(DSABackend) +@utils.register_interface(EllipticCurveBackend) +@utils.register_interface(PEMSerializationBackend) +@utils.register_interface(X509Backend) +class MultiBackend(object): + name = "multibackend" + + def __init__(self, backends): + self._backends = backends + + def _filtered_backends(self, interface): + for b in self._backends: + if isinstance(b, interface): + yield b + + def cipher_supported(self, cipher, mode): + return any( + b.cipher_supported(cipher, mode) + for b in self._filtered_backends(CipherBackend) + ) + + def create_symmetric_encryption_ctx(self, cipher, mode): + for b in self._filtered_backends(CipherBackend): + try: + return b.create_symmetric_encryption_ctx(cipher, mode) + except UnsupportedAlgorithm: + pass + raise UnsupportedAlgorithm( + "cipher {0} in {1} mode is not supported by this backend.".format( + cipher.name, mode.name if mode else mode), + _Reasons.UNSUPPORTED_CIPHER + ) + + def create_symmetric_decryption_ctx(self, cipher, mode): + for b in self._filtered_backends(CipherBackend): + try: + return b.create_symmetric_decryption_ctx(cipher, mode) + except UnsupportedAlgorithm: + pass + raise UnsupportedAlgorithm( + "cipher {0} in {1} mode is not supported by this backend.".format( + cipher.name, mode.name if mode else mode), + _Reasons.UNSUPPORTED_CIPHER + ) + + def hash_supported(self, algorithm): + return any( + b.hash_supported(algorithm) + for b in self._filtered_backends(HashBackend) + ) + + def create_hash_ctx(self, algorithm): + for b in self._filtered_backends(HashBackend): + try: + return b.create_hash_ctx(algorithm) + except UnsupportedAlgorithm: + pass + raise UnsupportedAlgorithm( + "{0} is not a supported hash on this backend.".format( + algorithm.name), + _Reasons.UNSUPPORTED_HASH + ) + + def hmac_supported(self, algorithm): + return any( + b.hmac_supported(algorithm) + for b in self._filtered_backends(HMACBackend) + ) + + def create_hmac_ctx(self, key, algorithm): + for b in self._filtered_backends(HMACBackend): + try: + return 
b.create_hmac_ctx(key, algorithm) + except UnsupportedAlgorithm: + pass + raise UnsupportedAlgorithm( + "{0} is not a supported hash on this backend.".format( + algorithm.name), + _Reasons.UNSUPPORTED_HASH + ) + + def pbkdf2_hmac_supported(self, algorithm): + return any( + b.pbkdf2_hmac_supported(algorithm) + for b in self._filtered_backends(PBKDF2HMACBackend) + ) + + def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations, + key_material): + for b in self._filtered_backends(PBKDF2HMACBackend): + try: + return b.derive_pbkdf2_hmac( + algorithm, length, salt, iterations, key_material + ) + except UnsupportedAlgorithm: + pass + raise UnsupportedAlgorithm( + "{0} is not a supported hash on this backend.".format( + algorithm.name), + _Reasons.UNSUPPORTED_HASH + ) + + def generate_rsa_private_key(self, public_exponent, key_size): + for b in self._filtered_backends(RSABackend): + return b.generate_rsa_private_key(public_exponent, key_size) + raise UnsupportedAlgorithm("RSA is not supported by the backend.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def generate_rsa_parameters_supported(self, public_exponent, key_size): + for b in self._filtered_backends(RSABackend): + return b.generate_rsa_parameters_supported( + public_exponent, key_size + ) + raise UnsupportedAlgorithm("RSA is not supported by the backend.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def rsa_padding_supported(self, padding): + for b in self._filtered_backends(RSABackend): + return b.rsa_padding_supported(padding) + raise UnsupportedAlgorithm("RSA is not supported by the backend.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def load_rsa_private_numbers(self, numbers): + for b in self._filtered_backends(RSABackend): + return b.load_rsa_private_numbers(numbers) + + raise UnsupportedAlgorithm("RSA is not supported by the backend", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def load_rsa_public_numbers(self, numbers): + for b in self._filtered_backends(RSABackend): + return b.load_rsa_public_numbers(numbers) + + raise UnsupportedAlgorithm("RSA is not supported by the backend", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def generate_dsa_parameters(self, key_size): + for b in self._filtered_backends(DSABackend): + return b.generate_dsa_parameters(key_size) + raise UnsupportedAlgorithm("DSA is not supported by the backend.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def generate_dsa_private_key(self, parameters): + for b in self._filtered_backends(DSABackend): + return b.generate_dsa_private_key(parameters) + raise UnsupportedAlgorithm("DSA is not supported by the backend.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def generate_dsa_private_key_and_parameters(self, key_size): + for b in self._filtered_backends(DSABackend): + return b.generate_dsa_private_key_and_parameters(key_size) + raise UnsupportedAlgorithm("DSA is not supported by the backend.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def dsa_hash_supported(self, algorithm): + for b in self._filtered_backends(DSABackend): + return b.dsa_hash_supported(algorithm) + raise UnsupportedAlgorithm("DSA is not supported by the backend.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def dsa_parameters_supported(self, p, q, g): + for b in self._filtered_backends(DSABackend): + return b.dsa_parameters_supported(p, q, g) + raise UnsupportedAlgorithm("DSA is not supported by the backend.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def load_dsa_public_numbers(self, numbers): + for b in self._filtered_backends(DSABackend): 
+ return b.load_dsa_public_numbers(numbers) + raise UnsupportedAlgorithm("DSA is not supported by the backend.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def load_dsa_private_numbers(self, numbers): + for b in self._filtered_backends(DSABackend): + return b.load_dsa_private_numbers(numbers) + raise UnsupportedAlgorithm("DSA is not supported by the backend.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def load_dsa_parameter_numbers(self, numbers): + for b in self._filtered_backends(DSABackend): + return b.load_dsa_parameter_numbers(numbers) + raise UnsupportedAlgorithm("DSA is not supported by the backend.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def cmac_algorithm_supported(self, algorithm): + return any( + b.cmac_algorithm_supported(algorithm) + for b in self._filtered_backends(CMACBackend) + ) + + def create_cmac_ctx(self, algorithm): + for b in self._filtered_backends(CMACBackend): + try: + return b.create_cmac_ctx(algorithm) + except UnsupportedAlgorithm: + pass + raise UnsupportedAlgorithm("This backend does not support CMAC.", + _Reasons.UNSUPPORTED_CIPHER) + + def elliptic_curve_supported(self, curve): + return any( + b.elliptic_curve_supported(curve) + for b in self._filtered_backends(EllipticCurveBackend) + ) + + def elliptic_curve_signature_algorithm_supported( + self, signature_algorithm, curve + ): + return any( + b.elliptic_curve_signature_algorithm_supported( + signature_algorithm, curve + ) + for b in self._filtered_backends(EllipticCurveBackend) + ) + + def generate_elliptic_curve_private_key(self, curve): + for b in self._filtered_backends(EllipticCurveBackend): + try: + return b.generate_elliptic_curve_private_key(curve) + except UnsupportedAlgorithm: + continue + + raise UnsupportedAlgorithm( + "This backend does not support this elliptic curve.", + _Reasons.UNSUPPORTED_ELLIPTIC_CURVE + ) + + def load_elliptic_curve_private_numbers(self, numbers): + for b in self._filtered_backends(EllipticCurveBackend): + try: + return b.load_elliptic_curve_private_numbers(numbers) + except UnsupportedAlgorithm: + continue + + raise UnsupportedAlgorithm( + "This backend does not support this elliptic curve.", + _Reasons.UNSUPPORTED_ELLIPTIC_CURVE + ) + + def load_elliptic_curve_public_numbers(self, numbers): + for b in self._filtered_backends(EllipticCurveBackend): + try: + return b.load_elliptic_curve_public_numbers(numbers) + except UnsupportedAlgorithm: + continue + + raise UnsupportedAlgorithm( + "This backend does not support this elliptic curve.", + _Reasons.UNSUPPORTED_ELLIPTIC_CURVE + ) + + def elliptic_curve_exchange_algorithm_supported(self, algorithm, curve): + return any( + b.elliptic_curve_exchange_algorithm_supported(algorithm, curve) + for b in self._filtered_backends(EllipticCurveBackend) + ) + + def load_pem_private_key(self, data, password): + for b in self._filtered_backends(PEMSerializationBackend): + return b.load_pem_private_key(data, password) + + raise UnsupportedAlgorithm( + "This backend does not support this key serialization.", + _Reasons.UNSUPPORTED_SERIALIZATION + ) + + def load_pem_public_key(self, data): + for b in self._filtered_backends(PEMSerializationBackend): + return b.load_pem_public_key(data) + + raise UnsupportedAlgorithm( + "This backend does not support this key serialization.", + _Reasons.UNSUPPORTED_SERIALIZATION + ) + + def load_der_private_key(self, data, password): + for b in self._filtered_backends(DERSerializationBackend): + return b.load_der_private_key(data, password) + + raise UnsupportedAlgorithm( + "This 
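# Usage sketch, not from the patch (assumes the public API of this package
# version): elliptic_curve_supported() above is the query a caller can make
# before asking a backend to generate an EC key.
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import ec

backend = default_backend()
if backend.elliptic_curve_supported(ec.SECP256R1()):
    private_key = ec.generate_private_key(ec.SECP256R1(), backend)
    print(private_key.curve.name)  # "secp256r1"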
backend does not support this key serialization.", + _Reasons.UNSUPPORTED_SERIALIZATION + ) + + def load_der_public_key(self, data): + for b in self._filtered_backends(DERSerializationBackend): + return b.load_der_public_key(data) + + raise UnsupportedAlgorithm( + "This backend does not support this key serialization.", + _Reasons.UNSUPPORTED_SERIALIZATION + ) + + def load_pem_x509_certificate(self, data): + for b in self._filtered_backends(X509Backend): + return b.load_pem_x509_certificate(data) + + raise UnsupportedAlgorithm( + "This backend does not support X.509.", + _Reasons.UNSUPPORTED_X509 + ) + + def load_der_x509_certificate(self, data): + for b in self._filtered_backends(X509Backend): + return b.load_der_x509_certificate(data) + + raise UnsupportedAlgorithm( + "This backend does not support X.509.", + _Reasons.UNSUPPORTED_X509 + ) + + def load_pem_x509_crl(self, data): + for b in self._filtered_backends(X509Backend): + return b.load_pem_x509_crl(data) + + raise UnsupportedAlgorithm( + "This backend does not support X.509.", + _Reasons.UNSUPPORTED_X509 + ) + + def load_der_x509_crl(self, data): + for b in self._filtered_backends(X509Backend): + return b.load_der_x509_crl(data) + + raise UnsupportedAlgorithm( + "This backend does not support X.509.", + _Reasons.UNSUPPORTED_X509 + ) + + def load_der_x509_csr(self, data): + for b in self._filtered_backends(X509Backend): + return b.load_der_x509_csr(data) + + raise UnsupportedAlgorithm( + "This backend does not support X.509.", + _Reasons.UNSUPPORTED_X509 + ) + + def load_pem_x509_csr(self, data): + for b in self._filtered_backends(X509Backend): + return b.load_pem_x509_csr(data) + + raise UnsupportedAlgorithm( + "This backend does not support X.509.", + _Reasons.UNSUPPORTED_X509 + ) + + def create_x509_csr(self, builder, private_key, algorithm): + for b in self._filtered_backends(X509Backend): + return b.create_x509_csr(builder, private_key, algorithm) + + raise UnsupportedAlgorithm( + "This backend does not support X.509.", + _Reasons.UNSUPPORTED_X509 + ) + + def create_x509_certificate(self, builder, private_key, algorithm): + for b in self._filtered_backends(X509Backend): + return b.create_x509_certificate(builder, private_key, algorithm) + + raise UnsupportedAlgorithm( + "This backend does not support X.509.", + _Reasons.UNSUPPORTED_X509 + ) + + def create_x509_crl(self, builder, private_key, algorithm): + for b in self._filtered_backends(X509Backend): + return b.create_x509_crl(builder, private_key, algorithm) + + raise UnsupportedAlgorithm( + "This backend does not support X.509.", + _Reasons.UNSUPPORTED_X509 + ) + + def create_x509_revoked_certificate(self, builder): + for b in self._filtered_backends(X509Backend): + return b.create_x509_revoked_certificate(builder) + + raise UnsupportedAlgorithm( + "This backend does not support X.509.", + _Reasons.UNSUPPORTED_X509 + ) diff --git a/Lib/site-packages/cryptography/hazmat/backends/openssl/__init__.py b/Lib/site-packages/cryptography/hazmat/backends/openssl/__init__.py new file mode 100644 index 0000000..8eadeb6 --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/backends/openssl/__init__.py @@ -0,0 +1,10 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import absolute_import, division, print_function + +from cryptography.hazmat.backends.openssl.backend import backend + + +__all__ = ["backend"] diff --git a/Lib/site-packages/cryptography/hazmat/backends/openssl/backend.py b/Lib/site-packages/cryptography/hazmat/backends/openssl/backend.py new file mode 100644 index 0000000..c21d542 --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/backends/openssl/backend.py @@ -0,0 +1,2299 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import calendar +import collections +import datetime +import itertools +from contextlib import contextmanager + +import idna + +import six + +from cryptography import utils, x509 +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.backends.interfaces import ( + CMACBackend, CipherBackend, DERSerializationBackend, DSABackend, + EllipticCurveBackend, HMACBackend, HashBackend, PBKDF2HMACBackend, + PEMSerializationBackend, RSABackend, X509Backend +) +from cryptography.hazmat.backends.openssl.ciphers import ( + _AESCTRCipherContext, _CipherContext +) +from cryptography.hazmat.backends.openssl.cmac import _CMACContext +from cryptography.hazmat.backends.openssl.dsa import ( + _DSAParameters, _DSAPrivateKey, _DSAPublicKey +) +from cryptography.hazmat.backends.openssl.ec import ( + _EllipticCurvePrivateKey, _EllipticCurvePublicKey +) +from cryptography.hazmat.backends.openssl.hashes import _HashContext +from cryptography.hazmat.backends.openssl.hmac import _HMACContext +from cryptography.hazmat.backends.openssl.rsa import ( + _RSAPrivateKey, _RSAPublicKey +) +from cryptography.hazmat.backends.openssl.x509 import ( + _CRL_ENTRY_REASON_ENUM_TO_CODE, _Certificate, _CertificateRevocationList, + _CertificateSigningRequest, _DISTPOINT_TYPE_FULLNAME, + _DISTPOINT_TYPE_RELATIVENAME, _RevokedCertificate +) +from cryptography.hazmat.bindings._openssl import ffi as _ffi +from cryptography.hazmat.bindings.openssl import binding +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import dsa, ec, rsa +from cryptography.hazmat.primitives.asymmetric.padding import ( + MGF1, OAEP, PKCS1v15, PSS +) +from cryptography.hazmat.primitives.ciphers.algorithms import ( + AES, ARC4, Blowfish, CAST5, Camellia, IDEA, SEED, TripleDES +) +from cryptography.hazmat.primitives.ciphers.modes import ( + CBC, CFB, CFB8, CTR, ECB, GCM, OFB +) +from cryptography.x509.oid import CRLEntryExtensionOID, ExtensionOID, NameOID + + +_MemoryBIO = collections.namedtuple("_MemoryBIO", ["bio", "char_ptr"]) + + +def _encode_asn1_int(backend, x): + """ + Converts a python integer to an ASN1_INTEGER. The returned ASN1_INTEGER + will not be garbage collected (to support adding them to structs that take + ownership of the object). Be sure to register it for GC if it will be + discarded after use. + + """ + # Convert Python integer to OpenSSL "bignum" in case value exceeds + # machine's native integer limits (note: `int_to_bn` doesn't automatically + # GC). + i = backend._int_to_bn(x) + i = backend._ffi.gc(i, backend._lib.BN_free) + + # Wrap in an ASN.1 integer. Don't GC -- as documented. 
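# Illustrative sketch, not from the patch: the ASN1_INTEGER built by
# _encode_asn1_int() is ultimately serialized as a DER INTEGER. A pure-Python
# rendering of that encoding for small non-negative values makes the step
# concrete (short-form lengths only, i.e. fewer than 128 content octets).
def der_integer(value):
    assert 0 <= value
    if value == 0:
        body = b"\x00"
    else:
        octets = []
        n = value
        while n:
            octets.append(n & 0xFF)
            n >>= 8
        body = bytes(bytearray(reversed(octets)))
        if body[0:1] >= b"\x80":
            body = b"\x00" + body  # keep a positive value positive in two's complement
    assert len(body) < 128
    return b"\x02" + bytes(bytearray([len(body)])) + body


assert der_integer(65537) == b"\x02\x03\x01\x00\x01"  # the usual RSA public exponent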
+ i = backend._lib.BN_to_ASN1_INTEGER(i, backend._ffi.NULL) + backend.openssl_assert(i != backend._ffi.NULL) + return i + + +def _encode_asn1_int_gc(backend, x): + i = _encode_asn1_int(backend, x) + i = backend._ffi.gc(i, backend._lib.ASN1_INTEGER_free) + return i + + +def _encode_asn1_str(backend, data, length): + """ + Create an ASN1_OCTET_STRING from a Python byte string. + """ + s = backend._lib.ASN1_OCTET_STRING_new() + res = backend._lib.ASN1_OCTET_STRING_set(s, data, length) + backend.openssl_assert(res == 1) + return s + + +def _encode_asn1_utf8_str(backend, string): + """ + Create an ASN1_UTF8STRING from a Python unicode string. + This object will be an ASN1_STRING with UTF8 type in OpenSSL and + can be decoded with ASN1_STRING_to_UTF8. + """ + s = backend._lib.ASN1_UTF8STRING_new() + res = backend._lib.ASN1_STRING_set( + s, string.encode("utf8"), len(string.encode("utf8")) + ) + backend.openssl_assert(res == 1) + return s + + +def _encode_asn1_str_gc(backend, data, length): + s = _encode_asn1_str(backend, data, length) + s = backend._ffi.gc(s, backend._lib.ASN1_OCTET_STRING_free) + return s + + +def _encode_extension_to_der(backend, i2d_func, value): + pp = backend._ffi.new("unsigned char **") + r = i2d_func(value, pp) + backend.openssl_assert(r > 0) + pp = backend._ffi.gc( + pp, lambda pointer: backend._lib.OPENSSL_free(pointer[0]) + ) + return pp, r + + +def _encode_inhibit_any_policy(backend, inhibit_any_policy): + asn1int = _encode_asn1_int_gc(backend, inhibit_any_policy.skip_certs) + return _encode_extension_to_der( + backend, backend._lib.i2d_ASN1_INTEGER, asn1int + ) + + +def _encode_name(backend, attributes): + """ + The X509_NAME created will not be gc'd. Use _encode_name_gc if needed. + """ + subject = backend._lib.X509_NAME_new() + for attribute in attributes: + value = attribute.value.encode('utf8') + obj = _txt2obj_gc(backend, attribute.oid.dotted_string) + if attribute.oid == NameOID.COUNTRY_NAME: + # Per RFC5280 Appendix A.1 countryName should be encoded as + # PrintableString, not UTF8String + type = backend._lib.MBSTRING_ASC + else: + type = backend._lib.MBSTRING_UTF8 + res = backend._lib.X509_NAME_add_entry_by_OBJ( + subject, obj, type, value, -1, -1, 0, + ) + backend.openssl_assert(res == 1) + return subject + + +def _encode_name_gc(backend, attributes): + subject = _encode_name(backend, attributes) + subject = backend._ffi.gc(subject, backend._lib.X509_NAME_free) + return subject + + +def _encode_crl_number(backend, crl_number): + asn1int = _encode_asn1_int_gc(backend, crl_number.crl_number) + return _encode_extension_to_der( + backend, backend._lib.i2d_ASN1_INTEGER, asn1int + ) + + +def _encode_crl_reason(backend, crl_reason): + asn1enum = backend._lib.ASN1_ENUMERATED_new() + backend.openssl_assert(asn1enum != backend._ffi.NULL) + asn1enum = backend._ffi.gc(asn1enum, backend._lib.ASN1_ENUMERATED_free) + res = backend._lib.ASN1_ENUMERATED_set( + asn1enum, _CRL_ENTRY_REASON_ENUM_TO_CODE[crl_reason.reason] + ) + backend.openssl_assert(res == 1) + + return _encode_extension_to_der( + backend, backend._lib.i2d_ASN1_ENUMERATED, asn1enum + ) + + +def _encode_invalidity_date(backend, invalidity_date): + time = backend._lib.ASN1_GENERALIZEDTIME_set( + backend._ffi.NULL, calendar.timegm( + invalidity_date.invalidity_date.timetuple() + ) + ) + backend.openssl_assert(time != backend._ffi.NULL) + time = backend._ffi.gc(time, backend._lib.ASN1_GENERALIZEDTIME_free) + + return _encode_extension_to_der( + backend, backend._lib.i2d_ASN1_GENERALIZEDTIME, time + ) + + +def 
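# Usage sketch, not from the patch: the attribute list handed to _encode_name()
# comes from an x509.Name; countryName is the one attribute encoded as
# PrintableString (RFC 5280 Appendix A.1), everything else becomes UTF8String.
from cryptography import x509
from cryptography.x509.oid import NameOID

subject = x509.Name([
    x509.NameAttribute(NameOID.COUNTRY_NAME, u"US"),
    x509.NameAttribute(NameOID.ORGANIZATION_NAME, u"Example Org"),
    x509.NameAttribute(NameOID.COMMON_NAME, u"example.com"),
])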
_encode_certificate_policies(backend, certificate_policies): + cp = backend._lib.sk_POLICYINFO_new_null() + backend.openssl_assert(cp != backend._ffi.NULL) + cp = backend._ffi.gc(cp, backend._lib.sk_POLICYINFO_free) + for policy_info in certificate_policies: + pi = backend._lib.POLICYINFO_new() + backend.openssl_assert(pi != backend._ffi.NULL) + res = backend._lib.sk_POLICYINFO_push(cp, pi) + backend.openssl_assert(res >= 1) + oid = _txt2obj(backend, policy_info.policy_identifier.dotted_string) + pi.policyid = oid + if policy_info.policy_qualifiers: + pqis = backend._lib.sk_POLICYQUALINFO_new_null() + backend.openssl_assert(pqis != backend._ffi.NULL) + for qualifier in policy_info.policy_qualifiers: + pqi = backend._lib.POLICYQUALINFO_new() + backend.openssl_assert(pqi != backend._ffi.NULL) + res = backend._lib.sk_POLICYQUALINFO_push(pqis, pqi) + backend.openssl_assert(res >= 1) + if isinstance(qualifier, six.text_type): + pqi.pqualid = _txt2obj( + backend, x509.OID_CPS_QUALIFIER.dotted_string + ) + pqi.d.cpsuri = _encode_asn1_str( + backend, + qualifier.encode("ascii"), + len(qualifier.encode("ascii")) + ) + else: + assert isinstance(qualifier, x509.UserNotice) + pqi.pqualid = _txt2obj( + backend, x509.OID_CPS_USER_NOTICE.dotted_string + ) + un = backend._lib.USERNOTICE_new() + backend.openssl_assert(un != backend._ffi.NULL) + pqi.d.usernotice = un + if qualifier.explicit_text: + un.exptext = _encode_asn1_utf8_str( + backend, qualifier.explicit_text + ) + + un.noticeref = _encode_notice_reference( + backend, qualifier.notice_reference + ) + + pi.qualifiers = pqis + + return _encode_extension_to_der( + backend, backend._lib.i2d_CERTIFICATEPOLICIES, cp + ) + + +def _encode_notice_reference(backend, notice): + if notice is None: + return backend._ffi.NULL + else: + nr = backend._lib.NOTICEREF_new() + backend.openssl_assert(nr != backend._ffi.NULL) + # organization is a required field + nr.organization = _encode_asn1_utf8_str(backend, notice.organization) + + notice_stack = backend._lib.sk_ASN1_INTEGER_new_null() + nr.noticenos = notice_stack + for number in notice.notice_numbers: + num = _encode_asn1_int(backend, number) + res = backend._lib.sk_ASN1_INTEGER_push(notice_stack, num) + backend.openssl_assert(res >= 1) + + return nr + + +def _txt2obj(backend, name): + """ + Converts a Python string with an ASN.1 object ID in dotted form to a + ASN1_OBJECT. + """ + name = name.encode('ascii') + obj = backend._lib.OBJ_txt2obj(name, 1) + backend.openssl_assert(obj != backend._ffi.NULL) + return obj + + +def _txt2obj_gc(backend, name): + obj = _txt2obj(backend, name) + obj = backend._ffi.gc(obj, backend._lib.ASN1_OBJECT_free) + return obj + + +def _encode_ocsp_nocheck(backend, ext): + """ + The OCSP No Check extension is defined as a null ASN.1 value. Rather than + calling OpenSSL we can return a Python bytestring value in a list. 
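# Usage sketch, not from the patch: the two qualifier shapes handled by
# _encode_certificate_policies() above are a plain text CPS URI and an
# x509.UserNotice; the policy OID below is only an example value.
from cryptography import x509

policies = x509.CertificatePolicies([
    x509.PolicyInformation(
        x509.ObjectIdentifier("2.23.140.1.2.1"),
        [
            u"https://example.com/cps",  # text qualifier -> encoded as a CPS URI
            x509.UserNotice(
                x509.NoticeReference(u"Example Org", [1, 2]),
                u"Example explicit text",
            ),
        ],
    ),
])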
+ """ + return [b"\x05\x00"], 2 + + +def _encode_key_usage(backend, key_usage): + set_bit = backend._lib.ASN1_BIT_STRING_set_bit + ku = backend._lib.ASN1_BIT_STRING_new() + ku = backend._ffi.gc(ku, backend._lib.ASN1_BIT_STRING_free) + res = set_bit(ku, 0, key_usage.digital_signature) + backend.openssl_assert(res == 1) + res = set_bit(ku, 1, key_usage.content_commitment) + backend.openssl_assert(res == 1) + res = set_bit(ku, 2, key_usage.key_encipherment) + backend.openssl_assert(res == 1) + res = set_bit(ku, 3, key_usage.data_encipherment) + backend.openssl_assert(res == 1) + res = set_bit(ku, 4, key_usage.key_agreement) + backend.openssl_assert(res == 1) + res = set_bit(ku, 5, key_usage.key_cert_sign) + backend.openssl_assert(res == 1) + res = set_bit(ku, 6, key_usage.crl_sign) + backend.openssl_assert(res == 1) + if key_usage.key_agreement: + res = set_bit(ku, 7, key_usage.encipher_only) + backend.openssl_assert(res == 1) + res = set_bit(ku, 8, key_usage.decipher_only) + backend.openssl_assert(res == 1) + else: + res = set_bit(ku, 7, 0) + backend.openssl_assert(res == 1) + res = set_bit(ku, 8, 0) + backend.openssl_assert(res == 1) + + return _encode_extension_to_der( + backend, backend._lib.i2d_ASN1_BIT_STRING, ku + ) + + +def _encode_authority_key_identifier(backend, authority_keyid): + akid = backend._lib.AUTHORITY_KEYID_new() + backend.openssl_assert(akid != backend._ffi.NULL) + akid = backend._ffi.gc(akid, backend._lib.AUTHORITY_KEYID_free) + if authority_keyid.key_identifier is not None: + akid.keyid = _encode_asn1_str( + backend, + authority_keyid.key_identifier, + len(authority_keyid.key_identifier) + ) + + if authority_keyid.authority_cert_issuer is not None: + akid.issuer = _encode_general_names( + backend, authority_keyid.authority_cert_issuer + ) + + if authority_keyid.authority_cert_serial_number is not None: + akid.serial = _encode_asn1_int( + backend, authority_keyid.authority_cert_serial_number + ) + + return _encode_extension_to_der( + backend, backend._lib.i2d_AUTHORITY_KEYID, akid + ) + + +def _encode_basic_constraints(backend, basic_constraints): + constraints = backend._lib.BASIC_CONSTRAINTS_new() + constraints = backend._ffi.gc( + constraints, backend._lib.BASIC_CONSTRAINTS_free + ) + constraints.ca = 255 if basic_constraints.ca else 0 + if basic_constraints.ca and basic_constraints.path_length is not None: + constraints.pathlen = _encode_asn1_int( + backend, basic_constraints.path_length + ) + + return _encode_extension_to_der( + backend, backend._lib.i2d_BASIC_CONSTRAINTS, constraints + ) + + +def _encode_authority_information_access(backend, authority_info_access): + aia = backend._lib.sk_ACCESS_DESCRIPTION_new_null() + backend.openssl_assert(aia != backend._ffi.NULL) + aia = backend._ffi.gc( + aia, backend._lib.sk_ACCESS_DESCRIPTION_free + ) + for access_description in authority_info_access: + ad = backend._lib.ACCESS_DESCRIPTION_new() + method = _txt2obj( + backend, access_description.access_method.dotted_string + ) + gn = _encode_general_name(backend, access_description.access_location) + ad.method = method + ad.location = gn + res = backend._lib.sk_ACCESS_DESCRIPTION_push(aia, ad) + backend.openssl_assert(res >= 1) + + return _encode_extension_to_der( + backend, backend._lib.i2d_AUTHORITY_INFO_ACCESS, aia + ) + + +def _encode_general_names(backend, names): + general_names = backend._lib.GENERAL_NAMES_new() + backend.openssl_assert(general_names != backend._ffi.NULL) + for name in names: + gn = _encode_general_name(backend, name) + res = 
backend._lib.sk_GENERAL_NAME_push(general_names, gn) + backend.openssl_assert(res != 0) + + return general_names + + +def _encode_alt_name(backend, san): + general_names = _encode_general_names(backend, san) + general_names = backend._ffi.gc( + general_names, backend._lib.GENERAL_NAMES_free + ) + return _encode_extension_to_der( + backend, backend._lib.i2d_GENERAL_NAMES, general_names + ) + + +def _encode_subject_key_identifier(backend, ski): + asn1_str = _encode_asn1_str_gc(backend, ski.digest, len(ski.digest)) + return _encode_extension_to_der( + backend, backend._lib.i2d_ASN1_OCTET_STRING, asn1_str + ) + + +def _encode_general_name(backend, name): + if isinstance(name, x509.DNSName): + gn = backend._lib.GENERAL_NAME_new() + backend.openssl_assert(gn != backend._ffi.NULL) + gn.type = backend._lib.GEN_DNS + + ia5 = backend._lib.ASN1_IA5STRING_new() + backend.openssl_assert(ia5 != backend._ffi.NULL) + + if name.value.startswith(u"*."): + value = b"*." + idna.encode(name.value[2:]) + else: + value = idna.encode(name.value) + + res = backend._lib.ASN1_STRING_set(ia5, value, len(value)) + backend.openssl_assert(res == 1) + gn.d.dNSName = ia5 + elif isinstance(name, x509.RegisteredID): + gn = backend._lib.GENERAL_NAME_new() + backend.openssl_assert(gn != backend._ffi.NULL) + gn.type = backend._lib.GEN_RID + obj = backend._lib.OBJ_txt2obj( + name.value.dotted_string.encode('ascii'), 1 + ) + backend.openssl_assert(obj != backend._ffi.NULL) + gn.d.registeredID = obj + elif isinstance(name, x509.DirectoryName): + gn = backend._lib.GENERAL_NAME_new() + backend.openssl_assert(gn != backend._ffi.NULL) + dir_name = _encode_name(backend, name.value) + gn.type = backend._lib.GEN_DIRNAME + gn.d.directoryName = dir_name + elif isinstance(name, x509.IPAddress): + gn = backend._lib.GENERAL_NAME_new() + backend.openssl_assert(gn != backend._ffi.NULL) + ipaddr = _encode_asn1_str( + backend, name.value.packed, len(name.value.packed) + ) + gn.type = backend._lib.GEN_IPADD + gn.d.iPAddress = ipaddr + elif isinstance(name, x509.OtherName): + gn = backend._lib.GENERAL_NAME_new() + backend.openssl_assert(gn != backend._ffi.NULL) + other_name = backend._lib.OTHERNAME_new() + backend.openssl_assert(other_name != backend._ffi.NULL) + + type_id = backend._lib.OBJ_txt2obj( + name.type_id.dotted_string.encode('ascii'), 1 + ) + backend.openssl_assert(type_id != backend._ffi.NULL) + data = backend._ffi.new("unsigned char[]", name.value) + data_ptr_ptr = backend._ffi.new("unsigned char **") + data_ptr_ptr[0] = data + value = backend._lib.d2i_ASN1_TYPE( + backend._ffi.NULL, data_ptr_ptr, len(name.value) + ) + if value == backend._ffi.NULL: + backend._consume_errors() + raise ValueError("Invalid ASN.1 data") + other_name.type_id = type_id + other_name.value = value + gn.type = backend._lib.GEN_OTHERNAME + gn.d.otherName = other_name + elif isinstance(name, x509.RFC822Name): + gn = backend._lib.GENERAL_NAME_new() + backend.openssl_assert(gn != backend._ffi.NULL) + asn1_str = _encode_asn1_str( + backend, name._encoded, len(name._encoded) + ) + gn.type = backend._lib.GEN_EMAIL + gn.d.rfc822Name = asn1_str + elif isinstance(name, x509.UniformResourceIdentifier): + gn = backend._lib.GENERAL_NAME_new() + backend.openssl_assert(gn != backend._ffi.NULL) + asn1_str = _encode_asn1_str( + backend, name._encoded, len(name._encoded) + ) + gn.type = backend._lib.GEN_URI + gn.d.uniformResourceIdentifier = asn1_str + else: + raise ValueError( + "{0} is an unknown GeneralName type".format(name) + ) + + return gn + + +def 
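# Usage sketch, not from the patch: each entry below exercises one branch of
# _encode_general_name() above (DNS with and without a wildcard, email, URI,
# and an IP address).
import ipaddress

from cryptography import x509

san = x509.SubjectAlternativeName([
    x509.DNSName(u"example.com"),
    x509.DNSName(u"*.example.com"),
    x509.RFC822Name(u"admin@example.com"),
    x509.UniformResourceIdentifier(u"https://example.com"),
    x509.IPAddress(ipaddress.ip_address(u"192.0.2.1")),
])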
_encode_extended_key_usage(backend, extended_key_usage): + eku = backend._lib.sk_ASN1_OBJECT_new_null() + eku = backend._ffi.gc(eku, backend._lib.sk_ASN1_OBJECT_free) + for oid in extended_key_usage: + obj = _txt2obj(backend, oid.dotted_string) + res = backend._lib.sk_ASN1_OBJECT_push(eku, obj) + backend.openssl_assert(res >= 1) + + eku_ptr = backend._ffi.cast("EXTENDED_KEY_USAGE *", eku) + return _encode_extension_to_der( + backend, backend._lib.i2d_EXTENDED_KEY_USAGE, eku_ptr + ) + + +_CRLREASONFLAGS = { + x509.ReasonFlags.key_compromise: 1, + x509.ReasonFlags.ca_compromise: 2, + x509.ReasonFlags.affiliation_changed: 3, + x509.ReasonFlags.superseded: 4, + x509.ReasonFlags.cessation_of_operation: 5, + x509.ReasonFlags.certificate_hold: 6, + x509.ReasonFlags.privilege_withdrawn: 7, + x509.ReasonFlags.aa_compromise: 8, +} + + +def _encode_crl_distribution_points(backend, crl_distribution_points): + cdp = backend._lib.sk_DIST_POINT_new_null() + cdp = backend._ffi.gc(cdp, backend._lib.sk_DIST_POINT_free) + for point in crl_distribution_points: + dp = backend._lib.DIST_POINT_new() + backend.openssl_assert(dp != backend._ffi.NULL) + + if point.reasons: + bitmask = backend._lib.ASN1_BIT_STRING_new() + backend.openssl_assert(bitmask != backend._ffi.NULL) + dp.reasons = bitmask + for reason in point.reasons: + res = backend._lib.ASN1_BIT_STRING_set_bit( + bitmask, _CRLREASONFLAGS[reason], 1 + ) + backend.openssl_assert(res == 1) + + if point.full_name: + dpn = backend._lib.DIST_POINT_NAME_new() + backend.openssl_assert(dpn != backend._ffi.NULL) + dpn.type = _DISTPOINT_TYPE_FULLNAME + dpn.name.fullname = _encode_general_names(backend, point.full_name) + dp.distpoint = dpn + + if point.relative_name: + dpn = backend._lib.DIST_POINT_NAME_new() + backend.openssl_assert(dpn != backend._ffi.NULL) + dpn.type = _DISTPOINT_TYPE_RELATIVENAME + name = _encode_name_gc(backend, point.relative_name) + relativename = backend._lib.sk_X509_NAME_ENTRY_dup(name.entries) + backend.openssl_assert(relativename != backend._ffi.NULL) + dpn.name.relativename = relativename + dp.distpoint = dpn + + if point.crl_issuer: + dp.CRLissuer = _encode_general_names(backend, point.crl_issuer) + + res = backend._lib.sk_DIST_POINT_push(cdp, dp) + backend.openssl_assert(res >= 1) + + return _encode_extension_to_der( + backend, backend._lib.i2d_CRL_DIST_POINTS, cdp + ) + + +def _encode_name_constraints(backend, name_constraints): + nc = backend._lib.NAME_CONSTRAINTS_new() + assert nc != backend._ffi.NULL + nc = backend._ffi.gc(nc, backend._lib.NAME_CONSTRAINTS_free) + permitted = _encode_general_subtree( + backend, name_constraints.permitted_subtrees + ) + nc.permittedSubtrees = permitted + excluded = _encode_general_subtree( + backend, name_constraints.excluded_subtrees + ) + nc.excludedSubtrees = excluded + + return _encode_extension_to_der( + backend, backend._lib.Cryptography_i2d_NAME_CONSTRAINTS, nc + ) + + +def _encode_general_subtree(backend, subtrees): + if subtrees is None: + return backend._ffi.NULL + else: + general_subtrees = backend._lib.sk_GENERAL_SUBTREE_new_null() + for name in subtrees: + gs = backend._lib.GENERAL_SUBTREE_new() + gs.base = _encode_general_name(backend, name) + res = backend._lib.sk_GENERAL_SUBTREE_push(general_subtrees, gs) + assert res >= 1 + + return general_subtrees + + +_EXTENSION_ENCODE_HANDLERS = { + ExtensionOID.BASIC_CONSTRAINTS: _encode_basic_constraints, + ExtensionOID.SUBJECT_KEY_IDENTIFIER: _encode_subject_key_identifier, + ExtensionOID.KEY_USAGE: _encode_key_usage, + 
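# Usage sketch, not from the patch: a distribution point with a full name and a
# reason set, matching the full_name/reasons branches of
# _encode_crl_distribution_points() above.
from cryptography import x509

cdp = x509.CRLDistributionPoints([
    x509.DistributionPoint(
        full_name=[x509.UniformResourceIdentifier(u"http://crl.example.com/ca.crl")],
        relative_name=None,
        reasons=frozenset([x509.ReasonFlags.key_compromise]),
        crl_issuer=None,
    ),
])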
ExtensionOID.SUBJECT_ALTERNATIVE_NAME: _encode_alt_name, + ExtensionOID.ISSUER_ALTERNATIVE_NAME: _encode_alt_name, + ExtensionOID.EXTENDED_KEY_USAGE: _encode_extended_key_usage, + ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _encode_authority_key_identifier, + ExtensionOID.CERTIFICATE_POLICIES: _encode_certificate_policies, + ExtensionOID.AUTHORITY_INFORMATION_ACCESS: ( + _encode_authority_information_access + ), + ExtensionOID.CRL_DISTRIBUTION_POINTS: _encode_crl_distribution_points, + ExtensionOID.INHIBIT_ANY_POLICY: _encode_inhibit_any_policy, + ExtensionOID.OCSP_NO_CHECK: _encode_ocsp_nocheck, + ExtensionOID.NAME_CONSTRAINTS: _encode_name_constraints, +} + +_CRL_EXTENSION_ENCODE_HANDLERS = { + ExtensionOID.ISSUER_ALTERNATIVE_NAME: _encode_alt_name, + ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _encode_authority_key_identifier, + ExtensionOID.AUTHORITY_INFORMATION_ACCESS: ( + _encode_authority_information_access + ), + ExtensionOID.CRL_NUMBER: _encode_crl_number, +} + +_CRL_ENTRY_EXTENSION_ENCODE_HANDLERS = { + CRLEntryExtensionOID.CERTIFICATE_ISSUER: _encode_alt_name, + CRLEntryExtensionOID.CRL_REASON: _encode_crl_reason, + CRLEntryExtensionOID.INVALIDITY_DATE: _encode_invalidity_date, +} + + +class _PasswordUserdata(object): + def __init__(self, password): + self.password = password + self.called = 0 + self.exception = None + + +@binding.ffi_callback("int (char *, int, int, void *)", + name="Cryptography_pem_password_cb") +def _pem_password_cb(buf, size, writing, userdata_handle): + """ + A pem_password_cb function pointer that copied the password to + OpenSSL as required and returns the number of bytes copied. + + typedef int pem_password_cb(char *buf, int size, + int rwflag, void *userdata); + + Useful for decrypting PKCS8 files and so on. + + The userdata pointer must point to a cffi handle of a + _PasswordUserdata instance. + """ + ud = _ffi.from_handle(userdata_handle) + ud.called += 1 + + if not ud.password: + ud.exception = TypeError( + "Password was not given but private key is encrypted." + ) + return -1 + elif len(ud.password) < size: + pw_buf = _ffi.buffer(buf, size) + pw_buf[:len(ud.password)] = ud.password + return len(ud.password) + else: + ud.exception = ValueError( + "Passwords longer than {0} bytes are not supported " + "by this backend.".format(size - 1) + ) + return 0 + + +@utils.register_interface(CipherBackend) +@utils.register_interface(CMACBackend) +@utils.register_interface(DERSerializationBackend) +@utils.register_interface(DSABackend) +@utils.register_interface(EllipticCurveBackend) +@utils.register_interface(HashBackend) +@utils.register_interface(HMACBackend) +@utils.register_interface(PBKDF2HMACBackend) +@utils.register_interface(RSABackend) +@utils.register_interface(PEMSerializationBackend) +@utils.register_interface(X509Backend) +class Backend(object): + """ + OpenSSL API binding interfaces. + """ + name = "openssl" + + def __init__(self): + self._binding = binding.Binding() + self._ffi = self._binding.ffi + self._lib = self._binding.lib + + # Set the default string mask for encoding ASN1 strings to UTF8. This + # is the default for newer OpenSSLs for several years and is + # recommended in RFC 2459. + res = self._lib.ASN1_STRING_set_default_mask_asc(b"utf8only") + self.openssl_assert(res == 1) + + self._cipher_registry = {} + self._register_default_ciphers() + self.activate_osrandom_engine() + + def openssl_assert(self, ok): + return binding._openssl_assert(self._lib, ok) + + def activate_builtin_random(self): + # Obtain a new structural reference. 
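# Usage sketch, not from the patch: loading an encrypted PEM key through the
# public serialization API is what ultimately routes the passphrase through the
# _pem_password_cb callback above. A 1024-bit key keeps the example fast; real
# keys should be larger.
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa

backend = default_backend()
key = rsa.generate_private_key(65537, 1024, backend)
pem = key.private_bytes(
    serialization.Encoding.PEM,
    serialization.PrivateFormat.TraditionalOpenSSL,
    serialization.BestAvailableEncryption(b"passphrase"),
)
reloaded = serialization.load_pem_private_key(pem, b"passphrase", backend)
assert reloaded.key_size == key.key_size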
+ e = self._lib.ENGINE_get_default_RAND() + if e != self._ffi.NULL: + self._lib.ENGINE_unregister_RAND(e) + # Reset the RNG to use the new engine. + self._lib.RAND_cleanup() + # decrement the structural reference from get_default_RAND + res = self._lib.ENGINE_finish(e) + self.openssl_assert(res == 1) + + def activate_osrandom_engine(self): + # Unregister and free the current engine. + self.activate_builtin_random() + # Fetches an engine by id and returns it. This creates a structural + # reference. + e = self._lib.ENGINE_by_id(self._binding._osrandom_engine_id) + self.openssl_assert(e != self._ffi.NULL) + # Initialize the engine for use. This adds a functional reference. + res = self._lib.ENGINE_init(e) + self.openssl_assert(res == 1) + # Set the engine as the default RAND provider. + res = self._lib.ENGINE_set_default_RAND(e) + self.openssl_assert(res == 1) + # Decrement the structural ref incremented by ENGINE_by_id. + res = self._lib.ENGINE_free(e) + self.openssl_assert(res == 1) + # Decrement the functional ref incremented by ENGINE_init. + res = self._lib.ENGINE_finish(e) + self.openssl_assert(res == 1) + # Reset the RNG to use the new engine. + self._lib.RAND_cleanup() + + def openssl_version_text(self): + """ + Friendly string name of the loaded OpenSSL library. This is not + necessarily the same version as it was compiled against. + + Example: OpenSSL 1.0.1e 11 Feb 2013 + """ + return self._ffi.string( + self._lib.SSLeay_version(self._lib.SSLEAY_VERSION) + ).decode("ascii") + + def create_hmac_ctx(self, key, algorithm): + return _HMACContext(self, key, algorithm) + + def hash_supported(self, algorithm): + digest = self._lib.EVP_get_digestbyname(algorithm.name.encode("ascii")) + return digest != self._ffi.NULL + + def hmac_supported(self, algorithm): + return self.hash_supported(algorithm) + + def create_hash_ctx(self, algorithm): + return _HashContext(self, algorithm) + + def cipher_supported(self, cipher, mode): + if self._evp_cipher_supported(cipher, mode): + return True + elif isinstance(mode, CTR) and isinstance(cipher, AES): + return True + else: + return False + + def _evp_cipher_supported(self, cipher, mode): + try: + adapter = self._cipher_registry[type(cipher), type(mode)] + except KeyError: + return False + evp_cipher = adapter(self, cipher, mode) + return self._ffi.NULL != evp_cipher + + def register_cipher_adapter(self, cipher_cls, mode_cls, adapter): + if (cipher_cls, mode_cls) in self._cipher_registry: + raise ValueError("Duplicate registration for: {0} {1}.".format( + cipher_cls, mode_cls) + ) + self._cipher_registry[cipher_cls, mode_cls] = adapter + + def _register_default_ciphers(self): + for mode_cls in [CBC, CTR, ECB, OFB, CFB, CFB8]: + self.register_cipher_adapter( + AES, + mode_cls, + GetCipherByName("{cipher.name}-{cipher.key_size}-{mode.name}") + ) + for mode_cls in [CBC, CTR, ECB, OFB, CFB]: + self.register_cipher_adapter( + Camellia, + mode_cls, + GetCipherByName("{cipher.name}-{cipher.key_size}-{mode.name}") + ) + for mode_cls in [CBC, CFB, CFB8, OFB]: + self.register_cipher_adapter( + TripleDES, + mode_cls, + GetCipherByName("des-ede3-{mode.name}") + ) + self.register_cipher_adapter( + TripleDES, + ECB, + GetCipherByName("des-ede3") + ) + for mode_cls in [CBC, CFB, OFB, ECB]: + self.register_cipher_adapter( + Blowfish, + mode_cls, + GetCipherByName("bf-{mode.name}") + ) + for mode_cls in [CBC, CFB, OFB, ECB]: + self.register_cipher_adapter( + SEED, + mode_cls, + GetCipherByName("seed-{mode.name}") + ) + for cipher_cls, mode_cls in itertools.product( + 
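# Illustrative sketch, not from the patch: the GetCipherByName adapters
# registered above boil down to formatting a template with the cipher and mode
# instances before the resulting name reaches EVP_get_cipherbyname. The classes
# below are hypothetical stand-ins that only carry the attributes the template
# needs.
class _FakeCipher(object):
    name = "aes"
    key_size = 256


class _FakeMode(object):
    name = "cbc"


template = "{cipher.name}-{cipher.key_size}-{mode.name}"
assert template.format(cipher=_FakeCipher(), mode=_FakeMode()) == "aes-256-cbc"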
[CAST5, IDEA], + [CBC, OFB, CFB, ECB], + ): + self.register_cipher_adapter( + cipher_cls, + mode_cls, + GetCipherByName("{cipher.name}-{mode.name}") + ) + self.register_cipher_adapter( + ARC4, + type(None), + GetCipherByName("rc4") + ) + self.register_cipher_adapter( + AES, + GCM, + GetCipherByName("{cipher.name}-{cipher.key_size}-{mode.name}") + ) + + def create_symmetric_encryption_ctx(self, cipher, mode): + if (isinstance(mode, CTR) and isinstance(cipher, AES) and + not self._evp_cipher_supported(cipher, mode)): + # This is needed to provide support for AES CTR mode in OpenSSL + # 0.9.8. It can be removed when we drop 0.9.8 support (RHEL 5 + # extended life ends 2020). + return _AESCTRCipherContext(self, cipher, mode) + else: + return _CipherContext(self, cipher, mode, _CipherContext._ENCRYPT) + + def create_symmetric_decryption_ctx(self, cipher, mode): + if (isinstance(mode, CTR) and isinstance(cipher, AES) and + not self._evp_cipher_supported(cipher, mode)): + # This is needed to provide support for AES CTR mode in OpenSSL + # 0.9.8. It can be removed when we drop 0.9.8 support (RHEL 5 + # extended life ends 2020). + return _AESCTRCipherContext(self, cipher, mode) + else: + return _CipherContext(self, cipher, mode, _CipherContext._DECRYPT) + + def pbkdf2_hmac_supported(self, algorithm): + if self._lib.Cryptography_HAS_PBKDF2_HMAC: + return self.hmac_supported(algorithm) + else: + # OpenSSL < 1.0.0 has an explicit PBKDF2-HMAC-SHA1 function, + # so if the PBKDF2_HMAC function is missing we only support + # SHA1 via PBKDF2_HMAC_SHA1. + return isinstance(algorithm, hashes.SHA1) + + def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations, + key_material): + buf = self._ffi.new("char[]", length) + if self._lib.Cryptography_HAS_PBKDF2_HMAC: + evp_md = self._lib.EVP_get_digestbyname( + algorithm.name.encode("ascii")) + self.openssl_assert(evp_md != self._ffi.NULL) + res = self._lib.PKCS5_PBKDF2_HMAC( + key_material, + len(key_material), + salt, + len(salt), + iterations, + evp_md, + length, + buf + ) + self.openssl_assert(res == 1) + else: + if not isinstance(algorithm, hashes.SHA1): + raise UnsupportedAlgorithm( + "This version of OpenSSL only supports PBKDF2HMAC with " + "SHA1.", + _Reasons.UNSUPPORTED_HASH + ) + res = self._lib.PKCS5_PBKDF2_HMAC_SHA1( + key_material, + len(key_material), + salt, + len(salt), + iterations, + length, + buf + ) + self.openssl_assert(res == 1) + + return self._ffi.buffer(buf)[:] + + def _consume_errors(self): + return binding._consume_errors(self._lib) + + def _bn_to_int(self, bn): + assert bn != self._ffi.NULL + if six.PY3: + # Python 3 has constant time from_bytes, so use that. + bn_num_bytes = self._lib.BN_num_bytes(bn) + bin_ptr = self._ffi.new("unsigned char[]", bn_num_bytes) + bin_len = self._lib.BN_bn2bin(bn, bin_ptr) + # A zero length means the BN has value 0 + self.openssl_assert(bin_len >= 0) + return int.from_bytes(self._ffi.buffer(bin_ptr)[:bin_len], "big") + else: + # Under Python 2 the best we can do is hex() + hex_cdata = self._lib.BN_bn2hex(bn) + self.openssl_assert(hex_cdata != self._ffi.NULL) + hex_str = self._ffi.string(hex_cdata) + self._lib.OPENSSL_free(hex_cdata) + return int(hex_str, 16) + + def _int_to_bn(self, num, bn=None): + """ + Converts a python integer to a BIGNUM. The returned BIGNUM will not + be garbage collected (to support adding them to structs that take + ownership of the object). Be sure to register it for GC if it will + be discarded after use. 
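# Usage sketch, not from the patch: derive_pbkdf2_hmac() above should agree with
# the standard library's PBKDF2 for the same parameters (hashlib.pbkdf2_hmac
# requires Python 2.7.8+ or 3.4+).
import hashlib

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes

backend = default_backend()
derived = backend.derive_pbkdf2_hmac(hashes.SHA256(), 32, b"salt", 1000, b"password")
assert derived == hashlib.pbkdf2_hmac("sha256", b"password", b"salt", 1000, 32)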
+ """ + assert bn is None or bn != self._ffi.NULL + + if bn is None: + bn = self._ffi.NULL + + if six.PY3: + # Python 3 has constant time to_bytes, so use that. + + binary = num.to_bytes(int(num.bit_length() / 8.0 + 1), "big") + bn_ptr = self._lib.BN_bin2bn(binary, len(binary), bn) + self.openssl_assert(bn_ptr != self._ffi.NULL) + return bn_ptr + + else: + # Under Python 2 the best we can do is hex() + + hex_num = hex(num).rstrip("L").lstrip("0x").encode("ascii") or b"0" + bn_ptr = self._ffi.new("BIGNUM **") + bn_ptr[0] = bn + res = self._lib.BN_hex2bn(bn_ptr, hex_num) + self.openssl_assert(res != 0) + self.openssl_assert(bn_ptr[0] != self._ffi.NULL) + return bn_ptr[0] + + def generate_rsa_private_key(self, public_exponent, key_size): + rsa._verify_rsa_parameters(public_exponent, key_size) + + rsa_cdata = self._lib.RSA_new() + self.openssl_assert(rsa_cdata != self._ffi.NULL) + rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) + + bn = self._int_to_bn(public_exponent) + bn = self._ffi.gc(bn, self._lib.BN_free) + + res = self._lib.RSA_generate_key_ex( + rsa_cdata, key_size, bn, self._ffi.NULL + ) + self.openssl_assert(res == 1) + evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata) + + return _RSAPrivateKey(self, rsa_cdata, evp_pkey) + + def generate_rsa_parameters_supported(self, public_exponent, key_size): + return (public_exponent >= 3 and public_exponent & 1 != 0 and + key_size >= 512) + + def load_rsa_private_numbers(self, numbers): + rsa._check_private_key_components( + numbers.p, + numbers.q, + numbers.d, + numbers.dmp1, + numbers.dmq1, + numbers.iqmp, + numbers.public_numbers.e, + numbers.public_numbers.n + ) + rsa_cdata = self._lib.RSA_new() + self.openssl_assert(rsa_cdata != self._ffi.NULL) + rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) + rsa_cdata.p = self._int_to_bn(numbers.p) + rsa_cdata.q = self._int_to_bn(numbers.q) + rsa_cdata.d = self._int_to_bn(numbers.d) + rsa_cdata.dmp1 = self._int_to_bn(numbers.dmp1) + rsa_cdata.dmq1 = self._int_to_bn(numbers.dmq1) + rsa_cdata.iqmp = self._int_to_bn(numbers.iqmp) + rsa_cdata.e = self._int_to_bn(numbers.public_numbers.e) + rsa_cdata.n = self._int_to_bn(numbers.public_numbers.n) + res = self._lib.RSA_blinding_on(rsa_cdata, self._ffi.NULL) + self.openssl_assert(res == 1) + evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata) + + return _RSAPrivateKey(self, rsa_cdata, evp_pkey) + + def load_rsa_public_numbers(self, numbers): + rsa._check_public_key_components(numbers.e, numbers.n) + rsa_cdata = self._lib.RSA_new() + self.openssl_assert(rsa_cdata != self._ffi.NULL) + rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) + rsa_cdata.e = self._int_to_bn(numbers.e) + rsa_cdata.n = self._int_to_bn(numbers.n) + res = self._lib.RSA_blinding_on(rsa_cdata, self._ffi.NULL) + self.openssl_assert(res == 1) + evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata) + + return _RSAPublicKey(self, rsa_cdata, evp_pkey) + + def _create_evp_pkey_gc(self): + evp_pkey = self._lib.EVP_PKEY_new() + self.openssl_assert(evp_pkey != self._ffi.NULL) + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + return evp_pkey + + def _rsa_cdata_to_evp_pkey(self, rsa_cdata): + evp_pkey = self._create_evp_pkey_gc() + res = self._lib.EVP_PKEY_set1_RSA(evp_pkey, rsa_cdata) + self.openssl_assert(res == 1) + return evp_pkey + + def _bytes_to_bio(self, data): + """ + Return a _MemoryBIO namedtuple of (BIO, char*). + + The char* is the storage for the BIO and it must stay alive until the + BIO is finished with. 
+ """ + data_char_p = self._ffi.new("char[]", data) + bio = self._lib.BIO_new_mem_buf( + data_char_p, len(data) + ) + self.openssl_assert(bio != self._ffi.NULL) + + return _MemoryBIO(self._ffi.gc(bio, self._lib.BIO_free), data_char_p) + + def _create_mem_bio_gc(self): + """ + Creates an empty memory BIO. + """ + bio_method = self._lib.BIO_s_mem() + self.openssl_assert(bio_method != self._ffi.NULL) + bio = self._lib.BIO_new(bio_method) + self.openssl_assert(bio != self._ffi.NULL) + bio = self._ffi.gc(bio, self._lib.BIO_free) + return bio + + def _read_mem_bio(self, bio): + """ + Reads a memory BIO. This only works on memory BIOs. + """ + buf = self._ffi.new("char **") + buf_len = self._lib.BIO_get_mem_data(bio, buf) + self.openssl_assert(buf_len > 0) + self.openssl_assert(buf[0] != self._ffi.NULL) + bio_data = self._ffi.buffer(buf[0], buf_len)[:] + return bio_data + + def _evp_pkey_to_private_key(self, evp_pkey): + """ + Return the appropriate type of PrivateKey given an evp_pkey cdata + pointer. + """ + + key_type = self._lib.Cryptography_EVP_PKEY_id(evp_pkey) + + if key_type == self._lib.EVP_PKEY_RSA: + rsa_cdata = self._lib.EVP_PKEY_get1_RSA(evp_pkey) + self.openssl_assert(rsa_cdata != self._ffi.NULL) + rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) + return _RSAPrivateKey(self, rsa_cdata, evp_pkey) + elif key_type == self._lib.EVP_PKEY_DSA: + dsa_cdata = self._lib.EVP_PKEY_get1_DSA(evp_pkey) + self.openssl_assert(dsa_cdata != self._ffi.NULL) + dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free) + return _DSAPrivateKey(self, dsa_cdata, evp_pkey) + elif (self._lib.Cryptography_HAS_EC == 1 and + key_type == self._lib.EVP_PKEY_EC): + ec_cdata = self._lib.EVP_PKEY_get1_EC_KEY(evp_pkey) + self.openssl_assert(ec_cdata != self._ffi.NULL) + ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free) + return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey) + else: + raise UnsupportedAlgorithm("Unsupported key type.") + + def _evp_pkey_to_public_key(self, evp_pkey): + """ + Return the appropriate type of PublicKey given an evp_pkey cdata + pointer. + """ + + key_type = self._lib.Cryptography_EVP_PKEY_id(evp_pkey) + + if key_type == self._lib.EVP_PKEY_RSA: + rsa_cdata = self._lib.EVP_PKEY_get1_RSA(evp_pkey) + self.openssl_assert(rsa_cdata != self._ffi.NULL) + rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) + return _RSAPublicKey(self, rsa_cdata, evp_pkey) + elif key_type == self._lib.EVP_PKEY_DSA: + dsa_cdata = self._lib.EVP_PKEY_get1_DSA(evp_pkey) + self.openssl_assert(dsa_cdata != self._ffi.NULL) + dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free) + return _DSAPublicKey(self, dsa_cdata, evp_pkey) + elif (self._lib.Cryptography_HAS_EC == 1 and + key_type == self._lib.EVP_PKEY_EC): + ec_cdata = self._lib.EVP_PKEY_get1_EC_KEY(evp_pkey) + self.openssl_assert(ec_cdata != self._ffi.NULL) + ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free) + return _EllipticCurvePublicKey(self, ec_cdata, evp_pkey) + else: + raise UnsupportedAlgorithm("Unsupported key type.") + + def _pem_password_cb(self, password): + """ + Generate a pem_password_cb function pointer that copied the password to + OpenSSL as required and returns the number of bytes copied. + + typedef int pem_password_cb(char *buf, int size, + int rwflag, void *userdata); + + Useful for decrypting PKCS8 files and so on. + + Returns a tuple of (cdata function pointer, userdata). 
+ """ + # Forward compatibility for new static callbacks: + # _pem_password_cb is not a nested function because closures don't + # work well with static callbacks. Static callbacks are registered + # globally. The backend is passed in as userdata argument. + + userdata = _PasswordUserdata(password=password) + return _pem_password_cb, userdata + + def _mgf1_hash_supported(self, algorithm): + if self._lib.Cryptography_HAS_MGF1_MD: + return self.hash_supported(algorithm) + else: + return isinstance(algorithm, hashes.SHA1) + + def rsa_padding_supported(self, padding): + if isinstance(padding, PKCS1v15): + return True + elif isinstance(padding, PSS) and isinstance(padding._mgf, MGF1): + return self._mgf1_hash_supported(padding._mgf._algorithm) + elif isinstance(padding, OAEP) and isinstance(padding._mgf, MGF1): + return isinstance(padding._mgf._algorithm, hashes.SHA1) + else: + return False + + def generate_dsa_parameters(self, key_size): + if key_size not in (1024, 2048, 3072): + raise ValueError("Key size must be 1024 or 2048 or 3072 bits.") + + if (self._lib.OPENSSL_VERSION_NUMBER < 0x1000000f and + key_size > 1024): + raise ValueError( + "Key size must be 1024 because OpenSSL < 1.0.0 doesn't " + "support larger key sizes.") + + ctx = self._lib.DSA_new() + self.openssl_assert(ctx != self._ffi.NULL) + ctx = self._ffi.gc(ctx, self._lib.DSA_free) + + res = self._lib.DSA_generate_parameters_ex( + ctx, key_size, self._ffi.NULL, 0, + self._ffi.NULL, self._ffi.NULL, self._ffi.NULL + ) + + self.openssl_assert(res == 1) + + return _DSAParameters(self, ctx) + + def generate_dsa_private_key(self, parameters): + ctx = self._lib.DSA_new() + self.openssl_assert(ctx != self._ffi.NULL) + ctx = self._ffi.gc(ctx, self._lib.DSA_free) + ctx.p = self._lib.BN_dup(parameters._dsa_cdata.p) + ctx.q = self._lib.BN_dup(parameters._dsa_cdata.q) + ctx.g = self._lib.BN_dup(parameters._dsa_cdata.g) + + self._lib.DSA_generate_key(ctx) + evp_pkey = self._dsa_cdata_to_evp_pkey(ctx) + + return _DSAPrivateKey(self, ctx, evp_pkey) + + def generate_dsa_private_key_and_parameters(self, key_size): + parameters = self.generate_dsa_parameters(key_size) + return self.generate_dsa_private_key(parameters) + + def load_dsa_private_numbers(self, numbers): + dsa._check_dsa_private_numbers(numbers) + parameter_numbers = numbers.public_numbers.parameter_numbers + + dsa_cdata = self._lib.DSA_new() + self.openssl_assert(dsa_cdata != self._ffi.NULL) + dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free) + + dsa_cdata.p = self._int_to_bn(parameter_numbers.p) + dsa_cdata.q = self._int_to_bn(parameter_numbers.q) + dsa_cdata.g = self._int_to_bn(parameter_numbers.g) + dsa_cdata.pub_key = self._int_to_bn(numbers.public_numbers.y) + dsa_cdata.priv_key = self._int_to_bn(numbers.x) + + evp_pkey = self._dsa_cdata_to_evp_pkey(dsa_cdata) + + return _DSAPrivateKey(self, dsa_cdata, evp_pkey) + + def load_dsa_public_numbers(self, numbers): + dsa._check_dsa_parameters(numbers.parameter_numbers) + dsa_cdata = self._lib.DSA_new() + self.openssl_assert(dsa_cdata != self._ffi.NULL) + dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free) + + dsa_cdata.p = self._int_to_bn(numbers.parameter_numbers.p) + dsa_cdata.q = self._int_to_bn(numbers.parameter_numbers.q) + dsa_cdata.g = self._int_to_bn(numbers.parameter_numbers.g) + dsa_cdata.pub_key = self._int_to_bn(numbers.y) + + evp_pkey = self._dsa_cdata_to_evp_pkey(dsa_cdata) + + return _DSAPublicKey(self, dsa_cdata, evp_pkey) + + def load_dsa_parameter_numbers(self, numbers): + dsa._check_dsa_parameters(numbers) + 
dsa_cdata = self._lib.DSA_new() + self.openssl_assert(dsa_cdata != self._ffi.NULL) + dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free) + + dsa_cdata.p = self._int_to_bn(numbers.p) + dsa_cdata.q = self._int_to_bn(numbers.q) + dsa_cdata.g = self._int_to_bn(numbers.g) + + return _DSAParameters(self, dsa_cdata) + + def _dsa_cdata_to_evp_pkey(self, dsa_cdata): + evp_pkey = self._create_evp_pkey_gc() + res = self._lib.EVP_PKEY_set1_DSA(evp_pkey, dsa_cdata) + self.openssl_assert(res == 1) + return evp_pkey + + def dsa_hash_supported(self, algorithm): + if self._lib.OPENSSL_VERSION_NUMBER < 0x1000000f: + return isinstance(algorithm, hashes.SHA1) + else: + return self.hash_supported(algorithm) + + def dsa_parameters_supported(self, p, q, g): + if self._lib.OPENSSL_VERSION_NUMBER < 0x1000000f: + return utils.bit_length(p) <= 1024 and utils.bit_length(q) <= 160 + else: + return True + + def cmac_algorithm_supported(self, algorithm): + return ( + self._lib.Cryptography_HAS_CMAC == 1 and + self.cipher_supported( + algorithm, CBC(b"\x00" * algorithm.block_size) + ) + ) + + def create_cmac_ctx(self, algorithm): + return _CMACContext(self, algorithm) + + def create_x509_csr(self, builder, private_key, algorithm): + if not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError('Algorithm must be a registered hash algorithm.') + + if self._lib.OPENSSL_VERSION_NUMBER <= 0x10001000: + if isinstance(private_key, _DSAPrivateKey): + raise NotImplementedError( + "Certificate signing requests aren't implemented for DSA" + " keys on OpenSSL versions less than 1.0.1." + ) + if isinstance(private_key, _EllipticCurvePrivateKey): + raise NotImplementedError( + "Certificate signing requests aren't implemented for EC" + " keys on OpenSSL versions less than 1.0.1." + ) + + # Resolve the signature algorithm. + evp_md = self._lib.EVP_get_digestbyname( + algorithm.name.encode('ascii') + ) + self.openssl_assert(evp_md != self._ffi.NULL) + + # Create an empty request. + x509_req = self._lib.X509_REQ_new() + self.openssl_assert(x509_req != self._ffi.NULL) + x509_req = self._ffi.gc(x509_req, self._lib.X509_REQ_free) + + # Set x509 version. + res = self._lib.X509_REQ_set_version(x509_req, x509.Version.v1.value) + self.openssl_assert(res == 1) + + # Set subject name. + res = self._lib.X509_REQ_set_subject_name( + x509_req, _encode_name_gc(self, builder._subject_name) + ) + self.openssl_assert(res == 1) + + # Set subject public key. + public_key = private_key.public_key() + res = self._lib.X509_REQ_set_pubkey( + x509_req, public_key._evp_pkey + ) + self.openssl_assert(res == 1) + + # Add extensions. + sk_extension = self._lib.sk_X509_EXTENSION_new_null() + self.openssl_assert(sk_extension != self._ffi.NULL) + sk_extension = self._ffi.gc( + sk_extension, self._lib.sk_X509_EXTENSION_free + ) + # gc is not necessary for CSRs, as sk_X509_EXTENSION_free + # will release all the X509_EXTENSIONs. + self._create_x509_extensions( + extensions=builder._extensions, + handlers=_EXTENSION_ENCODE_HANDLERS, + x509_obj=sk_extension, + add_func=self._lib.sk_X509_EXTENSION_insert, + gc=False + ) + res = self._lib.X509_REQ_add_extensions(x509_req, sk_extension) + self.openssl_assert(res == 1) + + # Sign the request using the requester's private key. 
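# Usage sketch, not from the patch: the public builder path into
# create_x509_csr() above (assumes the CertificateSigningRequestBuilder API of
# this package version).
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.x509.oid import NameOID

backend = default_backend()
key = rsa.generate_private_key(65537, 2048, backend)
csr = x509.CertificateSigningRequestBuilder().subject_name(
    x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, u"example.com")])
).add_extension(
    x509.SubjectAlternativeName([x509.DNSName(u"example.com")]), critical=False
).sign(key, hashes.SHA256(), backend)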
+ res = self._lib.X509_REQ_sign( + x509_req, private_key._evp_pkey, evp_md + ) + if res == 0: + errors = self._consume_errors() + self.openssl_assert(errors[0][1] == self._lib.ERR_LIB_RSA) + self.openssl_assert( + errors[0][3] == self._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY + ) + raise ValueError("Digest too big for RSA key") + + return _CertificateSigningRequest(self, x509_req) + + def create_x509_certificate(self, builder, private_key, algorithm): + if not isinstance(builder, x509.CertificateBuilder): + raise TypeError('Builder type mismatch.') + if not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError('Algorithm must be a registered hash algorithm.') + + if self._lib.OPENSSL_VERSION_NUMBER <= 0x10001000: + if isinstance(private_key, _DSAPrivateKey): + raise NotImplementedError( + "Certificate signatures aren't implemented for DSA" + " keys on OpenSSL versions less than 1.0.1." + ) + if isinstance(private_key, _EllipticCurvePrivateKey): + raise NotImplementedError( + "Certificate signatures aren't implemented for EC" + " keys on OpenSSL versions less than 1.0.1." + ) + + # Resolve the signature algorithm. + evp_md = self._lib.EVP_get_digestbyname( + algorithm.name.encode('ascii') + ) + self.openssl_assert(evp_md != self._ffi.NULL) + + # Create an empty certificate. + x509_cert = self._lib.X509_new() + x509_cert = self._ffi.gc(x509_cert, backend._lib.X509_free) + + # Set the x509 version. + res = self._lib.X509_set_version(x509_cert, builder._version.value) + self.openssl_assert(res == 1) + + # Set the subject's name. + res = self._lib.X509_set_subject_name( + x509_cert, _encode_name_gc(self, list(builder._subject_name)) + ) + self.openssl_assert(res == 1) + + # Set the subject's public key. + res = self._lib.X509_set_pubkey( + x509_cert, builder._public_key._evp_pkey + ) + self.openssl_assert(res == 1) + + # Set the certificate serial number. + serial_number = _encode_asn1_int_gc(self, builder._serial_number) + res = self._lib.X509_set_serialNumber(x509_cert, serial_number) + self.openssl_assert(res == 1) + + # Set the "not before" time. + res = self._lib.ASN1_TIME_set( + self._lib.X509_get_notBefore(x509_cert), + calendar.timegm(builder._not_valid_before.timetuple()) + ) + self.openssl_assert(res != self._ffi.NULL) + + # Set the "not after" time. + res = self._lib.ASN1_TIME_set( + self._lib.X509_get_notAfter(x509_cert), + calendar.timegm(builder._not_valid_after.timetuple()) + ) + self.openssl_assert(res != self._ffi.NULL) + + # Add extensions. + self._create_x509_extensions( + extensions=builder._extensions, + handlers=_EXTENSION_ENCODE_HANDLERS, + x509_obj=x509_cert, + add_func=self._lib.X509_add_ext, + gc=True + ) + + # Set the issuer name. + res = self._lib.X509_set_issuer_name( + x509_cert, _encode_name_gc(self, list(builder._issuer_name)) + ) + self.openssl_assert(res == 1) + + # Sign the certificate with the issuer's private key. 
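# Usage sketch, not from the patch: a self-signed certificate built through the
# public CertificateBuilder API, which ends up in create_x509_certificate() above.
import datetime

from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.x509.oid import NameOID

backend = default_backend()
key = rsa.generate_private_key(65537, 2048, backend)
name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, u"example.com")])
cert = x509.CertificateBuilder().subject_name(name).issuer_name(name).public_key(
    key.public_key()
).serial_number(1000).not_valid_before(
    datetime.datetime.utcnow()
).not_valid_after(
    datetime.datetime.utcnow() + datetime.timedelta(days=30)
).add_extension(
    x509.BasicConstraints(ca=True, path_length=None), critical=True
).sign(key, hashes.SHA256(), backend)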
+ res = self._lib.X509_sign( + x509_cert, private_key._evp_pkey, evp_md + ) + if res == 0: + errors = self._consume_errors() + self.openssl_assert(errors[0][1] == self._lib.ERR_LIB_RSA) + self.openssl_assert( + errors[0][3] == self._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY + ) + raise ValueError("Digest too big for RSA key") + + return _Certificate(self, x509_cert) + + def create_x509_crl(self, builder, private_key, algorithm): + if not isinstance(builder, x509.CertificateRevocationListBuilder): + raise TypeError('Builder type mismatch.') + if not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError('Algorithm must be a registered hash algorithm.') + + if self._lib.OPENSSL_VERSION_NUMBER <= 0x10001000: + if isinstance(private_key, _DSAPrivateKey): + raise NotImplementedError( + "CRL signatures aren't implemented for DSA" + " keys on OpenSSL versions less than 1.0.1." + ) + if isinstance(private_key, _EllipticCurvePrivateKey): + raise NotImplementedError( + "CRL signatures aren't implemented for EC" + " keys on OpenSSL versions less than 1.0.1." + ) + + evp_md = self._lib.EVP_get_digestbyname( + algorithm.name.encode('ascii') + ) + self.openssl_assert(evp_md != self._ffi.NULL) + + # Create an empty CRL. + x509_crl = self._lib.X509_CRL_new() + x509_crl = self._ffi.gc(x509_crl, backend._lib.X509_CRL_free) + + # Set the x509 CRL version. We only support v2 (integer value 1). + res = self._lib.X509_CRL_set_version(x509_crl, 1) + self.openssl_assert(res == 1) + + # Set the issuer name. + res = self._lib.X509_CRL_set_issuer_name( + x509_crl, _encode_name_gc(self, list(builder._issuer_name)) + ) + self.openssl_assert(res == 1) + + # Set the last update time. + last_update = self._lib.ASN1_TIME_set( + self._ffi.NULL, calendar.timegm(builder._last_update.timetuple()) + ) + self.openssl_assert(last_update != self._ffi.NULL) + last_update = self._ffi.gc(last_update, self._lib.ASN1_TIME_free) + res = self._lib.X509_CRL_set_lastUpdate(x509_crl, last_update) + self.openssl_assert(res == 1) + + # Set the next update time. + next_update = self._lib.ASN1_TIME_set( + self._ffi.NULL, calendar.timegm(builder._next_update.timetuple()) + ) + self.openssl_assert(next_update != self._ffi.NULL) + next_update = self._ffi.gc(next_update, self._lib.ASN1_TIME_free) + res = self._lib.X509_CRL_set_nextUpdate(x509_crl, next_update) + self.openssl_assert(res == 1) + + # Add extensions. + self._create_x509_extensions( + extensions=builder._extensions, + handlers=_CRL_EXTENSION_ENCODE_HANDLERS, + x509_obj=x509_crl, + add_func=self._lib.X509_CRL_add_ext, + gc=True + ) + + # add revoked certificates + for revoked_cert in builder._revoked_certificates: + # Duplicating because the X509_CRL takes ownership and will free + # this memory when X509_CRL_free is called. 
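# Usage sketch, not from the patch: the CertificateRevocationListBuilder front
# end to create_x509_crl() above; a revoked entry is built separately (see
# create_x509_revoked_certificate) and attached before signing.
import datetime

from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.x509.oid import NameOID

backend = default_backend()
key = rsa.generate_private_key(65537, 2048, backend)
revoked = x509.RevokedCertificateBuilder().serial_number(333).revocation_date(
    datetime.datetime.utcnow()
).build(backend)
crl = x509.CertificateRevocationListBuilder().issuer_name(
    x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, u"Example CA")])
).last_update(datetime.datetime.utcnow()).next_update(
    datetime.datetime.utcnow() + datetime.timedelta(days=1)
).add_revoked_certificate(revoked).sign(key, hashes.SHA256(), backend)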
+ revoked = self._lib.Cryptography_X509_REVOKED_dup( + revoked_cert._x509_revoked + ) + self.openssl_assert(revoked != self._ffi.NULL) + res = self._lib.X509_CRL_add0_revoked(x509_crl, revoked) + self.openssl_assert(res == 1) + + res = self._lib.X509_CRL_sign( + x509_crl, private_key._evp_pkey, evp_md + ) + if res == 0: + errors = self._consume_errors() + self.openssl_assert(errors[0][1] == self._lib.ERR_LIB_RSA) + self.openssl_assert( + errors[0][3] == self._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY + ) + raise ValueError("Digest too big for RSA key") + + return _CertificateRevocationList(self, x509_crl) + + def _create_x509_extensions(self, extensions, handlers, x509_obj, + add_func, gc): + for i, extension in enumerate(extensions): + try: + encode = handlers[extension.oid] + except KeyError: + raise NotImplementedError( + 'Extension not supported: {0}'.format(extension.oid) + ) + + pp, r = encode(self, extension.value) + obj = _txt2obj_gc(self, extension.oid.dotted_string) + x509_extension = self._lib.X509_EXTENSION_create_by_OBJ( + self._ffi.NULL, + obj, + 1 if extension.critical else 0, + _encode_asn1_str_gc(self, pp[0], r) + ) + self.openssl_assert(x509_extension != self._ffi.NULL) + if gc: + x509_extension = self._ffi.gc( + x509_extension, self._lib.X509_EXTENSION_free + ) + res = add_func(x509_obj, x509_extension, i) + self.openssl_assert(res >= 1) + + def create_x509_revoked_certificate(self, builder): + if not isinstance(builder, x509.RevokedCertificateBuilder): + raise TypeError('Builder type mismatch.') + + x509_revoked = self._lib.X509_REVOKED_new() + self.openssl_assert(x509_revoked != self._ffi.NULL) + x509_revoked = self._ffi.gc(x509_revoked, self._lib.X509_REVOKED_free) + serial_number = _encode_asn1_int_gc(self, builder._serial_number) + res = self._lib.X509_REVOKED_set_serialNumber( + x509_revoked, serial_number + ) + self.openssl_assert(res == 1) + res = self._lib.ASN1_TIME_set( + x509_revoked.revocationDate, + calendar.timegm(builder._revocation_date.timetuple()) + ) + self.openssl_assert(res != self._ffi.NULL) + # add CRL entry extensions + self._create_x509_extensions( + extensions=builder._extensions, + handlers=_CRL_ENTRY_EXTENSION_ENCODE_HANDLERS, + x509_obj=x509_revoked, + add_func=self._lib.X509_REVOKED_add_ext, + gc=True + ) + return _RevokedCertificate(self, None, x509_revoked) + + def load_pem_private_key(self, data, password): + return self._load_key( + self._lib.PEM_read_bio_PrivateKey, + self._evp_pkey_to_private_key, + data, + password, + ) + + def load_pem_public_key(self, data): + mem_bio = self._bytes_to_bio(data) + evp_pkey = self._lib.PEM_read_bio_PUBKEY( + mem_bio.bio, self._ffi.NULL, self._ffi.NULL, self._ffi.NULL + ) + if evp_pkey != self._ffi.NULL: + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + return self._evp_pkey_to_public_key(evp_pkey) + else: + # It's not a (RSA/DSA/ECDSA) subjectPublicKeyInfo, but we still + # need to check to see if it is a pure PKCS1 RSA public key (not + # embedded in a subjectPublicKeyInfo) + self._consume_errors() + res = self._lib.BIO_reset(mem_bio.bio) + self.openssl_assert(res == 1) + rsa_cdata = self._lib.PEM_read_bio_RSAPublicKey( + mem_bio.bio, self._ffi.NULL, self._ffi.NULL, self._ffi.NULL + ) + if rsa_cdata != self._ffi.NULL: + rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) + evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata) + return _RSAPublicKey(self, rsa_cdata, evp_pkey) + else: + self._handle_key_loading_error() + + def load_der_private_key(self, data, password): + # OpenSSL has a 
function called d2i_AutoPrivateKey that can simplify + # this. Unfortunately it doesn't properly support PKCS8 on OpenSSL + # 0.9.8 so we can't use it. Instead we sequentially try to load it 3 + # different ways. First we'll try to load it as a traditional key + bio_data = self._bytes_to_bio(data) + key = self._evp_pkey_from_der_traditional_key(bio_data, password) + if not key: + # Okay so it's not a traditional key. Let's try + # PKCS8 unencrypted. OpenSSL 0.9.8 can't load unencrypted + # PKCS8 keys using d2i_PKCS8PrivateKey_bio so we do this instead. + # Reset the memory BIO so we can read the data again. + res = self._lib.BIO_reset(bio_data.bio) + self.openssl_assert(res == 1) + key = self._evp_pkey_from_der_unencrypted_pkcs8(bio_data, password) + + if key: + return self._evp_pkey_to_private_key(key) + else: + # Finally we try to load it with the method that handles encrypted + # PKCS8 properly. + return self._load_key( + self._lib.d2i_PKCS8PrivateKey_bio, + self._evp_pkey_to_private_key, + data, + password, + ) + + def _evp_pkey_from_der_traditional_key(self, bio_data, password): + key = self._lib.d2i_PrivateKey_bio(bio_data.bio, self._ffi.NULL) + if key != self._ffi.NULL: + key = self._ffi.gc(key, self._lib.EVP_PKEY_free) + if password is not None: + raise TypeError( + "Password was given but private key is not encrypted." + ) + + return key + else: + self._consume_errors() + return None + + def _evp_pkey_from_der_unencrypted_pkcs8(self, bio_data, password): + info = self._lib.d2i_PKCS8_PRIV_KEY_INFO_bio( + bio_data.bio, self._ffi.NULL + ) + info = self._ffi.gc(info, self._lib.PKCS8_PRIV_KEY_INFO_free) + if info != self._ffi.NULL: + key = self._lib.EVP_PKCS82PKEY(info) + self.openssl_assert(key != self._ffi.NULL) + key = self._ffi.gc(key, self._lib.EVP_PKEY_free) + if password is not None: + raise TypeError( + "Password was given but private key is not encrypted." 
+ ) + return key + else: + self._consume_errors() + return None + + def load_der_public_key(self, data): + mem_bio = self._bytes_to_bio(data) + evp_pkey = self._lib.d2i_PUBKEY_bio(mem_bio.bio, self._ffi.NULL) + if evp_pkey != self._ffi.NULL: + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + return self._evp_pkey_to_public_key(evp_pkey) + else: + # It's not a (RSA/DSA/ECDSA) subjectPublicKeyInfo, but we still + # need to check to see if it is a pure PKCS1 RSA public key (not + # embedded in a subjectPublicKeyInfo) + self._consume_errors() + res = self._lib.BIO_reset(mem_bio.bio) + self.openssl_assert(res == 1) + rsa_cdata = self._lib.d2i_RSAPublicKey_bio( + mem_bio.bio, self._ffi.NULL + ) + if rsa_cdata != self._ffi.NULL: + rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) + evp_pkey = self._rsa_cdata_to_evp_pkey(rsa_cdata) + return _RSAPublicKey(self, rsa_cdata, evp_pkey) + else: + self._handle_key_loading_error() + + def load_pem_x509_certificate(self, data): + mem_bio = self._bytes_to_bio(data) + x509 = self._lib.PEM_read_bio_X509( + mem_bio.bio, self._ffi.NULL, self._ffi.NULL, self._ffi.NULL + ) + if x509 == self._ffi.NULL: + self._consume_errors() + raise ValueError("Unable to load certificate") + + x509 = self._ffi.gc(x509, self._lib.X509_free) + return _Certificate(self, x509) + + def load_der_x509_certificate(self, data): + mem_bio = self._bytes_to_bio(data) + x509 = self._lib.d2i_X509_bio(mem_bio.bio, self._ffi.NULL) + if x509 == self._ffi.NULL: + self._consume_errors() + raise ValueError("Unable to load certificate") + + x509 = self._ffi.gc(x509, self._lib.X509_free) + return _Certificate(self, x509) + + def load_pem_x509_crl(self, data): + mem_bio = self._bytes_to_bio(data) + x509_crl = self._lib.PEM_read_bio_X509_CRL( + mem_bio.bio, self._ffi.NULL, self._ffi.NULL, self._ffi.NULL + ) + if x509_crl == self._ffi.NULL: + self._consume_errors() + raise ValueError("Unable to load CRL") + + x509_crl = self._ffi.gc(x509_crl, self._lib.X509_CRL_free) + return _CertificateRevocationList(self, x509_crl) + + def load_der_x509_crl(self, data): + mem_bio = self._bytes_to_bio(data) + x509_crl = self._lib.d2i_X509_CRL_bio(mem_bio.bio, self._ffi.NULL) + if x509_crl == self._ffi.NULL: + self._consume_errors() + raise ValueError("Unable to load CRL") + + x509_crl = self._ffi.gc(x509_crl, self._lib.X509_CRL_free) + return _CertificateRevocationList(self, x509_crl) + + def load_pem_x509_csr(self, data): + mem_bio = self._bytes_to_bio(data) + x509_req = self._lib.PEM_read_bio_X509_REQ( + mem_bio.bio, self._ffi.NULL, self._ffi.NULL, self._ffi.NULL + ) + if x509_req == self._ffi.NULL: + self._consume_errors() + raise ValueError("Unable to load request") + + x509_req = self._ffi.gc(x509_req, self._lib.X509_REQ_free) + return _CertificateSigningRequest(self, x509_req) + + def load_der_x509_csr(self, data): + mem_bio = self._bytes_to_bio(data) + x509_req = self._lib.d2i_X509_REQ_bio(mem_bio.bio, self._ffi.NULL) + if x509_req == self._ffi.NULL: + self._consume_errors() + raise ValueError("Unable to load request") + + x509_req = self._ffi.gc(x509_req, self._lib.X509_REQ_free) + return _CertificateSigningRequest(self, x509_req) + + def _load_key(self, openssl_read_func, convert_func, data, password): + mem_bio = self._bytes_to_bio(data) + + password_cb, userdata = self._pem_password_cb(password) + userdata_handle = self._ffi.new_handle(userdata) + + evp_pkey = openssl_read_func( + mem_bio.bio, + self._ffi.NULL, + password_cb, + userdata_handle, + ) + + if evp_pkey == self._ffi.NULL: + if 
userdata.exception is not None: + errors = self._consume_errors() + self.openssl_assert(errors) + raise userdata.exception + else: + self._handle_key_loading_error() + + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + + if password is not None and userdata.called == 0: + raise TypeError( + "Password was given but private key is not encrypted.") + + assert ( + (password is not None and userdata.called == 1) or + password is None + ) + + return convert_func(evp_pkey) + + def _handle_key_loading_error(self): + errors = self._consume_errors() + + if not errors: + raise ValueError("Could not unserialize key data.") + + elif errors[0][1:] in ( + ( + self._lib.ERR_LIB_EVP, + self._lib.EVP_F_EVP_DECRYPTFINAL_EX, + self._lib.EVP_R_BAD_DECRYPT + ), + ( + self._lib.ERR_LIB_PKCS12, + self._lib.PKCS12_F_PKCS12_PBE_CRYPT, + self._lib.PKCS12_R_PKCS12_CIPHERFINAL_ERROR, + ) + ): + raise ValueError("Bad decrypt. Incorrect password?") + + elif errors[0][1:] in ( + ( + self._lib.ERR_LIB_PEM, + self._lib.PEM_F_PEM_GET_EVP_CIPHER_INFO, + self._lib.PEM_R_UNSUPPORTED_ENCRYPTION + ), + + ( + self._lib.ERR_LIB_EVP, + self._lib.EVP_F_EVP_PBE_CIPHERINIT, + self._lib.EVP_R_UNKNOWN_PBE_ALGORITHM + ) + ): + raise UnsupportedAlgorithm( + "PEM data is encrypted with an unsupported cipher", + _Reasons.UNSUPPORTED_CIPHER + ) + + elif any( + error[1:] == ( + self._lib.ERR_LIB_EVP, + self._lib.EVP_F_EVP_PKCS82PKEY, + self._lib.EVP_R_UNSUPPORTED_PRIVATE_KEY_ALGORITHM + ) + for error in errors + ): + raise UnsupportedAlgorithm( + "Unsupported public key algorithm.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM + ) + + else: + assert errors[0][1] in ( + self._lib.ERR_LIB_EVP, + self._lib.ERR_LIB_PEM, + self._lib.ERR_LIB_ASN1, + ) + raise ValueError("Could not unserialize key data.") + + def elliptic_curve_supported(self, curve): + if self._lib.Cryptography_HAS_EC != 1: + return False + + try: + curve_nid = self._elliptic_curve_to_nid(curve) + except UnsupportedAlgorithm: + curve_nid = self._lib.NID_undef + + ctx = self._lib.EC_GROUP_new_by_curve_name(curve_nid) + + if ctx == self._ffi.NULL: + errors = self._consume_errors() + self.openssl_assert( + curve_nid == self._lib.NID_undef or + errors[0][1:] == ( + self._lib.ERR_LIB_EC, + self._lib.EC_F_EC_GROUP_NEW_BY_CURVE_NAME, + self._lib.EC_R_UNKNOWN_GROUP + ) + ) + return False + else: + self.openssl_assert(curve_nid != self._lib.NID_undef) + self._lib.EC_GROUP_free(ctx) + return True + + def elliptic_curve_signature_algorithm_supported( + self, signature_algorithm, curve + ): + if self._lib.Cryptography_HAS_EC != 1: + return False + + # We only support ECDSA right now. + if not isinstance(signature_algorithm, ec.ECDSA): + return False + + # Before 0.9.8m OpenSSL can't cope with digests longer than the curve. + if ( + self._lib.OPENSSL_VERSION_NUMBER < 0x009080df and + curve.key_size < signature_algorithm.algorithm.digest_size * 8 + ): + return False + + return self.elliptic_curve_supported(curve) + + def generate_elliptic_curve_private_key(self, curve): + """ + Generate a new private key on the named curve. 
+ """ + + if self.elliptic_curve_supported(curve): + curve_nid = self._elliptic_curve_to_nid(curve) + + ec_cdata = self._lib.EC_KEY_new_by_curve_name(curve_nid) + self.openssl_assert(ec_cdata != self._ffi.NULL) + ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free) + + res = self._lib.EC_KEY_generate_key(ec_cdata) + self.openssl_assert(res == 1) + + res = self._lib.EC_KEY_check_key(ec_cdata) + self.openssl_assert(res == 1) + + evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata) + + return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey) + else: + raise UnsupportedAlgorithm( + "Backend object does not support {0}.".format(curve.name), + _Reasons.UNSUPPORTED_ELLIPTIC_CURVE + ) + + def load_elliptic_curve_private_numbers(self, numbers): + public = numbers.public_numbers + + curve_nid = self._elliptic_curve_to_nid(public.curve) + + ec_cdata = self._lib.EC_KEY_new_by_curve_name(curve_nid) + self.openssl_assert(ec_cdata != self._ffi.NULL) + ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free) + + ec_cdata = self._ec_key_set_public_key_affine_coordinates( + ec_cdata, public.x, public.y) + + res = self._lib.EC_KEY_set_private_key( + ec_cdata, self._int_to_bn(numbers.private_value)) + self.openssl_assert(res == 1) + evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata) + + return _EllipticCurvePrivateKey(self, ec_cdata, evp_pkey) + + def load_elliptic_curve_public_numbers(self, numbers): + curve_nid = self._elliptic_curve_to_nid(numbers.curve) + + ec_cdata = self._lib.EC_KEY_new_by_curve_name(curve_nid) + self.openssl_assert(ec_cdata != self._ffi.NULL) + ec_cdata = self._ffi.gc(ec_cdata, self._lib.EC_KEY_free) + + ec_cdata = self._ec_key_set_public_key_affine_coordinates( + ec_cdata, numbers.x, numbers.y) + evp_pkey = self._ec_cdata_to_evp_pkey(ec_cdata) + + return _EllipticCurvePublicKey(self, ec_cdata, evp_pkey) + + def elliptic_curve_exchange_algorithm_supported(self, algorithm, curve): + return ( + self.elliptic_curve_supported(curve) and + self._lib.Cryptography_HAS_ECDH == 1 and + isinstance(algorithm, ec.ECDH) + ) + + def _ec_cdata_to_evp_pkey(self, ec_cdata): + evp_pkey = self._create_evp_pkey_gc() + res = self._lib.EVP_PKEY_set1_EC_KEY(evp_pkey, ec_cdata) + self.openssl_assert(res == 1) + return evp_pkey + + def _elliptic_curve_to_nid(self, curve): + """ + Get the NID for a curve name. + """ + + curve_aliases = { + "secp192r1": "prime192v1", + "secp256r1": "prime256v1" + } + + curve_name = curve_aliases.get(curve.name, curve.name) + + curve_nid = self._lib.OBJ_sn2nid(curve_name.encode()) + if curve_nid == self._lib.NID_undef: + raise UnsupportedAlgorithm( + "{0} is not a supported elliptic curve".format(curve.name), + _Reasons.UNSUPPORTED_ELLIPTIC_CURVE + ) + return curve_nid + + @contextmanager + def _tmp_bn_ctx(self): + bn_ctx = self._lib.BN_CTX_new() + self.openssl_assert(bn_ctx != self._ffi.NULL) + bn_ctx = self._ffi.gc(bn_ctx, self._lib.BN_CTX_free) + self._lib.BN_CTX_start(bn_ctx) + try: + yield bn_ctx + finally: + self._lib.BN_CTX_end(bn_ctx) + + def _ec_key_determine_group_get_set_funcs(self, ctx): + """ + Given an EC_KEY determine the group and what methods are required to + get/set point coordinates. 
+ """ + self.openssl_assert(ctx != self._ffi.NULL) + + nid_two_field = self._lib.OBJ_sn2nid(b"characteristic-two-field") + self.openssl_assert(nid_two_field != self._lib.NID_undef) + + group = self._lib.EC_KEY_get0_group(ctx) + self.openssl_assert(group != self._ffi.NULL) + + method = self._lib.EC_GROUP_method_of(group) + self.openssl_assert(method != self._ffi.NULL) + + nid = self._lib.EC_METHOD_get_field_type(method) + self.openssl_assert(nid != self._lib.NID_undef) + + if nid == nid_two_field and self._lib.Cryptography_HAS_EC2M: + set_func = self._lib.EC_POINT_set_affine_coordinates_GF2m + get_func = self._lib.EC_POINT_get_affine_coordinates_GF2m + else: + set_func = self._lib.EC_POINT_set_affine_coordinates_GFp + get_func = self._lib.EC_POINT_get_affine_coordinates_GFp + + assert set_func and get_func + + return set_func, get_func, group + + def _ec_key_set_public_key_affine_coordinates(self, ctx, x, y): + """ + This is a port of EC_KEY_set_public_key_affine_coordinates that was + added in 1.0.1. + + Sets the public key point in the EC_KEY context to the affine x and y + values. + """ + + if x < 0 or y < 0: + raise ValueError( + "Invalid EC key. Both x and y must be non-negative." + ) + + set_func, get_func, group = ( + self._ec_key_determine_group_get_set_funcs(ctx) + ) + + point = self._lib.EC_POINT_new(group) + self.openssl_assert(point != self._ffi.NULL) + point = self._ffi.gc(point, self._lib.EC_POINT_free) + + bn_x = self._int_to_bn(x) + bn_y = self._int_to_bn(y) + + with self._tmp_bn_ctx() as bn_ctx: + check_x = self._lib.BN_CTX_get(bn_ctx) + check_y = self._lib.BN_CTX_get(bn_ctx) + + res = set_func(group, point, bn_x, bn_y, bn_ctx) + self.openssl_assert(res == 1) + + res = get_func(group, point, check_x, check_y, bn_ctx) + self.openssl_assert(res == 1) + + res = self._lib.BN_cmp(bn_x, check_x) + if res != 0: + self._consume_errors() + raise ValueError("Invalid EC Key X point.") + res = self._lib.BN_cmp(bn_y, check_y) + if res != 0: + self._consume_errors() + raise ValueError("Invalid EC Key Y point.") + + res = self._lib.EC_KEY_set_public_key(ctx, point) + self.openssl_assert(res == 1) + + res = self._lib.EC_KEY_check_key(ctx) + if res != 1: + self._consume_errors() + raise ValueError("Invalid EC key.") + + return ctx + + def _private_key_bytes(self, encoding, format, encryption_algorithm, + evp_pkey, cdata): + if not isinstance(format, serialization.PrivateFormat): + raise TypeError( + "format must be an item from the PrivateFormat enum" + ) + + if not isinstance(encryption_algorithm, + serialization.KeySerializationEncryption): + raise TypeError( + "Encryption algorithm must be a KeySerializationEncryption " + "instance" + ) + + if isinstance(encryption_algorithm, serialization.NoEncryption): + password = b"" + passlen = 0 + evp_cipher = self._ffi.NULL + elif isinstance(encryption_algorithm, + serialization.BestAvailableEncryption): + # This is a curated value that we will update over time. 
+ evp_cipher = self._lib.EVP_get_cipherbyname( + b"aes-256-cbc" + ) + password = encryption_algorithm.password + passlen = len(password) + if passlen > 1023: + raise ValueError( + "Passwords longer than 1023 bytes are not supported by " + "this backend" + ) + else: + raise ValueError("Unsupported encryption type") + + key_type = self._lib.Cryptography_EVP_PKEY_id(evp_pkey) + if encoding is serialization.Encoding.PEM: + if format is serialization.PrivateFormat.PKCS8: + write_bio = self._lib.PEM_write_bio_PKCS8PrivateKey + key = evp_pkey + else: + assert format is serialization.PrivateFormat.TraditionalOpenSSL + if key_type == self._lib.EVP_PKEY_RSA: + write_bio = self._lib.PEM_write_bio_RSAPrivateKey + elif key_type == self._lib.EVP_PKEY_DSA: + write_bio = self._lib.PEM_write_bio_DSAPrivateKey + else: + assert self._lib.Cryptography_HAS_EC == 1 + assert key_type == self._lib.EVP_PKEY_EC + write_bio = self._lib.PEM_write_bio_ECPrivateKey + + key = cdata + elif encoding is serialization.Encoding.DER: + if format is serialization.PrivateFormat.TraditionalOpenSSL: + if not isinstance( + encryption_algorithm, serialization.NoEncryption + ): + raise ValueError( + "Encryption is not supported for DER encoded " + "traditional OpenSSL keys" + ) + + return self._private_key_bytes_traditional_der(key_type, cdata) + else: + assert format is serialization.PrivateFormat.PKCS8 + write_bio = self._lib.i2d_PKCS8PrivateKey_bio + key = evp_pkey + else: + raise TypeError("encoding must be an item from the Encoding enum") + + bio = self._create_mem_bio_gc() + res = write_bio( + bio, + key, + evp_cipher, + password, + passlen, + self._ffi.NULL, + self._ffi.NULL + ) + self.openssl_assert(res == 1) + return self._read_mem_bio(bio) + + def _private_key_bytes_traditional_der(self, key_type, cdata): + if key_type == self._lib.EVP_PKEY_RSA: + write_bio = self._lib.i2d_RSAPrivateKey_bio + elif (self._lib.Cryptography_HAS_EC == 1 and + key_type == self._lib.EVP_PKEY_EC): + write_bio = self._lib.i2d_ECPrivateKey_bio + else: + self.openssl_assert(key_type == self._lib.EVP_PKEY_DSA) + write_bio = self._lib.i2d_DSAPrivateKey_bio + + bio = self._create_mem_bio_gc() + res = write_bio(bio, cdata) + self.openssl_assert(res == 1) + return self._read_mem_bio(bio) + + def _public_key_bytes(self, encoding, format, evp_pkey, cdata): + if not isinstance(encoding, serialization.Encoding): + raise TypeError("encoding must be an item from the Encoding enum") + + if format is serialization.PublicFormat.SubjectPublicKeyInfo: + if encoding is serialization.Encoding.PEM: + write_bio = self._lib.PEM_write_bio_PUBKEY + else: + assert encoding is serialization.Encoding.DER + write_bio = self._lib.i2d_PUBKEY_bio + + key = evp_pkey + elif format is serialization.PublicFormat.PKCS1: + # Only RSA is supported here. 
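# [Editor's note -- illustrative sketch only, not part of the patch.] The
# PKCS1 branch guarded by the assert below applies to RSA keys only, while
# SubjectPublicKeyInfo works for every key type this backend handles. A
# hedged sketch of the public public_bytes() calls that land here:
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa

public_key = rsa.generate_private_key(65537, 2048, default_backend()).public_key()

# Any key type: DER/PEM subjectPublicKeyInfo.
spki_pem = public_key.public_bytes(
    serialization.Encoding.PEM,
    serialization.PublicFormat.SubjectPublicKeyInfo,
)
# RSA only: raw PKCS#1 RSAPublicKey structure.
pkcs1_pem = public_key.public_bytes(
    serialization.Encoding.PEM,
    serialization.PublicFormat.PKCS1,
)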
+ assert self._lib.Cryptography_EVP_PKEY_id( + evp_pkey + ) == self._lib.EVP_PKEY_RSA + if encoding is serialization.Encoding.PEM: + write_bio = self._lib.PEM_write_bio_RSAPublicKey + else: + assert encoding is serialization.Encoding.DER + write_bio = self._lib.i2d_RSAPublicKey_bio + + key = cdata + else: + raise TypeError( + "format must be an item from the PublicFormat enum" + ) + + bio = self._create_mem_bio_gc() + res = write_bio(bio, key) + self.openssl_assert(res == 1) + return self._read_mem_bio(bio) + + def _asn1_integer_to_int(self, asn1_int): + bn = self._lib.ASN1_INTEGER_to_BN(asn1_int, self._ffi.NULL) + self.openssl_assert(bn != self._ffi.NULL) + bn = self._ffi.gc(bn, self._lib.BN_free) + return self._bn_to_int(bn) + + def _asn1_string_to_bytes(self, asn1_string): + return self._ffi.buffer(asn1_string.data, asn1_string.length)[:] + + def _asn1_string_to_ascii(self, asn1_string): + return self._asn1_string_to_bytes(asn1_string).decode("ascii") + + def _asn1_string_to_utf8(self, asn1_string): + buf = self._ffi.new("unsigned char **") + res = self._lib.ASN1_STRING_to_UTF8(buf, asn1_string) + self.openssl_assert(res >= 0) + self.openssl_assert(buf[0] != self._ffi.NULL) + buf = self._ffi.gc( + buf, lambda buffer: self._lib.OPENSSL_free(buffer[0]) + ) + return self._ffi.buffer(buf[0], res)[:].decode('utf8') + + def _asn1_to_der(self, asn1_type): + buf = self._ffi.new("unsigned char **") + res = self._lib.i2d_ASN1_TYPE(asn1_type, buf) + self.openssl_assert(res >= 0) + self.openssl_assert(buf[0] != self._ffi.NULL) + buf = self._ffi.gc( + buf, lambda buffer: self._lib.OPENSSL_free(buffer[0]) + ) + return self._ffi.buffer(buf[0], res)[:] + + def _parse_asn1_time(self, asn1_time): + self.openssl_assert(asn1_time != self._ffi.NULL) + generalized_time = self._lib.ASN1_TIME_to_generalizedtime( + asn1_time, self._ffi.NULL + ) + self.openssl_assert(generalized_time != self._ffi.NULL) + generalized_time = self._ffi.gc( + generalized_time, self._lib.ASN1_GENERALIZEDTIME_free + ) + return self._parse_asn1_generalized_time(generalized_time) + + def _parse_asn1_generalized_time(self, generalized_time): + time = self._asn1_string_to_ascii( + self._ffi.cast("ASN1_STRING *", generalized_time) + ) + return datetime.datetime.strptime(time, "%Y%m%d%H%M%SZ") + + +class GetCipherByName(object): + def __init__(self, fmt): + self._fmt = fmt + + def __call__(self, backend, cipher, mode): + cipher_name = self._fmt.format(cipher=cipher, mode=mode).lower() + return backend._lib.EVP_get_cipherbyname(cipher_name.encode("ascii")) + + +backend = Backend() diff --git a/Lib/site-packages/cryptography/hazmat/backends/openssl/ciphers.py b/Lib/site-packages/cryptography/hazmat/backends/openssl/ciphers.py new file mode 100644 index 0000000..a80708a --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/backends/openssl/ciphers.py @@ -0,0 +1,213 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
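# [Editor's note -- illustrative sketch only, not part of the patch.] The
# ciphers.py module added next backs the public Cipher API; its _CipherContext
# handles the EVP setup, GCM tag plumbing and padding shown below. A hedged
# AES-GCM round-trip sketch, assuming the documented primitives API; key, IV
# and associated data are placeholders:
import os

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

key = os.urandom(32)
iv = os.urandom(12)

encryptor = Cipher(algorithms.AES(key), modes.GCM(iv), default_backend()).encryptor()
encryptor.authenticate_additional_data(b"associated data")
ciphertext = encryptor.update(b"secret message") + encryptor.finalize()
tag = encryptor.tag  # collected via EVP_CTRL_GCM_GET_TAG in finalize()

decryptor = Cipher(
    algorithms.AES(key), modes.GCM(iv, tag), default_backend()
).decryptor()
decryptor.authenticate_additional_data(b"associated data")
plaintext = decryptor.update(ciphertext) + decryptor.finalize()  # InvalidTag on tamper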
+ +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.exceptions import InvalidTag, UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.primitives import ciphers +from cryptography.hazmat.primitives.ciphers import modes + + +@utils.register_interface(ciphers.CipherContext) +@utils.register_interface(ciphers.AEADCipherContext) +@utils.register_interface(ciphers.AEADEncryptionContext) +class _CipherContext(object): + _ENCRYPT = 1 + _DECRYPT = 0 + + def __init__(self, backend, cipher, mode, operation): + self._backend = backend + self._cipher = cipher + self._mode = mode + self._operation = operation + self._tag = None + + if isinstance(self._cipher, ciphers.BlockCipherAlgorithm): + self._block_size = self._cipher.block_size + else: + self._block_size = 1 + + ctx = self._backend._lib.EVP_CIPHER_CTX_new() + ctx = self._backend._ffi.gc( + ctx, self._backend._lib.EVP_CIPHER_CTX_free + ) + + registry = self._backend._cipher_registry + try: + adapter = registry[type(cipher), type(mode)] + except KeyError: + raise UnsupportedAlgorithm( + "cipher {0} in {1} mode is not supported " + "by this backend.".format( + cipher.name, mode.name if mode else mode), + _Reasons.UNSUPPORTED_CIPHER + ) + + evp_cipher = adapter(self._backend, cipher, mode) + if evp_cipher == self._backend._ffi.NULL: + raise UnsupportedAlgorithm( + "cipher {0} in {1} mode is not supported " + "by this backend.".format( + cipher.name, mode.name if mode else mode), + _Reasons.UNSUPPORTED_CIPHER + ) + + if isinstance(mode, modes.ModeWithInitializationVector): + iv_nonce = mode.initialization_vector + elif isinstance(mode, modes.ModeWithNonce): + iv_nonce = mode.nonce + else: + iv_nonce = self._backend._ffi.NULL + # begin init with cipher and operation type + res = self._backend._lib.EVP_CipherInit_ex(ctx, evp_cipher, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + operation) + self._backend.openssl_assert(res != 0) + # set the key length to handle variable key ciphers + res = self._backend._lib.EVP_CIPHER_CTX_set_key_length( + ctx, len(cipher.key) + ) + self._backend.openssl_assert(res != 0) + if isinstance(mode, modes.GCM): + res = self._backend._lib.EVP_CIPHER_CTX_ctrl( + ctx, self._backend._lib.EVP_CTRL_GCM_SET_IVLEN, + len(iv_nonce), self._backend._ffi.NULL + ) + self._backend.openssl_assert(res != 0) + if operation == self._DECRYPT: + res = self._backend._lib.EVP_CIPHER_CTX_ctrl( + ctx, self._backend._lib.EVP_CTRL_GCM_SET_TAG, + len(mode.tag), mode.tag + ) + self._backend.openssl_assert(res != 0) + + # pass key/iv + res = self._backend._lib.EVP_CipherInit_ex( + ctx, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + cipher.key, + iv_nonce, + operation + ) + self._backend.openssl_assert(res != 0) + # We purposely disable padding here as it's handled higher up in the + # API. + self._backend._lib.EVP_CIPHER_CTX_set_padding(ctx, 0) + self._ctx = ctx + + def update(self, data): + # OpenSSL 0.9.8e has an assertion in its EVP code that causes it + # to SIGABRT if you call update with an empty byte string. This can be + # removed when we drop support for 0.9.8e (CentOS/RHEL 5). This branch + # should be taken only when length is zero and mode is not GCM because + # AES GCM can return improper tag values if you don't call update + # with empty plaintext when authenticating AAD for ...reasons. 
+ if len(data) == 0 and not isinstance(self._mode, modes.GCM): + return b"" + + buf = self._backend._ffi.new("unsigned char[]", + len(data) + self._block_size - 1) + outlen = self._backend._ffi.new("int *") + res = self._backend._lib.EVP_CipherUpdate(self._ctx, buf, outlen, data, + len(data)) + self._backend.openssl_assert(res != 0) + return self._backend._ffi.buffer(buf)[:outlen[0]] + + def finalize(self): + # OpenSSL 1.0.1 on Ubuntu 12.04 (and possibly other distributions) + # appears to have a bug where you must make at least one call to update + # even if you are only using authenticate_additional_data or the + # GCM tag will be wrong. An (empty) call to update resolves this + # and is harmless for all other versions of OpenSSL. + if isinstance(self._mode, modes.GCM): + self.update(b"") + + buf = self._backend._ffi.new("unsigned char[]", self._block_size) + outlen = self._backend._ffi.new("int *") + res = self._backend._lib.EVP_CipherFinal_ex(self._ctx, buf, outlen) + if res == 0: + errors = self._backend._consume_errors() + + if not errors and isinstance(self._mode, modes.GCM): + raise InvalidTag + + self._backend.openssl_assert( + errors[0][1:] == ( + self._backend._lib.ERR_LIB_EVP, + self._backend._lib.EVP_F_EVP_ENCRYPTFINAL_EX, + self._backend._lib.EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH + ) or errors[0][1:] == ( + self._backend._lib.ERR_LIB_EVP, + self._backend._lib.EVP_F_EVP_DECRYPTFINAL_EX, + self._backend._lib.EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH + ) + ) + raise ValueError( + "The length of the provided data is not a multiple of " + "the block length." + ) + + if (isinstance(self._mode, modes.GCM) and + self._operation == self._ENCRYPT): + block_byte_size = self._block_size // 8 + tag_buf = self._backend._ffi.new( + "unsigned char[]", block_byte_size + ) + res = self._backend._lib.EVP_CIPHER_CTX_ctrl( + self._ctx, self._backend._lib.EVP_CTRL_GCM_GET_TAG, + block_byte_size, tag_buf + ) + self._backend.openssl_assert(res != 0) + self._tag = self._backend._ffi.buffer(tag_buf)[:] + + res = self._backend._lib.EVP_CIPHER_CTX_cleanup(self._ctx) + self._backend.openssl_assert(res == 1) + return self._backend._ffi.buffer(buf)[:outlen[0]] + + def authenticate_additional_data(self, data): + outlen = self._backend._ffi.new("int *") + res = self._backend._lib.EVP_CipherUpdate( + self._ctx, self._backend._ffi.NULL, outlen, data, len(data) + ) + self._backend.openssl_assert(res != 0) + + tag = utils.read_only_property("_tag") + + +@utils.register_interface(ciphers.CipherContext) +class _AESCTRCipherContext(object): + """ + This is needed to provide support for AES CTR mode in OpenSSL 0.9.8. It can + be removed when we drop 0.9.8 support (RHEL5 extended life ends 2020). 
+ """ + def __init__(self, backend, cipher, mode): + self._backend = backend + + self._key = self._backend._ffi.new("AES_KEY *") + res = self._backend._lib.AES_set_encrypt_key( + cipher.key, len(cipher.key) * 8, self._key + ) + self._backend.openssl_assert(res == 0) + self._ecount = self._backend._ffi.new("char[]", 16) + self._nonce = self._backend._ffi.new("char[16]", mode.nonce) + self._num = self._backend._ffi.new("unsigned int *", 0) + + def update(self, data): + buf = self._backend._ffi.new("unsigned char[]", len(data)) + self._backend._lib.AES_ctr128_encrypt( + data, buf, len(data), self._key, self._nonce, + self._ecount, self._num + ) + return self._backend._ffi.buffer(buf)[:] + + def finalize(self): + self._key = None + self._ecount = None + self._nonce = None + self._num = None + return b"" diff --git a/Lib/site-packages/cryptography/hazmat/backends/openssl/cmac.py b/Lib/site-packages/cryptography/hazmat/backends/openssl/cmac.py new file mode 100644 index 0000000..eaefc27 --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/backends/openssl/cmac.py @@ -0,0 +1,80 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + + +from cryptography import utils +from cryptography.exceptions import ( + InvalidSignature, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.primitives import constant_time, interfaces +from cryptography.hazmat.primitives.ciphers.modes import CBC + + +@utils.register_interface(interfaces.MACContext) +class _CMACContext(object): + def __init__(self, backend, algorithm, ctx=None): + if not backend.cmac_algorithm_supported(algorithm): + raise UnsupportedAlgorithm("This backend does not support CMAC.", + _Reasons.UNSUPPORTED_CIPHER) + + self._backend = backend + self._key = algorithm.key + self._algorithm = algorithm + self._output_length = algorithm.block_size // 8 + + if ctx is None: + registry = self._backend._cipher_registry + adapter = registry[type(algorithm), CBC] + + evp_cipher = adapter(self._backend, algorithm, CBC) + + ctx = self._backend._lib.CMAC_CTX_new() + + self._backend.openssl_assert(ctx != self._backend._ffi.NULL) + ctx = self._backend._ffi.gc(ctx, self._backend._lib.CMAC_CTX_free) + + self._backend._lib.CMAC_Init( + ctx, self._key, len(self._key), + evp_cipher, self._backend._ffi.NULL + ) + + self._ctx = ctx + + algorithm = utils.read_only_property("_algorithm") + + def update(self, data): + res = self._backend._lib.CMAC_Update(self._ctx, data, len(data)) + self._backend.openssl_assert(res == 1) + + def finalize(self): + buf = self._backend._ffi.new("unsigned char[]", self._output_length) + length = self._backend._ffi.new("size_t *", self._output_length) + res = self._backend._lib.CMAC_Final( + self._ctx, buf, length + ) + self._backend.openssl_assert(res == 1) + + self._ctx = None + + return self._backend._ffi.buffer(buf)[:] + + def copy(self): + copied_ctx = self._backend._lib.CMAC_CTX_new() + copied_ctx = self._backend._ffi.gc( + copied_ctx, self._backend._lib.CMAC_CTX_free + ) + res = self._backend._lib.CMAC_CTX_copy( + copied_ctx, self._ctx + ) + self._backend.openssl_assert(res == 1) + return _CMACContext( + self._backend, self._algorithm, ctx=copied_ctx + ) + + def verify(self, signature): + digest = self.finalize() + if not constant_time.bytes_eq(digest, signature): + raise InvalidSignature("Signature did not match digest.") diff 
--git a/Lib/site-packages/cryptography/hazmat/backends/openssl/dsa.py b/Lib/site-packages/cryptography/hazmat/backends/openssl/dsa.py new file mode 100644 index 0000000..9b4c1af --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/backends/openssl/dsa.py @@ -0,0 +1,218 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.exceptions import InvalidSignature +from cryptography.hazmat.backends.openssl.utils import _truncate_digest +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import ( + AsymmetricSignatureContext, AsymmetricVerificationContext, dsa +) + + +def _truncate_digest_for_dsa(dsa_cdata, digest, backend): + """ + This function truncates digests that are longer than a given DS + key's length so they can be signed. OpenSSL does this for us in + 1.0.0c+ and it isn't needed in 0.9.8, but that leaves us with three + releases (1.0.0, 1.0.0a, and 1.0.0b) where this is a problem. This + truncation is not required in 0.9.8 because DSA is limited to SHA-1. + """ + + order_bits = backend._lib.BN_num_bits(dsa_cdata.q) + return _truncate_digest(digest, order_bits) + + +@utils.register_interface(AsymmetricVerificationContext) +class _DSAVerificationContext(object): + def __init__(self, backend, public_key, signature, algorithm): + self._backend = backend + self._public_key = public_key + self._signature = signature + self._algorithm = algorithm + + self._hash_ctx = hashes.Hash(self._algorithm, self._backend) + + def update(self, data): + self._hash_ctx.update(data) + + def verify(self): + data_to_verify = self._hash_ctx.finalize() + + data_to_verify = _truncate_digest_for_dsa( + self._public_key._dsa_cdata, data_to_verify, self._backend + ) + + # The first parameter passed to DSA_verify is unused by OpenSSL but + # must be an integer. + res = self._backend._lib.DSA_verify( + 0, data_to_verify, len(data_to_verify), self._signature, + len(self._signature), self._public_key._dsa_cdata) + + if res != 1: + self._backend._consume_errors() + raise InvalidSignature + + +@utils.register_interface(AsymmetricSignatureContext) +class _DSASignatureContext(object): + def __init__(self, backend, private_key, algorithm): + self._backend = backend + self._private_key = private_key + self._algorithm = algorithm + self._hash_ctx = hashes.Hash(self._algorithm, self._backend) + + def update(self, data): + self._hash_ctx.update(data) + + def finalize(self): + data_to_sign = self._hash_ctx.finalize() + data_to_sign = _truncate_digest_for_dsa( + self._private_key._dsa_cdata, data_to_sign, self._backend + ) + sig_buf_len = self._backend._lib.DSA_size(self._private_key._dsa_cdata) + sig_buf = self._backend._ffi.new("unsigned char[]", sig_buf_len) + buflen = self._backend._ffi.new("unsigned int *") + + # The first parameter passed to DSA_sign is unused by OpenSSL but + # must be an integer. 
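# [Editor's note -- illustrative sketch only, not part of the patch.] The
# signature context above is reached through the key's signer()/verifier()
# methods rather than instantiated directly. A hedged sketch of that usage,
# assuming the documented DSA interface of this vintage; key size and message
# are placeholders:
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import dsa

key = dsa.generate_private_key(key_size=1024, backend=default_backend())

signer = key.signer(hashes.SHA256())
signer.update(b"message")
signature = signer.finalize()  # ends up in the DSA_sign() call just below

verifier = key.public_key().verifier(signature, hashes.SHA256())
verifier.update(b"message")
verifier.verify()  # raises InvalidSignature if the digest does not match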
+ res = self._backend._lib.DSA_sign( + 0, data_to_sign, len(data_to_sign), sig_buf, + buflen, self._private_key._dsa_cdata) + self._backend.openssl_assert(res == 1) + self._backend.openssl_assert(buflen[0]) + + return self._backend._ffi.buffer(sig_buf)[:buflen[0]] + + +@utils.register_interface(dsa.DSAParametersWithNumbers) +class _DSAParameters(object): + def __init__(self, backend, dsa_cdata): + self._backend = backend + self._dsa_cdata = dsa_cdata + + def parameter_numbers(self): + return dsa.DSAParameterNumbers( + p=self._backend._bn_to_int(self._dsa_cdata.p), + q=self._backend._bn_to_int(self._dsa_cdata.q), + g=self._backend._bn_to_int(self._dsa_cdata.g) + ) + + def generate_private_key(self): + return self._backend.generate_dsa_private_key(self) + + +@utils.register_interface(dsa.DSAPrivateKeyWithSerialization) +class _DSAPrivateKey(object): + def __init__(self, backend, dsa_cdata, evp_pkey): + self._backend = backend + self._dsa_cdata = dsa_cdata + self._evp_pkey = evp_pkey + self._key_size = self._backend._lib.BN_num_bits(self._dsa_cdata.p) + + key_size = utils.read_only_property("_key_size") + + def signer(self, signature_algorithm): + return _DSASignatureContext(self._backend, self, signature_algorithm) + + def private_numbers(self): + return dsa.DSAPrivateNumbers( + public_numbers=dsa.DSAPublicNumbers( + parameter_numbers=dsa.DSAParameterNumbers( + p=self._backend._bn_to_int(self._dsa_cdata.p), + q=self._backend._bn_to_int(self._dsa_cdata.q), + g=self._backend._bn_to_int(self._dsa_cdata.g) + ), + y=self._backend._bn_to_int(self._dsa_cdata.pub_key) + ), + x=self._backend._bn_to_int(self._dsa_cdata.priv_key) + ) + + def public_key(self): + dsa_cdata = self._backend._lib.DSA_new() + self._backend.openssl_assert(dsa_cdata != self._backend._ffi.NULL) + dsa_cdata = self._backend._ffi.gc( + dsa_cdata, self._backend._lib.DSA_free + ) + dsa_cdata.p = self._backend._lib.BN_dup(self._dsa_cdata.p) + dsa_cdata.q = self._backend._lib.BN_dup(self._dsa_cdata.q) + dsa_cdata.g = self._backend._lib.BN_dup(self._dsa_cdata.g) + dsa_cdata.pub_key = self._backend._lib.BN_dup(self._dsa_cdata.pub_key) + evp_pkey = self._backend._dsa_cdata_to_evp_pkey(dsa_cdata) + return _DSAPublicKey(self._backend, dsa_cdata, evp_pkey) + + def parameters(self): + dsa_cdata = self._backend._lib.DSA_new() + self._backend.openssl_assert(dsa_cdata != self._backend._ffi.NULL) + dsa_cdata = self._backend._ffi.gc( + dsa_cdata, self._backend._lib.DSA_free + ) + dsa_cdata.p = self._backend._lib.BN_dup(self._dsa_cdata.p) + dsa_cdata.q = self._backend._lib.BN_dup(self._dsa_cdata.q) + dsa_cdata.g = self._backend._lib.BN_dup(self._dsa_cdata.g) + return _DSAParameters(self._backend, dsa_cdata) + + def private_bytes(self, encoding, format, encryption_algorithm): + return self._backend._private_key_bytes( + encoding, + format, + encryption_algorithm, + self._evp_pkey, + self._dsa_cdata + ) + + +@utils.register_interface(dsa.DSAPublicKeyWithSerialization) +class _DSAPublicKey(object): + def __init__(self, backend, dsa_cdata, evp_pkey): + self._backend = backend + self._dsa_cdata = dsa_cdata + self._evp_pkey = evp_pkey + self._key_size = self._backend._lib.BN_num_bits(self._dsa_cdata.p) + + key_size = utils.read_only_property("_key_size") + + def verifier(self, signature, signature_algorithm): + if not isinstance(signature, bytes): + raise TypeError("signature must be bytes.") + + return _DSAVerificationContext( + self._backend, self, signature, signature_algorithm + ) + + def public_numbers(self): + return dsa.DSAPublicNumbers( + 
parameter_numbers=dsa.DSAParameterNumbers( + p=self._backend._bn_to_int(self._dsa_cdata.p), + q=self._backend._bn_to_int(self._dsa_cdata.q), + g=self._backend._bn_to_int(self._dsa_cdata.g) + ), + y=self._backend._bn_to_int(self._dsa_cdata.pub_key) + ) + + def parameters(self): + dsa_cdata = self._backend._lib.DSA_new() + self._backend.openssl_assert(dsa_cdata != self._backend._ffi.NULL) + dsa_cdata = self._backend._ffi.gc( + dsa_cdata, self._backend._lib.DSA_free + ) + dsa_cdata.p = self._backend._lib.BN_dup(self._dsa_cdata.p) + dsa_cdata.q = self._backend._lib.BN_dup(self._dsa_cdata.q) + dsa_cdata.g = self._backend._lib.BN_dup(self._dsa_cdata.g) + return _DSAParameters(self._backend, dsa_cdata) + + def public_bytes(self, encoding, format): + if format is serialization.PublicFormat.PKCS1: + raise ValueError( + "DSA public keys do not support PKCS1 serialization" + ) + + return self._backend._public_key_bytes( + encoding, + format, + self._evp_pkey, + None + ) diff --git a/Lib/site-packages/cryptography/hazmat/backends/openssl/ec.py b/Lib/site-packages/cryptography/hazmat/backends/openssl/ec.py new file mode 100644 index 0000000..aa4a7a3 --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/backends/openssl/ec.py @@ -0,0 +1,304 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.exceptions import ( + InvalidSignature, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.openssl.utils import _truncate_digest +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.hazmat.primitives.asymmetric import ( + AsymmetricSignatureContext, AsymmetricVerificationContext, ec +) + + +def _truncate_digest_for_ecdsa(ec_key_cdata, digest, backend): + """ + This function truncates digests that are longer than a given elliptic + curve key's length so they can be signed. Since elliptic curve keys are + much shorter than RSA keys many digests (e.g. SHA-512) may require + truncation. + """ + + _lib = backend._lib + _ffi = backend._ffi + + group = _lib.EC_KEY_get0_group(ec_key_cdata) + + with backend._tmp_bn_ctx() as bn_ctx: + order = _lib.BN_CTX_get(bn_ctx) + backend.openssl_assert(order != _ffi.NULL) + + res = _lib.EC_GROUP_get_order(group, order, bn_ctx) + backend.openssl_assert(res == 1) + + order_bits = _lib.BN_num_bits(order) + + return _truncate_digest(digest, order_bits) + + +def _ec_key_curve_sn(backend, ec_key): + group = backend._lib.EC_KEY_get0_group(ec_key) + backend.openssl_assert(group != backend._ffi.NULL) + + nid = backend._lib.EC_GROUP_get_curve_name(group) + # The following check is to find EC keys with unnamed curves and raise + # an error for now. + if nid == backend._lib.NID_undef: + raise NotImplementedError( + "ECDSA certificates with unnamed curves are unsupported " + "at this time" + ) + + curve_name = backend._lib.OBJ_nid2sn(nid) + backend.openssl_assert(curve_name != backend._ffi.NULL) + + sn = backend._ffi.string(curve_name).decode('ascii') + return sn + + +def _mark_asn1_named_ec_curve(backend, ec_cdata): + """ + Set the named curve flag on the EC_KEY. This causes OpenSSL to + serialize EC keys along with their curve OID which makes + deserialization easier. 
+ """ + + backend._lib.EC_KEY_set_asn1_flag( + ec_cdata, backend._lib.OPENSSL_EC_NAMED_CURVE + ) + + +def _sn_to_elliptic_curve(backend, sn): + try: + return ec._CURVE_TYPES[sn]() + except KeyError: + raise UnsupportedAlgorithm( + "{0} is not a supported elliptic curve".format(sn), + _Reasons.UNSUPPORTED_ELLIPTIC_CURVE + ) + + +@utils.register_interface(AsymmetricSignatureContext) +class _ECDSASignatureContext(object): + def __init__(self, backend, private_key, algorithm): + self._backend = backend + self._private_key = private_key + self._digest = hashes.Hash(algorithm, backend) + + def update(self, data): + self._digest.update(data) + + def finalize(self): + ec_key = self._private_key._ec_key + + digest = self._digest.finalize() + + digest = _truncate_digest_for_ecdsa(ec_key, digest, self._backend) + + max_size = self._backend._lib.ECDSA_size(ec_key) + self._backend.openssl_assert(max_size > 0) + + sigbuf = self._backend._ffi.new("char[]", max_size) + siglen_ptr = self._backend._ffi.new("unsigned int[]", 1) + res = self._backend._lib.ECDSA_sign( + 0, + digest, + len(digest), + sigbuf, + siglen_ptr, + ec_key + ) + self._backend.openssl_assert(res == 1) + return self._backend._ffi.buffer(sigbuf)[:siglen_ptr[0]] + + +@utils.register_interface(AsymmetricVerificationContext) +class _ECDSAVerificationContext(object): + def __init__(self, backend, public_key, signature, algorithm): + self._backend = backend + self._public_key = public_key + self._signature = signature + self._digest = hashes.Hash(algorithm, backend) + + def update(self, data): + self._digest.update(data) + + def verify(self): + ec_key = self._public_key._ec_key + + digest = self._digest.finalize() + + digest = _truncate_digest_for_ecdsa(ec_key, digest, self._backend) + + res = self._backend._lib.ECDSA_verify( + 0, + digest, + len(digest), + self._signature, + len(self._signature), + ec_key + ) + if res != 1: + self._backend._consume_errors() + raise InvalidSignature + return True + + +@utils.register_interface(ec.EllipticCurvePrivateKeyWithSerialization) +class _EllipticCurvePrivateKey(object): + def __init__(self, backend, ec_key_cdata, evp_pkey): + self._backend = backend + _mark_asn1_named_ec_curve(backend, ec_key_cdata) + self._ec_key = ec_key_cdata + self._evp_pkey = evp_pkey + + sn = _ec_key_curve_sn(backend, ec_key_cdata) + self._curve = _sn_to_elliptic_curve(backend, sn) + + curve = utils.read_only_property("_curve") + + def signer(self, signature_algorithm): + if isinstance(signature_algorithm, ec.ECDSA): + return _ECDSASignatureContext( + self._backend, self, signature_algorithm.algorithm + ) + else: + raise UnsupportedAlgorithm( + "Unsupported elliptic curve signature algorithm.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def exchange(self, algorithm, peer_public_key): + if not ( + self._backend.elliptic_curve_exchange_algorithm_supported( + algorithm, self.curve + ) + ): + raise UnsupportedAlgorithm( + "This backend does not support the ECDH algorithm.", + _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM + ) + + if peer_public_key.curve.name != self.curve.name: + raise ValueError( + "peer_public_key and self are not on the same curve" + ) + + group = self._backend._lib.EC_KEY_get0_group(self._ec_key) + z_len = (self._backend._lib.EC_GROUP_get_degree(group) + 7) // 8 + self._backend.openssl_assert(z_len > 0) + z_buf = self._backend._ffi.new("uint8_t[]", z_len) + peer_key = self._backend._lib.EC_KEY_get0_public_key( + peer_public_key._ec_key + ) + + r = self._backend._lib.ECDH_compute_key( + z_buf, z_len, peer_key, 
self._ec_key, self._backend._ffi.NULL + ) + self._backend.openssl_assert(r > 0) + return self._backend._ffi.buffer(z_buf)[:z_len] + + def public_key(self): + group = self._backend._lib.EC_KEY_get0_group(self._ec_key) + self._backend.openssl_assert(group != self._backend._ffi.NULL) + + curve_nid = self._backend._lib.EC_GROUP_get_curve_name(group) + + public_ec_key = self._backend._lib.EC_KEY_new_by_curve_name(curve_nid) + self._backend.openssl_assert(public_ec_key != self._backend._ffi.NULL) + public_ec_key = self._backend._ffi.gc( + public_ec_key, self._backend._lib.EC_KEY_free + ) + + point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key) + self._backend.openssl_assert(point != self._backend._ffi.NULL) + + res = self._backend._lib.EC_KEY_set_public_key(public_ec_key, point) + self._backend.openssl_assert(res == 1) + + evp_pkey = self._backend._ec_cdata_to_evp_pkey(public_ec_key) + + return _EllipticCurvePublicKey(self._backend, public_ec_key, evp_pkey) + + def private_numbers(self): + bn = self._backend._lib.EC_KEY_get0_private_key(self._ec_key) + private_value = self._backend._bn_to_int(bn) + return ec.EllipticCurvePrivateNumbers( + private_value=private_value, + public_numbers=self.public_key().public_numbers() + ) + + def private_bytes(self, encoding, format, encryption_algorithm): + return self._backend._private_key_bytes( + encoding, + format, + encryption_algorithm, + self._evp_pkey, + self._ec_key + ) + + +@utils.register_interface(ec.EllipticCurvePublicKeyWithSerialization) +class _EllipticCurvePublicKey(object): + def __init__(self, backend, ec_key_cdata, evp_pkey): + self._backend = backend + _mark_asn1_named_ec_curve(backend, ec_key_cdata) + self._ec_key = ec_key_cdata + self._evp_pkey = evp_pkey + + sn = _ec_key_curve_sn(backend, ec_key_cdata) + self._curve = _sn_to_elliptic_curve(backend, sn) + + curve = utils.read_only_property("_curve") + + def verifier(self, signature, signature_algorithm): + if not isinstance(signature, bytes): + raise TypeError("signature must be bytes.") + + if isinstance(signature_algorithm, ec.ECDSA): + return _ECDSAVerificationContext( + self._backend, self, signature, signature_algorithm.algorithm + ) + else: + raise UnsupportedAlgorithm( + "Unsupported elliptic curve signature algorithm.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def public_numbers(self): + set_func, get_func, group = ( + self._backend._ec_key_determine_group_get_set_funcs(self._ec_key) + ) + point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key) + self._backend.openssl_assert(point != self._backend._ffi.NULL) + + with self._backend._tmp_bn_ctx() as bn_ctx: + bn_x = self._backend._lib.BN_CTX_get(bn_ctx) + bn_y = self._backend._lib.BN_CTX_get(bn_ctx) + + res = get_func(group, point, bn_x, bn_y, bn_ctx) + self._backend.openssl_assert(res == 1) + + x = self._backend._bn_to_int(bn_x) + y = self._backend._bn_to_int(bn_y) + + return ec.EllipticCurvePublicNumbers( + x=x, + y=y, + curve=self._curve + ) + + def public_bytes(self, encoding, format): + if format is serialization.PublicFormat.PKCS1: + raise ValueError( + "EC public keys do not support PKCS1 serialization" + ) + + return self._backend._public_key_bytes( + encoding, + format, + self._evp_pkey, + None + ) diff --git a/Lib/site-packages/cryptography/hazmat/backends/openssl/hashes.py b/Lib/site-packages/cryptography/hazmat/backends/openssl/hashes.py new file mode 100644 index 0000000..02ce5f0 --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/backends/openssl/hashes.py @@ -0,0 +1,62 @@ +# This file 
is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + + +from cryptography import utils +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.primitives import hashes + + +@utils.register_interface(hashes.HashContext) +class _HashContext(object): + def __init__(self, backend, algorithm, ctx=None): + self._algorithm = algorithm + + self._backend = backend + + if ctx is None: + ctx = self._backend._lib.EVP_MD_CTX_create() + ctx = self._backend._ffi.gc(ctx, + self._backend._lib.EVP_MD_CTX_destroy) + evp_md = self._backend._lib.EVP_get_digestbyname( + algorithm.name.encode("ascii")) + if evp_md == self._backend._ffi.NULL: + raise UnsupportedAlgorithm( + "{0} is not a supported hash on this backend.".format( + algorithm.name), + _Reasons.UNSUPPORTED_HASH + ) + res = self._backend._lib.EVP_DigestInit_ex(ctx, evp_md, + self._backend._ffi.NULL) + self._backend.openssl_assert(res != 0) + + self._ctx = ctx + + algorithm = utils.read_only_property("_algorithm") + + def copy(self): + copied_ctx = self._backend._lib.EVP_MD_CTX_create() + copied_ctx = self._backend._ffi.gc( + copied_ctx, self._backend._lib.EVP_MD_CTX_destroy + ) + res = self._backend._lib.EVP_MD_CTX_copy_ex(copied_ctx, self._ctx) + self._backend.openssl_assert(res != 0) + return _HashContext(self._backend, self.algorithm, ctx=copied_ctx) + + def update(self, data): + res = self._backend._lib.EVP_DigestUpdate(self._ctx, data, len(data)) + self._backend.openssl_assert(res != 0) + + def finalize(self): + buf = self._backend._ffi.new("unsigned char[]", + self._backend._lib.EVP_MAX_MD_SIZE) + outlen = self._backend._ffi.new("unsigned int *") + res = self._backend._lib.EVP_DigestFinal_ex(self._ctx, buf, outlen) + self._backend.openssl_assert(res != 0) + self._backend.openssl_assert(outlen[0] == self.algorithm.digest_size) + res = self._backend._lib.EVP_MD_CTX_cleanup(self._ctx) + self._backend.openssl_assert(res == 1) + return self._backend._ffi.buffer(buf)[:outlen[0]] diff --git a/Lib/site-packages/cryptography/hazmat/backends/openssl/hmac.py b/Lib/site-packages/cryptography/hazmat/backends/openssl/hmac.py new file mode 100644 index 0000000..dcf2fba --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/backends/openssl/hmac.py @@ -0,0 +1,81 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
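# [Editor's note -- illustrative sketch only, not part of the patch.] The
# hmac.py module added next backs the public HMAC API via OpenSSL's HMAC_CTX.
# A hedged sketch of that usage, assuming the documented primitives API; the
# key and payload are placeholders:
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, hmac

h = hmac.HMAC(b"secret key", hashes.SHA256(), backend=default_backend())
h.update(b"payload")
mac = h.finalize()

check = hmac.HMAC(b"secret key", hashes.SHA256(), backend=default_backend())
check.update(b"payload")
check.verify(mac)  # constant-time compare; raises InvalidSignature on mismatch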
+ +from __future__ import absolute_import, division, print_function + + +from cryptography import utils +from cryptography.exceptions import ( + InvalidSignature, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.primitives import constant_time, hashes, interfaces + + +@utils.register_interface(interfaces.MACContext) +@utils.register_interface(hashes.HashContext) +class _HMACContext(object): + def __init__(self, backend, key, algorithm, ctx=None): + self._algorithm = algorithm + self._backend = backend + + if ctx is None: + ctx = self._backend._ffi.new("HMAC_CTX *") + self._backend._lib.HMAC_CTX_init(ctx) + ctx = self._backend._ffi.gc( + ctx, self._backend._lib.HMAC_CTX_cleanup + ) + evp_md = self._backend._lib.EVP_get_digestbyname( + algorithm.name.encode('ascii')) + if evp_md == self._backend._ffi.NULL: + raise UnsupportedAlgorithm( + "{0} is not a supported hash on this backend.".format( + algorithm.name), + _Reasons.UNSUPPORTED_HASH + ) + res = self._backend._lib.Cryptography_HMAC_Init_ex( + ctx, key, len(key), evp_md, self._backend._ffi.NULL + ) + self._backend.openssl_assert(res != 0) + + self._ctx = ctx + self._key = key + + algorithm = utils.read_only_property("_algorithm") + + def copy(self): + copied_ctx = self._backend._ffi.new("HMAC_CTX *") + self._backend._lib.HMAC_CTX_init(copied_ctx) + copied_ctx = self._backend._ffi.gc( + copied_ctx, self._backend._lib.HMAC_CTX_cleanup + ) + res = self._backend._lib.Cryptography_HMAC_CTX_copy( + copied_ctx, self._ctx + ) + self._backend.openssl_assert(res != 0) + return _HMACContext( + self._backend, self._key, self.algorithm, ctx=copied_ctx + ) + + def update(self, data): + res = self._backend._lib.Cryptography_HMAC_Update( + self._ctx, data, len(data) + ) + self._backend.openssl_assert(res != 0) + + def finalize(self): + buf = self._backend._ffi.new("unsigned char[]", + self._backend._lib.EVP_MAX_MD_SIZE) + outlen = self._backend._ffi.new("unsigned int *") + res = self._backend._lib.Cryptography_HMAC_Final( + self._ctx, buf, outlen + ) + self._backend.openssl_assert(res != 0) + self._backend.openssl_assert(outlen[0] == self.algorithm.digest_size) + self._backend._lib.HMAC_CTX_cleanup(self._ctx) + return self._backend._ffi.buffer(buf)[:outlen[0]] + + def verify(self, signature): + digest = self.finalize() + if not constant_time.bytes_eq(digest, signature): + raise InvalidSignature("Signature did not match digest.") diff --git a/Lib/site-packages/cryptography/hazmat/backends/openssl/rsa.py b/Lib/site-packages/cryptography/hazmat/backends/openssl/rsa.py new file mode 100644 index 0000000..033cd3b --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/backends/openssl/rsa.py @@ -0,0 +1,605 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
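# [Editor's note -- illustrative sketch only, not part of the patch.] The
# rsa.py module added next implements the padding, signing and encryption
# paths referenced below. A hedged sketch of PSS signing and OAEP encryption
# through the public API, assuming the documented interface of this vintage;
# OAEP sticks to SHA-1 because this backend only supports SHA-1 inside
# MGF1/OAEP, as its own checks note:
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding, rsa

key = rsa.generate_private_key(65537, 2048, default_backend())

signer = key.signer(
    padding.PSS(mgf=padding.MGF1(hashes.SHA256()),
                salt_length=padding.PSS.MAX_LENGTH),
    hashes.SHA256(),
)
signer.update(b"message")
signature = signer.finalize()

ciphertext = key.public_key().encrypt(
    b"secret",
    padding.OAEP(mgf=padding.MGF1(hashes.SHA1()),
                 algorithm=hashes.SHA1(), label=None),
)
plaintext = key.decrypt(
    ciphertext,
    padding.OAEP(mgf=padding.MGF1(hashes.SHA1()),
                 algorithm=hashes.SHA1(), label=None),
)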
+ +from __future__ import absolute_import, division, print_function + +import math + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, InvalidSignature, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric import ( + AsymmetricSignatureContext, AsymmetricVerificationContext, rsa +) +from cryptography.hazmat.primitives.asymmetric.padding import ( + AsymmetricPadding, MGF1, OAEP, PKCS1v15, PSS +) +from cryptography.hazmat.primitives.asymmetric.rsa import ( + RSAPrivateKeyWithSerialization, RSAPublicKeyWithSerialization +) + + +def _get_rsa_pss_salt_length(pss, key_size, digest_size): + salt = pss._salt_length + + if salt is MGF1.MAX_LENGTH or salt is PSS.MAX_LENGTH: + # bit length - 1 per RFC 3447 + emlen = int(math.ceil((key_size - 1) / 8.0)) + salt_length = emlen - digest_size - 2 + assert salt_length >= 0 + return salt_length + else: + return salt + + +def _enc_dec_rsa(backend, key, data, padding): + if not isinstance(padding, AsymmetricPadding): + raise TypeError("Padding must be an instance of AsymmetricPadding.") + + if isinstance(padding, PKCS1v15): + padding_enum = backend._lib.RSA_PKCS1_PADDING + elif isinstance(padding, OAEP): + padding_enum = backend._lib.RSA_PKCS1_OAEP_PADDING + if not isinstance(padding._mgf, MGF1): + raise UnsupportedAlgorithm( + "Only MGF1 is supported by this backend.", + _Reasons.UNSUPPORTED_MGF + ) + + if not isinstance(padding._mgf._algorithm, hashes.SHA1): + raise UnsupportedAlgorithm( + "This backend supports only SHA1 inside MGF1 when " + "using OAEP.", + _Reasons.UNSUPPORTED_HASH + ) + + if padding._label is not None and padding._label != b"": + raise ValueError("This backend does not support OAEP labels.") + + if not isinstance(padding._algorithm, hashes.SHA1): + raise UnsupportedAlgorithm( + "This backend only supports SHA1 when using OAEP.", + _Reasons.UNSUPPORTED_HASH + ) + else: + raise UnsupportedAlgorithm( + "{0} is not supported by this backend.".format( + padding.name + ), + _Reasons.UNSUPPORTED_PADDING + ) + + if backend._lib.Cryptography_HAS_PKEY_CTX: + return _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum) + else: + return _enc_dec_rsa_098(backend, key, data, padding_enum) + + +def _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum): + if isinstance(key, _RSAPublicKey): + init = backend._lib.EVP_PKEY_encrypt_init + crypt = backend._lib.Cryptography_EVP_PKEY_encrypt + else: + init = backend._lib.EVP_PKEY_decrypt_init + crypt = backend._lib.Cryptography_EVP_PKEY_decrypt + + pkey_ctx = backend._lib.EVP_PKEY_CTX_new( + key._evp_pkey, backend._ffi.NULL + ) + backend.openssl_assert(pkey_ctx != backend._ffi.NULL) + pkey_ctx = backend._ffi.gc(pkey_ctx, backend._lib.EVP_PKEY_CTX_free) + res = init(pkey_ctx) + backend.openssl_assert(res == 1) + res = backend._lib.EVP_PKEY_CTX_set_rsa_padding( + pkey_ctx, padding_enum) + backend.openssl_assert(res > 0) + buf_size = backend._lib.EVP_PKEY_size(key._evp_pkey) + backend.openssl_assert(buf_size > 0) + outlen = backend._ffi.new("size_t *", buf_size) + buf = backend._ffi.new("char[]", buf_size) + res = crypt(pkey_ctx, buf, outlen, data, len(data)) + if res <= 0: + _handle_rsa_enc_dec_error(backend, key) + + return backend._ffi.buffer(buf)[:outlen[0]] + + +def _enc_dec_rsa_098(backend, key, data, padding_enum): + if isinstance(key, _RSAPublicKey): + crypt = backend._lib.RSA_public_encrypt + else: + crypt = backend._lib.RSA_private_decrypt + + key_size = 
backend._lib.RSA_size(key._rsa_cdata) + backend.openssl_assert(key_size > 0) + buf = backend._ffi.new("unsigned char[]", key_size) + res = crypt(len(data), data, buf, key._rsa_cdata, padding_enum) + if res < 0: + _handle_rsa_enc_dec_error(backend, key) + + return backend._ffi.buffer(buf)[:res] + + +def _handle_rsa_enc_dec_error(backend, key): + errors = backend._consume_errors() + assert errors + assert errors[0].lib == backend._lib.ERR_LIB_RSA + if isinstance(key, _RSAPublicKey): + assert (errors[0].reason == + backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE) + raise ValueError( + "Data too long for key size. Encrypt less data or use a " + "larger key size." + ) + else: + decoding_errors = [ + backend._lib.RSA_R_BLOCK_TYPE_IS_NOT_01, + backend._lib.RSA_R_BLOCK_TYPE_IS_NOT_02, + backend._lib.RSA_R_OAEP_DECODING_ERROR, + ] + if backend._lib.Cryptography_HAS_RSA_R_PKCS_DECODING_ERROR: + decoding_errors.append(backend._lib.RSA_R_PKCS_DECODING_ERROR) + + assert errors[0].reason in decoding_errors + raise ValueError("Decryption failed.") + + +@utils.register_interface(AsymmetricSignatureContext) +class _RSASignatureContext(object): + def __init__(self, backend, private_key, padding, algorithm): + self._backend = backend + self._private_key = private_key + + if not isinstance(padding, AsymmetricPadding): + raise TypeError("Expected provider of AsymmetricPadding.") + + self._pkey_size = self._backend._lib.EVP_PKEY_size( + self._private_key._evp_pkey + ) + self._backend.openssl_assert(self._pkey_size > 0) + + if isinstance(padding, PKCS1v15): + if self._backend._lib.Cryptography_HAS_PKEY_CTX: + self._finalize_method = self._finalize_pkey_ctx + self._padding_enum = self._backend._lib.RSA_PKCS1_PADDING + else: + self._finalize_method = self._finalize_pkcs1 + elif isinstance(padding, PSS): + if not isinstance(padding._mgf, MGF1): + raise UnsupportedAlgorithm( + "Only MGF1 is supported by this backend.", + _Reasons.UNSUPPORTED_MGF + ) + + # Size of key in bytes - 2 is the maximum + # PSS signature length (salt length is checked later) + if self._pkey_size - algorithm.digest_size - 2 < 0: + raise ValueError("Digest too large for key size. 
Use a larger " + "key.") + + if not self._backend._mgf1_hash_supported(padding._mgf._algorithm): + raise UnsupportedAlgorithm( + "When OpenSSL is older than 1.0.1 then only SHA1 is " + "supported with MGF1.", + _Reasons.UNSUPPORTED_HASH + ) + + if self._backend._lib.Cryptography_HAS_PKEY_CTX: + self._finalize_method = self._finalize_pkey_ctx + self._padding_enum = self._backend._lib.RSA_PKCS1_PSS_PADDING + else: + self._finalize_method = self._finalize_pss + else: + raise UnsupportedAlgorithm( + "{0} is not supported by this backend.".format(padding.name), + _Reasons.UNSUPPORTED_PADDING + ) + + self._padding = padding + self._algorithm = algorithm + self._hash_ctx = hashes.Hash(self._algorithm, self._backend) + + def update(self, data): + self._hash_ctx.update(data) + + def finalize(self): + evp_md = self._backend._lib.EVP_get_digestbyname( + self._algorithm.name.encode("ascii")) + self._backend.openssl_assert(evp_md != self._backend._ffi.NULL) + + return self._finalize_method(evp_md) + + def _finalize_pkey_ctx(self, evp_md): + pkey_ctx = self._backend._lib.EVP_PKEY_CTX_new( + self._private_key._evp_pkey, self._backend._ffi.NULL + ) + self._backend.openssl_assert(pkey_ctx != self._backend._ffi.NULL) + pkey_ctx = self._backend._ffi.gc(pkey_ctx, + self._backend._lib.EVP_PKEY_CTX_free) + res = self._backend._lib.EVP_PKEY_sign_init(pkey_ctx) + self._backend.openssl_assert(res == 1) + res = self._backend._lib.EVP_PKEY_CTX_set_signature_md( + pkey_ctx, evp_md) + self._backend.openssl_assert(res > 0) + + res = self._backend._lib.EVP_PKEY_CTX_set_rsa_padding( + pkey_ctx, self._padding_enum) + self._backend.openssl_assert(res > 0) + if isinstance(self._padding, PSS): + res = self._backend._lib.EVP_PKEY_CTX_set_rsa_pss_saltlen( + pkey_ctx, + _get_rsa_pss_salt_length( + self._padding, + self._private_key.key_size, + self._hash_ctx.algorithm.digest_size + ) + ) + self._backend.openssl_assert(res > 0) + + if self._backend._lib.Cryptography_HAS_MGF1_MD: + # MGF1 MD is configurable in OpenSSL 1.0.1+ + mgf1_md = self._backend._lib.EVP_get_digestbyname( + self._padding._mgf._algorithm.name.encode("ascii")) + self._backend.openssl_assert( + mgf1_md != self._backend._ffi.NULL + ) + res = self._backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md( + pkey_ctx, mgf1_md + ) + self._backend.openssl_assert(res > 0) + data_to_sign = self._hash_ctx.finalize() + buflen = self._backend._ffi.new("size_t *") + res = self._backend._lib.EVP_PKEY_sign( + pkey_ctx, + self._backend._ffi.NULL, + buflen, + data_to_sign, + len(data_to_sign) + ) + self._backend.openssl_assert(res == 1) + buf = self._backend._ffi.new("unsigned char[]", buflen[0]) + res = self._backend._lib.EVP_PKEY_sign( + pkey_ctx, buf, buflen, data_to_sign, len(data_to_sign)) + if res != 1: + errors = self._backend._consume_errors() + assert errors[0].lib == self._backend._lib.ERR_LIB_RSA + reason = None + if (errors[0].reason == + self._backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE): + reason = ("Salt length too long for key size. Try using " + "MAX_LENGTH instead.") + else: + assert (errors[0].reason == + self._backend._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY) + reason = "Digest too large for key size. Use a larger key." 
+ assert reason is not None + raise ValueError(reason) + + return self._backend._ffi.buffer(buf)[:] + + def _finalize_pkcs1(self, evp_md): + if self._hash_ctx._ctx is None: + raise AlreadyFinalized("Context has already been finalized.") + + sig_buf = self._backend._ffi.new("char[]", self._pkey_size) + sig_len = self._backend._ffi.new("unsigned int *") + res = self._backend._lib.EVP_SignFinal( + self._hash_ctx._ctx._ctx, + sig_buf, + sig_len, + self._private_key._evp_pkey + ) + self._hash_ctx.finalize() + if res == 0: + errors = self._backend._consume_errors() + assert errors[0].lib == self._backend._lib.ERR_LIB_RSA + assert (errors[0].reason == + self._backend._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY) + raise ValueError("Digest too large for key size. Use a larger " + "key.") + + return self._backend._ffi.buffer(sig_buf)[:sig_len[0]] + + def _finalize_pss(self, evp_md): + data_to_sign = self._hash_ctx.finalize() + padded = self._backend._ffi.new("unsigned char[]", self._pkey_size) + res = self._backend._lib.RSA_padding_add_PKCS1_PSS( + self._private_key._rsa_cdata, + padded, + data_to_sign, + evp_md, + _get_rsa_pss_salt_length( + self._padding, + self._private_key.key_size, + len(data_to_sign) + ) + ) + if res != 1: + errors = self._backend._consume_errors() + assert errors[0].lib == self._backend._lib.ERR_LIB_RSA + assert (errors[0].reason == + self._backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE) + raise ValueError("Salt length too long for key size. Try using " + "MAX_LENGTH instead.") + + sig_buf = self._backend._ffi.new("char[]", self._pkey_size) + sig_len = self._backend._lib.RSA_private_encrypt( + self._pkey_size, + padded, + sig_buf, + self._private_key._rsa_cdata, + self._backend._lib.RSA_NO_PADDING + ) + self._backend.openssl_assert(sig_len != -1) + return self._backend._ffi.buffer(sig_buf)[:sig_len] + + +@utils.register_interface(AsymmetricVerificationContext) +class _RSAVerificationContext(object): + def __init__(self, backend, public_key, signature, padding, algorithm): + self._backend = backend + self._public_key = public_key + self._signature = signature + + if not isinstance(padding, AsymmetricPadding): + raise TypeError("Expected provider of AsymmetricPadding.") + + self._pkey_size = self._backend._lib.EVP_PKEY_size( + self._public_key._evp_pkey + ) + self._backend.openssl_assert(self._pkey_size > 0) + + if isinstance(padding, PKCS1v15): + if self._backend._lib.Cryptography_HAS_PKEY_CTX: + self._verify_method = self._verify_pkey_ctx + self._padding_enum = self._backend._lib.RSA_PKCS1_PADDING + else: + self._verify_method = self._verify_pkcs1 + elif isinstance(padding, PSS): + if not isinstance(padding._mgf, MGF1): + raise UnsupportedAlgorithm( + "Only MGF1 is supported by this backend.", + _Reasons.UNSUPPORTED_MGF + ) + + # Size of key in bytes - 2 is the maximum + # PSS signature length (salt length is checked later) + if self._pkey_size - algorithm.digest_size - 2 < 0: + raise ValueError( + "Digest too large for key size. Check that you have the " + "correct key and digest algorithm." 
+ ) + + if not self._backend._mgf1_hash_supported(padding._mgf._algorithm): + raise UnsupportedAlgorithm( + "When OpenSSL is older than 1.0.1 then only SHA1 is " + "supported with MGF1.", + _Reasons.UNSUPPORTED_HASH + ) + + if self._backend._lib.Cryptography_HAS_PKEY_CTX: + self._verify_method = self._verify_pkey_ctx + self._padding_enum = self._backend._lib.RSA_PKCS1_PSS_PADDING + else: + self._verify_method = self._verify_pss + else: + raise UnsupportedAlgorithm( + "{0} is not supported by this backend.".format(padding.name), + _Reasons.UNSUPPORTED_PADDING + ) + + self._padding = padding + self._algorithm = algorithm + self._hash_ctx = hashes.Hash(self._algorithm, self._backend) + + def update(self, data): + self._hash_ctx.update(data) + + def verify(self): + evp_md = self._backend._lib.EVP_get_digestbyname( + self._algorithm.name.encode("ascii")) + self._backend.openssl_assert(evp_md != self._backend._ffi.NULL) + + self._verify_method(evp_md) + + def _verify_pkey_ctx(self, evp_md): + pkey_ctx = self._backend._lib.EVP_PKEY_CTX_new( + self._public_key._evp_pkey, self._backend._ffi.NULL + ) + self._backend.openssl_assert(pkey_ctx != self._backend._ffi.NULL) + pkey_ctx = self._backend._ffi.gc(pkey_ctx, + self._backend._lib.EVP_PKEY_CTX_free) + res = self._backend._lib.EVP_PKEY_verify_init(pkey_ctx) + self._backend.openssl_assert(res == 1) + res = self._backend._lib.EVP_PKEY_CTX_set_signature_md( + pkey_ctx, evp_md) + self._backend.openssl_assert(res > 0) + + res = self._backend._lib.EVP_PKEY_CTX_set_rsa_padding( + pkey_ctx, self._padding_enum) + self._backend.openssl_assert(res > 0) + if isinstance(self._padding, PSS): + res = self._backend._lib.EVP_PKEY_CTX_set_rsa_pss_saltlen( + pkey_ctx, + _get_rsa_pss_salt_length( + self._padding, + self._public_key.key_size, + self._hash_ctx.algorithm.digest_size + ) + ) + self._backend.openssl_assert(res > 0) + if self._backend._lib.Cryptography_HAS_MGF1_MD: + # MGF1 MD is configurable in OpenSSL 1.0.1+ + mgf1_md = self._backend._lib.EVP_get_digestbyname( + self._padding._mgf._algorithm.name.encode("ascii")) + self._backend.openssl_assert( + mgf1_md != self._backend._ffi.NULL + ) + res = self._backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md( + pkey_ctx, mgf1_md + ) + self._backend.openssl_assert(res > 0) + + data_to_verify = self._hash_ctx.finalize() + res = self._backend._lib.EVP_PKEY_verify( + pkey_ctx, + self._signature, + len(self._signature), + data_to_verify, + len(data_to_verify) + ) + # The previous call can return negative numbers in the event of an + # error. This is not a signature failure but we need to fail if it + # occurs. + self._backend.openssl_assert(res >= 0) + if res == 0: + errors = self._backend._consume_errors() + assert errors + raise InvalidSignature + + def _verify_pkcs1(self, evp_md): + if self._hash_ctx._ctx is None: + raise AlreadyFinalized("Context has already been finalized.") + + res = self._backend._lib.EVP_VerifyFinal( + self._hash_ctx._ctx._ctx, + self._signature, + len(self._signature), + self._public_key._evp_pkey + ) + self._hash_ctx.finalize() + # The previous call can return negative numbers in the event of an + # error. This is not a signature failure but we need to fail if it + # occurs. 
+ self._backend.openssl_assert(res >= 0) + if res == 0: + errors = self._backend._consume_errors() + assert errors + raise InvalidSignature + + def _verify_pss(self, evp_md): + buf = self._backend._ffi.new("unsigned char[]", self._pkey_size) + res = self._backend._lib.RSA_public_decrypt( + len(self._signature), + self._signature, + buf, + self._public_key._rsa_cdata, + self._backend._lib.RSA_NO_PADDING + ) + if res != self._pkey_size: + errors = self._backend._consume_errors() + assert errors + raise InvalidSignature + + data_to_verify = self._hash_ctx.finalize() + res = self._backend._lib.RSA_verify_PKCS1_PSS( + self._public_key._rsa_cdata, + data_to_verify, + evp_md, + buf, + _get_rsa_pss_salt_length( + self._padding, + self._public_key.key_size, + len(data_to_verify) + ) + ) + if res != 1: + errors = self._backend._consume_errors() + assert errors + raise InvalidSignature + + +@utils.register_interface(RSAPrivateKeyWithSerialization) +class _RSAPrivateKey(object): + def __init__(self, backend, rsa_cdata, evp_pkey): + self._backend = backend + self._rsa_cdata = rsa_cdata + self._evp_pkey = evp_pkey + + self._key_size = self._backend._lib.BN_num_bits(self._rsa_cdata.n) + + key_size = utils.read_only_property("_key_size") + + def signer(self, padding, algorithm): + return _RSASignatureContext(self._backend, self, padding, algorithm) + + def decrypt(self, ciphertext, padding): + key_size_bytes = int(math.ceil(self.key_size / 8.0)) + if key_size_bytes != len(ciphertext): + raise ValueError("Ciphertext length must be equal to key size.") + + return _enc_dec_rsa(self._backend, self, ciphertext, padding) + + def public_key(self): + ctx = self._backend._lib.RSA_new() + self._backend.openssl_assert(ctx != self._backend._ffi.NULL) + ctx = self._backend._ffi.gc(ctx, self._backend._lib.RSA_free) + ctx.e = self._backend._lib.BN_dup(self._rsa_cdata.e) + ctx.n = self._backend._lib.BN_dup(self._rsa_cdata.n) + res = self._backend._lib.RSA_blinding_on(ctx, self._backend._ffi.NULL) + self._backend.openssl_assert(res == 1) + evp_pkey = self._backend._rsa_cdata_to_evp_pkey(ctx) + return _RSAPublicKey(self._backend, ctx, evp_pkey) + + def private_numbers(self): + return rsa.RSAPrivateNumbers( + p=self._backend._bn_to_int(self._rsa_cdata.p), + q=self._backend._bn_to_int(self._rsa_cdata.q), + d=self._backend._bn_to_int(self._rsa_cdata.d), + dmp1=self._backend._bn_to_int(self._rsa_cdata.dmp1), + dmq1=self._backend._bn_to_int(self._rsa_cdata.dmq1), + iqmp=self._backend._bn_to_int(self._rsa_cdata.iqmp), + public_numbers=rsa.RSAPublicNumbers( + e=self._backend._bn_to_int(self._rsa_cdata.e), + n=self._backend._bn_to_int(self._rsa_cdata.n), + ) + ) + + def private_bytes(self, encoding, format, encryption_algorithm): + return self._backend._private_key_bytes( + encoding, + format, + encryption_algorithm, + self._evp_pkey, + self._rsa_cdata + ) + + +@utils.register_interface(RSAPublicKeyWithSerialization) +class _RSAPublicKey(object): + def __init__(self, backend, rsa_cdata, evp_pkey): + self._backend = backend + self._rsa_cdata = rsa_cdata + self._evp_pkey = evp_pkey + + self._key_size = self._backend._lib.BN_num_bits(self._rsa_cdata.n) + + key_size = utils.read_only_property("_key_size") + + def verifier(self, signature, padding, algorithm): + if not isinstance(signature, bytes): + raise TypeError("signature must be bytes.") + + return _RSAVerificationContext( + self._backend, self, signature, padding, algorithm + ) + + def encrypt(self, plaintext, padding): + return _enc_dec_rsa(self._backend, self, plaintext, 
padding) + + def public_numbers(self): + return rsa.RSAPublicNumbers( + e=self._backend._bn_to_int(self._rsa_cdata.e), + n=self._backend._bn_to_int(self._rsa_cdata.n), + ) + + def public_bytes(self, encoding, format): + return self._backend._public_key_bytes( + encoding, + format, + self._evp_pkey, + self._rsa_cdata + ) diff --git a/Lib/site-packages/cryptography/hazmat/backends/openssl/utils.py b/Lib/site-packages/cryptography/hazmat/backends/openssl/utils.py new file mode 100644 index 0000000..001121f --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/backends/openssl/utils.py @@ -0,0 +1,26 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import six + + +def _truncate_digest(digest, order_bits): + digest_len = len(digest) + + if 8 * digest_len > order_bits: + digest_len = (order_bits + 7) // 8 + digest = digest[:digest_len] + + if 8 * digest_len > order_bits: + rshift = 8 - (order_bits & 0x7) + assert 0 < rshift < 8 + + mask = 0xFF >> rshift << rshift + + # Set the bottom rshift bits to 0 + digest = digest[:-1] + six.int2byte(six.indexbytes(digest, -1) & mask) + + return digest diff --git a/Lib/site-packages/cryptography/hazmat/backends/openssl/x509.py b/Lib/site-packages/cryptography/hazmat/backends/openssl/x509.py new file mode 100644 index 0000000..7692086 --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/backends/openssl/x509.py @@ -0,0 +1,1067 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
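+# Illustrative usage sketch: the certificate objects defined below are
+# normally obtained through the public x509 loaders, assuming the default
+# backend resolves to this OpenSSL backend and pem_data is a bytes object
+# holding a PEM-encoded certificate.
+#
+#   from cryptography import x509
+#   from cryptography.hazmat.backends import default_backend
+#
+#   cert = x509.load_pem_x509_certificate(pem_data, default_backend())
+#   cert.subject      # an x509.Name built by _decode_x509_name below
+#   cert.extensions   # decoded by _CERTIFICATE_EXTENSION_PARSER below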
+ +from __future__ import absolute_import, division, print_function + +import ipaddress +import operator + +from email.utils import parseaddr + +import idna + +import six + +from six.moves import urllib_parse + +from cryptography import utils, x509 +from cryptography.exceptions import UnsupportedAlgorithm +from cryptography.hazmat.primitives import hashes, serialization +from cryptography.x509.oid import ( + CRLEntryExtensionOID, CertificatePoliciesOID, ExtensionOID +) + + +def _obj2txt(backend, obj): + # Set to 80 on the recommendation of + # https://www.openssl.org/docs/crypto/OBJ_nid2ln.html#return_values + buf_len = 80 + buf = backend._ffi.new("char[]", buf_len) + res = backend._lib.OBJ_obj2txt(buf, buf_len, obj, 1) + backend.openssl_assert(res > 0) + return backend._ffi.buffer(buf, res)[:].decode() + + +def _decode_x509_name_entry(backend, x509_name_entry): + obj = backend._lib.X509_NAME_ENTRY_get_object(x509_name_entry) + backend.openssl_assert(obj != backend._ffi.NULL) + data = backend._lib.X509_NAME_ENTRY_get_data(x509_name_entry) + backend.openssl_assert(data != backend._ffi.NULL) + value = backend._asn1_string_to_utf8(data) + oid = _obj2txt(backend, obj) + + return x509.NameAttribute(x509.ObjectIdentifier(oid), value) + + +def _decode_x509_name(backend, x509_name): + count = backend._lib.X509_NAME_entry_count(x509_name) + attributes = [] + for x in range(count): + entry = backend._lib.X509_NAME_get_entry(x509_name, x) + attributes.append(_decode_x509_name_entry(backend, entry)) + + return x509.Name(attributes) + + +def _decode_general_names(backend, gns): + num = backend._lib.sk_GENERAL_NAME_num(gns) + names = [] + for i in range(num): + gn = backend._lib.sk_GENERAL_NAME_value(gns, i) + backend.openssl_assert(gn != backend._ffi.NULL) + names.append(_decode_general_name(backend, gn)) + + return names + + +def _decode_general_name(backend, gn): + if gn.type == backend._lib.GEN_DNS: + data = backend._asn1_string_to_bytes(gn.d.dNSName) + if not data: + decoded = u"" + elif data.startswith(b"*."): + # This is a wildcard name. We need to remove the leading wildcard, + # IDNA decode, then re-add the wildcard. Wildcard characters should + # always be left-most (RFC 2595 section 2.4). + decoded = u"*." + idna.decode(data[2:]) + else: + # Not a wildcard, decode away. If the string has a * in it anywhere + # invalid this will raise an InvalidCodePoint + decoded = idna.decode(data) + if data.startswith(b"."): + # idna strips leading periods. Name constraints can have that + # so we need to re-add it. Sigh. + decoded = u"." + decoded + + return x509.DNSName(decoded) + elif gn.type == backend._lib.GEN_URI: + data = backend._asn1_string_to_ascii(gn.d.uniformResourceIdentifier) + parsed = urllib_parse.urlparse(data) + if parsed.hostname: + hostname = idna.decode(parsed.hostname) + else: + hostname = "" + if parsed.port: + netloc = hostname + u":" + six.text_type(parsed.port) + else: + netloc = hostname + + # Note that building a URL in this fashion means it should be + # semantically indistinguishable from the original but is not + # guaranteed to be exactly the same. 
+ uri = urllib_parse.urlunparse(( + parsed.scheme, + netloc, + parsed.path, + parsed.params, + parsed.query, + parsed.fragment + )) + return x509.UniformResourceIdentifier(uri) + elif gn.type == backend._lib.GEN_RID: + oid = _obj2txt(backend, gn.d.registeredID) + return x509.RegisteredID(x509.ObjectIdentifier(oid)) + elif gn.type == backend._lib.GEN_IPADD: + data = backend._asn1_string_to_bytes(gn.d.iPAddress) + data_len = len(data) + if data_len == 8 or data_len == 32: + # This is an IPv4 or IPv6 Network and not a single IP. This + # type of data appears in Name Constraints. Unfortunately, + # ipaddress doesn't support packed bytes + netmask. Additionally, + # IPv6Network can only handle CIDR rather than the full 16 byte + # netmask. To handle this we convert the netmask to integer, then + # find the first 0 bit, which will be the prefix. If another 1 + # bit is present after that the netmask is invalid. + base = ipaddress.ip_address(data[:data_len // 2]) + netmask = ipaddress.ip_address(data[data_len // 2:]) + bits = bin(int(netmask))[2:] + prefix = bits.find('0') + # If no 0 bits are found it is a /32 or /128 + if prefix == -1: + prefix = len(bits) + + if "1" in bits[prefix:]: + raise ValueError("Invalid netmask") + + ip = ipaddress.ip_network(base.exploded + u"/{0}".format(prefix)) + else: + ip = ipaddress.ip_address(data) + + return x509.IPAddress(ip) + elif gn.type == backend._lib.GEN_DIRNAME: + return x509.DirectoryName( + _decode_x509_name(backend, gn.d.directoryName) + ) + elif gn.type == backend._lib.GEN_EMAIL: + data = backend._asn1_string_to_ascii(gn.d.rfc822Name) + name, address = parseaddr(data) + parts = address.split(u"@") + if name or not address: + # parseaddr has found a name (e.g. Name <email>) or the entire + # value is an empty string. + raise ValueError("Invalid rfc822name value") + elif len(parts) == 1: + # Single label email name. This is valid for local delivery. No + # IDNA decoding can be done since there is no domain component. + return x509.RFC822Name(address) + else: + # A normal email of the form user@domain.com. Let's attempt to + # decode the domain component and return the entire address.
+ return x509.RFC822Name( + parts[0] + u"@" + idna.decode(parts[1]) + ) + elif gn.type == backend._lib.GEN_OTHERNAME: + type_id = _obj2txt(backend, gn.d.otherName.type_id) + value = backend._asn1_to_der(gn.d.otherName.value) + return x509.OtherName(x509.ObjectIdentifier(type_id), value) + else: + # x400Address or ediPartyName + raise x509.UnsupportedGeneralNameType( + "{0} is not a supported type".format( + x509._GENERAL_NAMES.get(gn.type, gn.type) + ), + gn.type + ) + + +def _decode_ocsp_no_check(backend, ext): + return x509.OCSPNoCheck() + + +def _decode_crl_number(backend, ext): + asn1_int = backend._ffi.cast("ASN1_INTEGER *", ext) + asn1_int = backend._ffi.gc(asn1_int, backend._lib.ASN1_INTEGER_free) + return x509.CRLNumber(backend._asn1_integer_to_int(asn1_int)) + + +class _X509ExtensionParser(object): + def __init__(self, ext_count, get_ext, handlers, unsupported_exts=None): + self.ext_count = ext_count + self.get_ext = get_ext + self.handlers = handlers + self.unsupported_exts = unsupported_exts + + def parse(self, backend, x509_obj): + extensions = [] + seen_oids = set() + for i in range(self.ext_count(backend, x509_obj)): + ext = self.get_ext(backend, x509_obj, i) + backend.openssl_assert(ext != backend._ffi.NULL) + crit = backend._lib.X509_EXTENSION_get_critical(ext) + critical = crit == 1 + oid = x509.ObjectIdentifier(_obj2txt(backend, ext.object)) + if oid in seen_oids: + raise x509.DuplicateExtension( + "Duplicate {0} extension found".format(oid), oid + ) + try: + handler = self.handlers[oid] + except KeyError: + if critical: + raise x509.UnsupportedExtension( + "Critical extension {0} is not currently supported" + .format(oid), oid + ) + else: + # Dump the DER payload into an UnrecognizedExtension object + data = backend._lib.X509_EXTENSION_get_data(ext) + backend.openssl_assert(data != backend._ffi.NULL) + der = backend._ffi.buffer(data.data, data.length)[:] + unrecognized = x509.UnrecognizedExtension(oid, der) + extensions.append( + x509.Extension(oid, critical, unrecognized) + ) + else: + # For extensions which are not supported by OpenSSL we pass the + # extension object directly to the parsing routine so it can + # be decoded manually. 
+ if self.unsupported_exts and oid in self.unsupported_exts: + ext_data = ext + else: + ext_data = backend._lib.X509V3_EXT_d2i(ext) + if ext_data == backend._ffi.NULL: + backend._consume_errors() + raise ValueError( + "The {0} extension is invalid and can't be " + "parsed".format(oid) + ) + + value = handler(backend, ext_data) + extensions.append(x509.Extension(oid, critical, value)) + + seen_oids.add(oid) + + return x509.Extensions(extensions) + + +@utils.register_interface(x509.Certificate) +class _Certificate(object): + def __init__(self, backend, x509): + self._backend = backend + self._x509 = x509 + + def __repr__(self): + return "<Certificate(subject={0}, ...)>".format(self.subject) + + def __eq__(self, other): + if not isinstance(other, x509.Certificate): + return NotImplemented + + res = self._backend._lib.X509_cmp(self._x509, other._x509) + return res == 0 + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(self.public_bytes(serialization.Encoding.DER)) + + def fingerprint(self, algorithm): + h = hashes.Hash(algorithm, self._backend) + h.update(self.public_bytes(serialization.Encoding.DER)) + return h.finalize() + + @property + def version(self): + version = self._backend._lib.X509_get_version(self._x509) + if version == 0: + return x509.Version.v1 + elif version == 2: + return x509.Version.v3 + else: + raise x509.InvalidVersion( + "{0} is not a valid X509 version".format(version), version + ) + + @property + def serial(self): + asn1_int = self._backend._lib.X509_get_serialNumber(self._x509) + self._backend.openssl_assert(asn1_int != self._backend._ffi.NULL) + return self._backend._asn1_integer_to_int(asn1_int) + + def public_key(self): + pkey = self._backend._lib.X509_get_pubkey(self._x509) + if pkey == self._backend._ffi.NULL: + # Remove errors from the stack.
+ self._backend._consume_errors() + raise ValueError("Certificate public key is of an unknown type") + + pkey = self._backend._ffi.gc(pkey, self._backend._lib.EVP_PKEY_free) + + return self._backend._evp_pkey_to_public_key(pkey) + + @property + def not_valid_before(self): + asn1_time = self._backend._lib.X509_get_notBefore(self._x509) + return self._backend._parse_asn1_time(asn1_time) + + @property + def not_valid_after(self): + asn1_time = self._backend._lib.X509_get_notAfter(self._x509) + return self._backend._parse_asn1_time(asn1_time) + + @property + def issuer(self): + issuer = self._backend._lib.X509_get_issuer_name(self._x509) + self._backend.openssl_assert(issuer != self._backend._ffi.NULL) + return _decode_x509_name(self._backend, issuer) + + @property + def subject(self): + subject = self._backend._lib.X509_get_subject_name(self._x509) + self._backend.openssl_assert(subject != self._backend._ffi.NULL) + return _decode_x509_name(self._backend, subject) + + @property + def signature_hash_algorithm(self): + oid = _obj2txt(self._backend, self._x509.sig_alg.algorithm) + try: + return x509._SIG_OIDS_TO_HASH[oid] + except KeyError: + raise UnsupportedAlgorithm( + "Signature algorithm OID:{0} not recognized".format(oid) + ) + + @property + def extensions(self): + return _CERTIFICATE_EXTENSION_PARSER.parse(self._backend, self._x509) + + @property + def signature(self): + return self._backend._asn1_string_to_bytes(self._x509.signature) + + @property + def tbs_certificate_bytes(self): + pp = self._backend._ffi.new("unsigned char **") + # the X509_CINF struct holds the tbsCertificate data + res = self._backend._lib.i2d_X509_CINF(self._x509.cert_info, pp) + self._backend.openssl_assert(res > 0) + pp = self._backend._ffi.gc( + pp, lambda pointer: self._backend._lib.OPENSSL_free(pointer[0]) + ) + return self._backend._ffi.buffer(pp[0], res)[:] + + def public_bytes(self, encoding): + bio = self._backend._create_mem_bio_gc() + if encoding is serialization.Encoding.PEM: + res = self._backend._lib.PEM_write_bio_X509(bio, self._x509) + elif encoding is serialization.Encoding.DER: + res = self._backend._lib.i2d_X509_bio(bio, self._x509) + else: + raise TypeError("encoding must be an item from the Encoding enum") + + self._backend.openssl_assert(res == 1) + return self._backend._read_mem_bio(bio) + + +def _decode_certificate_policies(backend, cp): + cp = backend._ffi.cast("Cryptography_STACK_OF_POLICYINFO *", cp) + cp = backend._ffi.gc(cp, backend._lib.sk_POLICYINFO_free) + num = backend._lib.sk_POLICYINFO_num(cp) + certificate_policies = [] + for i in range(num): + qualifiers = None + pi = backend._lib.sk_POLICYINFO_value(cp, i) + oid = x509.ObjectIdentifier(_obj2txt(backend, pi.policyid)) + if pi.qualifiers != backend._ffi.NULL: + qnum = backend._lib.sk_POLICYQUALINFO_num(pi.qualifiers) + qualifiers = [] + for j in range(qnum): + pqi = backend._lib.sk_POLICYQUALINFO_value( + pi.qualifiers, j + ) + pqualid = x509.ObjectIdentifier( + _obj2txt(backend, pqi.pqualid) + ) + if pqualid == CertificatePoliciesOID.CPS_QUALIFIER: + cpsuri = backend._ffi.buffer( + pqi.d.cpsuri.data, pqi.d.cpsuri.length + )[:].decode('ascii') + qualifiers.append(cpsuri) + else: + assert pqualid == CertificatePoliciesOID.CPS_USER_NOTICE + user_notice = _decode_user_notice( + backend, pqi.d.usernotice + ) + qualifiers.append(user_notice) + + certificate_policies.append( + x509.PolicyInformation(oid, qualifiers) + ) + + return x509.CertificatePolicies(certificate_policies) + + +def _decode_user_notice(backend, un): + explicit_text 
= None + notice_reference = None + + if un.exptext != backend._ffi.NULL: + explicit_text = backend._asn1_string_to_utf8(un.exptext) + + if un.noticeref != backend._ffi.NULL: + organization = backend._asn1_string_to_utf8(un.noticeref.organization) + + num = backend._lib.sk_ASN1_INTEGER_num( + un.noticeref.noticenos + ) + notice_numbers = [] + for i in range(num): + asn1_int = backend._lib.sk_ASN1_INTEGER_value( + un.noticeref.noticenos, i + ) + notice_num = backend._asn1_integer_to_int(asn1_int) + notice_numbers.append(notice_num) + + notice_reference = x509.NoticeReference( + organization, notice_numbers + ) + + return x509.UserNotice(notice_reference, explicit_text) + + +def _decode_basic_constraints(backend, bc_st): + basic_constraints = backend._ffi.cast("BASIC_CONSTRAINTS *", bc_st) + basic_constraints = backend._ffi.gc( + basic_constraints, backend._lib.BASIC_CONSTRAINTS_free + ) + # The byte representation of an ASN.1 boolean true is \xff. OpenSSL + # chooses to just map this to its ordinal value, so true is 255 and + # false is 0. + ca = basic_constraints.ca == 255 + if basic_constraints.pathlen == backend._ffi.NULL: + path_length = None + else: + path_length = backend._asn1_integer_to_int(basic_constraints.pathlen) + + return x509.BasicConstraints(ca, path_length) + + +def _decode_subject_key_identifier(backend, asn1_string): + asn1_string = backend._ffi.cast("ASN1_OCTET_STRING *", asn1_string) + asn1_string = backend._ffi.gc( + asn1_string, backend._lib.ASN1_OCTET_STRING_free + ) + return x509.SubjectKeyIdentifier( + backend._ffi.buffer(asn1_string.data, asn1_string.length)[:] + ) + + +def _decode_authority_key_identifier(backend, akid): + akid = backend._ffi.cast("AUTHORITY_KEYID *", akid) + akid = backend._ffi.gc(akid, backend._lib.AUTHORITY_KEYID_free) + key_identifier = None + authority_cert_issuer = None + authority_cert_serial_number = None + + if akid.keyid != backend._ffi.NULL: + key_identifier = backend._ffi.buffer( + akid.keyid.data, akid.keyid.length + )[:] + + if akid.issuer != backend._ffi.NULL: + authority_cert_issuer = _decode_general_names( + backend, akid.issuer + ) + + if akid.serial != backend._ffi.NULL: + authority_cert_serial_number = backend._asn1_integer_to_int( + akid.serial + ) + + return x509.AuthorityKeyIdentifier( + key_identifier, authority_cert_issuer, authority_cert_serial_number + ) + + +def _decode_authority_information_access(backend, aia): + aia = backend._ffi.cast("Cryptography_STACK_OF_ACCESS_DESCRIPTION *", aia) + aia = backend._ffi.gc(aia, backend._lib.sk_ACCESS_DESCRIPTION_free) + num = backend._lib.sk_ACCESS_DESCRIPTION_num(aia) + access_descriptions = [] + for i in range(num): + ad = backend._lib.sk_ACCESS_DESCRIPTION_value(aia, i) + backend.openssl_assert(ad.method != backend._ffi.NULL) + oid = x509.ObjectIdentifier(_obj2txt(backend, ad.method)) + backend.openssl_assert(ad.location != backend._ffi.NULL) + gn = _decode_general_name(backend, ad.location) + access_descriptions.append(x509.AccessDescription(oid, gn)) + + return x509.AuthorityInformationAccess(access_descriptions) + + +def _decode_key_usage(backend, bit_string): + bit_string = backend._ffi.cast("ASN1_BIT_STRING *", bit_string) + bit_string = backend._ffi.gc(bit_string, backend._lib.ASN1_BIT_STRING_free) + get_bit = backend._lib.ASN1_BIT_STRING_get_bit + digital_signature = get_bit(bit_string, 0) == 1 + content_commitment = get_bit(bit_string, 1) == 1 + key_encipherment = get_bit(bit_string, 2) == 1 + data_encipherment = get_bit(bit_string, 3) == 1 + key_agreement = 
get_bit(bit_string, 4) == 1 + key_cert_sign = get_bit(bit_string, 5) == 1 + crl_sign = get_bit(bit_string, 6) == 1 + encipher_only = get_bit(bit_string, 7) == 1 + decipher_only = get_bit(bit_string, 8) == 1 + return x509.KeyUsage( + digital_signature, + content_commitment, + key_encipherment, + data_encipherment, + key_agreement, + key_cert_sign, + crl_sign, + encipher_only, + decipher_only + ) + + +def _decode_general_names_extension(backend, gns): + gns = backend._ffi.cast("GENERAL_NAMES *", gns) + gns = backend._ffi.gc(gns, backend._lib.GENERAL_NAMES_free) + general_names = _decode_general_names(backend, gns) + return general_names + + +def _decode_subject_alt_name(backend, ext): + return x509.SubjectAlternativeName( + _decode_general_names_extension(backend, ext) + ) + + +def _decode_issuer_alt_name(backend, ext): + return x509.IssuerAlternativeName( + _decode_general_names_extension(backend, ext) + ) + + +def _decode_name_constraints(backend, nc): + nc = backend._ffi.cast("NAME_CONSTRAINTS *", nc) + nc = backend._ffi.gc(nc, backend._lib.NAME_CONSTRAINTS_free) + permitted = _decode_general_subtrees(backend, nc.permittedSubtrees) + excluded = _decode_general_subtrees(backend, nc.excludedSubtrees) + return x509.NameConstraints( + permitted_subtrees=permitted, excluded_subtrees=excluded + ) + + +def _decode_general_subtrees(backend, stack_subtrees): + if stack_subtrees == backend._ffi.NULL: + return None + + num = backend._lib.sk_GENERAL_SUBTREE_num(stack_subtrees) + subtrees = [] + + for i in range(num): + obj = backend._lib.sk_GENERAL_SUBTREE_value(stack_subtrees, i) + backend.openssl_assert(obj != backend._ffi.NULL) + name = _decode_general_name(backend, obj.base) + subtrees.append(name) + + return subtrees + + +def _decode_extended_key_usage(backend, sk): + sk = backend._ffi.cast("Cryptography_STACK_OF_ASN1_OBJECT *", sk) + sk = backend._ffi.gc(sk, backend._lib.sk_ASN1_OBJECT_free) + num = backend._lib.sk_ASN1_OBJECT_num(sk) + ekus = [] + + for i in range(num): + obj = backend._lib.sk_ASN1_OBJECT_value(sk, i) + backend.openssl_assert(obj != backend._ffi.NULL) + oid = x509.ObjectIdentifier(_obj2txt(backend, obj)) + ekus.append(oid) + + return x509.ExtendedKeyUsage(ekus) + + +_DISTPOINT_TYPE_FULLNAME = 0 +_DISTPOINT_TYPE_RELATIVENAME = 1 + + +def _decode_crl_distribution_points(backend, cdps): + cdps = backend._ffi.cast("Cryptography_STACK_OF_DIST_POINT *", cdps) + cdps = backend._ffi.gc(cdps, backend._lib.sk_DIST_POINT_free) + num = backend._lib.sk_DIST_POINT_num(cdps) + + dist_points = [] + for i in range(num): + full_name = None + relative_name = None + crl_issuer = None + reasons = None + cdp = backend._lib.sk_DIST_POINT_value(cdps, i) + if cdp.reasons != backend._ffi.NULL: + # We will check each bit from RFC 5280 + # ReasonFlags ::= BIT STRING { + # unused (0), + # keyCompromise (1), + # cACompromise (2), + # affiliationChanged (3), + # superseded (4), + # cessationOfOperation (5), + # certificateHold (6), + # privilegeWithdrawn (7), + # aACompromise (8) } + reasons = [] + get_bit = backend._lib.ASN1_BIT_STRING_get_bit + if get_bit(cdp.reasons, 1): + reasons.append(x509.ReasonFlags.key_compromise) + + if get_bit(cdp.reasons, 2): + reasons.append(x509.ReasonFlags.ca_compromise) + + if get_bit(cdp.reasons, 3): + reasons.append(x509.ReasonFlags.affiliation_changed) + + if get_bit(cdp.reasons, 4): + reasons.append(x509.ReasonFlags.superseded) + + if get_bit(cdp.reasons, 5): + reasons.append(x509.ReasonFlags.cessation_of_operation) + + if get_bit(cdp.reasons, 6): + 
reasons.append(x509.ReasonFlags.certificate_hold) + + if get_bit(cdp.reasons, 7): + reasons.append(x509.ReasonFlags.privilege_withdrawn) + + if get_bit(cdp.reasons, 8): + reasons.append(x509.ReasonFlags.aa_compromise) + + reasons = frozenset(reasons) + + if cdp.CRLissuer != backend._ffi.NULL: + crl_issuer = _decode_general_names(backend, cdp.CRLissuer) + + # Certificates may have a crl_issuer/reasons and no distribution + # point so make sure it's not null. + if cdp.distpoint != backend._ffi.NULL: + # Type 0 is fullName, there is no #define for it in the code. + if cdp.distpoint.type == _DISTPOINT_TYPE_FULLNAME: + full_name = _decode_general_names( + backend, cdp.distpoint.name.fullname + ) + # OpenSSL code doesn't test for a specific type for + # relativename, everything that isn't fullname is considered + # relativename. + else: + rns = cdp.distpoint.name.relativename + rnum = backend._lib.sk_X509_NAME_ENTRY_num(rns) + attributes = [] + for i in range(rnum): + rn = backend._lib.sk_X509_NAME_ENTRY_value( + rns, i + ) + backend.openssl_assert(rn != backend._ffi.NULL) + attributes.append( + _decode_x509_name_entry(backend, rn) + ) + + relative_name = x509.Name(attributes) + + dist_points.append( + x509.DistributionPoint( + full_name, relative_name, reasons, crl_issuer + ) + ) + + return x509.CRLDistributionPoints(dist_points) + + +def _decode_inhibit_any_policy(backend, asn1_int): + asn1_int = backend._ffi.cast("ASN1_INTEGER *", asn1_int) + asn1_int = backend._ffi.gc(asn1_int, backend._lib.ASN1_INTEGER_free) + skip_certs = backend._asn1_integer_to_int(asn1_int) + return x509.InhibitAnyPolicy(skip_certs) + + +# CRLReason ::= ENUMERATED { +# unspecified (0), +# keyCompromise (1), +# cACompromise (2), +# affiliationChanged (3), +# superseded (4), +# cessationOfOperation (5), +# certificateHold (6), +# -- value 7 is not used +# removeFromCRL (8), +# privilegeWithdrawn (9), +# aACompromise (10) } +_CRL_ENTRY_REASON_CODE_TO_ENUM = { + 0: x509.ReasonFlags.unspecified, + 1: x509.ReasonFlags.key_compromise, + 2: x509.ReasonFlags.ca_compromise, + 3: x509.ReasonFlags.affiliation_changed, + 4: x509.ReasonFlags.superseded, + 5: x509.ReasonFlags.cessation_of_operation, + 6: x509.ReasonFlags.certificate_hold, + 8: x509.ReasonFlags.remove_from_crl, + 9: x509.ReasonFlags.privilege_withdrawn, + 10: x509.ReasonFlags.aa_compromise, +} + + +_CRL_ENTRY_REASON_ENUM_TO_CODE = { + x509.ReasonFlags.unspecified: 0, + x509.ReasonFlags.key_compromise: 1, + x509.ReasonFlags.ca_compromise: 2, + x509.ReasonFlags.affiliation_changed: 3, + x509.ReasonFlags.superseded: 4, + x509.ReasonFlags.cessation_of_operation: 5, + x509.ReasonFlags.certificate_hold: 6, + x509.ReasonFlags.remove_from_crl: 8, + x509.ReasonFlags.privilege_withdrawn: 9, + x509.ReasonFlags.aa_compromise: 10 +} + + +def _decode_crl_reason(backend, enum): + enum = backend._ffi.cast("ASN1_ENUMERATED *", enum) + enum = backend._ffi.gc(enum, backend._lib.ASN1_ENUMERATED_free) + code = backend._lib.ASN1_ENUMERATED_get(enum) + + try: + return x509.CRLReason(_CRL_ENTRY_REASON_CODE_TO_ENUM[code]) + except KeyError: + raise ValueError("Unsupported reason code: {0}".format(code)) + + +def _decode_invalidity_date(backend, inv_date): + generalized_time = backend._ffi.cast( + "ASN1_GENERALIZEDTIME *", inv_date + ) + generalized_time = backend._ffi.gc( + generalized_time, backend._lib.ASN1_GENERALIZEDTIME_free + ) + return x509.InvalidityDate( + backend._parse_asn1_generalized_time(generalized_time) + ) + + +def _decode_cert_issuer(backend, ext): + """ + This handler 
decodes the CertificateIssuer entry extension directly + from the X509_EXTENSION object. This is necessary because this entry + extension is not directly supported by OpenSSL 0.9.8. + """ + + data_ptr_ptr = backend._ffi.new("const unsigned char **") + data_ptr_ptr[0] = ext.value.data + gns = backend._lib.d2i_GENERAL_NAMES( + backend._ffi.NULL, data_ptr_ptr, ext.value.length + ) + + # Check the result of d2i_GENERAL_NAMES() is valid. Usually this is covered + # in _X509ExtensionParser but since we are responsible for decoding this + # entry extension ourselves, we have to do this here. + if gns == backend._ffi.NULL: + backend._consume_errors() + raise ValueError( + "The {0} extension is corrupted and can't be parsed".format( + CRLEntryExtensionOID.CERTIFICATE_ISSUER)) + + gns = backend._ffi.gc(gns, backend._lib.GENERAL_NAMES_free) + return x509.CertificateIssuer(_decode_general_names(backend, gns)) + + +@utils.register_interface(x509.RevokedCertificate) +class _RevokedCertificate(object): + def __init__(self, backend, crl, x509_revoked): + self._backend = backend + # The X509_REVOKED_value is an X509_REVOKED * that has + # no reference counting. This means when X509_CRL_free is + # called then the CRL and all X509_REVOKED * are freed. Since + # you can retain a reference to a single revoked certificate + # and let the CRL fall out of scope we need to retain a + # private reference to the CRL inside the RevokedCertificate + # object to prevent the gc from being called inappropriately. + self._crl = crl + self._x509_revoked = x509_revoked + + @property + def serial_number(self): + asn1_int = self._x509_revoked.serialNumber + self._backend.openssl_assert(asn1_int != self._backend._ffi.NULL) + return self._backend._asn1_integer_to_int(asn1_int) + + @property + def revocation_date(self): + return self._backend._parse_asn1_time( + self._x509_revoked.revocationDate) + + @property + def extensions(self): + return _REVOKED_CERTIFICATE_EXTENSION_PARSER.parse( + self._backend, self._x509_revoked + ) + + +@utils.register_interface(x509.CertificateRevocationList) +class _CertificateRevocationList(object): + def __init__(self, backend, x509_crl): + self._backend = backend + self._x509_crl = x509_crl + + def __eq__(self, other): + if not isinstance(other, x509.CertificateRevocationList): + return NotImplemented + + res = self._backend._lib.X509_CRL_cmp(self._x509_crl, other._x509_crl) + return res == 0 + + def __ne__(self, other): + return not self == other + + def fingerprint(self, algorithm): + h = hashes.Hash(algorithm, self._backend) + bio = self._backend._create_mem_bio_gc() + res = self._backend._lib.i2d_X509_CRL_bio( + bio, self._x509_crl + ) + self._backend.openssl_assert(res == 1) + der = self._backend._read_mem_bio(bio) + h.update(der) + return h.finalize() + + @property + def signature_hash_algorithm(self): + oid = _obj2txt(self._backend, self._x509_crl.sig_alg.algorithm) + try: + return x509._SIG_OIDS_TO_HASH[oid] + except KeyError: + raise UnsupportedAlgorithm( + "Signature algorithm OID:{0} not recognized".format(oid) + ) + + @property + def issuer(self): + issuer = self._backend._lib.X509_CRL_get_issuer(self._x509_crl) + self._backend.openssl_assert(issuer != self._backend._ffi.NULL) + return _decode_x509_name(self._backend, issuer) + + @property + def next_update(self): + nu = self._backend._lib.X509_CRL_get_nextUpdate(self._x509_crl) + self._backend.openssl_assert(nu != self._backend._ffi.NULL) + return self._backend._parse_asn1_time(nu) + + @property + def last_update(self): + lu =
self._backend._lib.X509_CRL_get_lastUpdate(self._x509_crl) + self._backend.openssl_assert(lu != self._backend._ffi.NULL) + return self._backend._parse_asn1_time(lu) + + @property + def signature(self): + return self._backend._asn1_string_to_bytes(self._x509_crl.signature) + + @property + def tbs_certlist_bytes(self): + pp = self._backend._ffi.new("unsigned char **") + # the X509_CRL_INFO struct holds the tbsCertList data + res = self._backend._lib.i2d_X509_CRL_INFO(self._x509_crl.crl, pp) + self._backend.openssl_assert(res > 0) + pp = self._backend._ffi.gc( + pp, lambda pointer: self._backend._lib.OPENSSL_free(pointer[0]) + ) + return self._backend._ffi.buffer(pp[0], res)[:] + + def public_bytes(self, encoding): + bio = self._backend._create_mem_bio_gc() + if encoding is serialization.Encoding.PEM: + res = self._backend._lib.PEM_write_bio_X509_CRL( + bio, self._x509_crl + ) + elif encoding is serialization.Encoding.DER: + res = self._backend._lib.i2d_X509_CRL_bio(bio, self._x509_crl) + else: + raise TypeError("encoding must be an item from the Encoding enum") + + self._backend.openssl_assert(res == 1) + return self._backend._read_mem_bio(bio) + + def _revoked_cert(self, idx): + revoked = self._backend._lib.X509_CRL_get_REVOKED(self._x509_crl) + r = self._backend._lib.sk_X509_REVOKED_value(revoked, idx) + self._backend.openssl_assert(r != self._backend._ffi.NULL) + return _RevokedCertificate(self._backend, self, r) + + def __iter__(self): + for i in range(len(self)): + yield self._revoked_cert(i) + + def __getitem__(self, idx): + if isinstance(idx, slice): + start, stop, step = idx.indices(len(self)) + return [self._revoked_cert(i) for i in range(start, stop, step)] + else: + idx = operator.index(idx) + if idx < 0: + idx += len(self) + if not 0 <= idx < len(self): + raise IndexError + return self._revoked_cert(idx) + + def __len__(self): + revoked = self._backend._lib.X509_CRL_get_REVOKED(self._x509_crl) + if revoked == self._backend._ffi.NULL: + return 0 + else: + return self._backend._lib.sk_X509_REVOKED_num(revoked) + + @property + def extensions(self): + return _CRL_EXTENSION_PARSER.parse(self._backend, self._x509_crl) + + +@utils.register_interface(x509.CertificateSigningRequest) +class _CertificateSigningRequest(object): + def __init__(self, backend, x509_req): + self._backend = backend + self._x509_req = x509_req + + def __eq__(self, other): + if not isinstance(other, _CertificateSigningRequest): + return NotImplemented + + self_bytes = self.public_bytes(serialization.Encoding.DER) + other_bytes = other.public_bytes(serialization.Encoding.DER) + return self_bytes == other_bytes + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(self.public_bytes(serialization.Encoding.DER)) + + def public_key(self): + pkey = self._backend._lib.X509_REQ_get_pubkey(self._x509_req) + self._backend.openssl_assert(pkey != self._backend._ffi.NULL) + pkey = self._backend._ffi.gc(pkey, self._backend._lib.EVP_PKEY_free) + return self._backend._evp_pkey_to_public_key(pkey) + + @property + def subject(self): + subject = self._backend._lib.X509_REQ_get_subject_name(self._x509_req) + self._backend.openssl_assert(subject != self._backend._ffi.NULL) + return _decode_x509_name(self._backend, subject) + + @property + def signature_hash_algorithm(self): + oid = _obj2txt(self._backend, self._x509_req.sig_alg.algorithm) + try: + return x509._SIG_OIDS_TO_HASH[oid] + except KeyError: + raise UnsupportedAlgorithm( + "Signature algorithm OID:{0} not recognized".format(oid) + ) + + 
@property + def extensions(self): + x509_exts = self._backend._lib.X509_REQ_get_extensions(self._x509_req) + return _CSR_EXTENSION_PARSER.parse(self._backend, x509_exts) + + def public_bytes(self, encoding): + bio = self._backend._create_mem_bio_gc() + if encoding is serialization.Encoding.PEM: + res = self._backend._lib.PEM_write_bio_X509_REQ( + bio, self._x509_req + ) + elif encoding is serialization.Encoding.DER: + res = self._backend._lib.i2d_X509_REQ_bio(bio, self._x509_req) + else: + raise TypeError("encoding must be an item from the Encoding enum") + + self._backend.openssl_assert(res == 1) + return self._backend._read_mem_bio(bio) + + @property + def tbs_certrequest_bytes(self): + pp = self._backend._ffi.new("unsigned char **") + # the X509_REQ_INFO struct holds the CertificateRequestInfo data + res = self._backend._lib.i2d_X509_REQ_INFO(self._x509_req.req_info, pp) + self._backend.openssl_assert(res > 0) + pp = self._backend._ffi.gc( + pp, lambda pointer: self._backend._lib.OPENSSL_free(pointer[0]) + ) + return self._backend._ffi.buffer(pp[0], res)[:] + + @property + def signature(self): + return self._backend._asn1_string_to_bytes(self._x509_req.signature) + + +_EXTENSION_HANDLERS = { + ExtensionOID.BASIC_CONSTRAINTS: _decode_basic_constraints, + ExtensionOID.SUBJECT_KEY_IDENTIFIER: _decode_subject_key_identifier, + ExtensionOID.KEY_USAGE: _decode_key_usage, + ExtensionOID.SUBJECT_ALTERNATIVE_NAME: _decode_subject_alt_name, + ExtensionOID.EXTENDED_KEY_USAGE: _decode_extended_key_usage, + ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _decode_authority_key_identifier, + ExtensionOID.AUTHORITY_INFORMATION_ACCESS: ( + _decode_authority_information_access + ), + ExtensionOID.CERTIFICATE_POLICIES: _decode_certificate_policies, + ExtensionOID.CRL_DISTRIBUTION_POINTS: _decode_crl_distribution_points, + ExtensionOID.OCSP_NO_CHECK: _decode_ocsp_no_check, + ExtensionOID.INHIBIT_ANY_POLICY: _decode_inhibit_any_policy, + ExtensionOID.ISSUER_ALTERNATIVE_NAME: _decode_issuer_alt_name, + ExtensionOID.NAME_CONSTRAINTS: _decode_name_constraints, +} + +_REVOKED_EXTENSION_HANDLERS = { + CRLEntryExtensionOID.CRL_REASON: _decode_crl_reason, + CRLEntryExtensionOID.INVALIDITY_DATE: _decode_invalidity_date, + CRLEntryExtensionOID.CERTIFICATE_ISSUER: _decode_cert_issuer, +} + +_REVOKED_UNSUPPORTED_EXTENSIONS = set([ + CRLEntryExtensionOID.CERTIFICATE_ISSUER, +]) + +_CRL_EXTENSION_HANDLERS = { + ExtensionOID.CRL_NUMBER: _decode_crl_number, + ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _decode_authority_key_identifier, + ExtensionOID.ISSUER_ALTERNATIVE_NAME: _decode_issuer_alt_name, + ExtensionOID.AUTHORITY_INFORMATION_ACCESS: ( + _decode_authority_information_access + ), +} + +_CERTIFICATE_EXTENSION_PARSER = _X509ExtensionParser( + ext_count=lambda backend, x: backend._lib.X509_get_ext_count(x), + get_ext=lambda backend, x, i: backend._lib.X509_get_ext(x, i), + handlers=_EXTENSION_HANDLERS +) + +_CSR_EXTENSION_PARSER = _X509ExtensionParser( + ext_count=lambda backend, x: backend._lib.sk_X509_EXTENSION_num(x), + get_ext=lambda backend, x, i: backend._lib.sk_X509_EXTENSION_value(x, i), + handlers=_EXTENSION_HANDLERS +) + +_REVOKED_CERTIFICATE_EXTENSION_PARSER = _X509ExtensionParser( + ext_count=lambda backend, x: backend._lib.X509_REVOKED_get_ext_count(x), + get_ext=lambda backend, x, i: backend._lib.X509_REVOKED_get_ext(x, i), + handlers=_REVOKED_EXTENSION_HANDLERS, + unsupported_exts=_REVOKED_UNSUPPORTED_EXTENSIONS +) + +_CRL_EXTENSION_PARSER = _X509ExtensionParser( + ext_count=lambda backend, x: 
backend._lib.X509_CRL_get_ext_count(x), + get_ext=lambda backend, x, i: backend._lib.X509_CRL_get_ext(x, i), + handlers=_CRL_EXTENSION_HANDLERS, +) diff --git a/Lib/site-packages/cryptography/hazmat/bindings/__init__.py b/Lib/site-packages/cryptography/hazmat/bindings/__init__.py new file mode 100644 index 0000000..4b54088 --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/bindings/__init__.py @@ -0,0 +1,5 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function diff --git a/Lib/site-packages/cryptography/hazmat/bindings/commoncrypto/__init__.py b/Lib/site-packages/cryptography/hazmat/bindings/commoncrypto/__init__.py new file mode 100644 index 0000000..4b54088 --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/bindings/commoncrypto/__init__.py @@ -0,0 +1,5 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function diff --git a/Lib/site-packages/cryptography/hazmat/bindings/commoncrypto/binding.py b/Lib/site-packages/cryptography/hazmat/bindings/commoncrypto/binding.py new file mode 100644 index 0000000..dfe046b --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/bindings/commoncrypto/binding.py @@ -0,0 +1,15 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +from cryptography.hazmat.bindings._commoncrypto import ffi, lib + + +class Binding(object): + """ + CommonCrypto API wrapper. + """ + lib = lib + ffi = ffi diff --git a/Lib/site-packages/cryptography/hazmat/bindings/openssl/__init__.py b/Lib/site-packages/cryptography/hazmat/bindings/openssl/__init__.py new file mode 100644 index 0000000..4b54088 --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/bindings/openssl/__init__.py @@ -0,0 +1,5 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function diff --git a/Lib/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py b/Lib/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py new file mode 100644 index 0000000..206c291 --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/bindings/openssl/_conditional.py @@ -0,0 +1,414 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +# This is a temporary copy of all the CONDITIONAL_NAMES from _cffi_src so +# we can loop over them and delete them at runtime. 
It will be removed when +# cffi supports #if in cdef + +CONDITIONAL_NAMES = { + "Cryptography_HAS_AES_WRAP": [ + "AES_wrap_key", + "AES_unwrap_key", + ], + "Cryptography_HAS_CMAC": [ + "CMAC_CTX_new", + "CMAC_Init", + "CMAC_Update", + "CMAC_Final", + "CMAC_CTX_copy", + "CMAC_CTX_free", + ], + "Cryptography_HAS_CMS": [ + "BIO_new_CMS", + "i2d_CMS_bio_stream", + "PEM_write_bio_CMS_stream", + "CMS_final", + "CMS_sign", + "CMS_verify", + "CMS_encrypt", + "CMS_decrypt", + "CMS_add1_signer", + "CMS_TEXT", + "CMS_NOCERTS", + "CMS_NO_CONTENT_VERIFY", + "CMS_NO_ATTR_VERIFY", + "CMS_NOSIGS", + "CMS_NOINTERN", + "CMS_NO_SIGNER_CERT_VERIFY", + "CMS_NOVERIFY", + "CMS_DETACHED", + "CMS_BINARY", + "CMS_NOATTR", + "CMS_NOSMIMECAP", + "CMS_NOOLDMIMETYPE", + "CMS_CRLFEOL", + "CMS_STREAM", + "CMS_NOCRL", + "CMS_PARTIAL", + "CMS_REUSE_DIGEST", + "CMS_USE_KEYID", + "CMS_DEBUG_DECRYPT", + ], + "Cryptography_HAS_CMS_BIO_FUNCTIONS": [ + "BIO_new_CMS", + "i2d_CMS_bio_stream", + "PEM_write_bio_CMS_stream", + ], + "Cryptography_HAS_EC": [ + "OPENSSL_EC_NAMED_CURVE", + "EC_GROUP_new", + "EC_GROUP_free", + "EC_GROUP_clear_free", + "EC_GROUP_new_curve_GFp", + "EC_GROUP_new_by_curve_name", + "EC_GROUP_set_curve_GFp", + "EC_GROUP_get_curve_GFp", + "EC_GROUP_method_of", + "EC_GROUP_get0_generator", + "EC_GROUP_get_curve_name", + "EC_GROUP_get_degree", + "EC_GROUP_set_asn1_flag", + "EC_GROUP_set_point_conversion_form", + "EC_KEY_new", + "EC_KEY_free", + "EC_get_builtin_curves", + "EC_KEY_new_by_curve_name", + "EC_KEY_copy", + "EC_KEY_dup", + "EC_KEY_up_ref", + "EC_KEY_set_group", + "EC_KEY_get0_private_key", + "EC_KEY_set_private_key", + "EC_KEY_set_public_key", + "EC_KEY_get_enc_flags", + "EC_KEY_set_enc_flags", + "EC_KEY_set_conv_form", + "EC_KEY_get_key_method_data", + "EC_KEY_insert_key_method_data", + "EC_KEY_set_asn1_flag", + "EC_KEY_precompute_mult", + "EC_KEY_generate_key", + "EC_KEY_check_key", + "EC_POINT_new", + "EC_POINT_free", + "EC_POINT_clear_free", + "EC_POINT_copy", + "EC_POINT_dup", + "EC_POINT_method_of", + "EC_POINT_set_to_infinity", + "EC_POINT_set_Jprojective_coordinates_GFp", + "EC_POINT_get_Jprojective_coordinates_GFp", + "EC_POINT_set_affine_coordinates_GFp", + "EC_POINT_get_affine_coordinates_GFp", + "EC_POINT_set_compressed_coordinates_GFp", + "EC_POINT_point2oct", + "EC_POINT_oct2point", + "EC_POINT_point2bn", + "EC_POINT_bn2point", + "EC_POINT_point2hex", + "EC_POINT_hex2point", + "EC_POINT_add", + "EC_POINT_dbl", + "EC_POINT_invert", + "EC_POINT_is_at_infinity", + "EC_POINT_is_on_curve", + "EC_POINT_cmp", + "EC_POINT_make_affine", + "EC_POINTs_make_affine", + "EC_POINTs_mul", + "EC_POINT_mul", + "EC_GROUP_precompute_mult", + "EC_GROUP_have_precompute_mult", + "EC_GFp_simple_method", + "EC_GFp_mont_method", + "EC_GFp_nist_method", + "EC_METHOD_get_field_type", + "EVP_PKEY_assign_EC_KEY", + "EVP_PKEY_get1_EC_KEY", + "EVP_PKEY_set1_EC_KEY", + "PEM_write_bio_ECPrivateKey", + "i2d_EC_PUBKEY", + "d2i_EC_PUBKEY", + "d2i_EC_PUBKEY_bio", + "i2d_EC_PUBKEY_bio", + "d2i_ECPrivateKey", + "d2i_ECPrivateKey_bio", + "i2d_ECPrivateKey", + "i2d_ECPrivateKey_bio", + "i2o_ECPublicKey", + "o2i_ECPublicKey", + "SSL_CTX_set_tmp_ecdh", + "POINT_CONVERSION_COMPRESSED", + "POINT_CONVERSION_UNCOMPRESSED", + "POINT_CONVERSION_HYBRID", + ], + + "Cryptography_HAS_EC_1_0_1": [ + "EC_KEY_get_flags", + "EC_KEY_set_flags", + "EC_KEY_clear_flags", + "EC_KEY_set_public_key_affine_coordinates", + ], + + "Cryptography_HAS_EC2M": [ + "EC_GF2m_simple_method", + "EC_POINT_set_affine_coordinates_GF2m", + 
"EC_POINT_get_affine_coordinates_GF2m", + "EC_POINT_set_compressed_coordinates_GF2m", + "EC_GROUP_set_curve_GF2m", + "EC_GROUP_get_curve_GF2m", + "EC_GROUP_new_curve_GF2m", + ], + + "Cryptography_HAS_EC_1_0_2": [ + "EC_curve_nid2nist", + ], + "Cryptography_HAS_ECDH": [ + "ECDH_compute_key", + "ECDH_get_ex_new_index", + "ECDH_set_ex_data", + "ECDH_get_ex_data", + ], + "Cryptography_HAS_ECDSA": [ + "ECDSA_SIG_new", + "ECDSA_SIG_free", + "i2d_ECDSA_SIG", + "d2i_ECDSA_SIG", + "ECDSA_do_sign", + "ECDSA_do_sign_ex", + "ECDSA_do_verify", + "ECDSA_sign_setup", + "ECDSA_sign", + "ECDSA_sign_ex", + "ECDSA_verify", + "ECDSA_size", + "ECDSA_OpenSSL", + "ECDSA_set_default_method", + "ECDSA_get_default_method", + "ECDSA_set_method", + "ECDSA_get_ex_new_index", + "ECDSA_set_ex_data", + "ECDSA_get_ex_data", + ], + "Cryptography_HAS_ENGINE_CRYPTODEV": [ + "ENGINE_load_cryptodev" + ], + "Cryptography_HAS_REMOVE_THREAD_STATE": [ + "ERR_remove_thread_state" + ], + "Cryptography_HAS_098H_ERROR_CODES": [ + "ASN1_F_B64_READ_ASN1", + "ASN1_F_B64_WRITE_ASN1", + "ASN1_F_SMIME_READ_ASN1", + "ASN1_F_SMIME_TEXT", + "ASN1_R_NO_CONTENT_TYPE", + "ASN1_R_NO_MULTIPART_BODY_FAILURE", + "ASN1_R_NO_MULTIPART_BOUNDARY", + ], + "Cryptography_HAS_098C_CAMELLIA_CODES": [ + "EVP_F_CAMELLIA_INIT_KEY", + "EVP_R_CAMELLIA_KEY_SETUP_FAILED" + ], + "Cryptography_HAS_EC_CODES": [ + "EC_R_UNKNOWN_GROUP", + "EC_F_EC_GROUP_NEW_BY_CURVE_NAME" + ], + "Cryptography_HAS_RSA_R_PKCS_DECODING_ERROR": [ + "RSA_R_PKCS_DECODING_ERROR" + ], + "Cryptography_HAS_GCM": [ + "EVP_CTRL_GCM_GET_TAG", + "EVP_CTRL_GCM_SET_TAG", + "EVP_CTRL_GCM_SET_IVLEN", + ], + "Cryptography_HAS_PBKDF2_HMAC": [ + "PKCS5_PBKDF2_HMAC" + ], + "Cryptography_HAS_PKEY_CTX": [ + "EVP_PKEY_CTX_new", + "EVP_PKEY_CTX_new_id", + "EVP_PKEY_CTX_dup", + "EVP_PKEY_CTX_free", + "EVP_PKEY_sign", + "EVP_PKEY_sign_init", + "EVP_PKEY_verify", + "EVP_PKEY_verify_init", + "Cryptography_EVP_PKEY_encrypt", + "EVP_PKEY_encrypt_init", + "Cryptography_EVP_PKEY_decrypt", + "EVP_PKEY_decrypt_init", + "EVP_PKEY_CTX_set_signature_md", + "EVP_PKEY_id", + "EVP_PKEY_CTX_set_rsa_padding", + "EVP_PKEY_CTX_set_rsa_pss_saltlen", + ], + "Cryptography_HAS_ECDSA_SHA2_NIDS": [ + "NID_ecdsa_with_SHA224", + "NID_ecdsa_with_SHA256", + "NID_ecdsa_with_SHA384", + "NID_ecdsa_with_SHA512", + ], + "Cryptography_HAS_EGD": [ + "RAND_egd", + "RAND_egd_bytes", + "RAND_query_egd_bytes", + ], + "Cryptography_HAS_PSS_PADDING": [ + "RSA_PKCS1_PSS_PADDING", + ], + "Cryptography_HAS_MGF1_MD": [ + "EVP_PKEY_CTX_set_rsa_mgf1_md", + ], + "Cryptography_HAS_TLSv1_1": [ + "SSL_OP_NO_TLSv1_1", + "TLSv1_1_method", + "TLSv1_1_server_method", + "TLSv1_1_client_method", + ], + + "Cryptography_HAS_TLSv1_2": [ + "SSL_OP_NO_TLSv1_2", + "TLSv1_2_method", + "TLSv1_2_server_method", + "TLSv1_2_client_method", + ], + + "Cryptography_HAS_SSL3_METHOD": [ + "SSLv3_method", + "SSLv3_client_method", + "SSLv3_server_method", + ], + + "Cryptography_HAS_TLSEXT_HOSTNAME": [ + "SSL_set_tlsext_host_name", + "SSL_get_servername", + "SSL_CTX_set_tlsext_servername_callback", + ], + + "Cryptography_HAS_TLSEXT_STATUS_REQ_CB": [ + "SSL_CTX_set_tlsext_status_cb", + "SSL_CTX_set_tlsext_status_arg" + ], + + "Cryptography_HAS_STATUS_REQ_OCSP_RESP": [ + "SSL_set_tlsext_status_ocsp_resp", + "SSL_get_tlsext_status_ocsp_resp", + ], + + "Cryptography_HAS_TLSEXT_STATUS_REQ_TYPE": [ + "SSL_set_tlsext_status_type", + ], + + "Cryptography_HAS_RELEASE_BUFFERS": [ + "SSL_MODE_RELEASE_BUFFERS", + ], + + "Cryptography_HAS_OP_NO_COMPRESSION": [ + "SSL_OP_NO_COMPRESSION", + ], + + 
"Cryptography_HAS_SSL_OP_MSIE_SSLV2_RSA_PADDING": [ + "SSL_OP_MSIE_SSLV2_RSA_PADDING", + ], + + "Cryptography_HAS_SSL_OP_NO_TICKET": [ + "SSL_OP_NO_TICKET", + ], + + "Cryptography_HAS_SSL_SET_SSL_CTX": [ + "SSL_set_SSL_CTX", + "TLSEXT_NAMETYPE_host_name", + ], + + "Cryptography_HAS_NETBSD_D1_METH": [ + "DTLSv1_method", + ], + + "Cryptography_HAS_NEXTPROTONEG": [ + "SSL_CTX_set_next_protos_advertised_cb", + "SSL_CTX_set_next_proto_select_cb", + "SSL_select_next_proto", + "SSL_get0_next_proto_negotiated", + ], + + "Cryptography_HAS_SECURE_RENEGOTIATION": [ + "SSL_OP_ALLOW_UNSAFE_LEGACY_RENEGOTIATION", + "SSL_OP_LEGACY_SERVER_CONNECT", + "SSL_get_secure_renegotiation_support", + ], + + "Cryptography_HAS_ALPN": [ + "SSL_CTX_set_alpn_protos", + "SSL_set_alpn_protos", + "SSL_CTX_set_alpn_select_cb", + "SSL_get0_alpn_selected", + ], + + "Cryptography_HAS_COMPRESSION": [ + "SSL_get_current_compression", + "SSL_get_current_expansion", + "SSL_COMP_get_name", + ], + + "Cryptography_HAS_GET_SERVER_TMP_KEY": [ + "SSL_get_server_tmp_key", + ], + + "Cryptography_HAS_SSL_CTX_SET_CLIENT_CERT_ENGINE": [ + "SSL_CTX_set_client_cert_engine", + ], + "Cryptography_HAS_102_VERIFICATION_ERROR_CODES": [ + 'X509_V_ERR_SUITE_B_INVALID_VERSION', + 'X509_V_ERR_SUITE_B_INVALID_ALGORITHM', + 'X509_V_ERR_SUITE_B_INVALID_CURVE', + 'X509_V_ERR_SUITE_B_INVALID_SIGNATURE_ALGORITHM', + 'X509_V_ERR_SUITE_B_LOS_NOT_ALLOWED', + 'X509_V_ERR_SUITE_B_CANNOT_SIGN_P_384_WITH_P_256', + 'X509_V_ERR_HOSTNAME_MISMATCH', + 'X509_V_ERR_EMAIL_MISMATCH', + 'X509_V_ERR_IP_ADDRESS_MISMATCH' + ], + "Cryptography_HAS_102_VERIFICATION_PARAMS": [ + "X509_V_FLAG_SUITEB_128_LOS_ONLY", + "X509_V_FLAG_SUITEB_192_LOS", + "X509_V_FLAG_SUITEB_128_LOS", + "X509_VERIFY_PARAM_set1_host", + "X509_VERIFY_PARAM_set1_email", + "X509_VERIFY_PARAM_set1_ip", + "X509_VERIFY_PARAM_set1_ip_asc", + "X509_VERIFY_PARAM_set_hostflags", + ], + "Cryptography_HAS_X509_V_FLAG_TRUSTED_FIRST": [ + "X509_V_FLAG_TRUSTED_FIRST", + ], + "Cryptography_HAS_X509_V_FLAG_PARTIAL_CHAIN": [ + "X509_V_FLAG_PARTIAL_CHAIN", + ], + "Cryptography_HAS_100_VERIFICATION_ERROR_CODES": [ + 'X509_V_ERR_DIFFERENT_CRL_SCOPE', + 'X509_V_ERR_UNSUPPORTED_EXTENSION_FEATURE', + 'X509_V_ERR_UNNESTED_RESOURCE', + 'X509_V_ERR_PERMITTED_VIOLATION', + 'X509_V_ERR_EXCLUDED_VIOLATION', + 'X509_V_ERR_SUBTREE_MINMAX', + 'X509_V_ERR_UNSUPPORTED_CONSTRAINT_TYPE', + 'X509_V_ERR_UNSUPPORTED_CONSTRAINT_SYNTAX', + 'X509_V_ERR_UNSUPPORTED_NAME_SYNTAX', + 'X509_V_ERR_CRL_PATH_VALIDATION_ERROR', + ], + "Cryptography_HAS_100_VERIFICATION_PARAMS": [ + "Cryptography_HAS_100_VERIFICATION_PARAMS", + "X509_V_FLAG_EXTENDED_CRL_SUPPORT", + "X509_V_FLAG_USE_DELTAS", + ], + "Cryptography_HAS_X509_V_FLAG_CHECK_SS_SIGNATURE": [ + "X509_V_FLAG_CHECK_SS_SIGNATURE", + ], + "Cryptography_HAS_SET_CERT_CB": [ + "SSL_CTX_set_cert_cb", + "SSL_set_cert_cb", + ], +} diff --git a/Lib/site-packages/cryptography/hazmat/bindings/openssl/binding.py b/Lib/site-packages/cryptography/hazmat/bindings/openssl/binding.py new file mode 100644 index 0000000..1cfe816 --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/bindings/openssl/binding.py @@ -0,0 +1,213 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import absolute_import, division, print_function + +import collections +import os +import threading +import types +import warnings + +from cryptography.exceptions import InternalError +from cryptography.hazmat.bindings._openssl import ffi, lib +from cryptography.hazmat.bindings.openssl._conditional import CONDITIONAL_NAMES + +_OpenSSLError = collections.namedtuple("_OpenSSLError", + ["code", "lib", "func", "reason"]) + + +def _consume_errors(lib): + errors = [] + while True: + code = lib.ERR_get_error() + if code == 0: + break + + err_lib = lib.ERR_GET_LIB(code) + err_func = lib.ERR_GET_FUNC(code) + err_reason = lib.ERR_GET_REASON(code) + + errors.append(_OpenSSLError(code, err_lib, err_func, err_reason)) + return errors + + +def _openssl_assert(lib, ok): + if not ok: + errors = _consume_errors(lib) + raise InternalError( + "Unknown OpenSSL error. Please file an issue at https://github.com" + "/pyca/cryptography/issues with information on how to reproduce " + "this. ({0!r})".format(errors), + errors + ) + + +def ffi_callback(signature, name, **kwargs): + """Callback dispatcher + + The ffi_callback() dispatcher keeps callbacks compatible between dynamic + and static callbacks. + """ + def wrapper(func): + if lib.Cryptography_STATIC_CALLBACKS: + # def_extern() returns a decorator that sets the internal + # function pointer and returns the original function unmodified. + ffi.def_extern(name=name, **kwargs)(func) + callback = getattr(lib, name) + else: + # callback() wraps the function in a cdata function. + callback = ffi.callback(signature, **kwargs)(func) + return callback + return wrapper + + +@ffi_callback("int (*)(unsigned char *, int)", + name="Cryptography_rand_bytes", + error=-1) +def _osrandom_rand_bytes(buf, size): + signed = ffi.cast("char *", buf) + result = os.urandom(size) + signed[0:size] = result + return 1 + + +@ffi_callback("int (*)(void)", name="Cryptography_rand_status") +def _osrandom_rand_status(): + return 1 + + +def build_conditional_library(lib, conditional_names): + conditional_lib = types.ModuleType("lib") + excluded_names = set() + for condition, names in conditional_names.items(): + if not getattr(lib, condition): + excluded_names |= set(names) + + for attr in dir(lib): + if attr not in excluded_names: + setattr(conditional_lib, attr, getattr(lib, attr)) + + return conditional_lib + + +class Binding(object): + """ + OpenSSL API wrapper. 
+ """ + lib = None + ffi = ffi + _lib_loaded = False + _locks = None + _lock_cb_handle = None + _init_lock = threading.Lock() + _lock_init_lock = threading.Lock() + + _osrandom_engine_id = ffi.new("const char[]", b"osrandom") + _osrandom_engine_name = ffi.new("const char[]", b"osrandom_engine") + _osrandom_method = ffi.new( + "RAND_METHOD *", + dict(bytes=_osrandom_rand_bytes, + pseudorand=_osrandom_rand_bytes, + status=_osrandom_rand_status) + ) + + def __init__(self): + self._ensure_ffi_initialized() + + @classmethod + def _register_osrandom_engine(cls): + _openssl_assert(cls.lib, cls.lib.ERR_peek_error() == 0) + + engine = cls.lib.ENGINE_new() + _openssl_assert(cls.lib, engine != cls.ffi.NULL) + try: + result = cls.lib.ENGINE_set_id(engine, cls._osrandom_engine_id) + _openssl_assert(cls.lib, result == 1) + result = cls.lib.ENGINE_set_name(engine, cls._osrandom_engine_name) + _openssl_assert(cls.lib, result == 1) + result = cls.lib.ENGINE_set_RAND(engine, cls._osrandom_method) + _openssl_assert(cls.lib, result == 1) + result = cls.lib.ENGINE_add(engine) + if result != 1: + errors = _consume_errors(cls.lib) + _openssl_assert( + cls.lib, + errors[0].reason == cls.lib.ENGINE_R_CONFLICTING_ENGINE_ID + ) + + finally: + result = cls.lib.ENGINE_free(engine) + _openssl_assert(cls.lib, result == 1) + + @classmethod + def _ensure_ffi_initialized(cls): + with cls._init_lock: + if not cls._lib_loaded: + cls.lib = build_conditional_library(lib, CONDITIONAL_NAMES) + cls._lib_loaded = True + # initialize the SSL library + cls.lib.SSL_library_init() + # adds all ciphers/digests for EVP + cls.lib.OpenSSL_add_all_algorithms() + # loads error strings for libcrypto and libssl functions + cls.lib.SSL_load_error_strings() + cls._register_osrandom_engine() + + @classmethod + def init_static_locks(cls): + with cls._lock_init_lock: + cls._ensure_ffi_initialized() + + if not cls._lock_cb_handle: + wrapper = ffi_callback( + "void(int, int, const char *, int)", + name="Cryptography_locking_cb", + ) + cls._lock_cb_handle = wrapper(cls._lock_cb) + + # Use Python's implementation if available, importing _ssl triggers + # the setup for this. + __import__("_ssl") + + if cls.lib.CRYPTO_get_locking_callback() != cls.ffi.NULL: + return + + # If nothing else has setup a locking callback already, we set up + # our own + num_locks = cls.lib.CRYPTO_num_locks() + cls._locks = [threading.Lock() for n in range(num_locks)] + + cls.lib.CRYPTO_set_locking_callback(cls._lock_cb_handle) + + @classmethod + def _lock_cb(cls, mode, n, file, line): + lock = cls._locks[n] + + if mode & cls.lib.CRYPTO_LOCK: + lock.acquire() + elif mode & cls.lib.CRYPTO_UNLOCK: + lock.release() + else: + raise RuntimeError( + "Unknown lock mode {0}: lock={1}, file={2}, line={3}.".format( + mode, n, file, line + ) + ) + + +# OpenSSL is not thread safe until the locks are initialized. We call this +# method in module scope so that it executes with the import lock. On +# Pythons < 3.4 this import lock is a global lock, which can prevent a race +# condition registering the OpenSSL locks. On Python 3.4+ the import lock +# is per module so this approach will not work. +Binding.init_static_locks() + +if Binding.lib.SSLeay() < 0x10001000: + warnings.warn( + "OpenSSL versions less than 1.0.1 are no longer supported by the " + "OpenSSL project, please upgrade. 
A future version of cryptography " + "will drop support for these versions.", + DeprecationWarning + ) diff --git a/Lib/site-packages/cryptography/hazmat/primitives/__init__.py b/Lib/site-packages/cryptography/hazmat/primitives/__init__.py new file mode 100644 index 0000000..4b54088 --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/primitives/__init__.py @@ -0,0 +1,5 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function diff --git a/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py b/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py new file mode 100644 index 0000000..494a7a1 --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py @@ -0,0 +1,40 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc + +import six + + +@six.add_metaclass(abc.ABCMeta) +class AsymmetricSignatureContext(object): + @abc.abstractmethod + def update(self, data): + """ + Processes the provided bytes and returns nothing. + """ + + @abc.abstractmethod + def finalize(self): + """ + Returns the signature as bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class AsymmetricVerificationContext(object): + @abc.abstractmethod + def update(self, data): + """ + Processes the provided bytes and returns nothing. + """ + + @abc.abstractmethod + def verify(self): + """ + Raises an exception if the bytes provided to update do not match the + signature or the signature does not match the public key. + """ diff --git a/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py b/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py new file mode 100644 index 0000000..12d53ee --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/dh.py @@ -0,0 +1,166 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import absolute_import, division, print_function + +import abc + +import six + +from cryptography import utils + + +class DHPrivateNumbers(object): + def __init__(self, x, public_numbers): + if not isinstance(x, six.integer_types): + raise TypeError("x must be an integer.") + + if not isinstance(public_numbers, DHPublicNumbers): + raise TypeError("public_numbers must be an instance of " + "DHPublicNumbers.") + + self._x = x + self._public_numbers = public_numbers + + def __eq__(self, other): + if not isinstance(other, DHPrivateNumbers): + return NotImplemented + + return ( + self._x == other._x and + self._public_numbers == other._public_numbers + ) + + def __ne__(self, other): + return not self == other + + public_numbers = utils.read_only_property("_public_numbers") + x = utils.read_only_property("_x") + + +class DHPublicNumbers(object): + def __init__(self, y, parameter_numbers): + if not isinstance(y, six.integer_types): + raise TypeError("y must be an integer.") + + if not isinstance(parameter_numbers, DHParameterNumbers): + raise TypeError( + "parameters must be an instance of DHParameterNumbers.") + + self._y = y + self._parameter_numbers = parameter_numbers + + def __eq__(self, other): + if not isinstance(other, DHPublicNumbers): + return NotImplemented + + return ( + self._y == other._y and + self._parameter_numbers == other._parameter_numbers + ) + + def __ne__(self, other): + return not self == other + + y = utils.read_only_property("_y") + parameter_numbers = utils.read_only_property("_parameter_numbers") + + +class DHParameterNumbers(object): + def __init__(self, p, g): + if ( + not isinstance(p, six.integer_types) or + not isinstance(g, six.integer_types) + ): + raise TypeError("p and g must be integers") + + self._p = p + self._g = g + + def __eq__(self, other): + if not isinstance(other, DHParameterNumbers): + return NotImplemented + + return ( + self._p == other._p and + self._g == other._g + ) + + def __ne__(self, other): + return not self == other + + p = utils.read_only_property("_p") + g = utils.read_only_property("_g") + + +@six.add_metaclass(abc.ABCMeta) +class DHParameters(object): + @abc.abstractmethod + def generate_private_key(self): + """ + Generates and returns a DHPrivateKey. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DHParametersWithSerialization(DHParameters): + @abc.abstractmethod + def parameter_numbers(self): + """ + Returns a DHParameterNumbers. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DHPrivateKey(object): + @abc.abstractproperty + def key_size(self): + """ + The bit length of the prime modulus. + """ + + @abc.abstractmethod + def public_key(self): + """ + The DHPublicKey associated with this private key. + """ + + @abc.abstractmethod + def parameters(self): + """ + The DHParameters object associated with this private key. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DHPrivateKeyWithSerialization(DHPrivateKey): + @abc.abstractmethod + def private_numbers(self): + """ + Returns a DHPrivateNumbers. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DHPublicKey(object): + @abc.abstractproperty + def key_size(self): + """ + The bit length of the prime modulus. + """ + + @abc.abstractmethod + def parameters(self): + """ + The DHParameters object associated with this public key. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DHPublicKeyWithSerialization(DHPublicKey): + @abc.abstractmethod + def public_numbers(self): + """ + Returns a DHPublicNumbers. 
+ """ diff --git a/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py b/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py new file mode 100644 index 0000000..184177e --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py @@ -0,0 +1,229 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc + +import six + +from cryptography import utils + + +@six.add_metaclass(abc.ABCMeta) +class DSAParameters(object): + @abc.abstractmethod + def generate_private_key(self): + """ + Generates and returns a DSAPrivateKey. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DSAParametersWithNumbers(DSAParameters): + @abc.abstractmethod + def parameter_numbers(self): + """ + Returns a DSAParameterNumbers. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DSAPrivateKey(object): + @abc.abstractproperty + def key_size(self): + """ + The bit length of the prime modulus. + """ + + @abc.abstractmethod + def public_key(self): + """ + The DSAPublicKey associated with this private key. + """ + + @abc.abstractmethod + def parameters(self): + """ + The DSAParameters object associated with this private key. + """ + + @abc.abstractmethod + def signer(self, signature_algorithm): + """ + Returns an AsymmetricSignatureContext used for signing data. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DSAPrivateKeyWithSerialization(DSAPrivateKey): + @abc.abstractmethod + def private_numbers(self): + """ + Returns a DSAPrivateNumbers. + """ + + @abc.abstractmethod + def private_bytes(self, encoding, format, encryption_algorithm): + """ + Returns the key serialized as bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DSAPublicKey(object): + @abc.abstractproperty + def key_size(self): + """ + The bit length of the prime modulus. + """ + + @abc.abstractmethod + def parameters(self): + """ + The DSAParameters object associated with this public key. + """ + + @abc.abstractmethod + def verifier(self, signature, signature_algorithm): + """ + Returns an AsymmetricVerificationContext used for signing data. + """ + + @abc.abstractmethod + def public_numbers(self): + """ + Returns a DSAPublicNumbers. + """ + + @abc.abstractmethod + def public_bytes(self, encoding, format): + """ + Returns the key serialized as bytes. 
+ """ + + +DSAPublicKeyWithSerialization = DSAPublicKey + + +def generate_parameters(key_size, backend): + return backend.generate_dsa_parameters(key_size) + + +def generate_private_key(key_size, backend): + return backend.generate_dsa_private_key_and_parameters(key_size) + + +def _check_dsa_parameters(parameters): + if utils.bit_length(parameters.p) not in [1024, 2048, 3072]: + raise ValueError("p must be exactly 1024, 2048, or 3072 bits long") + if utils.bit_length(parameters.q) not in [160, 256]: + raise ValueError("q must be exactly 160 or 256 bits long") + + if not (1 < parameters.g < parameters.p): + raise ValueError("g, p don't satisfy 1 < g < p.") + + +def _check_dsa_private_numbers(numbers): + parameters = numbers.public_numbers.parameter_numbers + _check_dsa_parameters(parameters) + if numbers.x <= 0 or numbers.x >= parameters.q: + raise ValueError("x must be > 0 and < q.") + + if numbers.public_numbers.y != pow(parameters.g, numbers.x, parameters.p): + raise ValueError("y must be equal to (g ** x % p).") + + +class DSAParameterNumbers(object): + def __init__(self, p, q, g): + if ( + not isinstance(p, six.integer_types) or + not isinstance(q, six.integer_types) or + not isinstance(g, six.integer_types) + ): + raise TypeError( + "DSAParameterNumbers p, q, and g arguments must be integers." + ) + + self._p = p + self._q = q + self._g = g + + p = utils.read_only_property("_p") + q = utils.read_only_property("_q") + g = utils.read_only_property("_g") + + def parameters(self, backend): + return backend.load_dsa_parameter_numbers(self) + + def __eq__(self, other): + if not isinstance(other, DSAParameterNumbers): + return NotImplemented + + return self.p == other.p and self.q == other.q and self.g == other.g + + def __ne__(self, other): + return not self == other + + +class DSAPublicNumbers(object): + def __init__(self, y, parameter_numbers): + if not isinstance(y, six.integer_types): + raise TypeError("DSAPublicNumbers y argument must be an integer.") + + if not isinstance(parameter_numbers, DSAParameterNumbers): + raise TypeError( + "parameter_numbers must be a DSAParameterNumbers instance." + ) + + self._y = y + self._parameter_numbers = parameter_numbers + + y = utils.read_only_property("_y") + parameter_numbers = utils.read_only_property("_parameter_numbers") + + def public_key(self, backend): + return backend.load_dsa_public_numbers(self) + + def __eq__(self, other): + if not isinstance(other, DSAPublicNumbers): + return NotImplemented + + return ( + self.y == other.y and + self.parameter_numbers == other.parameter_numbers + ) + + def __ne__(self, other): + return not self == other + + +class DSAPrivateNumbers(object): + def __init__(self, x, public_numbers): + if not isinstance(x, six.integer_types): + raise TypeError("DSAPrivateNumbers x argument must be an integer.") + + if not isinstance(public_numbers, DSAPublicNumbers): + raise TypeError( + "public_numbers must be a DSAPublicNumbers instance." 
+ ) + self._public_numbers = public_numbers + self._x = x + + x = utils.read_only_property("_x") + public_numbers = utils.read_only_property("_public_numbers") + + def private_key(self, backend): + return backend.load_dsa_private_numbers(self) + + def __eq__(self, other): + if not isinstance(other, DSAPrivateNumbers): + return NotImplemented + + return ( + self.x == other.x and self.public_numbers == other.public_numbers + ) + + def __ne__(self, other): + return not self == other diff --git a/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py b/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py new file mode 100644 index 0000000..eda7df0 --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py @@ -0,0 +1,346 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc + +import six + +from cryptography import utils + + +@six.add_metaclass(abc.ABCMeta) +class EllipticCurve(object): + @abc.abstractproperty + def name(self): + """ + The name of the curve. e.g. secp256r1. + """ + + @abc.abstractproperty + def key_size(self): + """ + The bit length of the base point of the curve. + """ + + +@six.add_metaclass(abc.ABCMeta) +class EllipticCurveSignatureAlgorithm(object): + @abc.abstractproperty + def algorithm(self): + """ + The digest algorithm used with this signature. + """ + + +@six.add_metaclass(abc.ABCMeta) +class EllipticCurvePrivateKey(object): + @abc.abstractmethod + def signer(self, signature_algorithm): + """ + Returns an AsymmetricSignatureContext used for signing data. + """ + + @abc.abstractmethod + def exchange(self, algorithm, peer_public_key): + """ + Performs a key exchange operation using the provided algorithm with the + provided peer's public key. + """ + + @abc.abstractmethod + def public_key(self): + """ + The EllipticCurvePublicKey for this private key. + """ + + @abc.abstractproperty + def curve(self): + """ + The EllipticCurve that this key is on. + """ + + +@six.add_metaclass(abc.ABCMeta) +class EllipticCurvePrivateKeyWithSerialization(EllipticCurvePrivateKey): + @abc.abstractmethod + def private_numbers(self): + """ + Returns an EllipticCurvePrivateNumbers. + """ + + @abc.abstractmethod + def private_bytes(self, encoding, format, encryption_algorithm): + """ + Returns the key serialized as bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class EllipticCurvePublicKey(object): + @abc.abstractmethod + def verifier(self, signature, signature_algorithm): + """ + Returns an AsymmetricVerificationContext used for signing data. + """ + + @abc.abstractproperty + def curve(self): + """ + The EllipticCurve that this key is on. + """ + + @abc.abstractmethod + def public_numbers(self): + """ + Returns an EllipticCurvePublicNumbers. + """ + + @abc.abstractmethod + def public_bytes(self, encoding, format): + """ + Returns the key serialized as bytes. 
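The DSA number classes behave the same way; the bit-length and y == g**x mod p checks above run only when a backend loads the numbers, so toy values are fine for illustration:

from cryptography.hazmat.primitives.asymmetric import dsa

param_nums = dsa.DSAParameterNumbers(p=283, q=47, g=pow(2, 6, 283))
pub_nums = dsa.DSAPublicNumbers(y=pow(param_nums.g, 7, 283),
                                parameter_numbers=param_nums)
priv_nums = dsa.DSAPrivateNumbers(x=7, public_numbers=pub_nums)

# Converting to a usable key is a backend operation, e.g.
# key = priv_nums.private_key(backend), which is typically where the
# _check_dsa_* validators above are applied.
assert priv_nums.public_numbers.parameter_numbers.q == 47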
+ """ + + +EllipticCurvePublicKeyWithSerialization = EllipticCurvePublicKey + + +@utils.register_interface(EllipticCurve) +class SECT571R1(object): + name = "sect571r1" + key_size = 571 + + +@utils.register_interface(EllipticCurve) +class SECT409R1(object): + name = "sect409r1" + key_size = 409 + + +@utils.register_interface(EllipticCurve) +class SECT283R1(object): + name = "sect283r1" + key_size = 283 + + +@utils.register_interface(EllipticCurve) +class SECT233R1(object): + name = "sect233r1" + key_size = 233 + + +@utils.register_interface(EllipticCurve) +class SECT163R2(object): + name = "sect163r2" + key_size = 163 + + +@utils.register_interface(EllipticCurve) +class SECT571K1(object): + name = "sect571k1" + key_size = 571 + + +@utils.register_interface(EllipticCurve) +class SECT409K1(object): + name = "sect409k1" + key_size = 409 + + +@utils.register_interface(EllipticCurve) +class SECT283K1(object): + name = "sect283k1" + key_size = 283 + + +@utils.register_interface(EllipticCurve) +class SECT233K1(object): + name = "sect233k1" + key_size = 233 + + +@utils.register_interface(EllipticCurve) +class SECT163K1(object): + name = "sect163k1" + key_size = 163 + + +@utils.register_interface(EllipticCurve) +class SECP521R1(object): + name = "secp521r1" + key_size = 521 + + +@utils.register_interface(EllipticCurve) +class SECP384R1(object): + name = "secp384r1" + key_size = 384 + + +@utils.register_interface(EllipticCurve) +class SECP256R1(object): + name = "secp256r1" + key_size = 256 + + +@utils.register_interface(EllipticCurve) +class SECP256K1(object): + name = "secp256k1" + key_size = 256 + + +@utils.register_interface(EllipticCurve) +class SECP224R1(object): + name = "secp224r1" + key_size = 224 + + +@utils.register_interface(EllipticCurve) +class SECP192R1(object): + name = "secp192r1" + key_size = 192 + + +_CURVE_TYPES = { + "prime192v1": SECP192R1, + "prime256v1": SECP256R1, + + "secp192r1": SECP192R1, + "secp224r1": SECP224R1, + "secp256r1": SECP256R1, + "secp384r1": SECP384R1, + "secp521r1": SECP521R1, + "secp256k1": SECP256K1, + + "sect163k1": SECT163K1, + "sect233k1": SECT233K1, + "sect283k1": SECT283K1, + "sect409k1": SECT409K1, + "sect571k1": SECT571K1, + + "sect163r2": SECT163R2, + "sect233r1": SECT233R1, + "sect283r1": SECT283R1, + "sect409r1": SECT409R1, + "sect571r1": SECT571R1, +} + + +@utils.register_interface(EllipticCurveSignatureAlgorithm) +class ECDSA(object): + def __init__(self, algorithm): + self._algorithm = algorithm + + algorithm = utils.read_only_property("_algorithm") + + +def generate_private_key(curve, backend): + return backend.generate_elliptic_curve_private_key(curve) + + +class EllipticCurvePublicNumbers(object): + def __init__(self, x, y, curve): + if ( + not isinstance(x, six.integer_types) or + not isinstance(y, six.integer_types) + ): + raise TypeError("x and y must be integers.") + + if not isinstance(curve, EllipticCurve): + raise TypeError("curve must provide the EllipticCurve interface.") + + self._y = y + self._x = x + self._curve = curve + + def public_key(self, backend): + return backend.load_elliptic_curve_public_numbers(self) + + def encode_point(self): + # key_size is in bits. 
Convert to bytes and round up + byte_length = (self.curve.key_size + 7) // 8 + return ( + b'\x04' + utils.int_to_bytes(self.x, byte_length) + + utils.int_to_bytes(self.y, byte_length) + ) + + @classmethod + def from_encoded_point(cls, curve, data): + if not isinstance(curve, EllipticCurve): + raise TypeError("curve must be an EllipticCurve instance") + + if data.startswith(b'\x04'): + # key_size is in bits. Convert to bytes and round up + byte_length = (curve.key_size + 7) // 8 + if len(data) == 2 * byte_length + 1: + x = utils.int_from_bytes(data[1:byte_length + 1], 'big') + y = utils.int_from_bytes(data[byte_length + 1:], 'big') + return cls(x, y, curve) + else: + raise ValueError('Invalid elliptic curve point data length') + else: + raise ValueError('Unsupported elliptic curve point type') + + curve = utils.read_only_property("_curve") + x = utils.read_only_property("_x") + y = utils.read_only_property("_y") + + def __eq__(self, other): + if not isinstance(other, EllipticCurvePublicNumbers): + return NotImplemented + + return ( + self.x == other.x and + self.y == other.y and + self.curve.name == other.curve.name and + self.curve.key_size == other.curve.key_size + ) + + def __ne__(self, other): + return not self == other + + def __repr__(self): + return ( + "".format(self) + ) + + +class EllipticCurvePrivateNumbers(object): + def __init__(self, private_value, public_numbers): + if not isinstance(private_value, six.integer_types): + raise TypeError("private_value must be an integer.") + + if not isinstance(public_numbers, EllipticCurvePublicNumbers): + raise TypeError( + "public_numbers must be an EllipticCurvePublicNumbers " + "instance." + ) + + self._private_value = private_value + self._public_numbers = public_numbers + + def private_key(self, backend): + return backend.load_elliptic_curve_private_numbers(self) + + private_value = utils.read_only_property("_private_value") + public_numbers = utils.read_only_property("_public_numbers") + + def __eq__(self, other): + if not isinstance(other, EllipticCurvePrivateNumbers): + return NotImplemented + + return ( + self.private_value == other.private_value and + self.public_numbers == other.public_numbers + ) + + def __ne__(self, other): + return not self == other + + +class ECDH(object): + pass diff --git a/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py b/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py new file mode 100644 index 0000000..c796d8e --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py @@ -0,0 +1,67 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc + +import six + +from cryptography import utils +from cryptography.hazmat.primitives import hashes + + +@six.add_metaclass(abc.ABCMeta) +class AsymmetricPadding(object): + @abc.abstractproperty + def name(self): + """ + A string naming this padding (e.g. "PSS", "PKCS1"). 
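encode_point() and from_encoded_point() round-trip the uncompressed form (0x04 || X || Y, each coordinate padded to the curve size). A quick sketch with illustrative coordinates that are not a real P-256 point:

from cryptography.hazmat.primitives.asymmetric import ec

pn = ec.EllipticCurvePublicNumbers(x=1234, y=5678, curve=ec.SECP256R1())
data = pn.encode_point()
assert data[0:1] == b"\x04" and len(data) == 1 + 2 * 32

assert ec.EllipticCurvePublicNumbers.from_encoded_point(ec.SECP256R1(), data) == pn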
+ """ + + +@utils.register_interface(AsymmetricPadding) +class PKCS1v15(object): + name = "EMSA-PKCS1-v1_5" + + +@utils.register_interface(AsymmetricPadding) +class PSS(object): + MAX_LENGTH = object() + name = "EMSA-PSS" + + def __init__(self, mgf, salt_length): + self._mgf = mgf + + if (not isinstance(salt_length, six.integer_types) and + salt_length is not self.MAX_LENGTH): + raise TypeError("salt_length must be an integer.") + + if salt_length is not self.MAX_LENGTH and salt_length < 0: + raise ValueError("salt_length must be zero or greater.") + + self._salt_length = salt_length + + +@utils.register_interface(AsymmetricPadding) +class OAEP(object): + name = "EME-OAEP" + + def __init__(self, mgf, algorithm, label): + if not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError("Expected instance of hashes.HashAlgorithm.") + + self._mgf = mgf + self._algorithm = algorithm + self._label = label + + +class MGF1(object): + MAX_LENGTH = object() + + def __init__(self, algorithm): + if not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError("Expected instance of hashes.HashAlgorithm.") + + self._algorithm = algorithm diff --git a/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py b/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py new file mode 100644 index 0000000..41b0089 --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py @@ -0,0 +1,352 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc +from fractions import gcd + +import six + +from cryptography import utils +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.backends.interfaces import RSABackend + + +@six.add_metaclass(abc.ABCMeta) +class RSAPrivateKey(object): + @abc.abstractmethod + def signer(self, padding, algorithm): + """ + Returns an AsymmetricSignatureContext used for signing data. + """ + + @abc.abstractmethod + def decrypt(self, ciphertext, padding): + """ + Decrypts the provided ciphertext. + """ + + @abc.abstractproperty + def key_size(self): + """ + The bit length of the public modulus. + """ + + @abc.abstractmethod + def public_key(self): + """ + The RSAPublicKey associated with this private key. + """ + + +@six.add_metaclass(abc.ABCMeta) +class RSAPrivateKeyWithSerialization(RSAPrivateKey): + @abc.abstractmethod + def private_numbers(self): + """ + Returns an RSAPrivateNumbers. + """ + + @abc.abstractmethod + def private_bytes(self, encoding, format, encryption_algorithm): + """ + Returns the key serialized as bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class RSAPublicKey(object): + @abc.abstractmethod + def verifier(self, signature, padding, algorithm): + """ + Returns an AsymmetricVerificationContext used for verifying signatures. + """ + + @abc.abstractmethod + def encrypt(self, plaintext, padding): + """ + Encrypts the given plaintext. + """ + + @abc.abstractproperty + def key_size(self): + """ + The bit length of the public modulus. + """ + + @abc.abstractmethod + def public_numbers(self): + """ + Returns an RSAPublicNumbers + """ + + @abc.abstractmethod + def public_bytes(self, encoding, format): + """ + Returns the key serialized as bytes. 
+ """ + + +RSAPublicKeyWithSerialization = RSAPublicKey + + +def generate_private_key(public_exponent, key_size, backend): + if not isinstance(backend, RSABackend): + raise UnsupportedAlgorithm( + "Backend object does not implement RSABackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + _verify_rsa_parameters(public_exponent, key_size) + return backend.generate_rsa_private_key(public_exponent, key_size) + + +def _verify_rsa_parameters(public_exponent, key_size): + if public_exponent < 3: + raise ValueError("public_exponent must be >= 3.") + + if public_exponent & 1 == 0: + raise ValueError("public_exponent must be odd.") + + if key_size < 512: + raise ValueError("key_size must be at least 512-bits.") + + +def _check_private_key_components(p, q, private_exponent, dmp1, dmq1, iqmp, + public_exponent, modulus): + if modulus < 3: + raise ValueError("modulus must be >= 3.") + + if p >= modulus: + raise ValueError("p must be < modulus.") + + if q >= modulus: + raise ValueError("q must be < modulus.") + + if dmp1 >= modulus: + raise ValueError("dmp1 must be < modulus.") + + if dmq1 >= modulus: + raise ValueError("dmq1 must be < modulus.") + + if iqmp >= modulus: + raise ValueError("iqmp must be < modulus.") + + if private_exponent >= modulus: + raise ValueError("private_exponent must be < modulus.") + + if public_exponent < 3 or public_exponent >= modulus: + raise ValueError("public_exponent must be >= 3 and < modulus.") + + if public_exponent & 1 == 0: + raise ValueError("public_exponent must be odd.") + + if dmp1 & 1 == 0: + raise ValueError("dmp1 must be odd.") + + if dmq1 & 1 == 0: + raise ValueError("dmq1 must be odd.") + + if p * q != modulus: + raise ValueError("p*q must equal modulus.") + + +def _check_public_key_components(e, n): + if n < 3: + raise ValueError("n must be >= 3.") + + if e < 3 or e >= n: + raise ValueError("e must be >= 3 and < n.") + + if e & 1 == 0: + raise ValueError("e must be odd.") + + +def _modinv(e, m): + """ + Modular Multiplicative Inverse. Returns x such that: (x*e) mod m == 1 + """ + x1, y1, x2, y2 = 1, 0, 0, 1 + a, b = e, m + while b > 0: + q, r = divmod(a, b) + xn, yn = x1 - q * x2, y1 - q * y2 + a, b, x1, y1, x2, y2 = b, r, x2, y2, xn, yn + return x1 % m + + +def rsa_crt_iqmp(p, q): + """ + Compute the CRT (q ** -1) % p value from RSA primes p and q. + """ + return _modinv(q, p) + + +def rsa_crt_dmp1(private_exponent, p): + """ + Compute the CRT private_exponent % (p - 1) value from the RSA + private_exponent and p. + """ + return private_exponent % (p - 1) + + +def rsa_crt_dmq1(private_exponent, q): + """ + Compute the CRT private_exponent % (q - 1) value from the RSA + private_exponent and q. + """ + return private_exponent % (q - 1) + + +# Controls the number of iterations rsa_recover_prime_factors will perform +# to obtain the prime factors. Each iteration increments by 2 so the actual +# maximum attempts is half this number. +_MAX_RECOVERY_ATTEMPTS = 1000 + + +def rsa_recover_prime_factors(n, e, d): + """ + Compute factors p and q from the private exponent d. We assume that n has + no more than two factors. This function is adapted from code in PyCrypto. + """ + # See 8.2.2(i) in Handbook of Applied Cryptography. + ktot = d * e - 1 + # The quantity d*e-1 is a multiple of phi(n), even, + # and can be represented as t*2^s. + t = ktot + while t % 2 == 0: + t = t // 2 + # Cycle through all multiplicative inverses in Zn. + # The algorithm is non-deterministic, but there is a 50% chance + # any candidate a leads to successful factoring. 
+ # See "Digitalized Signatures and Public Key Functions as Intractable + # as Factorization", M. Rabin, 1979 + spotted = False + a = 2 + while not spotted and a < _MAX_RECOVERY_ATTEMPTS: + k = t + # Cycle through all values a^{t*2^i}=a^k + while k < ktot: + cand = pow(a, k, n) + # Check if a^k is a non-trivial root of unity (mod n) + if cand != 1 and cand != (n - 1) and pow(cand, 2, n) == 1: + # We have found a number such that (cand-1)(cand+1)=0 (mod n). + # Either of the terms divides n. + p = gcd(cand + 1, n) + spotted = True + break + k *= 2 + # This value was not any good... let's try another! + a += 2 + if not spotted: + raise ValueError("Unable to compute factors p and q from exponent d.") + # Found ! + q, r = divmod(n, p) + assert r == 0 + + return (p, q) + + +class RSAPrivateNumbers(object): + def __init__(self, p, q, d, dmp1, dmq1, iqmp, + public_numbers): + if ( + not isinstance(p, six.integer_types) or + not isinstance(q, six.integer_types) or + not isinstance(d, six.integer_types) or + not isinstance(dmp1, six.integer_types) or + not isinstance(dmq1, six.integer_types) or + not isinstance(iqmp, six.integer_types) + ): + raise TypeError( + "RSAPrivateNumbers p, q, d, dmp1, dmq1, iqmp arguments must" + " all be an integers." + ) + + if not isinstance(public_numbers, RSAPublicNumbers): + raise TypeError( + "RSAPrivateNumbers public_numbers must be an RSAPublicNumbers" + " instance." + ) + + self._p = p + self._q = q + self._d = d + self._dmp1 = dmp1 + self._dmq1 = dmq1 + self._iqmp = iqmp + self._public_numbers = public_numbers + + p = utils.read_only_property("_p") + q = utils.read_only_property("_q") + d = utils.read_only_property("_d") + dmp1 = utils.read_only_property("_dmp1") + dmq1 = utils.read_only_property("_dmq1") + iqmp = utils.read_only_property("_iqmp") + public_numbers = utils.read_only_property("_public_numbers") + + def private_key(self, backend): + return backend.load_rsa_private_numbers(self) + + def __eq__(self, other): + if not isinstance(other, RSAPrivateNumbers): + return NotImplemented + + return ( + self.p == other.p and + self.q == other.q and + self.d == other.d and + self.dmp1 == other.dmp1 and + self.dmq1 == other.dmq1 and + self.iqmp == other.iqmp and + self.public_numbers == other.public_numbers + ) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(( + self.p, + self.q, + self.d, + self.dmp1, + self.dmq1, + self.iqmp, + self.public_numbers, + )) + + +class RSAPublicNumbers(object): + def __init__(self, e, n): + if ( + not isinstance(e, six.integer_types) or + not isinstance(n, six.integer_types) + ): + raise TypeError("RSAPublicNumbers arguments must be integers.") + + self._e = e + self._n = n + + e = utils.read_only_property("_e") + n = utils.read_only_property("_n") + + def public_key(self, backend): + return backend.load_rsa_public_numbers(self) + + def __repr__(self): + return "".format(self) + + def __eq__(self, other): + if not isinstance(other, RSAPublicNumbers): + return NotImplemented + + return self.e == other.e and self.n == other.n + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash((self.e, self.n)) diff --git a/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py b/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py new file mode 100644 index 0000000..bad9ab7 --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/primitives/asymmetric/utils.py @@ -0,0 +1,73 @@ +# This file is dual licensed under the terms of the 
Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import warnings + +from pyasn1.codec.der import decoder, encoder +from pyasn1.error import PyAsn1Error +from pyasn1.type import namedtype, univ + +import six + +from cryptography import utils + + +class _DSSSigValue(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('r', univ.Integer()), + namedtype.NamedType('s', univ.Integer()) + ) + + +def decode_rfc6979_signature(signature): + warnings.warn( + "decode_rfc6979_signature is deprecated and will " + "be removed in a future version, use decode_dss_signature instead " + "instead.", + utils.DeprecatedIn10, + stacklevel=2 + ) + return decode_dss_signature(signature) + + +def decode_dss_signature(signature): + try: + data, remaining = decoder.decode(signature, asn1Spec=_DSSSigValue()) + except PyAsn1Error: + raise ValueError("Invalid signature data. Unable to decode ASN.1") + + if remaining: + raise ValueError( + "The signature contains bytes after the end of the ASN.1 sequence." + ) + + r = int(data.getComponentByName('r')) + s = int(data.getComponentByName('s')) + return (r, s) + + +def encode_rfc6979_signature(r, s): + warnings.warn( + "encode_rfc6979_signature is deprecated and will " + "be removed in a future version, use encode_dss_signature instead " + "instead.", + utils.DeprecatedIn10, + stacklevel=2 + ) + return encode_dss_signature(r, s) + + +def encode_dss_signature(r, s): + if ( + not isinstance(r, six.integer_types) or + not isinstance(s, six.integer_types) + ): + raise ValueError("Both r and s must be integers") + + sig = _DSSSigValue() + sig.setComponentByName('r', r) + sig.setComponentByName('s', s) + return encoder.encode(sig) diff --git a/Lib/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py b/Lib/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py new file mode 100644 index 0000000..b5dd0ed --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py @@ -0,0 +1,20 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +from cryptography.hazmat.primitives.ciphers.base import ( + AEADCipherContext, AEADEncryptionContext, BlockCipherAlgorithm, Cipher, + CipherAlgorithm, CipherContext +) + + +__all__ = [ + "Cipher", + "CipherAlgorithm", + "BlockCipherAlgorithm", + "CipherContext", + "AEADCipherContext", + "AEADEncryptionContext", +] diff --git a/Lib/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py b/Lib/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py new file mode 100644 index 0000000..c193f79 --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py @@ -0,0 +1,140 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
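encode_dss_signature() and decode_dss_signature() are a DER round trip over a two-integer SEQUENCE, so any pair of non-negative integers can demonstrate them:

from cryptography.hazmat.primitives.asymmetric.utils import (
    decode_dss_signature, encode_dss_signature
)

sig = encode_dss_signature(r=12345, s=67890)       # DER: SEQUENCE { INTEGER, INTEGER }
assert decode_dss_signature(sig) == (12345, 67890)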
+ +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.hazmat.primitives.ciphers import ( + BlockCipherAlgorithm, CipherAlgorithm +) + + +def _verify_key_size(algorithm, key): + # Verify that the key size matches the expected key size + if len(key) * 8 not in algorithm.key_sizes: + raise ValueError("Invalid key size ({0}) for {1}.".format( + len(key) * 8, algorithm.name + )) + return key + + +@utils.register_interface(BlockCipherAlgorithm) +@utils.register_interface(CipherAlgorithm) +class AES(object): + name = "AES" + block_size = 128 + key_sizes = frozenset([128, 192, 256]) + + def __init__(self, key): + self.key = _verify_key_size(self, key) + + @property + def key_size(self): + return len(self.key) * 8 + + +@utils.register_interface(BlockCipherAlgorithm) +@utils.register_interface(CipherAlgorithm) +class Camellia(object): + name = "camellia" + block_size = 128 + key_sizes = frozenset([128, 192, 256]) + + def __init__(self, key): + self.key = _verify_key_size(self, key) + + @property + def key_size(self): + return len(self.key) * 8 + + +@utils.register_interface(BlockCipherAlgorithm) +@utils.register_interface(CipherAlgorithm) +class TripleDES(object): + name = "3DES" + block_size = 64 + key_sizes = frozenset([64, 128, 192]) + + def __init__(self, key): + if len(key) == 8: + key += key + key + elif len(key) == 16: + key += key[:8] + self.key = _verify_key_size(self, key) + + @property + def key_size(self): + return len(self.key) * 8 + + +@utils.register_interface(BlockCipherAlgorithm) +@utils.register_interface(CipherAlgorithm) +class Blowfish(object): + name = "Blowfish" + block_size = 64 + key_sizes = frozenset(range(32, 449, 8)) + + def __init__(self, key): + self.key = _verify_key_size(self, key) + + @property + def key_size(self): + return len(self.key) * 8 + + +@utils.register_interface(BlockCipherAlgorithm) +@utils.register_interface(CipherAlgorithm) +class CAST5(object): + name = "CAST5" + block_size = 64 + key_sizes = frozenset(range(40, 129, 8)) + + def __init__(self, key): + self.key = _verify_key_size(self, key) + + @property + def key_size(self): + return len(self.key) * 8 + + +@utils.register_interface(CipherAlgorithm) +class ARC4(object): + name = "RC4" + key_sizes = frozenset([40, 56, 64, 80, 128, 160, 192, 256]) + + def __init__(self, key): + self.key = _verify_key_size(self, key) + + @property + def key_size(self): + return len(self.key) * 8 + + +@utils.register_interface(CipherAlgorithm) +class IDEA(object): + name = "IDEA" + block_size = 64 + key_sizes = frozenset([128]) + + def __init__(self, key): + self.key = _verify_key_size(self, key) + + @property + def key_size(self): + return len(self.key) * 8 + + +@utils.register_interface(BlockCipherAlgorithm) +@utils.register_interface(CipherAlgorithm) +class SEED(object): + name = "SEED" + block_size = 128 + key_sizes = frozenset([128]) + + def __init__(self, key): + self.key = _verify_key_size(self, key) + + @property + def key_size(self): + return len(self.key) * 8 diff --git a/Lib/site-packages/cryptography/hazmat/primitives/ciphers/base.py b/Lib/site-packages/cryptography/hazmat/primitives/ciphers/base.py new file mode 100644 index 0000000..dae9365 --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/primitives/ciphers/base.py @@ -0,0 +1,203 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
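_verify_key_size() derives key_size from the byte length of the supplied key and rejects anything outside key_sizes; TripleDES additionally expands 8- and 16-byte keys to the full 24 bytes. For example:

import os

from cryptography.hazmat.primitives.ciphers import algorithms

assert algorithms.AES(os.urandom(32)).key_size == 256
assert algorithms.TripleDES(b"\x01" * 8).key_size == 192   # expanded to 24 bytes

try:
    algorithms.AES(b"short")                               # 40-bit key
except ValueError as exc:
    assert "Invalid key size (40) for AES." in str(exc)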
+ +from __future__ import absolute_import, division, print_function + +import abc + +import six + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, AlreadyUpdated, NotYetFinalized, UnsupportedAlgorithm, + _Reasons +) +from cryptography.hazmat.backends.interfaces import CipherBackend +from cryptography.hazmat.primitives.ciphers import modes + + +@six.add_metaclass(abc.ABCMeta) +class CipherAlgorithm(object): + @abc.abstractproperty + def name(self): + """ + A string naming this mode (e.g. "AES", "Camellia"). + """ + + @abc.abstractproperty + def key_size(self): + """ + The size of the key being used as an integer in bits (e.g. 128, 256). + """ + + +@six.add_metaclass(abc.ABCMeta) +class BlockCipherAlgorithm(object): + @abc.abstractproperty + def block_size(self): + """ + The size of a block as an integer in bits (e.g. 64, 128). + """ + + +@six.add_metaclass(abc.ABCMeta) +class CipherContext(object): + @abc.abstractmethod + def update(self, data): + """ + Processes the provided bytes through the cipher and returns the results + as bytes. + """ + + @abc.abstractmethod + def finalize(self): + """ + Returns the results of processing the final block as bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class AEADCipherContext(object): + @abc.abstractmethod + def authenticate_additional_data(self, data): + """ + Authenticates the provided bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class AEADEncryptionContext(object): + @abc.abstractproperty + def tag(self): + """ + Returns tag bytes. This is only available after encryption is + finalized. + """ + + +class Cipher(object): + def __init__(self, algorithm, mode, backend): + if not isinstance(backend, CipherBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement CipherBackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + if not isinstance(algorithm, CipherAlgorithm): + raise TypeError("Expected interface of CipherAlgorithm.") + + if mode is not None: + mode.validate_for_algorithm(algorithm) + + self.algorithm = algorithm + self.mode = mode + self._backend = backend + + def encryptor(self): + if isinstance(self.mode, modes.ModeWithAuthenticationTag): + if self.mode.tag is not None: + raise ValueError( + "Authentication tag must be None when encrypting." + ) + ctx = self._backend.create_symmetric_encryption_ctx( + self.algorithm, self.mode + ) + return self._wrap_ctx(ctx, encrypt=True) + + def decryptor(self): + if isinstance(self.mode, modes.ModeWithAuthenticationTag): + if self.mode.tag is None: + raise ValueError( + "Authentication tag must be provided when decrypting." 
+ ) + ctx = self._backend.create_symmetric_decryption_ctx( + self.algorithm, self.mode + ) + return self._wrap_ctx(ctx, encrypt=False) + + def _wrap_ctx(self, ctx, encrypt): + if isinstance(self.mode, modes.ModeWithAuthenticationTag): + if encrypt: + return _AEADEncryptionContext(ctx) + else: + return _AEADCipherContext(ctx) + else: + return _CipherContext(ctx) + + +@utils.register_interface(CipherContext) +class _CipherContext(object): + def __init__(self, ctx): + self._ctx = ctx + + def update(self, data): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + return self._ctx.update(data) + + def finalize(self): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + data = self._ctx.finalize() + self._ctx = None + return data + + +@utils.register_interface(AEADCipherContext) +@utils.register_interface(CipherContext) +class _AEADCipherContext(object): + def __init__(self, ctx): + self._ctx = ctx + self._bytes_processed = 0 + self._aad_bytes_processed = 0 + self._tag = None + self._updated = False + + def update(self, data): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + self._updated = True + self._bytes_processed += len(data) + if self._bytes_processed > self._ctx._mode._MAX_ENCRYPTED_BYTES: + raise ValueError( + "{0} has a maximum encrypted byte limit of {1}".format( + self._ctx._mode.name, self._ctx._mode._MAX_ENCRYPTED_BYTES + ) + ) + + return self._ctx.update(data) + + def finalize(self): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + data = self._ctx.finalize() + self._tag = self._ctx.tag + self._ctx = None + return data + + def authenticate_additional_data(self, data): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + if self._updated: + raise AlreadyUpdated("Update has been called on this context.") + + self._aad_bytes_processed += len(data) + if self._aad_bytes_processed > self._ctx._mode._MAX_AAD_BYTES: + raise ValueError( + "{0} has a maximum AAD byte limit of {0}".format( + self._ctx._mode.name, self._ctx._mode._MAX_AAD_BYTES + ) + ) + + self._ctx.authenticate_additional_data(data) + + +@utils.register_interface(AEADEncryptionContext) +class _AEADEncryptionContext(_AEADCipherContext): + @property + def tag(self): + if self._ctx is not None: + raise NotYetFinalized("You must finalize encryption before " + "getting the tag.") + return self._tag diff --git a/Lib/site-packages/cryptography/hazmat/primitives/ciphers/modes.py b/Lib/site-packages/cryptography/hazmat/primitives/ciphers/modes.py new file mode 100644 index 0000000..4284042 --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/primitives/ciphers/modes.py @@ -0,0 +1,164 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc + +import six + +from cryptography import utils + + +@six.add_metaclass(abc.ABCMeta) +class Mode(object): + @abc.abstractproperty + def name(self): + """ + A string naming this mode (e.g. "ECB", "CBC"). + """ + + @abc.abstractmethod + def validate_for_algorithm(self, algorithm): + """ + Checks that all the necessary invariants of this (mode, algorithm) + combination are met. 
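Cipher wires an algorithm and mode to a backend context and hands back the wrapper contexts defined above. A minimal AES-CBC sketch; default_backend is assumed from cryptography.hazmat.backends (not part of this hunk), and no padding scheme is applied, so the plaintext is already block-aligned:

import os

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

key, iv = os.urandom(32), os.urandom(16)
cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend=default_backend())

enc = cipher.encryptor()                     # a _CipherContext
ciphertext = enc.update(b"a" * 32) + enc.finalize()

dec = cipher.decryptor()
assert dec.update(ciphertext) + dec.finalize() == b"a" * 32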
+ """ + + +@six.add_metaclass(abc.ABCMeta) +class ModeWithInitializationVector(object): + @abc.abstractproperty + def initialization_vector(self): + """ + The value of the initialization vector for this mode as bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class ModeWithNonce(object): + @abc.abstractproperty + def nonce(self): + """ + The value of the nonce for this mode as bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class ModeWithAuthenticationTag(object): + @abc.abstractproperty + def tag(self): + """ + The value of the tag supplied to the constructor of this mode. + """ + + +def _check_iv_length(self, algorithm): + if len(self.initialization_vector) * 8 != algorithm.block_size: + raise ValueError("Invalid IV size ({0}) for {1}.".format( + len(self.initialization_vector), self.name + )) + + +@utils.register_interface(Mode) +@utils.register_interface(ModeWithInitializationVector) +class CBC(object): + name = "CBC" + + def __init__(self, initialization_vector): + self._initialization_vector = initialization_vector + + initialization_vector = utils.read_only_property("_initialization_vector") + validate_for_algorithm = _check_iv_length + + +@utils.register_interface(Mode) +class ECB(object): + name = "ECB" + + def validate_for_algorithm(self, algorithm): + pass + + +@utils.register_interface(Mode) +@utils.register_interface(ModeWithInitializationVector) +class OFB(object): + name = "OFB" + + def __init__(self, initialization_vector): + self._initialization_vector = initialization_vector + + initialization_vector = utils.read_only_property("_initialization_vector") + validate_for_algorithm = _check_iv_length + + +@utils.register_interface(Mode) +@utils.register_interface(ModeWithInitializationVector) +class CFB(object): + name = "CFB" + + def __init__(self, initialization_vector): + self._initialization_vector = initialization_vector + + initialization_vector = utils.read_only_property("_initialization_vector") + validate_for_algorithm = _check_iv_length + + +@utils.register_interface(Mode) +@utils.register_interface(ModeWithInitializationVector) +class CFB8(object): + name = "CFB8" + + def __init__(self, initialization_vector): + self._initialization_vector = initialization_vector + + initialization_vector = utils.read_only_property("_initialization_vector") + validate_for_algorithm = _check_iv_length + + +@utils.register_interface(Mode) +@utils.register_interface(ModeWithNonce) +class CTR(object): + name = "CTR" + + def __init__(self, nonce): + self._nonce = nonce + + nonce = utils.read_only_property("_nonce") + + def validate_for_algorithm(self, algorithm): + if len(self.nonce) * 8 != algorithm.block_size: + raise ValueError("Invalid nonce size ({0}) for {1}.".format( + len(self.nonce), self.name + )) + + +@utils.register_interface(Mode) +@utils.register_interface(ModeWithInitializationVector) +@utils.register_interface(ModeWithAuthenticationTag) +class GCM(object): + name = "GCM" + _MAX_ENCRYPTED_BYTES = (2 ** 39 - 256) // 8 + _MAX_AAD_BYTES = (2 ** 64) // 8 + + def __init__(self, initialization_vector, tag=None, min_tag_length=16): + # len(initialization_vector) must in [1, 2 ** 64), but it's impossible + # to actually construct a bytes object that large, so we don't check + # for it + if min_tag_length < 4: + raise ValueError("min_tag_length must be >= 4") + if tag is not None and len(tag) < min_tag_length: + raise ValueError( + "Authentication tag must be {0} bytes or longer.".format( + min_tag_length) + ) + + self._initialization_vector = initialization_vector + self._tag 
= tag + + tag = utils.read_only_property("_tag") + initialization_vector = utils.read_only_property("_initialization_vector") + + def validate_for_algorithm(self, algorithm): + pass diff --git a/Lib/site-packages/cryptography/hazmat/primitives/cmac.py b/Lib/site-packages/cryptography/hazmat/primitives/cmac.py new file mode 100644 index 0000000..c2038a3 --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/primitives/cmac.py @@ -0,0 +1,66 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.interfaces import CMACBackend +from cryptography.hazmat.primitives import ciphers, interfaces + + +@utils.register_interface(interfaces.MACContext) +class CMAC(object): + def __init__(self, algorithm, backend, ctx=None): + if not isinstance(backend, CMACBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement CMACBackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + if not isinstance(algorithm, ciphers.BlockCipherAlgorithm): + raise TypeError( + "Expected instance of BlockCipherAlgorithm." + ) + self._algorithm = algorithm + + self._backend = backend + if ctx is None: + self._ctx = self._backend.create_cmac_ctx(self._algorithm) + else: + self._ctx = ctx + + def update(self, data): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + if not isinstance(data, bytes): + raise TypeError("data must be bytes.") + self._ctx.update(data) + + def finalize(self): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + digest = self._ctx.finalize() + self._ctx = None + return digest + + def verify(self, signature): + if not isinstance(signature, bytes): + raise TypeError("signature must be bytes.") + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + + ctx, self._ctx = self._ctx, None + ctx.verify(signature) + + def copy(self): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + return CMAC( + self._algorithm, + backend=self._backend, + ctx=self._ctx.copy() + ) diff --git a/Lib/site-packages/cryptography/hazmat/primitives/constant_time.py b/Lib/site-packages/cryptography/hazmat/primitives/constant_time.py new file mode 100644 index 0000000..5a682ca --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/primitives/constant_time.py @@ -0,0 +1,26 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
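+
+# A minimal usage sketch, assuming only this module: bytes_eq() is the
+# constant-time comparison helper that the HMAC/CMAC verify() paths and the
+# KDF verify() methods elsewhere in this package build on. Both arguments
+# must be bytes; the values below are illustrative only.
+#
+#     from cryptography.hazmat.primitives import constant_time
+#
+#     constant_time.bytes_eq(b"mac tag", b"mac tag")    # True
+#     constant_time.bytes_eq(b"mac tag", b"other tag")  # False
+#     constant_time.bytes_eq(u"mac tag", b"mac tag")    # TypeError: both must be bytes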
+ +from __future__ import absolute_import, division, print_function + +import hmac + +from cryptography.hazmat.bindings._constant_time import lib + + +if hasattr(hmac, "compare_digest"): + def bytes_eq(a, b): + if not isinstance(a, bytes) or not isinstance(b, bytes): + raise TypeError("a and b must be bytes.") + + return hmac.compare_digest(a, b) + +else: + def bytes_eq(a, b): + if not isinstance(a, bytes) or not isinstance(b, bytes): + raise TypeError("a and b must be bytes.") + + return lib.Cryptography_constant_time_bytes_eq( + a, len(a), b, len(b) + ) == 1 diff --git a/Lib/site-packages/cryptography/hazmat/primitives/hashes.py b/Lib/site-packages/cryptography/hazmat/primitives/hashes.py new file mode 100644 index 0000000..6bc8500 --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/primitives/hashes.py @@ -0,0 +1,163 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc + +import six + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.interfaces import HashBackend + + +@six.add_metaclass(abc.ABCMeta) +class HashAlgorithm(object): + @abc.abstractproperty + def name(self): + """ + A string naming this algorithm (e.g. "sha256", "md5"). + """ + + @abc.abstractproperty + def digest_size(self): + """ + The size of the resulting digest in bytes. + """ + + @abc.abstractproperty + def block_size(self): + """ + The internal block size of the hash algorithm in bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class HashContext(object): + @abc.abstractproperty + def algorithm(self): + """ + A HashAlgorithm that will be used by this context. + """ + + @abc.abstractmethod + def update(self, data): + """ + Processes the provided bytes through the hash. + """ + + @abc.abstractmethod + def finalize(self): + """ + Finalizes the hash context and returns the hash digest as bytes. + """ + + @abc.abstractmethod + def copy(self): + """ + Return a HashContext that is a copy of the current context. 
+ """ + + +@utils.register_interface(HashContext) +class Hash(object): + def __init__(self, algorithm, backend, ctx=None): + if not isinstance(backend, HashBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement HashBackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + if not isinstance(algorithm, HashAlgorithm): + raise TypeError("Expected instance of hashes.HashAlgorithm.") + self._algorithm = algorithm + + self._backend = backend + + if ctx is None: + self._ctx = self._backend.create_hash_ctx(self.algorithm) + else: + self._ctx = ctx + + algorithm = utils.read_only_property("_algorithm") + + def update(self, data): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + if not isinstance(data, bytes): + raise TypeError("data must be bytes.") + self._ctx.update(data) + + def copy(self): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + return Hash( + self.algorithm, backend=self._backend, ctx=self._ctx.copy() + ) + + def finalize(self): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + digest = self._ctx.finalize() + self._ctx = None + return digest + + +@utils.register_interface(HashAlgorithm) +class SHA1(object): + name = "sha1" + digest_size = 20 + block_size = 64 + + +@utils.register_interface(HashAlgorithm) +class SHA224(object): + name = "sha224" + digest_size = 28 + block_size = 64 + + +@utils.register_interface(HashAlgorithm) +class SHA256(object): + name = "sha256" + digest_size = 32 + block_size = 64 + + +@utils.register_interface(HashAlgorithm) +class SHA384(object): + name = "sha384" + digest_size = 48 + block_size = 128 + + +@utils.register_interface(HashAlgorithm) +class SHA512(object): + name = "sha512" + digest_size = 64 + block_size = 128 + + +@utils.register_interface(HashAlgorithm) +class RIPEMD160(object): + name = "ripemd160" + digest_size = 20 + block_size = 64 + + +@utils.register_interface(HashAlgorithm) +class Whirlpool(object): + name = "whirlpool" + digest_size = 64 + block_size = 64 + + +@utils.register_interface(HashAlgorithm) +class MD5(object): + name = "md5" + digest_size = 16 + block_size = 64 diff --git a/Lib/site-packages/cryptography/hazmat/primitives/hmac.py b/Lib/site-packages/cryptography/hazmat/primitives/hmac.py new file mode 100644 index 0000000..15b9ee6 --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/primitives/hmac.py @@ -0,0 +1,69 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
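+
+# A minimal usage sketch for the HMAC context defined in this module (and the
+# hashes.Hash context it mirrors), assuming the default_backend() helper from
+# cryptography.hazmat.backends is available as elsewhere in this package; the
+# key and message are illustrative values only.
+#
+#     from cryptography.hazmat.backends import default_backend
+#     from cryptography.hazmat.primitives import hashes, hmac
+#
+#     backend = default_backend()
+#
+#     digest = hashes.Hash(hashes.SHA256(), backend)
+#     digest.update(b"message")
+#     digest.finalize()              # 32-byte SHA-256 digest
+#
+#     mac = hmac.HMAC(b"sixteen byte key", hashes.SHA256(), backend)
+#     mac.update(b"message")
+#     tag = mac.finalize()           # finalize() consumes the context
+#
+#     check = hmac.HMAC(b"sixteen byte key", hashes.SHA256(), backend)
+#     check.update(b"message")
+#     check.verify(tag)              # raises InvalidSignature on mismatch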
+ +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.interfaces import HMACBackend +from cryptography.hazmat.primitives import hashes, interfaces + + +@utils.register_interface(interfaces.MACContext) +@utils.register_interface(hashes.HashContext) +class HMAC(object): + def __init__(self, key, algorithm, backend, ctx=None): + if not isinstance(backend, HMACBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement HMACBackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + if not isinstance(algorithm, hashes.HashAlgorithm): + raise TypeError("Expected instance of hashes.HashAlgorithm.") + self._algorithm = algorithm + + self._backend = backend + self._key = key + if ctx is None: + self._ctx = self._backend.create_hmac_ctx(key, self.algorithm) + else: + self._ctx = ctx + + algorithm = utils.read_only_property("_algorithm") + + def update(self, data): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + if not isinstance(data, bytes): + raise TypeError("data must be bytes.") + self._ctx.update(data) + + def copy(self): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + return HMAC( + self._key, + self.algorithm, + backend=self._backend, + ctx=self._ctx.copy() + ) + + def finalize(self): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + digest = self._ctx.finalize() + self._ctx = None + return digest + + def verify(self, signature): + if not isinstance(signature, bytes): + raise TypeError("signature must be bytes.") + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized.") + + ctx, self._ctx = self._ctx, None + ctx.verify(signature) diff --git a/Lib/site-packages/cryptography/hazmat/primitives/interfaces/__init__.py b/Lib/site-packages/cryptography/hazmat/primitives/interfaces/__init__.py new file mode 100644 index 0000000..4c95190 --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/primitives/interfaces/__init__.py @@ -0,0 +1,37 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc + +import six + + +@six.add_metaclass(abc.ABCMeta) +class MACContext(object): + @abc.abstractmethod + def update(self, data): + """ + Processes the provided bytes. + """ + + @abc.abstractmethod + def finalize(self): + """ + Returns the message authentication code as bytes. + """ + + @abc.abstractmethod + def copy(self): + """ + Return a MACContext that is a copy of the current context. + """ + + @abc.abstractmethod + def verify(self, signature): + """ + Checks if the generated message authentication code matches the + signature. + """ diff --git a/Lib/site-packages/cryptography/hazmat/primitives/kdf/__init__.py b/Lib/site-packages/cryptography/hazmat/primitives/kdf/__init__.py new file mode 100644 index 0000000..2d0724e --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/primitives/kdf/__init__.py @@ -0,0 +1,26 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
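+
+# A minimal sketch of how a concrete KeyDerivationFunction is used, taking the
+# PBKDF2HMAC implementation added later in this patch as the example and
+# assuming default_backend() is available; the salt would normally come from
+# os.urandom() and the password is an illustrative value only.
+#
+#     import os
+#     from cryptography.hazmat.backends import default_backend
+#     from cryptography.hazmat.primitives import hashes
+#     from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
+#
+#     salt = os.urandom(16)
+#     kdf = PBKDF2HMAC(hashes.SHA256(), 32, salt, 100000, default_backend())
+#     key = kdf.derive(b"my great password")
+#
+#     # Each instance may only be used once; build a new one to verify.
+#     kdf = PBKDF2HMAC(hashes.SHA256(), 32, salt, 100000, default_backend())
+#     kdf.verify(b"my great password", key)   # raises InvalidKey on mismatch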
+ +from __future__ import absolute_import, division, print_function + +import abc + +import six + + +@six.add_metaclass(abc.ABCMeta) +class KeyDerivationFunction(object): + @abc.abstractmethod + def derive(self, key_material): + """ + Deterministically generates and returns a new key based on the existing + key material. + """ + + @abc.abstractmethod + def verify(self, key_material, expected_key): + """ + Checks whether the key generated by the key material matches the + expected derived key. Raises an exception if they do not match. + """ diff --git a/Lib/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py b/Lib/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py new file mode 100644 index 0000000..c6399e4 --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/primitives/kdf/concatkdf.py @@ -0,0 +1,125 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import struct + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.interfaces import HMACBackend +from cryptography.hazmat.backends.interfaces import HashBackend +from cryptography.hazmat.primitives import constant_time, hashes, hmac +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + + +def _int_to_u32be(n): + return struct.pack('>I', n) + + +def _common_args_checks(algorithm, length, otherinfo): + max_length = algorithm.digest_size * (2 ** 32 - 1) + if length > max_length: + raise ValueError( + "Can not derive keys larger than {0} bits.".format( + max_length + )) + if not (otherinfo is None or isinstance(otherinfo, bytes)): + raise TypeError("otherinfo must be bytes.") + + +def _concatkdf_derive(key_material, length, auxfn, otherinfo): + if not isinstance(key_material, bytes): + raise TypeError("key_material must be bytes.") + + output = [b""] + outlen = 0 + counter = 1 + + while (length > outlen): + h = auxfn() + h.update(_int_to_u32be(counter)) + h.update(key_material) + h.update(otherinfo) + output.append(h.finalize()) + outlen += len(output[-1]) + counter += 1 + + return b"".join(output)[:length] + + +@utils.register_interface(KeyDerivationFunction) +class ConcatKDFHash(object): + def __init__(self, algorithm, length, otherinfo, backend): + + _common_args_checks(algorithm, length, otherinfo) + self._algorithm = algorithm + self._length = length + self._otherinfo = otherinfo + if self._otherinfo is None: + self._otherinfo = b"" + + if not isinstance(backend, HashBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement HashBackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + self._backend = backend + self._used = False + + def _hash(self): + return hashes.Hash(self._algorithm, self._backend) + + def derive(self, key_material): + if self._used: + raise AlreadyFinalized + self._used = True + return _concatkdf_derive(key_material, self._length, + self._hash, self._otherinfo) + + def verify(self, key_material, expected_key): + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey + + +@utils.register_interface(KeyDerivationFunction) +class ConcatKDFHMAC(object): + def __init__(self, algorithm, length, salt, otherinfo, backend): + + _common_args_checks(algorithm, length, otherinfo) + self._algorithm = algorithm + 
self._length = length + self._otherinfo = otherinfo + if self._otherinfo is None: + self._otherinfo = b"" + + if not (salt is None or isinstance(salt, bytes)): + raise TypeError("salt must be bytes.") + if salt is None: + salt = b"\x00" * algorithm.block_size + self._salt = salt + + if not isinstance(backend, HMACBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement HMACBackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + self._backend = backend + self._used = False + + def _hmac(self): + return hmac.HMAC(self._salt, self._algorithm, self._backend) + + def derive(self, key_material): + if self._used: + raise AlreadyFinalized + self._used = True + return _concatkdf_derive(key_material, self._length, + self._hmac, self._otherinfo) + + def verify(self, key_material, expected_key): + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey diff --git a/Lib/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py b/Lib/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py new file mode 100644 index 0000000..f738bbd --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py @@ -0,0 +1,116 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import six + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.interfaces import HMACBackend +from cryptography.hazmat.primitives import constant_time, hmac +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + + +@utils.register_interface(KeyDerivationFunction) +class HKDF(object): + def __init__(self, algorithm, length, salt, info, backend): + if not isinstance(backend, HMACBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement HMACBackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + self._algorithm = algorithm + + if not (salt is None or isinstance(salt, bytes)): + raise TypeError("salt must be bytes.") + + if salt is None: + salt = b"\x00" * (self._algorithm.digest_size // 8) + + self._salt = salt + + self._backend = backend + + self._hkdf_expand = HKDFExpand(self._algorithm, length, info, backend) + + def _extract(self, key_material): + h = hmac.HMAC(self._salt, self._algorithm, backend=self._backend) + h.update(key_material) + return h.finalize() + + def derive(self, key_material): + if not isinstance(key_material, bytes): + raise TypeError("key_material must be bytes.") + + return self._hkdf_expand.derive(self._extract(key_material)) + + def verify(self, key_material, expected_key): + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey + + +@utils.register_interface(KeyDerivationFunction) +class HKDFExpand(object): + def __init__(self, algorithm, length, info, backend): + if not isinstance(backend, HMACBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement HMACBackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + self._algorithm = algorithm + + self._backend = backend + + max_length = 255 * (algorithm.digest_size // 8) + + if length > max_length: + raise ValueError( + "Can not derive keys larger than {0} octets.".format( + max_length + )) + + self._length = length + + if not (info is None or isinstance(info, bytes)): + raise TypeError("info must 
be bytes.") + + if info is None: + info = b"" + + self._info = info + + self._used = False + + def _expand(self, key_material): + output = [b""] + counter = 1 + + while (self._algorithm.digest_size // 8) * len(output) < self._length: + h = hmac.HMAC(key_material, self._algorithm, backend=self._backend) + h.update(output[-1]) + h.update(self._info) + h.update(six.int2byte(counter)) + output.append(h.finalize()) + counter += 1 + + return b"".join(output)[:self._length] + + def derive(self, key_material): + if not isinstance(key_material, bytes): + raise TypeError("key_material must be bytes.") + + if self._used: + raise AlreadyFinalized + + self._used = True + return self._expand(key_material) + + def verify(self, key_material, expected_key): + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey diff --git a/Lib/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py b/Lib/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py new file mode 100644 index 0000000..f8ce7a3 --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py @@ -0,0 +1,58 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.interfaces import PBKDF2HMACBackend +from cryptography.hazmat.primitives import constant_time +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + + +@utils.register_interface(KeyDerivationFunction) +class PBKDF2HMAC(object): + def __init__(self, algorithm, length, salt, iterations, backend): + if not isinstance(backend, PBKDF2HMACBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement PBKDF2HMACBackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + if not backend.pbkdf2_hmac_supported(algorithm): + raise UnsupportedAlgorithm( + "{0} is not supported for PBKDF2 by this backend.".format( + algorithm.name), + _Reasons.UNSUPPORTED_HASH + ) + self._used = False + self._algorithm = algorithm + self._length = length + if not isinstance(salt, bytes): + raise TypeError("salt must be bytes.") + self._salt = salt + self._iterations = iterations + self._backend = backend + + def derive(self, key_material): + if self._used: + raise AlreadyFinalized("PBKDF2 instances can only be used once.") + self._used = True + + if not isinstance(key_material, bytes): + raise TypeError("key_material must be bytes.") + return self._backend.derive_pbkdf2_hmac( + self._algorithm, + self._length, + self._salt, + self._iterations, + key_material + ) + + def verify(self, key_material, expected_key): + derived_key = self.derive(key_material) + if not constant_time.bytes_eq(derived_key, expected_key): + raise InvalidKey("Keys do not match.") diff --git a/Lib/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py b/Lib/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py new file mode 100644 index 0000000..83789b3 --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/primitives/kdf/x963kdf.py @@ -0,0 +1,70 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import absolute_import, division, print_function + +import struct + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.interfaces import HashBackend +from cryptography.hazmat.primitives import constant_time, hashes +from cryptography.hazmat.primitives.kdf import KeyDerivationFunction + + +def _int_to_u32be(n): + return struct.pack('>I', n) + + +@utils.register_interface(KeyDerivationFunction) +class X963KDF(object): + def __init__(self, algorithm, length, sharedinfo, backend): + + max_len = algorithm.digest_size * (2 ** 32 - 1) + if length > max_len: + raise ValueError( + "Can not derive keys larger than {0} bits.".format(max_len)) + if not (sharedinfo is None or isinstance(sharedinfo, bytes)): + raise TypeError("sharedinfo must be bytes.") + self._algorithm = algorithm + self._length = length + self._sharedinfo = sharedinfo + + if not isinstance(backend, HashBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement HashBackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + self._backend = backend + self._used = False + + def derive(self, key_material): + if self._used: + raise AlreadyFinalized + self._used = True + + if not isinstance(key_material, bytes): + raise TypeError("key_material must be bytes.") + + output = [b""] + outlen = 0 + counter = 1 + + while self._length > outlen: + h = hashes.Hash(self._algorithm, self._backend) + h.update(key_material) + h.update(_int_to_u32be(counter)) + if self._sharedinfo is not None: + h.update(self._sharedinfo) + output.append(h.finalize()) + outlen += len(output[-1]) + counter += 1 + + return b"".join(output)[:self._length] + + def verify(self, key_material, expected_key): + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey diff --git a/Lib/site-packages/cryptography/hazmat/primitives/keywrap.py b/Lib/site-packages/cryptography/hazmat/primitives/keywrap.py new file mode 100644 index 0000000..6e79ab6 --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/primitives/keywrap.py @@ -0,0 +1,85 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
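+
+# A minimal usage sketch for the RFC 3394 key wrapping helpers defined below,
+# assuming default_backend() is available; the keys are illustrative random
+# values.
+#
+#     import os
+#     from cryptography.hazmat.backends import default_backend
+#     from cryptography.hazmat.primitives.keywrap import (
+#         aes_key_wrap, aes_key_unwrap
+#     )
+#
+#     backend = default_backend()
+#     kek = os.urandom(16)    # wrapping key: 16, 24 or 32 bytes
+#     cek = os.urandom(32)    # key being wrapped: >= 16 bytes, multiple of 8
+#
+#     wrapped = aes_key_wrap(kek, cek, backend)       # len(cek) + 8 bytes
+#     assert aes_key_unwrap(kek, wrapped, backend) == cek
+#     # aes_key_unwrap raises InvalidUnwrap if the KEK or ciphertext is wrong.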
+ +from __future__ import absolute_import, division, print_function + +import struct + +from cryptography.hazmat.primitives.ciphers import Cipher +from cryptography.hazmat.primitives.ciphers.algorithms import AES +from cryptography.hazmat.primitives.ciphers.modes import ECB +from cryptography.hazmat.primitives.constant_time import bytes_eq + + +def aes_key_wrap(wrapping_key, key_to_wrap, backend): + if len(wrapping_key) not in [16, 24, 32]: + raise ValueError("The wrapping key must be a valid AES key length") + + if len(key_to_wrap) < 16: + raise ValueError("The key to wrap must be at least 16 bytes") + + if len(key_to_wrap) % 8 != 0: + raise ValueError("The key to wrap must be a multiple of 8 bytes") + + # RFC 3394 Key Wrap - 2.2.1 (index method) + encryptor = Cipher(AES(wrapping_key), ECB(), backend).encryptor() + a = b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6" + r = [key_to_wrap[i:i + 8] for i in range(0, len(key_to_wrap), 8)] + n = len(r) + for j in range(6): + for i in range(n): + # every encryption operation is a discrete 16 byte chunk (because + # AES has a 128-bit block size) and since we're using ECB it is + # safe to reuse the encryptor for the entire operation + b = encryptor.update(a + r[i]) + # pack/unpack are safe as these are always 64-bit chunks + a = struct.pack( + ">Q", struct.unpack(">Q", b[:8])[0] ^ ((n * j) + i + 1) + ) + r[i] = b[-8:] + + assert encryptor.finalize() == b"" + + return a + b"".join(r) + + +def aes_key_unwrap(wrapping_key, wrapped_key, backend): + if len(wrapped_key) < 24: + raise ValueError("Must be at least 24 bytes") + + if len(wrapped_key) % 8 != 0: + raise ValueError("The wrapped key must be a multiple of 8 bytes") + + if len(wrapping_key) not in [16, 24, 32]: + raise ValueError("The wrapping key must be a valid AES key length") + + # Implement RFC 3394 Key Unwrap - 2.2.2 (index method) + decryptor = Cipher(AES(wrapping_key), ECB(), backend).decryptor() + aiv = b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6" + + r = [wrapped_key[i:i + 8] for i in range(0, len(wrapped_key), 8)] + a = r.pop(0) + n = len(r) + for j in reversed(range(6)): + for i in reversed(range(n)): + # pack/unpack are safe as these are always 64-bit chunks + atr = struct.pack( + ">Q", struct.unpack(">Q", a)[0] ^ ((n * j) + i + 1) + ) + r[i] + # every decryption operation is a discrete 16 byte chunk so + # it is safe to reuse the decryptor for the entire operation + b = decryptor.update(atr) + a = b[:8] + r[i] = b[-8:] + + assert decryptor.finalize() == b"" + + if not bytes_eq(a, aiv): + raise InvalidUnwrap() + + return b"".join(r) + + +class InvalidUnwrap(Exception): + pass diff --git a/Lib/site-packages/cryptography/hazmat/primitives/padding.py b/Lib/site-packages/cryptography/hazmat/primitives/padding.py new file mode 100644 index 0000000..f6491eb --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/primitives/padding.py @@ -0,0 +1,124 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc + +import six + +from cryptography import utils +from cryptography.exceptions import AlreadyFinalized +from cryptography.hazmat.bindings._padding import lib + + +@six.add_metaclass(abc.ABCMeta) +class PaddingContext(object): + @abc.abstractmethod + def update(self, data): + """ + Pads the provided bytes and returns any available data as bytes. 
+ """ + + @abc.abstractmethod + def finalize(self): + """ + Finalize the padding, returns bytes. + """ + + +class PKCS7(object): + def __init__(self, block_size): + if not (0 <= block_size < 256): + raise ValueError("block_size must be in range(0, 256).") + + if block_size % 8 != 0: + raise ValueError("block_size must be a multiple of 8.") + + self.block_size = block_size + + def padder(self): + return _PKCS7PaddingContext(self.block_size) + + def unpadder(self): + return _PKCS7UnpaddingContext(self.block_size) + + +@utils.register_interface(PaddingContext) +class _PKCS7PaddingContext(object): + def __init__(self, block_size): + self.block_size = block_size + # TODO: more copies than necessary, we should use zero-buffer (#193) + self._buffer = b"" + + def update(self, data): + if self._buffer is None: + raise AlreadyFinalized("Context was already finalized.") + + if not isinstance(data, bytes): + raise TypeError("data must be bytes.") + + self._buffer += data + + finished_blocks = len(self._buffer) // (self.block_size // 8) + + result = self._buffer[:finished_blocks * (self.block_size // 8)] + self._buffer = self._buffer[finished_blocks * (self.block_size // 8):] + + return result + + def finalize(self): + if self._buffer is None: + raise AlreadyFinalized("Context was already finalized.") + + pad_size = self.block_size // 8 - len(self._buffer) + result = self._buffer + six.int2byte(pad_size) * pad_size + self._buffer = None + return result + + +@utils.register_interface(PaddingContext) +class _PKCS7UnpaddingContext(object): + def __init__(self, block_size): + self.block_size = block_size + # TODO: more copies than necessary, we should use zero-buffer (#193) + self._buffer = b"" + + def update(self, data): + if self._buffer is None: + raise AlreadyFinalized("Context was already finalized.") + + if not isinstance(data, bytes): + raise TypeError("data must be bytes.") + + self._buffer += data + + finished_blocks = max( + len(self._buffer) // (self.block_size // 8) - 1, + 0 + ) + + result = self._buffer[:finished_blocks * (self.block_size // 8)] + self._buffer = self._buffer[finished_blocks * (self.block_size // 8):] + + return result + + def finalize(self): + if self._buffer is None: + raise AlreadyFinalized("Context was already finalized.") + + if len(self._buffer) != self.block_size // 8: + raise ValueError("Invalid padding bytes.") + + valid = lib.Cryptography_check_pkcs7_padding( + self._buffer, self.block_size // 8 + ) + + if not valid: + raise ValueError("Invalid padding bytes.") + + pad_size = six.indexbytes(self._buffer, -1) + res = self._buffer[:-pad_size] + self._buffer = None + return res diff --git a/Lib/site-packages/cryptography/hazmat/primitives/serialization.py b/Lib/site-packages/cryptography/hazmat/primitives/serialization.py new file mode 100644 index 0000000..fc50456 --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/primitives/serialization.py @@ -0,0 +1,188 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
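+
+# A minimal round-trip sketch for the loaders defined below, assuming
+# default_backend() is available and supports RSA key generation and key
+# serialization (as the upstream OpenSSL backend of this vintage does); the
+# key size and format choices are illustrative only.
+#
+#     from cryptography.hazmat.backends import default_backend
+#     from cryptography.hazmat.primitives import serialization
+#     from cryptography.hazmat.primitives.asymmetric import rsa
+#
+#     backend = default_backend()
+#     key = rsa.generate_private_key(65537, 2048, backend)
+#
+#     pem = key.private_bytes(
+#         serialization.Encoding.PEM,
+#         serialization.PrivateFormat.PKCS8,
+#         serialization.NoEncryption()
+#     )
+#     loaded = serialization.load_pem_private_key(pem, None, backend)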
+ +from __future__ import absolute_import, division, print_function + +import abc +import base64 +import struct +from enum import Enum + +import six + +from cryptography import utils +from cryptography.exceptions import UnsupportedAlgorithm +from cryptography.hazmat.primitives.asymmetric import dsa, ec, rsa + + +def load_pem_private_key(data, password, backend): + return backend.load_pem_private_key(data, password) + + +def load_pem_public_key(data, backend): + return backend.load_pem_public_key(data) + + +def load_der_private_key(data, password, backend): + return backend.load_der_private_key(data, password) + + +def load_der_public_key(data, backend): + return backend.load_der_public_key(data) + + +def load_ssh_public_key(data, backend): + key_parts = data.split(b' ', 2) + + if len(key_parts) < 2: + raise ValueError( + 'Key is not in the proper format or contains extra data.') + + key_type = key_parts[0] + + if key_type == b'ssh-rsa': + loader = _load_ssh_rsa_public_key + elif key_type == b'ssh-dss': + loader = _load_ssh_dss_public_key + elif key_type in [ + b'ecdsa-sha2-nistp256', b'ecdsa-sha2-nistp384', b'ecdsa-sha2-nistp521', + ]: + loader = _load_ssh_ecdsa_public_key + else: + raise UnsupportedAlgorithm('Key type is not supported.') + + key_body = key_parts[1] + + try: + decoded_data = base64.b64decode(key_body) + except TypeError: + raise ValueError('Key is not in the proper format.') + + inner_key_type, rest = _read_next_string(decoded_data) + + if inner_key_type != key_type: + raise ValueError( + 'Key header and key body contain different key type values.' + ) + + return loader(key_type, rest, backend) + + +def _load_ssh_rsa_public_key(key_type, decoded_data, backend): + e, rest = _read_next_mpint(decoded_data) + n, rest = _read_next_mpint(rest) + + if rest: + raise ValueError('Key body contains extra bytes.') + + return rsa.RSAPublicNumbers(e, n).public_key(backend) + + +def _load_ssh_dss_public_key(key_type, decoded_data, backend): + p, rest = _read_next_mpint(decoded_data) + q, rest = _read_next_mpint(rest) + g, rest = _read_next_mpint(rest) + y, rest = _read_next_mpint(rest) + + if rest: + raise ValueError('Key body contains extra bytes.') + + parameter_numbers = dsa.DSAParameterNumbers(p, q, g) + public_numbers = dsa.DSAPublicNumbers(y, parameter_numbers) + + return public_numbers.public_key(backend) + + +def _load_ssh_ecdsa_public_key(expected_key_type, decoded_data, backend): + curve_name, rest = _read_next_string(decoded_data) + data, rest = _read_next_string(rest) + + if expected_key_type != b"ecdsa-sha2-" + curve_name: + raise ValueError( + 'Key header and key body contain different key type values.' + ) + + if rest: + raise ValueError('Key body contains extra bytes.') + + curve = { + b"nistp256": ec.SECP256R1, + b"nistp384": ec.SECP384R1, + b"nistp521": ec.SECP521R1, + }[curve_name]() + + if six.indexbytes(data, 0) != 4: + raise NotImplementedError( + "Compressed elliptic curve points are not supported" + ) + + # key_size is in bits, and sometimes it's not evenly divisible by 8, so we + # add 7 to round up the number of bytes. + if len(data) != 1 + 2 * ((curve.key_size + 7) // 8): + raise ValueError("Malformed key bytes") + + x = utils.int_from_bytes( + data[1:1 + (curve.key_size + 7) // 8], byteorder='big' + ) + y = utils.int_from_bytes( + data[1 + (curve.key_size + 7) // 8:], byteorder='big' + ) + return ec.EllipticCurvePublicNumbers(x, y, curve).public_key(backend) + + +def _read_next_string(data): + """ + Retrieves the next RFC 4251 string value from the data. 
+ + While the RFC calls these strings, in Python they are bytes objects. + """ + str_len, = struct.unpack('>I', data[:4]) + return data[4:4 + str_len], data[4 + str_len:] + + +def _read_next_mpint(data): + """ + Reads the next mpint from the data. + + Currently, all mpints are interpreted as unsigned. + """ + mpint_data, rest = _read_next_string(data) + + return ( + utils.int_from_bytes(mpint_data, byteorder='big', signed=False), rest + ) + + +class Encoding(Enum): + PEM = "PEM" + DER = "DER" + + +class PrivateFormat(Enum): + PKCS8 = "PKCS8" + TraditionalOpenSSL = "TraditionalOpenSSL" + + +class PublicFormat(Enum): + SubjectPublicKeyInfo = "X.509 subjectPublicKeyInfo with PKCS#1" + PKCS1 = "Raw PKCS#1" + + +@six.add_metaclass(abc.ABCMeta) +class KeySerializationEncryption(object): + pass + + +@utils.register_interface(KeySerializationEncryption) +class BestAvailableEncryption(object): + def __init__(self, password): + if not isinstance(password, bytes) or len(password) == 0: + raise ValueError("Password must be 1 or more bytes.") + + self.password = password + + +@utils.register_interface(KeySerializationEncryption) +class NoEncryption(object): + pass diff --git a/Lib/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py b/Lib/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py new file mode 100644 index 0000000..e71f9e6 --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py @@ -0,0 +1,9 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + + +class InvalidToken(Exception): + pass diff --git a/Lib/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py b/Lib/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py new file mode 100644 index 0000000..12bc766 --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py @@ -0,0 +1,67 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
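+
+# A minimal usage sketch for the HOTP class defined below (RFC 4226), assuming
+# default_backend() is available; the key is an illustrative random value and
+# must be at least 16 bytes.
+#
+#     import os
+#     from cryptography.hazmat.backends import default_backend
+#     from cryptography.hazmat.primitives.hashes import SHA1
+#     from cryptography.hazmat.primitives.twofactor.hotp import HOTP
+#
+#     key = os.urandom(20)
+#     hotp = HOTP(key, 6, SHA1(), default_backend())
+#
+#     token = hotp.generate(0)    # six-digit bytes value for counter 0
+#     hotp.verify(token, 0)       # raises InvalidToken on mismatch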
+ +from __future__ import absolute_import, division, print_function + +import struct + +import six + +from cryptography.exceptions import ( + UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.interfaces import HMACBackend +from cryptography.hazmat.primitives import constant_time, hmac +from cryptography.hazmat.primitives.hashes import SHA1, SHA256, SHA512 +from cryptography.hazmat.primitives.twofactor import InvalidToken +from cryptography.hazmat.primitives.twofactor.utils import _generate_uri + + +class HOTP(object): + def __init__(self, key, length, algorithm, backend): + if not isinstance(backend, HMACBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement HMACBackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + if len(key) < 16: + raise ValueError("Key length has to be at least 128 bits.") + + if not isinstance(length, six.integer_types): + raise TypeError("Length parameter must be an integer type.") + + if length < 6 or length > 8: + raise ValueError("Length of HOTP has to be between 6 to 8.") + + if not isinstance(algorithm, (SHA1, SHA256, SHA512)): + raise TypeError("Algorithm must be SHA1, SHA256 or SHA512.") + + self._key = key + self._length = length + self._algorithm = algorithm + self._backend = backend + + def generate(self, counter): + truncated_value = self._dynamic_truncate(counter) + hotp = truncated_value % (10 ** self._length) + return "{0:0{1}}".format(hotp, self._length).encode() + + def verify(self, hotp, counter): + if not constant_time.bytes_eq(self.generate(counter), hotp): + raise InvalidToken("Supplied HOTP value does not match.") + + def _dynamic_truncate(self, counter): + ctx = hmac.HMAC(self._key, self._algorithm, self._backend) + ctx.update(struct.pack(">Q", counter)) + hmac_value = ctx.finalize() + + offset = six.indexbytes(hmac_value, len(hmac_value) - 1) & 0b1111 + p = hmac_value[offset:offset + 4] + return struct.unpack(">I", p)[0] & 0x7fffffff + + def get_provisioning_uri(self, account_name, counter, issuer): + return _generate_uri(self, "hotp", account_name, issuer, [ + ("counter", int(counter)), + ]) diff --git a/Lib/site-packages/cryptography/hazmat/primitives/twofactor/totp.py b/Lib/site-packages/cryptography/hazmat/primitives/twofactor/totp.py new file mode 100644 index 0000000..6070590 --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/primitives/twofactor/totp.py @@ -0,0 +1,39 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
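+
+# A minimal usage sketch for the TOTP class defined below (RFC 6238), which
+# drives the HOTP class above with a time-based counter; assumes
+# default_backend() is available, and uses an illustrative random key.
+#
+#     import os
+#     import time
+#     from cryptography.hazmat.backends import default_backend
+#     from cryptography.hazmat.primitives.hashes import SHA1
+#     from cryptography.hazmat.primitives.twofactor.totp import TOTP
+#
+#     key = os.urandom(20)
+#     totp = TOTP(key, 8, SHA1(), 30, default_backend())
+#
+#     now = time.time()
+#     token = totp.generate(now)  # 8-digit value for the current 30s window
+#     totp.verify(token, now)     # raises InvalidToken on mismatch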
+ +from __future__ import absolute_import, division, print_function + +from cryptography.exceptions import ( + UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.interfaces import HMACBackend +from cryptography.hazmat.primitives import constant_time +from cryptography.hazmat.primitives.twofactor import InvalidToken +from cryptography.hazmat.primitives.twofactor.hotp import HOTP +from cryptography.hazmat.primitives.twofactor.utils import _generate_uri + + +class TOTP(object): + def __init__(self, key, length, algorithm, time_step, backend): + if not isinstance(backend, HMACBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement HMACBackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + self._time_step = time_step + self._hotp = HOTP(key, length, algorithm, backend) + + def generate(self, time): + counter = int(time / self._time_step) + return self._hotp.generate(counter) + + def verify(self, totp, time): + if not constant_time.bytes_eq(self.generate(time), totp): + raise InvalidToken("Supplied TOTP value does not match.") + + def get_provisioning_uri(self, account_name, issuer): + return _generate_uri(self._hotp, "totp", account_name, issuer, [ + ("period", int(self._time_step)), + ]) diff --git a/Lib/site-packages/cryptography/hazmat/primitives/twofactor/utils.py b/Lib/site-packages/cryptography/hazmat/primitives/twofactor/utils.py new file mode 100644 index 0000000..0ed8c4c --- /dev/null +++ b/Lib/site-packages/cryptography/hazmat/primitives/twofactor/utils.py @@ -0,0 +1,30 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import base64 + +from six.moves.urllib.parse import quote, urlencode + + +def _generate_uri(hotp, type_name, account_name, issuer, extra_parameters): + parameters = [ + ("digits", hotp._length), + ("secret", base64.b32encode(hotp._key)), + ("algorithm", hotp._algorithm.name.upper()), + ] + + if issuer is not None: + parameters.append(("issuer", issuer)) + + parameters.extend(extra_parameters) + + uriparts = { + "type": type_name, + "label": ("%s:%s" % (quote(issuer), quote(account_name)) if issuer + else quote(account_name)), + "parameters": urlencode(parameters), + } + return "otpauth://{type}/{label}?{parameters}".format(**uriparts) diff --git a/Lib/site-packages/cryptography/utils.py b/Lib/site-packages/cryptography/utils.py new file mode 100644 index 0000000..b85d50d --- /dev/null +++ b/Lib/site-packages/cryptography/utils.py @@ -0,0 +1,129 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc +import binascii +import inspect +import struct +import sys +import warnings + + +# the functions deprecated in 1.0 are on an arbitrarily extended deprecation +# cycle and should not be removed until we agree on when that cycle ends. 
+DeprecatedIn10 = DeprecationWarning +DeprecatedIn12 = PendingDeprecationWarning + + +def read_only_property(name): + return property(lambda self: getattr(self, name)) + + +def register_interface(iface): + def register_decorator(klass): + verify_interface(iface, klass) + iface.register(klass) + return klass + return register_decorator + + +if hasattr(int, "from_bytes"): + int_from_bytes = int.from_bytes +else: + def int_from_bytes(data, byteorder, signed=False): + assert byteorder == 'big' + assert not signed + + if len(data) % 4 != 0: + data = (b'\x00' * (4 - (len(data) % 4))) + data + + result = 0 + + while len(data) > 0: + digit, = struct.unpack('>I', data[:4]) + result = (result << 32) + digit + data = data[4:] + + return result + + +def int_to_bytes(integer, length=None): + hex_string = '%x' % integer + if length is None: + n = len(hex_string) + else: + n = length * 2 + return binascii.unhexlify(hex_string.zfill(n + (n & 1))) + + +class InterfaceNotImplemented(Exception): + pass + + +if hasattr(inspect, "signature"): + signature = inspect.signature +else: + signature = inspect.getargspec + + +def verify_interface(iface, klass): + for method in iface.__abstractmethods__: + if not hasattr(klass, method): + raise InterfaceNotImplemented( + "{0} is missing a {1!r} method".format(klass, method) + ) + if isinstance(getattr(iface, method), abc.abstractproperty): + # Can't properly verify these yet. + continue + sig = signature(getattr(iface, method)) + actual = signature(getattr(klass, method)) + if sig != actual: + raise InterfaceNotImplemented( + "{0}.{1}'s signature differs from the expected. Expected: " + "{2!r}. Received: {3!r}".format( + klass, method, sig, actual + ) + ) + + +if sys.version_info >= (2, 7): + def bit_length(x): + return x.bit_length() +else: + def bit_length(x): + return len(bin(x)) - (2 + (x <= 0)) + + +class _DeprecatedValue(object): + def __init__(self, value, message, warning_class): + self.value = value + self.message = message + self.warning_class = warning_class + + +class _ModuleWithDeprecations(object): + def __init__(self, module): + self.__dict__["_module"] = module + + def __getattr__(self, attr): + obj = getattr(self._module, attr) + if isinstance(obj, _DeprecatedValue): + warnings.warn(obj.message, obj.warning_class, stacklevel=2) + obj = obj.value + return obj + + def __setattr__(self, attr, value): + setattr(self._module, attr, value) + + def __dir__(self): + return ["_module"] + dir(self._module) + + +def deprecated(value, module_name, message, warning_class): + module = sys.modules[module_name] + if not isinstance(module, _ModuleWithDeprecations): + sys.modules[module_name] = module = _ModuleWithDeprecations(module) + return _DeprecatedValue(value, message, warning_class) diff --git a/Lib/site-packages/cryptography/x509/__init__.py b/Lib/site-packages/cryptography/x509/__init__.py new file mode 100644 index 0000000..a1deb7f --- /dev/null +++ b/Lib/site-packages/cryptography/x509/__init__.py @@ -0,0 +1,173 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
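+
+# A minimal sketch of the top-level loaders re-exported by this package,
+# assuming a PEM-encoded certificate is available on disk and that
+# default_backend() is usable; the file name is purely illustrative.
+#
+#     from cryptography import x509
+#     from cryptography.hazmat.backends import default_backend
+#
+#     with open("server.crt", "rb") as f:    # illustrative path
+#         cert = x509.load_pem_x509_certificate(f.read(), default_backend())
+#
+#     cert.subject                   # an x509.Name
+#     cert.not_valid_after           # a UTC datetime
+#     cert.signature_hash_algorithm  # a HashAlgorithm instance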
+ +from __future__ import absolute_import, division, print_function + +from cryptography.x509.base import ( + Certificate, CertificateBuilder, CertificateRevocationList, + CertificateRevocationListBuilder, + CertificateSigningRequest, CertificateSigningRequestBuilder, + InvalidVersion, RevokedCertificate, RevokedCertificateBuilder, + Version, load_der_x509_certificate, load_der_x509_crl, load_der_x509_csr, + load_pem_x509_certificate, load_pem_x509_crl, load_pem_x509_csr, +) +from cryptography.x509.extensions import ( + AccessDescription, AuthorityInformationAccess, + AuthorityKeyIdentifier, BasicConstraints, CRLDistributionPoints, + CRLNumber, CRLReason, CertificateIssuer, CertificatePolicies, + DistributionPoint, DuplicateExtension, ExtendedKeyUsage, Extension, + ExtensionNotFound, ExtensionType, Extensions, GeneralNames, + InhibitAnyPolicy, InvalidityDate, IssuerAlternativeName, KeyUsage, + NameConstraints, NoticeReference, OCSPNoCheck, PolicyInformation, + ReasonFlags, SubjectAlternativeName, SubjectKeyIdentifier, + UnrecognizedExtension, UnsupportedExtension, UserNotice +) +from cryptography.x509.general_name import ( + DNSName, DirectoryName, GeneralName, IPAddress, OtherName, RFC822Name, + RegisteredID, UniformResourceIdentifier, UnsupportedGeneralNameType, + _GENERAL_NAMES +) +from cryptography.x509.name import Name, NameAttribute +from cryptography.x509.oid import ( + AuthorityInformationAccessOID, CRLEntryExtensionOID, CRLExtensionOID, + CertificatePoliciesOID, ExtendedKeyUsageOID, ExtensionOID, NameOID, + ObjectIdentifier, SignatureAlgorithmOID, _SIG_OIDS_TO_HASH +) + + +OID_AUTHORITY_INFORMATION_ACCESS = ExtensionOID.AUTHORITY_INFORMATION_ACCESS +OID_AUTHORITY_KEY_IDENTIFIER = ExtensionOID.AUTHORITY_KEY_IDENTIFIER +OID_BASIC_CONSTRAINTS = ExtensionOID.BASIC_CONSTRAINTS +OID_CERTIFICATE_POLICIES = ExtensionOID.CERTIFICATE_POLICIES +OID_CRL_DISTRIBUTION_POINTS = ExtensionOID.CRL_DISTRIBUTION_POINTS +OID_EXTENDED_KEY_USAGE = ExtensionOID.EXTENDED_KEY_USAGE +OID_FRESHEST_CRL = ExtensionOID.FRESHEST_CRL +OID_INHIBIT_ANY_POLICY = ExtensionOID.INHIBIT_ANY_POLICY +OID_ISSUER_ALTERNATIVE_NAME = ExtensionOID.ISSUER_ALTERNATIVE_NAME +OID_KEY_USAGE = ExtensionOID.KEY_USAGE +OID_NAME_CONSTRAINTS = ExtensionOID.NAME_CONSTRAINTS +OID_OCSP_NO_CHECK = ExtensionOID.OCSP_NO_CHECK +OID_POLICY_CONSTRAINTS = ExtensionOID.POLICY_CONSTRAINTS +OID_POLICY_MAPPINGS = ExtensionOID.POLICY_MAPPINGS +OID_SUBJECT_ALTERNATIVE_NAME = ExtensionOID.SUBJECT_ALTERNATIVE_NAME +OID_SUBJECT_DIRECTORY_ATTRIBUTES = ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES +OID_SUBJECT_INFORMATION_ACCESS = ExtensionOID.SUBJECT_INFORMATION_ACCESS +OID_SUBJECT_KEY_IDENTIFIER = ExtensionOID.SUBJECT_KEY_IDENTIFIER + +OID_DSA_WITH_SHA1 = SignatureAlgorithmOID.DSA_WITH_SHA1 +OID_DSA_WITH_SHA224 = SignatureAlgorithmOID.DSA_WITH_SHA224 +OID_DSA_WITH_SHA256 = SignatureAlgorithmOID.DSA_WITH_SHA256 +OID_ECDSA_WITH_SHA1 = SignatureAlgorithmOID.ECDSA_WITH_SHA1 +OID_ECDSA_WITH_SHA224 = SignatureAlgorithmOID.ECDSA_WITH_SHA224 +OID_ECDSA_WITH_SHA256 = SignatureAlgorithmOID.ECDSA_WITH_SHA256 +OID_ECDSA_WITH_SHA384 = SignatureAlgorithmOID.ECDSA_WITH_SHA384 +OID_ECDSA_WITH_SHA512 = SignatureAlgorithmOID.ECDSA_WITH_SHA512 +OID_RSA_WITH_MD5 = SignatureAlgorithmOID.RSA_WITH_MD5 +OID_RSA_WITH_SHA1 = SignatureAlgorithmOID.RSA_WITH_SHA1 +OID_RSA_WITH_SHA224 = SignatureAlgorithmOID.RSA_WITH_SHA224 +OID_RSA_WITH_SHA256 = SignatureAlgorithmOID.RSA_WITH_SHA256 +OID_RSA_WITH_SHA384 = SignatureAlgorithmOID.RSA_WITH_SHA384 +OID_RSA_WITH_SHA512 = 
SignatureAlgorithmOID.RSA_WITH_SHA512 + +OID_COMMON_NAME = NameOID.COMMON_NAME +OID_COUNTRY_NAME = NameOID.COUNTRY_NAME +OID_DOMAIN_COMPONENT = NameOID.DOMAIN_COMPONENT +OID_DN_QUALIFIER = NameOID.DN_QUALIFIER +OID_EMAIL_ADDRESS = NameOID.EMAIL_ADDRESS +OID_GENERATION_QUALIFIER = NameOID.GENERATION_QUALIFIER +OID_GIVEN_NAME = NameOID.GIVEN_NAME +OID_LOCALITY_NAME = NameOID.LOCALITY_NAME +OID_ORGANIZATIONAL_UNIT_NAME = NameOID.ORGANIZATIONAL_UNIT_NAME +OID_ORGANIZATION_NAME = NameOID.ORGANIZATION_NAME +OID_PSEUDONYM = NameOID.PSEUDONYM +OID_SERIAL_NUMBER = NameOID.SERIAL_NUMBER +OID_STATE_OR_PROVINCE_NAME = NameOID.STATE_OR_PROVINCE_NAME +OID_SURNAME = NameOID.SURNAME +OID_TITLE = NameOID.TITLE + +OID_CLIENT_AUTH = ExtendedKeyUsageOID.CLIENT_AUTH +OID_CODE_SIGNING = ExtendedKeyUsageOID.CODE_SIGNING +OID_EMAIL_PROTECTION = ExtendedKeyUsageOID.EMAIL_PROTECTION +OID_OCSP_SIGNING = ExtendedKeyUsageOID.OCSP_SIGNING +OID_SERVER_AUTH = ExtendedKeyUsageOID.SERVER_AUTH +OID_TIME_STAMPING = ExtendedKeyUsageOID.TIME_STAMPING + +OID_ANY_POLICY = CertificatePoliciesOID.ANY_POLICY +OID_CPS_QUALIFIER = CertificatePoliciesOID.CPS_QUALIFIER +OID_CPS_USER_NOTICE = CertificatePoliciesOID.CPS_USER_NOTICE + +OID_CERTIFICATE_ISSUER = CRLEntryExtensionOID.CERTIFICATE_ISSUER +OID_CRL_REASON = CRLEntryExtensionOID.CRL_REASON +OID_INVALIDITY_DATE = CRLEntryExtensionOID.INVALIDITY_DATE + +OID_CA_ISSUERS = AuthorityInformationAccessOID.CA_ISSUERS +OID_OCSP = AuthorityInformationAccessOID.OCSP + + +__all__ = [ + "load_pem_x509_certificate", + "load_der_x509_certificate", + "load_pem_x509_csr", + "load_der_x509_csr", + "load_pem_x509_crl", + "load_der_x509_crl", + "InvalidVersion", + "DuplicateExtension", + "UnsupportedExtension", + "ExtensionNotFound", + "UnsupportedGeneralNameType", + "NameAttribute", + "Name", + "ObjectIdentifier", + "ExtensionType", + "Extensions", + "Extension", + "ExtendedKeyUsage", + "OCSPNoCheck", + "BasicConstraints", + "CRLNumber", + "KeyUsage", + "AuthorityInformationAccess", + "AccessDescription", + "CertificatePolicies", + "PolicyInformation", + "UserNotice", + "NoticeReference", + "SubjectKeyIdentifier", + "NameConstraints", + "CRLDistributionPoints", + "DistributionPoint", + "ReasonFlags", + "InhibitAnyPolicy", + "SubjectAlternativeName", + "IssuerAlternativeName", + "AuthorityKeyIdentifier", + "GeneralNames", + "GeneralName", + "RFC822Name", + "DNSName", + "UniformResourceIdentifier", + "RegisteredID", + "DirectoryName", + "IPAddress", + "OtherName", + "Certificate", + "CertificateRevocationList", + "CertificateRevocationListBuilder", + "CertificateSigningRequest", + "RevokedCertificate", + "RevokedCertificateBuilder", + "CertificateSigningRequestBuilder", + "CertificateBuilder", + "Version", + "_SIG_OIDS_TO_HASH", + "OID_CA_ISSUERS", + "OID_OCSP", + "_GENERAL_NAMES", + "CRLExtensionOID", + "CertificateIssuer", + "CRLReason", + "InvalidityDate", + "UnrecognizedExtension", +] diff --git a/Lib/site-packages/cryptography/x509/base.py b/Lib/site-packages/cryptography/x509/base.py new file mode 100644 index 0000000..55e965f --- /dev/null +++ b/Lib/site-packages/cryptography/x509/base.py @@ -0,0 +1,676 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
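+
+# A minimal sketch of issuing a self-signed certificate with the
+# CertificateBuilder defined below, assuming default_backend() supports RSA
+# key generation and X.509 certificate building; the name, serial number and
+# dates are illustrative values only.
+#
+#     import datetime
+#     from cryptography import x509
+#     from cryptography.hazmat.backends import default_backend
+#     from cryptography.hazmat.primitives import hashes
+#     from cryptography.hazmat.primitives.asymmetric import rsa
+#     from cryptography.x509.oid import NameOID
+#
+#     backend = default_backend()
+#     key = rsa.generate_private_key(65537, 2048, backend)
+#     name = x509.Name([
+#         x509.NameAttribute(NameOID.COMMON_NAME, u"example.test"),
+#     ])
+#     now = datetime.datetime.utcnow()
+#
+#     builder = x509.CertificateBuilder().subject_name(name).issuer_name(name)
+#     builder = builder.public_key(key.public_key()).serial_number(1000)
+#     builder = builder.not_valid_before(now)
+#     builder = builder.not_valid_after(now + datetime.timedelta(days=365))
+#     cert = builder.sign(key, hashes.SHA256(), backend)
+#
+#     cert.serial       # 1000
+#     cert.subject      # the x509.Name set above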
+ +from __future__ import absolute_import, division, print_function + +import abc +import datetime +from enum import Enum + +import six + +from cryptography import utils +from cryptography.hazmat.primitives.asymmetric import dsa, ec, rsa +from cryptography.x509.extensions import Extension, ExtensionType +from cryptography.x509.name import Name + + +_UNIX_EPOCH = datetime.datetime(1970, 1, 1) + + +class Version(Enum): + v1 = 0 + v3 = 2 + + +def load_pem_x509_certificate(data, backend): + return backend.load_pem_x509_certificate(data) + + +def load_der_x509_certificate(data, backend): + return backend.load_der_x509_certificate(data) + + +def load_pem_x509_csr(data, backend): + return backend.load_pem_x509_csr(data) + + +def load_der_x509_csr(data, backend): + return backend.load_der_x509_csr(data) + + +def load_pem_x509_crl(data, backend): + return backend.load_pem_x509_crl(data) + + +def load_der_x509_crl(data, backend): + return backend.load_der_x509_crl(data) + + +class InvalidVersion(Exception): + def __init__(self, msg, parsed_version): + super(InvalidVersion, self).__init__(msg) + self.parsed_version = parsed_version + + +@six.add_metaclass(abc.ABCMeta) +class Certificate(object): + @abc.abstractmethod + def fingerprint(self, algorithm): + """ + Returns bytes using digest passed. + """ + + @abc.abstractproperty + def serial(self): + """ + Returns certificate serial number + """ + + @abc.abstractproperty + def version(self): + """ + Returns the certificate version + """ + + @abc.abstractmethod + def public_key(self): + """ + Returns the public key + """ + + @abc.abstractproperty + def not_valid_before(self): + """ + Not before time (represented as UTC datetime) + """ + + @abc.abstractproperty + def not_valid_after(self): + """ + Not after time (represented as UTC datetime) + """ + + @abc.abstractproperty + def issuer(self): + """ + Returns the issuer name object. + """ + + @abc.abstractproperty + def subject(self): + """ + Returns the subject name object. + """ + + @abc.abstractproperty + def signature_hash_algorithm(self): + """ + Returns a HashAlgorithm corresponding to the type of the digest signed + in the certificate. + """ + + @abc.abstractproperty + def extensions(self): + """ + Returns an Extensions object. + """ + + @abc.abstractproperty + def signature(self): + """ + Returns the signature bytes. + """ + + @abc.abstractproperty + def tbs_certificate_bytes(self): + """ + Returns the tbsCertificate payload bytes as defined in RFC 5280. + """ + + @abc.abstractmethod + def __eq__(self, other): + """ + Checks equality. + """ + + @abc.abstractmethod + def __ne__(self, other): + """ + Checks not equal. + """ + + @abc.abstractmethod + def __hash__(self): + """ + Computes a hash. + """ + + @abc.abstractmethod + def public_bytes(self, encoding): + """ + Serializes the certificate to PEM or DER format. + """ + + +@six.add_metaclass(abc.ABCMeta) +class CertificateRevocationList(object): + @abc.abstractmethod + def public_bytes(self, encoding): + """ + Serializes the CRL to PEM or DER format. + """ + + @abc.abstractmethod + def fingerprint(self, algorithm): + """ + Returns bytes using digest passed. + """ + + @abc.abstractproperty + def signature_hash_algorithm(self): + """ + Returns a HashAlgorithm corresponding to the type of the digest signed + in the certificate. + """ + + @abc.abstractproperty + def issuer(self): + """ + Returns the X509Name with the issuer of this CRL. + """ + + @abc.abstractproperty + def next_update(self): + """ + Returns the date of next update for this CRL. 
+ """ + + @abc.abstractproperty + def last_update(self): + """ + Returns the date of last update for this CRL. + """ + + @abc.abstractproperty + def extensions(self): + """ + Returns an Extensions object containing a list of CRL extensions. + """ + + @abc.abstractproperty + def signature(self): + """ + Returns the signature bytes. + """ + + @abc.abstractproperty + def tbs_certlist_bytes(self): + """ + Returns the tbsCertList payload bytes as defined in RFC 5280. + """ + + @abc.abstractmethod + def __eq__(self, other): + """ + Checks equality. + """ + + @abc.abstractmethod + def __ne__(self, other): + """ + Checks not equal. + """ + + +@six.add_metaclass(abc.ABCMeta) +class CertificateSigningRequest(object): + @abc.abstractmethod + def __eq__(self, other): + """ + Checks equality. + """ + + @abc.abstractmethod + def __ne__(self, other): + """ + Checks not equal. + """ + + @abc.abstractmethod + def __hash__(self): + """ + Computes a hash. + """ + + @abc.abstractmethod + def public_key(self): + """ + Returns the public key + """ + + @abc.abstractproperty + def subject(self): + """ + Returns the subject name object. + """ + + @abc.abstractproperty + def signature_hash_algorithm(self): + """ + Returns a HashAlgorithm corresponding to the type of the digest signed + in the certificate. + """ + + @abc.abstractproperty + def extensions(self): + """ + Returns the extensions in the signing request. + """ + + @abc.abstractmethod + def public_bytes(self, encoding): + """ + Encodes the request to PEM or DER format. + """ + + @abc.abstractproperty + def signature(self): + """ + Returns the signature bytes. + """ + + @abc.abstractproperty + def tbs_certrequest_bytes(self): + """ + Returns the PKCS#10 CertificationRequestInfo bytes as defined in RFC + 2986. + """ + + +@six.add_metaclass(abc.ABCMeta) +class RevokedCertificate(object): + @abc.abstractproperty + def serial_number(self): + """ + Returns the serial number of the revoked certificate. + """ + + @abc.abstractproperty + def revocation_date(self): + """ + Returns the date of when this certificate was revoked. + """ + + @abc.abstractproperty + def extensions(self): + """ + Returns an Extensions object containing a list of Revoked extensions. + """ + + +class CertificateSigningRequestBuilder(object): + def __init__(self, subject_name=None, extensions=[]): + """ + Creates an empty X.509 certificate request (v1). + """ + self._subject_name = subject_name + self._extensions = extensions + + def subject_name(self, name): + """ + Sets the certificate requestor's distinguished name. + """ + if not isinstance(name, Name): + raise TypeError('Expecting x509.Name object.') + if self._subject_name is not None: + raise ValueError('The subject name may only be set once.') + return CertificateSigningRequestBuilder(name, self._extensions) + + def add_extension(self, extension, critical): + """ + Adds an X.509 extension to the certificate request. + """ + if not isinstance(extension, ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = Extension(extension.oid, critical, extension) + + # TODO: This is quadratic in the number of extensions + for e in self._extensions: + if e.oid == extension.oid: + raise ValueError('This extension has already been set.') + return CertificateSigningRequestBuilder( + self._subject_name, self._extensions + [extension] + ) + + def sign(self, private_key, algorithm, backend): + """ + Signs the request using the requestor's private key. 
+ """ + if self._subject_name is None: + raise ValueError("A CertificateSigningRequest must have a subject") + return backend.create_x509_csr(self, private_key, algorithm) + + +class CertificateBuilder(object): + def __init__(self, issuer_name=None, subject_name=None, + public_key=None, serial_number=None, not_valid_before=None, + not_valid_after=None, extensions=[]): + self._version = Version.v3 + self._issuer_name = issuer_name + self._subject_name = subject_name + self._public_key = public_key + self._serial_number = serial_number + self._not_valid_before = not_valid_before + self._not_valid_after = not_valid_after + self._extensions = extensions + + def issuer_name(self, name): + """ + Sets the CA's distinguished name. + """ + if not isinstance(name, Name): + raise TypeError('Expecting x509.Name object.') + if self._issuer_name is not None: + raise ValueError('The issuer name may only be set once.') + return CertificateBuilder( + name, self._subject_name, self._public_key, + self._serial_number, self._not_valid_before, + self._not_valid_after, self._extensions + ) + + def subject_name(self, name): + """ + Sets the requestor's distinguished name. + """ + if not isinstance(name, Name): + raise TypeError('Expecting x509.Name object.') + if self._subject_name is not None: + raise ValueError('The subject name may only be set once.') + return CertificateBuilder( + self._issuer_name, name, self._public_key, + self._serial_number, self._not_valid_before, + self._not_valid_after, self._extensions + ) + + def public_key(self, key): + """ + Sets the requestor's public key (as found in the signing request). + """ + if not isinstance(key, (dsa.DSAPublicKey, rsa.RSAPublicKey, + ec.EllipticCurvePublicKey)): + raise TypeError('Expecting one of DSAPublicKey, RSAPublicKey,' + ' or EllipticCurvePublicKey.') + if self._public_key is not None: + raise ValueError('The public key may only be set once.') + return CertificateBuilder( + self._issuer_name, self._subject_name, key, + self._serial_number, self._not_valid_before, + self._not_valid_after, self._extensions + ) + + def serial_number(self, number): + """ + Sets the certificate serial number. + """ + if not isinstance(number, six.integer_types): + raise TypeError('Serial number must be of integral type.') + if self._serial_number is not None: + raise ValueError('The serial number may only be set once.') + if number < 0: + raise ValueError('The serial number should be non-negative.') + if utils.bit_length(number) > 160: # As defined in RFC 5280 + raise ValueError('The serial number should not be more than 160 ' + 'bits.') + return CertificateBuilder( + self._issuer_name, self._subject_name, + self._public_key, number, self._not_valid_before, + self._not_valid_after, self._extensions + ) + + def not_valid_before(self, time): + """ + Sets the certificate activation time. + """ + if not isinstance(time, datetime.datetime): + raise TypeError('Expecting datetime object.') + if self._not_valid_before is not None: + raise ValueError('The not valid before may only be set once.') + if time <= _UNIX_EPOCH: + raise ValueError('The not valid before date must be after the unix' + ' epoch (1970 January 1).') + if self._not_valid_after is not None and time > self._not_valid_after: + raise ValueError( + 'The not valid before date must be before the not valid after ' + 'date.' 
+ ) + return CertificateBuilder( + self._issuer_name, self._subject_name, + self._public_key, self._serial_number, time, + self._not_valid_after, self._extensions + ) + + def not_valid_after(self, time): + """ + Sets the certificate expiration time. + """ + if not isinstance(time, datetime.datetime): + raise TypeError('Expecting datetime object.') + if self._not_valid_after is not None: + raise ValueError('The not valid after may only be set once.') + if time <= _UNIX_EPOCH: + raise ValueError('The not valid after date must be after the unix' + ' epoch (1970 January 1).') + if (self._not_valid_before is not None and + time < self._not_valid_before): + raise ValueError( + 'The not valid after date must be after the not valid before ' + 'date.' + ) + return CertificateBuilder( + self._issuer_name, self._subject_name, + self._public_key, self._serial_number, self._not_valid_before, + time, self._extensions + ) + + def add_extension(self, extension, critical): + """ + Adds an X.509 extension to the certificate. + """ + if not isinstance(extension, ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = Extension(extension.oid, critical, extension) + + # TODO: This is quadratic in the number of extensions + for e in self._extensions: + if e.oid == extension.oid: + raise ValueError('This extension has already been set.') + + return CertificateBuilder( + self._issuer_name, self._subject_name, + self._public_key, self._serial_number, self._not_valid_before, + self._not_valid_after, self._extensions + [extension] + ) + + def sign(self, private_key, algorithm, backend): + """ + Signs the certificate using the CA's private key. + """ + if self._subject_name is None: + raise ValueError("A certificate must have a subject name") + + if self._issuer_name is None: + raise ValueError("A certificate must have an issuer name") + + if self._serial_number is None: + raise ValueError("A certificate must have a serial number") + + if self._not_valid_before is None: + raise ValueError("A certificate must have a not valid before time") + + if self._not_valid_after is None: + raise ValueError("A certificate must have a not valid after time") + + if self._public_key is None: + raise ValueError("A certificate must have a public key") + + return backend.create_x509_certificate(self, private_key, algorithm) + + +class CertificateRevocationListBuilder(object): + def __init__(self, issuer_name=None, last_update=None, next_update=None, + extensions=[], revoked_certificates=[]): + self._issuer_name = issuer_name + self._last_update = last_update + self._next_update = next_update + self._extensions = extensions + self._revoked_certificates = revoked_certificates + + def issuer_name(self, issuer_name): + if not isinstance(issuer_name, Name): + raise TypeError('Expecting x509.Name object.') + if self._issuer_name is not None: + raise ValueError('The issuer name may only be set once.') + return CertificateRevocationListBuilder( + issuer_name, self._last_update, self._next_update, + self._extensions, self._revoked_certificates + ) + + def last_update(self, last_update): + if not isinstance(last_update, datetime.datetime): + raise TypeError('Expecting datetime object.') + if self._last_update is not None: + raise ValueError('Last update may only be set once.') + if last_update <= _UNIX_EPOCH: + raise ValueError('The last update date must be after the unix' + ' epoch (1970 January 1).') + if self._next_update is not None and last_update > self._next_update: + raise ValueError( + 'The last update 
date must be before the next update date.' + ) + return CertificateRevocationListBuilder( + self._issuer_name, last_update, self._next_update, + self._extensions, self._revoked_certificates + ) + + def next_update(self, next_update): + if not isinstance(next_update, datetime.datetime): + raise TypeError('Expecting datetime object.') + if self._next_update is not None: + raise ValueError('Last update may only be set once.') + if next_update <= _UNIX_EPOCH: + raise ValueError('The last update date must be after the unix' + ' epoch (1970 January 1).') + if self._last_update is not None and next_update < self._last_update: + raise ValueError( + 'The next update date must be after the last update date.' + ) + return CertificateRevocationListBuilder( + self._issuer_name, self._last_update, next_update, + self._extensions, self._revoked_certificates + ) + + def add_extension(self, extension, critical): + """ + Adds an X.509 extension to the certificate revocation list. + """ + if not isinstance(extension, ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = Extension(extension.oid, critical, extension) + + # TODO: This is quadratic in the number of extensions + for e in self._extensions: + if e.oid == extension.oid: + raise ValueError('This extension has already been set.') + return CertificateRevocationListBuilder( + self._issuer_name, self._last_update, self._next_update, + self._extensions + [extension], self._revoked_certificates + ) + + def add_revoked_certificate(self, revoked_certificate): + """ + Adds a revoked certificate to the CRL. + """ + if not isinstance(revoked_certificate, RevokedCertificate): + raise TypeError("Must be an instance of RevokedCertificate") + + return CertificateRevocationListBuilder( + self._issuer_name, self._last_update, + self._next_update, self._extensions, + self._revoked_certificates + [revoked_certificate] + ) + + def sign(self, private_key, algorithm, backend): + if self._issuer_name is None: + raise ValueError("A CRL must have an issuer name") + + if self._last_update is None: + raise ValueError("A CRL must have a last update time") + + if self._next_update is None: + raise ValueError("A CRL must have a next update time") + + return backend.create_x509_crl(self, private_key, algorithm) + + +class RevokedCertificateBuilder(object): + def __init__(self, serial_number=None, revocation_date=None, + extensions=[]): + self._serial_number = serial_number + self._revocation_date = revocation_date + self._extensions = extensions + + def serial_number(self, number): + if not isinstance(number, six.integer_types): + raise TypeError('Serial number must be of integral type.') + if self._serial_number is not None: + raise ValueError('The serial number may only be set once.') + if number < 0: + raise ValueError('The serial number should be non-negative.') + if utils.bit_length(number) > 160: # As defined in RFC 5280 + raise ValueError('The serial number should not be more than 160 ' + 'bits.') + return RevokedCertificateBuilder( + number, self._revocation_date, self._extensions + ) + + def revocation_date(self, time): + if not isinstance(time, datetime.datetime): + raise TypeError('Expecting datetime object.') + if self._revocation_date is not None: + raise ValueError('The revocation date may only be set once.') + if time <= _UNIX_EPOCH: + raise ValueError('The revocation date must be after the unix' + ' epoch (1970 January 1).') + return RevokedCertificateBuilder( + self._serial_number, time, self._extensions + ) + + def 
add_extension(self, extension, critical): + if not isinstance(extension, ExtensionType): + raise TypeError("extension must be an ExtensionType") + + extension = Extension(extension.oid, critical, extension) + + # TODO: This is quadratic in the number of extensions + for e in self._extensions: + if e.oid == extension.oid: + raise ValueError('This extension has already been set.') + return RevokedCertificateBuilder( + self._serial_number, self._revocation_date, + self._extensions + [extension] + ) + + def build(self, backend): + if self._serial_number is None: + raise ValueError("A revoked certificate must have a serial number") + if self._revocation_date is None: + raise ValueError( + "A revoked certificate must have a revocation date" + ) + + return backend.create_x509_revoked_certificate(self) diff --git a/Lib/site-packages/cryptography/x509/extensions.py b/Lib/site-packages/cryptography/x509/extensions.py new file mode 100644 index 0000000..3e6fc3b --- /dev/null +++ b/Lib/site-packages/cryptography/x509/extensions.py @@ -0,0 +1,1108 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc +import datetime +import hashlib +import ipaddress +from enum import Enum + +from pyasn1.codec.der import decoder +from pyasn1.type import namedtype, univ + +import six + +from cryptography import utils +from cryptography.hazmat.primitives import constant_time, serialization +from cryptography.x509.general_name import GeneralName, IPAddress, OtherName +from cryptography.x509.name import Name +from cryptography.x509.oid import ( + CRLEntryExtensionOID, ExtensionOID, ObjectIdentifier +) + + +class _SubjectPublicKeyInfo(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('algorithm', univ.Sequence()), + namedtype.NamedType('subjectPublicKey', univ.BitString()) + ) + + +def _key_identifier_from_public_key(public_key): + # This is a very slow way to do this. + serialized = public_key.public_bytes( + serialization.Encoding.DER, + serialization.PublicFormat.SubjectPublicKeyInfo + ) + spki, remaining = decoder.decode( + serialized, asn1Spec=_SubjectPublicKeyInfo() + ) + assert not remaining + # the univ.BitString object is a tuple of bits. We need bytes and + # pyasn1 really doesn't want to give them to us. To get it we'll + # build an integer and convert that to bytes. + bits = 0 + for bit in spki.getComponentByName("subjectPublicKey"): + bits = bits << 1 | bit + + data = utils.int_to_bytes(bits) + return hashlib.sha1(data).digest() + + +class DuplicateExtension(Exception): + def __init__(self, msg, oid): + super(DuplicateExtension, self).__init__(msg) + self.oid = oid + + +class UnsupportedExtension(Exception): + def __init__(self, msg, oid): + super(UnsupportedExtension, self).__init__(msg) + self.oid = oid + + +class ExtensionNotFound(Exception): + def __init__(self, msg, oid): + super(ExtensionNotFound, self).__init__(msg) + self.oid = oid + + +@six.add_metaclass(abc.ABCMeta) +class ExtensionType(object): + @abc.abstractproperty + def oid(self): + """ + Returns the oid associated with the given extension type. 
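Before moving on to the extension classes, here is how the CertificateBuilder defined in base.py above is typically driven, shown as a self-signed certificate. This is a sketch, not the package's documented recipe: it assumes the x509 package re-exports these names as upstream does, and the serial number, dates and key size are illustrative:

    # Sketch: build and sign a self-signed certificate with CertificateBuilder.
    import datetime

    from cryptography import x509
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.asymmetric import rsa
    from cryptography.x509.oid import NameOID

    key = rsa.generate_private_key(
        public_exponent=65537, key_size=2048, backend=default_backend()
    )
    name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, u"example.com")])
    cert = x509.CertificateBuilder().subject_name(
        name
    ).issuer_name(
        name  # self-signed: issuer == subject
    ).public_key(
        key.public_key()
    ).serial_number(
        1000
    ).not_valid_before(
        datetime.datetime.utcnow()
    ).not_valid_after(
        datetime.datetime.utcnow() + datetime.timedelta(days=365)
    ).add_extension(
        x509.BasicConstraints(ca=True, path_length=None), critical=True
    ).sign(key, hashes.SHA256(), default_backend())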
+ """ + + +class Extensions(object): + def __init__(self, extensions): + self._extensions = extensions + + def get_extension_for_oid(self, oid): + for ext in self: + if ext.oid == oid: + return ext + + raise ExtensionNotFound("No {0} extension was found".format(oid), oid) + + def get_extension_for_class(self, extclass): + if extclass is UnrecognizedExtension: + raise TypeError( + "UnrecognizedExtension can't be used with " + "get_extension_for_class because more than one instance of the" + " class may be present." + ) + + for ext in self: + if isinstance(ext.value, extclass): + return ext + + raise ExtensionNotFound( + "No {0} extension was found".format(extclass), extclass.oid + ) + + def __iter__(self): + return iter(self._extensions) + + def __len__(self): + return len(self._extensions) + + def __getitem__(self, idx): + return self._extensions[idx] + + def __repr__(self): + return ( + "".format(self._extensions) + ) + + +@utils.register_interface(ExtensionType) +class CRLNumber(object): + oid = ExtensionOID.CRL_NUMBER + + def __init__(self, crl_number): + if not isinstance(crl_number, six.integer_types): + raise TypeError("crl_number must be an integer") + + self._crl_number = crl_number + + def __eq__(self, other): + if not isinstance(other, CRLNumber): + return NotImplemented + + return self.crl_number == other.crl_number + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(self.crl_number) + + def __repr__(self): + return "".format(self.crl_number) + + crl_number = utils.read_only_property("_crl_number") + + +@utils.register_interface(ExtensionType) +class AuthorityKeyIdentifier(object): + oid = ExtensionOID.AUTHORITY_KEY_IDENTIFIER + + def __init__(self, key_identifier, authority_cert_issuer, + authority_cert_serial_number): + if (authority_cert_issuer is None) != ( + authority_cert_serial_number is None + ): + raise ValueError( + "authority_cert_issuer and authority_cert_serial_number " + "must both be present or both None" + ) + + if authority_cert_issuer is not None and not all( + isinstance(x, GeneralName) for x in authority_cert_issuer + ): + raise TypeError( + "authority_cert_issuer must be a list of GeneralName " + "objects" + ) + + if authority_cert_serial_number is not None and not isinstance( + authority_cert_serial_number, six.integer_types + ): + raise TypeError( + "authority_cert_serial_number must be an integer" + ) + + self._key_identifier = key_identifier + self._authority_cert_issuer = authority_cert_issuer + self._authority_cert_serial_number = authority_cert_serial_number + + @classmethod + def from_issuer_public_key(cls, public_key): + digest = _key_identifier_from_public_key(public_key) + return cls( + key_identifier=digest, + authority_cert_issuer=None, + authority_cert_serial_number=None + ) + + def __repr__(self): + return ( + "".format(self) + ) + + def __eq__(self, other): + if not isinstance(other, AuthorityKeyIdentifier): + return NotImplemented + + return ( + self.key_identifier == other.key_identifier and + self.authority_cert_issuer == other.authority_cert_issuer and + self.authority_cert_serial_number == + other.authority_cert_serial_number + ) + + def __ne__(self, other): + return not self == other + + key_identifier = utils.read_only_property("_key_identifier") + authority_cert_issuer = utils.read_only_property("_authority_cert_issuer") + authority_cert_serial_number = utils.read_only_property( + "_authority_cert_serial_number" + ) + + +@utils.register_interface(ExtensionType) +class 
SubjectKeyIdentifier(object): + oid = ExtensionOID.SUBJECT_KEY_IDENTIFIER + + def __init__(self, digest): + self._digest = digest + + @classmethod + def from_public_key(cls, public_key): + return cls(_key_identifier_from_public_key(public_key)) + + digest = utils.read_only_property("_digest") + + def __repr__(self): + return "".format(self.digest) + + def __eq__(self, other): + if not isinstance(other, SubjectKeyIdentifier): + return NotImplemented + + return constant_time.bytes_eq(self.digest, other.digest) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(self.digest) + + +@utils.register_interface(ExtensionType) +class AuthorityInformationAccess(object): + oid = ExtensionOID.AUTHORITY_INFORMATION_ACCESS + + def __init__(self, descriptions): + if not all(isinstance(x, AccessDescription) for x in descriptions): + raise TypeError( + "Every item in the descriptions list must be an " + "AccessDescription" + ) + + self._descriptions = descriptions + + def __iter__(self): + return iter(self._descriptions) + + def __len__(self): + return len(self._descriptions) + + def __repr__(self): + return "".format(self._descriptions) + + def __eq__(self, other): + if not isinstance(other, AuthorityInformationAccess): + return NotImplemented + + return self._descriptions == other._descriptions + + def __ne__(self, other): + return not self == other + + def __getitem__(self, idx): + return self._descriptions[idx] + + +class AccessDescription(object): + def __init__(self, access_method, access_location): + if not isinstance(access_method, ObjectIdentifier): + raise TypeError("access_method must be an ObjectIdentifier") + + if not isinstance(access_location, GeneralName): + raise TypeError("access_location must be a GeneralName") + + self._access_method = access_method + self._access_location = access_location + + def __repr__(self): + return ( + "".format(self) + ) + + def __eq__(self, other): + if not isinstance(other, AccessDescription): + return NotImplemented + + return ( + self.access_method == other.access_method and + self.access_location == other.access_location + ) + + def __ne__(self, other): + return not self == other + + access_method = utils.read_only_property("_access_method") + access_location = utils.read_only_property("_access_location") + + +@utils.register_interface(ExtensionType) +class BasicConstraints(object): + oid = ExtensionOID.BASIC_CONSTRAINTS + + def __init__(self, ca, path_length): + if not isinstance(ca, bool): + raise TypeError("ca must be a boolean value") + + if path_length is not None and not ca: + raise ValueError("path_length must be None when ca is False") + + if ( + path_length is not None and + (not isinstance(path_length, six.integer_types) or path_length < 0) + ): + raise TypeError( + "path_length must be a non-negative integer or None" + ) + + self._ca = ca + self._path_length = path_length + + ca = utils.read_only_property("_ca") + path_length = utils.read_only_property("_path_length") + + def __repr__(self): + return ("").format(self) + + def __eq__(self, other): + if not isinstance(other, BasicConstraints): + return NotImplemented + + return self.ca == other.ca and self.path_length == other.path_length + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash((self.ca, self.path_length)) + + +@utils.register_interface(ExtensionType) +class CRLDistributionPoints(object): + oid = ExtensionOID.CRL_DISTRIBUTION_POINTS + + def __init__(self, distribution_points): + if not all( + 
isinstance(x, DistributionPoint) for x in distribution_points + ): + raise TypeError( + "distribution_points must be a list of DistributionPoint " + "objects" + ) + + self._distribution_points = distribution_points + + def __iter__(self): + return iter(self._distribution_points) + + def __len__(self): + return len(self._distribution_points) + + def __repr__(self): + return "".format(self._distribution_points) + + def __eq__(self, other): + if not isinstance(other, CRLDistributionPoints): + return NotImplemented + + return self._distribution_points == other._distribution_points + + def __ne__(self, other): + return not self == other + + def __getitem__(self, idx): + return self._distribution_points[idx] + + +class DistributionPoint(object): + def __init__(self, full_name, relative_name, reasons, crl_issuer): + if full_name and relative_name: + raise ValueError( + "You cannot provide both full_name and relative_name, at " + "least one must be None." + ) + + if full_name and not all( + isinstance(x, GeneralName) for x in full_name + ): + raise TypeError( + "full_name must be a list of GeneralName objects" + ) + + if relative_name and not isinstance(relative_name, Name): + raise TypeError("relative_name must be a Name") + + if crl_issuer and not all( + isinstance(x, GeneralName) for x in crl_issuer + ): + raise TypeError( + "crl_issuer must be None or a list of general names" + ) + + if reasons and (not isinstance(reasons, frozenset) or not all( + isinstance(x, ReasonFlags) for x in reasons + )): + raise TypeError("reasons must be None or frozenset of ReasonFlags") + + if reasons and ( + ReasonFlags.unspecified in reasons or + ReasonFlags.remove_from_crl in reasons + ): + raise ValueError( + "unspecified and remove_from_crl are not valid reasons in a " + "DistributionPoint" + ) + + if reasons and not crl_issuer and not (full_name or relative_name): + raise ValueError( + "You must supply crl_issuer, full_name, or relative_name when " + "reasons is not None" + ) + + self._full_name = full_name + self._relative_name = relative_name + self._reasons = reasons + self._crl_issuer = crl_issuer + + def __repr__(self): + return ( + "".format(self) + ) + + def __eq__(self, other): + if not isinstance(other, DistributionPoint): + return NotImplemented + + return ( + self.full_name == other.full_name and + self.relative_name == other.relative_name and + self.reasons == other.reasons and + self.crl_issuer == other.crl_issuer + ) + + def __ne__(self, other): + return not self == other + + full_name = utils.read_only_property("_full_name") + relative_name = utils.read_only_property("_relative_name") + reasons = utils.read_only_property("_reasons") + crl_issuer = utils.read_only_property("_crl_issuer") + + +class ReasonFlags(Enum): + unspecified = "unspecified" + key_compromise = "keyCompromise" + ca_compromise = "cACompromise" + affiliation_changed = "affiliationChanged" + superseded = "superseded" + cessation_of_operation = "cessationOfOperation" + certificate_hold = "certificateHold" + privilege_withdrawn = "privilegeWithdrawn" + aa_compromise = "aACompromise" + remove_from_crl = "removeFromCRL" + + +@utils.register_interface(ExtensionType) +class CertificatePolicies(object): + oid = ExtensionOID.CERTIFICATE_POLICIES + + def __init__(self, policies): + if not all(isinstance(x, PolicyInformation) for x in policies): + raise TypeError( + "Every item in the policies list must be a " + "PolicyInformation" + ) + + self._policies = policies + + def __iter__(self): + return iter(self._policies) + + def 
__len__(self): + return len(self._policies) + + def __repr__(self): + return "".format(self._policies) + + def __eq__(self, other): + if not isinstance(other, CertificatePolicies): + return NotImplemented + + return self._policies == other._policies + + def __ne__(self, other): + return not self == other + + def __getitem__(self, idx): + return self._policies[idx] + + +class PolicyInformation(object): + def __init__(self, policy_identifier, policy_qualifiers): + if not isinstance(policy_identifier, ObjectIdentifier): + raise TypeError("policy_identifier must be an ObjectIdentifier") + + self._policy_identifier = policy_identifier + if policy_qualifiers and not all( + isinstance( + x, (six.text_type, UserNotice) + ) for x in policy_qualifiers + ): + raise TypeError( + "policy_qualifiers must be a list of strings and/or UserNotice" + " objects or None" + ) + + self._policy_qualifiers = policy_qualifiers + + def __repr__(self): + return ( + "".format(self) + ) + + def __eq__(self, other): + if not isinstance(other, PolicyInformation): + return NotImplemented + + return ( + self.policy_identifier == other.policy_identifier and + self.policy_qualifiers == other.policy_qualifiers + ) + + def __ne__(self, other): + return not self == other + + policy_identifier = utils.read_only_property("_policy_identifier") + policy_qualifiers = utils.read_only_property("_policy_qualifiers") + + +class UserNotice(object): + def __init__(self, notice_reference, explicit_text): + if notice_reference and not isinstance( + notice_reference, NoticeReference + ): + raise TypeError( + "notice_reference must be None or a NoticeReference" + ) + + self._notice_reference = notice_reference + self._explicit_text = explicit_text + + def __repr__(self): + return ( + "".format(self) + ) + + def __eq__(self, other): + if not isinstance(other, UserNotice): + return NotImplemented + + return ( + self.notice_reference == other.notice_reference and + self.explicit_text == other.explicit_text + ) + + def __ne__(self, other): + return not self == other + + notice_reference = utils.read_only_property("_notice_reference") + explicit_text = utils.read_only_property("_explicit_text") + + +class NoticeReference(object): + def __init__(self, organization, notice_numbers): + self._organization = organization + if not isinstance(notice_numbers, list) or not all( + isinstance(x, int) for x in notice_numbers + ): + raise TypeError( + "notice_numbers must be a list of integers" + ) + + self._notice_numbers = notice_numbers + + def __repr__(self): + return ( + "".format(self) + ) + + def __eq__(self, other): + if not isinstance(other, NoticeReference): + return NotImplemented + + return ( + self.organization == other.organization and + self.notice_numbers == other.notice_numbers + ) + + def __ne__(self, other): + return not self == other + + organization = utils.read_only_property("_organization") + notice_numbers = utils.read_only_property("_notice_numbers") + + +@utils.register_interface(ExtensionType) +class ExtendedKeyUsage(object): + oid = ExtensionOID.EXTENDED_KEY_USAGE + + def __init__(self, usages): + if not all(isinstance(x, ObjectIdentifier) for x in usages): + raise TypeError( + "Every item in the usages list must be an ObjectIdentifier" + ) + + self._usages = usages + + def __iter__(self): + return iter(self._usages) + + def __len__(self): + return len(self._usages) + + def __repr__(self): + return "".format(self._usages) + + def __eq__(self, other): + if not isinstance(other, ExtendedKeyUsage): + return NotImplemented + + return 
self._usages == other._usages + + def __ne__(self, other): + return not self == other + + +@utils.register_interface(ExtensionType) +class OCSPNoCheck(object): + oid = ExtensionOID.OCSP_NO_CHECK + + +@utils.register_interface(ExtensionType) +class InhibitAnyPolicy(object): + oid = ExtensionOID.INHIBIT_ANY_POLICY + + def __init__(self, skip_certs): + if not isinstance(skip_certs, six.integer_types): + raise TypeError("skip_certs must be an integer") + + if skip_certs < 0: + raise ValueError("skip_certs must be a non-negative integer") + + self._skip_certs = skip_certs + + def __repr__(self): + return "".format(self) + + def __eq__(self, other): + if not isinstance(other, InhibitAnyPolicy): + return NotImplemented + + return self.skip_certs == other.skip_certs + + def __ne__(self, other): + return not self == other + + skip_certs = utils.read_only_property("_skip_certs") + + +@utils.register_interface(ExtensionType) +class KeyUsage(object): + oid = ExtensionOID.KEY_USAGE + + def __init__(self, digital_signature, content_commitment, key_encipherment, + data_encipherment, key_agreement, key_cert_sign, crl_sign, + encipher_only, decipher_only): + if not key_agreement and (encipher_only or decipher_only): + raise ValueError( + "encipher_only and decipher_only can only be true when " + "key_agreement is true" + ) + + self._digital_signature = digital_signature + self._content_commitment = content_commitment + self._key_encipherment = key_encipherment + self._data_encipherment = data_encipherment + self._key_agreement = key_agreement + self._key_cert_sign = key_cert_sign + self._crl_sign = crl_sign + self._encipher_only = encipher_only + self._decipher_only = decipher_only + + digital_signature = utils.read_only_property("_digital_signature") + content_commitment = utils.read_only_property("_content_commitment") + key_encipherment = utils.read_only_property("_key_encipherment") + data_encipherment = utils.read_only_property("_data_encipherment") + key_agreement = utils.read_only_property("_key_agreement") + key_cert_sign = utils.read_only_property("_key_cert_sign") + crl_sign = utils.read_only_property("_crl_sign") + + @property + def encipher_only(self): + if not self.key_agreement: + raise ValueError( + "encipher_only is undefined unless key_agreement is true" + ) + else: + return self._encipher_only + + @property + def decipher_only(self): + if not self.key_agreement: + raise ValueError( + "decipher_only is undefined unless key_agreement is true" + ) + else: + return self._decipher_only + + def __repr__(self): + try: + encipher_only = self.encipher_only + decipher_only = self.decipher_only + except ValueError: + encipher_only = None + decipher_only = None + + return ("").format( + self, encipher_only, decipher_only) + + def __eq__(self, other): + if not isinstance(other, KeyUsage): + return NotImplemented + + return ( + self.digital_signature == other.digital_signature and + self.content_commitment == other.content_commitment and + self.key_encipherment == other.key_encipherment and + self.data_encipherment == other.data_encipherment and + self.key_agreement == other.key_agreement and + self.key_cert_sign == other.key_cert_sign and + self.crl_sign == other.crl_sign and + self._encipher_only == other._encipher_only and + self._decipher_only == other._decipher_only + ) + + def __ne__(self, other): + return not self == other + + +@utils.register_interface(ExtensionType) +class NameConstraints(object): + oid = ExtensionOID.NAME_CONSTRAINTS + + def __init__(self, permitted_subtrees, 
excluded_subtrees): + if permitted_subtrees is not None: + if not all( + isinstance(x, GeneralName) for x in permitted_subtrees + ): + raise TypeError( + "permitted_subtrees must be a list of GeneralName objects " + "or None" + ) + + self._validate_ip_name(permitted_subtrees) + + if excluded_subtrees is not None: + if not all( + isinstance(x, GeneralName) for x in excluded_subtrees + ): + raise TypeError( + "excluded_subtrees must be a list of GeneralName objects " + "or None" + ) + + self._validate_ip_name(excluded_subtrees) + + if permitted_subtrees is None and excluded_subtrees is None: + raise ValueError( + "At least one of permitted_subtrees and excluded_subtrees " + "must not be None" + ) + + self._permitted_subtrees = permitted_subtrees + self._excluded_subtrees = excluded_subtrees + + def __eq__(self, other): + if not isinstance(other, NameConstraints): + return NotImplemented + + return ( + self.excluded_subtrees == other.excluded_subtrees and + self.permitted_subtrees == other.permitted_subtrees + ) + + def __ne__(self, other): + return not self == other + + def _validate_ip_name(self, tree): + if any(isinstance(name, IPAddress) and not isinstance( + name.value, (ipaddress.IPv4Network, ipaddress.IPv6Network) + ) for name in tree): + raise TypeError( + "IPAddress name constraints must be an IPv4Network or" + " IPv6Network object" + ) + + def __repr__(self): + return ( + u"".format(self) + ) + + permitted_subtrees = utils.read_only_property("_permitted_subtrees") + excluded_subtrees = utils.read_only_property("_excluded_subtrees") + + +class Extension(object): + def __init__(self, oid, critical, value): + if not isinstance(oid, ObjectIdentifier): + raise TypeError( + "oid argument must be an ObjectIdentifier instance." + ) + + if not isinstance(critical, bool): + raise TypeError("critical must be a boolean value") + + self._oid = oid + self._critical = critical + self._value = value + + oid = utils.read_only_property("_oid") + critical = utils.read_only_property("_critical") + value = utils.read_only_property("_value") + + def __repr__(self): + return ("").format(self) + + def __eq__(self, other): + if not isinstance(other, Extension): + return NotImplemented + + return ( + self.oid == other.oid and + self.critical == other.critical and + self.value == other.value + ) + + def __ne__(self, other): + return not self == other + + +class GeneralNames(object): + def __init__(self, general_names): + if not all(isinstance(x, GeneralName) for x in general_names): + raise TypeError( + "Every item in the general_names list must be an " + "object conforming to the GeneralName interface" + ) + + self._general_names = general_names + + def __iter__(self): + return iter(self._general_names) + + def __len__(self): + return len(self._general_names) + + def get_values_for_type(self, type): + # Return the value of each GeneralName, except for OtherName instances + # which we return directly because it has two important properties not + # just one value. 
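The Extensions container defined earlier and the get_values_for_type helper below are usually used together when inspecting a parsed certificate. A minimal sketch, assuming the usual package re-exports and a hypothetical "cert.pem" file:

    # Sketch: pull the DNS names out of a certificate's subjectAltName.
    from cryptography import x509
    from cryptography.hazmat.backends import default_backend
    from cryptography.x509.oid import ExtensionOID

    with open("cert.pem", "rb") as f:
        cert = x509.load_pem_x509_certificate(f.read(), default_backend())

    san = cert.extensions.get_extension_for_oid(
        ExtensionOID.SUBJECT_ALTERNATIVE_NAME
    )
    # Returns the plain values, e.g. [u'example.com']
    print(san.value.get_values_for_type(x509.DNSName))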
+ objs = (i for i in self if isinstance(i, type)) + if type != OtherName: + objs = (i.value for i in objs) + return list(objs) + + def __repr__(self): + return "".format(self._general_names) + + def __eq__(self, other): + if not isinstance(other, GeneralNames): + return NotImplemented + + return self._general_names == other._general_names + + def __ne__(self, other): + return not self == other + + def __getitem__(self, idx): + return self._general_names[idx] + + +@utils.register_interface(ExtensionType) +class SubjectAlternativeName(object): + oid = ExtensionOID.SUBJECT_ALTERNATIVE_NAME + + def __init__(self, general_names): + self._general_names = GeneralNames(general_names) + + def __iter__(self): + return iter(self._general_names) + + def __len__(self): + return len(self._general_names) + + def get_values_for_type(self, type): + return self._general_names.get_values_for_type(type) + + def __repr__(self): + return "".format(self._general_names) + + def __eq__(self, other): + if not isinstance(other, SubjectAlternativeName): + return NotImplemented + + return self._general_names == other._general_names + + def __getitem__(self, idx): + return self._general_names[idx] + + def __ne__(self, other): + return not self == other + + +@utils.register_interface(ExtensionType) +class IssuerAlternativeName(object): + oid = ExtensionOID.ISSUER_ALTERNATIVE_NAME + + def __init__(self, general_names): + self._general_names = GeneralNames(general_names) + + def __iter__(self): + return iter(self._general_names) + + def __len__(self): + return len(self._general_names) + + def get_values_for_type(self, type): + return self._general_names.get_values_for_type(type) + + def __repr__(self): + return "".format(self._general_names) + + def __eq__(self, other): + if not isinstance(other, IssuerAlternativeName): + return NotImplemented + + return self._general_names == other._general_names + + def __ne__(self, other): + return not self == other + + def __getitem__(self, idx): + return self._general_names[idx] + + +@utils.register_interface(ExtensionType) +class CertificateIssuer(object): + oid = CRLEntryExtensionOID.CERTIFICATE_ISSUER + + def __init__(self, general_names): + self._general_names = GeneralNames(general_names) + + def __iter__(self): + return iter(self._general_names) + + def __len__(self): + return len(self._general_names) + + def get_values_for_type(self, type): + return self._general_names.get_values_for_type(type) + + def __repr__(self): + return "".format(self._general_names) + + def __eq__(self, other): + if not isinstance(other, CertificateIssuer): + return NotImplemented + + return self._general_names == other._general_names + + def __ne__(self, other): + return not self == other + + def __getitem__(self, idx): + return self._general_names[idx] + + +@utils.register_interface(ExtensionType) +class CRLReason(object): + oid = CRLEntryExtensionOID.CRL_REASON + + def __init__(self, reason): + if not isinstance(reason, ReasonFlags): + raise TypeError("reason must be an element from ReasonFlags") + + self._reason = reason + + def __repr__(self): + return "".format(self._reason) + + def __eq__(self, other): + if not isinstance(other, CRLReason): + return NotImplemented + + return self.reason == other.reason + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(self.reason) + + reason = utils.read_only_property("_reason") + + +@utils.register_interface(ExtensionType) +class InvalidityDate(object): + oid = CRLEntryExtensionOID.INVALIDITY_DATE + + def 
__init__(self, invalidity_date): + if not isinstance(invalidity_date, datetime.datetime): + raise TypeError("invalidity_date must be a datetime.datetime") + + self._invalidity_date = invalidity_date + + def __repr__(self): + return "".format( + self._invalidity_date + ) + + def __eq__(self, other): + if not isinstance(other, InvalidityDate): + return NotImplemented + + return self.invalidity_date == other.invalidity_date + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(self.invalidity_date) + + invalidity_date = utils.read_only_property("_invalidity_date") + + +@utils.register_interface(ExtensionType) +class UnrecognizedExtension(object): + def __init__(self, oid, value): + if not isinstance(oid, ObjectIdentifier): + raise TypeError("oid must be an ObjectIdentifier") + self._oid = oid + self._value = value + + oid = utils.read_only_property("_oid") + value = utils.read_only_property("_value") + + def __repr__(self): + return ( + "".format( + self + ) + ) + + def __eq__(self, other): + if not isinstance(other, UnrecognizedExtension): + return NotImplemented + + return self.oid == other.oid and self.value == other.value + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash((self.oid, self.value)) diff --git a/Lib/site-packages/cryptography/x509/general_name.py b/Lib/site-packages/cryptography/x509/general_name.py new file mode 100644 index 0000000..6745243 --- /dev/null +++ b/Lib/site-packages/cryptography/x509/general_name.py @@ -0,0 +1,271 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import abc +import ipaddress +from email.utils import parseaddr + +import idna + +import six + +from six.moves import urllib_parse + +from cryptography import utils +from cryptography.x509.name import Name +from cryptography.x509.oid import ObjectIdentifier + + +_GENERAL_NAMES = { + 0: "otherName", + 1: "rfc822Name", + 2: "dNSName", + 3: "x400Address", + 4: "directoryName", + 5: "ediPartyName", + 6: "uniformResourceIdentifier", + 7: "iPAddress", + 8: "registeredID", +} + + +class UnsupportedGeneralNameType(Exception): + def __init__(self, msg, type): + super(UnsupportedGeneralNameType, self).__init__(msg) + self.type = type + + +@six.add_metaclass(abc.ABCMeta) +class GeneralName(object): + @abc.abstractproperty + def value(self): + """ + Return the value of the object + """ + + +@utils.register_interface(GeneralName) +class RFC822Name(object): + def __init__(self, value): + if not isinstance(value, six.text_type): + raise TypeError("value must be a unicode string") + + name, address = parseaddr(value) + parts = address.split(u"@") + if name or not address: + # parseaddr has found a name (e.g. Name ) or the entire + # value is an empty string. + raise ValueError("Invalid rfc822name value") + elif len(parts) == 1: + # Single label email name. This is valid for local delivery. + # No IDNA encoding needed since there is no domain component. + encoded = address.encode("ascii") + else: + # A normal email of the form user@domain.com. Let's attempt to + # encode the domain component and reconstruct the address. 
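The concrete GeneralName types in this module (RFC822Name above, and DNSName, UniformResourceIdentifier and IPAddress further below) are what callers collect into a SubjectAlternativeName. A small sketch with illustrative values, assuming the package re-exports these names as upstream does and that the ipaddress backport is available:

    # Sketch: constructing GeneralName instances for a subjectAltName.
    import ipaddress

    from cryptography import x509

    san = x509.SubjectAlternativeName([
        x509.DNSName(u"example.com"),
        x509.RFC822Name(u"user@example.com"),
        x509.UniformResourceIdentifier(u"https://example.com/cps"),
        x509.IPAddress(ipaddress.ip_address(u"192.0.2.1")),
    ])
    print([general_name.value for general_name in san])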
+ encoded = parts[0].encode("ascii") + b"@" + idna.encode(parts[1]) + + self._value = value + self._encoded = encoded + + value = utils.read_only_property("_value") + + def __repr__(self): + return "".format(self.value) + + def __eq__(self, other): + if not isinstance(other, RFC822Name): + return NotImplemented + + return self.value == other.value + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(self.value) + + +@utils.register_interface(GeneralName) +class DNSName(object): + def __init__(self, value): + if not isinstance(value, six.text_type): + raise TypeError("value must be a unicode string") + + self._value = value + + value = utils.read_only_property("_value") + + def __repr__(self): + return "".format(self.value) + + def __eq__(self, other): + if not isinstance(other, DNSName): + return NotImplemented + + return self.value == other.value + + def __ne__(self, other): + return not self == other + + +@utils.register_interface(GeneralName) +class UniformResourceIdentifier(object): + def __init__(self, value): + if not isinstance(value, six.text_type): + raise TypeError("value must be a unicode string") + + parsed = urllib_parse.urlparse(value) + if not parsed.hostname: + netloc = "" + elif parsed.port: + netloc = ( + idna.encode(parsed.hostname) + + ":{0}".format(parsed.port).encode("ascii") + ).decode("ascii") + else: + netloc = idna.encode(parsed.hostname).decode("ascii") + + # Note that building a URL in this fashion means it should be + # semantically indistinguishable from the original but is not + # guaranteed to be exactly the same. + uri = urllib_parse.urlunparse(( + parsed.scheme, + netloc, + parsed.path, + parsed.params, + parsed.query, + parsed.fragment + )).encode("ascii") + + self._value = value + self._encoded = uri + + value = utils.read_only_property("_value") + + def __repr__(self): + return "".format(self.value) + + def __eq__(self, other): + if not isinstance(other, UniformResourceIdentifier): + return NotImplemented + + return self.value == other.value + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash(self.value) + + +@utils.register_interface(GeneralName) +class DirectoryName(object): + def __init__(self, value): + if not isinstance(value, Name): + raise TypeError("value must be a Name") + + self._value = value + + value = utils.read_only_property("_value") + + def __repr__(self): + return "".format(self.value) + + def __eq__(self, other): + if not isinstance(other, DirectoryName): + return NotImplemented + + return self.value == other.value + + def __ne__(self, other): + return not self == other + + +@utils.register_interface(GeneralName) +class RegisteredID(object): + def __init__(self, value): + if not isinstance(value, ObjectIdentifier): + raise TypeError("value must be an ObjectIdentifier") + + self._value = value + + value = utils.read_only_property("_value") + + def __repr__(self): + return "".format(self.value) + + def __eq__(self, other): + if not isinstance(other, RegisteredID): + return NotImplemented + + return self.value == other.value + + def __ne__(self, other): + return not self == other + + +@utils.register_interface(GeneralName) +class IPAddress(object): + def __init__(self, value): + if not isinstance( + value, + ( + ipaddress.IPv4Address, + ipaddress.IPv6Address, + ipaddress.IPv4Network, + ipaddress.IPv6Network + ) + ): + raise TypeError( + "value must be an instance of ipaddress.IPv4Address, " + "ipaddress.IPv6Address, ipaddress.IPv4Network, or " + 
"ipaddress.IPv6Network" + ) + + self._value = value + + value = utils.read_only_property("_value") + + def __repr__(self): + return "".format(self.value) + + def __eq__(self, other): + if not isinstance(other, IPAddress): + return NotImplemented + + return self.value == other.value + + def __ne__(self, other): + return not self == other + + +@utils.register_interface(GeneralName) +class OtherName(object): + def __init__(self, type_id, value): + if not isinstance(type_id, ObjectIdentifier): + raise TypeError("type_id must be an ObjectIdentifier") + if not isinstance(value, bytes): + raise TypeError("value must be a binary string") + + self._type_id = type_id + self._value = value + + type_id = utils.read_only_property("_type_id") + value = utils.read_only_property("_value") + + def __repr__(self): + return "".format( + self.type_id, self.value) + + def __eq__(self, other): + if not isinstance(other, OtherName): + return NotImplemented + + return self.type_id == other.type_id and self.value == other.value + + def __ne__(self, other): + return not self == other diff --git a/Lib/site-packages/cryptography/x509/name.py b/Lib/site-packages/cryptography/x509/name.py new file mode 100644 index 0000000..9d93ece --- /dev/null +++ b/Lib/site-packages/cryptography/x509/name.py @@ -0,0 +1,78 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. + +from __future__ import absolute_import, division, print_function + +import six + +from cryptography import utils +from cryptography.x509.oid import ObjectIdentifier + + +class NameAttribute(object): + def __init__(self, oid, value): + if not isinstance(oid, ObjectIdentifier): + raise TypeError( + "oid argument must be an ObjectIdentifier instance." + ) + + if not isinstance(value, six.text_type): + raise TypeError( + "value argument must be a text type." + ) + + self._oid = oid + self._value = value + + oid = utils.read_only_property("_oid") + value = utils.read_only_property("_value") + + def __eq__(self, other): + if not isinstance(other, NameAttribute): + return NotImplemented + + return ( + self.oid == other.oid and + self.value == other.value + ) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash((self.oid, self.value)) + + def __repr__(self): + return "".format(self) + + +class Name(object): + def __init__(self, attributes): + self._attributes = attributes + + def get_attributes_for_oid(self, oid): + return [i for i in self if i.oid == oid] + + def __eq__(self, other): + if not isinstance(other, Name): + return NotImplemented + + return self._attributes == other._attributes + + def __ne__(self, other): + return not self == other + + def __hash__(self): + # TODO: this is relatively expensive, if this looks like a bottleneck + # for you, consider optimizing! + return hash(tuple(self._attributes)) + + def __iter__(self): + return iter(self._attributes) + + def __len__(self): + return len(self._attributes) + + def __repr__(self): + return "".format(self._attributes) diff --git a/Lib/site-packages/cryptography/x509/oid.py b/Lib/site-packages/cryptography/x509/oid.py new file mode 100644 index 0000000..ced1510 --- /dev/null +++ b/Lib/site-packages/cryptography/x509/oid.py @@ -0,0 +1,251 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
+ +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.hazmat.primitives import hashes + + +class ObjectIdentifier(object): + def __init__(self, dotted_string): + self._dotted_string = dotted_string + + nodes = self._dotted_string.split(".") + intnodes = [] + + # There must be at least 2 nodes, the first node must be 0..2, and + # if less than 2, the second node cannot have a value outside the + # range 0..39. All nodes must be integers. + for node in nodes: + try: + intnodes.append(int(node, 0)) + except ValueError: + raise ValueError( + "Malformed OID: %s (non-integer nodes)" % ( + self._dotted_string)) + + if len(nodes) < 2: + raise ValueError( + "Malformed OID: %s (insufficient number of nodes)" % ( + self._dotted_string)) + + if intnodes[0] > 2: + raise ValueError( + "Malformed OID: %s (first node outside valid range)" % ( + self._dotted_string)) + + if intnodes[0] < 2 and intnodes[1] >= 40: + raise ValueError( + "Malformed OID: %s (second node outside valid range)" % ( + self._dotted_string)) + + def __eq__(self, other): + if not isinstance(other, ObjectIdentifier): + return NotImplemented + + return self.dotted_string == other.dotted_string + + def __ne__(self, other): + return not self == other + + def __repr__(self): + return "".format( + self.dotted_string, + self._name + ) + + def __hash__(self): + return hash(self.dotted_string) + + @property + def _name(self): + return _OID_NAMES.get(self, "Unknown OID") + + dotted_string = utils.read_only_property("_dotted_string") + + +class ExtensionOID(object): + SUBJECT_DIRECTORY_ATTRIBUTES = ObjectIdentifier("2.5.29.9") + SUBJECT_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.14") + KEY_USAGE = ObjectIdentifier("2.5.29.15") + SUBJECT_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.17") + ISSUER_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.18") + BASIC_CONSTRAINTS = ObjectIdentifier("2.5.29.19") + NAME_CONSTRAINTS = ObjectIdentifier("2.5.29.30") + CRL_DISTRIBUTION_POINTS = ObjectIdentifier("2.5.29.31") + CERTIFICATE_POLICIES = ObjectIdentifier("2.5.29.32") + POLICY_MAPPINGS = ObjectIdentifier("2.5.29.33") + AUTHORITY_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.35") + POLICY_CONSTRAINTS = ObjectIdentifier("2.5.29.36") + EXTENDED_KEY_USAGE = ObjectIdentifier("2.5.29.37") + FRESHEST_CRL = ObjectIdentifier("2.5.29.46") + INHIBIT_ANY_POLICY = ObjectIdentifier("2.5.29.54") + AUTHORITY_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.1") + SUBJECT_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.11") + OCSP_NO_CHECK = ObjectIdentifier("1.3.6.1.5.5.7.48.1.5") + CRL_NUMBER = ObjectIdentifier("2.5.29.20") + + +class CRLEntryExtensionOID(object): + CERTIFICATE_ISSUER = ObjectIdentifier("2.5.29.29") + CRL_REASON = ObjectIdentifier("2.5.29.21") + INVALIDITY_DATE = ObjectIdentifier("2.5.29.24") + + +CRLExtensionOID = utils.deprecated( + CRLEntryExtensionOID, + __name__, + "CRLExtensionOID has been renamed to CRLEntryExtensionOID", + utils.DeprecatedIn12 +) + + +class NameOID(object): + COMMON_NAME = ObjectIdentifier("2.5.4.3") + COUNTRY_NAME = ObjectIdentifier("2.5.4.6") + LOCALITY_NAME = ObjectIdentifier("2.5.4.7") + STATE_OR_PROVINCE_NAME = ObjectIdentifier("2.5.4.8") + ORGANIZATION_NAME = ObjectIdentifier("2.5.4.10") + ORGANIZATIONAL_UNIT_NAME = ObjectIdentifier("2.5.4.11") + SERIAL_NUMBER = ObjectIdentifier("2.5.4.5") + SURNAME = ObjectIdentifier("2.5.4.4") + GIVEN_NAME = ObjectIdentifier("2.5.4.42") + TITLE = ObjectIdentifier("2.5.4.12") + GENERATION_QUALIFIER = 
ObjectIdentifier("2.5.4.44") + DN_QUALIFIER = ObjectIdentifier("2.5.4.46") + PSEUDONYM = ObjectIdentifier("2.5.4.65") + DOMAIN_COMPONENT = ObjectIdentifier("0.9.2342.19200300.100.1.25") + EMAIL_ADDRESS = ObjectIdentifier("1.2.840.113549.1.9.1") + JURISDICTION_COUNTRY_NAME = ObjectIdentifier("1.3.6.1.4.1.311.60.2.1.3") + JURISDICTION_LOCALITY_NAME = ObjectIdentifier("1.3.6.1.4.1.311.60.2.1.1") + JURISDICTION_STATE_OR_PROVINCE_NAME = ObjectIdentifier( + "1.3.6.1.4.1.311.60.2.1.2" + ) + BUSINESS_CATEGORY = ObjectIdentifier("2.5.4.15") + + +class SignatureAlgorithmOID(object): + RSA_WITH_MD5 = ObjectIdentifier("1.2.840.113549.1.1.4") + RSA_WITH_SHA1 = ObjectIdentifier("1.2.840.113549.1.1.5") + RSA_WITH_SHA224 = ObjectIdentifier("1.2.840.113549.1.1.14") + RSA_WITH_SHA256 = ObjectIdentifier("1.2.840.113549.1.1.11") + RSA_WITH_SHA384 = ObjectIdentifier("1.2.840.113549.1.1.12") + RSA_WITH_SHA512 = ObjectIdentifier("1.2.840.113549.1.1.13") + ECDSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10045.4.1") + ECDSA_WITH_SHA224 = ObjectIdentifier("1.2.840.10045.4.3.1") + ECDSA_WITH_SHA256 = ObjectIdentifier("1.2.840.10045.4.3.2") + ECDSA_WITH_SHA384 = ObjectIdentifier("1.2.840.10045.4.3.3") + ECDSA_WITH_SHA512 = ObjectIdentifier("1.2.840.10045.4.3.4") + DSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10040.4.3") + DSA_WITH_SHA224 = ObjectIdentifier("2.16.840.1.101.3.4.3.1") + DSA_WITH_SHA256 = ObjectIdentifier("2.16.840.1.101.3.4.3.2") + +_SIG_OIDS_TO_HASH = { + SignatureAlgorithmOID.RSA_WITH_MD5.dotted_string: hashes.MD5(), + SignatureAlgorithmOID.RSA_WITH_SHA1.dotted_string: hashes.SHA1(), + SignatureAlgorithmOID.RSA_WITH_SHA224.dotted_string: hashes.SHA224(), + SignatureAlgorithmOID.RSA_WITH_SHA256.dotted_string: hashes.SHA256(), + SignatureAlgorithmOID.RSA_WITH_SHA384.dotted_string: hashes.SHA384(), + SignatureAlgorithmOID.RSA_WITH_SHA512.dotted_string: hashes.SHA512(), + SignatureAlgorithmOID.ECDSA_WITH_SHA1.dotted_string: hashes.SHA1(), + SignatureAlgorithmOID.ECDSA_WITH_SHA224.dotted_string: hashes.SHA224(), + SignatureAlgorithmOID.ECDSA_WITH_SHA256.dotted_string: hashes.SHA256(), + SignatureAlgorithmOID.ECDSA_WITH_SHA384.dotted_string: hashes.SHA384(), + SignatureAlgorithmOID.ECDSA_WITH_SHA512.dotted_string: hashes.SHA512(), + SignatureAlgorithmOID.DSA_WITH_SHA1.dotted_string: hashes.SHA1(), + SignatureAlgorithmOID.DSA_WITH_SHA224.dotted_string: hashes.SHA224(), + SignatureAlgorithmOID.DSA_WITH_SHA256.dotted_string: hashes.SHA256() +} + + +class ExtendedKeyUsageOID(object): + SERVER_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.1") + CLIENT_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.2") + CODE_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.3") + EMAIL_PROTECTION = ObjectIdentifier("1.3.6.1.5.5.7.3.4") + TIME_STAMPING = ObjectIdentifier("1.3.6.1.5.5.7.3.8") + OCSP_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.9") + + +class AuthorityInformationAccessOID(object): + CA_ISSUERS = ObjectIdentifier("1.3.6.1.5.5.7.48.2") + OCSP = ObjectIdentifier("1.3.6.1.5.5.7.48.1") + + +class CertificatePoliciesOID(object): + CPS_QUALIFIER = ObjectIdentifier("1.3.6.1.5.5.7.2.1") + CPS_USER_NOTICE = ObjectIdentifier("1.3.6.1.5.5.7.2.2") + ANY_POLICY = ObjectIdentifier("2.5.29.32.0") + +_OID_NAMES = { + NameOID.COMMON_NAME: "commonName", + NameOID.COUNTRY_NAME: "countryName", + NameOID.LOCALITY_NAME: "localityName", + NameOID.STATE_OR_PROVINCE_NAME: "stateOrProvinceName", + NameOID.ORGANIZATION_NAME: "organizationName", + NameOID.ORGANIZATIONAL_UNIT_NAME: "organizationalUnitName", + NameOID.SERIAL_NUMBER: "serialNumber", + NameOID.SURNAME: 
"surname", + NameOID.GIVEN_NAME: "givenName", + NameOID.TITLE: "title", + NameOID.GENERATION_QUALIFIER: "generationQualifier", + NameOID.DN_QUALIFIER: "dnQualifier", + NameOID.PSEUDONYM: "pseudonym", + NameOID.DOMAIN_COMPONENT: "domainComponent", + NameOID.EMAIL_ADDRESS: "emailAddress", + NameOID.JURISDICTION_COUNTRY_NAME: "jurisdictionCountryName", + NameOID.JURISDICTION_LOCALITY_NAME: "jurisdictionLocalityName", + NameOID.JURISDICTION_STATE_OR_PROVINCE_NAME: ( + "jurisdictionStateOrProvinceName" + ), + NameOID.BUSINESS_CATEGORY: "businessCategory", + + SignatureAlgorithmOID.RSA_WITH_MD5: "md5WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA1: "sha1WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA224: "sha224WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA256: "sha256WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA384: "sha384WithRSAEncryption", + SignatureAlgorithmOID.RSA_WITH_SHA512: "sha512WithRSAEncryption", + SignatureAlgorithmOID.ECDSA_WITH_SHA1: "ecdsa-with-SHA1", + SignatureAlgorithmOID.ECDSA_WITH_SHA224: "ecdsa-with-SHA224", + SignatureAlgorithmOID.ECDSA_WITH_SHA256: "ecdsa-with-SHA256", + SignatureAlgorithmOID.ECDSA_WITH_SHA384: "ecdsa-with-SHA384", + SignatureAlgorithmOID.ECDSA_WITH_SHA512: "ecdsa-with-SHA512", + SignatureAlgorithmOID.DSA_WITH_SHA1: "dsa-with-sha1", + SignatureAlgorithmOID.DSA_WITH_SHA224: "dsa-with-sha224", + SignatureAlgorithmOID.DSA_WITH_SHA256: "dsa-with-sha256", + ExtendedKeyUsageOID.SERVER_AUTH: "serverAuth", + ExtendedKeyUsageOID.CLIENT_AUTH: "clientAuth", + ExtendedKeyUsageOID.CODE_SIGNING: "codeSigning", + ExtendedKeyUsageOID.EMAIL_PROTECTION: "emailProtection", + ExtendedKeyUsageOID.TIME_STAMPING: "timeStamping", + ExtendedKeyUsageOID.OCSP_SIGNING: "OCSPSigning", + ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES: "subjectDirectoryAttributes", + ExtensionOID.SUBJECT_KEY_IDENTIFIER: "subjectKeyIdentifier", + ExtensionOID.KEY_USAGE: "keyUsage", + ExtensionOID.SUBJECT_ALTERNATIVE_NAME: "subjectAltName", + ExtensionOID.ISSUER_ALTERNATIVE_NAME: "issuerAltName", + ExtensionOID.BASIC_CONSTRAINTS: "basicConstraints", + CRLEntryExtensionOID.CRL_REASON: "cRLReason", + CRLEntryExtensionOID.INVALIDITY_DATE: "invalidityDate", + CRLEntryExtensionOID.CERTIFICATE_ISSUER: "certificateIssuer", + ExtensionOID.NAME_CONSTRAINTS: "nameConstraints", + ExtensionOID.CRL_DISTRIBUTION_POINTS: "cRLDistributionPoints", + ExtensionOID.CERTIFICATE_POLICIES: "certificatePolicies", + ExtensionOID.POLICY_MAPPINGS: "policyMappings", + ExtensionOID.AUTHORITY_KEY_IDENTIFIER: "authorityKeyIdentifier", + ExtensionOID.POLICY_CONSTRAINTS: "policyConstraints", + ExtensionOID.EXTENDED_KEY_USAGE: "extendedKeyUsage", + ExtensionOID.FRESHEST_CRL: "freshestCRL", + ExtensionOID.INHIBIT_ANY_POLICY: "inhibitAnyPolicy", + ExtensionOID.AUTHORITY_INFORMATION_ACCESS: "authorityInfoAccess", + ExtensionOID.SUBJECT_INFORMATION_ACCESS: "subjectInfoAccess", + ExtensionOID.OCSP_NO_CHECK: "OCSPNoCheck", + ExtensionOID.CRL_NUMBER: "cRLNumber", + AuthorityInformationAccessOID.OCSP: "OCSP", + AuthorityInformationAccessOID.CA_ISSUERS: "caIssuers", + CertificatePoliciesOID.CPS_QUALIFIER: "id-qt-cps", + CertificatePoliciesOID.CPS_USER_NOTICE: "id-qt-unotice", +} diff --git a/Lib/site-packages/easy_install.py b/Lib/site-packages/easy_install.py new file mode 100644 index 0000000..d87e984 --- /dev/null +++ b/Lib/site-packages/easy_install.py @@ -0,0 +1,5 @@ +"""Run the EasyInstall command""" + +if __name__ == '__main__': + from setuptools.command.easy_install import main + main() diff --git 
a/Lib/site-packages/idna-2.0.dist-info/DESCRIPTION.rst b/Lib/site-packages/idna-2.0.dist-info/DESCRIPTION.rst
new file mode 100644
index 0000000..84c7681
--- /dev/null
+++ b/Lib/site-packages/idna-2.0.dist-info/DESCRIPTION.rst
@@ -0,0 +1,144 @@
+Internationalized Domain Names in Applications (IDNA)
+=====================================================
+
+A library to support the Internationalised Domain Names in Applications
+(IDNA) protocol as specified in `RFC 5891 `_.
+This version of the protocol is often referred to as “IDNA2008” and can
+produce different results from the earlier standard from 2003.
+
+The library is also intended to act as a suitable drop-in replacement for
+the “encodings.idna” module that comes with the Python standard library
+but currently only supports the older 2003 specification.
+
+Its basic functions are simply executed:
+
+.. code-block:: pycon
+
+    >>> import idna
+    >>> idna.encode(u'ドメイン.テスト')
+    'xn--eckwd4c7c.xn--zckzah'
+    >>> print idna.decode('xn--eckwd4c7c.xn--zckzah')
+    ドメイン.テスト
+
+Packages
+--------
+
+The latest tagged release version is published in the PyPI repository:
+
+.. image:: https://badge.fury.io/py/idna.svg
+   :target: http://badge.fury.io/py/idna
+
+
+Installation
+------------
+
+To install this library, you can use PIP:
+
+.. code-block:: bash
+
+    $ pip install idna
+
+Alternatively, you can install the package using the bundled setup script:
+
+.. code-block:: bash
+
+    $ python setup.py install
+
+This library should work with Python 2.7, and Python 3.3 or later.
+
+
+Usage
+-----
+
+For typical usage, the ``encode`` and ``decode`` functions will take a domain
+name argument and perform a conversion to an A-label or U-label respectively.
+
+.. code-block:: pycon
+
+    >>> import idna
+    >>> idna.encode(u'ドメイン.テスト')
+    'xn--eckwd4c7c.xn--zckzah'
+    >>> print idna.decode('xn--eckwd4c7c.xn--zckzah')
+    ドメイン.テスト
+
+You may use the codec encoding and decoding methods using the
+``idna.codec`` module.
+
+.. code-block:: pycon
+
+    >>> import idna.codec
+    >>> print u'домена.испытание'.encode('idna')
+    xn--80ahd1agd.xn--80akhbyknj4f
+    >>> print 'xn--80ahd1agd.xn--80akhbyknj4f'.decode('idna')
+    домена.испытание
+
+Conversions can be applied at a per-label basis using the ``ulabel`` or ``alabel``
+functions if necessary:
+
+.. code-block:: pycon
+
+    >>> idna.alabel(u'测试')
+    'xn--0zwm56d'
+
+Compatibility Mapping (UTS #46)
++++++++++++++++++++++++++++++++
+
+As described in RFC 5895, the IDNA specification no longer including mappings
+from different forms of input that a user may enter, to the form that is provided
+to the IDNA functions. This functionality is now a local user-interface issue
+distinct from the IDNA functionality.
+
+The Unicode Consortium has developed one such user-level mapping, known as
+`Unicode IDNA Compatibility Processing `_.
+It provides for both transitional mapping and non-transitional mapping described
+in this document.
+
+.. code-block:: pycon
+
+    >>> import idna
+    >>> idna.encode(u'Königsgäßchen')
+    ...
+    idna.core.InvalidCodepoint: Codepoint U+004B at position 1 of u'K\xf6nigsg\xe4\xdfchen' not allowed
+    >>> idna.encode(u'Königsgäßchen', uts46=True)
+    'xn--knigsgchen-b4a3dun'
+    >>> idna.encode(u'Königsgäßchen', uts46=True, transitional=True)
+    'xn--knigsgsschen-lcb0w'
+
+Note that implementors should use transitional processing with caution as the outputs
+of the functions may differ from what is expected, as noted in the example.
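The ``uts46`` keyword used above is implemented by the ``uts46_remap`` helper that this patch adds in ``idna/core.py``: ``encode()`` first remaps the input and then encodes the result. A minimal sketch of that equivalence, assuming only the vendored idna 2.0 API added in this patch (variable names are illustrative):

.. code-block:: python

    # Sketch: idna.encode(domain, uts46=True) behaves like remapping with
    # uts46_remap() and then encoding the remapped string (see encode() in
    # idna/core.py later in this patch).
    import idna
    from idna.core import uts46_remap

    domain = u'K\xf6nigsg\xe4\xdfchen'   # u'Königsgäßchen'
    remapped = uts46_remap(domain, std3_rules=False, transitional=False)
    assert idna.encode(remapped) == idna.encode(domain, uts46=True)
    # Both yield the A-label 'xn--knigsgchen-b4a3dun' shown above
    # (a bytes object on Python 3).

Transitional processing differs only in how deviation characters such as ß are mapped, which is why the transitional call above produces a different A-label.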
+
+``encodings.idna`` Compatibility
+++++++++++++++++++++++++++++++++
+
+Function calls from the Python built-in ``encodings.idna`` module are
+mapping to their IDNA 2008 equivalents using the ``idna.compat`` module.
+Simply substitute the ``import`` clause in your code to refer to the
+new module name.
+
+Exceptions
+----------
+
+All errors raised during the conversion following the specification should
+raise an exception derived from the ``idna.IDNAError`` base class.
+
+More specific exceptions that may be generated as ``idna.IDNABidiError``
+when the error reflects an illegal combination of left-to-right and right-to-left
+characters in a label; ``idna.InvalidCodepoint`` when a specific codepoint is
+an illegal character in an IDN label (i.e. INVALID); and ``idna.InvalidCodepointContext``
+when the codepoint is illegal based on its positional context (i.e. it is CONTEXTO
+or CONTEXTJ but the contextual requirements are not satisfied.)
+
+Testing
+-------
+
+The library has a test suite based on each rule of the IDNA specification, as
+well as test that are provided as part of the Unicode Technical Standard 46,
+`Unicode IDNA Compatibility Processing `_.
+
+The tests are run automatically on each commit to the master branch of the
+idna git repository at Travis CI:
+
+.. image:: https://travis-ci.org/kjd/idna.svg?branch=master
+   :target: https://travis-ci.org/kjd/idna
+
+
diff --git a/Lib/site-packages/idna-2.0.dist-info/INSTALLER b/Lib/site-packages/idna-2.0.dist-info/INSTALLER
new file mode 100644
index 0000000..a1b589e
--- /dev/null
+++ b/Lib/site-packages/idna-2.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/Lib/site-packages/idna-2.0.dist-info/METADATA b/Lib/site-packages/idna-2.0.dist-info/METADATA
new file mode 100644
index 0000000..0dbccca
--- /dev/null
+++ b/Lib/site-packages/idna-2.0.dist-info/METADATA
@@ -0,0 +1,168 @@
+Metadata-Version: 2.0
+Name: idna
+Version: 2.0
+Summary: Internationalized Domain Names in Applications (IDNA)
+Home-page: https://github.com/kjd/idna
+Author: Kim Davies
+Author-email: kim@cynosure.com.au
+License: BSD-like
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: System Administrators
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Topic :: Internet :: Name Service (DNS)
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Utilities
+
+Internationalized Domain Names in Applications (IDNA)
+=====================================================
+
+A library to support the Internationalised Domain Names in Applications
+(IDNA) protocol as specified in `RFC 5891 `_.
+This version of the protocol is often referred to as “IDNA2008” and can
+produce different results from the earlier standard from 2003.
+
+The library is also intended to act as a suitable drop-in replacement for
+the “encodings.idna” module that comes with the Python standard library
+but currently only supports the older 2003 specification.
+
+Its basic functions are simply executed:
+
+.. code-block:: pycon
+
+    >>> import idna
+    >>> idna.encode(u'ドメイン.テスト')
+    'xn--eckwd4c7c.xn--zckzah'
+    >>> print idna.decode('xn--eckwd4c7c.xn--zckzah')
+    ドメイン.テスト
+
+Packages
+--------
+
+The latest tagged release version is published in the PyPI repository:
+
+.. image:: https://badge.fury.io/py/idna.svg
+   :target: http://badge.fury.io/py/idna
+
+
+Installation
+------------
+
+To install this library, you can use PIP:
+
+.. code-block:: bash
+
+    $ pip install idna
+
+Alternatively, you can install the package using the bundled setup script:
+
+.. code-block:: bash
+
+    $ python setup.py install
+
+This library should work with Python 2.7, and Python 3.3 or later.
+
+
+Usage
+-----
+
+For typical usage, the ``encode`` and ``decode`` functions will take a domain
+name argument and perform a conversion to an A-label or U-label respectively.
+
+.. code-block:: pycon
+
+    >>> import idna
+    >>> idna.encode(u'ドメイン.テスト')
+    'xn--eckwd4c7c.xn--zckzah'
+    >>> print idna.decode('xn--eckwd4c7c.xn--zckzah')
+    ドメイン.テスト
+
+You may use the codec encoding and decoding methods using the
+``idna.codec`` module.
+
+.. code-block:: pycon
+
+    >>> import idna.codec
+    >>> print u'домена.испытание'.encode('idna')
+    xn--80ahd1agd.xn--80akhbyknj4f
+    >>> print 'xn--80ahd1agd.xn--80akhbyknj4f'.decode('idna')
+    домена.испытание
+
+Conversions can be applied at a per-label basis using the ``ulabel`` or ``alabel``
+functions if necessary:
+
+.. code-block:: pycon
+
+    >>> idna.alabel(u'测试')
+    'xn--0zwm56d'
+
+Compatibility Mapping (UTS #46)
++++++++++++++++++++++++++++++++
+
+As described in RFC 5895, the IDNA specification no longer including mappings
+from different forms of input that a user may enter, to the form that is provided
+to the IDNA functions. This functionality is now a local user-interface issue
+distinct from the IDNA functionality.
+
+The Unicode Consortium has developed one such user-level mapping, known as
+`Unicode IDNA Compatibility Processing `_.
+It provides for both transitional mapping and non-transitional mapping described
+in this document.
+
+.. code-block:: pycon
+
+    >>> import idna
+    >>> idna.encode(u'Königsgäßchen')
+    ...
+    idna.core.InvalidCodepoint: Codepoint U+004B at position 1 of u'K\xf6nigsg\xe4\xdfchen' not allowed
+    >>> idna.encode(u'Königsgäßchen', uts46=True)
+    'xn--knigsgchen-b4a3dun'
+    >>> idna.encode(u'Königsgäßchen', uts46=True, transitional=True)
+    'xn--knigsgsschen-lcb0w'
+
+Note that implementors should use transitional processing with caution as the outputs
+of the functions may differ from what is expected, as noted in the example.
+
+``encodings.idna`` Compatibility
+++++++++++++++++++++++++++++++++
+
+Function calls from the Python built-in ``encodings.idna`` module are
+mapping to their IDNA 2008 equivalents using the ``idna.compat`` module.
+Simply substitute the ``import`` clause in your code to refer to the
+new module name.
+
+Exceptions
+----------
+
+All errors raised during the conversion following the specification should
+raise an exception derived from the ``idna.IDNAError`` base class.
+
+More specific exceptions that may be generated as ``idna.IDNABidiError``
+when the error reflects an illegal combination of left-to-right and right-to-left
+characters in a label; ``idna.InvalidCodepoint`` when a specific codepoint is
+an illegal character in an IDN label (i.e. INVALID); and ``idna.InvalidCodepointContext``
+when the codepoint is illegal based on its positional context (i.e. it is CONTEXTO
+or CONTEXTJ but the contextual requirements are not satisfied.)
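Because every error listed above derives from ``idna.IDNAError`` (itself a subclass of ``UnicodeError``, as defined in ``idna/core.py`` in this patch), callers can handle the specific conditions first and fall back to the base class. A small sketch, assuming the vendored idna 2.0 package; the helper name is illustrative and not part of the library:

.. code-block:: python

    # Sketch: branching on the exception hierarchy described above.
    import idna

    def to_alabel_or_none(domain):          # illustrative helper, not idna API
        try:
            return idna.encode(domain)      # A-label form, e.g. b'xn--...'
        except idna.IDNABidiError:
            return None                     # illegal left-to-right/right-to-left mix
        except idna.InvalidCodepointContext:
            return None                     # CONTEXTO/CONTEXTJ requirement not met
        except idna.InvalidCodepoint:
            return None                     # disallowed or unassigned codepoint
        except idna.IDNAError:
            return None                     # any other failure (length, hyphens, NFC, ...)

Catching ``UnicodeError`` would also work, since ``IDNAError`` derives from it; that keeps fallback handling compatible with code written for the standard library's ``encodings.idna``.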
+ +Testing +------- + +The library has a test suite based on each rule of the IDNA specification, as +well as test that are provided as part of the Unicode Technical Standard 46, +`Unicode IDNA Compatibility Processing `_. + +The tests are run automatically on each commit to the master branch of the +idna git repository at Travis CI: + +.. image:: https://travis-ci.org/kjd/idna.svg?branch=master + :target: https://travis-ci.org/kjd/idna + + diff --git a/Lib/site-packages/idna-2.0.dist-info/RECORD b/Lib/site-packages/idna-2.0.dist-info/RECORD new file mode 100644 index 0000000..fe92809 --- /dev/null +++ b/Lib/site-packages/idna-2.0.dist-info/RECORD @@ -0,0 +1,20 @@ +idna/__init__.py,sha256=K0kNy26Vm6A-1V5lST3ily6yVsNLUbiqk6AZDFm2nJI,20 +idna/codec.py,sha256=fHb4zE1NzJR6aZmcpcaRd3tq4e8mYaETS1UogAU2IfM,3303 +idna/compat.py,sha256=LXKc5WEVdXNWv5A0FpETd5T15qZyQTKGBeoHF-GbdkM,240 +idna/core.py,sha256=zL5Do1c7hChvSJI6tUWnRB6RZayruc7JUeFJn2M9OTc,11243 +idna/idnadata.py,sha256=UVUBp9zbkxFBtZgNzq7sdQ9kYMPJseVXCC02PsHYn-E,90754 +idna/uts46data.py,sha256=RAmrTTPJKgSNXPICpBZ0f6Bp8_9R2OHQysKDGPymYlw,181224 +idna-2.0.dist-info/DESCRIPTION.rst,sha256=kSltH00qpJdoxPQkrllcXZXbgbQJy8uTam97W_RVjyM,4774 +idna-2.0.dist-info/METADATA,sha256=zc-FgzaRTcjWPBQf5XDnbZfZ7SFrBOZKUL8J29e0imA,5712 +idna-2.0.dist-info/metadata.json,sha256=9LK-dlPf8HPLMPefULPipZdGCVmIkahoVFeM4UvAc9A,1015 +idna-2.0.dist-info/pbr.json,sha256=G17oY0YosBv1cEKLCgflO_-APtB0l9i46H4DKOvoLew,46 +idna-2.0.dist-info/RECORD,, +idna-2.0.dist-info/top_level.txt,sha256=jSag9sEDqvSPftxOQy-ABfGV_RSy7oFh4zZJpODV8k0,5 +idna-2.0.dist-info/WHEEL,sha256=AvR0WeTpDaxT645bl5FQxUK6NPsTls2ttpcGJg3j1Xg,110 +c:\users\j\oml\platform\win32\Lib\site-packages\idna-2.0.dist-info\INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +idna/__pycache__/uts46data.cpython-35.pyc,, +idna/__pycache__/idnadata.cpython-35.pyc,, +idna/__pycache__/compat.cpython-35.pyc,, +idna/__pycache__/__init__.cpython-35.pyc,, +idna/__pycache__/core.cpython-35.pyc,, +idna/__pycache__/codec.cpython-35.pyc,, diff --git a/Lib/site-packages/idna-2.0.dist-info/WHEEL b/Lib/site-packages/idna-2.0.dist-info/WHEEL new file mode 100644 index 0000000..9dff69d --- /dev/null +++ b/Lib/site-packages/idna-2.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.24.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/Lib/site-packages/idna-2.0.dist-info/metadata.json b/Lib/site-packages/idna-2.0.dist-info/metadata.json new file mode 100644 index 0000000..85f9fb5 --- /dev/null +++ b/Lib/site-packages/idna-2.0.dist-info/metadata.json @@ -0,0 +1 @@ +{"license": "BSD-like", "name": "idna", "metadata_version": "2.0", "generator": "bdist_wheel (0.24.0)", "summary": "Internationalized Domain Names in Applications (IDNA)", "version": "2.0", "extensions": {"python.details": {"project_urls": {"Home": "https://github.com/kjd/idna"}, "document_names": {"description": "DESCRIPTION.rst"}, "contacts": [{"role": "author", "email": "kim@cynosure.com.au", "name": "Kim Davies"}]}}, "classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Topic :: Internet :: Name Service 
(DNS)", "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: Utilities"]} \ No newline at end of file diff --git a/Lib/site-packages/idna-2.0.dist-info/pbr.json b/Lib/site-packages/idna-2.0.dist-info/pbr.json new file mode 100644 index 0000000..086d6e8 --- /dev/null +++ b/Lib/site-packages/idna-2.0.dist-info/pbr.json @@ -0,0 +1 @@ +{"is_release": true, "git_version": "bb6e94c"} \ No newline at end of file diff --git a/Lib/site-packages/idna-2.0.dist-info/top_level.txt b/Lib/site-packages/idna-2.0.dist-info/top_level.txt new file mode 100644 index 0000000..c40472e --- /dev/null +++ b/Lib/site-packages/idna-2.0.dist-info/top_level.txt @@ -0,0 +1 @@ +idna diff --git a/Lib/site-packages/idna/__init__.py b/Lib/site-packages/idna/__init__.py new file mode 100644 index 0000000..bb67a43 --- /dev/null +++ b/Lib/site-packages/idna/__init__.py @@ -0,0 +1 @@ +from .core import * diff --git a/Lib/site-packages/idna/codec.py b/Lib/site-packages/idna/codec.py new file mode 100644 index 0000000..cdd9675 --- /dev/null +++ b/Lib/site-packages/idna/codec.py @@ -0,0 +1,118 @@ +from idna.core import encode, decode, alabel, ulabel, IDNAError +import codecs +import re + +_unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]') + +class Codec(codecs.Codec): + + def encode(self, data, errors='strict'): + + if errors != 'strict': + raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) + + if not data: + return "", 0 + + return encode(data), len(data) + + def decode(self, data, errors='strict'): + + if errors != 'strict': + raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) + + if not data: + return u"", 0 + + return decode(data), len(data) + +class IncrementalEncoder(codecs.BufferedIncrementalEncoder): + def _buffer_encode(self, data, errors, final): + if errors != 'strict': + raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) + + if not data: + return ("", 0) + + labels = _unicode_dots_re.split(data) + trailing_dot = u'' + if labels: + if not labels[-1]: + trailing_dot = '.' + del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = '.' + + result = [] + size = 0 + for label in labels: + result.append(alabel(label)) + if size: + size += 1 + size += len(label) + + # Join with U+002E + result = ".".join(result) + trailing_dot + size += len(trailing_dot) + return (result, size) + +class IncrementalDecoder(codecs.BufferedIncrementalDecoder): + def _buffer_decode(self, data, errors, final): + if errors != 'strict': + raise IDNAError("Unsupported error handling \"{0}\"".format(errors)) + + if not data: + return (u"", 0) + + # IDNA allows decoding to operate on Unicode strings, too. + if isinstance(data, unicode): + labels = _unicode_dots_re.split(data) + else: + # Must be ASCII string + data = str(data) + unicode(data, "ascii") + labels = data.split(".") + + trailing_dot = u'' + if labels: + if not labels[-1]: + trailing_dot = u'.' + del labels[-1] + elif not final: + # Keep potentially unfinished label until the next call + del labels[-1] + if labels: + trailing_dot = u'.' 
+ + result = [] + size = 0 + for label in labels: + result.append(ulabel(label)) + if size: + size += 1 + size += len(label) + + result = u".".join(result) + trailing_dot + size += len(trailing_dot) + return (result, size) + + +class StreamWriter(Codec, codecs.StreamWriter): + pass + +class StreamReader(Codec, codecs.StreamReader): + pass + +def getregentry(): + return codecs.CodecInfo( + name='idna', + encode=Codec().encode, + decode=Codec().decode, + incrementalencoder=IncrementalEncoder, + incrementaldecoder=IncrementalDecoder, + streamwriter=StreamWriter, + streamreader=StreamReader, + ) diff --git a/Lib/site-packages/idna/compat.py b/Lib/site-packages/idna/compat.py new file mode 100644 index 0000000..ef9bcbd --- /dev/null +++ b/Lib/site-packages/idna/compat.py @@ -0,0 +1,12 @@ +from idna.core import * +from idna.codec import * + +def ToASCII(label): + return encode(label) + +def ToUnicode(label): + return decode(label) + +def nameprep(s): + raise NotImplementedError("IDNA 2008 does not utilise nameprep protocol") + diff --git a/Lib/site-packages/idna/core.py b/Lib/site-packages/idna/core.py new file mode 100644 index 0000000..35b2803 --- /dev/null +++ b/Lib/site-packages/idna/core.py @@ -0,0 +1,386 @@ +from . import idnadata +import bisect +import unicodedata +import re +import sys + +_virama_combining_class = 9 +_alabel_prefix = b'xn--' +_unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]') + +if sys.version_info[0] == 3: + unicode = str + unichr = chr + +class IDNAError(UnicodeError): + """ Base exception for all IDNA-encoding related problems """ + pass + + +class IDNABidiError(IDNAError): + """ Exception when bidirectional requirements are not satisfied """ + pass + + +class InvalidCodepoint(IDNAError): + """ Exception when a disallowed or unallocated codepoint is used """ + pass + + +class InvalidCodepointContext(IDNAError): + """ Exception when the codepoint is not valid in the context it is used """ + pass + + +def _combining_class(cp): + return unicodedata.combining(unichr(cp)) + +def _is_script(cp, script): + return ord(cp) in idnadata.scripts[script] + +def _punycode(s): + return s.encode('punycode') + +def _unot(s): + return 'U+{0:04X}'.format(s) + + +def valid_label_length(label): + + if len(label) > 63: + return False + return True + + +def valid_string_length(label, trailing_dot): + + if len(label) > (254 if trailing_dot else 253): + return False + return True + + +def check_bidi(label, check_ltr=False): + + # Bidi rules should only be applied if string contains RTL characters + bidi_label = False + for (idx, cp) in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + if direction == '': + # String likely comes from a newer version of Unicode + raise IDNABidiError('Unknown directionality in label {0} at position {1}'.format(repr(label), idx)) + if direction in ['R', 'AL', 'AN']: + bidi_label = True + break + if not bidi_label and not check_ltr: + return True + + # Bidi rule 1 + direction = unicodedata.bidirectional(label[0]) + if direction in ['R', 'AL']: + rtl = True + elif direction == 'L': + rtl = False + else: + raise IDNABidiError('First codepoint in label {0} must be directionality L, R or AL'.format(repr(label))) + + valid_ending = False + number_type = False + for (idx, cp) in enumerate(label, 1): + direction = unicodedata.bidirectional(cp) + + if rtl: + # Bidi rule 2 + if not direction in ['R', 'AL', 'AN', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: + raise IDNABidiError('Invalid direction for codepoint at position {0} in a right-to-left 
label'.format(idx)) + # Bidi rule 3 + if direction in ['R', 'AL', 'EN', 'AN']: + valid_ending = True + elif direction != 'NSM': + valid_ending = False + # Bidi rule 4 + if direction in ['AN', 'EN']: + if not number_type: + number_type = direction + else: + if number_type != direction: + raise IDNABidiError('Can not mix numeral types in a right-to-left label') + else: + # Bidi rule 5 + if not direction in ['L', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']: + raise IDNABidiError('Invalid direction for codepoint at position {0} in a left-to-right label'.format(idx)) + # Bidi rule 6 + if direction in ['L', 'EN']: + valid_ending = True + elif direction != 'NSM': + valid_ending = False + + if not valid_ending: + raise IDNABidiError('Label ends with illegal codepoint directionality') + + return True + + +def check_initial_combiner(label): + + if unicodedata.category(label[0])[0] == 'M': + raise IDNAError('Label begins with an illegal combining character') + return True + + +def check_hyphen_ok(label): + + if label[2:4] == '--': + raise IDNAError('Label has disallowed hyphens in 3rd and 4th position') + if label[0] == '-' or label[-1] == '-': + raise IDNAError('Label must not start or end with a hyphen') + return True + + +def check_nfc(label): + + if unicodedata.normalize('NFC', label) != label: + raise IDNAError('Label must be in Normalization Form C') + + +def valid_contextj(label, pos): + + cp_value = ord(label[pos]) + + if cp_value == 0x200c: + + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + + ok = False + for i in range(pos-1, -1, -1): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == 'T': + continue + if joining_type in ['L', 'D']: + ok = True + break + + if not ok: + return False + + ok = False + for i in range(pos+1, len(label)): + joining_type = idnadata.joining_types.get(ord(label[i])) + if joining_type == 'T': + continue + if joining_type in ['R', 'D']: + ok = True + break + return ok + + if cp_value == 0x200d: + + if pos > 0: + if _combining_class(ord(label[pos - 1])) == _virama_combining_class: + return True + return False + + else: + + return False + + +def valid_contexto(label, pos, exception=False): + + cp_value = ord(label[pos]) + + if cp_value == 0x00b7: + if 0 < pos < len(label)-1: + if ord(label[pos - 1]) == 0x006c and ord(label[pos + 1]) == 0x006c: + return True + return False + + elif cp_value == 0x0375: + if pos < len(label)-1 and len(label) > 1: + return _is_script(label[pos + 1], 'Greek') + return False + + elif cp_value == 0x05f3 or cp_value == 0x05f4: + if pos > 0: + return _is_script(label[pos - 1], 'Hebrew') + return False + + elif cp_value == 0x30fb: + for cp in label: + if cp == u'\u30fb': + continue + if not _is_script(cp, 'Hiragana') and not _is_script(cp, 'Katakana') and not _is_script(cp, 'Han'): + return False + return True + + elif 0x660 <= cp_value <= 0x669: + for cp in label: + if 0x6f0 <= ord(cp) <= 0x06f9: + return False + return True + + elif 0x6f0 <= cp_value <= 0x6f9: + for cp in label: + if 0x660 <= ord(cp) <= 0x0669: + return False + return True + + +def check_label(label): + + if isinstance(label, (bytes, bytearray)): + label = label.decode('utf-8') + if len(label) == 0: + raise IDNAError('Empty Label') + + check_nfc(label) + check_hyphen_ok(label) + check_initial_combiner(label) + + for (pos, cp) in enumerate(label): + cp_value = ord(cp) + if cp_value in idnadata.codepoint_classes['PVALID']: + continue + elif cp_value in idnadata.codepoint_classes['CONTEXTJ']: + if not 
valid_contextj(label, pos): + raise InvalidCodepointContext('Joiner {0} not allowed at position {1} in {2}'.format(_unot(cp_value), pos+1, repr(label))) + elif cp_value in idnadata.codepoint_classes['CONTEXTO']: + if not valid_contexto(label, pos): + raise InvalidCodepointContext('Codepoint {0} not allowed at position {1} in {2}'.format(_unot(cp_value), pos+1, repr(label))) + else: + raise InvalidCodepoint('Codepoint {0} at position {1} of {2} not allowed'.format(_unot(cp_value), pos+1, repr(label))) + + check_bidi(label) + + +def alabel(label): + + try: + label = label.encode('ascii') + try: + ulabel(label) + except: + raise IDNAError('The label {0} is not a valid A-label'.format(label)) + if not valid_label_length(label): + raise IDNAError('Label too long') + return label + except UnicodeError: + pass + + if not label: + raise IDNAError('No Input') + + label = unicode(label) + check_label(label) + label = _punycode(label) + label = _alabel_prefix + label + + if not valid_label_length(label): + raise IDNAError('Label too long') + + return label + + +def ulabel(label): + + if not isinstance(label, (bytes, bytearray)): + try: + label = label.encode('ascii') + except UnicodeError: + check_label(label) + return label + + label = label.lower() + if label.startswith(_alabel_prefix): + label = label[len(_alabel_prefix):] + else: + check_label(label) + return label.decode('ascii') + + label = label.decode('punycode') + check_label(label) + return label + + +def uts46_remap(domain, std3_rules=True, transitional=False): + """Re-map the characters in the string according to UTS46 processing.""" + from .uts46data import uts46data + output = u"" + try: + for pos, char in enumerate(domain): + code_point = ord(char) + uts46row = uts46data[code_point if code_point < 256 else + bisect.bisect_left(uts46data, (code_point, "Z")) - 1] + status = uts46row[1] + replacement = uts46row[2] if len(uts46row) == 3 else None + if (status == "V" or + (status == "D" and not transitional) or + (status == "3" and std3_rules and replacement is None)): + output += char + elif replacement is not None and (status == "M" or + (status == "3" and std3_rules) or + (status == "D" and transitional)): + output += replacement + elif status != "I": + raise IndexError() + return unicodedata.normalize("NFC", output) + except IndexError: + raise InvalidCodepoint( + "Codepoint {0} not allowed at position {1} in {2}".format( + _unot(code_point), pos + 1, repr(domain))) + + +def encode(s, strict=False, uts46=False, std3_rules=False, transitional=False): + + if isinstance(s, (bytes, bytearray)): + s = s.decode("ascii") + if uts46: + s = uts46_remap(s, std3_rules, transitional) + trailing_dot = False + result = [] + if strict: + labels = s.split('.') + else: + labels = _unicode_dots_re.split(s) + while labels and not labels[0]: + del labels[0] + if not labels: + raise IDNAError('Empty domain') + if labels[-1] == '': + del labels[-1] + trailing_dot = True + for label in labels: + result.append(alabel(label)) + if trailing_dot: + result.append(b'') + s = b'.'.join(result) + if not valid_string_length(s, trailing_dot): + raise IDNAError('Domain too long') + return s + + +def decode(s, strict=False, uts46=False, std3_rules=False): + + if isinstance(s, (bytes, bytearray)): + s = s.decode("ascii") + if uts46: + s = uts46_remap(s, std3_rules, False) + trailing_dot = False + result = [] + if not strict: + labels = _unicode_dots_re.split(s) + else: + labels = s.split(u'.') + while labels and not labels[0]: + del labels[0] + if not labels: + raise 
IDNAError('Empty domain') + if not labels[-1]: + del labels[-1] + trailing_dot = True + for label in labels: + result.append(ulabel(label)) + if trailing_dot: + result.append(u'') + return u'.'.join(result) diff --git a/Lib/site-packages/idna/idnadata.py b/Lib/site-packages/idna/idnadata.py new file mode 100644 index 0000000..e2867e5 --- /dev/null +++ b/Lib/site-packages/idna/idnadata.py @@ -0,0 +1,3573 @@ +# This file is automatically generated by build-idnadata.py + +scripts = { + 'Arabic': frozenset( + list(range(0x600,0x605)) + + list(range(0x606,0x60c)) + + list(range(0x60d,0x61b)) + + [0x61e] + + list(range(0x620,0x640)) + + list(range(0x641,0x64b)) + + list(range(0x656,0x660)) + + list(range(0x66a,0x670)) + + list(range(0x671,0x6dd)) + + list(range(0x6de,0x700)) + + list(range(0x750,0x780)) + + list(range(0x8a0,0x8b3)) + + list(range(0x8e4,0x900)) + + list(range(0xfb50,0xfbc2)) + + list(range(0xfbd3,0xfd3e)) + + list(range(0xfd50,0xfd90)) + + list(range(0xfd92,0xfdc8)) + + list(range(0xfdf0,0xfdfe)) + + list(range(0xfe70,0xfe75)) + + list(range(0xfe76,0xfefd)) + + list(range(0x10e60,0x10e7f)) + + list(range(0x1ee00,0x1ee04)) + + list(range(0x1ee05,0x1ee20)) + + list(range(0x1ee21,0x1ee23)) + + [0x1ee24] + + [0x1ee27] + + list(range(0x1ee29,0x1ee33)) + + list(range(0x1ee34,0x1ee38)) + + [0x1ee39] + + [0x1ee3b] + + [0x1ee42] + + [0x1ee47] + + [0x1ee49] + + [0x1ee4b] + + list(range(0x1ee4d,0x1ee50)) + + list(range(0x1ee51,0x1ee53)) + + [0x1ee54] + + [0x1ee57] + + [0x1ee59] + + [0x1ee5b] + + [0x1ee5d] + + [0x1ee5f] + + list(range(0x1ee61,0x1ee63)) + + [0x1ee64] + + list(range(0x1ee67,0x1ee6b)) + + list(range(0x1ee6c,0x1ee73)) + + list(range(0x1ee74,0x1ee78)) + + list(range(0x1ee79,0x1ee7d)) + + [0x1ee7e] + + list(range(0x1ee80,0x1ee8a)) + + list(range(0x1ee8b,0x1ee9c)) + + list(range(0x1eea1,0x1eea4)) + + list(range(0x1eea5,0x1eeaa)) + + list(range(0x1eeab,0x1eebc)) + + list(range(0x1eef0,0x1eef2)) + ), + 'Armenian': frozenset( + list(range(0x531,0x557)) + + list(range(0x559,0x560)) + + list(range(0x561,0x588)) + + [0x58a] + + list(range(0x58d,0x590)) + + list(range(0xfb13,0xfb18)) + ), + 'Avestan': frozenset( + list(range(0x10b00,0x10b36)) + + list(range(0x10b39,0x10b40)) + ), + 'Balinese': frozenset( + list(range(0x1b00,0x1b4c)) + + list(range(0x1b50,0x1b7d)) + ), + 'Bamum': frozenset( + list(range(0xa6a0,0xa6f8)) + + list(range(0x16800,0x16a39)) + ), + 'Bassa_Vah': frozenset( + list(range(0x16ad0,0x16aee)) + + list(range(0x16af0,0x16af6)) + ), + 'Batak': frozenset( + list(range(0x1bc0,0x1bf4)) + + list(range(0x1bfc,0x1c00)) + ), + 'Bengali': frozenset( + list(range(0x980,0x984)) + + list(range(0x985,0x98d)) + + list(range(0x98f,0x991)) + + list(range(0x993,0x9a9)) + + list(range(0x9aa,0x9b1)) + + [0x9b2] + + list(range(0x9b6,0x9ba)) + + list(range(0x9bc,0x9c5)) + + list(range(0x9c7,0x9c9)) + + list(range(0x9cb,0x9cf)) + + [0x9d7] + + list(range(0x9dc,0x9de)) + + list(range(0x9df,0x9e4)) + + list(range(0x9e6,0x9fc)) + ), + 'Bopomofo': frozenset( + list(range(0x2ea,0x2ec)) + + list(range(0x3105,0x312e)) + + list(range(0x31a0,0x31bb)) + ), + 'Brahmi': frozenset( + list(range(0x11000,0x1104e)) + + list(range(0x11052,0x11070)) + + [0x1107f] + ), + 'Braille': frozenset( + list(range(0x2800,0x2900)) + ), + 'Buginese': frozenset( + list(range(0x1a00,0x1a1c)) + + list(range(0x1a1e,0x1a20)) + ), + 'Buhid': frozenset( + list(range(0x1740,0x1754)) + ), + 'Canadian_Aboriginal': frozenset( + list(range(0x1400,0x1680)) + + list(range(0x18b0,0x18f6)) + ), + 'Carian': frozenset( + 
list(range(0x102a0,0x102d1)) + ), + 'Caucasian_Albanian': frozenset( + list(range(0x10530,0x10564)) + + [0x1056f] + ), + 'Chakma': frozenset( + list(range(0x11100,0x11135)) + + list(range(0x11136,0x11144)) + ), + 'Cham': frozenset( + list(range(0xaa00,0xaa37)) + + list(range(0xaa40,0xaa4e)) + + list(range(0xaa50,0xaa5a)) + + list(range(0xaa5c,0xaa60)) + ), + 'Cherokee': frozenset( + list(range(0x13a0,0x13f5)) + ), + 'Common': frozenset( + list(range(0x0,0x41)) + + list(range(0x5b,0x61)) + + list(range(0x7b,0xaa)) + + list(range(0xab,0xba)) + + list(range(0xbb,0xc0)) + + [0xd7] + + [0xf7] + + list(range(0x2b9,0x2e0)) + + list(range(0x2e5,0x2ea)) + + list(range(0x2ec,0x300)) + + [0x374] + + [0x37e] + + [0x385] + + [0x387] + + [0x589] + + [0x605] + + [0x60c] + + list(range(0x61b,0x61d)) + + [0x61f] + + [0x640] + + list(range(0x660,0x66a)) + + [0x6dd] + + list(range(0x964,0x966)) + + [0xe3f] + + list(range(0xfd5,0xfd9)) + + [0x10fb] + + list(range(0x16eb,0x16ee)) + + list(range(0x1735,0x1737)) + + list(range(0x1802,0x1804)) + + [0x1805] + + [0x1cd3] + + [0x1ce1] + + list(range(0x1ce9,0x1ced)) + + list(range(0x1cee,0x1cf4)) + + list(range(0x1cf5,0x1cf7)) + + list(range(0x2000,0x200c)) + + list(range(0x200e,0x2065)) + + list(range(0x2066,0x2071)) + + list(range(0x2074,0x207f)) + + list(range(0x2080,0x208f)) + + list(range(0x20a0,0x20be)) + + list(range(0x2100,0x2126)) + + list(range(0x2127,0x212a)) + + list(range(0x212c,0x2132)) + + list(range(0x2133,0x214e)) + + list(range(0x214f,0x2160)) + + [0x2189] + + list(range(0x2190,0x23fb)) + + list(range(0x2400,0x2427)) + + list(range(0x2440,0x244b)) + + list(range(0x2460,0x2800)) + + list(range(0x2900,0x2b74)) + + list(range(0x2b76,0x2b96)) + + list(range(0x2b98,0x2bba)) + + list(range(0x2bbd,0x2bc9)) + + list(range(0x2bca,0x2bd2)) + + list(range(0x2e00,0x2e43)) + + list(range(0x2ff0,0x2ffc)) + + list(range(0x3000,0x3005)) + + [0x3006] + + list(range(0x3008,0x3021)) + + list(range(0x3030,0x3038)) + + list(range(0x303c,0x3040)) + + list(range(0x309b,0x309d)) + + [0x30a0] + + list(range(0x30fb,0x30fd)) + + list(range(0x3190,0x31a0)) + + list(range(0x31c0,0x31e4)) + + list(range(0x3220,0x3260)) + + list(range(0x327f,0x32d0)) + + list(range(0x3358,0x3400)) + + list(range(0x4dc0,0x4e00)) + + list(range(0xa700,0xa722)) + + list(range(0xa788,0xa78b)) + + list(range(0xa830,0xa83a)) + + [0xa92e] + + [0xa9cf] + + [0xab5b] + + list(range(0xfd3e,0xfd40)) + + list(range(0xfe10,0xfe1a)) + + list(range(0xfe30,0xfe53)) + + list(range(0xfe54,0xfe67)) + + list(range(0xfe68,0xfe6c)) + + [0xfeff] + + list(range(0xff01,0xff21)) + + list(range(0xff3b,0xff41)) + + list(range(0xff5b,0xff66)) + + [0xff70] + + list(range(0xff9e,0xffa0)) + + list(range(0xffe0,0xffe7)) + + list(range(0xffe8,0xffef)) + + list(range(0xfff9,0xfffe)) + + list(range(0x10100,0x10103)) + + list(range(0x10107,0x10134)) + + list(range(0x10137,0x10140)) + + list(range(0x10190,0x1019c)) + + list(range(0x101d0,0x101fd)) + + list(range(0x102e1,0x102fc)) + + list(range(0x1bca0,0x1bca4)) + + list(range(0x1d000,0x1d0f6)) + + list(range(0x1d100,0x1d127)) + + list(range(0x1d129,0x1d167)) + + list(range(0x1d16a,0x1d17b)) + + list(range(0x1d183,0x1d185)) + + list(range(0x1d18c,0x1d1aa)) + + list(range(0x1d1ae,0x1d1de)) + + list(range(0x1d300,0x1d357)) + + list(range(0x1d360,0x1d372)) + + list(range(0x1d400,0x1d455)) + + list(range(0x1d456,0x1d49d)) + + list(range(0x1d49e,0x1d4a0)) + + [0x1d4a2] + + list(range(0x1d4a5,0x1d4a7)) + + list(range(0x1d4a9,0x1d4ad)) + + list(range(0x1d4ae,0x1d4ba)) + + [0x1d4bb] + + 
list(range(0x1d4bd,0x1d4c4)) + + list(range(0x1d4c5,0x1d506)) + + list(range(0x1d507,0x1d50b)) + + list(range(0x1d50d,0x1d515)) + + list(range(0x1d516,0x1d51d)) + + list(range(0x1d51e,0x1d53a)) + + list(range(0x1d53b,0x1d53f)) + + list(range(0x1d540,0x1d545)) + + [0x1d546] + + list(range(0x1d54a,0x1d551)) + + list(range(0x1d552,0x1d6a6)) + + list(range(0x1d6a8,0x1d7cc)) + + list(range(0x1d7ce,0x1d800)) + + list(range(0x1f000,0x1f02c)) + + list(range(0x1f030,0x1f094)) + + list(range(0x1f0a0,0x1f0af)) + + list(range(0x1f0b1,0x1f0c0)) + + list(range(0x1f0c1,0x1f0d0)) + + list(range(0x1f0d1,0x1f0f6)) + + list(range(0x1f100,0x1f10d)) + + list(range(0x1f110,0x1f12f)) + + list(range(0x1f130,0x1f16c)) + + list(range(0x1f170,0x1f19b)) + + list(range(0x1f1e6,0x1f200)) + + list(range(0x1f201,0x1f203)) + + list(range(0x1f210,0x1f23b)) + + list(range(0x1f240,0x1f249)) + + list(range(0x1f250,0x1f252)) + + list(range(0x1f300,0x1f32d)) + + list(range(0x1f330,0x1f37e)) + + list(range(0x1f380,0x1f3cf)) + + list(range(0x1f3d4,0x1f3f8)) + + list(range(0x1f400,0x1f4ff)) + + list(range(0x1f500,0x1f54b)) + + list(range(0x1f550,0x1f57a)) + + list(range(0x1f57b,0x1f5a4)) + + list(range(0x1f5a5,0x1f643)) + + list(range(0x1f645,0x1f6d0)) + + list(range(0x1f6e0,0x1f6ed)) + + list(range(0x1f6f0,0x1f6f4)) + + list(range(0x1f700,0x1f774)) + + list(range(0x1f780,0x1f7d5)) + + list(range(0x1f800,0x1f80c)) + + list(range(0x1f810,0x1f848)) + + list(range(0x1f850,0x1f85a)) + + list(range(0x1f860,0x1f888)) + + list(range(0x1f890,0x1f8ae)) + + [0xe0001] + + list(range(0xe0020,0xe0080)) + ), + 'Coptic': frozenset( + list(range(0x3e2,0x3f0)) + + list(range(0x2c80,0x2cf4)) + + list(range(0x2cf9,0x2d00)) + ), + 'Cuneiform': frozenset( + list(range(0x12000,0x12399)) + + list(range(0x12400,0x1246f)) + + list(range(0x12470,0x12475)) + ), + 'Cypriot': frozenset( + list(range(0x10800,0x10806)) + + [0x10808] + + list(range(0x1080a,0x10836)) + + list(range(0x10837,0x10839)) + + [0x1083c] + + [0x1083f] + ), + 'Cyrillic': frozenset( + list(range(0x400,0x485)) + + list(range(0x487,0x530)) + + [0x1d2b] + + [0x1d78] + + list(range(0x2de0,0x2e00)) + + list(range(0xa640,0xa69e)) + + [0xa69f] + ), + 'Deseret': frozenset( + list(range(0x10400,0x10450)) + ), + 'Devanagari': frozenset( + list(range(0x900,0x951)) + + list(range(0x953,0x964)) + + list(range(0x966,0x980)) + + list(range(0xa8e0,0xa8fc)) + ), + 'Duployan': frozenset( + list(range(0x1bc00,0x1bc6b)) + + list(range(0x1bc70,0x1bc7d)) + + list(range(0x1bc80,0x1bc89)) + + list(range(0x1bc90,0x1bc9a)) + + list(range(0x1bc9c,0x1bca0)) + ), + 'Egyptian_Hieroglyphs': frozenset( + list(range(0x13000,0x1342f)) + ), + 'Elbasan': frozenset( + list(range(0x10500,0x10528)) + ), + 'Ethiopic': frozenset( + list(range(0x1200,0x1249)) + + list(range(0x124a,0x124e)) + + list(range(0x1250,0x1257)) + + [0x1258] + + list(range(0x125a,0x125e)) + + list(range(0x1260,0x1289)) + + list(range(0x128a,0x128e)) + + list(range(0x1290,0x12b1)) + + list(range(0x12b2,0x12b6)) + + list(range(0x12b8,0x12bf)) + + [0x12c0] + + list(range(0x12c2,0x12c6)) + + list(range(0x12c8,0x12d7)) + + list(range(0x12d8,0x1311)) + + list(range(0x1312,0x1316)) + + list(range(0x1318,0x135b)) + + list(range(0x135d,0x137d)) + + list(range(0x1380,0x139a)) + + list(range(0x2d80,0x2d97)) + + list(range(0x2da0,0x2da7)) + + list(range(0x2da8,0x2daf)) + + list(range(0x2db0,0x2db7)) + + list(range(0x2db8,0x2dbf)) + + list(range(0x2dc0,0x2dc7)) + + list(range(0x2dc8,0x2dcf)) + + list(range(0x2dd0,0x2dd7)) + + list(range(0x2dd8,0x2ddf)) + + 
list(range(0xab01,0xab07)) + + list(range(0xab09,0xab0f)) + + list(range(0xab11,0xab17)) + + list(range(0xab20,0xab27)) + + list(range(0xab28,0xab2f)) + ), + 'Georgian': frozenset( + list(range(0x10a0,0x10c6)) + + [0x10c7] + + [0x10cd] + + list(range(0x10d0,0x10fb)) + + list(range(0x10fc,0x1100)) + + list(range(0x2d00,0x2d26)) + + [0x2d27] + + [0x2d2d] + ), + 'Glagolitic': frozenset( + list(range(0x2c00,0x2c2f)) + + list(range(0x2c30,0x2c5f)) + ), + 'Gothic': frozenset( + list(range(0x10330,0x1034b)) + ), + 'Grantha': frozenset( + list(range(0x11301,0x11304)) + + list(range(0x11305,0x1130d)) + + list(range(0x1130f,0x11311)) + + list(range(0x11313,0x11329)) + + list(range(0x1132a,0x11331)) + + list(range(0x11332,0x11334)) + + list(range(0x11335,0x1133a)) + + list(range(0x1133c,0x11345)) + + list(range(0x11347,0x11349)) + + list(range(0x1134b,0x1134e)) + + [0x11357] + + list(range(0x1135d,0x11364)) + + list(range(0x11366,0x1136d)) + + list(range(0x11370,0x11375)) + ), + 'Greek': frozenset( + list(range(0x370,0x374)) + + list(range(0x375,0x378)) + + list(range(0x37a,0x37e)) + + [0x37f] + + [0x384] + + [0x386] + + list(range(0x388,0x38b)) + + [0x38c] + + list(range(0x38e,0x3a2)) + + list(range(0x3a3,0x3e2)) + + list(range(0x3f0,0x400)) + + list(range(0x1d26,0x1d2b)) + + list(range(0x1d5d,0x1d62)) + + list(range(0x1d66,0x1d6b)) + + [0x1dbf] + + list(range(0x1f00,0x1f16)) + + list(range(0x1f18,0x1f1e)) + + list(range(0x1f20,0x1f46)) + + list(range(0x1f48,0x1f4e)) + + list(range(0x1f50,0x1f58)) + + [0x1f59] + + [0x1f5b] + + [0x1f5d] + + list(range(0x1f5f,0x1f7e)) + + list(range(0x1f80,0x1fb5)) + + list(range(0x1fb6,0x1fc5)) + + list(range(0x1fc6,0x1fd4)) + + list(range(0x1fd6,0x1fdc)) + + list(range(0x1fdd,0x1ff0)) + + list(range(0x1ff2,0x1ff5)) + + list(range(0x1ff6,0x1fff)) + + [0x2126] + + [0xab65] + + list(range(0x10140,0x1018d)) + + [0x101a0] + + list(range(0x1d200,0x1d246)) + ), + 'Gujarati': frozenset( + list(range(0xa81,0xa84)) + + list(range(0xa85,0xa8e)) + + list(range(0xa8f,0xa92)) + + list(range(0xa93,0xaa9)) + + list(range(0xaaa,0xab1)) + + list(range(0xab2,0xab4)) + + list(range(0xab5,0xaba)) + + list(range(0xabc,0xac6)) + + list(range(0xac7,0xaca)) + + list(range(0xacb,0xace)) + + [0xad0] + + list(range(0xae0,0xae4)) + + list(range(0xae6,0xaf2)) + ), + 'Gurmukhi': frozenset( + list(range(0xa01,0xa04)) + + list(range(0xa05,0xa0b)) + + list(range(0xa0f,0xa11)) + + list(range(0xa13,0xa29)) + + list(range(0xa2a,0xa31)) + + list(range(0xa32,0xa34)) + + list(range(0xa35,0xa37)) + + list(range(0xa38,0xa3a)) + + [0xa3c] + + list(range(0xa3e,0xa43)) + + list(range(0xa47,0xa49)) + + list(range(0xa4b,0xa4e)) + + [0xa51] + + list(range(0xa59,0xa5d)) + + [0xa5e] + + list(range(0xa66,0xa76)) + ), + 'Han': frozenset( + list(range(0x2e80,0x2e9a)) + + list(range(0x2e9b,0x2ef4)) + + list(range(0x2f00,0x2fd6)) + + [0x3005] + + [0x3007] + + list(range(0x3021,0x302a)) + + list(range(0x3038,0x303c)) + + list(range(0x3400,0x4db6)) + + list(range(0x4e00,0x9fcd)) + + list(range(0xf900,0xfa6e)) + + list(range(0xfa70,0xfada)) + + list(range(0x20000,0x2a6d7)) + + list(range(0x2a700,0x2b735)) + + list(range(0x2b740,0x2b81e)) + + list(range(0x2f800,0x2fa1e)) + ), + 'Hangul': frozenset( + list(range(0x1100,0x1200)) + + list(range(0x302e,0x3030)) + + list(range(0x3131,0x318f)) + + list(range(0x3200,0x321f)) + + list(range(0x3260,0x327f)) + + list(range(0xa960,0xa97d)) + + list(range(0xac00,0xd7a4)) + + list(range(0xd7b0,0xd7c7)) + + list(range(0xd7cb,0xd7fc)) + + list(range(0xffa0,0xffbf)) + + 
list(range(0xffc2,0xffc8)) + + list(range(0xffca,0xffd0)) + + list(range(0xffd2,0xffd8)) + + list(range(0xffda,0xffdd)) + ), + 'Hanunoo': frozenset( + list(range(0x1720,0x1735)) + ), + 'Hebrew': frozenset( + list(range(0x591,0x5c8)) + + list(range(0x5d0,0x5eb)) + + list(range(0x5f0,0x5f5)) + + list(range(0xfb1d,0xfb37)) + + list(range(0xfb38,0xfb3d)) + + [0xfb3e] + + list(range(0xfb40,0xfb42)) + + list(range(0xfb43,0xfb45)) + + list(range(0xfb46,0xfb50)) + ), + 'Hiragana': frozenset( + list(range(0x3041,0x3097)) + + list(range(0x309d,0x30a0)) + + [0x1b001] + + [0x1f200] + ), + 'Imperial_Aramaic': frozenset( + list(range(0x10840,0x10856)) + + list(range(0x10857,0x10860)) + ), + 'Inherited': frozenset( + list(range(0x300,0x370)) + + list(range(0x485,0x487)) + + list(range(0x64b,0x656)) + + [0x670] + + list(range(0x951,0x953)) + + list(range(0x1ab0,0x1abf)) + + list(range(0x1cd0,0x1cd3)) + + list(range(0x1cd4,0x1ce1)) + + list(range(0x1ce2,0x1ce9)) + + [0x1ced] + + [0x1cf4] + + list(range(0x1cf8,0x1cfa)) + + list(range(0x1dc0,0x1df6)) + + list(range(0x1dfc,0x1e00)) + + list(range(0x200c,0x200e)) + + list(range(0x20d0,0x20f1)) + + list(range(0x302a,0x302e)) + + list(range(0x3099,0x309b)) + + list(range(0xfe00,0xfe10)) + + list(range(0xfe20,0xfe2e)) + + [0x101fd] + + [0x102e0] + + list(range(0x1d167,0x1d16a)) + + list(range(0x1d17b,0x1d183)) + + list(range(0x1d185,0x1d18c)) + + list(range(0x1d1aa,0x1d1ae)) + + list(range(0xe0100,0xe01f0)) + ), + 'Inscriptional_Pahlavi': frozenset( + list(range(0x10b60,0x10b73)) + + list(range(0x10b78,0x10b80)) + ), + 'Inscriptional_Parthian': frozenset( + list(range(0x10b40,0x10b56)) + + list(range(0x10b58,0x10b60)) + ), + 'Javanese': frozenset( + list(range(0xa980,0xa9ce)) + + list(range(0xa9d0,0xa9da)) + + list(range(0xa9de,0xa9e0)) + ), + 'Kaithi': frozenset( + list(range(0x11080,0x110c2)) + ), + 'Kannada': frozenset( + list(range(0xc81,0xc84)) + + list(range(0xc85,0xc8d)) + + list(range(0xc8e,0xc91)) + + list(range(0xc92,0xca9)) + + list(range(0xcaa,0xcb4)) + + list(range(0xcb5,0xcba)) + + list(range(0xcbc,0xcc5)) + + list(range(0xcc6,0xcc9)) + + list(range(0xcca,0xcce)) + + list(range(0xcd5,0xcd7)) + + [0xcde] + + list(range(0xce0,0xce4)) + + list(range(0xce6,0xcf0)) + + list(range(0xcf1,0xcf3)) + ), + 'Katakana': frozenset( + list(range(0x30a1,0x30fb)) + + list(range(0x30fd,0x3100)) + + list(range(0x31f0,0x3200)) + + list(range(0x32d0,0x32ff)) + + list(range(0x3300,0x3358)) + + list(range(0xff66,0xff70)) + + list(range(0xff71,0xff9e)) + + [0x1b000] + ), + 'Kayah_Li': frozenset( + list(range(0xa900,0xa92e)) + + [0xa92f] + ), + 'Kharoshthi': frozenset( + list(range(0x10a00,0x10a04)) + + list(range(0x10a05,0x10a07)) + + list(range(0x10a0c,0x10a14)) + + list(range(0x10a15,0x10a18)) + + list(range(0x10a19,0x10a34)) + + list(range(0x10a38,0x10a3b)) + + list(range(0x10a3f,0x10a48)) + + list(range(0x10a50,0x10a59)) + ), + 'Khmer': frozenset( + list(range(0x1780,0x17de)) + + list(range(0x17e0,0x17ea)) + + list(range(0x17f0,0x17fa)) + + list(range(0x19e0,0x1a00)) + ), + 'Khojki': frozenset( + list(range(0x11200,0x11212)) + + list(range(0x11213,0x1123e)) + ), + 'Khudawadi': frozenset( + list(range(0x112b0,0x112eb)) + + list(range(0x112f0,0x112fa)) + ), + 'Lao': frozenset( + list(range(0xe81,0xe83)) + + [0xe84] + + list(range(0xe87,0xe89)) + + [0xe8a] + + [0xe8d] + + list(range(0xe94,0xe98)) + + list(range(0xe99,0xea0)) + + list(range(0xea1,0xea4)) + + [0xea5] + + [0xea7] + + list(range(0xeaa,0xeac)) + + list(range(0xead,0xeba)) + + list(range(0xebb,0xebe)) + + 
list(range(0xec0,0xec5)) + + [0xec6] + + list(range(0xec8,0xece)) + + list(range(0xed0,0xeda)) + + list(range(0xedc,0xee0)) + ), + 'Latin': frozenset( + list(range(0x41,0x5b)) + + list(range(0x61,0x7b)) + + [0xaa] + + [0xba] + + list(range(0xc0,0xd7)) + + list(range(0xd8,0xf7)) + + list(range(0xf8,0x2b9)) + + list(range(0x2e0,0x2e5)) + + list(range(0x1d00,0x1d26)) + + list(range(0x1d2c,0x1d5d)) + + list(range(0x1d62,0x1d66)) + + list(range(0x1d6b,0x1d78)) + + list(range(0x1d79,0x1dbf)) + + list(range(0x1e00,0x1f00)) + + [0x2071] + + [0x207f] + + list(range(0x2090,0x209d)) + + list(range(0x212a,0x212c)) + + [0x2132] + + [0x214e] + + list(range(0x2160,0x2189)) + + list(range(0x2c60,0x2c80)) + + list(range(0xa722,0xa788)) + + list(range(0xa78b,0xa78f)) + + list(range(0xa790,0xa7ae)) + + list(range(0xa7b0,0xa7b2)) + + list(range(0xa7f7,0xa800)) + + list(range(0xab30,0xab5b)) + + list(range(0xab5c,0xab60)) + + [0xab64] + + list(range(0xfb00,0xfb07)) + + list(range(0xff21,0xff3b)) + + list(range(0xff41,0xff5b)) + ), + 'Lepcha': frozenset( + list(range(0x1c00,0x1c38)) + + list(range(0x1c3b,0x1c4a)) + + list(range(0x1c4d,0x1c50)) + ), + 'Limbu': frozenset( + list(range(0x1900,0x191f)) + + list(range(0x1920,0x192c)) + + list(range(0x1930,0x193c)) + + [0x1940] + + list(range(0x1944,0x1950)) + ), + 'Linear_A': frozenset( + list(range(0x10600,0x10737)) + + list(range(0x10740,0x10756)) + + list(range(0x10760,0x10768)) + ), + 'Linear_B': frozenset( + list(range(0x10000,0x1000c)) + + list(range(0x1000d,0x10027)) + + list(range(0x10028,0x1003b)) + + list(range(0x1003c,0x1003e)) + + list(range(0x1003f,0x1004e)) + + list(range(0x10050,0x1005e)) + + list(range(0x10080,0x100fb)) + ), + 'Lisu': frozenset( + list(range(0xa4d0,0xa500)) + ), + 'Lycian': frozenset( + list(range(0x10280,0x1029d)) + ), + 'Lydian': frozenset( + list(range(0x10920,0x1093a)) + + [0x1093f] + ), + 'Mahajani': frozenset( + list(range(0x11150,0x11177)) + ), + 'Malayalam': frozenset( + list(range(0xd01,0xd04)) + + list(range(0xd05,0xd0d)) + + list(range(0xd0e,0xd11)) + + list(range(0xd12,0xd3b)) + + list(range(0xd3d,0xd45)) + + list(range(0xd46,0xd49)) + + list(range(0xd4a,0xd4f)) + + [0xd57] + + list(range(0xd60,0xd64)) + + list(range(0xd66,0xd76)) + + list(range(0xd79,0xd80)) + ), + 'Mandaic': frozenset( + list(range(0x840,0x85c)) + + [0x85e] + ), + 'Manichaean': frozenset( + list(range(0x10ac0,0x10ae7)) + + list(range(0x10aeb,0x10af7)) + ), + 'Meetei_Mayek': frozenset( + list(range(0xaae0,0xaaf7)) + + list(range(0xabc0,0xabee)) + + list(range(0xabf0,0xabfa)) + ), + 'Mende_Kikakui': frozenset( + list(range(0x1e800,0x1e8c5)) + + list(range(0x1e8c7,0x1e8d7)) + ), + 'Meroitic_Cursive': frozenset( + list(range(0x109a0,0x109b8)) + + list(range(0x109be,0x109c0)) + ), + 'Meroitic_Hieroglyphs': frozenset( + list(range(0x10980,0x109a0)) + ), + 'Miao': frozenset( + list(range(0x16f00,0x16f45)) + + list(range(0x16f50,0x16f7f)) + + list(range(0x16f8f,0x16fa0)) + ), + 'Modi': frozenset( + list(range(0x11600,0x11645)) + + list(range(0x11650,0x1165a)) + ), + 'Mongolian': frozenset( + list(range(0x1800,0x1802)) + + [0x1804] + + list(range(0x1806,0x180f)) + + list(range(0x1810,0x181a)) + + list(range(0x1820,0x1878)) + + list(range(0x1880,0x18ab)) + ), + 'Mro': frozenset( + list(range(0x16a40,0x16a5f)) + + list(range(0x16a60,0x16a6a)) + + list(range(0x16a6e,0x16a70)) + ), + 'Myanmar': frozenset( + list(range(0x1000,0x10a0)) + + list(range(0xa9e0,0xa9ff)) + + list(range(0xaa60,0xaa80)) + ), + 'Nabataean': frozenset( + list(range(0x10880,0x1089f)) + + 
list(range(0x108a7,0x108b0)) + ), + 'New_Tai_Lue': frozenset( + list(range(0x1980,0x19ac)) + + list(range(0x19b0,0x19ca)) + + list(range(0x19d0,0x19db)) + + list(range(0x19de,0x19e0)) + ), + 'Nko': frozenset( + list(range(0x7c0,0x7fb)) + ), + 'Ogham': frozenset( + list(range(0x1680,0x169d)) + ), + 'Ol_Chiki': frozenset( + list(range(0x1c50,0x1c80)) + ), + 'Old_Italic': frozenset( + list(range(0x10300,0x10324)) + ), + 'Old_North_Arabian': frozenset( + list(range(0x10a80,0x10aa0)) + ), + 'Old_Permic': frozenset( + list(range(0x10350,0x1037b)) + ), + 'Old_Persian': frozenset( + list(range(0x103a0,0x103c4)) + + list(range(0x103c8,0x103d6)) + ), + 'Old_South_Arabian': frozenset( + list(range(0x10a60,0x10a80)) + ), + 'Old_Turkic': frozenset( + list(range(0x10c00,0x10c49)) + ), + 'Oriya': frozenset( + list(range(0xb01,0xb04)) + + list(range(0xb05,0xb0d)) + + list(range(0xb0f,0xb11)) + + list(range(0xb13,0xb29)) + + list(range(0xb2a,0xb31)) + + list(range(0xb32,0xb34)) + + list(range(0xb35,0xb3a)) + + list(range(0xb3c,0xb45)) + + list(range(0xb47,0xb49)) + + list(range(0xb4b,0xb4e)) + + list(range(0xb56,0xb58)) + + list(range(0xb5c,0xb5e)) + + list(range(0xb5f,0xb64)) + + list(range(0xb66,0xb78)) + ), + 'Osmanya': frozenset( + list(range(0x10480,0x1049e)) + + list(range(0x104a0,0x104aa)) + ), + 'Pahawh_Hmong': frozenset( + list(range(0x16b00,0x16b46)) + + list(range(0x16b50,0x16b5a)) + + list(range(0x16b5b,0x16b62)) + + list(range(0x16b63,0x16b78)) + + list(range(0x16b7d,0x16b90)) + ), + 'Palmyrene': frozenset( + list(range(0x10860,0x10880)) + ), + 'Pau_Cin_Hau': frozenset( + list(range(0x11ac0,0x11af9)) + ), + 'Phags_Pa': frozenset( + list(range(0xa840,0xa878)) + ), + 'Phoenician': frozenset( + list(range(0x10900,0x1091c)) + + [0x1091f] + ), + 'Psalter_Pahlavi': frozenset( + list(range(0x10b80,0x10b92)) + + list(range(0x10b99,0x10b9d)) + + list(range(0x10ba9,0x10bb0)) + ), + 'Rejang': frozenset( + list(range(0xa930,0xa954)) + + [0xa95f] + ), + 'Runic': frozenset( + list(range(0x16a0,0x16eb)) + + list(range(0x16ee,0x16f9)) + ), + 'Samaritan': frozenset( + list(range(0x800,0x82e)) + + list(range(0x830,0x83f)) + ), + 'Saurashtra': frozenset( + list(range(0xa880,0xa8c5)) + + list(range(0xa8ce,0xa8da)) + ), + 'Sharada': frozenset( + list(range(0x11180,0x111c9)) + + [0x111cd] + + list(range(0x111d0,0x111db)) + ), + 'Shavian': frozenset( + list(range(0x10450,0x10480)) + ), + 'Siddham': frozenset( + list(range(0x11580,0x115b6)) + + list(range(0x115b8,0x115ca)) + ), + 'Sinhala': frozenset( + list(range(0xd82,0xd84)) + + list(range(0xd85,0xd97)) + + list(range(0xd9a,0xdb2)) + + list(range(0xdb3,0xdbc)) + + [0xdbd] + + list(range(0xdc0,0xdc7)) + + [0xdca] + + list(range(0xdcf,0xdd5)) + + [0xdd6] + + list(range(0xdd8,0xde0)) + + list(range(0xde6,0xdf0)) + + list(range(0xdf2,0xdf5)) + + list(range(0x111e1,0x111f5)) + ), + 'Sora_Sompeng': frozenset( + list(range(0x110d0,0x110e9)) + + list(range(0x110f0,0x110fa)) + ), + 'Sundanese': frozenset( + list(range(0x1b80,0x1bc0)) + + list(range(0x1cc0,0x1cc8)) + ), + 'Syloti_Nagri': frozenset( + list(range(0xa800,0xa82c)) + ), + 'Syriac': frozenset( + list(range(0x700,0x70e)) + + list(range(0x70f,0x74b)) + + list(range(0x74d,0x750)) + ), + 'Tagalog': frozenset( + list(range(0x1700,0x170d)) + + list(range(0x170e,0x1715)) + ), + 'Tagbanwa': frozenset( + list(range(0x1760,0x176d)) + + list(range(0x176e,0x1771)) + + list(range(0x1772,0x1774)) + ), + 'Tai_Le': frozenset( + list(range(0x1950,0x196e)) + + list(range(0x1970,0x1975)) + ), + 'Tai_Tham': frozenset( + 
list(range(0x1a20,0x1a5f)) + + list(range(0x1a60,0x1a7d)) + + list(range(0x1a7f,0x1a8a)) + + list(range(0x1a90,0x1a9a)) + + list(range(0x1aa0,0x1aae)) + ), + 'Tai_Viet': frozenset( + list(range(0xaa80,0xaac3)) + + list(range(0xaadb,0xaae0)) + ), + 'Takri': frozenset( + list(range(0x11680,0x116b8)) + + list(range(0x116c0,0x116ca)) + ), + 'Tamil': frozenset( + list(range(0xb82,0xb84)) + + list(range(0xb85,0xb8b)) + + list(range(0xb8e,0xb91)) + + list(range(0xb92,0xb96)) + + list(range(0xb99,0xb9b)) + + [0xb9c] + + list(range(0xb9e,0xba0)) + + list(range(0xba3,0xba5)) + + list(range(0xba8,0xbab)) + + list(range(0xbae,0xbba)) + + list(range(0xbbe,0xbc3)) + + list(range(0xbc6,0xbc9)) + + list(range(0xbca,0xbce)) + + [0xbd0] + + [0xbd7] + + list(range(0xbe6,0xbfb)) + ), + 'Telugu': frozenset( + list(range(0xc00,0xc04)) + + list(range(0xc05,0xc0d)) + + list(range(0xc0e,0xc11)) + + list(range(0xc12,0xc29)) + + list(range(0xc2a,0xc3a)) + + list(range(0xc3d,0xc45)) + + list(range(0xc46,0xc49)) + + list(range(0xc4a,0xc4e)) + + list(range(0xc55,0xc57)) + + list(range(0xc58,0xc5a)) + + list(range(0xc60,0xc64)) + + list(range(0xc66,0xc70)) + + list(range(0xc78,0xc80)) + ), + 'Thaana': frozenset( + list(range(0x780,0x7b2)) + ), + 'Thai': frozenset( + list(range(0xe01,0xe3b)) + + list(range(0xe40,0xe5c)) + ), + 'Tibetan': frozenset( + list(range(0xf00,0xf48)) + + list(range(0xf49,0xf6d)) + + list(range(0xf71,0xf98)) + + list(range(0xf99,0xfbd)) + + list(range(0xfbe,0xfcd)) + + list(range(0xfce,0xfd5)) + + list(range(0xfd9,0xfdb)) + ), + 'Tifinagh': frozenset( + list(range(0x2d30,0x2d68)) + + list(range(0x2d6f,0x2d71)) + + [0x2d7f] + ), + 'Tirhuta': frozenset( + list(range(0x11480,0x114c8)) + + list(range(0x114d0,0x114da)) + ), + 'Ugaritic': frozenset( + list(range(0x10380,0x1039e)) + + [0x1039f] + ), + 'Vai': frozenset( + list(range(0xa500,0xa62c)) + ), + 'Warang_Citi': frozenset( + list(range(0x118a0,0x118f3)) + + [0x118ff] + ), + 'Yi': frozenset( + list(range(0xa000,0xa48d)) + + list(range(0xa490,0xa4c7)) + ), +} +joining_types = { + 0x600: 'U', + 0x601: 'U', + 0x602: 'U', + 0x603: 'U', + 0x604: 'U', + 0x605: 'U', + 0x608: 'U', + 0x60b: 'U', + 0x620: 'D', + 0x621: 'U', + 0x622: 'R', + 0x623: 'R', + 0x624: 'R', + 0x625: 'R', + 0x626: 'D', + 0x627: 'R', + 0x628: 'D', + 0x629: 'R', + 0x62a: 'D', + 0x62b: 'D', + 0x62c: 'D', + 0x62d: 'D', + 0x62e: 'D', + 0x62f: 'R', + 0x630: 'R', + 0x631: 'R', + 0x632: 'R', + 0x633: 'D', + 0x634: 'D', + 0x635: 'D', + 0x636: 'D', + 0x637: 'D', + 0x638: 'D', + 0x639: 'D', + 0x63a: 'D', + 0x63b: 'D', + 0x63c: 'D', + 0x63d: 'D', + 0x63e: 'D', + 0x63f: 'D', + 0x640: 'C', + 0x641: 'D', + 0x642: 'D', + 0x643: 'D', + 0x644: 'D', + 0x645: 'D', + 0x646: 'D', + 0x647: 'D', + 0x648: 'R', + 0x649: 'D', + 0x64a: 'D', + 0x66e: 'D', + 0x66f: 'D', + 0x671: 'R', + 0x672: 'R', + 0x673: 'R', + 0x674: 'U', + 0x675: 'R', + 0x676: 'R', + 0x677: 'R', + 0x678: 'D', + 0x679: 'D', + 0x67a: 'D', + 0x67b: 'D', + 0x67c: 'D', + 0x67d: 'D', + 0x67e: 'D', + 0x67f: 'D', + 0x680: 'D', + 0x681: 'D', + 0x682: 'D', + 0x683: 'D', + 0x684: 'D', + 0x685: 'D', + 0x686: 'D', + 0x687: 'D', + 0x688: 'R', + 0x689: 'R', + 0x68a: 'R', + 0x68b: 'R', + 0x68c: 'R', + 0x68d: 'R', + 0x68e: 'R', + 0x68f: 'R', + 0x690: 'R', + 0x691: 'R', + 0x692: 'R', + 0x693: 'R', + 0x694: 'R', + 0x695: 'R', + 0x696: 'R', + 0x697: 'R', + 0x698: 'R', + 0x699: 'R', + 0x69a: 'D', + 0x69b: 'D', + 0x69c: 'D', + 0x69d: 'D', + 0x69e: 'D', + 0x69f: 'D', + 0x6a0: 'D', + 0x6a1: 'D', + 0x6a2: 'D', + 0x6a3: 'D', + 0x6a4: 'D', + 0x6a5: 'D', + 0x6a6: 'D', + 
0x6a7: 'D', + 0x6a8: 'D', + 0x6a9: 'D', + 0x6aa: 'D', + 0x6ab: 'D', + 0x6ac: 'D', + 0x6ad: 'D', + 0x6ae: 'D', + 0x6af: 'D', + 0x6b0: 'D', + 0x6b1: 'D', + 0x6b2: 'D', + 0x6b3: 'D', + 0x6b4: 'D', + 0x6b5: 'D', + 0x6b6: 'D', + 0x6b7: 'D', + 0x6b8: 'D', + 0x6b9: 'D', + 0x6ba: 'D', + 0x6bb: 'D', + 0x6bc: 'D', + 0x6bd: 'D', + 0x6be: 'D', + 0x6bf: 'D', + 0x6c0: 'R', + 0x6c1: 'D', + 0x6c2: 'D', + 0x6c3: 'R', + 0x6c4: 'R', + 0x6c5: 'R', + 0x6c6: 'R', + 0x6c7: 'R', + 0x6c8: 'R', + 0x6c9: 'R', + 0x6ca: 'R', + 0x6cb: 'R', + 0x6cc: 'D', + 0x6cd: 'R', + 0x6ce: 'D', + 0x6cf: 'R', + 0x6d0: 'D', + 0x6d1: 'D', + 0x6d2: 'R', + 0x6d3: 'R', + 0x6d5: 'R', + 0x6dd: 'U', + 0x6ee: 'R', + 0x6ef: 'R', + 0x6fa: 'D', + 0x6fb: 'D', + 0x6fc: 'D', + 0x6ff: 'D', + 0x710: 'R', + 0x712: 'D', + 0x713: 'D', + 0x714: 'D', + 0x715: 'R', + 0x716: 'R', + 0x717: 'R', + 0x718: 'R', + 0x719: 'R', + 0x71a: 'D', + 0x71b: 'D', + 0x71c: 'D', + 0x71d: 'D', + 0x71e: 'R', + 0x71f: 'D', + 0x720: 'D', + 0x721: 'D', + 0x722: 'D', + 0x723: 'D', + 0x724: 'D', + 0x725: 'D', + 0x726: 'D', + 0x727: 'D', + 0x728: 'R', + 0x729: 'D', + 0x72a: 'R', + 0x72b: 'D', + 0x72c: 'R', + 0x72d: 'D', + 0x72e: 'D', + 0x72f: 'R', + 0x74d: 'R', + 0x74e: 'D', + 0x74f: 'D', + 0x750: 'D', + 0x751: 'D', + 0x752: 'D', + 0x753: 'D', + 0x754: 'D', + 0x755: 'D', + 0x756: 'D', + 0x757: 'D', + 0x758: 'D', + 0x759: 'R', + 0x75a: 'R', + 0x75b: 'R', + 0x75c: 'D', + 0x75d: 'D', + 0x75e: 'D', + 0x75f: 'D', + 0x760: 'D', + 0x761: 'D', + 0x762: 'D', + 0x763: 'D', + 0x764: 'D', + 0x765: 'D', + 0x766: 'D', + 0x767: 'D', + 0x768: 'D', + 0x769: 'D', + 0x76a: 'D', + 0x76b: 'R', + 0x76c: 'R', + 0x76d: 'D', + 0x76e: 'D', + 0x76f: 'D', + 0x770: 'D', + 0x771: 'R', + 0x772: 'D', + 0x773: 'R', + 0x774: 'R', + 0x775: 'D', + 0x776: 'D', + 0x777: 'D', + 0x778: 'R', + 0x779: 'R', + 0x77a: 'D', + 0x77b: 'D', + 0x77c: 'D', + 0x77d: 'D', + 0x77e: 'D', + 0x77f: 'D', + 0x7ca: 'D', + 0x7cb: 'D', + 0x7cc: 'D', + 0x7cd: 'D', + 0x7ce: 'D', + 0x7cf: 'D', + 0x7d0: 'D', + 0x7d1: 'D', + 0x7d2: 'D', + 0x7d3: 'D', + 0x7d4: 'D', + 0x7d5: 'D', + 0x7d6: 'D', + 0x7d7: 'D', + 0x7d8: 'D', + 0x7d9: 'D', + 0x7da: 'D', + 0x7db: 'D', + 0x7dc: 'D', + 0x7dd: 'D', + 0x7de: 'D', + 0x7df: 'D', + 0x7e0: 'D', + 0x7e1: 'D', + 0x7e2: 'D', + 0x7e3: 'D', + 0x7e4: 'D', + 0x7e5: 'D', + 0x7e6: 'D', + 0x7e7: 'D', + 0x7e8: 'D', + 0x7e9: 'D', + 0x7ea: 'D', + 0x7fa: 'C', + 0x840: 'R', + 0x841: 'D', + 0x842: 'D', + 0x843: 'D', + 0x844: 'D', + 0x845: 'D', + 0x846: 'R', + 0x847: 'D', + 0x848: 'D', + 0x849: 'R', + 0x84a: 'D', + 0x84b: 'D', + 0x84c: 'D', + 0x84d: 'D', + 0x84e: 'D', + 0x84f: 'R', + 0x850: 'D', + 0x851: 'D', + 0x852: 'D', + 0x853: 'D', + 0x854: 'R', + 0x855: 'D', + 0x856: 'U', + 0x857: 'U', + 0x858: 'U', + 0x8a0: 'D', + 0x8a1: 'D', + 0x8a2: 'D', + 0x8a3: 'D', + 0x8a4: 'D', + 0x8a5: 'D', + 0x8a6: 'D', + 0x8a7: 'D', + 0x8a8: 'D', + 0x8a9: 'D', + 0x8aa: 'R', + 0x8ab: 'R', + 0x8ac: 'R', + 0x8ad: 'U', + 0x8ae: 'R', + 0x8af: 'D', + 0x8b0: 'D', + 0x8b1: 'R', + 0x8b2: 'R', + 0x1806: 'U', + 0x1807: 'D', + 0x180a: 'C', + 0x180e: 'U', + 0x1820: 'D', + 0x1821: 'D', + 0x1822: 'D', + 0x1823: 'D', + 0x1824: 'D', + 0x1825: 'D', + 0x1826: 'D', + 0x1827: 'D', + 0x1828: 'D', + 0x1829: 'D', + 0x182a: 'D', + 0x182b: 'D', + 0x182c: 'D', + 0x182d: 'D', + 0x182e: 'D', + 0x182f: 'D', + 0x1830: 'D', + 0x1831: 'D', + 0x1832: 'D', + 0x1833: 'D', + 0x1834: 'D', + 0x1835: 'D', + 0x1836: 'D', + 0x1837: 'D', + 0x1838: 'D', + 0x1839: 'D', + 0x183a: 'D', + 0x183b: 'D', + 0x183c: 'D', + 0x183d: 'D', + 0x183e: 'D', + 0x183f: 'D', + 0x1840: 'D', + 0x1841: 'D', + 
0x1842: 'D', + 0x1843: 'D', + 0x1844: 'D', + 0x1845: 'D', + 0x1846: 'D', + 0x1847: 'D', + 0x1848: 'D', + 0x1849: 'D', + 0x184a: 'D', + 0x184b: 'D', + 0x184c: 'D', + 0x184d: 'D', + 0x184e: 'D', + 0x184f: 'D', + 0x1850: 'D', + 0x1851: 'D', + 0x1852: 'D', + 0x1853: 'D', + 0x1854: 'D', + 0x1855: 'D', + 0x1856: 'D', + 0x1857: 'D', + 0x1858: 'D', + 0x1859: 'D', + 0x185a: 'D', + 0x185b: 'D', + 0x185c: 'D', + 0x185d: 'D', + 0x185e: 'D', + 0x185f: 'D', + 0x1860: 'D', + 0x1861: 'D', + 0x1862: 'D', + 0x1863: 'D', + 0x1864: 'D', + 0x1865: 'D', + 0x1866: 'D', + 0x1867: 'D', + 0x1868: 'D', + 0x1869: 'D', + 0x186a: 'D', + 0x186b: 'D', + 0x186c: 'D', + 0x186d: 'D', + 0x186e: 'D', + 0x186f: 'D', + 0x1870: 'D', + 0x1871: 'D', + 0x1872: 'D', + 0x1873: 'D', + 0x1874: 'D', + 0x1875: 'D', + 0x1876: 'D', + 0x1877: 'D', + 0x1880: 'U', + 0x1881: 'U', + 0x1882: 'U', + 0x1883: 'U', + 0x1884: 'U', + 0x1885: 'U', + 0x1886: 'U', + 0x1887: 'D', + 0x1888: 'D', + 0x1889: 'D', + 0x188a: 'D', + 0x188b: 'D', + 0x188c: 'D', + 0x188d: 'D', + 0x188e: 'D', + 0x188f: 'D', + 0x1890: 'D', + 0x1891: 'D', + 0x1892: 'D', + 0x1893: 'D', + 0x1894: 'D', + 0x1895: 'D', + 0x1896: 'D', + 0x1897: 'D', + 0x1898: 'D', + 0x1899: 'D', + 0x189a: 'D', + 0x189b: 'D', + 0x189c: 'D', + 0x189d: 'D', + 0x189e: 'D', + 0x189f: 'D', + 0x18a0: 'D', + 0x18a1: 'D', + 0x18a2: 'D', + 0x18a3: 'D', + 0x18a4: 'D', + 0x18a5: 'D', + 0x18a6: 'D', + 0x18a7: 'D', + 0x18a8: 'D', + 0x18aa: 'D', + 0x200c: 'U', + 0x200d: 'C', + 0x2066: 'U', + 0x2067: 'U', + 0x2068: 'U', + 0x2069: 'U', + 0xa840: 'D', + 0xa841: 'D', + 0xa842: 'D', + 0xa843: 'D', + 0xa844: 'D', + 0xa845: 'D', + 0xa846: 'D', + 0xa847: 'D', + 0xa848: 'D', + 0xa849: 'D', + 0xa84a: 'D', + 0xa84b: 'D', + 0xa84c: 'D', + 0xa84d: 'D', + 0xa84e: 'D', + 0xa84f: 'D', + 0xa850: 'D', + 0xa851: 'D', + 0xa852: 'D', + 0xa853: 'D', + 0xa854: 'D', + 0xa855: 'D', + 0xa856: 'D', + 0xa857: 'D', + 0xa858: 'D', + 0xa859: 'D', + 0xa85a: 'D', + 0xa85b: 'D', + 0xa85c: 'D', + 0xa85d: 'D', + 0xa85e: 'D', + 0xa85f: 'D', + 0xa860: 'D', + 0xa861: 'D', + 0xa862: 'D', + 0xa863: 'D', + 0xa864: 'D', + 0xa865: 'D', + 0xa866: 'D', + 0xa867: 'D', + 0xa868: 'D', + 0xa869: 'D', + 0xa86a: 'D', + 0xa86b: 'D', + 0xa86c: 'D', + 0xa86d: 'D', + 0xa86e: 'D', + 0xa86f: 'D', + 0xa870: 'D', + 0xa871: 'D', + 0xa872: 'L', + 0xa873: 'U', + 0x10ac0: 'D', + 0x10ac1: 'D', + 0x10ac2: 'D', + 0x10ac3: 'D', + 0x10ac4: 'D', + 0x10ac5: 'R', + 0x10ac6: 'U', + 0x10ac7: 'R', + 0x10ac8: 'U', + 0x10ac9: 'R', + 0x10aca: 'R', + 0x10acb: 'U', + 0x10acc: 'U', + 0x10acd: 'L', + 0x10ace: 'R', + 0x10acf: 'R', + 0x10ad0: 'R', + 0x10ad1: 'R', + 0x10ad2: 'R', + 0x10ad3: 'D', + 0x10ad4: 'D', + 0x10ad5: 'D', + 0x10ad6: 'D', + 0x10ad7: 'L', + 0x10ad8: 'D', + 0x10ad9: 'D', + 0x10ada: 'D', + 0x10adb: 'D', + 0x10adc: 'D', + 0x10add: 'R', + 0x10ade: 'D', + 0x10adf: 'D', + 0x10ae0: 'D', + 0x10ae1: 'R', + 0x10ae2: 'U', + 0x10ae3: 'U', + 0x10ae4: 'R', + 0x10aeb: 'D', + 0x10aec: 'D', + 0x10aed: 'D', + 0x10aee: 'D', + 0x10aef: 'R', + 0x10b80: 'D', + 0x10b81: 'R', + 0x10b82: 'D', + 0x10b83: 'R', + 0x10b84: 'R', + 0x10b85: 'R', + 0x10b86: 'D', + 0x10b87: 'D', + 0x10b88: 'D', + 0x10b89: 'R', + 0x10b8a: 'D', + 0x10b8b: 'D', + 0x10b8c: 'R', + 0x10b8d: 'D', + 0x10b8e: 'R', + 0x10b8f: 'R', + 0x10b90: 'D', + 0x10b91: 'R', + 0x10ba9: 'R', + 0x10baa: 'R', + 0x10bab: 'R', + 0x10bac: 'R', + 0x10bad: 'D', + 0x10bae: 'D', + 0x10baf: 'U', +} +codepoint_classes = { + 'PVALID': frozenset( + [0x2d] + + list(range(0x30,0x3a)) + + list(range(0x61,0x7b)) + + list(range(0xdf,0xf7)) + + list(range(0xf8,0x100)) + + 
[0x101] + + [0x103] + + [0x105] + + [0x107] + + [0x109] + + [0x10b] + + [0x10d] + + [0x10f] + + [0x111] + + [0x113] + + [0x115] + + [0x117] + + [0x119] + + [0x11b] + + [0x11d] + + [0x11f] + + [0x121] + + [0x123] + + [0x125] + + [0x127] + + [0x129] + + [0x12b] + + [0x12d] + + [0x12f] + + [0x131] + + [0x135] + + list(range(0x137,0x139)) + + [0x13a] + + [0x13c] + + [0x13e] + + [0x142] + + [0x144] + + [0x146] + + [0x148] + + [0x14b] + + [0x14d] + + [0x14f] + + [0x151] + + [0x153] + + [0x155] + + [0x157] + + [0x159] + + [0x15b] + + [0x15d] + + [0x15f] + + [0x161] + + [0x163] + + [0x165] + + [0x167] + + [0x169] + + [0x16b] + + [0x16d] + + [0x16f] + + [0x171] + + [0x173] + + [0x175] + + [0x177] + + [0x17a] + + [0x17c] + + [0x17e] + + [0x180] + + [0x183] + + [0x185] + + [0x188] + + list(range(0x18c,0x18e)) + + [0x192] + + [0x195] + + list(range(0x199,0x19c)) + + [0x19e] + + [0x1a1] + + [0x1a3] + + [0x1a5] + + [0x1a8] + + list(range(0x1aa,0x1ac)) + + [0x1ad] + + [0x1b0] + + [0x1b4] + + [0x1b6] + + list(range(0x1b9,0x1bc)) + + list(range(0x1bd,0x1c4)) + + [0x1ce] + + [0x1d0] + + [0x1d2] + + [0x1d4] + + [0x1d6] + + [0x1d8] + + [0x1da] + + list(range(0x1dc,0x1de)) + + [0x1df] + + [0x1e1] + + [0x1e3] + + [0x1e5] + + [0x1e7] + + [0x1e9] + + [0x1eb] + + [0x1ed] + + list(range(0x1ef,0x1f1)) + + [0x1f5] + + [0x1f9] + + [0x1fb] + + [0x1fd] + + [0x1ff] + + [0x201] + + [0x203] + + [0x205] + + [0x207] + + [0x209] + + [0x20b] + + [0x20d] + + [0x20f] + + [0x211] + + [0x213] + + [0x215] + + [0x217] + + [0x219] + + [0x21b] + + [0x21d] + + [0x21f] + + [0x221] + + [0x223] + + [0x225] + + [0x227] + + [0x229] + + [0x22b] + + [0x22d] + + [0x22f] + + [0x231] + + list(range(0x233,0x23a)) + + [0x23c] + + list(range(0x23f,0x241)) + + [0x242] + + [0x247] + + [0x249] + + [0x24b] + + [0x24d] + + list(range(0x24f,0x2b0)) + + list(range(0x2b9,0x2c2)) + + list(range(0x2c6,0x2d2)) + + [0x2ec] + + [0x2ee] + + list(range(0x300,0x340)) + + [0x342] + + list(range(0x346,0x34f)) + + list(range(0x350,0x370)) + + [0x371] + + [0x373] + + [0x377] + + list(range(0x37b,0x37e)) + + [0x390] + + list(range(0x3ac,0x3cf)) + + [0x3d7] + + [0x3d9] + + [0x3db] + + [0x3dd] + + [0x3df] + + [0x3e1] + + [0x3e3] + + [0x3e5] + + [0x3e7] + + [0x3e9] + + [0x3eb] + + [0x3ed] + + [0x3ef] + + [0x3f3] + + [0x3f8] + + list(range(0x3fb,0x3fd)) + + list(range(0x430,0x460)) + + [0x461] + + [0x463] + + [0x465] + + [0x467] + + [0x469] + + [0x46b] + + [0x46d] + + [0x46f] + + [0x471] + + [0x473] + + [0x475] + + [0x477] + + [0x479] + + [0x47b] + + [0x47d] + + [0x47f] + + [0x481] + + list(range(0x483,0x488)) + + [0x48b] + + [0x48d] + + [0x48f] + + [0x491] + + [0x493] + + [0x495] + + [0x497] + + [0x499] + + [0x49b] + + [0x49d] + + [0x49f] + + [0x4a1] + + [0x4a3] + + [0x4a5] + + [0x4a7] + + [0x4a9] + + [0x4ab] + + [0x4ad] + + [0x4af] + + [0x4b1] + + [0x4b3] + + [0x4b5] + + [0x4b7] + + [0x4b9] + + [0x4bb] + + [0x4bd] + + [0x4bf] + + [0x4c2] + + [0x4c4] + + [0x4c6] + + [0x4c8] + + [0x4ca] + + [0x4cc] + + list(range(0x4ce,0x4d0)) + + [0x4d1] + + [0x4d3] + + [0x4d5] + + [0x4d7] + + [0x4d9] + + [0x4db] + + [0x4dd] + + [0x4df] + + [0x4e1] + + [0x4e3] + + [0x4e5] + + [0x4e7] + + [0x4e9] + + [0x4eb] + + [0x4ed] + + [0x4ef] + + [0x4f1] + + [0x4f3] + + [0x4f5] + + [0x4f7] + + [0x4f9] + + [0x4fb] + + [0x4fd] + + [0x4ff] + + [0x501] + + [0x503] + + [0x505] + + [0x507] + + [0x509] + + [0x50b] + + [0x50d] + + [0x50f] + + [0x511] + + [0x513] + + [0x515] + + [0x517] + + [0x519] + + [0x51b] + + [0x51d] + + [0x51f] + + [0x521] + + [0x523] + + [0x525] + + [0x527] + + [0x559] + + 
list(range(0x561,0x587)) + + list(range(0x591,0x5be)) + + [0x5bf] + + list(range(0x5c1,0x5c3)) + + list(range(0x5c4,0x5c6)) + + [0x5c7] + + list(range(0x5d0,0x5eb)) + + list(range(0x5f0,0x5f3)) + + list(range(0x610,0x61b)) + + list(range(0x620,0x640)) + + list(range(0x641,0x660)) + + list(range(0x66e,0x675)) + + list(range(0x679,0x6d4)) + + list(range(0x6d5,0x6dd)) + + list(range(0x6df,0x6e9)) + + list(range(0x6ea,0x6f0)) + + list(range(0x6fa,0x700)) + + list(range(0x710,0x74b)) + + list(range(0x74d,0x7b2)) + + list(range(0x7c0,0x7f6)) + + list(range(0x800,0x82e)) + + list(range(0x840,0x85c)) + + [0x8a0] + + list(range(0x8a2,0x8ad)) + + list(range(0x8e4,0x8ff)) + + list(range(0x900,0x958)) + + list(range(0x960,0x964)) + + list(range(0x966,0x970)) + + list(range(0x971,0x978)) + + list(range(0x979,0x980)) + + list(range(0x981,0x984)) + + list(range(0x985,0x98d)) + + list(range(0x98f,0x991)) + + list(range(0x993,0x9a9)) + + list(range(0x9aa,0x9b1)) + + [0x9b2] + + list(range(0x9b6,0x9ba)) + + list(range(0x9bc,0x9c5)) + + list(range(0x9c7,0x9c9)) + + list(range(0x9cb,0x9cf)) + + [0x9d7] + + list(range(0x9e0,0x9e4)) + + list(range(0x9e6,0x9f2)) + + list(range(0xa01,0xa04)) + + list(range(0xa05,0xa0b)) + + list(range(0xa0f,0xa11)) + + list(range(0xa13,0xa29)) + + list(range(0xa2a,0xa31)) + + [0xa32] + + [0xa35] + + list(range(0xa38,0xa3a)) + + [0xa3c] + + list(range(0xa3e,0xa43)) + + list(range(0xa47,0xa49)) + + list(range(0xa4b,0xa4e)) + + [0xa51] + + [0xa5c] + + list(range(0xa66,0xa76)) + + list(range(0xa81,0xa84)) + + list(range(0xa85,0xa8e)) + + list(range(0xa8f,0xa92)) + + list(range(0xa93,0xaa9)) + + list(range(0xaaa,0xab1)) + + list(range(0xab2,0xab4)) + + list(range(0xab5,0xaba)) + + list(range(0xabc,0xac6)) + + list(range(0xac7,0xaca)) + + list(range(0xacb,0xace)) + + [0xad0] + + list(range(0xae0,0xae4)) + + list(range(0xae6,0xaf0)) + + list(range(0xb01,0xb04)) + + list(range(0xb05,0xb0d)) + + list(range(0xb0f,0xb11)) + + list(range(0xb13,0xb29)) + + list(range(0xb2a,0xb31)) + + list(range(0xb32,0xb34)) + + list(range(0xb35,0xb3a)) + + list(range(0xb3c,0xb45)) + + list(range(0xb47,0xb49)) + + list(range(0xb4b,0xb4e)) + + list(range(0xb56,0xb58)) + + list(range(0xb5f,0xb64)) + + list(range(0xb66,0xb70)) + + [0xb71] + + list(range(0xb82,0xb84)) + + list(range(0xb85,0xb8b)) + + list(range(0xb8e,0xb91)) + + list(range(0xb92,0xb96)) + + list(range(0xb99,0xb9b)) + + [0xb9c] + + list(range(0xb9e,0xba0)) + + list(range(0xba3,0xba5)) + + list(range(0xba8,0xbab)) + + list(range(0xbae,0xbba)) + + list(range(0xbbe,0xbc3)) + + list(range(0xbc6,0xbc9)) + + list(range(0xbca,0xbce)) + + [0xbd0] + + [0xbd7] + + list(range(0xbe6,0xbf0)) + + list(range(0xc01,0xc04)) + + list(range(0xc05,0xc0d)) + + list(range(0xc0e,0xc11)) + + list(range(0xc12,0xc29)) + + list(range(0xc2a,0xc34)) + + list(range(0xc35,0xc3a)) + + list(range(0xc3d,0xc45)) + + list(range(0xc46,0xc49)) + + list(range(0xc4a,0xc4e)) + + list(range(0xc55,0xc57)) + + list(range(0xc58,0xc5a)) + + list(range(0xc60,0xc64)) + + list(range(0xc66,0xc70)) + + list(range(0xc82,0xc84)) + + list(range(0xc85,0xc8d)) + + list(range(0xc8e,0xc91)) + + list(range(0xc92,0xca9)) + + list(range(0xcaa,0xcb4)) + + list(range(0xcb5,0xcba)) + + list(range(0xcbc,0xcc5)) + + list(range(0xcc6,0xcc9)) + + list(range(0xcca,0xcce)) + + list(range(0xcd5,0xcd7)) + + [0xcde] + + list(range(0xce0,0xce4)) + + list(range(0xce6,0xcf0)) + + list(range(0xcf1,0xcf3)) + + list(range(0xd02,0xd04)) + + list(range(0xd05,0xd0d)) + + list(range(0xd0e,0xd11)) + + list(range(0xd12,0xd3b)) + 
+ list(range(0xd3d,0xd45)) + + list(range(0xd46,0xd49)) + + list(range(0xd4a,0xd4f)) + + [0xd57] + + list(range(0xd60,0xd64)) + + list(range(0xd66,0xd70)) + + list(range(0xd7a,0xd80)) + + list(range(0xd82,0xd84)) + + list(range(0xd85,0xd97)) + + list(range(0xd9a,0xdb2)) + + list(range(0xdb3,0xdbc)) + + [0xdbd] + + list(range(0xdc0,0xdc7)) + + [0xdca] + + list(range(0xdcf,0xdd5)) + + [0xdd6] + + list(range(0xdd8,0xde0)) + + list(range(0xdf2,0xdf4)) + + list(range(0xe01,0xe33)) + + list(range(0xe34,0xe3b)) + + list(range(0xe40,0xe4f)) + + list(range(0xe50,0xe5a)) + + list(range(0xe81,0xe83)) + + [0xe84] + + list(range(0xe87,0xe89)) + + [0xe8a] + + [0xe8d] + + list(range(0xe94,0xe98)) + + list(range(0xe99,0xea0)) + + list(range(0xea1,0xea4)) + + [0xea5] + + [0xea7] + + list(range(0xeaa,0xeac)) + + list(range(0xead,0xeb3)) + + list(range(0xeb4,0xeba)) + + list(range(0xebb,0xebe)) + + list(range(0xec0,0xec5)) + + [0xec6] + + list(range(0xec8,0xece)) + + list(range(0xed0,0xeda)) + + list(range(0xede,0xee0)) + + [0xf00] + + [0xf0b] + + list(range(0xf18,0xf1a)) + + list(range(0xf20,0xf2a)) + + [0xf35] + + [0xf37] + + [0xf39] + + list(range(0xf3e,0xf43)) + + list(range(0xf44,0xf48)) + + list(range(0xf49,0xf4d)) + + list(range(0xf4e,0xf52)) + + list(range(0xf53,0xf57)) + + list(range(0xf58,0xf5c)) + + list(range(0xf5d,0xf69)) + + list(range(0xf6a,0xf6d)) + + list(range(0xf71,0xf73)) + + [0xf74] + + list(range(0xf7a,0xf81)) + + list(range(0xf82,0xf85)) + + list(range(0xf86,0xf93)) + + list(range(0xf94,0xf98)) + + list(range(0xf99,0xf9d)) + + list(range(0xf9e,0xfa2)) + + list(range(0xfa3,0xfa7)) + + list(range(0xfa8,0xfac)) + + list(range(0xfad,0xfb9)) + + list(range(0xfba,0xfbd)) + + [0xfc6] + + list(range(0x1000,0x104a)) + + list(range(0x1050,0x109e)) + + list(range(0x10d0,0x10fb)) + + list(range(0x10fd,0x1100)) + + list(range(0x1200,0x1249)) + + list(range(0x124a,0x124e)) + + list(range(0x1250,0x1257)) + + [0x1258] + + list(range(0x125a,0x125e)) + + list(range(0x1260,0x1289)) + + list(range(0x128a,0x128e)) + + list(range(0x1290,0x12b1)) + + list(range(0x12b2,0x12b6)) + + list(range(0x12b8,0x12bf)) + + [0x12c0] + + list(range(0x12c2,0x12c6)) + + list(range(0x12c8,0x12d7)) + + list(range(0x12d8,0x1311)) + + list(range(0x1312,0x1316)) + + list(range(0x1318,0x135b)) + + list(range(0x135d,0x1360)) + + list(range(0x1380,0x1390)) + + list(range(0x13a0,0x13f5)) + + list(range(0x1401,0x166d)) + + list(range(0x166f,0x1680)) + + list(range(0x1681,0x169b)) + + list(range(0x16a0,0x16eb)) + + list(range(0x1700,0x170d)) + + list(range(0x170e,0x1715)) + + list(range(0x1720,0x1735)) + + list(range(0x1740,0x1754)) + + list(range(0x1760,0x176d)) + + list(range(0x176e,0x1771)) + + list(range(0x1772,0x1774)) + + list(range(0x1780,0x17b4)) + + list(range(0x17b6,0x17d4)) + + [0x17d7] + + list(range(0x17dc,0x17de)) + + list(range(0x17e0,0x17ea)) + + list(range(0x1810,0x181a)) + + list(range(0x1820,0x1878)) + + list(range(0x1880,0x18ab)) + + list(range(0x18b0,0x18f6)) + + list(range(0x1900,0x191d)) + + list(range(0x1920,0x192c)) + + list(range(0x1930,0x193c)) + + list(range(0x1946,0x196e)) + + list(range(0x1970,0x1975)) + + list(range(0x1980,0x19ac)) + + list(range(0x19b0,0x19ca)) + + list(range(0x19d0,0x19da)) + + list(range(0x1a00,0x1a1c)) + + list(range(0x1a20,0x1a5f)) + + list(range(0x1a60,0x1a7d)) + + list(range(0x1a7f,0x1a8a)) + + list(range(0x1a90,0x1a9a)) + + [0x1aa7] + + list(range(0x1b00,0x1b4c)) + + list(range(0x1b50,0x1b5a)) + + list(range(0x1b6b,0x1b74)) + + list(range(0x1b80,0x1bf4)) + + 
list(range(0x1c00,0x1c38)) + + list(range(0x1c40,0x1c4a)) + + list(range(0x1c4d,0x1c7e)) + + list(range(0x1cd0,0x1cd3)) + + list(range(0x1cd4,0x1cf7)) + + list(range(0x1d00,0x1d2c)) + + [0x1d2f] + + [0x1d3b] + + [0x1d4e] + + list(range(0x1d6b,0x1d78)) + + list(range(0x1d79,0x1d9b)) + + list(range(0x1dc0,0x1de7)) + + list(range(0x1dfc,0x1e00)) + + [0x1e01] + + [0x1e03] + + [0x1e05] + + [0x1e07] + + [0x1e09] + + [0x1e0b] + + [0x1e0d] + + [0x1e0f] + + [0x1e11] + + [0x1e13] + + [0x1e15] + + [0x1e17] + + [0x1e19] + + [0x1e1b] + + [0x1e1d] + + [0x1e1f] + + [0x1e21] + + [0x1e23] + + [0x1e25] + + [0x1e27] + + [0x1e29] + + [0x1e2b] + + [0x1e2d] + + [0x1e2f] + + [0x1e31] + + [0x1e33] + + [0x1e35] + + [0x1e37] + + [0x1e39] + + [0x1e3b] + + [0x1e3d] + + [0x1e3f] + + [0x1e41] + + [0x1e43] + + [0x1e45] + + [0x1e47] + + [0x1e49] + + [0x1e4b] + + [0x1e4d] + + [0x1e4f] + + [0x1e51] + + [0x1e53] + + [0x1e55] + + [0x1e57] + + [0x1e59] + + [0x1e5b] + + [0x1e5d] + + [0x1e5f] + + [0x1e61] + + [0x1e63] + + [0x1e65] + + [0x1e67] + + [0x1e69] + + [0x1e6b] + + [0x1e6d] + + [0x1e6f] + + [0x1e71] + + [0x1e73] + + [0x1e75] + + [0x1e77] + + [0x1e79] + + [0x1e7b] + + [0x1e7d] + + [0x1e7f] + + [0x1e81] + + [0x1e83] + + [0x1e85] + + [0x1e87] + + [0x1e89] + + [0x1e8b] + + [0x1e8d] + + [0x1e8f] + + [0x1e91] + + [0x1e93] + + list(range(0x1e95,0x1e9a)) + + list(range(0x1e9c,0x1e9e)) + + [0x1e9f] + + [0x1ea1] + + [0x1ea3] + + [0x1ea5] + + [0x1ea7] + + [0x1ea9] + + [0x1eab] + + [0x1ead] + + [0x1eaf] + + [0x1eb1] + + [0x1eb3] + + [0x1eb5] + + [0x1eb7] + + [0x1eb9] + + [0x1ebb] + + [0x1ebd] + + [0x1ebf] + + [0x1ec1] + + [0x1ec3] + + [0x1ec5] + + [0x1ec7] + + [0x1ec9] + + [0x1ecb] + + [0x1ecd] + + [0x1ecf] + + [0x1ed1] + + [0x1ed3] + + [0x1ed5] + + [0x1ed7] + + [0x1ed9] + + [0x1edb] + + [0x1edd] + + [0x1edf] + + [0x1ee1] + + [0x1ee3] + + [0x1ee5] + + [0x1ee7] + + [0x1ee9] + + [0x1eeb] + + [0x1eed] + + [0x1eef] + + [0x1ef1] + + [0x1ef3] + + [0x1ef5] + + [0x1ef7] + + [0x1ef9] + + [0x1efb] + + [0x1efd] + + list(range(0x1eff,0x1f08)) + + list(range(0x1f10,0x1f16)) + + list(range(0x1f20,0x1f28)) + + list(range(0x1f30,0x1f38)) + + list(range(0x1f40,0x1f46)) + + list(range(0x1f50,0x1f58)) + + list(range(0x1f60,0x1f68)) + + [0x1f70] + + [0x1f72] + + [0x1f74] + + [0x1f76] + + [0x1f78] + + [0x1f7a] + + [0x1f7c] + + list(range(0x1fb0,0x1fb2)) + + [0x1fb6] + + [0x1fc6] + + list(range(0x1fd0,0x1fd3)) + + list(range(0x1fd6,0x1fd8)) + + list(range(0x1fe0,0x1fe3)) + + list(range(0x1fe4,0x1fe8)) + + [0x1ff6] + + [0x214e] + + [0x2184] + + list(range(0x2c30,0x2c5f)) + + [0x2c61] + + list(range(0x2c65,0x2c67)) + + [0x2c68] + + [0x2c6a] + + [0x2c6c] + + [0x2c71] + + list(range(0x2c73,0x2c75)) + + list(range(0x2c76,0x2c7c)) + + [0x2c81] + + [0x2c83] + + [0x2c85] + + [0x2c87] + + [0x2c89] + + [0x2c8b] + + [0x2c8d] + + [0x2c8f] + + [0x2c91] + + [0x2c93] + + [0x2c95] + + [0x2c97] + + [0x2c99] + + [0x2c9b] + + [0x2c9d] + + [0x2c9f] + + [0x2ca1] + + [0x2ca3] + + [0x2ca5] + + [0x2ca7] + + [0x2ca9] + + [0x2cab] + + [0x2cad] + + [0x2caf] + + [0x2cb1] + + [0x2cb3] + + [0x2cb5] + + [0x2cb7] + + [0x2cb9] + + [0x2cbb] + + [0x2cbd] + + [0x2cbf] + + [0x2cc1] + + [0x2cc3] + + [0x2cc5] + + [0x2cc7] + + [0x2cc9] + + [0x2ccb] + + [0x2ccd] + + [0x2ccf] + + [0x2cd1] + + [0x2cd3] + + [0x2cd5] + + [0x2cd7] + + [0x2cd9] + + [0x2cdb] + + [0x2cdd] + + [0x2cdf] + + [0x2ce1] + + list(range(0x2ce3,0x2ce5)) + + [0x2cec] + + list(range(0x2cee,0x2cf2)) + + [0x2cf3] + + list(range(0x2d00,0x2d26)) + + [0x2d27] + + [0x2d2d] + + list(range(0x2d30,0x2d68)) + + list(range(0x2d7f,0x2d97)) 
+ + list(range(0x2da0,0x2da7)) + + list(range(0x2da8,0x2daf)) + + list(range(0x2db0,0x2db7)) + + list(range(0x2db8,0x2dbf)) + + list(range(0x2dc0,0x2dc7)) + + list(range(0x2dc8,0x2dcf)) + + list(range(0x2dd0,0x2dd7)) + + list(range(0x2dd8,0x2ddf)) + + list(range(0x2de0,0x2e00)) + + [0x2e2f] + + list(range(0x3005,0x3008)) + + list(range(0x302a,0x302e)) + + [0x303c] + + list(range(0x3041,0x3097)) + + list(range(0x3099,0x309b)) + + list(range(0x309d,0x309f)) + + list(range(0x30a1,0x30fb)) + + list(range(0x30fc,0x30ff)) + + list(range(0x3105,0x312e)) + + list(range(0x31a0,0x31bb)) + + list(range(0x31f0,0x3200)) + + list(range(0x3400,0x4db6)) + + list(range(0x4e00,0x9fcd)) + + list(range(0xa000,0xa48d)) + + list(range(0xa4d0,0xa4fe)) + + list(range(0xa500,0xa60d)) + + list(range(0xa610,0xa62c)) + + [0xa641] + + [0xa643] + + [0xa645] + + [0xa647] + + [0xa649] + + [0xa64b] + + [0xa64d] + + [0xa64f] + + [0xa651] + + [0xa653] + + [0xa655] + + [0xa657] + + [0xa659] + + [0xa65b] + + [0xa65d] + + [0xa65f] + + [0xa661] + + [0xa663] + + [0xa665] + + [0xa667] + + [0xa669] + + [0xa66b] + + list(range(0xa66d,0xa670)) + + list(range(0xa674,0xa67e)) + + [0xa67f] + + [0xa681] + + [0xa683] + + [0xa685] + + [0xa687] + + [0xa689] + + [0xa68b] + + [0xa68d] + + [0xa68f] + + [0xa691] + + [0xa693] + + [0xa695] + + [0xa697] + + list(range(0xa69f,0xa6e6)) + + list(range(0xa6f0,0xa6f2)) + + list(range(0xa717,0xa720)) + + [0xa723] + + [0xa725] + + [0xa727] + + [0xa729] + + [0xa72b] + + [0xa72d] + + list(range(0xa72f,0xa732)) + + [0xa733] + + [0xa735] + + [0xa737] + + [0xa739] + + [0xa73b] + + [0xa73d] + + [0xa73f] + + [0xa741] + + [0xa743] + + [0xa745] + + [0xa747] + + [0xa749] + + [0xa74b] + + [0xa74d] + + [0xa74f] + + [0xa751] + + [0xa753] + + [0xa755] + + [0xa757] + + [0xa759] + + [0xa75b] + + [0xa75d] + + [0xa75f] + + [0xa761] + + [0xa763] + + [0xa765] + + [0xa767] + + [0xa769] + + [0xa76b] + + [0xa76d] + + [0xa76f] + + list(range(0xa771,0xa779)) + + [0xa77a] + + [0xa77c] + + [0xa77f] + + [0xa781] + + [0xa783] + + [0xa785] + + list(range(0xa787,0xa789)) + + [0xa78c] + + [0xa78e] + + [0xa791] + + [0xa793] + + [0xa7a1] + + [0xa7a3] + + [0xa7a5] + + [0xa7a7] + + [0xa7a9] + + list(range(0xa7fa,0xa828)) + + list(range(0xa840,0xa874)) + + list(range(0xa880,0xa8c5)) + + list(range(0xa8d0,0xa8da)) + + list(range(0xa8e0,0xa8f8)) + + [0xa8fb] + + list(range(0xa900,0xa92e)) + + list(range(0xa930,0xa954)) + + list(range(0xa980,0xa9c1)) + + list(range(0xa9cf,0xa9da)) + + list(range(0xaa00,0xaa37)) + + list(range(0xaa40,0xaa4e)) + + list(range(0xaa50,0xaa5a)) + + list(range(0xaa60,0xaa77)) + + list(range(0xaa7a,0xaa7c)) + + list(range(0xaa80,0xaac3)) + + list(range(0xaadb,0xaade)) + + list(range(0xaae0,0xaaf0)) + + list(range(0xaaf2,0xaaf7)) + + list(range(0xab01,0xab07)) + + list(range(0xab09,0xab0f)) + + list(range(0xab11,0xab17)) + + list(range(0xab20,0xab27)) + + list(range(0xab28,0xab2f)) + + list(range(0xabc0,0xabeb)) + + list(range(0xabec,0xabee)) + + list(range(0xabf0,0xabfa)) + + list(range(0xac00,0xd7a4)) + + list(range(0xfa0e,0xfa10)) + + [0xfa11] + + list(range(0xfa13,0xfa15)) + + [0xfa1f] + + [0xfa21] + + list(range(0xfa23,0xfa25)) + + list(range(0xfa27,0xfa2a)) + + [0xfb1e] + + list(range(0xfe20,0xfe27)) + + [0xfe73] + + list(range(0x10000,0x1000c)) + + list(range(0x1000d,0x10027)) + + list(range(0x10028,0x1003b)) + + list(range(0x1003c,0x1003e)) + + list(range(0x1003f,0x1004e)) + + list(range(0x10050,0x1005e)) + + list(range(0x10080,0x100fb)) + + [0x101fd] + + list(range(0x10280,0x1029d)) + + 
list(range(0x102a0,0x102d1)) + + list(range(0x10300,0x1031f)) + + list(range(0x10330,0x10341)) + + list(range(0x10342,0x1034a)) + + list(range(0x10380,0x1039e)) + + list(range(0x103a0,0x103c4)) + + list(range(0x103c8,0x103d0)) + + list(range(0x10428,0x1049e)) + + list(range(0x104a0,0x104aa)) + + list(range(0x10800,0x10806)) + + [0x10808] + + list(range(0x1080a,0x10836)) + + list(range(0x10837,0x10839)) + + [0x1083c] + + list(range(0x1083f,0x10856)) + + list(range(0x10900,0x10916)) + + list(range(0x10920,0x1093a)) + + list(range(0x10980,0x109b8)) + + list(range(0x109be,0x109c0)) + + list(range(0x10a00,0x10a04)) + + list(range(0x10a05,0x10a07)) + + list(range(0x10a0c,0x10a14)) + + list(range(0x10a15,0x10a18)) + + list(range(0x10a19,0x10a34)) + + list(range(0x10a38,0x10a3b)) + + [0x10a3f] + + list(range(0x10a60,0x10a7d)) + + list(range(0x10b00,0x10b36)) + + list(range(0x10b40,0x10b56)) + + list(range(0x10b60,0x10b73)) + + list(range(0x10c00,0x10c49)) + + list(range(0x11000,0x11047)) + + list(range(0x11066,0x11070)) + + list(range(0x11080,0x110bb)) + + list(range(0x110d0,0x110e9)) + + list(range(0x110f0,0x110fa)) + + list(range(0x11100,0x11135)) + + list(range(0x11136,0x11140)) + + list(range(0x11180,0x111c5)) + + list(range(0x111d0,0x111da)) + + list(range(0x11680,0x116b8)) + + list(range(0x116c0,0x116ca)) + + list(range(0x12000,0x1236f)) + + list(range(0x13000,0x1342f)) + + list(range(0x16800,0x16a39)) + + list(range(0x16f00,0x16f45)) + + list(range(0x16f50,0x16f7f)) + + list(range(0x16f8f,0x16fa0)) + + list(range(0x1b000,0x1b002)) + + list(range(0x20000,0x2a6d7)) + + list(range(0x2a700,0x2b735)) + + list(range(0x2b740,0x2b81e)) + ), + 'CONTEXTJ': frozenset( + list(range(0x200c,0x200e)) + ), + 'CONTEXTO': frozenset( + [0xb7] + + [0x375] + + list(range(0x5f3,0x5f5)) + + list(range(0x660,0x66a)) + + list(range(0x6f0,0x6fa)) + + [0x30fb] + ), + 'DISALLOWED': frozenset( + list(range(0x0,0x2d)) + + list(range(0x2e,0x30)) + + list(range(0x3a,0x61)) + + list(range(0x7b,0xb7)) + + list(range(0xb8,0xdf)) + + [0xf7] + + [0x100] + + [0x102] + + [0x104] + + [0x106] + + [0x108] + + [0x10a] + + [0x10c] + + [0x10e] + + [0x110] + + [0x112] + + [0x114] + + [0x116] + + [0x118] + + [0x11a] + + [0x11c] + + [0x11e] + + [0x120] + + [0x122] + + [0x124] + + [0x126] + + [0x128] + + [0x12a] + + [0x12c] + + [0x12e] + + [0x130] + + list(range(0x132,0x135)) + + [0x136] + + [0x139] + + [0x13b] + + [0x13d] + + list(range(0x13f,0x142)) + + [0x143] + + [0x145] + + [0x147] + + list(range(0x149,0x14b)) + + [0x14c] + + [0x14e] + + [0x150] + + [0x152] + + [0x154] + + [0x156] + + [0x158] + + [0x15a] + + [0x15c] + + [0x15e] + + [0x160] + + [0x162] + + [0x164] + + [0x166] + + [0x168] + + [0x16a] + + [0x16c] + + [0x16e] + + [0x170] + + [0x172] + + [0x174] + + [0x176] + + list(range(0x178,0x17a)) + + [0x17b] + + [0x17d] + + [0x17f] + + list(range(0x181,0x183)) + + [0x184] + + list(range(0x186,0x188)) + + list(range(0x189,0x18c)) + + list(range(0x18e,0x192)) + + list(range(0x193,0x195)) + + list(range(0x196,0x199)) + + list(range(0x19c,0x19e)) + + list(range(0x19f,0x1a1)) + + [0x1a2] + + [0x1a4] + + list(range(0x1a6,0x1a8)) + + [0x1a9] + + [0x1ac] + + list(range(0x1ae,0x1b0)) + + list(range(0x1b1,0x1b4)) + + [0x1b5] + + list(range(0x1b7,0x1b9)) + + [0x1bc] + + list(range(0x1c4,0x1ce)) + + [0x1cf] + + [0x1d1] + + [0x1d3] + + [0x1d5] + + [0x1d7] + + [0x1d9] + + [0x1db] + + [0x1de] + + [0x1e0] + + [0x1e2] + + [0x1e4] + + [0x1e6] + + [0x1e8] + + [0x1ea] + + [0x1ec] + + [0x1ee] + + list(range(0x1f1,0x1f5)) + + list(range(0x1f6,0x1f9)) + + 
[0x1fa] + + [0x1fc] + + [0x1fe] + + [0x200] + + [0x202] + + [0x204] + + [0x206] + + [0x208] + + [0x20a] + + [0x20c] + + [0x20e] + + [0x210] + + [0x212] + + [0x214] + + [0x216] + + [0x218] + + [0x21a] + + [0x21c] + + [0x21e] + + [0x220] + + [0x222] + + [0x224] + + [0x226] + + [0x228] + + [0x22a] + + [0x22c] + + [0x22e] + + [0x230] + + [0x232] + + list(range(0x23a,0x23c)) + + list(range(0x23d,0x23f)) + + [0x241] + + list(range(0x243,0x247)) + + [0x248] + + [0x24a] + + [0x24c] + + [0x24e] + + list(range(0x2b0,0x2b9)) + + list(range(0x2c2,0x2c6)) + + list(range(0x2d2,0x2ec)) + + [0x2ed] + + list(range(0x2ef,0x300)) + + list(range(0x340,0x342)) + + list(range(0x343,0x346)) + + [0x34f] + + [0x370] + + [0x372] + + [0x374] + + [0x376] + + [0x37a] + + [0x37e] + + list(range(0x384,0x38b)) + + [0x38c] + + list(range(0x38e,0x390)) + + list(range(0x391,0x3a2)) + + list(range(0x3a3,0x3ac)) + + list(range(0x3cf,0x3d7)) + + [0x3d8] + + [0x3da] + + [0x3dc] + + [0x3de] + + [0x3e0] + + [0x3e2] + + [0x3e4] + + [0x3e6] + + [0x3e8] + + [0x3ea] + + [0x3ec] + + [0x3ee] + + list(range(0x3f0,0x3f3)) + + list(range(0x3f4,0x3f8)) + + list(range(0x3f9,0x3fb)) + + list(range(0x3fd,0x430)) + + [0x460] + + [0x462] + + [0x464] + + [0x466] + + [0x468] + + [0x46a] + + [0x46c] + + [0x46e] + + [0x470] + + [0x472] + + [0x474] + + [0x476] + + [0x478] + + [0x47a] + + [0x47c] + + [0x47e] + + [0x480] + + [0x482] + + list(range(0x488,0x48b)) + + [0x48c] + + [0x48e] + + [0x490] + + [0x492] + + [0x494] + + [0x496] + + [0x498] + + [0x49a] + + [0x49c] + + [0x49e] + + [0x4a0] + + [0x4a2] + + [0x4a4] + + [0x4a6] + + [0x4a8] + + [0x4aa] + + [0x4ac] + + [0x4ae] + + [0x4b0] + + [0x4b2] + + [0x4b4] + + [0x4b6] + + [0x4b8] + + [0x4ba] + + [0x4bc] + + [0x4be] + + list(range(0x4c0,0x4c2)) + + [0x4c3] + + [0x4c5] + + [0x4c7] + + [0x4c9] + + [0x4cb] + + [0x4cd] + + [0x4d0] + + [0x4d2] + + [0x4d4] + + [0x4d6] + + [0x4d8] + + [0x4da] + + [0x4dc] + + [0x4de] + + [0x4e0] + + [0x4e2] + + [0x4e4] + + [0x4e6] + + [0x4e8] + + [0x4ea] + + [0x4ec] + + [0x4ee] + + [0x4f0] + + [0x4f2] + + [0x4f4] + + [0x4f6] + + [0x4f8] + + [0x4fa] + + [0x4fc] + + [0x4fe] + + [0x500] + + [0x502] + + [0x504] + + [0x506] + + [0x508] + + [0x50a] + + [0x50c] + + [0x50e] + + [0x510] + + [0x512] + + [0x514] + + [0x516] + + [0x518] + + [0x51a] + + [0x51c] + + [0x51e] + + [0x520] + + [0x522] + + [0x524] + + [0x526] + + list(range(0x531,0x557)) + + list(range(0x55a,0x560)) + + [0x587] + + list(range(0x589,0x58b)) + + [0x58f] + + [0x5be] + + [0x5c0] + + [0x5c3] + + [0x5c6] + + list(range(0x600,0x605)) + + list(range(0x606,0x610)) + + list(range(0x61b,0x61d)) + + list(range(0x61e,0x620)) + + [0x640] + + list(range(0x66a,0x66e)) + + list(range(0x675,0x679)) + + [0x6d4] + + list(range(0x6dd,0x6df)) + + [0x6e9] + + list(range(0x700,0x70e)) + + [0x70f] + + list(range(0x7f6,0x7fb)) + + list(range(0x830,0x83f)) + + [0x85e] + + list(range(0x958,0x960)) + + list(range(0x964,0x966)) + + [0x970] + + list(range(0x9dc,0x9de)) + + [0x9df] + + list(range(0x9f2,0x9fc)) + + [0xa33] + + [0xa36] + + list(range(0xa59,0xa5c)) + + [0xa5e] + + list(range(0xaf0,0xaf2)) + + list(range(0xb5c,0xb5e)) + + [0xb70] + + list(range(0xb72,0xb78)) + + list(range(0xbf0,0xbfb)) + + list(range(0xc78,0xc80)) + + list(range(0xd70,0xd76)) + + [0xd79] + + [0xdf4] + + [0xe33] + + [0xe3f] + + [0xe4f] + + list(range(0xe5a,0xe5c)) + + [0xeb3] + + list(range(0xedc,0xede)) + + list(range(0xf01,0xf0b)) + + list(range(0xf0c,0xf18)) + + list(range(0xf1a,0xf20)) + + list(range(0xf2a,0xf35)) + + [0xf36] + + [0xf38] + + 
list(range(0xf3a,0xf3e)) + + [0xf43] + + [0xf4d] + + [0xf52] + + [0xf57] + + [0xf5c] + + [0xf69] + + [0xf73] + + list(range(0xf75,0xf7a)) + + [0xf81] + + [0xf85] + + [0xf93] + + [0xf9d] + + [0xfa2] + + [0xfa7] + + [0xfac] + + [0xfb9] + + list(range(0xfbe,0xfc6)) + + list(range(0xfc7,0xfcd)) + + list(range(0xfce,0xfdb)) + + list(range(0x104a,0x1050)) + + list(range(0x109e,0x10c6)) + + [0x10c7] + + [0x10cd] + + list(range(0x10fb,0x10fd)) + + list(range(0x1100,0x1200)) + + list(range(0x1360,0x137d)) + + list(range(0x1390,0x139a)) + + [0x1400] + + list(range(0x166d,0x166f)) + + [0x1680] + + list(range(0x169b,0x169d)) + + list(range(0x16eb,0x16f1)) + + list(range(0x1735,0x1737)) + + list(range(0x17b4,0x17b6)) + + list(range(0x17d4,0x17d7)) + + list(range(0x17d8,0x17dc)) + + list(range(0x17f0,0x17fa)) + + list(range(0x1800,0x180f)) + + [0x1940] + + list(range(0x1944,0x1946)) + + [0x19da] + + list(range(0x19de,0x1a00)) + + list(range(0x1a1e,0x1a20)) + + list(range(0x1aa0,0x1aa7)) + + list(range(0x1aa8,0x1aae)) + + list(range(0x1b5a,0x1b6b)) + + list(range(0x1b74,0x1b7d)) + + list(range(0x1bfc,0x1c00)) + + list(range(0x1c3b,0x1c40)) + + list(range(0x1c7e,0x1c80)) + + list(range(0x1cc0,0x1cc8)) + + [0x1cd3] + + list(range(0x1d2c,0x1d2f)) + + list(range(0x1d30,0x1d3b)) + + list(range(0x1d3c,0x1d4e)) + + list(range(0x1d4f,0x1d6b)) + + [0x1d78] + + list(range(0x1d9b,0x1dc0)) + + [0x1e00] + + [0x1e02] + + [0x1e04] + + [0x1e06] + + [0x1e08] + + [0x1e0a] + + [0x1e0c] + + [0x1e0e] + + [0x1e10] + + [0x1e12] + + [0x1e14] + + [0x1e16] + + [0x1e18] + + [0x1e1a] + + [0x1e1c] + + [0x1e1e] + + [0x1e20] + + [0x1e22] + + [0x1e24] + + [0x1e26] + + [0x1e28] + + [0x1e2a] + + [0x1e2c] + + [0x1e2e] + + [0x1e30] + + [0x1e32] + + [0x1e34] + + [0x1e36] + + [0x1e38] + + [0x1e3a] + + [0x1e3c] + + [0x1e3e] + + [0x1e40] + + [0x1e42] + + [0x1e44] + + [0x1e46] + + [0x1e48] + + [0x1e4a] + + [0x1e4c] + + [0x1e4e] + + [0x1e50] + + [0x1e52] + + [0x1e54] + + [0x1e56] + + [0x1e58] + + [0x1e5a] + + [0x1e5c] + + [0x1e5e] + + [0x1e60] + + [0x1e62] + + [0x1e64] + + [0x1e66] + + [0x1e68] + + [0x1e6a] + + [0x1e6c] + + [0x1e6e] + + [0x1e70] + + [0x1e72] + + [0x1e74] + + [0x1e76] + + [0x1e78] + + [0x1e7a] + + [0x1e7c] + + [0x1e7e] + + [0x1e80] + + [0x1e82] + + [0x1e84] + + [0x1e86] + + [0x1e88] + + [0x1e8a] + + [0x1e8c] + + [0x1e8e] + + [0x1e90] + + [0x1e92] + + [0x1e94] + + list(range(0x1e9a,0x1e9c)) + + [0x1e9e] + + [0x1ea0] + + [0x1ea2] + + [0x1ea4] + + [0x1ea6] + + [0x1ea8] + + [0x1eaa] + + [0x1eac] + + [0x1eae] + + [0x1eb0] + + [0x1eb2] + + [0x1eb4] + + [0x1eb6] + + [0x1eb8] + + [0x1eba] + + [0x1ebc] + + [0x1ebe] + + [0x1ec0] + + [0x1ec2] + + [0x1ec4] + + [0x1ec6] + + [0x1ec8] + + [0x1eca] + + [0x1ecc] + + [0x1ece] + + [0x1ed0] + + [0x1ed2] + + [0x1ed4] + + [0x1ed6] + + [0x1ed8] + + [0x1eda] + + [0x1edc] + + [0x1ede] + + [0x1ee0] + + [0x1ee2] + + [0x1ee4] + + [0x1ee6] + + [0x1ee8] + + [0x1eea] + + [0x1eec] + + [0x1eee] + + [0x1ef0] + + [0x1ef2] + + [0x1ef4] + + [0x1ef6] + + [0x1ef8] + + [0x1efa] + + [0x1efc] + + [0x1efe] + + list(range(0x1f08,0x1f10)) + + list(range(0x1f18,0x1f1e)) + + list(range(0x1f28,0x1f30)) + + list(range(0x1f38,0x1f40)) + + list(range(0x1f48,0x1f4e)) + + [0x1f59] + + [0x1f5b] + + [0x1f5d] + + [0x1f5f] + + list(range(0x1f68,0x1f70)) + + [0x1f71] + + [0x1f73] + + [0x1f75] + + [0x1f77] + + [0x1f79] + + [0x1f7b] + + [0x1f7d] + + list(range(0x1f80,0x1fb0)) + + list(range(0x1fb2,0x1fb5)) + + list(range(0x1fb7,0x1fc5)) + + list(range(0x1fc7,0x1fd0)) + + [0x1fd3] + + list(range(0x1fd8,0x1fdc)) + + 
list(range(0x1fdd,0x1fe0)) + + [0x1fe3] + + list(range(0x1fe8,0x1ff0)) + + list(range(0x1ff2,0x1ff5)) + + list(range(0x1ff7,0x1fff)) + + list(range(0x2000,0x200c)) + + list(range(0x200e,0x2065)) + + list(range(0x2066,0x2072)) + + list(range(0x2074,0x208f)) + + list(range(0x2090,0x209d)) + + list(range(0x20a0,0x20bb)) + + list(range(0x20d0,0x20f1)) + + list(range(0x2100,0x214e)) + + list(range(0x214f,0x2184)) + + list(range(0x2185,0x218a)) + + list(range(0x2190,0x23f4)) + + list(range(0x2400,0x2427)) + + list(range(0x2440,0x244b)) + + list(range(0x2460,0x2700)) + + list(range(0x2701,0x2b4d)) + + list(range(0x2b50,0x2b5a)) + + list(range(0x2c00,0x2c2f)) + + [0x2c60] + + list(range(0x2c62,0x2c65)) + + [0x2c67] + + [0x2c69] + + [0x2c6b] + + list(range(0x2c6d,0x2c71)) + + [0x2c72] + + [0x2c75] + + list(range(0x2c7c,0x2c81)) + + [0x2c82] + + [0x2c84] + + [0x2c86] + + [0x2c88] + + [0x2c8a] + + [0x2c8c] + + [0x2c8e] + + [0x2c90] + + [0x2c92] + + [0x2c94] + + [0x2c96] + + [0x2c98] + + [0x2c9a] + + [0x2c9c] + + [0x2c9e] + + [0x2ca0] + + [0x2ca2] + + [0x2ca4] + + [0x2ca6] + + [0x2ca8] + + [0x2caa] + + [0x2cac] + + [0x2cae] + + [0x2cb0] + + [0x2cb2] + + [0x2cb4] + + [0x2cb6] + + [0x2cb8] + + [0x2cba] + + [0x2cbc] + + [0x2cbe] + + [0x2cc0] + + [0x2cc2] + + [0x2cc4] + + [0x2cc6] + + [0x2cc8] + + [0x2cca] + + [0x2ccc] + + [0x2cce] + + [0x2cd0] + + [0x2cd2] + + [0x2cd4] + + [0x2cd6] + + [0x2cd8] + + [0x2cda] + + [0x2cdc] + + [0x2cde] + + [0x2ce0] + + [0x2ce2] + + list(range(0x2ce5,0x2cec)) + + [0x2ced] + + [0x2cf2] + + list(range(0x2cf9,0x2d00)) + + list(range(0x2d6f,0x2d71)) + + list(range(0x2e00,0x2e2f)) + + list(range(0x2e30,0x2e3c)) + + list(range(0x2e80,0x2e9a)) + + list(range(0x2e9b,0x2ef4)) + + list(range(0x2f00,0x2fd6)) + + list(range(0x2ff0,0x2ffc)) + + list(range(0x3000,0x3005)) + + list(range(0x3008,0x302a)) + + list(range(0x302e,0x303c)) + + list(range(0x303d,0x3040)) + + list(range(0x309b,0x309d)) + + list(range(0x309f,0x30a1)) + + [0x30ff] + + list(range(0x3131,0x318f)) + + list(range(0x3190,0x31a0)) + + list(range(0x31c0,0x31e4)) + + list(range(0x3200,0x321f)) + + list(range(0x3220,0x32ff)) + + list(range(0x3300,0x3400)) + + list(range(0x4dc0,0x4e00)) + + list(range(0xa490,0xa4c7)) + + list(range(0xa4fe,0xa500)) + + list(range(0xa60d,0xa610)) + + [0xa640] + + [0xa642] + + [0xa644] + + [0xa646] + + [0xa648] + + [0xa64a] + + [0xa64c] + + [0xa64e] + + [0xa650] + + [0xa652] + + [0xa654] + + [0xa656] + + [0xa658] + + [0xa65a] + + [0xa65c] + + [0xa65e] + + [0xa660] + + [0xa662] + + [0xa664] + + [0xa666] + + [0xa668] + + [0xa66a] + + [0xa66c] + + list(range(0xa670,0xa674)) + + [0xa67e] + + [0xa680] + + [0xa682] + + [0xa684] + + [0xa686] + + [0xa688] + + [0xa68a] + + [0xa68c] + + [0xa68e] + + [0xa690] + + [0xa692] + + [0xa694] + + [0xa696] + + list(range(0xa6e6,0xa6f0)) + + list(range(0xa6f2,0xa6f8)) + + list(range(0xa700,0xa717)) + + list(range(0xa720,0xa723)) + + [0xa724] + + [0xa726] + + [0xa728] + + [0xa72a] + + [0xa72c] + + [0xa72e] + + [0xa732] + + [0xa734] + + [0xa736] + + [0xa738] + + [0xa73a] + + [0xa73c] + + [0xa73e] + + [0xa740] + + [0xa742] + + [0xa744] + + [0xa746] + + [0xa748] + + [0xa74a] + + [0xa74c] + + [0xa74e] + + [0xa750] + + [0xa752] + + [0xa754] + + [0xa756] + + [0xa758] + + [0xa75a] + + [0xa75c] + + [0xa75e] + + [0xa760] + + [0xa762] + + [0xa764] + + [0xa766] + + [0xa768] + + [0xa76a] + + [0xa76c] + + [0xa76e] + + [0xa770] + + [0xa779] + + [0xa77b] + + list(range(0xa77d,0xa77f)) + + [0xa780] + + [0xa782] + + [0xa784] + + [0xa786] + + list(range(0xa789,0xa78c)) + + [0xa78d] + 
+ [0xa790] + + [0xa792] + + [0xa7a0] + + [0xa7a2] + + [0xa7a4] + + [0xa7a6] + + [0xa7a8] + + [0xa7aa] + + list(range(0xa7f8,0xa7fa)) + + list(range(0xa828,0xa82c)) + + list(range(0xa830,0xa83a)) + + list(range(0xa874,0xa878)) + + list(range(0xa8ce,0xa8d0)) + + list(range(0xa8f8,0xa8fb)) + + list(range(0xa92e,0xa930)) + + list(range(0xa95f,0xa97d)) + + list(range(0xa9c1,0xa9ce)) + + list(range(0xa9de,0xa9e0)) + + list(range(0xaa5c,0xaa60)) + + list(range(0xaa77,0xaa7a)) + + list(range(0xaade,0xaae0)) + + list(range(0xaaf0,0xaaf2)) + + [0xabeb] + + list(range(0xd7b0,0xd7c7)) + + list(range(0xd7cb,0xd7fc)) + + list(range(0xd800,0xfa0e)) + + [0xfa10] + + [0xfa12] + + list(range(0xfa15,0xfa1f)) + + [0xfa20] + + [0xfa22] + + list(range(0xfa25,0xfa27)) + + list(range(0xfa2a,0xfa6e)) + + list(range(0xfa70,0xfada)) + + list(range(0xfb00,0xfb07)) + + list(range(0xfb13,0xfb18)) + + [0xfb1d] + + list(range(0xfb1f,0xfb37)) + + list(range(0xfb38,0xfb3d)) + + [0xfb3e] + + list(range(0xfb40,0xfb42)) + + list(range(0xfb43,0xfb45)) + + list(range(0xfb46,0xfbc2)) + + list(range(0xfbd3,0xfd40)) + + list(range(0xfd50,0xfd90)) + + list(range(0xfd92,0xfdc8)) + + list(range(0xfdd0,0xfdfe)) + + list(range(0xfe00,0xfe1a)) + + list(range(0xfe30,0xfe53)) + + list(range(0xfe54,0xfe67)) + + list(range(0xfe68,0xfe6c)) + + list(range(0xfe70,0xfe73)) + + [0xfe74] + + list(range(0xfe76,0xfefd)) + + [0xfeff] + + list(range(0xff01,0xffbf)) + + list(range(0xffc2,0xffc8)) + + list(range(0xffca,0xffd0)) + + list(range(0xffd2,0xffd8)) + + list(range(0xffda,0xffdd)) + + list(range(0xffe0,0xffe7)) + + list(range(0xffe8,0xffef)) + + list(range(0xfff9,0x10000)) + + list(range(0x10100,0x10103)) + + list(range(0x10107,0x10134)) + + list(range(0x10137,0x1018b)) + + list(range(0x10190,0x1019c)) + + list(range(0x101d0,0x101fd)) + + list(range(0x10320,0x10324)) + + [0x10341] + + [0x1034a] + + [0x1039f] + + list(range(0x103d0,0x103d6)) + + list(range(0x10400,0x10428)) + + list(range(0x10857,0x10860)) + + list(range(0x10916,0x1091c)) + + [0x1091f] + + [0x1093f] + + list(range(0x10a40,0x10a48)) + + list(range(0x10a50,0x10a59)) + + list(range(0x10a7d,0x10a80)) + + list(range(0x10b39,0x10b40)) + + list(range(0x10b58,0x10b60)) + + list(range(0x10b78,0x10b80)) + + list(range(0x10e60,0x10e7f)) + + list(range(0x11047,0x1104e)) + + list(range(0x11052,0x11066)) + + list(range(0x110bb,0x110c2)) + + list(range(0x11140,0x11144)) + + list(range(0x111c5,0x111c9)) + + list(range(0x12400,0x12463)) + + list(range(0x12470,0x12474)) + + list(range(0x1d000,0x1d0f6)) + + list(range(0x1d100,0x1d127)) + + list(range(0x1d129,0x1d1de)) + + list(range(0x1d200,0x1d246)) + + list(range(0x1d300,0x1d357)) + + list(range(0x1d360,0x1d372)) + + list(range(0x1d400,0x1d455)) + + list(range(0x1d456,0x1d49d)) + + list(range(0x1d49e,0x1d4a0)) + + [0x1d4a2] + + list(range(0x1d4a5,0x1d4a7)) + + list(range(0x1d4a9,0x1d4ad)) + + list(range(0x1d4ae,0x1d4ba)) + + [0x1d4bb] + + list(range(0x1d4bd,0x1d4c4)) + + list(range(0x1d4c5,0x1d506)) + + list(range(0x1d507,0x1d50b)) + + list(range(0x1d50d,0x1d515)) + + list(range(0x1d516,0x1d51d)) + + list(range(0x1d51e,0x1d53a)) + + list(range(0x1d53b,0x1d53f)) + + list(range(0x1d540,0x1d545)) + + [0x1d546] + + list(range(0x1d54a,0x1d551)) + + list(range(0x1d552,0x1d6a6)) + + list(range(0x1d6a8,0x1d7cc)) + + list(range(0x1d7ce,0x1d800)) + + list(range(0x1ee00,0x1ee04)) + + list(range(0x1ee05,0x1ee20)) + + list(range(0x1ee21,0x1ee23)) + + [0x1ee24] + + [0x1ee27] + + list(range(0x1ee29,0x1ee33)) + + list(range(0x1ee34,0x1ee38)) + + [0x1ee39] + + 
[0x1ee3b] + + [0x1ee42] + + [0x1ee47] + + [0x1ee49] + + [0x1ee4b] + + list(range(0x1ee4d,0x1ee50)) + + list(range(0x1ee51,0x1ee53)) + + [0x1ee54] + + [0x1ee57] + + [0x1ee59] + + [0x1ee5b] + + [0x1ee5d] + + [0x1ee5f] + + list(range(0x1ee61,0x1ee63)) + + [0x1ee64] + + list(range(0x1ee67,0x1ee6b)) + + list(range(0x1ee6c,0x1ee73)) + + list(range(0x1ee74,0x1ee78)) + + list(range(0x1ee79,0x1ee7d)) + + [0x1ee7e] + + list(range(0x1ee80,0x1ee8a)) + + list(range(0x1ee8b,0x1ee9c)) + + list(range(0x1eea1,0x1eea4)) + + list(range(0x1eea5,0x1eeaa)) + + list(range(0x1eeab,0x1eebc)) + + list(range(0x1eef0,0x1eef2)) + + list(range(0x1f000,0x1f02c)) + + list(range(0x1f030,0x1f094)) + + list(range(0x1f0a0,0x1f0af)) + + list(range(0x1f0b1,0x1f0bf)) + + list(range(0x1f0c1,0x1f0d0)) + + list(range(0x1f0d1,0x1f0e0)) + + list(range(0x1f100,0x1f10b)) + + list(range(0x1f110,0x1f12f)) + + list(range(0x1f130,0x1f16c)) + + list(range(0x1f170,0x1f19b)) + + list(range(0x1f1e6,0x1f203)) + + list(range(0x1f210,0x1f23b)) + + list(range(0x1f240,0x1f249)) + + list(range(0x1f250,0x1f252)) + + list(range(0x1f300,0x1f321)) + + list(range(0x1f330,0x1f336)) + + list(range(0x1f337,0x1f37d)) + + list(range(0x1f380,0x1f394)) + + list(range(0x1f3a0,0x1f3c5)) + + list(range(0x1f3c6,0x1f3cb)) + + list(range(0x1f3e0,0x1f3f1)) + + list(range(0x1f400,0x1f43f)) + + [0x1f440] + + list(range(0x1f442,0x1f4f8)) + + list(range(0x1f4f9,0x1f4fd)) + + list(range(0x1f500,0x1f53e)) + + list(range(0x1f540,0x1f544)) + + list(range(0x1f550,0x1f568)) + + list(range(0x1f5fb,0x1f641)) + + list(range(0x1f645,0x1f650)) + + list(range(0x1f680,0x1f6c6)) + + list(range(0x1f700,0x1f774)) + + list(range(0x1fffe,0x20000)) + + list(range(0x2f800,0x2fa1e)) + + list(range(0x2fffe,0x30000)) + + list(range(0x3fffe,0x40000)) + + list(range(0x4fffe,0x50000)) + + list(range(0x5fffe,0x60000)) + + list(range(0x6fffe,0x70000)) + + list(range(0x7fffe,0x80000)) + + list(range(0x8fffe,0x90000)) + + list(range(0x9fffe,0xa0000)) + + list(range(0xafffe,0xb0000)) + + list(range(0xbfffe,0xc0000)) + + list(range(0xcfffe,0xd0000)) + + list(range(0xdfffe,0xe0000)) + + [0xe0001] + + list(range(0xe0020,0xe0080)) + + list(range(0xe0100,0xe01f0)) + + list(range(0xefffe,0x110000)) + ), +} diff --git a/Lib/site-packages/idna/uts46data.py b/Lib/site-packages/idna/uts46data.py new file mode 100644 index 0000000..64e2c68 --- /dev/null +++ b/Lib/site-packages/idna/uts46data.py @@ -0,0 +1,7267 @@ +# This file is automatically generated by tools/build-uts46data.py +# vim: set fileencoding=utf-8 : + +"""IDNA Mapping Table from UTS46.""" + +uts46data = ( + (0x0, '3'), + (0x1, '3'), + (0x2, '3'), + (0x3, '3'), + (0x4, '3'), + (0x5, '3'), + (0x6, '3'), + (0x7, '3'), + (0x8, '3'), + (0x9, '3'), + (0xA, '3'), + (0xB, '3'), + (0xC, '3'), + (0xD, '3'), + (0xE, '3'), + (0xF, '3'), + (0x10, '3'), + (0x11, '3'), + (0x12, '3'), + (0x13, '3'), + (0x14, '3'), + (0x15, '3'), + (0x16, '3'), + (0x17, '3'), + (0x18, '3'), + (0x19, '3'), + (0x1A, '3'), + (0x1B, '3'), + (0x1C, '3'), + (0x1D, '3'), + (0x1E, '3'), + (0x1F, '3'), + (0x20, '3'), + (0x21, '3'), + (0x22, '3'), + (0x23, '3'), + (0x24, '3'), + (0x25, '3'), + (0x26, '3'), + (0x27, '3'), + (0x28, '3'), + (0x29, '3'), + (0x2A, '3'), + (0x2B, '3'), + (0x2C, '3'), + (0x2D, 'V'), + (0x2E, 'V'), + (0x2F, '3'), + (0x30, 'V'), + (0x31, 'V'), + (0x32, 'V'), + (0x33, 'V'), + (0x34, 'V'), + (0x35, 'V'), + (0x36, 'V'), + (0x37, 'V'), + (0x38, 'V'), + (0x39, 'V'), + (0x3A, '3'), + (0x3B, '3'), + (0x3C, '3'), + (0x3D, '3'), + (0x3E, '3'), + (0x3F, '3'), + (0x40, '3'), + 
(0x41, 'M', u'a'), + (0x42, 'M', u'b'), + (0x43, 'M', u'c'), + (0x44, 'M', u'd'), + (0x45, 'M', u'e'), + (0x46, 'M', u'f'), + (0x47, 'M', u'g'), + (0x48, 'M', u'h'), + (0x49, 'M', u'i'), + (0x4A, 'M', u'j'), + (0x4B, 'M', u'k'), + (0x4C, 'M', u'l'), + (0x4D, 'M', u'm'), + (0x4E, 'M', u'n'), + (0x4F, 'M', u'o'), + (0x50, 'M', u'p'), + (0x51, 'M', u'q'), + (0x52, 'M', u'r'), + (0x53, 'M', u's'), + (0x54, 'M', u't'), + (0x55, 'M', u'u'), + (0x56, 'M', u'v'), + (0x57, 'M', u'w'), + (0x58, 'M', u'x'), + (0x59, 'M', u'y'), + (0x5A, 'M', u'z'), + (0x5B, '3'), + (0x5C, '3'), + (0x5D, '3'), + (0x5E, '3'), + (0x5F, '3'), + (0x60, '3'), + (0x61, 'V'), + (0x62, 'V'), + (0x63, 'V'), + (0x64, 'V'), + (0x65, 'V'), + (0x66, 'V'), + (0x67, 'V'), + (0x68, 'V'), + (0x69, 'V'), + (0x6A, 'V'), + (0x6B, 'V'), + (0x6C, 'V'), + (0x6D, 'V'), + (0x6E, 'V'), + (0x6F, 'V'), + (0x70, 'V'), + (0x71, 'V'), + (0x72, 'V'), + (0x73, 'V'), + (0x74, 'V'), + (0x75, 'V'), + (0x76, 'V'), + (0x77, 'V'), + (0x78, 'V'), + (0x79, 'V'), + (0x7A, 'V'), + (0x7B, '3'), + (0x7C, '3'), + (0x7D, '3'), + (0x7E, '3'), + (0x7F, '3'), + (0x80, 'X'), + (0x81, 'X'), + (0x82, 'X'), + (0x83, 'X'), + (0x84, 'X'), + (0x85, 'X'), + (0x86, 'X'), + (0x87, 'X'), + (0x88, 'X'), + (0x89, 'X'), + (0x8A, 'X'), + (0x8B, 'X'), + (0x8C, 'X'), + (0x8D, 'X'), + (0x8E, 'X'), + (0x8F, 'X'), + (0x90, 'X'), + (0x91, 'X'), + (0x92, 'X'), + (0x93, 'X'), + (0x94, 'X'), + (0x95, 'X'), + (0x96, 'X'), + (0x97, 'X'), + (0x98, 'X'), + (0x99, 'X'), + (0x9A, 'X'), + (0x9B, 'X'), + (0x9C, 'X'), + (0x9D, 'X'), + (0x9E, 'X'), + (0x9F, 'X'), + (0xA0, '3', u' '), + (0xA1, 'V'), + (0xA2, 'V'), + (0xA3, 'V'), + (0xA4, 'V'), + (0xA5, 'V'), + (0xA6, 'V'), + (0xA7, 'V'), + (0xA8, '3', u' ̈'), + (0xA9, 'V'), + (0xAA, 'M', u'a'), + (0xAB, 'V'), + (0xAC, 'V'), + (0xAD, 'I'), + (0xAE, 'V'), + (0xAF, '3', u' Ì„'), + (0xB0, 'V'), + (0xB1, 'V'), + (0xB2, 'M', u'2'), + (0xB3, 'M', u'3'), + (0xB4, '3', u' Ì'), + (0xB5, 'M', u'μ'), + (0xB6, 'V'), + (0xB7, 'V'), + (0xB8, '3', u' ̧'), + (0xB9, 'M', u'1'), + (0xBA, 'M', u'o'), + (0xBB, 'V'), + (0xBC, 'M', u'1â„4'), + (0xBD, 'M', u'1â„2'), + (0xBE, 'M', u'3â„4'), + (0xBF, 'V'), + (0xC0, 'M', u'à'), + (0xC1, 'M', u'á'), + (0xC2, 'M', u'â'), + (0xC3, 'M', u'ã'), + (0xC4, 'M', u'ä'), + (0xC5, 'M', u'Ã¥'), + (0xC6, 'M', u'æ'), + (0xC7, 'M', u'ç'), + (0xC8, 'M', u'è'), + (0xC9, 'M', u'é'), + (0xCA, 'M', u'ê'), + (0xCB, 'M', u'ë'), + (0xCC, 'M', u'ì'), + (0xCD, 'M', u'í'), + (0xCE, 'M', u'î'), + (0xCF, 'M', u'ï'), + (0xD0, 'M', u'ð'), + (0xD1, 'M', u'ñ'), + (0xD2, 'M', u'ò'), + (0xD3, 'M', u'ó'), + (0xD4, 'M', u'ô'), + (0xD5, 'M', u'õ'), + (0xD6, 'M', u'ö'), + (0xD7, 'V'), + (0xD8, 'M', u'ø'), + (0xD9, 'M', u'ù'), + (0xDA, 'M', u'ú'), + (0xDB, 'M', u'û'), + (0xDC, 'M', u'ü'), + (0xDD, 'M', u'ý'), + (0xDE, 'M', u'þ'), + (0xDF, 'D', u'ss'), + (0xE0, 'V'), + (0xE1, 'V'), + (0xE2, 'V'), + (0xE3, 'V'), + (0xE4, 'V'), + (0xE5, 'V'), + (0xE6, 'V'), + (0xE7, 'V'), + (0xE8, 'V'), + (0xE9, 'V'), + (0xEA, 'V'), + (0xEB, 'V'), + (0xEC, 'V'), + (0xED, 'V'), + (0xEE, 'V'), + (0xEF, 'V'), + (0xF0, 'V'), + (0xF1, 'V'), + (0xF2, 'V'), + (0xF3, 'V'), + (0xF4, 'V'), + (0xF5, 'V'), + (0xF6, 'V'), + (0xF7, 'V'), + (0xF8, 'V'), + (0xF9, 'V'), + (0xFA, 'V'), + (0xFB, 'V'), + (0xFC, 'V'), + (0xFD, 'V'), + (0xFE, 'V'), + (0xFF, 'V'), + (0x100, 'M', u'Ä'), + (0x101, 'V'), + (0x102, 'M', u'ă'), + (0x103, 'V'), + (0x104, 'M', u'Ä…'), + (0x105, 'V'), + (0x106, 'M', u'ć'), + (0x107, 'V'), + (0x108, 'M', u'ĉ'), + (0x109, 'V'), + (0x10A, 'M', u'Ä‹'), + (0x10B, 'V'), + (0x10C, 'M', 
u'Ä'), + (0x10D, 'V'), + (0x10E, 'M', u'Ä'), + (0x10F, 'V'), + (0x110, 'M', u'Ä‘'), + (0x111, 'V'), + (0x112, 'M', u'Ä“'), + (0x113, 'V'), + (0x114, 'M', u'Ä•'), + (0x115, 'V'), + (0x116, 'M', u'Ä—'), + (0x117, 'V'), + (0x118, 'M', u'Ä™'), + (0x119, 'V'), + (0x11A, 'M', u'Ä›'), + (0x11B, 'V'), + (0x11C, 'M', u'Ä'), + (0x11D, 'V'), + (0x11E, 'M', u'ÄŸ'), + (0x11F, 'V'), + (0x120, 'M', u'Ä¡'), + (0x121, 'V'), + (0x122, 'M', u'Ä£'), + (0x123, 'V'), + (0x124, 'M', u'Ä¥'), + (0x125, 'V'), + (0x126, 'M', u'ħ'), + (0x127, 'V'), + (0x128, 'M', u'Ä©'), + (0x129, 'V'), + (0x12A, 'M', u'Ä«'), + (0x12B, 'V'), + (0x12C, 'M', u'Ä­'), + (0x12D, 'V'), + (0x12E, 'M', u'į'), + (0x12F, 'V'), + (0x130, 'M', u'i̇'), + (0x131, 'V'), + (0x132, 'M', u'ij'), + (0x134, 'M', u'ĵ'), + (0x135, 'V'), + (0x136, 'M', u'Ä·'), + (0x137, 'V'), + (0x139, 'M', u'ĺ'), + (0x13A, 'V'), + (0x13B, 'M', u'ļ'), + (0x13C, 'V'), + (0x13D, 'M', u'ľ'), + (0x13E, 'V'), + (0x13F, 'M', u'l·'), + (0x141, 'M', u'Å‚'), + (0x142, 'V'), + (0x143, 'M', u'Å„'), + (0x144, 'V'), + (0x145, 'M', u'ņ'), + (0x146, 'V'), + (0x147, 'M', u'ň'), + (0x148, 'V'), + (0x149, 'M', u'ʼn'), + (0x14A, 'M', u'Å‹'), + (0x14B, 'V'), + (0x14C, 'M', u'Å'), + (0x14D, 'V'), + (0x14E, 'M', u'Å'), + (0x14F, 'V'), + (0x150, 'M', u'Å‘'), + (0x151, 'V'), + (0x152, 'M', u'Å“'), + (0x153, 'V'), + (0x154, 'M', u'Å•'), + (0x155, 'V'), + (0x156, 'M', u'Å—'), + (0x157, 'V'), + (0x158, 'M', u'Å™'), + (0x159, 'V'), + (0x15A, 'M', u'Å›'), + (0x15B, 'V'), + (0x15C, 'M', u'Å'), + (0x15D, 'V'), + (0x15E, 'M', u'ÅŸ'), + (0x15F, 'V'), + (0x160, 'M', u'Å¡'), + (0x161, 'V'), + (0x162, 'M', u'Å£'), + (0x163, 'V'), + (0x164, 'M', u'Å¥'), + (0x165, 'V'), + (0x166, 'M', u'ŧ'), + (0x167, 'V'), + (0x168, 'M', u'Å©'), + (0x169, 'V'), + (0x16A, 'M', u'Å«'), + (0x16B, 'V'), + (0x16C, 'M', u'Å­'), + (0x16D, 'V'), + (0x16E, 'M', u'ů'), + (0x16F, 'V'), + (0x170, 'M', u'ű'), + (0x171, 'V'), + (0x172, 'M', u'ų'), + (0x173, 'V'), + (0x174, 'M', u'ŵ'), + (0x175, 'V'), + (0x176, 'M', u'Å·'), + (0x177, 'V'), + (0x178, 'M', u'ÿ'), + (0x179, 'M', u'ź'), + (0x17A, 'V'), + (0x17B, 'M', u'ż'), + (0x17C, 'V'), + (0x17D, 'M', u'ž'), + (0x17E, 'V'), + (0x17F, 'M', u's'), + (0x180, 'V'), + (0x181, 'M', u'É“'), + (0x182, 'M', u'ƃ'), + (0x183, 'V'), + (0x184, 'M', u'Æ…'), + (0x185, 'V'), + (0x186, 'M', u'É”'), + (0x187, 'M', u'ƈ'), + (0x188, 'V'), + (0x189, 'M', u'É–'), + (0x18A, 'M', u'É—'), + (0x18B, 'M', u'ÆŒ'), + (0x18C, 'V'), + (0x18E, 'M', u'Ç'), + (0x18F, 'M', u'É™'), + (0x190, 'M', u'É›'), + (0x191, 'M', u'Æ’'), + (0x192, 'V'), + (0x193, 'M', u'É '), + (0x194, 'M', u'É£'), + (0x195, 'V'), + (0x196, 'M', u'É©'), + (0x197, 'M', u'ɨ'), + (0x198, 'M', u'Æ™'), + (0x199, 'V'), + (0x19C, 'M', u'ɯ'), + (0x19D, 'M', u'ɲ'), + (0x19E, 'V'), + (0x19F, 'M', u'ɵ'), + (0x1A0, 'M', u'Æ¡'), + (0x1A1, 'V'), + (0x1A2, 'M', u'Æ£'), + (0x1A3, 'V'), + (0x1A4, 'M', u'Æ¥'), + (0x1A5, 'V'), + (0x1A6, 'M', u'Ê€'), + (0x1A7, 'M', u'ƨ'), + (0x1A8, 'V'), + (0x1A9, 'M', u'ʃ'), + (0x1AA, 'V'), + (0x1AC, 'M', u'Æ­'), + (0x1AD, 'V'), + (0x1AE, 'M', u'ʈ'), + (0x1AF, 'M', u'Æ°'), + (0x1B0, 'V'), + (0x1B1, 'M', u'ÊŠ'), + (0x1B2, 'M', u'Ê‹'), + (0x1B3, 'M', u'Æ´'), + (0x1B4, 'V'), + (0x1B5, 'M', u'ƶ'), + (0x1B6, 'V'), + (0x1B7, 'M', u'Ê’'), + (0x1B8, 'M', u'ƹ'), + (0x1B9, 'V'), + (0x1BC, 'M', u'ƽ'), + (0x1BD, 'V'), + (0x1C4, 'M', u'dž'), + (0x1C7, 'M', u'lj'), + (0x1CA, 'M', u'nj'), + (0x1CD, 'M', u'ÇŽ'), + (0x1CE, 'V'), + (0x1CF, 'M', u'Ç'), + (0x1D0, 'V'), + (0x1D1, 'M', u'Ç’'), + (0x1D2, 'V'), + (0x1D3, 'M', u'Ç”'), + (0x1D4, 'V'), + (0x1D5, 
'M', u'Ç–'), + (0x1D6, 'V'), + (0x1D7, 'M', u'ǘ'), + (0x1D8, 'V'), + (0x1D9, 'M', u'Çš'), + (0x1DA, 'V'), + (0x1DB, 'M', u'Çœ'), + (0x1DC, 'V'), + (0x1DE, 'M', u'ÇŸ'), + (0x1DF, 'V'), + (0x1E0, 'M', u'Ç¡'), + (0x1E1, 'V'), + (0x1E2, 'M', u'Ç£'), + (0x1E3, 'V'), + (0x1E4, 'M', u'Ç¥'), + (0x1E5, 'V'), + (0x1E6, 'M', u'ǧ'), + (0x1E7, 'V'), + (0x1E8, 'M', u'Ç©'), + (0x1E9, 'V'), + (0x1EA, 'M', u'Ç«'), + (0x1EB, 'V'), + (0x1EC, 'M', u'Ç­'), + (0x1ED, 'V'), + (0x1EE, 'M', u'ǯ'), + (0x1EF, 'V'), + (0x1F1, 'M', u'dz'), + (0x1F4, 'M', u'ǵ'), + (0x1F5, 'V'), + (0x1F6, 'M', u'Æ•'), + (0x1F7, 'M', u'Æ¿'), + (0x1F8, 'M', u'ǹ'), + (0x1F9, 'V'), + (0x1FA, 'M', u'Ç»'), + (0x1FB, 'V'), + (0x1FC, 'M', u'ǽ'), + (0x1FD, 'V'), + (0x1FE, 'M', u'Ç¿'), + (0x1FF, 'V'), + (0x200, 'M', u'È'), + (0x201, 'V'), + (0x202, 'M', u'ȃ'), + (0x203, 'V'), + (0x204, 'M', u'È…'), + (0x205, 'V'), + (0x206, 'M', u'ȇ'), + (0x207, 'V'), + (0x208, 'M', u'ȉ'), + (0x209, 'V'), + (0x20A, 'M', u'È‹'), + (0x20B, 'V'), + (0x20C, 'M', u'È'), + (0x20D, 'V'), + (0x20E, 'M', u'È'), + (0x20F, 'V'), + (0x210, 'M', u'È‘'), + (0x211, 'V'), + (0x212, 'M', u'È“'), + (0x213, 'V'), + (0x214, 'M', u'È•'), + (0x215, 'V'), + (0x216, 'M', u'È—'), + (0x217, 'V'), + (0x218, 'M', u'È™'), + (0x219, 'V'), + (0x21A, 'M', u'È›'), + (0x21B, 'V'), + (0x21C, 'M', u'È'), + (0x21D, 'V'), + (0x21E, 'M', u'ÈŸ'), + (0x21F, 'V'), + (0x220, 'M', u'Æž'), + (0x221, 'V'), + (0x222, 'M', u'È£'), + (0x223, 'V'), + (0x224, 'M', u'È¥'), + (0x225, 'V'), + (0x226, 'M', u'ȧ'), + (0x227, 'V'), + (0x228, 'M', u'È©'), + (0x229, 'V'), + (0x22A, 'M', u'È«'), + (0x22B, 'V'), + (0x22C, 'M', u'È­'), + (0x22D, 'V'), + (0x22E, 'M', u'ȯ'), + (0x22F, 'V'), + (0x230, 'M', u'ȱ'), + (0x231, 'V'), + (0x232, 'M', u'ȳ'), + (0x233, 'V'), + (0x23A, 'M', u'â±¥'), + (0x23B, 'M', u'ȼ'), + (0x23C, 'V'), + (0x23D, 'M', u'Æš'), + (0x23E, 'M', u'ⱦ'), + (0x23F, 'V'), + (0x241, 'M', u'É‚'), + (0x242, 'V'), + (0x243, 'M', u'Æ€'), + (0x244, 'M', u'ʉ'), + (0x245, 'M', u'ÊŒ'), + (0x246, 'M', u'ɇ'), + (0x247, 'V'), + (0x248, 'M', u'ɉ'), + (0x249, 'V'), + (0x24A, 'M', u'É‹'), + (0x24B, 'V'), + (0x24C, 'M', u'É'), + (0x24D, 'V'), + (0x24E, 'M', u'É'), + (0x24F, 'V'), + (0x2B0, 'M', u'h'), + (0x2B1, 'M', u'ɦ'), + (0x2B2, 'M', u'j'), + (0x2B3, 'M', u'r'), + (0x2B4, 'M', u'ɹ'), + (0x2B5, 'M', u'É»'), + (0x2B6, 'M', u'Ê'), + (0x2B7, 'M', u'w'), + (0x2B8, 'M', u'y'), + (0x2B9, 'V'), + (0x2D8, '3', u' ̆'), + (0x2D9, '3', u' ̇'), + (0x2DA, '3', u' ÌŠ'), + (0x2DB, '3', u' ̨'), + (0x2DC, '3', u' ̃'), + (0x2DD, '3', u' Ì‹'), + (0x2DE, 'V'), + (0x2E0, 'M', u'É£'), + (0x2E1, 'M', u'l'), + (0x2E2, 'M', u's'), + (0x2E3, 'M', u'x'), + (0x2E4, 'M', u'Ê•'), + (0x2E5, 'V'), + (0x340, 'M', u'Ì€'), + (0x341, 'M', u'Ì'), + (0x342, 'V'), + (0x343, 'M', u'Ì“'), + (0x344, 'M', u'̈Ì'), + (0x345, 'M', u'ι'), + (0x346, 'V'), + (0x34F, 'I'), + (0x350, 'V'), + (0x370, 'M', u'ͱ'), + (0x371, 'V'), + (0x372, 'M', u'ͳ'), + (0x373, 'V'), + (0x374, 'M', u'ʹ'), + (0x375, 'V'), + (0x376, 'M', u'Í·'), + (0x377, 'V'), + (0x378, 'X'), + (0x37A, '3', u' ι'), + (0x37B, 'V'), + (0x37E, '3', u';'), + (0x37F, 'X'), + (0x384, '3', u' Ì'), + (0x385, '3', u' ̈Ì'), + (0x386, 'M', u'ά'), + (0x387, 'M', u'·'), + (0x388, 'M', u'έ'), + (0x389, 'M', u'ή'), + (0x38A, 'M', u'ί'), + (0x38B, 'X'), + (0x38C, 'M', u'ÏŒ'), + (0x38D, 'X'), + (0x38E, 'M', u'Ï'), + (0x38F, 'M', u'ÏŽ'), + (0x390, 'V'), + (0x391, 'M', u'α'), + (0x392, 'M', u'β'), + (0x393, 'M', u'γ'), + (0x394, 'M', u'δ'), + (0x395, 'M', u'ε'), + (0x396, 'M', u'ζ'), + (0x397, 'M', u'η'), + (0x398, 'M', u'θ'), + 
(0x399, 'M', u'ι'), + (0x39A, 'M', u'κ'), + (0x39B, 'M', u'λ'), + (0x39C, 'M', u'μ'), + (0x39D, 'M', u'ν'), + (0x39E, 'M', u'ξ'), + (0x39F, 'M', u'ο'), + (0x3A0, 'M', u'Ï€'), + (0x3A1, 'M', u'Ï'), + (0x3A2, 'X'), + (0x3A3, 'M', u'σ'), + (0x3A4, 'M', u'Ï„'), + (0x3A5, 'M', u'Ï…'), + (0x3A6, 'M', u'φ'), + (0x3A7, 'M', u'χ'), + (0x3A8, 'M', u'ψ'), + (0x3A9, 'M', u'ω'), + (0x3AA, 'M', u'ÏŠ'), + (0x3AB, 'M', u'Ï‹'), + (0x3AC, 'V'), + (0x3C2, 'D', u'σ'), + (0x3C3, 'V'), + (0x3CF, 'M', u'Ï—'), + (0x3D0, 'M', u'β'), + (0x3D1, 'M', u'θ'), + (0x3D2, 'M', u'Ï…'), + (0x3D3, 'M', u'Ï'), + (0x3D4, 'M', u'Ï‹'), + (0x3D5, 'M', u'φ'), + (0x3D6, 'M', u'Ï€'), + (0x3D7, 'V'), + (0x3D8, 'M', u'Ï™'), + (0x3D9, 'V'), + (0x3DA, 'M', u'Ï›'), + (0x3DB, 'V'), + (0x3DC, 'M', u'Ï'), + (0x3DD, 'V'), + (0x3DE, 'M', u'ÏŸ'), + (0x3DF, 'V'), + (0x3E0, 'M', u'Ï¡'), + (0x3E1, 'V'), + (0x3E2, 'M', u'Ï£'), + (0x3E3, 'V'), + (0x3E4, 'M', u'Ï¥'), + (0x3E5, 'V'), + (0x3E6, 'M', u'ϧ'), + (0x3E7, 'V'), + (0x3E8, 'M', u'Ï©'), + (0x3E9, 'V'), + (0x3EA, 'M', u'Ï«'), + (0x3EB, 'V'), + (0x3EC, 'M', u'Ï­'), + (0x3ED, 'V'), + (0x3EE, 'M', u'ϯ'), + (0x3EF, 'V'), + (0x3F0, 'M', u'κ'), + (0x3F1, 'M', u'Ï'), + (0x3F2, 'M', u'σ'), + (0x3F3, 'V'), + (0x3F4, 'M', u'θ'), + (0x3F5, 'M', u'ε'), + (0x3F6, 'V'), + (0x3F7, 'M', u'ϸ'), + (0x3F8, 'V'), + (0x3F9, 'M', u'σ'), + (0x3FA, 'M', u'Ï»'), + (0x3FB, 'V'), + (0x3FD, 'M', u'Í»'), + (0x3FE, 'M', u'ͼ'), + (0x3FF, 'M', u'ͽ'), + (0x400, 'M', u'Ñ'), + (0x401, 'M', u'Ñ‘'), + (0x402, 'M', u'Ñ’'), + (0x403, 'M', u'Ñ“'), + (0x404, 'M', u'Ñ”'), + (0x405, 'M', u'Ñ•'), + (0x406, 'M', u'Ñ–'), + (0x407, 'M', u'Ñ—'), + (0x408, 'M', u'ј'), + (0x409, 'M', u'Ñ™'), + (0x40A, 'M', u'Ñš'), + (0x40B, 'M', u'Ñ›'), + (0x40C, 'M', u'Ñœ'), + (0x40D, 'M', u'Ñ'), + (0x40E, 'M', u'Ñž'), + (0x40F, 'M', u'ÑŸ'), + (0x410, 'M', u'а'), + (0x411, 'M', u'б'), + (0x412, 'M', u'в'), + (0x413, 'M', u'г'), + (0x414, 'M', u'д'), + (0x415, 'M', u'е'), + (0x416, 'M', u'ж'), + (0x417, 'M', u'з'), + (0x418, 'M', u'и'), + (0x419, 'M', u'й'), + (0x41A, 'M', u'к'), + (0x41B, 'M', u'л'), + (0x41C, 'M', u'м'), + (0x41D, 'M', u'н'), + (0x41E, 'M', u'о'), + (0x41F, 'M', u'п'), + (0x420, 'M', u'Ñ€'), + (0x421, 'M', u'Ñ'), + (0x422, 'M', u'Ñ‚'), + (0x423, 'M', u'у'), + (0x424, 'M', u'Ñ„'), + (0x425, 'M', u'Ñ…'), + (0x426, 'M', u'ц'), + (0x427, 'M', u'ч'), + (0x428, 'M', u'ш'), + (0x429, 'M', u'щ'), + (0x42A, 'M', u'ÑŠ'), + (0x42B, 'M', u'Ñ‹'), + (0x42C, 'M', u'ÑŒ'), + (0x42D, 'M', u'Ñ'), + (0x42E, 'M', u'ÑŽ'), + (0x42F, 'M', u'Ñ'), + (0x430, 'V'), + (0x460, 'M', u'Ñ¡'), + (0x461, 'V'), + (0x462, 'M', u'Ñ£'), + (0x463, 'V'), + (0x464, 'M', u'Ñ¥'), + (0x465, 'V'), + (0x466, 'M', u'ѧ'), + (0x467, 'V'), + (0x468, 'M', u'Ñ©'), + (0x469, 'V'), + (0x46A, 'M', u'Ñ«'), + (0x46B, 'V'), + (0x46C, 'M', u'Ñ­'), + (0x46D, 'V'), + (0x46E, 'M', u'ѯ'), + (0x46F, 'V'), + (0x470, 'M', u'ѱ'), + (0x471, 'V'), + (0x472, 'M', u'ѳ'), + (0x473, 'V'), + (0x474, 'M', u'ѵ'), + (0x475, 'V'), + (0x476, 'M', u'Ñ·'), + (0x477, 'V'), + (0x478, 'M', u'ѹ'), + (0x479, 'V'), + (0x47A, 'M', u'Ñ»'), + (0x47B, 'V'), + (0x47C, 'M', u'ѽ'), + (0x47D, 'V'), + (0x47E, 'M', u'Ñ¿'), + (0x47F, 'V'), + (0x480, 'M', u'Ò'), + (0x481, 'V'), + (0x48A, 'M', u'Ò‹'), + (0x48B, 'V'), + (0x48C, 'M', u'Ò'), + (0x48D, 'V'), + (0x48E, 'M', u'Ò'), + (0x48F, 'V'), + (0x490, 'M', u'Ò‘'), + (0x491, 'V'), + (0x492, 'M', u'Ò“'), + (0x493, 'V'), + (0x494, 'M', u'Ò•'), + (0x495, 'V'), + (0x496, 'M', u'Ò—'), + (0x497, 'V'), + (0x498, 'M', u'Ò™'), + (0x499, 'V'), + (0x49A, 'M', u'Ò›'), + (0x49B, 'V'), + (0x49C, 'M', 
u'Ò'), + (0x49D, 'V'), + (0x49E, 'M', u'ÒŸ'), + (0x49F, 'V'), + (0x4A0, 'M', u'Ò¡'), + (0x4A1, 'V'), + (0x4A2, 'M', u'Ò£'), + (0x4A3, 'V'), + (0x4A4, 'M', u'Ò¥'), + (0x4A5, 'V'), + (0x4A6, 'M', u'Ò§'), + (0x4A7, 'V'), + (0x4A8, 'M', u'Ò©'), + (0x4A9, 'V'), + (0x4AA, 'M', u'Ò«'), + (0x4AB, 'V'), + (0x4AC, 'M', u'Ò­'), + (0x4AD, 'V'), + (0x4AE, 'M', u'Ò¯'), + (0x4AF, 'V'), + (0x4B0, 'M', u'Ò±'), + (0x4B1, 'V'), + (0x4B2, 'M', u'Ò³'), + (0x4B3, 'V'), + (0x4B4, 'M', u'Òµ'), + (0x4B5, 'V'), + (0x4B6, 'M', u'Ò·'), + (0x4B7, 'V'), + (0x4B8, 'M', u'Ò¹'), + (0x4B9, 'V'), + (0x4BA, 'M', u'Ò»'), + (0x4BB, 'V'), + (0x4BC, 'M', u'Ò½'), + (0x4BD, 'V'), + (0x4BE, 'M', u'Ò¿'), + (0x4BF, 'V'), + (0x4C0, 'X'), + (0x4C1, 'M', u'Ó‚'), + (0x4C2, 'V'), + (0x4C3, 'M', u'Ó„'), + (0x4C4, 'V'), + (0x4C5, 'M', u'Ó†'), + (0x4C6, 'V'), + (0x4C7, 'M', u'Óˆ'), + (0x4C8, 'V'), + (0x4C9, 'M', u'ÓŠ'), + (0x4CA, 'V'), + (0x4CB, 'M', u'ÓŒ'), + (0x4CC, 'V'), + (0x4CD, 'M', u'ÓŽ'), + (0x4CE, 'V'), + (0x4D0, 'M', u'Ó‘'), + (0x4D1, 'V'), + (0x4D2, 'M', u'Ó“'), + (0x4D3, 'V'), + (0x4D4, 'M', u'Ó•'), + (0x4D5, 'V'), + (0x4D6, 'M', u'Ó—'), + (0x4D7, 'V'), + (0x4D8, 'M', u'Ó™'), + (0x4D9, 'V'), + (0x4DA, 'M', u'Ó›'), + (0x4DB, 'V'), + (0x4DC, 'M', u'Ó'), + (0x4DD, 'V'), + (0x4DE, 'M', u'ÓŸ'), + (0x4DF, 'V'), + (0x4E0, 'M', u'Ó¡'), + (0x4E1, 'V'), + (0x4E2, 'M', u'Ó£'), + (0x4E3, 'V'), + (0x4E4, 'M', u'Ó¥'), + (0x4E5, 'V'), + (0x4E6, 'M', u'Ó§'), + (0x4E7, 'V'), + (0x4E8, 'M', u'Ó©'), + (0x4E9, 'V'), + (0x4EA, 'M', u'Ó«'), + (0x4EB, 'V'), + (0x4EC, 'M', u'Ó­'), + (0x4ED, 'V'), + (0x4EE, 'M', u'Ó¯'), + (0x4EF, 'V'), + (0x4F0, 'M', u'Ó±'), + (0x4F1, 'V'), + (0x4F2, 'M', u'Ó³'), + (0x4F3, 'V'), + (0x4F4, 'M', u'Óµ'), + (0x4F5, 'V'), + (0x4F6, 'M', u'Ó·'), + (0x4F7, 'V'), + (0x4F8, 'M', u'Ó¹'), + (0x4F9, 'V'), + (0x4FA, 'M', u'Ó»'), + (0x4FB, 'V'), + (0x4FC, 'M', u'Ó½'), + (0x4FD, 'V'), + (0x4FE, 'M', u'Ó¿'), + (0x4FF, 'V'), + (0x500, 'M', u'Ô'), + (0x501, 'V'), + (0x502, 'M', u'Ôƒ'), + (0x503, 'V'), + (0x504, 'M', u'Ô…'), + (0x505, 'V'), + (0x506, 'M', u'Ô‡'), + (0x507, 'V'), + (0x508, 'M', u'Ô‰'), + (0x509, 'V'), + (0x50A, 'M', u'Ô‹'), + (0x50B, 'V'), + (0x50C, 'M', u'Ô'), + (0x50D, 'V'), + (0x50E, 'M', u'Ô'), + (0x50F, 'V'), + (0x510, 'M', u'Ô‘'), + (0x511, 'V'), + (0x512, 'M', u'Ô“'), + (0x513, 'V'), + (0x514, 'M', u'Ô•'), + (0x515, 'V'), + (0x516, 'M', u'Ô—'), + (0x517, 'V'), + (0x518, 'M', u'Ô™'), + (0x519, 'V'), + (0x51A, 'M', u'Ô›'), + (0x51B, 'V'), + (0x51C, 'M', u'Ô'), + (0x51D, 'V'), + (0x51E, 'M', u'ÔŸ'), + (0x51F, 'V'), + (0x520, 'M', u'Ô¡'), + (0x521, 'V'), + (0x522, 'M', u'Ô£'), + (0x523, 'V'), + (0x524, 'M', u'Ô¥'), + (0x525, 'V'), + (0x526, 'M', u'Ô§'), + (0x527, 'V'), + (0x528, 'X'), + (0x531, 'M', u'Õ¡'), + (0x532, 'M', u'Õ¢'), + (0x533, 'M', u'Õ£'), + (0x534, 'M', u'Õ¤'), + (0x535, 'M', u'Õ¥'), + (0x536, 'M', u'Õ¦'), + (0x537, 'M', u'Õ§'), + (0x538, 'M', u'Õ¨'), + (0x539, 'M', u'Õ©'), + (0x53A, 'M', u'Õª'), + (0x53B, 'M', u'Õ«'), + (0x53C, 'M', u'Õ¬'), + (0x53D, 'M', u'Õ­'), + (0x53E, 'M', u'Õ®'), + (0x53F, 'M', u'Õ¯'), + (0x540, 'M', u'Õ°'), + (0x541, 'M', u'Õ±'), + (0x542, 'M', u'Õ²'), + (0x543, 'M', u'Õ³'), + (0x544, 'M', u'Õ´'), + (0x545, 'M', u'Õµ'), + (0x546, 'M', u'Õ¶'), + (0x547, 'M', u'Õ·'), + (0x548, 'M', u'Õ¸'), + (0x549, 'M', u'Õ¹'), + (0x54A, 'M', u'Õº'), + (0x54B, 'M', u'Õ»'), + (0x54C, 'M', u'Õ¼'), + (0x54D, 'M', u'Õ½'), + (0x54E, 'M', u'Õ¾'), + (0x54F, 'M', u'Õ¿'), + (0x550, 'M', u'Ö€'), + (0x551, 'M', u'Ö'), + (0x552, 'M', u'Ö‚'), + (0x553, 'M', u'Öƒ'), + (0x554, 'M', u'Ö„'), + (0x555, 'M', u'Ö…'), + 
(0x556, 'M', u'Ö†'), + (0x557, 'X'), + (0x559, 'V'), + (0x560, 'X'), + (0x561, 'V'), + (0x587, 'M', u'Õ¥Ö‚'), + (0x588, 'X'), + (0x589, 'V'), + (0x58B, 'X'), + (0x58F, 'V'), + (0x590, 'X'), + (0x591, 'V'), + (0x5C8, 'X'), + (0x5D0, 'V'), + (0x5EB, 'X'), + (0x5F0, 'V'), + (0x5F5, 'X'), + (0x606, 'V'), + (0x61C, 'X'), + (0x61E, 'V'), + (0x675, 'M', u'اٴ'), + (0x676, 'M', u'وٴ'), + (0x677, 'M', u'Û‡Ù´'), + (0x678, 'M', u'يٴ'), + (0x679, 'V'), + (0x6DD, 'X'), + (0x6DE, 'V'), + (0x70E, 'X'), + (0x710, 'V'), + (0x74B, 'X'), + (0x74D, 'V'), + (0x7B2, 'X'), + (0x7C0, 'V'), + (0x7FB, 'X'), + (0x800, 'V'), + (0x82E, 'X'), + (0x830, 'V'), + (0x83F, 'X'), + (0x840, 'V'), + (0x85C, 'X'), + (0x85E, 'V'), + (0x85F, 'X'), + (0x8A0, 'V'), + (0x8A1, 'X'), + (0x8A2, 'V'), + (0x8AD, 'X'), + (0x8E4, 'V'), + (0x8FF, 'X'), + (0x900, 'V'), + (0x958, 'M', u'क़'), + (0x959, 'M', u'ख़'), + (0x95A, 'M', u'ग़'), + (0x95B, 'M', u'ज़'), + (0x95C, 'M', u'ड़'), + (0x95D, 'M', u'ढ़'), + (0x95E, 'M', u'फ़'), + (0x95F, 'M', u'य़'), + (0x960, 'V'), + (0x978, 'X'), + (0x979, 'V'), + (0x980, 'X'), + (0x981, 'V'), + (0x984, 'X'), + (0x985, 'V'), + (0x98D, 'X'), + (0x98F, 'V'), + (0x991, 'X'), + (0x993, 'V'), + (0x9A9, 'X'), + (0x9AA, 'V'), + (0x9B1, 'X'), + (0x9B2, 'V'), + (0x9B3, 'X'), + (0x9B6, 'V'), + (0x9BA, 'X'), + (0x9BC, 'V'), + (0x9C5, 'X'), + (0x9C7, 'V'), + (0x9C9, 'X'), + (0x9CB, 'V'), + (0x9CF, 'X'), + (0x9D7, 'V'), + (0x9D8, 'X'), + (0x9DC, 'M', u'ড়'), + (0x9DD, 'M', u'ঢ়'), + (0x9DE, 'X'), + (0x9DF, 'M', u'য়'), + (0x9E0, 'V'), + (0x9E4, 'X'), + (0x9E6, 'V'), + (0x9FC, 'X'), + (0xA01, 'V'), + (0xA04, 'X'), + (0xA05, 'V'), + (0xA0B, 'X'), + (0xA0F, 'V'), + (0xA11, 'X'), + (0xA13, 'V'), + (0xA29, 'X'), + (0xA2A, 'V'), + (0xA31, 'X'), + (0xA32, 'V'), + (0xA33, 'M', u'ਲ਼'), + (0xA34, 'X'), + (0xA35, 'V'), + (0xA36, 'M', u'ਸ਼'), + (0xA37, 'X'), + (0xA38, 'V'), + (0xA3A, 'X'), + (0xA3C, 'V'), + (0xA3D, 'X'), + (0xA3E, 'V'), + (0xA43, 'X'), + (0xA47, 'V'), + (0xA49, 'X'), + (0xA4B, 'V'), + (0xA4E, 'X'), + (0xA51, 'V'), + (0xA52, 'X'), + (0xA59, 'M', u'ਖ਼'), + (0xA5A, 'M', u'ਗ਼'), + (0xA5B, 'M', u'ਜ਼'), + (0xA5C, 'V'), + (0xA5D, 'X'), + (0xA5E, 'M', u'ਫ਼'), + (0xA5F, 'X'), + (0xA66, 'V'), + (0xA76, 'X'), + (0xA81, 'V'), + (0xA84, 'X'), + (0xA85, 'V'), + (0xA8E, 'X'), + (0xA8F, 'V'), + (0xA92, 'X'), + (0xA93, 'V'), + (0xAA9, 'X'), + (0xAAA, 'V'), + (0xAB1, 'X'), + (0xAB2, 'V'), + (0xAB4, 'X'), + (0xAB5, 'V'), + (0xABA, 'X'), + (0xABC, 'V'), + (0xAC6, 'X'), + (0xAC7, 'V'), + (0xACA, 'X'), + (0xACB, 'V'), + (0xACE, 'X'), + (0xAD0, 'V'), + (0xAD1, 'X'), + (0xAE0, 'V'), + (0xAE4, 'X'), + (0xAE6, 'V'), + (0xAF2, 'X'), + (0xB01, 'V'), + (0xB04, 'X'), + (0xB05, 'V'), + (0xB0D, 'X'), + (0xB0F, 'V'), + (0xB11, 'X'), + (0xB13, 'V'), + (0xB29, 'X'), + (0xB2A, 'V'), + (0xB31, 'X'), + (0xB32, 'V'), + (0xB34, 'X'), + (0xB35, 'V'), + (0xB3A, 'X'), + (0xB3C, 'V'), + (0xB45, 'X'), + (0xB47, 'V'), + (0xB49, 'X'), + (0xB4B, 'V'), + (0xB4E, 'X'), + (0xB56, 'V'), + (0xB58, 'X'), + (0xB5C, 'M', u'ଡ଼'), + (0xB5D, 'M', u'ଢ଼'), + (0xB5E, 'X'), + (0xB5F, 'V'), + (0xB64, 'X'), + (0xB66, 'V'), + (0xB78, 'X'), + (0xB82, 'V'), + (0xB84, 'X'), + (0xB85, 'V'), + (0xB8B, 'X'), + (0xB8E, 'V'), + (0xB91, 'X'), + (0xB92, 'V'), + (0xB96, 'X'), + (0xB99, 'V'), + (0xB9B, 'X'), + (0xB9C, 'V'), + (0xB9D, 'X'), + (0xB9E, 'V'), + (0xBA0, 'X'), + (0xBA3, 'V'), + (0xBA5, 'X'), + (0xBA8, 'V'), + (0xBAB, 'X'), + (0xBAE, 'V'), + (0xBBA, 'X'), + (0xBBE, 'V'), + (0xBC3, 'X'), + (0xBC6, 'V'), + (0xBC9, 'X'), + (0xBCA, 'V'), + (0xBCE, 'X'), + (0xBD0, 'V'), + (0xBD1, 'X'), + 
(0xBD7, 'V'), + (0xBD8, 'X'), + (0xBE6, 'V'), + (0xBFB, 'X'), + (0xC01, 'V'), + (0xC04, 'X'), + (0xC05, 'V'), + (0xC0D, 'X'), + (0xC0E, 'V'), + (0xC11, 'X'), + (0xC12, 'V'), + (0xC29, 'X'), + (0xC2A, 'V'), + (0xC34, 'X'), + (0xC35, 'V'), + (0xC3A, 'X'), + (0xC3D, 'V'), + (0xC45, 'X'), + (0xC46, 'V'), + (0xC49, 'X'), + (0xC4A, 'V'), + (0xC4E, 'X'), + (0xC55, 'V'), + (0xC57, 'X'), + (0xC58, 'V'), + (0xC5A, 'X'), + (0xC60, 'V'), + (0xC64, 'X'), + (0xC66, 'V'), + (0xC70, 'X'), + (0xC78, 'V'), + (0xC80, 'X'), + (0xC82, 'V'), + (0xC84, 'X'), + (0xC85, 'V'), + (0xC8D, 'X'), + (0xC8E, 'V'), + (0xC91, 'X'), + (0xC92, 'V'), + (0xCA9, 'X'), + (0xCAA, 'V'), + (0xCB4, 'X'), + (0xCB5, 'V'), + (0xCBA, 'X'), + (0xCBC, 'V'), + (0xCC5, 'X'), + (0xCC6, 'V'), + (0xCC9, 'X'), + (0xCCA, 'V'), + (0xCCE, 'X'), + (0xCD5, 'V'), + (0xCD7, 'X'), + (0xCDE, 'V'), + (0xCDF, 'X'), + (0xCE0, 'V'), + (0xCE4, 'X'), + (0xCE6, 'V'), + (0xCF0, 'X'), + (0xCF1, 'V'), + (0xCF3, 'X'), + (0xD02, 'V'), + (0xD04, 'X'), + (0xD05, 'V'), + (0xD0D, 'X'), + (0xD0E, 'V'), + (0xD11, 'X'), + (0xD12, 'V'), + (0xD3B, 'X'), + (0xD3D, 'V'), + (0xD45, 'X'), + (0xD46, 'V'), + (0xD49, 'X'), + (0xD4A, 'V'), + (0xD4F, 'X'), + (0xD57, 'V'), + (0xD58, 'X'), + (0xD60, 'V'), + (0xD64, 'X'), + (0xD66, 'V'), + (0xD76, 'X'), + (0xD79, 'V'), + (0xD80, 'X'), + (0xD82, 'V'), + (0xD84, 'X'), + (0xD85, 'V'), + (0xD97, 'X'), + (0xD9A, 'V'), + (0xDB2, 'X'), + (0xDB3, 'V'), + (0xDBC, 'X'), + (0xDBD, 'V'), + (0xDBE, 'X'), + (0xDC0, 'V'), + (0xDC7, 'X'), + (0xDCA, 'V'), + (0xDCB, 'X'), + (0xDCF, 'V'), + (0xDD5, 'X'), + (0xDD6, 'V'), + (0xDD7, 'X'), + (0xDD8, 'V'), + (0xDE0, 'X'), + (0xDF2, 'V'), + (0xDF5, 'X'), + (0xE01, 'V'), + (0xE33, 'M', u'à¹à¸²'), + (0xE34, 'V'), + (0xE3B, 'X'), + (0xE3F, 'V'), + (0xE5C, 'X'), + (0xE81, 'V'), + (0xE83, 'X'), + (0xE84, 'V'), + (0xE85, 'X'), + (0xE87, 'V'), + (0xE89, 'X'), + (0xE8A, 'V'), + (0xE8B, 'X'), + (0xE8D, 'V'), + (0xE8E, 'X'), + (0xE94, 'V'), + (0xE98, 'X'), + (0xE99, 'V'), + (0xEA0, 'X'), + (0xEA1, 'V'), + (0xEA4, 'X'), + (0xEA5, 'V'), + (0xEA6, 'X'), + (0xEA7, 'V'), + (0xEA8, 'X'), + (0xEAA, 'V'), + (0xEAC, 'X'), + (0xEAD, 'V'), + (0xEB3, 'M', u'à»àº²'), + (0xEB4, 'V'), + (0xEBA, 'X'), + (0xEBB, 'V'), + (0xEBE, 'X'), + (0xEC0, 'V'), + (0xEC5, 'X'), + (0xEC6, 'V'), + (0xEC7, 'X'), + (0xEC8, 'V'), + (0xECE, 'X'), + (0xED0, 'V'), + (0xEDA, 'X'), + (0xEDC, 'M', u'ຫນ'), + (0xEDD, 'M', u'ຫມ'), + (0xEDE, 'V'), + (0xEE0, 'X'), + (0xF00, 'V'), + (0xF0C, 'M', u'་'), + (0xF0D, 'V'), + (0xF43, 'M', u'གྷ'), + (0xF44, 'V'), + (0xF48, 'X'), + (0xF49, 'V'), + (0xF4D, 'M', u'ཌྷ'), + (0xF4E, 'V'), + (0xF52, 'M', u'དྷ'), + (0xF53, 'V'), + (0xF57, 'M', u'བྷ'), + (0xF58, 'V'), + (0xF5C, 'M', u'ཛྷ'), + (0xF5D, 'V'), + (0xF69, 'M', u'ཀྵ'), + (0xF6A, 'V'), + (0xF6D, 'X'), + (0xF71, 'V'), + (0xF73, 'M', u'ཱི'), + (0xF74, 'V'), + (0xF75, 'M', u'ཱུ'), + (0xF76, 'M', u'ྲྀ'), + (0xF77, 'M', u'ྲཱྀ'), + (0xF78, 'M', u'ླྀ'), + (0xF79, 'M', u'ླཱྀ'), + (0xF7A, 'V'), + (0xF81, 'M', u'ཱྀ'), + (0xF82, 'V'), + (0xF93, 'M', u'ྒྷ'), + (0xF94, 'V'), + (0xF98, 'X'), + (0xF99, 'V'), + (0xF9D, 'M', u'ྜྷ'), + (0xF9E, 'V'), + (0xFA2, 'M', u'ྡྷ'), + (0xFA3, 'V'), + (0xFA7, 'M', u'ྦྷ'), + (0xFA8, 'V'), + (0xFAC, 'M', u'ྫྷ'), + (0xFAD, 'V'), + (0xFB9, 'M', u'à¾à¾µ'), + (0xFBA, 'V'), + (0xFBD, 'X'), + (0xFBE, 'V'), + (0xFCD, 'X'), + (0xFCE, 'V'), + (0xFDB, 'X'), + (0x1000, 'V'), + (0x10A0, 'X'), + (0x10C7, 'M', u'â´§'), + (0x10C8, 'X'), + (0x10CD, 'M', u'â´­'), + (0x10CE, 'X'), + (0x10D0, 'V'), + (0x10FC, 'M', u'ნ'), + (0x10FD, 'V'), + (0x115F, 'X'), + (0x1161, 'V'), + 
(0x1249, 'X'), + (0x124A, 'V'), + (0x124E, 'X'), + (0x1250, 'V'), + (0x1257, 'X'), + (0x1258, 'V'), + (0x1259, 'X'), + (0x125A, 'V'), + (0x125E, 'X'), + (0x1260, 'V'), + (0x1289, 'X'), + (0x128A, 'V'), + (0x128E, 'X'), + (0x1290, 'V'), + (0x12B1, 'X'), + (0x12B2, 'V'), + (0x12B6, 'X'), + (0x12B8, 'V'), + (0x12BF, 'X'), + (0x12C0, 'V'), + (0x12C1, 'X'), + (0x12C2, 'V'), + (0x12C6, 'X'), + (0x12C8, 'V'), + (0x12D7, 'X'), + (0x12D8, 'V'), + (0x1311, 'X'), + (0x1312, 'V'), + (0x1316, 'X'), + (0x1318, 'V'), + (0x135B, 'X'), + (0x135D, 'V'), + (0x137D, 'X'), + (0x1380, 'V'), + (0x139A, 'X'), + (0x13A0, 'V'), + (0x13F5, 'X'), + (0x1400, 'V'), + (0x1680, 'X'), + (0x1681, 'V'), + (0x169D, 'X'), + (0x16A0, 'V'), + (0x16F1, 'X'), + (0x1700, 'V'), + (0x170D, 'X'), + (0x170E, 'V'), + (0x1715, 'X'), + (0x1720, 'V'), + (0x1737, 'X'), + (0x1740, 'V'), + (0x1754, 'X'), + (0x1760, 'V'), + (0x176D, 'X'), + (0x176E, 'V'), + (0x1771, 'X'), + (0x1772, 'V'), + (0x1774, 'X'), + (0x1780, 'V'), + (0x17B4, 'X'), + (0x17B6, 'V'), + (0x17DE, 'X'), + (0x17E0, 'V'), + (0x17EA, 'X'), + (0x17F0, 'V'), + (0x17FA, 'X'), + (0x1800, 'V'), + (0x1806, 'X'), + (0x1807, 'V'), + (0x180B, 'I'), + (0x180E, 'X'), + (0x1810, 'V'), + (0x181A, 'X'), + (0x1820, 'V'), + (0x1878, 'X'), + (0x1880, 'V'), + (0x18AB, 'X'), + (0x18B0, 'V'), + (0x18F6, 'X'), + (0x1900, 'V'), + (0x191D, 'X'), + (0x1920, 'V'), + (0x192C, 'X'), + (0x1930, 'V'), + (0x193C, 'X'), + (0x1940, 'V'), + (0x1941, 'X'), + (0x1944, 'V'), + (0x196E, 'X'), + (0x1970, 'V'), + (0x1975, 'X'), + (0x1980, 'V'), + (0x19AC, 'X'), + (0x19B0, 'V'), + (0x19CA, 'X'), + (0x19D0, 'V'), + (0x19DB, 'X'), + (0x19DE, 'V'), + (0x1A1C, 'X'), + (0x1A1E, 'V'), + (0x1A5F, 'X'), + (0x1A60, 'V'), + (0x1A7D, 'X'), + (0x1A7F, 'V'), + (0x1A8A, 'X'), + (0x1A90, 'V'), + (0x1A9A, 'X'), + (0x1AA0, 'V'), + (0x1AAE, 'X'), + (0x1B00, 'V'), + (0x1B4C, 'X'), + (0x1B50, 'V'), + (0x1B7D, 'X'), + (0x1B80, 'V'), + (0x1BF4, 'X'), + (0x1BFC, 'V'), + (0x1C38, 'X'), + (0x1C3B, 'V'), + (0x1C4A, 'X'), + (0x1C4D, 'V'), + (0x1C80, 'X'), + (0x1CC0, 'V'), + (0x1CC8, 'X'), + (0x1CD0, 'V'), + (0x1CF7, 'X'), + (0x1D00, 'V'), + (0x1D2C, 'M', u'a'), + (0x1D2D, 'M', u'æ'), + (0x1D2E, 'M', u'b'), + (0x1D2F, 'V'), + (0x1D30, 'M', u'd'), + (0x1D31, 'M', u'e'), + (0x1D32, 'M', u'Ç'), + (0x1D33, 'M', u'g'), + (0x1D34, 'M', u'h'), + (0x1D35, 'M', u'i'), + (0x1D36, 'M', u'j'), + (0x1D37, 'M', u'k'), + (0x1D38, 'M', u'l'), + (0x1D39, 'M', u'm'), + (0x1D3A, 'M', u'n'), + (0x1D3B, 'V'), + (0x1D3C, 'M', u'o'), + (0x1D3D, 'M', u'È£'), + (0x1D3E, 'M', u'p'), + (0x1D3F, 'M', u'r'), + (0x1D40, 'M', u't'), + (0x1D41, 'M', u'u'), + (0x1D42, 'M', u'w'), + (0x1D43, 'M', u'a'), + (0x1D44, 'M', u'É'), + (0x1D45, 'M', u'É‘'), + (0x1D46, 'M', u'á´‚'), + (0x1D47, 'M', u'b'), + (0x1D48, 'M', u'd'), + (0x1D49, 'M', u'e'), + (0x1D4A, 'M', u'É™'), + (0x1D4B, 'M', u'É›'), + (0x1D4C, 'M', u'Éœ'), + (0x1D4D, 'M', u'g'), + (0x1D4E, 'V'), + (0x1D4F, 'M', u'k'), + (0x1D50, 'M', u'm'), + (0x1D51, 'M', u'Å‹'), + (0x1D52, 'M', u'o'), + (0x1D53, 'M', u'É”'), + (0x1D54, 'M', u'á´–'), + (0x1D55, 'M', u'á´—'), + (0x1D56, 'M', u'p'), + (0x1D57, 'M', u't'), + (0x1D58, 'M', u'u'), + (0x1D59, 'M', u'á´'), + (0x1D5A, 'M', u'ɯ'), + (0x1D5B, 'M', u'v'), + (0x1D5C, 'M', u'á´¥'), + (0x1D5D, 'M', u'β'), + (0x1D5E, 'M', u'γ'), + (0x1D5F, 'M', u'δ'), + (0x1D60, 'M', u'φ'), + (0x1D61, 'M', u'χ'), + (0x1D62, 'M', u'i'), + (0x1D63, 'M', u'r'), + (0x1D64, 'M', u'u'), + (0x1D65, 'M', u'v'), + (0x1D66, 'M', u'β'), + (0x1D67, 'M', u'γ'), + (0x1D68, 'M', u'Ï'), + (0x1D69, 'M', u'φ'), + 
(0x1D6A, 'M', u'χ'), + (0x1D6B, 'V'), + (0x1D78, 'M', u'н'), + (0x1D79, 'V'), + (0x1D9B, 'M', u'É’'), + (0x1D9C, 'M', u'c'), + (0x1D9D, 'M', u'É•'), + (0x1D9E, 'M', u'ð'), + (0x1D9F, 'M', u'Éœ'), + (0x1DA0, 'M', u'f'), + (0x1DA1, 'M', u'ÉŸ'), + (0x1DA2, 'M', u'É¡'), + (0x1DA3, 'M', u'É¥'), + (0x1DA4, 'M', u'ɨ'), + (0x1DA5, 'M', u'É©'), + (0x1DA6, 'M', u'ɪ'), + (0x1DA7, 'M', u'áµ»'), + (0x1DA8, 'M', u'Ê'), + (0x1DA9, 'M', u'É­'), + (0x1DAA, 'M', u'ᶅ'), + (0x1DAB, 'M', u'ÊŸ'), + (0x1DAC, 'M', u'ɱ'), + (0x1DAD, 'M', u'É°'), + (0x1DAE, 'M', u'ɲ'), + (0x1DAF, 'M', u'ɳ'), + (0x1DB0, 'M', u'É´'), + (0x1DB1, 'M', u'ɵ'), + (0x1DB2, 'M', u'ɸ'), + (0x1DB3, 'M', u'Ê‚'), + (0x1DB4, 'M', u'ʃ'), + (0x1DB5, 'M', u'Æ«'), + (0x1DB6, 'M', u'ʉ'), + (0x1DB7, 'M', u'ÊŠ'), + (0x1DB8, 'M', u'á´œ'), + (0x1DB9, 'M', u'Ê‹'), + (0x1DBA, 'M', u'ÊŒ'), + (0x1DBB, 'M', u'z'), + (0x1DBC, 'M', u'Ê'), + (0x1DBD, 'M', u'Ê‘'), + (0x1DBE, 'M', u'Ê’'), + (0x1DBF, 'M', u'θ'), + (0x1DC0, 'V'), + (0x1DE7, 'X'), + (0x1DFC, 'V'), + (0x1E00, 'M', u'á¸'), + (0x1E01, 'V'), + (0x1E02, 'M', u'ḃ'), + (0x1E03, 'V'), + (0x1E04, 'M', u'ḅ'), + (0x1E05, 'V'), + (0x1E06, 'M', u'ḇ'), + (0x1E07, 'V'), + (0x1E08, 'M', u'ḉ'), + (0x1E09, 'V'), + (0x1E0A, 'M', u'ḋ'), + (0x1E0B, 'V'), + (0x1E0C, 'M', u'á¸'), + (0x1E0D, 'V'), + (0x1E0E, 'M', u'á¸'), + (0x1E0F, 'V'), + (0x1E10, 'M', u'ḑ'), + (0x1E11, 'V'), + (0x1E12, 'M', u'ḓ'), + (0x1E13, 'V'), + (0x1E14, 'M', u'ḕ'), + (0x1E15, 'V'), + (0x1E16, 'M', u'ḗ'), + (0x1E17, 'V'), + (0x1E18, 'M', u'ḙ'), + (0x1E19, 'V'), + (0x1E1A, 'M', u'ḛ'), + (0x1E1B, 'V'), + (0x1E1C, 'M', u'á¸'), + (0x1E1D, 'V'), + (0x1E1E, 'M', u'ḟ'), + (0x1E1F, 'V'), + (0x1E20, 'M', u'ḡ'), + (0x1E21, 'V'), + (0x1E22, 'M', u'ḣ'), + (0x1E23, 'V'), + (0x1E24, 'M', u'ḥ'), + (0x1E25, 'V'), + (0x1E26, 'M', u'ḧ'), + (0x1E27, 'V'), + (0x1E28, 'M', u'ḩ'), + (0x1E29, 'V'), + (0x1E2A, 'M', u'ḫ'), + (0x1E2B, 'V'), + (0x1E2C, 'M', u'ḭ'), + (0x1E2D, 'V'), + (0x1E2E, 'M', u'ḯ'), + (0x1E2F, 'V'), + (0x1E30, 'M', u'ḱ'), + (0x1E31, 'V'), + (0x1E32, 'M', u'ḳ'), + (0x1E33, 'V'), + (0x1E34, 'M', u'ḵ'), + (0x1E35, 'V'), + (0x1E36, 'M', u'ḷ'), + (0x1E37, 'V'), + (0x1E38, 'M', u'ḹ'), + (0x1E39, 'V'), + (0x1E3A, 'M', u'ḻ'), + (0x1E3B, 'V'), + (0x1E3C, 'M', u'ḽ'), + (0x1E3D, 'V'), + (0x1E3E, 'M', u'ḿ'), + (0x1E3F, 'V'), + (0x1E40, 'M', u'á¹'), + (0x1E41, 'V'), + (0x1E42, 'M', u'ṃ'), + (0x1E43, 'V'), + (0x1E44, 'M', u'á¹…'), + (0x1E45, 'V'), + (0x1E46, 'M', u'ṇ'), + (0x1E47, 'V'), + (0x1E48, 'M', u'ṉ'), + (0x1E49, 'V'), + (0x1E4A, 'M', u'ṋ'), + (0x1E4B, 'V'), + (0x1E4C, 'M', u'á¹'), + (0x1E4D, 'V'), + (0x1E4E, 'M', u'á¹'), + (0x1E4F, 'V'), + (0x1E50, 'M', u'ṑ'), + (0x1E51, 'V'), + (0x1E52, 'M', u'ṓ'), + (0x1E53, 'V'), + (0x1E54, 'M', u'ṕ'), + (0x1E55, 'V'), + (0x1E56, 'M', u'á¹—'), + (0x1E57, 'V'), + (0x1E58, 'M', u'á¹™'), + (0x1E59, 'V'), + (0x1E5A, 'M', u'á¹›'), + (0x1E5B, 'V'), + (0x1E5C, 'M', u'á¹'), + (0x1E5D, 'V'), + (0x1E5E, 'M', u'ṟ'), + (0x1E5F, 'V'), + (0x1E60, 'M', u'ṡ'), + (0x1E61, 'V'), + (0x1E62, 'M', u'á¹£'), + (0x1E63, 'V'), + (0x1E64, 'M', u'á¹¥'), + (0x1E65, 'V'), + (0x1E66, 'M', u'ṧ'), + (0x1E67, 'V'), + (0x1E68, 'M', u'ṩ'), + (0x1E69, 'V'), + (0x1E6A, 'M', u'ṫ'), + (0x1E6B, 'V'), + (0x1E6C, 'M', u'á¹­'), + (0x1E6D, 'V'), + (0x1E6E, 'M', u'ṯ'), + (0x1E6F, 'V'), + (0x1E70, 'M', u'á¹±'), + (0x1E71, 'V'), + (0x1E72, 'M', u'á¹³'), + (0x1E73, 'V'), + (0x1E74, 'M', u'á¹µ'), + (0x1E75, 'V'), + (0x1E76, 'M', u'á¹·'), + (0x1E77, 'V'), + (0x1E78, 'M', u'á¹¹'), + (0x1E79, 'V'), + (0x1E7A, 'M', u'á¹»'), + (0x1E7B, 'V'), + (0x1E7C, 'M', u'á¹½'), + (0x1E7D, 
'V'), + (0x1E7E, 'M', u'ṿ'), + (0x1E7F, 'V'), + (0x1E80, 'M', u'áº'), + (0x1E81, 'V'), + (0x1E82, 'M', u'ẃ'), + (0x1E83, 'V'), + (0x1E84, 'M', u'ẅ'), + (0x1E85, 'V'), + (0x1E86, 'M', u'ẇ'), + (0x1E87, 'V'), + (0x1E88, 'M', u'ẉ'), + (0x1E89, 'V'), + (0x1E8A, 'M', u'ẋ'), + (0x1E8B, 'V'), + (0x1E8C, 'M', u'áº'), + (0x1E8D, 'V'), + (0x1E8E, 'M', u'áº'), + (0x1E8F, 'V'), + (0x1E90, 'M', u'ẑ'), + (0x1E91, 'V'), + (0x1E92, 'M', u'ẓ'), + (0x1E93, 'V'), + (0x1E94, 'M', u'ẕ'), + (0x1E95, 'V'), + (0x1E9A, 'M', u'aʾ'), + (0x1E9B, 'M', u'ṡ'), + (0x1E9C, 'V'), + (0x1E9E, 'M', u'ss'), + (0x1E9F, 'V'), + (0x1EA0, 'M', u'ạ'), + (0x1EA1, 'V'), + (0x1EA2, 'M', u'ả'), + (0x1EA3, 'V'), + (0x1EA4, 'M', u'ấ'), + (0x1EA5, 'V'), + (0x1EA6, 'M', u'ầ'), + (0x1EA7, 'V'), + (0x1EA8, 'M', u'ẩ'), + (0x1EA9, 'V'), + (0x1EAA, 'M', u'ẫ'), + (0x1EAB, 'V'), + (0x1EAC, 'M', u'ậ'), + (0x1EAD, 'V'), + (0x1EAE, 'M', u'ắ'), + (0x1EAF, 'V'), + (0x1EB0, 'M', u'ằ'), + (0x1EB1, 'V'), + (0x1EB2, 'M', u'ẳ'), + (0x1EB3, 'V'), + (0x1EB4, 'M', u'ẵ'), + (0x1EB5, 'V'), + (0x1EB6, 'M', u'ặ'), + (0x1EB7, 'V'), + (0x1EB8, 'M', u'ẹ'), + (0x1EB9, 'V'), + (0x1EBA, 'M', u'ẻ'), + (0x1EBB, 'V'), + (0x1EBC, 'M', u'ẽ'), + (0x1EBD, 'V'), + (0x1EBE, 'M', u'ế'), + (0x1EBF, 'V'), + (0x1EC0, 'M', u'á»'), + (0x1EC1, 'V'), + (0x1EC2, 'M', u'ể'), + (0x1EC3, 'V'), + (0x1EC4, 'M', u'á»…'), + (0x1EC5, 'V'), + (0x1EC6, 'M', u'ệ'), + (0x1EC7, 'V'), + (0x1EC8, 'M', u'ỉ'), + (0x1EC9, 'V'), + (0x1ECA, 'M', u'ị'), + (0x1ECB, 'V'), + (0x1ECC, 'M', u'á»'), + (0x1ECD, 'V'), + (0x1ECE, 'M', u'á»'), + (0x1ECF, 'V'), + (0x1ED0, 'M', u'ố'), + (0x1ED1, 'V'), + (0x1ED2, 'M', u'ồ'), + (0x1ED3, 'V'), + (0x1ED4, 'M', u'ổ'), + (0x1ED5, 'V'), + (0x1ED6, 'M', u'á»—'), + (0x1ED7, 'V'), + (0x1ED8, 'M', u'á»™'), + (0x1ED9, 'V'), + (0x1EDA, 'M', u'á»›'), + (0x1EDB, 'V'), + (0x1EDC, 'M', u'á»'), + (0x1EDD, 'V'), + (0x1EDE, 'M', u'ở'), + (0x1EDF, 'V'), + (0x1EE0, 'M', u'ỡ'), + (0x1EE1, 'V'), + (0x1EE2, 'M', u'ợ'), + (0x1EE3, 'V'), + (0x1EE4, 'M', u'ụ'), + (0x1EE5, 'V'), + (0x1EE6, 'M', u'ủ'), + (0x1EE7, 'V'), + (0x1EE8, 'M', u'ứ'), + (0x1EE9, 'V'), + (0x1EEA, 'M', u'ừ'), + (0x1EEB, 'V'), + (0x1EEC, 'M', u'á»­'), + (0x1EED, 'V'), + (0x1EEE, 'M', u'ữ'), + (0x1EEF, 'V'), + (0x1EF0, 'M', u'á»±'), + (0x1EF1, 'V'), + (0x1EF2, 'M', u'ỳ'), + (0x1EF3, 'V'), + (0x1EF4, 'M', u'ỵ'), + (0x1EF5, 'V'), + (0x1EF6, 'M', u'á»·'), + (0x1EF7, 'V'), + (0x1EF8, 'M', u'ỹ'), + (0x1EF9, 'V'), + (0x1EFA, 'M', u'á»»'), + (0x1EFB, 'V'), + (0x1EFC, 'M', u'ỽ'), + (0x1EFD, 'V'), + (0x1EFE, 'M', u'ỿ'), + (0x1EFF, 'V'), + (0x1F08, 'M', u'á¼€'), + (0x1F09, 'M', u'á¼'), + (0x1F0A, 'M', u'ἂ'), + (0x1F0B, 'M', u'ἃ'), + (0x1F0C, 'M', u'ἄ'), + (0x1F0D, 'M', u'á¼…'), + (0x1F0E, 'M', u'ἆ'), + (0x1F0F, 'M', u'ἇ'), + (0x1F10, 'V'), + (0x1F16, 'X'), + (0x1F18, 'M', u'á¼'), + (0x1F19, 'M', u'ἑ'), + (0x1F1A, 'M', u'á¼’'), + (0x1F1B, 'M', u'ἓ'), + (0x1F1C, 'M', u'á¼”'), + (0x1F1D, 'M', u'ἕ'), + (0x1F1E, 'X'), + (0x1F20, 'V'), + (0x1F28, 'M', u'á¼ '), + (0x1F29, 'M', u'ἡ'), + (0x1F2A, 'M', u'á¼¢'), + (0x1F2B, 'M', u'á¼£'), + (0x1F2C, 'M', u'ἤ'), + (0x1F2D, 'M', u'á¼¥'), + (0x1F2E, 'M', u'ἦ'), + (0x1F2F, 'M', u'ἧ'), + (0x1F30, 'V'), + (0x1F38, 'M', u'á¼°'), + (0x1F39, 'M', u'á¼±'), + (0x1F3A, 'M', u'á¼²'), + (0x1F3B, 'M', u'á¼³'), + (0x1F3C, 'M', u'á¼´'), + (0x1F3D, 'M', u'á¼µ'), + (0x1F3E, 'M', u'ἶ'), + (0x1F3F, 'M', u'á¼·'), + (0x1F40, 'V'), + (0x1F46, 'X'), + (0x1F48, 'M', u'á½€'), + (0x1F49, 'M', u'á½'), + (0x1F4A, 'M', u'ὂ'), + (0x1F4B, 'M', u'ὃ'), + (0x1F4C, 'M', u'ὄ'), + (0x1F4D, 'M', u'á½…'), + (0x1F4E, 'X'), + (0x1F50, 'V'), + 
(0x1F58, 'X'), + (0x1F59, 'M', u'ὑ'), + (0x1F5A, 'X'), + (0x1F5B, 'M', u'ὓ'), + (0x1F5C, 'X'), + (0x1F5D, 'M', u'ὕ'), + (0x1F5E, 'X'), + (0x1F5F, 'M', u'á½—'), + (0x1F60, 'V'), + (0x1F68, 'M', u'á½ '), + (0x1F69, 'M', u'ὡ'), + (0x1F6A, 'M', u'á½¢'), + (0x1F6B, 'M', u'á½£'), + (0x1F6C, 'M', u'ὤ'), + (0x1F6D, 'M', u'á½¥'), + (0x1F6E, 'M', u'ὦ'), + (0x1F6F, 'M', u'ὧ'), + (0x1F70, 'V'), + (0x1F71, 'M', u'ά'), + (0x1F72, 'V'), + (0x1F73, 'M', u'έ'), + (0x1F74, 'V'), + (0x1F75, 'M', u'ή'), + (0x1F76, 'V'), + (0x1F77, 'M', u'ί'), + (0x1F78, 'V'), + (0x1F79, 'M', u'ÏŒ'), + (0x1F7A, 'V'), + (0x1F7B, 'M', u'Ï'), + (0x1F7C, 'V'), + (0x1F7D, 'M', u'ÏŽ'), + (0x1F7E, 'X'), + (0x1F80, 'M', u'ἀι'), + (0x1F81, 'M', u'á¼Î¹'), + (0x1F82, 'M', u'ἂι'), + (0x1F83, 'M', u'ἃι'), + (0x1F84, 'M', u'ἄι'), + (0x1F85, 'M', u'ἅι'), + (0x1F86, 'M', u'ἆι'), + (0x1F87, 'M', u'ἇι'), + (0x1F88, 'M', u'ἀι'), + (0x1F89, 'M', u'á¼Î¹'), + (0x1F8A, 'M', u'ἂι'), + (0x1F8B, 'M', u'ἃι'), + (0x1F8C, 'M', u'ἄι'), + (0x1F8D, 'M', u'ἅι'), + (0x1F8E, 'M', u'ἆι'), + (0x1F8F, 'M', u'ἇι'), + (0x1F90, 'M', u'ἠι'), + (0x1F91, 'M', u'ἡι'), + (0x1F92, 'M', u'ἢι'), + (0x1F93, 'M', u'ἣι'), + (0x1F94, 'M', u'ἤι'), + (0x1F95, 'M', u'ἥι'), + (0x1F96, 'M', u'ἦι'), + (0x1F97, 'M', u'ἧι'), + (0x1F98, 'M', u'ἠι'), + (0x1F99, 'M', u'ἡι'), + (0x1F9A, 'M', u'ἢι'), + (0x1F9B, 'M', u'ἣι'), + (0x1F9C, 'M', u'ἤι'), + (0x1F9D, 'M', u'ἥι'), + (0x1F9E, 'M', u'ἦι'), + (0x1F9F, 'M', u'ἧι'), + (0x1FA0, 'M', u'ὠι'), + (0x1FA1, 'M', u'ὡι'), + (0x1FA2, 'M', u'ὢι'), + (0x1FA3, 'M', u'ὣι'), + (0x1FA4, 'M', u'ὤι'), + (0x1FA5, 'M', u'ὥι'), + (0x1FA6, 'M', u'ὦι'), + (0x1FA7, 'M', u'ὧι'), + (0x1FA8, 'M', u'ὠι'), + (0x1FA9, 'M', u'ὡι'), + (0x1FAA, 'M', u'ὢι'), + (0x1FAB, 'M', u'ὣι'), + (0x1FAC, 'M', u'ὤι'), + (0x1FAD, 'M', u'ὥι'), + (0x1FAE, 'M', u'ὦι'), + (0x1FAF, 'M', u'ὧι'), + (0x1FB0, 'V'), + (0x1FB2, 'M', u'ὰι'), + (0x1FB3, 'M', u'αι'), + (0x1FB4, 'M', u'άι'), + (0x1FB5, 'X'), + (0x1FB6, 'V'), + (0x1FB7, 'M', u'ᾶι'), + (0x1FB8, 'M', u'á¾°'), + (0x1FB9, 'M', u'á¾±'), + (0x1FBA, 'M', u'á½°'), + (0x1FBB, 'M', u'ά'), + (0x1FBC, 'M', u'αι'), + (0x1FBD, '3', u' Ì“'), + (0x1FBE, 'M', u'ι'), + (0x1FBF, '3', u' Ì“'), + (0x1FC0, '3', u' Í‚'), + (0x1FC1, '3', u' ̈͂'), + (0x1FC2, 'M', u'ὴι'), + (0x1FC3, 'M', u'ηι'), + (0x1FC4, 'M', u'ήι'), + (0x1FC5, 'X'), + (0x1FC6, 'V'), + (0x1FC7, 'M', u'ῆι'), + (0x1FC8, 'M', u'á½²'), + (0x1FC9, 'M', u'έ'), + (0x1FCA, 'M', u'á½´'), + (0x1FCB, 'M', u'ή'), + (0x1FCC, 'M', u'ηι'), + (0x1FCD, '3', u' Ì“Ì€'), + (0x1FCE, '3', u' Ì“Ì'), + (0x1FCF, '3', u' Ì“Í‚'), + (0x1FD0, 'V'), + (0x1FD3, 'M', u'Î'), + (0x1FD4, 'X'), + (0x1FD6, 'V'), + (0x1FD8, 'M', u'á¿'), + (0x1FD9, 'M', u'á¿‘'), + (0x1FDA, 'M', u'ὶ'), + (0x1FDB, 'M', u'ί'), + (0x1FDC, 'X'), + (0x1FDD, '3', u' ̔̀'), + (0x1FDE, '3', u' Ì”Ì'), + (0x1FDF, '3', u' ̔͂'), + (0x1FE0, 'V'), + (0x1FE3, 'M', u'ΰ'), + (0x1FE4, 'V'), + (0x1FE8, 'M', u'á¿ '), + (0x1FE9, 'M', u'á¿¡'), + (0x1FEA, 'M', u'ὺ'), + (0x1FEB, 'M', u'Ï'), + (0x1FEC, 'M', u'á¿¥'), + (0x1FED, '3', u' ̈̀'), + (0x1FEE, '3', u' ̈Ì'), + (0x1FEF, '3', u'`'), + (0x1FF0, 'X'), + (0x1FF2, 'M', u'ὼι'), + (0x1FF3, 'M', u'ωι'), + (0x1FF4, 'M', u'ώι'), + (0x1FF5, 'X'), + (0x1FF6, 'V'), + (0x1FF7, 'M', u'ῶι'), + (0x1FF8, 'M', u'ὸ'), + (0x1FF9, 'M', u'ÏŒ'), + (0x1FFA, 'M', u'á½¼'), + (0x1FFB, 'M', u'ÏŽ'), + (0x1FFC, 'M', u'ωι'), + (0x1FFD, '3', u' Ì'), + (0x1FFE, '3', u' Ì”'), + (0x1FFF, 'X'), + (0x2000, '3', u' '), + (0x200B, 'I'), + (0x200C, 'D', u''), + (0x200E, 'X'), + (0x2010, 'V'), + (0x2011, 'M', u'â€'), + (0x2012, 'V'), + (0x2017, '3', u' ̳'), + 
(0x2018, 'V'), + (0x2024, 'X'), + (0x2027, 'V'), + (0x2028, 'X'), + (0x202F, '3', u' '), + (0x2030, 'V'), + (0x2033, 'M', u'′′'), + (0x2034, 'M', u'′′′'), + (0x2035, 'V'), + (0x2036, 'M', u'‵‵'), + (0x2037, 'M', u'‵‵‵'), + (0x2038, 'V'), + (0x203C, '3', u'!!'), + (0x203D, 'V'), + (0x203E, '3', u' Ì…'), + (0x203F, 'V'), + (0x2047, '3', u'??'), + (0x2048, '3', u'?!'), + (0x2049, '3', u'!?'), + (0x204A, 'V'), + (0x2057, 'M', u'′′′′'), + (0x2058, 'V'), + (0x205F, '3', u' '), + (0x2060, 'I'), + (0x2061, 'X'), + (0x2064, 'I'), + (0x2065, 'X'), + (0x2070, 'M', u'0'), + (0x2071, 'M', u'i'), + (0x2072, 'X'), + (0x2074, 'M', u'4'), + (0x2075, 'M', u'5'), + (0x2076, 'M', u'6'), + (0x2077, 'M', u'7'), + (0x2078, 'M', u'8'), + (0x2079, 'M', u'9'), + (0x207A, '3', u'+'), + (0x207B, 'M', u'−'), + (0x207C, '3', u'='), + (0x207D, '3', u'('), + (0x207E, '3', u')'), + (0x207F, 'M', u'n'), + (0x2080, 'M', u'0'), + (0x2081, 'M', u'1'), + (0x2082, 'M', u'2'), + (0x2083, 'M', u'3'), + (0x2084, 'M', u'4'), + (0x2085, 'M', u'5'), + (0x2086, 'M', u'6'), + (0x2087, 'M', u'7'), + (0x2088, 'M', u'8'), + (0x2089, 'M', u'9'), + (0x208A, '3', u'+'), + (0x208B, 'M', u'−'), + (0x208C, '3', u'='), + (0x208D, '3', u'('), + (0x208E, '3', u')'), + (0x208F, 'X'), + (0x2090, 'M', u'a'), + (0x2091, 'M', u'e'), + (0x2092, 'M', u'o'), + (0x2093, 'M', u'x'), + (0x2094, 'M', u'É™'), + (0x2095, 'M', u'h'), + (0x2096, 'M', u'k'), + (0x2097, 'M', u'l'), + (0x2098, 'M', u'm'), + (0x2099, 'M', u'n'), + (0x209A, 'M', u'p'), + (0x209B, 'M', u's'), + (0x209C, 'M', u't'), + (0x209D, 'X'), + (0x20A0, 'V'), + (0x20A8, 'M', u'rs'), + (0x20A9, 'V'), + (0x20BB, 'X'), + (0x20D0, 'V'), + (0x20F1, 'X'), + (0x2100, '3', u'a/c'), + (0x2101, '3', u'a/s'), + (0x2102, 'M', u'c'), + (0x2103, 'M', u'°c'), + (0x2104, 'V'), + (0x2105, '3', u'c/o'), + (0x2106, '3', u'c/u'), + (0x2107, 'M', u'É›'), + (0x2108, 'V'), + (0x2109, 'M', u'°f'), + (0x210A, 'M', u'g'), + (0x210B, 'M', u'h'), + (0x210F, 'M', u'ħ'), + (0x2110, 'M', u'i'), + (0x2112, 'M', u'l'), + (0x2114, 'V'), + (0x2115, 'M', u'n'), + (0x2116, 'M', u'no'), + (0x2117, 'V'), + (0x2119, 'M', u'p'), + (0x211A, 'M', u'q'), + (0x211B, 'M', u'r'), + (0x211E, 'V'), + (0x2120, 'M', u'sm'), + (0x2121, 'M', u'tel'), + (0x2122, 'M', u'tm'), + (0x2123, 'V'), + (0x2124, 'M', u'z'), + (0x2125, 'V'), + (0x2126, 'M', u'ω'), + (0x2127, 'V'), + (0x2128, 'M', u'z'), + (0x2129, 'V'), + (0x212A, 'M', u'k'), + (0x212B, 'M', u'Ã¥'), + (0x212C, 'M', u'b'), + (0x212D, 'M', u'c'), + (0x212E, 'V'), + (0x212F, 'M', u'e'), + (0x2131, 'M', u'f'), + (0x2132, 'X'), + (0x2133, 'M', u'm'), + (0x2134, 'M', u'o'), + (0x2135, 'M', u'×'), + (0x2136, 'M', u'ב'), + (0x2137, 'M', u'×’'), + (0x2138, 'M', u'ד'), + (0x2139, 'M', u'i'), + (0x213A, 'V'), + (0x213B, 'M', u'fax'), + (0x213C, 'M', u'Ï€'), + (0x213D, 'M', u'γ'), + (0x213F, 'M', u'Ï€'), + (0x2140, 'M', u'∑'), + (0x2141, 'V'), + (0x2145, 'M', u'd'), + (0x2147, 'M', u'e'), + (0x2148, 'M', u'i'), + (0x2149, 'M', u'j'), + (0x214A, 'V'), + (0x2150, 'M', u'1â„7'), + (0x2151, 'M', u'1â„9'), + (0x2152, 'M', u'1â„10'), + (0x2153, 'M', u'1â„3'), + (0x2154, 'M', u'2â„3'), + (0x2155, 'M', u'1â„5'), + (0x2156, 'M', u'2â„5'), + (0x2157, 'M', u'3â„5'), + (0x2158, 'M', u'4â„5'), + (0x2159, 'M', u'1â„6'), + (0x215A, 'M', u'5â„6'), + (0x215B, 'M', u'1â„8'), + (0x215C, 'M', u'3â„8'), + (0x215D, 'M', u'5â„8'), + (0x215E, 'M', u'7â„8'), + (0x215F, 'M', u'1â„'), + (0x2160, 'M', u'i'), + (0x2161, 'M', u'ii'), + (0x2162, 'M', u'iii'), + (0x2163, 'M', u'iv'), + (0x2164, 'M', u'v'), + (0x2165, 'M', u'vi'), + 
(0x2166, 'M', u'vii'), + (0x2167, 'M', u'viii'), + (0x2168, 'M', u'ix'), + (0x2169, 'M', u'x'), + (0x216A, 'M', u'xi'), + (0x216B, 'M', u'xii'), + (0x216C, 'M', u'l'), + (0x216D, 'M', u'c'), + (0x216E, 'M', u'd'), + (0x216F, 'M', u'm'), + (0x2170, 'M', u'i'), + (0x2171, 'M', u'ii'), + (0x2172, 'M', u'iii'), + (0x2173, 'M', u'iv'), + (0x2174, 'M', u'v'), + (0x2175, 'M', u'vi'), + (0x2176, 'M', u'vii'), + (0x2177, 'M', u'viii'), + (0x2178, 'M', u'ix'), + (0x2179, 'M', u'x'), + (0x217A, 'M', u'xi'), + (0x217B, 'M', u'xii'), + (0x217C, 'M', u'l'), + (0x217D, 'M', u'c'), + (0x217E, 'M', u'd'), + (0x217F, 'M', u'm'), + (0x2180, 'V'), + (0x2183, 'X'), + (0x2184, 'V'), + (0x2189, 'M', u'0â„3'), + (0x218A, 'X'), + (0x2190, 'V'), + (0x222C, 'M', u'∫∫'), + (0x222D, 'M', u'∫∫∫'), + (0x222E, 'V'), + (0x222F, 'M', u'∮∮'), + (0x2230, 'M', u'∮∮∮'), + (0x2231, 'V'), + (0x2260, '3'), + (0x2261, 'V'), + (0x226E, '3'), + (0x2270, 'V'), + (0x2329, 'M', u'〈'), + (0x232A, 'M', u'〉'), + (0x232B, 'V'), + (0x23F4, 'X'), + (0x2400, 'V'), + (0x2427, 'X'), + (0x2440, 'V'), + (0x244B, 'X'), + (0x2460, 'M', u'1'), + (0x2461, 'M', u'2'), + (0x2462, 'M', u'3'), + (0x2463, 'M', u'4'), + (0x2464, 'M', u'5'), + (0x2465, 'M', u'6'), + (0x2466, 'M', u'7'), + (0x2467, 'M', u'8'), + (0x2468, 'M', u'9'), + (0x2469, 'M', u'10'), + (0x246A, 'M', u'11'), + (0x246B, 'M', u'12'), + (0x246C, 'M', u'13'), + (0x246D, 'M', u'14'), + (0x246E, 'M', u'15'), + (0x246F, 'M', u'16'), + (0x2470, 'M', u'17'), + (0x2471, 'M', u'18'), + (0x2472, 'M', u'19'), + (0x2473, 'M', u'20'), + (0x2474, '3', u'(1)'), + (0x2475, '3', u'(2)'), + (0x2476, '3', u'(3)'), + (0x2477, '3', u'(4)'), + (0x2478, '3', u'(5)'), + (0x2479, '3', u'(6)'), + (0x247A, '3', u'(7)'), + (0x247B, '3', u'(8)'), + (0x247C, '3', u'(9)'), + (0x247D, '3', u'(10)'), + (0x247E, '3', u'(11)'), + (0x247F, '3', u'(12)'), + (0x2480, '3', u'(13)'), + (0x2481, '3', u'(14)'), + (0x2482, '3', u'(15)'), + (0x2483, '3', u'(16)'), + (0x2484, '3', u'(17)'), + (0x2485, '3', u'(18)'), + (0x2486, '3', u'(19)'), + (0x2487, '3', u'(20)'), + (0x2488, 'X'), + (0x249C, '3', u'(a)'), + (0x249D, '3', u'(b)'), + (0x249E, '3', u'(c)'), + (0x249F, '3', u'(d)'), + (0x24A0, '3', u'(e)'), + (0x24A1, '3', u'(f)'), + (0x24A2, '3', u'(g)'), + (0x24A3, '3', u'(h)'), + (0x24A4, '3', u'(i)'), + (0x24A5, '3', u'(j)'), + (0x24A6, '3', u'(k)'), + (0x24A7, '3', u'(l)'), + (0x24A8, '3', u'(m)'), + (0x24A9, '3', u'(n)'), + (0x24AA, '3', u'(o)'), + (0x24AB, '3', u'(p)'), + (0x24AC, '3', u'(q)'), + (0x24AD, '3', u'(r)'), + (0x24AE, '3', u'(s)'), + (0x24AF, '3', u'(t)'), + (0x24B0, '3', u'(u)'), + (0x24B1, '3', u'(v)'), + (0x24B2, '3', u'(w)'), + (0x24B3, '3', u'(x)'), + (0x24B4, '3', u'(y)'), + (0x24B5, '3', u'(z)'), + (0x24B6, 'M', u'a'), + (0x24B7, 'M', u'b'), + (0x24B8, 'M', u'c'), + (0x24B9, 'M', u'd'), + (0x24BA, 'M', u'e'), + (0x24BB, 'M', u'f'), + (0x24BC, 'M', u'g'), + (0x24BD, 'M', u'h'), + (0x24BE, 'M', u'i'), + (0x24BF, 'M', u'j'), + (0x24C0, 'M', u'k'), + (0x24C1, 'M', u'l'), + (0x24C2, 'M', u'm'), + (0x24C3, 'M', u'n'), + (0x24C4, 'M', u'o'), + (0x24C5, 'M', u'p'), + (0x24C6, 'M', u'q'), + (0x24C7, 'M', u'r'), + (0x24C8, 'M', u's'), + (0x24C9, 'M', u't'), + (0x24CA, 'M', u'u'), + (0x24CB, 'M', u'v'), + (0x24CC, 'M', u'w'), + (0x24CD, 'M', u'x'), + (0x24CE, 'M', u'y'), + (0x24CF, 'M', u'z'), + (0x24D0, 'M', u'a'), + (0x24D1, 'M', u'b'), + (0x24D2, 'M', u'c'), + (0x24D3, 'M', u'd'), + (0x24D4, 'M', u'e'), + (0x24D5, 'M', u'f'), + (0x24D6, 'M', u'g'), + (0x24D7, 'M', u'h'), + (0x24D8, 'M', u'i'), + (0x24D9, 'M', u'j'), 
+ (0x24DA, 'M', u'k'), + (0x24DB, 'M', u'l'), + (0x24DC, 'M', u'm'), + (0x24DD, 'M', u'n'), + (0x24DE, 'M', u'o'), + (0x24DF, 'M', u'p'), + (0x24E0, 'M', u'q'), + (0x24E1, 'M', u'r'), + (0x24E2, 'M', u's'), + (0x24E3, 'M', u't'), + (0x24E4, 'M', u'u'), + (0x24E5, 'M', u'v'), + (0x24E6, 'M', u'w'), + (0x24E7, 'M', u'x'), + (0x24E8, 'M', u'y'), + (0x24E9, 'M', u'z'), + (0x24EA, 'M', u'0'), + (0x24EB, 'V'), + (0x2700, 'X'), + (0x2701, 'V'), + (0x2A0C, 'M', u'∫∫∫∫'), + (0x2A0D, 'V'), + (0x2A74, '3', u'::='), + (0x2A75, '3', u'=='), + (0x2A76, '3', u'==='), + (0x2A77, 'V'), + (0x2ADC, 'M', u'â«Ì¸'), + (0x2ADD, 'V'), + (0x2B4D, 'X'), + (0x2B50, 'V'), + (0x2B5A, 'X'), + (0x2C00, 'M', u'â°°'), + (0x2C01, 'M', u'â°±'), + (0x2C02, 'M', u'â°²'), + (0x2C03, 'M', u'â°³'), + (0x2C04, 'M', u'â°´'), + (0x2C05, 'M', u'â°µ'), + (0x2C06, 'M', u'â°¶'), + (0x2C07, 'M', u'â°·'), + (0x2C08, 'M', u'â°¸'), + (0x2C09, 'M', u'â°¹'), + (0x2C0A, 'M', u'â°º'), + (0x2C0B, 'M', u'â°»'), + (0x2C0C, 'M', u'â°¼'), + (0x2C0D, 'M', u'â°½'), + (0x2C0E, 'M', u'â°¾'), + (0x2C0F, 'M', u'â°¿'), + (0x2C10, 'M', u'â±€'), + (0x2C11, 'M', u'â±'), + (0x2C12, 'M', u'ⱂ'), + (0x2C13, 'M', u'ⱃ'), + (0x2C14, 'M', u'ⱄ'), + (0x2C15, 'M', u'â±…'), + (0x2C16, 'M', u'ⱆ'), + (0x2C17, 'M', u'ⱇ'), + (0x2C18, 'M', u'ⱈ'), + (0x2C19, 'M', u'ⱉ'), + (0x2C1A, 'M', u'ⱊ'), + (0x2C1B, 'M', u'ⱋ'), + (0x2C1C, 'M', u'ⱌ'), + (0x2C1D, 'M', u'â±'), + (0x2C1E, 'M', u'ⱎ'), + (0x2C1F, 'M', u'â±'), + (0x2C20, 'M', u'â±'), + (0x2C21, 'M', u'ⱑ'), + (0x2C22, 'M', u'â±’'), + (0x2C23, 'M', u'ⱓ'), + (0x2C24, 'M', u'â±”'), + (0x2C25, 'M', u'ⱕ'), + (0x2C26, 'M', u'â±–'), + (0x2C27, 'M', u'â±—'), + (0x2C28, 'M', u'ⱘ'), + (0x2C29, 'M', u'â±™'), + (0x2C2A, 'M', u'ⱚ'), + (0x2C2B, 'M', u'â±›'), + (0x2C2C, 'M', u'ⱜ'), + (0x2C2D, 'M', u'â±'), + (0x2C2E, 'M', u'ⱞ'), + (0x2C2F, 'X'), + (0x2C30, 'V'), + (0x2C5F, 'X'), + (0x2C60, 'M', u'ⱡ'), + (0x2C61, 'V'), + (0x2C62, 'M', u'É«'), + (0x2C63, 'M', u'áµ½'), + (0x2C64, 'M', u'ɽ'), + (0x2C65, 'V'), + (0x2C67, 'M', u'ⱨ'), + (0x2C68, 'V'), + (0x2C69, 'M', u'ⱪ'), + (0x2C6A, 'V'), + (0x2C6B, 'M', u'ⱬ'), + (0x2C6C, 'V'), + (0x2C6D, 'M', u'É‘'), + (0x2C6E, 'M', u'ɱ'), + (0x2C6F, 'M', u'É'), + (0x2C70, 'M', u'É’'), + (0x2C71, 'V'), + (0x2C72, 'M', u'â±³'), + (0x2C73, 'V'), + (0x2C75, 'M', u'ⱶ'), + (0x2C76, 'V'), + (0x2C7C, 'M', u'j'), + (0x2C7D, 'M', u'v'), + (0x2C7E, 'M', u'È¿'), + (0x2C7F, 'M', u'É€'), + (0x2C80, 'M', u'â²'), + (0x2C81, 'V'), + (0x2C82, 'M', u'ⲃ'), + (0x2C83, 'V'), + (0x2C84, 'M', u'â²…'), + (0x2C85, 'V'), + (0x2C86, 'M', u'ⲇ'), + (0x2C87, 'V'), + (0x2C88, 'M', u'ⲉ'), + (0x2C89, 'V'), + (0x2C8A, 'M', u'ⲋ'), + (0x2C8B, 'V'), + (0x2C8C, 'M', u'â²'), + (0x2C8D, 'V'), + (0x2C8E, 'M', u'â²'), + (0x2C8F, 'V'), + (0x2C90, 'M', u'ⲑ'), + (0x2C91, 'V'), + (0x2C92, 'M', u'ⲓ'), + (0x2C93, 'V'), + (0x2C94, 'M', u'ⲕ'), + (0x2C95, 'V'), + (0x2C96, 'M', u'â²—'), + (0x2C97, 'V'), + (0x2C98, 'M', u'â²™'), + (0x2C99, 'V'), + (0x2C9A, 'M', u'â²›'), + (0x2C9B, 'V'), + (0x2C9C, 'M', u'â²'), + (0x2C9D, 'V'), + (0x2C9E, 'M', u'ⲟ'), + (0x2C9F, 'V'), + (0x2CA0, 'M', u'ⲡ'), + (0x2CA1, 'V'), + (0x2CA2, 'M', u'â²£'), + (0x2CA3, 'V'), + (0x2CA4, 'M', u'â²¥'), + (0x2CA5, 'V'), + (0x2CA6, 'M', u'ⲧ'), + (0x2CA7, 'V'), + (0x2CA8, 'M', u'ⲩ'), + (0x2CA9, 'V'), + (0x2CAA, 'M', u'ⲫ'), + (0x2CAB, 'V'), + (0x2CAC, 'M', u'â²­'), + (0x2CAD, 'V'), + (0x2CAE, 'M', u'ⲯ'), + (0x2CAF, 'V'), + (0x2CB0, 'M', u'â²±'), + (0x2CB1, 'V'), + (0x2CB2, 'M', u'â²³'), + (0x2CB3, 'V'), + (0x2CB4, 'M', u'â²µ'), + (0x2CB5, 'V'), + (0x2CB6, 'M', u'â²·'), + (0x2CB7, 'V'), + (0x2CB8, 'M', 
u'â²¹'), + (0x2CB9, 'V'), + (0x2CBA, 'M', u'â²»'), + (0x2CBB, 'V'), + (0x2CBC, 'M', u'â²½'), + (0x2CBD, 'V'), + (0x2CBE, 'M', u'ⲿ'), + (0x2CBF, 'V'), + (0x2CC0, 'M', u'â³'), + (0x2CC1, 'V'), + (0x2CC2, 'M', u'ⳃ'), + (0x2CC3, 'V'), + (0x2CC4, 'M', u'â³…'), + (0x2CC5, 'V'), + (0x2CC6, 'M', u'ⳇ'), + (0x2CC7, 'V'), + (0x2CC8, 'M', u'ⳉ'), + (0x2CC9, 'V'), + (0x2CCA, 'M', u'ⳋ'), + (0x2CCB, 'V'), + (0x2CCC, 'M', u'â³'), + (0x2CCD, 'V'), + (0x2CCE, 'M', u'â³'), + (0x2CCF, 'V'), + (0x2CD0, 'M', u'ⳑ'), + (0x2CD1, 'V'), + (0x2CD2, 'M', u'ⳓ'), + (0x2CD3, 'V'), + (0x2CD4, 'M', u'ⳕ'), + (0x2CD5, 'V'), + (0x2CD6, 'M', u'â³—'), + (0x2CD7, 'V'), + (0x2CD8, 'M', u'â³™'), + (0x2CD9, 'V'), + (0x2CDA, 'M', u'â³›'), + (0x2CDB, 'V'), + (0x2CDC, 'M', u'â³'), + (0x2CDD, 'V'), + (0x2CDE, 'M', u'ⳟ'), + (0x2CDF, 'V'), + (0x2CE0, 'M', u'ⳡ'), + (0x2CE1, 'V'), + (0x2CE2, 'M', u'â³£'), + (0x2CE3, 'V'), + (0x2CEB, 'M', u'ⳬ'), + (0x2CEC, 'V'), + (0x2CED, 'M', u'â³®'), + (0x2CEE, 'V'), + (0x2CF2, 'M', u'â³³'), + (0x2CF3, 'V'), + (0x2CF4, 'X'), + (0x2CF9, 'V'), + (0x2D26, 'X'), + (0x2D27, 'V'), + (0x2D28, 'X'), + (0x2D2D, 'V'), + (0x2D2E, 'X'), + (0x2D30, 'V'), + (0x2D68, 'X'), + (0x2D6F, 'M', u'ⵡ'), + (0x2D70, 'V'), + (0x2D71, 'X'), + (0x2D7F, 'V'), + (0x2D97, 'X'), + (0x2DA0, 'V'), + (0x2DA7, 'X'), + (0x2DA8, 'V'), + (0x2DAF, 'X'), + (0x2DB0, 'V'), + (0x2DB7, 'X'), + (0x2DB8, 'V'), + (0x2DBF, 'X'), + (0x2DC0, 'V'), + (0x2DC7, 'X'), + (0x2DC8, 'V'), + (0x2DCF, 'X'), + (0x2DD0, 'V'), + (0x2DD7, 'X'), + (0x2DD8, 'V'), + (0x2DDF, 'X'), + (0x2DE0, 'V'), + (0x2E3C, 'X'), + (0x2E80, 'V'), + (0x2E9A, 'X'), + (0x2E9B, 'V'), + (0x2E9F, 'M', u'æ¯'), + (0x2EA0, 'V'), + (0x2EF3, 'M', u'龟'), + (0x2EF4, 'X'), + (0x2F00, 'M', u'一'), + (0x2F01, 'M', u'丨'), + (0x2F02, 'M', u'丶'), + (0x2F03, 'M', u'丿'), + (0x2F04, 'M', u'ä¹™'), + (0x2F05, 'M', u'亅'), + (0x2F06, 'M', u'二'), + (0x2F07, 'M', u'亠'), + (0x2F08, 'M', u'人'), + (0x2F09, 'M', u'å„¿'), + (0x2F0A, 'M', u'å…¥'), + (0x2F0B, 'M', u'å…«'), + (0x2F0C, 'M', u'冂'), + (0x2F0D, 'M', u'冖'), + (0x2F0E, 'M', u'冫'), + (0x2F0F, 'M', u'几'), + (0x2F10, 'M', u'凵'), + (0x2F11, 'M', u'刀'), + (0x2F12, 'M', u'力'), + (0x2F13, 'M', u'勹'), + (0x2F14, 'M', u'匕'), + (0x2F15, 'M', u'匚'), + (0x2F16, 'M', u'匸'), + (0x2F17, 'M', u'å'), + (0x2F18, 'M', u'åœ'), + (0x2F19, 'M', u'å©'), + (0x2F1A, 'M', u'厂'), + (0x2F1B, 'M', u'厶'), + (0x2F1C, 'M', u'åˆ'), + (0x2F1D, 'M', u'å£'), + (0x2F1E, 'M', u'å›—'), + (0x2F1F, 'M', u'土'), + (0x2F20, 'M', u'士'), + (0x2F21, 'M', u'夂'), + (0x2F22, 'M', u'夊'), + (0x2F23, 'M', u'夕'), + (0x2F24, 'M', u'大'), + (0x2F25, 'M', u'女'), + (0x2F26, 'M', u'å­'), + (0x2F27, 'M', u'宀'), + (0x2F28, 'M', u'寸'), + (0x2F29, 'M', u'å°'), + (0x2F2A, 'M', u'å°¢'), + (0x2F2B, 'M', u'å°¸'), + (0x2F2C, 'M', u'å±®'), + (0x2F2D, 'M', u'å±±'), + (0x2F2E, 'M', u'å·›'), + (0x2F2F, 'M', u'å·¥'), + (0x2F30, 'M', u'å·±'), + (0x2F31, 'M', u'å·¾'), + (0x2F32, 'M', u'å¹²'), + (0x2F33, 'M', u'幺'), + (0x2F34, 'M', u'广'), + (0x2F35, 'M', u'å»´'), + (0x2F36, 'M', u'廾'), + (0x2F37, 'M', u'弋'), + (0x2F38, 'M', u'弓'), + (0x2F39, 'M', u'å½'), + (0x2F3A, 'M', u'彡'), + (0x2F3B, 'M', u'å½³'), + (0x2F3C, 'M', u'心'), + (0x2F3D, 'M', u'戈'), + (0x2F3E, 'M', u'戶'), + (0x2F3F, 'M', u'手'), + (0x2F40, 'M', u'支'), + (0x2F41, 'M', u'æ”´'), + (0x2F42, 'M', u'æ–‡'), + (0x2F43, 'M', u'æ–—'), + (0x2F44, 'M', u'æ–¤'), + (0x2F45, 'M', u'æ–¹'), + (0x2F46, 'M', u'æ— '), + (0x2F47, 'M', u'æ—¥'), + (0x2F48, 'M', u'æ›°'), + (0x2F49, 'M', u'月'), + (0x2F4A, 'M', u'木'), + (0x2F4B, 'M', u'欠'), + (0x2F4C, 'M', u'æ­¢'), + (0x2F4D, 'M', u'æ­¹'), + (0x2F4E, 
'M', u'殳'), + (0x2F4F, 'M', u'毋'), + (0x2F50, 'M', u'比'), + (0x2F51, 'M', u'毛'), + (0x2F52, 'M', u'æ°'), + (0x2F53, 'M', u'æ°”'), + (0x2F54, 'M', u'æ°´'), + (0x2F55, 'M', u'ç«'), + (0x2F56, 'M', u'爪'), + (0x2F57, 'M', u'父'), + (0x2F58, 'M', u'爻'), + (0x2F59, 'M', u'爿'), + (0x2F5A, 'M', u'片'), + (0x2F5B, 'M', u'牙'), + (0x2F5C, 'M', u'牛'), + (0x2F5D, 'M', u'犬'), + (0x2F5E, 'M', u'玄'), + (0x2F5F, 'M', u'玉'), + (0x2F60, 'M', u'ç“œ'), + (0x2F61, 'M', u'瓦'), + (0x2F62, 'M', u'甘'), + (0x2F63, 'M', u'生'), + (0x2F64, 'M', u'用'), + (0x2F65, 'M', u'ç”°'), + (0x2F66, 'M', u'ç–‹'), + (0x2F67, 'M', u'ç–’'), + (0x2F68, 'M', u'癶'), + (0x2F69, 'M', u'白'), + (0x2F6A, 'M', u'çš®'), + (0x2F6B, 'M', u'çš¿'), + (0x2F6C, 'M', u'ç›®'), + (0x2F6D, 'M', u'矛'), + (0x2F6E, 'M', u'矢'), + (0x2F6F, 'M', u'石'), + (0x2F70, 'M', u'示'), + (0x2F71, 'M', u'禸'), + (0x2F72, 'M', u'禾'), + (0x2F73, 'M', u'ç©´'), + (0x2F74, 'M', u'ç«‹'), + (0x2F75, 'M', u'竹'), + (0x2F76, 'M', u'ç±³'), + (0x2F77, 'M', u'糸'), + (0x2F78, 'M', u'缶'), + (0x2F79, 'M', u'网'), + (0x2F7A, 'M', u'羊'), + (0x2F7B, 'M', u'ç¾½'), + (0x2F7C, 'M', u'è€'), + (0x2F7D, 'M', u'而'), + (0x2F7E, 'M', u'耒'), + (0x2F7F, 'M', u'耳'), + (0x2F80, 'M', u'è¿'), + (0x2F81, 'M', u'肉'), + (0x2F82, 'M', u'臣'), + (0x2F83, 'M', u'自'), + (0x2F84, 'M', u'至'), + (0x2F85, 'M', u'臼'), + (0x2F86, 'M', u'舌'), + (0x2F87, 'M', u'舛'), + (0x2F88, 'M', u'舟'), + (0x2F89, 'M', u'艮'), + (0x2F8A, 'M', u'色'), + (0x2F8B, 'M', u'艸'), + (0x2F8C, 'M', u'è™'), + (0x2F8D, 'M', u'虫'), + (0x2F8E, 'M', u'è¡€'), + (0x2F8F, 'M', u'è¡Œ'), + (0x2F90, 'M', u'è¡£'), + (0x2F91, 'M', u'襾'), + (0x2F92, 'M', u'見'), + (0x2F93, 'M', u'角'), + (0x2F94, 'M', u'言'), + (0x2F95, 'M', u'è°·'), + (0x2F96, 'M', u'豆'), + (0x2F97, 'M', u'豕'), + (0x2F98, 'M', u'豸'), + (0x2F99, 'M', u'è²'), + (0x2F9A, 'M', u'赤'), + (0x2F9B, 'M', u'èµ°'), + (0x2F9C, 'M', u'足'), + (0x2F9D, 'M', u'身'), + (0x2F9E, 'M', u'車'), + (0x2F9F, 'M', u'è¾›'), + (0x2FA0, 'M', u'è¾°'), + (0x2FA1, 'M', u'è¾µ'), + (0x2FA2, 'M', u'é‚‘'), + (0x2FA3, 'M', u'é…‰'), + (0x2FA4, 'M', u'釆'), + (0x2FA5, 'M', u'里'), + (0x2FA6, 'M', u'金'), + (0x2FA7, 'M', u'é•·'), + (0x2FA8, 'M', u'é–€'), + (0x2FA9, 'M', u'阜'), + (0x2FAA, 'M', u'隶'), + (0x2FAB, 'M', u'éš¹'), + (0x2FAC, 'M', u'雨'), + (0x2FAD, 'M', u'é‘'), + (0x2FAE, 'M', u'éž'), + (0x2FAF, 'M', u'é¢'), + (0x2FB0, 'M', u'é©'), + (0x2FB1, 'M', u'韋'), + (0x2FB2, 'M', u'韭'), + (0x2FB3, 'M', u'音'), + (0x2FB4, 'M', u'é '), + (0x2FB5, 'M', u'風'), + (0x2FB6, 'M', u'飛'), + (0x2FB7, 'M', u'食'), + (0x2FB8, 'M', u'首'), + (0x2FB9, 'M', u'香'), + (0x2FBA, 'M', u'馬'), + (0x2FBB, 'M', u'骨'), + (0x2FBC, 'M', u'高'), + (0x2FBD, 'M', u'é«Ÿ'), + (0x2FBE, 'M', u'鬥'), + (0x2FBF, 'M', u'鬯'), + (0x2FC0, 'M', u'鬲'), + (0x2FC1, 'M', u'鬼'), + (0x2FC2, 'M', u'é­š'), + (0x2FC3, 'M', u'é³¥'), + (0x2FC4, 'M', u'é¹µ'), + (0x2FC5, 'M', u'鹿'), + (0x2FC6, 'M', u'麥'), + (0x2FC7, 'M', u'麻'), + (0x2FC8, 'M', u'黃'), + (0x2FC9, 'M', u'é»'), + (0x2FCA, 'M', u'黑'), + (0x2FCB, 'M', u'黹'), + (0x2FCC, 'M', u'黽'), + (0x2FCD, 'M', u'鼎'), + (0x2FCE, 'M', u'鼓'), + (0x2FCF, 'M', u'é¼ '), + (0x2FD0, 'M', u'é¼»'), + (0x2FD1, 'M', u'齊'), + (0x2FD2, 'M', u'é½’'), + (0x2FD3, 'M', u'é¾'), + (0x2FD4, 'M', u'龜'), + (0x2FD5, 'M', u'é¾ '), + (0x2FD6, 'X'), + (0x3000, '3', u' '), + (0x3001, 'V'), + (0x3002, 'M', u'.'), + (0x3003, 'V'), + (0x3036, 'M', u'〒'), + (0x3037, 'V'), + (0x3038, 'M', u'å'), + (0x3039, 'M', u'å„'), + (0x303A, 'M', u'å…'), + (0x303B, 'V'), + (0x3040, 'X'), + (0x3041, 'V'), + (0x3097, 'X'), + (0x3099, 'V'), + (0x309B, '3', u' ã‚™'), + (0x309C, '3', u' ã‚š'), + (0x309D, 
'V'), + (0x309F, 'M', u'より'), + (0x30A0, 'V'), + (0x30FF, 'M', u'コト'), + (0x3100, 'X'), + (0x3105, 'V'), + (0x312E, 'X'), + (0x3131, 'M', u'á„€'), + (0x3132, 'M', u'á„'), + (0x3133, 'M', u'ᆪ'), + (0x3134, 'M', u'á„‚'), + (0x3135, 'M', u'ᆬ'), + (0x3136, 'M', u'ᆭ'), + (0x3137, 'M', u'ᄃ'), + (0x3138, 'M', u'á„„'), + (0x3139, 'M', u'á„…'), + (0x313A, 'M', u'ᆰ'), + (0x313B, 'M', u'ᆱ'), + (0x313C, 'M', u'ᆲ'), + (0x313D, 'M', u'ᆳ'), + (0x313E, 'M', u'ᆴ'), + (0x313F, 'M', u'ᆵ'), + (0x3140, 'M', u'á„š'), + (0x3141, 'M', u'ᄆ'), + (0x3142, 'M', u'ᄇ'), + (0x3143, 'M', u'ᄈ'), + (0x3144, 'M', u'á„¡'), + (0x3145, 'M', u'ᄉ'), + (0x3146, 'M', u'á„Š'), + (0x3147, 'M', u'á„‹'), + (0x3148, 'M', u'á„Œ'), + (0x3149, 'M', u'á„'), + (0x314A, 'M', u'á„Ž'), + (0x314B, 'M', u'á„'), + (0x314C, 'M', u'á„'), + (0x314D, 'M', u'á„‘'), + (0x314E, 'M', u'á„’'), + (0x314F, 'M', u'á…¡'), + (0x3150, 'M', u'á…¢'), + (0x3151, 'M', u'á…£'), + (0x3152, 'M', u'á…¤'), + (0x3153, 'M', u'á…¥'), + (0x3154, 'M', u'á…¦'), + (0x3155, 'M', u'á…§'), + (0x3156, 'M', u'á…¨'), + (0x3157, 'M', u'á…©'), + (0x3158, 'M', u'á…ª'), + (0x3159, 'M', u'á…«'), + (0x315A, 'M', u'á…¬'), + (0x315B, 'M', u'á…­'), + (0x315C, 'M', u'á…®'), + (0x315D, 'M', u'á…¯'), + (0x315E, 'M', u'á…°'), + (0x315F, 'M', u'á…±'), + (0x3160, 'M', u'á…²'), + (0x3161, 'M', u'á…³'), + (0x3162, 'M', u'á…´'), + (0x3163, 'M', u'á…µ'), + (0x3164, 'X'), + (0x3165, 'M', u'á„”'), + (0x3166, 'M', u'á„•'), + (0x3167, 'M', u'ᇇ'), + (0x3168, 'M', u'ᇈ'), + (0x3169, 'M', u'ᇌ'), + (0x316A, 'M', u'ᇎ'), + (0x316B, 'M', u'ᇓ'), + (0x316C, 'M', u'ᇗ'), + (0x316D, 'M', u'ᇙ'), + (0x316E, 'M', u'á„œ'), + (0x316F, 'M', u'á‡'), + (0x3170, 'M', u'ᇟ'), + (0x3171, 'M', u'á„'), + (0x3172, 'M', u'á„ž'), + (0x3173, 'M', u'á„ '), + (0x3174, 'M', u'á„¢'), + (0x3175, 'M', u'á„£'), + (0x3176, 'M', u'ᄧ'), + (0x3177, 'M', u'á„©'), + (0x3178, 'M', u'á„«'), + (0x3179, 'M', u'ᄬ'), + (0x317A, 'M', u'á„­'), + (0x317B, 'M', u'á„®'), + (0x317C, 'M', u'ᄯ'), + (0x317D, 'M', u'ᄲ'), + (0x317E, 'M', u'ᄶ'), + (0x317F, 'M', u'á…€'), + (0x3180, 'M', u'á…‡'), + (0x3181, 'M', u'á…Œ'), + (0x3182, 'M', u'ᇱ'), + (0x3183, 'M', u'ᇲ'), + (0x3184, 'M', u'á…—'), + (0x3185, 'M', u'á…˜'), + (0x3186, 'M', u'á…™'), + (0x3187, 'M', u'ᆄ'), + (0x3188, 'M', u'ᆅ'), + (0x3189, 'M', u'ᆈ'), + (0x318A, 'M', u'ᆑ'), + (0x318B, 'M', u'ᆒ'), + (0x318C, 'M', u'ᆔ'), + (0x318D, 'M', u'ᆞ'), + (0x318E, 'M', u'ᆡ'), + (0x318F, 'X'), + (0x3190, 'V'), + (0x3192, 'M', u'一'), + (0x3193, 'M', u'二'), + (0x3194, 'M', u'三'), + (0x3195, 'M', u'å››'), + (0x3196, 'M', u'上'), + (0x3197, 'M', u'中'), + (0x3198, 'M', u'下'), + (0x3199, 'M', u'甲'), + (0x319A, 'M', u'ä¹™'), + (0x319B, 'M', u'丙'), + (0x319C, 'M', u'ä¸'), + (0x319D, 'M', u'天'), + (0x319E, 'M', u'地'), + (0x319F, 'M', u'人'), + (0x31A0, 'V'), + (0x31BB, 'X'), + (0x31C0, 'V'), + (0x31E4, 'X'), + (0x31F0, 'V'), + (0x3200, '3', u'(á„€)'), + (0x3201, '3', u'(á„‚)'), + (0x3202, '3', u'(ᄃ)'), + (0x3203, '3', u'(á„…)'), + (0x3204, '3', u'(ᄆ)'), + (0x3205, '3', u'(ᄇ)'), + (0x3206, '3', u'(ᄉ)'), + (0x3207, '3', u'(á„‹)'), + (0x3208, '3', u'(á„Œ)'), + (0x3209, '3', u'(á„Ž)'), + (0x320A, '3', u'(á„)'), + (0x320B, '3', u'(á„)'), + (0x320C, '3', u'(á„‘)'), + (0x320D, '3', u'(á„’)'), + (0x320E, '3', u'(ê°€)'), + (0x320F, '3', u'(나)'), + (0x3210, '3', u'(다)'), + (0x3211, '3', u'(ë¼)'), + (0x3212, '3', u'(마)'), + (0x3213, '3', u'(ë°”)'), + (0x3214, '3', u'(사)'), + (0x3215, '3', u'(ì•„)'), + (0x3216, '3', u'(ìž)'), + (0x3217, '3', u'(ì°¨)'), + (0x3218, '3', u'(ì¹´)'), + (0x3219, '3', u'(타)'), + (0x321A, '3', u'(파)'), + (0x321B, '3', 
u'(하)'), + (0x321C, '3', u'(주)'), + (0x321D, '3', u'(오전)'), + (0x321E, '3', u'(오후)'), + (0x321F, 'X'), + (0x3220, '3', u'(一)'), + (0x3221, '3', u'(二)'), + (0x3222, '3', u'(三)'), + (0x3223, '3', u'(å››)'), + (0x3224, '3', u'(五)'), + (0x3225, '3', u'(å…­)'), + (0x3226, '3', u'(七)'), + (0x3227, '3', u'(å…«)'), + (0x3228, '3', u'(ä¹)'), + (0x3229, '3', u'(å)'), + (0x322A, '3', u'(月)'), + (0x322B, '3', u'(ç«)'), + (0x322C, '3', u'(æ°´)'), + (0x322D, '3', u'(木)'), + (0x322E, '3', u'(金)'), + (0x322F, '3', u'(土)'), + (0x3230, '3', u'(æ—¥)'), + (0x3231, '3', u'(æ ª)'), + (0x3232, '3', u'(有)'), + (0x3233, '3', u'(社)'), + (0x3234, '3', u'(å)'), + (0x3235, '3', u'(特)'), + (0x3236, '3', u'(財)'), + (0x3237, '3', u'(ç¥)'), + (0x3238, '3', u'(労)'), + (0x3239, '3', u'(代)'), + (0x323A, '3', u'(呼)'), + (0x323B, '3', u'(å­¦)'), + (0x323C, '3', u'(監)'), + (0x323D, '3', u'(ä¼)'), + (0x323E, '3', u'(資)'), + (0x323F, '3', u'(å”)'), + (0x3240, '3', u'(祭)'), + (0x3241, '3', u'(休)'), + (0x3242, '3', u'(自)'), + (0x3243, '3', u'(至)'), + (0x3244, 'M', u'å•'), + (0x3245, 'M', u'å¹¼'), + (0x3246, 'M', u'æ–‡'), + (0x3247, 'M', u'ç®'), + (0x3248, 'V'), + (0x3250, 'M', u'pte'), + (0x3251, 'M', u'21'), + (0x3252, 'M', u'22'), + (0x3253, 'M', u'23'), + (0x3254, 'M', u'24'), + (0x3255, 'M', u'25'), + (0x3256, 'M', u'26'), + (0x3257, 'M', u'27'), + (0x3258, 'M', u'28'), + (0x3259, 'M', u'29'), + (0x325A, 'M', u'30'), + (0x325B, 'M', u'31'), + (0x325C, 'M', u'32'), + (0x325D, 'M', u'33'), + (0x325E, 'M', u'34'), + (0x325F, 'M', u'35'), + (0x3260, 'M', u'á„€'), + (0x3261, 'M', u'á„‚'), + (0x3262, 'M', u'ᄃ'), + (0x3263, 'M', u'á„…'), + (0x3264, 'M', u'ᄆ'), + (0x3265, 'M', u'ᄇ'), + (0x3266, 'M', u'ᄉ'), + (0x3267, 'M', u'á„‹'), + (0x3268, 'M', u'á„Œ'), + (0x3269, 'M', u'á„Ž'), + (0x326A, 'M', u'á„'), + (0x326B, 'M', u'á„'), + (0x326C, 'M', u'á„‘'), + (0x326D, 'M', u'á„’'), + (0x326E, 'M', u'ê°€'), + (0x326F, 'M', u'나'), + (0x3270, 'M', u'다'), + (0x3271, 'M', u'ë¼'), + (0x3272, 'M', u'마'), + (0x3273, 'M', u'ë°”'), + (0x3274, 'M', u'사'), + (0x3275, 'M', u'ì•„'), + (0x3276, 'M', u'ìž'), + (0x3277, 'M', u'ì°¨'), + (0x3278, 'M', u'ì¹´'), + (0x3279, 'M', u'타'), + (0x327A, 'M', u'파'), + (0x327B, 'M', u'하'), + (0x327C, 'M', u'참고'), + (0x327D, 'M', u'주ì˜'), + (0x327E, 'M', u'ìš°'), + (0x327F, 'V'), + (0x3280, 'M', u'一'), + (0x3281, 'M', u'二'), + (0x3282, 'M', u'三'), + (0x3283, 'M', u'å››'), + (0x3284, 'M', u'五'), + (0x3285, 'M', u'å…­'), + (0x3286, 'M', u'七'), + (0x3287, 'M', u'å…«'), + (0x3288, 'M', u'ä¹'), + (0x3289, 'M', u'å'), + (0x328A, 'M', u'月'), + (0x328B, 'M', u'ç«'), + (0x328C, 'M', u'æ°´'), + (0x328D, 'M', u'木'), + (0x328E, 'M', u'金'), + (0x328F, 'M', u'土'), + (0x3290, 'M', u'æ—¥'), + (0x3291, 'M', u'æ ª'), + (0x3292, 'M', u'有'), + (0x3293, 'M', u'社'), + (0x3294, 'M', u'å'), + (0x3295, 'M', u'特'), + (0x3296, 'M', u'財'), + (0x3297, 'M', u'ç¥'), + (0x3298, 'M', u'労'), + (0x3299, 'M', u'秘'), + (0x329A, 'M', u'ç”·'), + (0x329B, 'M', u'女'), + (0x329C, 'M', u'é©'), + (0x329D, 'M', u'優'), + (0x329E, 'M', u'å°'), + (0x329F, 'M', u'注'), + (0x32A0, 'M', u'é …'), + (0x32A1, 'M', u'休'), + (0x32A2, 'M', u'写'), + (0x32A3, 'M', u'æ­£'), + (0x32A4, 'M', u'上'), + (0x32A5, 'M', u'中'), + (0x32A6, 'M', u'下'), + (0x32A7, 'M', u'å·¦'), + (0x32A8, 'M', u'å³'), + (0x32A9, 'M', u'医'), + (0x32AA, 'M', u'å®—'), + (0x32AB, 'M', u'å­¦'), + (0x32AC, 'M', u'監'), + (0x32AD, 'M', u'ä¼'), + (0x32AE, 'M', u'資'), + (0x32AF, 'M', u'å”'), + (0x32B0, 'M', u'夜'), + (0x32B1, 'M', u'36'), + (0x32B2, 'M', u'37'), + (0x32B3, 'M', u'38'), + (0x32B4, 'M', u'39'), + (0x32B5, 
'M', u'40'), + (0x32B6, 'M', u'41'), + (0x32B7, 'M', u'42'), + (0x32B8, 'M', u'43'), + (0x32B9, 'M', u'44'), + (0x32BA, 'M', u'45'), + (0x32BB, 'M', u'46'), + (0x32BC, 'M', u'47'), + (0x32BD, 'M', u'48'), + (0x32BE, 'M', u'49'), + (0x32BF, 'M', u'50'), + (0x32C0, 'M', u'1月'), + (0x32C1, 'M', u'2月'), + (0x32C2, 'M', u'3月'), + (0x32C3, 'M', u'4月'), + (0x32C4, 'M', u'5月'), + (0x32C5, 'M', u'6月'), + (0x32C6, 'M', u'7月'), + (0x32C7, 'M', u'8月'), + (0x32C8, 'M', u'9月'), + (0x32C9, 'M', u'10月'), + (0x32CA, 'M', u'11月'), + (0x32CB, 'M', u'12月'), + (0x32CC, 'M', u'hg'), + (0x32CD, 'M', u'erg'), + (0x32CE, 'M', u'ev'), + (0x32CF, 'M', u'ltd'), + (0x32D0, 'M', u'ã‚¢'), + (0x32D1, 'M', u'イ'), + (0x32D2, 'M', u'ウ'), + (0x32D3, 'M', u'エ'), + (0x32D4, 'M', u'オ'), + (0x32D5, 'M', u'ã‚«'), + (0x32D6, 'M', u'ã‚­'), + (0x32D7, 'M', u'ク'), + (0x32D8, 'M', u'ケ'), + (0x32D9, 'M', u'コ'), + (0x32DA, 'M', u'サ'), + (0x32DB, 'M', u'ã‚·'), + (0x32DC, 'M', u'ス'), + (0x32DD, 'M', u'ã‚»'), + (0x32DE, 'M', u'ソ'), + (0x32DF, 'M', u'ã‚¿'), + (0x32E0, 'M', u'ãƒ'), + (0x32E1, 'M', u'ツ'), + (0x32E2, 'M', u'テ'), + (0x32E3, 'M', u'ト'), + (0x32E4, 'M', u'ナ'), + (0x32E5, 'M', u'ニ'), + (0x32E6, 'M', u'ヌ'), + (0x32E7, 'M', u'ãƒ'), + (0x32E8, 'M', u'ノ'), + (0x32E9, 'M', u'ãƒ'), + (0x32EA, 'M', u'ヒ'), + (0x32EB, 'M', u'フ'), + (0x32EC, 'M', u'ヘ'), + (0x32ED, 'M', u'ホ'), + (0x32EE, 'M', u'マ'), + (0x32EF, 'M', u'ミ'), + (0x32F0, 'M', u'ム'), + (0x32F1, 'M', u'メ'), + (0x32F2, 'M', u'モ'), + (0x32F3, 'M', u'ヤ'), + (0x32F4, 'M', u'ユ'), + (0x32F5, 'M', u'ヨ'), + (0x32F6, 'M', u'ラ'), + (0x32F7, 'M', u'リ'), + (0x32F8, 'M', u'ル'), + (0x32F9, 'M', u'レ'), + (0x32FA, 'M', u'ロ'), + (0x32FB, 'M', u'ワ'), + (0x32FC, 'M', u'ヰ'), + (0x32FD, 'M', u'ヱ'), + (0x32FE, 'M', u'ヲ'), + (0x32FF, 'X'), + (0x3300, 'M', u'アパート'), + (0x3301, 'M', u'アルファ'), + (0x3302, 'M', u'アンペア'), + (0x3303, 'M', u'アール'), + (0x3304, 'M', u'イニング'), + (0x3305, 'M', u'インãƒ'), + (0x3306, 'M', u'ウォン'), + (0x3307, 'M', u'エスクード'), + (0x3308, 'M', u'エーカー'), + (0x3309, 'M', u'オンス'), + (0x330A, 'M', u'オーム'), + (0x330B, 'M', u'カイリ'), + (0x330C, 'M', u'カラット'), + (0x330D, 'M', u'カロリー'), + (0x330E, 'M', u'ガロン'), + (0x330F, 'M', u'ガンマ'), + (0x3310, 'M', u'ギガ'), + (0x3311, 'M', u'ギニー'), + (0x3312, 'M', u'キュリー'), + (0x3313, 'M', u'ギルダー'), + (0x3314, 'M', u'キロ'), + (0x3315, 'M', u'キログラム'), + (0x3316, 'M', u'キロメートル'), + (0x3317, 'M', u'キロワット'), + (0x3318, 'M', u'グラム'), + (0x3319, 'M', u'グラムトン'), + (0x331A, 'M', u'クルゼイロ'), + (0x331B, 'M', u'クローãƒ'), + (0x331C, 'M', u'ケース'), + (0x331D, 'M', u'コルナ'), + (0x331E, 'M', u'コーãƒ'), + (0x331F, 'M', u'サイクル'), + (0x3320, 'M', u'サンãƒãƒ¼ãƒ '), + (0x3321, 'M', u'シリング'), + (0x3322, 'M', u'センãƒ'), + (0x3323, 'M', u'セント'), + (0x3324, 'M', u'ダース'), + (0x3325, 'M', u'デシ'), + (0x3326, 'M', u'ドル'), + (0x3327, 'M', u'トン'), + (0x3328, 'M', u'ナノ'), + (0x3329, 'M', u'ノット'), + (0x332A, 'M', u'ãƒã‚¤ãƒ„'), + (0x332B, 'M', u'パーセント'), + (0x332C, 'M', u'パーツ'), + (0x332D, 'M', u'ãƒãƒ¼ãƒ¬ãƒ«'), + (0x332E, 'M', u'ピアストル'), + (0x332F, 'M', u'ピクル'), + (0x3330, 'M', u'ピコ'), + (0x3331, 'M', u'ビル'), + (0x3332, 'M', u'ファラッド'), + (0x3333, 'M', u'フィート'), + (0x3334, 'M', u'ブッシェル'), + (0x3335, 'M', u'フラン'), + (0x3336, 'M', u'ヘクタール'), + (0x3337, 'M', u'ペソ'), + (0x3338, 'M', u'ペニヒ'), + (0x3339, 'M', u'ヘルツ'), + (0x333A, 'M', u'ペンス'), + (0x333B, 'M', u'ページ'), + (0x333C, 'M', u'ベータ'), + (0x333D, 'M', u'ãƒã‚¤ãƒ³ãƒˆ'), + (0x333E, 'M', u'ボルト'), + (0x333F, 'M', u'ホン'), + (0x3340, 'M', u'ãƒãƒ³ãƒ‰'), + (0x3341, 'M', u'ホール'), + (0x3342, 'M', u'ホーン'), + (0x3343, 'M', u'マイクロ'), + (0x3344, 'M', u'マイル'), + (0x3345, 
'M', u'マッãƒ'), + (0x3346, 'M', u'マルク'), + (0x3347, 'M', u'マンション'), + (0x3348, 'M', u'ミクロン'), + (0x3349, 'M', u'ミリ'), + (0x334A, 'M', u'ミリãƒãƒ¼ãƒ«'), + (0x334B, 'M', u'メガ'), + (0x334C, 'M', u'メガトン'), + (0x334D, 'M', u'メートル'), + (0x334E, 'M', u'ヤード'), + (0x334F, 'M', u'ヤール'), + (0x3350, 'M', u'ユアン'), + (0x3351, 'M', u'リットル'), + (0x3352, 'M', u'リラ'), + (0x3353, 'M', u'ルピー'), + (0x3354, 'M', u'ルーブル'), + (0x3355, 'M', u'レム'), + (0x3356, 'M', u'レントゲン'), + (0x3357, 'M', u'ワット'), + (0x3358, 'M', u'0点'), + (0x3359, 'M', u'1点'), + (0x335A, 'M', u'2点'), + (0x335B, 'M', u'3点'), + (0x335C, 'M', u'4点'), + (0x335D, 'M', u'5点'), + (0x335E, 'M', u'6点'), + (0x335F, 'M', u'7点'), + (0x3360, 'M', u'8点'), + (0x3361, 'M', u'9点'), + (0x3362, 'M', u'10点'), + (0x3363, 'M', u'11点'), + (0x3364, 'M', u'12点'), + (0x3365, 'M', u'13点'), + (0x3366, 'M', u'14点'), + (0x3367, 'M', u'15点'), + (0x3368, 'M', u'16点'), + (0x3369, 'M', u'17点'), + (0x336A, 'M', u'18点'), + (0x336B, 'M', u'19点'), + (0x336C, 'M', u'20点'), + (0x336D, 'M', u'21点'), + (0x336E, 'M', u'22点'), + (0x336F, 'M', u'23点'), + (0x3370, 'M', u'24点'), + (0x3371, 'M', u'hpa'), + (0x3372, 'M', u'da'), + (0x3373, 'M', u'au'), + (0x3374, 'M', u'bar'), + (0x3375, 'M', u'ov'), + (0x3376, 'M', u'pc'), + (0x3377, 'M', u'dm'), + (0x3378, 'M', u'dm2'), + (0x3379, 'M', u'dm3'), + (0x337A, 'M', u'iu'), + (0x337B, 'M', u'å¹³æˆ'), + (0x337C, 'M', u'昭和'), + (0x337D, 'M', u'大正'), + (0x337E, 'M', u'明治'), + (0x337F, 'M', u'æ ªå¼ä¼šç¤¾'), + (0x3380, 'M', u'pa'), + (0x3381, 'M', u'na'), + (0x3382, 'M', u'μa'), + (0x3383, 'M', u'ma'), + (0x3384, 'M', u'ka'), + (0x3385, 'M', u'kb'), + (0x3386, 'M', u'mb'), + (0x3387, 'M', u'gb'), + (0x3388, 'M', u'cal'), + (0x3389, 'M', u'kcal'), + (0x338A, 'M', u'pf'), + (0x338B, 'M', u'nf'), + (0x338C, 'M', u'μf'), + (0x338D, 'M', u'μg'), + (0x338E, 'M', u'mg'), + (0x338F, 'M', u'kg'), + (0x3390, 'M', u'hz'), + (0x3391, 'M', u'khz'), + (0x3392, 'M', u'mhz'), + (0x3393, 'M', u'ghz'), + (0x3394, 'M', u'thz'), + (0x3395, 'M', u'μl'), + (0x3396, 'M', u'ml'), + (0x3397, 'M', u'dl'), + (0x3398, 'M', u'kl'), + (0x3399, 'M', u'fm'), + (0x339A, 'M', u'nm'), + (0x339B, 'M', u'μm'), + (0x339C, 'M', u'mm'), + (0x339D, 'M', u'cm'), + (0x339E, 'M', u'km'), + (0x339F, 'M', u'mm2'), + (0x33A0, 'M', u'cm2'), + (0x33A1, 'M', u'm2'), + (0x33A2, 'M', u'km2'), + (0x33A3, 'M', u'mm3'), + (0x33A4, 'M', u'cm3'), + (0x33A5, 'M', u'm3'), + (0x33A6, 'M', u'km3'), + (0x33A7, 'M', u'm∕s'), + (0x33A8, 'M', u'm∕s2'), + (0x33A9, 'M', u'pa'), + (0x33AA, 'M', u'kpa'), + (0x33AB, 'M', u'mpa'), + (0x33AC, 'M', u'gpa'), + (0x33AD, 'M', u'rad'), + (0x33AE, 'M', u'rad∕s'), + (0x33AF, 'M', u'rad∕s2'), + (0x33B0, 'M', u'ps'), + (0x33B1, 'M', u'ns'), + (0x33B2, 'M', u'μs'), + (0x33B3, 'M', u'ms'), + (0x33B4, 'M', u'pv'), + (0x33B5, 'M', u'nv'), + (0x33B6, 'M', u'μv'), + (0x33B7, 'M', u'mv'), + (0x33B8, 'M', u'kv'), + (0x33B9, 'M', u'mv'), + (0x33BA, 'M', u'pw'), + (0x33BB, 'M', u'nw'), + (0x33BC, 'M', u'μw'), + (0x33BD, 'M', u'mw'), + (0x33BE, 'M', u'kw'), + (0x33BF, 'M', u'mw'), + (0x33C0, 'M', u'kω'), + (0x33C1, 'M', u'mω'), + (0x33C2, 'X'), + (0x33C3, 'M', u'bq'), + (0x33C4, 'M', u'cc'), + (0x33C5, 'M', u'cd'), + (0x33C6, 'M', u'c∕kg'), + (0x33C7, 'X'), + (0x33C8, 'M', u'db'), + (0x33C9, 'M', u'gy'), + (0x33CA, 'M', u'ha'), + (0x33CB, 'M', u'hp'), + (0x33CC, 'M', u'in'), + (0x33CD, 'M', u'kk'), + (0x33CE, 'M', u'km'), + (0x33CF, 'M', u'kt'), + (0x33D0, 'M', u'lm'), + (0x33D1, 'M', u'ln'), + (0x33D2, 'M', u'log'), + (0x33D3, 'M', u'lx'), + (0x33D4, 'M', u'mb'), + (0x33D5, 'M', u'mil'), + 
(0x33D6, 'M', u'mol'), + (0x33D7, 'M', u'ph'), + (0x33D8, 'X'), + (0x33D9, 'M', u'ppm'), + (0x33DA, 'M', u'pr'), + (0x33DB, 'M', u'sr'), + (0x33DC, 'M', u'sv'), + (0x33DD, 'M', u'wb'), + (0x33DE, 'M', u'v∕m'), + (0x33DF, 'M', u'a∕m'), + (0x33E0, 'M', u'1æ—¥'), + (0x33E1, 'M', u'2æ—¥'), + (0x33E2, 'M', u'3æ—¥'), + (0x33E3, 'M', u'4æ—¥'), + (0x33E4, 'M', u'5æ—¥'), + (0x33E5, 'M', u'6æ—¥'), + (0x33E6, 'M', u'7æ—¥'), + (0x33E7, 'M', u'8æ—¥'), + (0x33E8, 'M', u'9æ—¥'), + (0x33E9, 'M', u'10æ—¥'), + (0x33EA, 'M', u'11æ—¥'), + (0x33EB, 'M', u'12æ—¥'), + (0x33EC, 'M', u'13æ—¥'), + (0x33ED, 'M', u'14æ—¥'), + (0x33EE, 'M', u'15æ—¥'), + (0x33EF, 'M', u'16æ—¥'), + (0x33F0, 'M', u'17æ—¥'), + (0x33F1, 'M', u'18æ—¥'), + (0x33F2, 'M', u'19æ—¥'), + (0x33F3, 'M', u'20æ—¥'), + (0x33F4, 'M', u'21æ—¥'), + (0x33F5, 'M', u'22æ—¥'), + (0x33F6, 'M', u'23æ—¥'), + (0x33F7, 'M', u'24æ—¥'), + (0x33F8, 'M', u'25æ—¥'), + (0x33F9, 'M', u'26æ—¥'), + (0x33FA, 'M', u'27æ—¥'), + (0x33FB, 'M', u'28æ—¥'), + (0x33FC, 'M', u'29æ—¥'), + (0x33FD, 'M', u'30æ—¥'), + (0x33FE, 'M', u'31æ—¥'), + (0x33FF, 'M', u'gal'), + (0x3400, 'V'), + (0x4DB6, 'X'), + (0x4DC0, 'V'), + (0x9FCD, 'X'), + (0xA000, 'V'), + (0xA48D, 'X'), + (0xA490, 'V'), + (0xA4C7, 'X'), + (0xA4D0, 'V'), + (0xA62C, 'X'), + (0xA640, 'M', u'ê™'), + (0xA641, 'V'), + (0xA642, 'M', u'ꙃ'), + (0xA643, 'V'), + (0xA644, 'M', u'ê™…'), + (0xA645, 'V'), + (0xA646, 'M', u'ꙇ'), + (0xA647, 'V'), + (0xA648, 'M', u'ꙉ'), + (0xA649, 'V'), + (0xA64A, 'M', u'ꙋ'), + (0xA64B, 'V'), + (0xA64C, 'M', u'ê™'), + (0xA64D, 'V'), + (0xA64E, 'M', u'ê™'), + (0xA64F, 'V'), + (0xA650, 'M', u'ꙑ'), + (0xA651, 'V'), + (0xA652, 'M', u'ꙓ'), + (0xA653, 'V'), + (0xA654, 'M', u'ꙕ'), + (0xA655, 'V'), + (0xA656, 'M', u'ê™—'), + (0xA657, 'V'), + (0xA658, 'M', u'ê™™'), + (0xA659, 'V'), + (0xA65A, 'M', u'ê™›'), + (0xA65B, 'V'), + (0xA65C, 'M', u'ê™'), + (0xA65D, 'V'), + (0xA65E, 'M', u'ꙟ'), + (0xA65F, 'V'), + (0xA660, 'M', u'ꙡ'), + (0xA661, 'V'), + (0xA662, 'M', u'ꙣ'), + (0xA663, 'V'), + (0xA664, 'M', u'ꙥ'), + (0xA665, 'V'), + (0xA666, 'M', u'ꙧ'), + (0xA667, 'V'), + (0xA668, 'M', u'ꙩ'), + (0xA669, 'V'), + (0xA66A, 'M', u'ꙫ'), + (0xA66B, 'V'), + (0xA66C, 'M', u'ê™­'), + (0xA66D, 'V'), + (0xA680, 'M', u'êš'), + (0xA681, 'V'), + (0xA682, 'M', u'ꚃ'), + (0xA683, 'V'), + (0xA684, 'M', u'êš…'), + (0xA685, 'V'), + (0xA686, 'M', u'ꚇ'), + (0xA687, 'V'), + (0xA688, 'M', u'ꚉ'), + (0xA689, 'V'), + (0xA68A, 'M', u'êš‹'), + (0xA68B, 'V'), + (0xA68C, 'M', u'êš'), + (0xA68D, 'V'), + (0xA68E, 'M', u'êš'), + (0xA68F, 'V'), + (0xA690, 'M', u'êš‘'), + (0xA691, 'V'), + (0xA692, 'M', u'êš“'), + (0xA693, 'V'), + (0xA694, 'M', u'êš•'), + (0xA695, 'V'), + (0xA696, 'M', u'êš—'), + (0xA697, 'V'), + (0xA698, 'X'), + (0xA69F, 'V'), + (0xA6F8, 'X'), + (0xA700, 'V'), + (0xA722, 'M', u'ꜣ'), + (0xA723, 'V'), + (0xA724, 'M', u'ꜥ'), + (0xA725, 'V'), + (0xA726, 'M', u'ꜧ'), + (0xA727, 'V'), + (0xA728, 'M', u'ꜩ'), + (0xA729, 'V'), + (0xA72A, 'M', u'ꜫ'), + (0xA72B, 'V'), + (0xA72C, 'M', u'ꜭ'), + (0xA72D, 'V'), + (0xA72E, 'M', u'ꜯ'), + (0xA72F, 'V'), + (0xA732, 'M', u'ꜳ'), + (0xA733, 'V'), + (0xA734, 'M', u'ꜵ'), + (0xA735, 'V'), + (0xA736, 'M', u'ꜷ'), + (0xA737, 'V'), + (0xA738, 'M', u'ꜹ'), + (0xA739, 'V'), + (0xA73A, 'M', u'ꜻ'), + (0xA73B, 'V'), + (0xA73C, 'M', u'ꜽ'), + (0xA73D, 'V'), + (0xA73E, 'M', u'ꜿ'), + (0xA73F, 'V'), + (0xA740, 'M', u'ê'), + (0xA741, 'V'), + (0xA742, 'M', u'êƒ'), + (0xA743, 'V'), + (0xA744, 'M', u'ê…'), + (0xA745, 'V'), + (0xA746, 'M', u'ê‡'), + (0xA747, 'V'), + (0xA748, 'M', u'ê‰'), + (0xA749, 'V'), + (0xA74A, 'M', u'ê‹'), + (0xA74B, 
'V'), + (0xA74C, 'M', u'ê'), + (0xA74D, 'V'), + (0xA74E, 'M', u'ê'), + (0xA74F, 'V'), + (0xA750, 'M', u'ê‘'), + (0xA751, 'V'), + (0xA752, 'M', u'ê“'), + (0xA753, 'V'), + (0xA754, 'M', u'ê•'), + (0xA755, 'V'), + (0xA756, 'M', u'ê—'), + (0xA757, 'V'), + (0xA758, 'M', u'ê™'), + (0xA759, 'V'), + (0xA75A, 'M', u'ê›'), + (0xA75B, 'V'), + (0xA75C, 'M', u'ê'), + (0xA75D, 'V'), + (0xA75E, 'M', u'êŸ'), + (0xA75F, 'V'), + (0xA760, 'M', u'ê¡'), + (0xA761, 'V'), + (0xA762, 'M', u'ê£'), + (0xA763, 'V'), + (0xA764, 'M', u'ê¥'), + (0xA765, 'V'), + (0xA766, 'M', u'ê§'), + (0xA767, 'V'), + (0xA768, 'M', u'ê©'), + (0xA769, 'V'), + (0xA76A, 'M', u'ê«'), + (0xA76B, 'V'), + (0xA76C, 'M', u'ê­'), + (0xA76D, 'V'), + (0xA76E, 'M', u'ê¯'), + (0xA76F, 'V'), + (0xA770, 'M', u'ê¯'), + (0xA771, 'V'), + (0xA779, 'M', u'êº'), + (0xA77A, 'V'), + (0xA77B, 'M', u'ê¼'), + (0xA77C, 'V'), + (0xA77D, 'M', u'áµ¹'), + (0xA77E, 'M', u'ê¿'), + (0xA77F, 'V'), + (0xA780, 'M', u'êž'), + (0xA781, 'V'), + (0xA782, 'M', u'ꞃ'), + (0xA783, 'V'), + (0xA784, 'M', u'êž…'), + (0xA785, 'V'), + (0xA786, 'M', u'ꞇ'), + (0xA787, 'V'), + (0xA78B, 'M', u'ꞌ'), + (0xA78C, 'V'), + (0xA78D, 'M', u'É¥'), + (0xA78E, 'V'), + (0xA78F, 'X'), + (0xA790, 'M', u'êž‘'), + (0xA791, 'V'), + (0xA792, 'M', u'êž“'), + (0xA793, 'V'), + (0xA794, 'X'), + (0xA7A0, 'M', u'êž¡'), + (0xA7A1, 'V'), + (0xA7A2, 'M', u'ꞣ'), + (0xA7A3, 'V'), + (0xA7A4, 'M', u'ꞥ'), + (0xA7A5, 'V'), + (0xA7A6, 'M', u'ꞧ'), + (0xA7A7, 'V'), + (0xA7A8, 'M', u'êž©'), + (0xA7A9, 'V'), + (0xA7AA, 'M', u'ɦ'), + (0xA7AB, 'X'), + (0xA7F8, 'M', u'ħ'), + (0xA7F9, 'M', u'Å“'), + (0xA7FA, 'V'), + (0xA82C, 'X'), + (0xA830, 'V'), + (0xA83A, 'X'), + (0xA840, 'V'), + (0xA878, 'X'), + (0xA880, 'V'), + (0xA8C5, 'X'), + (0xA8CE, 'V'), + (0xA8DA, 'X'), + (0xA8E0, 'V'), + (0xA8FC, 'X'), + (0xA900, 'V'), + (0xA954, 'X'), + (0xA95F, 'V'), + (0xA97D, 'X'), + (0xA980, 'V'), + (0xA9CE, 'X'), + (0xA9CF, 'V'), + (0xA9DA, 'X'), + (0xA9DE, 'V'), + (0xA9E0, 'X'), + (0xAA00, 'V'), + (0xAA37, 'X'), + (0xAA40, 'V'), + (0xAA4E, 'X'), + (0xAA50, 'V'), + (0xAA5A, 'X'), + (0xAA5C, 'V'), + (0xAA7C, 'X'), + (0xAA80, 'V'), + (0xAAC3, 'X'), + (0xAADB, 'V'), + (0xAAF7, 'X'), + (0xAB01, 'V'), + (0xAB07, 'X'), + (0xAB09, 'V'), + (0xAB0F, 'X'), + (0xAB11, 'V'), + (0xAB17, 'X'), + (0xAB20, 'V'), + (0xAB27, 'X'), + (0xAB28, 'V'), + (0xAB2F, 'X'), + (0xABC0, 'V'), + (0xABEE, 'X'), + (0xABF0, 'V'), + (0xABFA, 'X'), + (0xAC00, 'V'), + (0xD7A4, 'X'), + (0xD7B0, 'V'), + (0xD7C7, 'X'), + (0xD7CB, 'V'), + (0xD7FC, 'X'), + (0xF900, 'M', u'豈'), + (0xF901, 'M', u'æ›´'), + (0xF902, 'M', u'車'), + (0xF903, 'M', u'賈'), + (0xF904, 'M', u'滑'), + (0xF905, 'M', u'串'), + (0xF906, 'M', u'å¥'), + (0xF907, 'M', u'龜'), + (0xF909, 'M', u'契'), + (0xF90A, 'M', u'金'), + (0xF90B, 'M', u'å–‡'), + (0xF90C, 'M', u'奈'), + (0xF90D, 'M', u'懶'), + (0xF90E, 'M', u'癩'), + (0xF90F, 'M', u'ç¾…'), + (0xF910, 'M', u'蘿'), + (0xF911, 'M', u'螺'), + (0xF912, 'M', u'裸'), + (0xF913, 'M', u'é‚'), + (0xF914, 'M', u'樂'), + (0xF915, 'M', u'æ´›'), + (0xF916, 'M', u'烙'), + (0xF917, 'M', u'çž'), + (0xF918, 'M', u'è½'), + (0xF919, 'M', u'é…ª'), + (0xF91A, 'M', u'駱'), + (0xF91B, 'M', u'亂'), + (0xF91C, 'M', u'åµ'), + (0xF91D, 'M', u'欄'), + (0xF91E, 'M', u'爛'), + (0xF91F, 'M', u'蘭'), + (0xF920, 'M', u'鸞'), + (0xF921, 'M', u'åµ'), + (0xF922, 'M', u'æ¿«'), + (0xF923, 'M', u'è—'), + (0xF924, 'M', u'襤'), + (0xF925, 'M', u'拉'), + (0xF926, 'M', u'臘'), + (0xF927, 'M', u'è Ÿ'), + (0xF928, 'M', u'廊'), + (0xF929, 'M', u'朗'), + (0xF92A, 'M', u'浪'), + (0xF92B, 'M', u'狼'), + (0xF92C, 'M', u'郎'), + (0xF92D, 'M', 
u'來'), + (0xF92E, 'M', u'冷'), + (0xF92F, 'M', u'å‹ž'), + (0xF930, 'M', u'æ“„'), + (0xF931, 'M', u'æ«“'), + (0xF932, 'M', u'çˆ'), + (0xF933, 'M', u'盧'), + (0xF934, 'M', u'è€'), + (0xF935, 'M', u'蘆'), + (0xF936, 'M', u'虜'), + (0xF937, 'M', u'è·¯'), + (0xF938, 'M', u'露'), + (0xF939, 'M', u'é­¯'), + (0xF93A, 'M', u'é·º'), + (0xF93B, 'M', u'碌'), + (0xF93C, 'M', u'祿'), + (0xF93D, 'M', u'綠'), + (0xF93E, 'M', u'è‰'), + (0xF93F, 'M', u'錄'), + (0xF940, 'M', u'鹿'), + (0xF941, 'M', u'è«–'), + (0xF942, 'M', u'壟'), + (0xF943, 'M', u'弄'), + (0xF944, 'M', u'ç± '), + (0xF945, 'M', u'è¾'), + (0xF946, 'M', u'牢'), + (0xF947, 'M', u'磊'), + (0xF948, 'M', u'賂'), + (0xF949, 'M', u'é›·'), + (0xF94A, 'M', u'壘'), + (0xF94B, 'M', u'å±¢'), + (0xF94C, 'M', u'樓'), + (0xF94D, 'M', u'æ·š'), + (0xF94E, 'M', u'æ¼'), + (0xF94F, 'M', u'ç´¯'), + (0xF950, 'M', u'縷'), + (0xF951, 'M', u'陋'), + (0xF952, 'M', u'å‹’'), + (0xF953, 'M', u'è‚‹'), + (0xF954, 'M', u'凜'), + (0xF955, 'M', u'凌'), + (0xF956, 'M', u'稜'), + (0xF957, 'M', u'綾'), + (0xF958, 'M', u'è±'), + (0xF959, 'M', u'陵'), + (0xF95A, 'M', u'讀'), + (0xF95B, 'M', u'æ‹'), + (0xF95C, 'M', u'樂'), + (0xF95D, 'M', u'諾'), + (0xF95E, 'M', u'丹'), + (0xF95F, 'M', u'寧'), + (0xF960, 'M', u'怒'), + (0xF961, 'M', u'率'), + (0xF962, 'M', u'ç•°'), + (0xF963, 'M', u'北'), + (0xF964, 'M', u'磻'), + (0xF965, 'M', u'便'), + (0xF966, 'M', u'復'), + (0xF967, 'M', u'ä¸'), + (0xF968, 'M', u'泌'), + (0xF969, 'M', u'數'), + (0xF96A, 'M', u'ç´¢'), + (0xF96B, 'M', u'åƒ'), + (0xF96C, 'M', u'å¡ž'), + (0xF96D, 'M', u'çœ'), + (0xF96E, 'M', u'葉'), + (0xF96F, 'M', u'說'), + (0xF970, 'M', u'殺'), + (0xF971, 'M', u'è¾°'), + (0xF972, 'M', u'沈'), + (0xF973, 'M', u'拾'), + (0xF974, 'M', u'è‹¥'), + (0xF975, 'M', u'掠'), + (0xF976, 'M', u'ç•¥'), + (0xF977, 'M', u'亮'), + (0xF978, 'M', u'å…©'), + (0xF979, 'M', u'凉'), + (0xF97A, 'M', u'æ¢'), + (0xF97B, 'M', u'糧'), + (0xF97C, 'M', u'良'), + (0xF97D, 'M', u'è«’'), + (0xF97E, 'M', u'é‡'), + (0xF97F, 'M', u'勵'), + (0xF980, 'M', u'å‘‚'), + (0xF981, 'M', u'女'), + (0xF982, 'M', u'廬'), + (0xF983, 'M', u'æ—…'), + (0xF984, 'M', u'濾'), + (0xF985, 'M', u'礪'), + (0xF986, 'M', u'é–­'), + (0xF987, 'M', u'驪'), + (0xF988, 'M', u'麗'), + (0xF989, 'M', u'黎'), + (0xF98A, 'M', u'力'), + (0xF98B, 'M', u'曆'), + (0xF98C, 'M', u'æ­·'), + (0xF98D, 'M', u'è½¢'), + (0xF98E, 'M', u'å¹´'), + (0xF98F, 'M', u'æ†'), + (0xF990, 'M', u'戀'), + (0xF991, 'M', u'æ’š'), + (0xF992, 'M', u'æ¼£'), + (0xF993, 'M', u'ç…‰'), + (0xF994, 'M', u'ç’‰'), + (0xF995, 'M', u'秊'), + (0xF996, 'M', u'ç·´'), + (0xF997, 'M', u'è¯'), + (0xF998, 'M', u'輦'), + (0xF999, 'M', u'è“®'), + (0xF99A, 'M', u'連'), + (0xF99B, 'M', u'éŠ'), + (0xF99C, 'M', u'列'), + (0xF99D, 'M', u'劣'), + (0xF99E, 'M', u'å’½'), + (0xF99F, 'M', u'烈'), + (0xF9A0, 'M', u'裂'), + (0xF9A1, 'M', u'說'), + (0xF9A2, 'M', u'廉'), + (0xF9A3, 'M', u'念'), + (0xF9A4, 'M', u'æ»'), + (0xF9A5, 'M', u'æ®®'), + (0xF9A6, 'M', u'ç°¾'), + (0xF9A7, 'M', u'çµ'), + (0xF9A8, 'M', u'令'), + (0xF9A9, 'M', u'囹'), + (0xF9AA, 'M', u'寧'), + (0xF9AB, 'M', u'嶺'), + (0xF9AC, 'M', u'怜'), + (0xF9AD, 'M', u'玲'), + (0xF9AE, 'M', u'ç‘©'), + (0xF9AF, 'M', u'羚'), + (0xF9B0, 'M', u'è†'), + (0xF9B1, 'M', u'鈴'), + (0xF9B2, 'M', u'零'), + (0xF9B3, 'M', u'éˆ'), + (0xF9B4, 'M', u'é ˜'), + (0xF9B5, 'M', u'例'), + (0xF9B6, 'M', u'禮'), + (0xF9B7, 'M', u'醴'), + (0xF9B8, 'M', u'隸'), + (0xF9B9, 'M', u'惡'), + (0xF9BA, 'M', u'了'), + (0xF9BB, 'M', u'僚'), + (0xF9BC, 'M', u'寮'), + (0xF9BD, 'M', u'å°¿'), + (0xF9BE, 'M', u'æ–™'), + (0xF9BF, 'M', u'樂'), + (0xF9C0, 'M', u'燎'), + (0xF9C1, 'M', u'療'), + (0xF9C2, 'M', u'蓼'), + (0xF9C3, 'M', 
u'é¼'), + (0xF9C4, 'M', u'é¾'), + (0xF9C5, 'M', u'暈'), + (0xF9C6, 'M', u'阮'), + (0xF9C7, 'M', u'劉'), + (0xF9C8, 'M', u'æ»'), + (0xF9C9, 'M', u'柳'), + (0xF9CA, 'M', u'æµ'), + (0xF9CB, 'M', u'溜'), + (0xF9CC, 'M', u'ç‰'), + (0xF9CD, 'M', u'ç•™'), + (0xF9CE, 'M', u'ç¡«'), + (0xF9CF, 'M', u'ç´'), + (0xF9D0, 'M', u'é¡ž'), + (0xF9D1, 'M', u'å…­'), + (0xF9D2, 'M', u'戮'), + (0xF9D3, 'M', u'陸'), + (0xF9D4, 'M', u'倫'), + (0xF9D5, 'M', u'å´™'), + (0xF9D6, 'M', u'æ·ª'), + (0xF9D7, 'M', u'輪'), + (0xF9D8, 'M', u'律'), + (0xF9D9, 'M', u'æ…„'), + (0xF9DA, 'M', u'æ —'), + (0xF9DB, 'M', u'率'), + (0xF9DC, 'M', u'隆'), + (0xF9DD, 'M', u'利'), + (0xF9DE, 'M', u'å'), + (0xF9DF, 'M', u'å±¥'), + (0xF9E0, 'M', u'易'), + (0xF9E1, 'M', u'æŽ'), + (0xF9E2, 'M', u'梨'), + (0xF9E3, 'M', u'æ³¥'), + (0xF9E4, 'M', u'ç†'), + (0xF9E5, 'M', u'ç—¢'), + (0xF9E6, 'M', u'ç½¹'), + (0xF9E7, 'M', u'è£'), + (0xF9E8, 'M', u'裡'), + (0xF9E9, 'M', u'里'), + (0xF9EA, 'M', u'離'), + (0xF9EB, 'M', u'匿'), + (0xF9EC, 'M', u'溺'), + (0xF9ED, 'M', u'å'), + (0xF9EE, 'M', u'ç‡'), + (0xF9EF, 'M', u'ç’˜'), + (0xF9F0, 'M', u'è—º'), + (0xF9F1, 'M', u'隣'), + (0xF9F2, 'M', u'é±—'), + (0xF9F3, 'M', u'麟'), + (0xF9F4, 'M', u'æž—'), + (0xF9F5, 'M', u'æ·‹'), + (0xF9F6, 'M', u'臨'), + (0xF9F7, 'M', u'ç«‹'), + (0xF9F8, 'M', u'笠'), + (0xF9F9, 'M', u'ç²’'), + (0xF9FA, 'M', u'ç‹€'), + (0xF9FB, 'M', u'ç‚™'), + (0xF9FC, 'M', u'è­˜'), + (0xF9FD, 'M', u'什'), + (0xF9FE, 'M', u'茶'), + (0xF9FF, 'M', u'刺'), + (0xFA00, 'M', u'切'), + (0xFA01, 'M', u'度'), + (0xFA02, 'M', u'æ‹“'), + (0xFA03, 'M', u'ç³–'), + (0xFA04, 'M', u'å®…'), + (0xFA05, 'M', u'æ´ž'), + (0xFA06, 'M', u'æš´'), + (0xFA07, 'M', u'è¼»'), + (0xFA08, 'M', u'è¡Œ'), + (0xFA09, 'M', u'é™'), + (0xFA0A, 'M', u'見'), + (0xFA0B, 'M', u'廓'), + (0xFA0C, 'M', u'å…€'), + (0xFA0D, 'M', u'å—€'), + (0xFA0E, 'V'), + (0xFA10, 'M', u'å¡š'), + (0xFA11, 'V'), + (0xFA12, 'M', u'æ™´'), + (0xFA13, 'V'), + (0xFA15, 'M', u'凞'), + (0xFA16, 'M', u'猪'), + (0xFA17, 'M', u'益'), + (0xFA18, 'M', u'礼'), + (0xFA19, 'M', u'神'), + (0xFA1A, 'M', u'祥'), + (0xFA1B, 'M', u'ç¦'), + (0xFA1C, 'M', u'é–'), + (0xFA1D, 'M', u'ç²¾'), + (0xFA1E, 'M', u'ç¾½'), + (0xFA1F, 'V'), + (0xFA20, 'M', u'蘒'), + (0xFA21, 'V'), + (0xFA22, 'M', u'諸'), + (0xFA23, 'V'), + (0xFA25, 'M', u'逸'), + (0xFA26, 'M', u'都'), + (0xFA27, 'V'), + (0xFA2A, 'M', u'飯'), + (0xFA2B, 'M', u'飼'), + (0xFA2C, 'M', u'館'), + (0xFA2D, 'M', u'鶴'), + (0xFA2E, 'M', u'郞'), + (0xFA2F, 'M', u'éš·'), + (0xFA30, 'M', u'ä¾®'), + (0xFA31, 'M', u'僧'), + (0xFA32, 'M', u'å…'), + (0xFA33, 'M', u'勉'), + (0xFA34, 'M', u'勤'), + (0xFA35, 'M', u'å‘'), + (0xFA36, 'M', u'å–'), + (0xFA37, 'M', u'嘆'), + (0xFA38, 'M', u'器'), + (0xFA39, 'M', u'å¡€'), + (0xFA3A, 'M', u'墨'), + (0xFA3B, 'M', u'層'), + (0xFA3C, 'M', u'å±®'), + (0xFA3D, 'M', u'æ‚”'), + (0xFA3E, 'M', u'æ…¨'), + (0xFA3F, 'M', u'憎'), + (0xFA40, 'M', u'懲'), + (0xFA41, 'M', u'æ•'), + (0xFA42, 'M', u'æ—¢'), + (0xFA43, 'M', u'æš‘'), + (0xFA44, 'M', u'梅'), + (0xFA45, 'M', u'æµ·'), + (0xFA46, 'M', u'渚'), + (0xFA47, 'M', u'æ¼¢'), + (0xFA48, 'M', u'ç…®'), + (0xFA49, 'M', u'爫'), + (0xFA4A, 'M', u'ç¢'), + (0xFA4B, 'M', u'碑'), + (0xFA4C, 'M', u'社'), + (0xFA4D, 'M', u'祉'), + (0xFA4E, 'M', u'祈'), + (0xFA4F, 'M', u'ç¥'), + (0xFA50, 'M', u'祖'), + (0xFA51, 'M', u'ç¥'), + (0xFA52, 'M', u'ç¦'), + (0xFA53, 'M', u'禎'), + (0xFA54, 'M', u'ç©€'), + (0xFA55, 'M', u'çª'), + (0xFA56, 'M', u'節'), + (0xFA57, 'M', u'ç·´'), + (0xFA58, 'M', u'縉'), + (0xFA59, 'M', u'ç¹'), + (0xFA5A, 'M', u'ç½²'), + (0xFA5B, 'M', u'者'), + (0xFA5C, 'M', u'臭'), + (0xFA5D, 'M', u'艹'), + (0xFA5F, 'M', u'è‘—'), + (0xFA60, 'M', 
u'è¤'), + (0xFA61, 'M', u'視'), + (0xFA62, 'M', u'è¬'), + (0xFA63, 'M', u'謹'), + (0xFA64, 'M', u'賓'), + (0xFA65, 'M', u'è´ˆ'), + (0xFA66, 'M', u'辶'), + (0xFA67, 'M', u'逸'), + (0xFA68, 'M', u'難'), + (0xFA69, 'M', u'響'), + (0xFA6A, 'M', u'é »'), + (0xFA6B, 'M', u'æµ'), + (0xFA6C, 'M', u'𤋮'), + (0xFA6D, 'M', u'舘'), + (0xFA6E, 'X'), + (0xFA70, 'M', u'並'), + (0xFA71, 'M', u'况'), + (0xFA72, 'M', u'å…¨'), + (0xFA73, 'M', u'ä¾€'), + (0xFA74, 'M', u'å……'), + (0xFA75, 'M', u'冀'), + (0xFA76, 'M', u'勇'), + (0xFA77, 'M', u'勺'), + (0xFA78, 'M', u'å–'), + (0xFA79, 'M', u'å••'), + (0xFA7A, 'M', u'å–™'), + (0xFA7B, 'M', u'å—¢'), + (0xFA7C, 'M', u'å¡š'), + (0xFA7D, 'M', u'墳'), + (0xFA7E, 'M', u'奄'), + (0xFA7F, 'M', u'奔'), + (0xFA80, 'M', u'å©¢'), + (0xFA81, 'M', u'嬨'), + (0xFA82, 'M', u'å»’'), + (0xFA83, 'M', u'å»™'), + (0xFA84, 'M', u'彩'), + (0xFA85, 'M', u'å¾­'), + (0xFA86, 'M', u'惘'), + (0xFA87, 'M', u'æ…Ž'), + (0xFA88, 'M', u'愈'), + (0xFA89, 'M', u'憎'), + (0xFA8A, 'M', u'æ… '), + (0xFA8B, 'M', u'懲'), + (0xFA8C, 'M', u'戴'), + (0xFA8D, 'M', u'æ„'), + (0xFA8E, 'M', u'æœ'), + (0xFA8F, 'M', u'æ‘’'), + (0xFA90, 'M', u'æ•–'), + (0xFA91, 'M', u'æ™´'), + (0xFA92, 'M', u'朗'), + (0xFA93, 'M', u'望'), + (0xFA94, 'M', u'æ–'), + (0xFA95, 'M', u'æ­¹'), + (0xFA96, 'M', u'殺'), + (0xFA97, 'M', u'æµ'), + (0xFA98, 'M', u'æ»›'), + (0xFA99, 'M', u'滋'), + (0xFA9A, 'M', u'æ¼¢'), + (0xFA9B, 'M', u'瀞'), + (0xFA9C, 'M', u'ç…®'), + (0xFA9D, 'M', u'瞧'), + (0xFA9E, 'M', u'爵'), + (0xFA9F, 'M', u'犯'), + (0xFAA0, 'M', u'猪'), + (0xFAA1, 'M', u'瑱'), + (0xFAA2, 'M', u'甆'), + (0xFAA3, 'M', u'ç”»'), + (0xFAA4, 'M', u'ç˜'), + (0xFAA5, 'M', u'瘟'), + (0xFAA6, 'M', u'益'), + (0xFAA7, 'M', u'ç››'), + (0xFAA8, 'M', u'ç›´'), + (0xFAA9, 'M', u'çŠ'), + (0xFAAA, 'M', u'ç€'), + (0xFAAB, 'M', u'磌'), + (0xFAAC, 'M', u'窱'), + (0xFAAD, 'M', u'節'), + (0xFAAE, 'M', u'ç±»'), + (0xFAAF, 'M', u'çµ›'), + (0xFAB0, 'M', u'ç·´'), + (0xFAB1, 'M', u'ç¼¾'), + (0xFAB2, 'M', u'者'), + (0xFAB3, 'M', u'è’'), + (0xFAB4, 'M', u'è¯'), + (0xFAB5, 'M', u'è¹'), + (0xFAB6, 'M', u'è¥'), + (0xFAB7, 'M', u'覆'), + (0xFAB8, 'M', u'視'), + (0xFAB9, 'M', u'調'), + (0xFABA, 'M', u'諸'), + (0xFABB, 'M', u'è«‹'), + (0xFABC, 'M', u'è¬'), + (0xFABD, 'M', u'諾'), + (0xFABE, 'M', u'è«­'), + (0xFABF, 'M', u'謹'), + (0xFAC0, 'M', u'變'), + (0xFAC1, 'M', u'è´ˆ'), + (0xFAC2, 'M', u'輸'), + (0xFAC3, 'M', u'é²'), + (0xFAC4, 'M', u'醙'), + (0xFAC5, 'M', u'鉶'), + (0xFAC6, 'M', u'陼'), + (0xFAC7, 'M', u'難'), + (0xFAC8, 'M', u'é–'), + (0xFAC9, 'M', u'韛'), + (0xFACA, 'M', u'響'), + (0xFACB, 'M', u'é ‹'), + (0xFACC, 'M', u'é »'), + (0xFACD, 'M', u'鬒'), + (0xFACE, 'M', u'龜'), + (0xFACF, 'M', u'𢡊'), + (0xFAD0, 'M', u'𢡄'), + (0xFAD1, 'M', u'ð£•'), + (0xFAD2, 'M', u'ã®'), + (0xFAD3, 'M', u'䀘'), + (0xFAD4, 'M', u'䀹'), + (0xFAD5, 'M', u'𥉉'), + (0xFAD6, 'M', u'ð¥³'), + (0xFAD7, 'M', u'𧻓'), + (0xFAD8, 'M', u'齃'), + (0xFAD9, 'M', u'龎'), + (0xFADA, 'X'), + (0xFB00, 'M', u'ff'), + (0xFB01, 'M', u'fi'), + (0xFB02, 'M', u'fl'), + (0xFB03, 'M', u'ffi'), + (0xFB04, 'M', u'ffl'), + (0xFB05, 'M', u'st'), + (0xFB07, 'X'), + (0xFB13, 'M', u'Õ´Õ¶'), + (0xFB14, 'M', u'Õ´Õ¥'), + (0xFB15, 'M', u'Õ´Õ«'), + (0xFB16, 'M', u'Õ¾Õ¶'), + (0xFB17, 'M', u'Õ´Õ­'), + (0xFB18, 'X'), + (0xFB1D, 'M', u'×™Ö´'), + (0xFB1E, 'V'), + (0xFB1F, 'M', u'ײַ'), + (0xFB20, 'M', u'×¢'), + (0xFB21, 'M', u'×'), + (0xFB22, 'M', u'ד'), + (0xFB23, 'M', u'×”'), + (0xFB24, 'M', u'×›'), + (0xFB25, 'M', u'ל'), + (0xFB26, 'M', u'×'), + (0xFB27, 'M', u'ר'), + (0xFB28, 'M', u'ת'), + (0xFB29, '3', u'+'), + (0xFB2A, 'M', u'ש×'), + (0xFB2B, 'M', u'שׂ'), + (0xFB2C, 'M', u'שּ×'), + 
(0xFB2D, 'M', u'שּׂ'), + (0xFB2E, 'M', u'×Ö·'), + (0xFB2F, 'M', u'×Ö¸'), + (0xFB30, 'M', u'×Ö¼'), + (0xFB31, 'M', u'בּ'), + (0xFB32, 'M', u'×’Ö¼'), + (0xFB33, 'M', u'דּ'), + (0xFB34, 'M', u'×”Ö¼'), + (0xFB35, 'M', u'וּ'), + (0xFB36, 'M', u'×–Ö¼'), + (0xFB37, 'X'), + (0xFB38, 'M', u'טּ'), + (0xFB39, 'M', u'×™Ö¼'), + (0xFB3A, 'M', u'ךּ'), + (0xFB3B, 'M', u'×›Ö¼'), + (0xFB3C, 'M', u'לּ'), + (0xFB3D, 'X'), + (0xFB3E, 'M', u'מּ'), + (0xFB3F, 'X'), + (0xFB40, 'M', u'× Ö¼'), + (0xFB41, 'M', u'סּ'), + (0xFB42, 'X'), + (0xFB43, 'M', u'×£Ö¼'), + (0xFB44, 'M', u'פּ'), + (0xFB45, 'X'), + (0xFB46, 'M', u'צּ'), + (0xFB47, 'M', u'קּ'), + (0xFB48, 'M', u'רּ'), + (0xFB49, 'M', u'שּ'), + (0xFB4A, 'M', u'תּ'), + (0xFB4B, 'M', u'וֹ'), + (0xFB4C, 'M', u'בֿ'), + (0xFB4D, 'M', u'×›Ö¿'), + (0xFB4E, 'M', u'פֿ'), + (0xFB4F, 'M', u'×ל'), + (0xFB50, 'M', u'Ù±'), + (0xFB52, 'M', u'Ù»'), + (0xFB56, 'M', u'Ù¾'), + (0xFB5A, 'M', u'Ú€'), + (0xFB5E, 'M', u'Ùº'), + (0xFB62, 'M', u'Ù¿'), + (0xFB66, 'M', u'Ù¹'), + (0xFB6A, 'M', u'Ú¤'), + (0xFB6E, 'M', u'Ú¦'), + (0xFB72, 'M', u'Ú„'), + (0xFB76, 'M', u'Úƒ'), + (0xFB7A, 'M', u'Ú†'), + (0xFB7E, 'M', u'Ú‡'), + (0xFB82, 'M', u'Ú'), + (0xFB84, 'M', u'ÚŒ'), + (0xFB86, 'M', u'ÚŽ'), + (0xFB88, 'M', u'Úˆ'), + (0xFB8A, 'M', u'Ú˜'), + (0xFB8C, 'M', u'Ú‘'), + (0xFB8E, 'M', u'Ú©'), + (0xFB92, 'M', u'Ú¯'), + (0xFB96, 'M', u'Ú³'), + (0xFB9A, 'M', u'Ú±'), + (0xFB9E, 'M', u'Úº'), + (0xFBA0, 'M', u'Ú»'), + (0xFBA4, 'M', u'Û€'), + (0xFBA6, 'M', u'Û'), + (0xFBAA, 'M', u'Ú¾'), + (0xFBAE, 'M', u'Û’'), + (0xFBB0, 'M', u'Û“'), + (0xFBB2, 'V'), + (0xFBC2, 'X'), + (0xFBD3, 'M', u'Ú­'), + (0xFBD7, 'M', u'Û‡'), + (0xFBD9, 'M', u'Û†'), + (0xFBDB, 'M', u'Ûˆ'), + (0xFBDD, 'M', u'Û‡Ù´'), + (0xFBDE, 'M', u'Û‹'), + (0xFBE0, 'M', u'Û…'), + (0xFBE2, 'M', u'Û‰'), + (0xFBE4, 'M', u'Û'), + (0xFBE8, 'M', u'Ù‰'), + (0xFBEA, 'M', u'ئا'), + (0xFBEC, 'M', u'ئە'), + (0xFBEE, 'M', u'ئو'), + (0xFBF0, 'M', u'ئۇ'), + (0xFBF2, 'M', u'ئۆ'), + (0xFBF4, 'M', u'ئۈ'), + (0xFBF6, 'M', u'ئÛ'), + (0xFBF9, 'M', u'ئى'), + (0xFBFC, 'M', u'ÛŒ'), + (0xFC00, 'M', u'ئج'), + (0xFC01, 'M', u'ئح'), + (0xFC02, 'M', u'ئم'), + (0xFC03, 'M', u'ئى'), + (0xFC04, 'M', u'ئي'), + (0xFC05, 'M', u'بج'), + (0xFC06, 'M', u'بح'), + (0xFC07, 'M', u'بخ'), + (0xFC08, 'M', u'بم'), + (0xFC09, 'M', u'بى'), + (0xFC0A, 'M', u'بي'), + (0xFC0B, 'M', u'تج'), + (0xFC0C, 'M', u'تح'), + (0xFC0D, 'M', u'تخ'), + (0xFC0E, 'M', u'تم'), + (0xFC0F, 'M', u'تى'), + (0xFC10, 'M', u'تي'), + (0xFC11, 'M', u'ثج'), + (0xFC12, 'M', u'ثم'), + (0xFC13, 'M', u'ثى'), + (0xFC14, 'M', u'ثي'), + (0xFC15, 'M', u'جح'), + (0xFC16, 'M', u'جم'), + (0xFC17, 'M', u'حج'), + (0xFC18, 'M', u'حم'), + (0xFC19, 'M', u'خج'), + (0xFC1A, 'M', u'خح'), + (0xFC1B, 'M', u'خم'), + (0xFC1C, 'M', u'سج'), + (0xFC1D, 'M', u'سح'), + (0xFC1E, 'M', u'سخ'), + (0xFC1F, 'M', u'سم'), + (0xFC20, 'M', u'صح'), + (0xFC21, 'M', u'صم'), + (0xFC22, 'M', u'ضج'), + (0xFC23, 'M', u'ضح'), + (0xFC24, 'M', u'ضخ'), + (0xFC25, 'M', u'ضم'), + (0xFC26, 'M', u'طح'), + (0xFC27, 'M', u'طم'), + (0xFC28, 'M', u'ظم'), + (0xFC29, 'M', u'عج'), + (0xFC2A, 'M', u'عم'), + (0xFC2B, 'M', u'غج'), + (0xFC2C, 'M', u'غم'), + (0xFC2D, 'M', u'Ùج'), + (0xFC2E, 'M', u'ÙØ­'), + (0xFC2F, 'M', u'ÙØ®'), + (0xFC30, 'M', u'ÙÙ…'), + (0xFC31, 'M', u'ÙÙ‰'), + (0xFC32, 'M', u'ÙÙŠ'), + (0xFC33, 'M', u'قح'), + (0xFC34, 'M', u'قم'), + (0xFC35, 'M', u'قى'), + (0xFC36, 'M', u'قي'), + (0xFC37, 'M', u'كا'), + (0xFC38, 'M', u'كج'), + (0xFC39, 'M', u'كح'), + (0xFC3A, 'M', u'كخ'), + (0xFC3B, 'M', u'كل'), + (0xFC3C, 'M', u'كم'), + (0xFC3D, 'M', u'كى'), + (0xFC3E, 'M', u'كي'), + 
(0xFC3F, 'M', u'لج'), + (0xFC40, 'M', u'لح'), + (0xFC41, 'M', u'لخ'), + (0xFC42, 'M', u'لم'), + (0xFC43, 'M', u'لى'), + (0xFC44, 'M', u'لي'), + (0xFC45, 'M', u'مج'), + (0xFC46, 'M', u'مح'), + (0xFC47, 'M', u'مخ'), + (0xFC48, 'M', u'مم'), + (0xFC49, 'M', u'مى'), + (0xFC4A, 'M', u'مي'), + (0xFC4B, 'M', u'نج'), + (0xFC4C, 'M', u'نح'), + (0xFC4D, 'M', u'نخ'), + (0xFC4E, 'M', u'نم'), + (0xFC4F, 'M', u'نى'), + (0xFC50, 'M', u'ني'), + (0xFC51, 'M', u'هج'), + (0xFC52, 'M', u'هم'), + (0xFC53, 'M', u'هى'), + (0xFC54, 'M', u'هي'), + (0xFC55, 'M', u'يج'), + (0xFC56, 'M', u'يح'), + (0xFC57, 'M', u'يخ'), + (0xFC58, 'M', u'يم'), + (0xFC59, 'M', u'يى'), + (0xFC5A, 'M', u'يي'), + (0xFC5B, 'M', u'ذٰ'), + (0xFC5C, 'M', u'رٰ'), + (0xFC5D, 'M', u'ىٰ'), + (0xFC5E, '3', u' ٌّ'), + (0xFC5F, '3', u' ÙÙ‘'), + (0xFC60, '3', u' ÙŽÙ‘'), + (0xFC61, '3', u' ÙÙ‘'), + (0xFC62, '3', u' ÙÙ‘'), + (0xFC63, '3', u' ّٰ'), + (0xFC64, 'M', u'ئر'), + (0xFC65, 'M', u'ئز'), + (0xFC66, 'M', u'ئم'), + (0xFC67, 'M', u'ئن'), + (0xFC68, 'M', u'ئى'), + (0xFC69, 'M', u'ئي'), + (0xFC6A, 'M', u'بر'), + (0xFC6B, 'M', u'بز'), + (0xFC6C, 'M', u'بم'), + (0xFC6D, 'M', u'بن'), + (0xFC6E, 'M', u'بى'), + (0xFC6F, 'M', u'بي'), + (0xFC70, 'M', u'تر'), + (0xFC71, 'M', u'تز'), + (0xFC72, 'M', u'تم'), + (0xFC73, 'M', u'تن'), + (0xFC74, 'M', u'تى'), + (0xFC75, 'M', u'تي'), + (0xFC76, 'M', u'ثر'), + (0xFC77, 'M', u'ثز'), + (0xFC78, 'M', u'ثم'), + (0xFC79, 'M', u'ثن'), + (0xFC7A, 'M', u'ثى'), + (0xFC7B, 'M', u'ثي'), + (0xFC7C, 'M', u'ÙÙ‰'), + (0xFC7D, 'M', u'ÙÙŠ'), + (0xFC7E, 'M', u'قى'), + (0xFC7F, 'M', u'قي'), + (0xFC80, 'M', u'كا'), + (0xFC81, 'M', u'كل'), + (0xFC82, 'M', u'كم'), + (0xFC83, 'M', u'كى'), + (0xFC84, 'M', u'كي'), + (0xFC85, 'M', u'لم'), + (0xFC86, 'M', u'لى'), + (0xFC87, 'M', u'لي'), + (0xFC88, 'M', u'ما'), + (0xFC89, 'M', u'مم'), + (0xFC8A, 'M', u'نر'), + (0xFC8B, 'M', u'نز'), + (0xFC8C, 'M', u'نم'), + (0xFC8D, 'M', u'نن'), + (0xFC8E, 'M', u'نى'), + (0xFC8F, 'M', u'ني'), + (0xFC90, 'M', u'ىٰ'), + (0xFC91, 'M', u'ير'), + (0xFC92, 'M', u'يز'), + (0xFC93, 'M', u'يم'), + (0xFC94, 'M', u'ين'), + (0xFC95, 'M', u'يى'), + (0xFC96, 'M', u'يي'), + (0xFC97, 'M', u'ئج'), + (0xFC98, 'M', u'ئح'), + (0xFC99, 'M', u'ئخ'), + (0xFC9A, 'M', u'ئم'), + (0xFC9B, 'M', u'ئه'), + (0xFC9C, 'M', u'بج'), + (0xFC9D, 'M', u'بح'), + (0xFC9E, 'M', u'بخ'), + (0xFC9F, 'M', u'بم'), + (0xFCA0, 'M', u'به'), + (0xFCA1, 'M', u'تج'), + (0xFCA2, 'M', u'تح'), + (0xFCA3, 'M', u'تخ'), + (0xFCA4, 'M', u'تم'), + (0xFCA5, 'M', u'ته'), + (0xFCA6, 'M', u'ثم'), + (0xFCA7, 'M', u'جح'), + (0xFCA8, 'M', u'جم'), + (0xFCA9, 'M', u'حج'), + (0xFCAA, 'M', u'حم'), + (0xFCAB, 'M', u'خج'), + (0xFCAC, 'M', u'خم'), + (0xFCAD, 'M', u'سج'), + (0xFCAE, 'M', u'سح'), + (0xFCAF, 'M', u'سخ'), + (0xFCB0, 'M', u'سم'), + (0xFCB1, 'M', u'صح'), + (0xFCB2, 'M', u'صخ'), + (0xFCB3, 'M', u'صم'), + (0xFCB4, 'M', u'ضج'), + (0xFCB5, 'M', u'ضح'), + (0xFCB6, 'M', u'ضخ'), + (0xFCB7, 'M', u'ضم'), + (0xFCB8, 'M', u'طح'), + (0xFCB9, 'M', u'ظم'), + (0xFCBA, 'M', u'عج'), + (0xFCBB, 'M', u'عم'), + (0xFCBC, 'M', u'غج'), + (0xFCBD, 'M', u'غم'), + (0xFCBE, 'M', u'Ùج'), + (0xFCBF, 'M', u'ÙØ­'), + (0xFCC0, 'M', u'ÙØ®'), + (0xFCC1, 'M', u'ÙÙ…'), + (0xFCC2, 'M', u'قح'), + (0xFCC3, 'M', u'قم'), + (0xFCC4, 'M', u'كج'), + (0xFCC5, 'M', u'كح'), + (0xFCC6, 'M', u'كخ'), + (0xFCC7, 'M', u'كل'), + (0xFCC8, 'M', u'كم'), + (0xFCC9, 'M', u'لج'), + (0xFCCA, 'M', u'لح'), + (0xFCCB, 'M', u'لخ'), + (0xFCCC, 'M', u'لم'), + (0xFCCD, 'M', u'له'), + (0xFCCE, 'M', u'مج'), + (0xFCCF, 'M', u'مح'), + (0xFCD0, 'M', u'مخ'), + (0xFCD1, 'M', u'مم'), + (0xFCD2, 
'M', u'نج'), + (0xFCD3, 'M', u'نح'), + (0xFCD4, 'M', u'نخ'), + (0xFCD5, 'M', u'نم'), + (0xFCD6, 'M', u'نه'), + (0xFCD7, 'M', u'هج'), + (0xFCD8, 'M', u'هم'), + (0xFCD9, 'M', u'هٰ'), + (0xFCDA, 'M', u'يج'), + (0xFCDB, 'M', u'يح'), + (0xFCDC, 'M', u'يخ'), + (0xFCDD, 'M', u'يم'), + (0xFCDE, 'M', u'يه'), + (0xFCDF, 'M', u'ئم'), + (0xFCE0, 'M', u'ئه'), + (0xFCE1, 'M', u'بم'), + (0xFCE2, 'M', u'به'), + (0xFCE3, 'M', u'تم'), + (0xFCE4, 'M', u'ته'), + (0xFCE5, 'M', u'ثم'), + (0xFCE6, 'M', u'ثه'), + (0xFCE7, 'M', u'سم'), + (0xFCE8, 'M', u'سه'), + (0xFCE9, 'M', u'شم'), + (0xFCEA, 'M', u'شه'), + (0xFCEB, 'M', u'كل'), + (0xFCEC, 'M', u'كم'), + (0xFCED, 'M', u'لم'), + (0xFCEE, 'M', u'نم'), + (0xFCEF, 'M', u'نه'), + (0xFCF0, 'M', u'يم'), + (0xFCF1, 'M', u'يه'), + (0xFCF2, 'M', u'Ù€ÙŽÙ‘'), + (0xFCF3, 'M', u'Ù€ÙÙ‘'), + (0xFCF4, 'M', u'Ù€ÙÙ‘'), + (0xFCF5, 'M', u'طى'), + (0xFCF6, 'M', u'طي'), + (0xFCF7, 'M', u'عى'), + (0xFCF8, 'M', u'عي'), + (0xFCF9, 'M', u'غى'), + (0xFCFA, 'M', u'غي'), + (0xFCFB, 'M', u'سى'), + (0xFCFC, 'M', u'سي'), + (0xFCFD, 'M', u'شى'), + (0xFCFE, 'M', u'شي'), + (0xFCFF, 'M', u'حى'), + (0xFD00, 'M', u'حي'), + (0xFD01, 'M', u'جى'), + (0xFD02, 'M', u'جي'), + (0xFD03, 'M', u'خى'), + (0xFD04, 'M', u'خي'), + (0xFD05, 'M', u'صى'), + (0xFD06, 'M', u'صي'), + (0xFD07, 'M', u'ضى'), + (0xFD08, 'M', u'ضي'), + (0xFD09, 'M', u'شج'), + (0xFD0A, 'M', u'شح'), + (0xFD0B, 'M', u'شخ'), + (0xFD0C, 'M', u'شم'), + (0xFD0D, 'M', u'شر'), + (0xFD0E, 'M', u'سر'), + (0xFD0F, 'M', u'صر'), + (0xFD10, 'M', u'ضر'), + (0xFD11, 'M', u'طى'), + (0xFD12, 'M', u'طي'), + (0xFD13, 'M', u'عى'), + (0xFD14, 'M', u'عي'), + (0xFD15, 'M', u'غى'), + (0xFD16, 'M', u'غي'), + (0xFD17, 'M', u'سى'), + (0xFD18, 'M', u'سي'), + (0xFD19, 'M', u'شى'), + (0xFD1A, 'M', u'شي'), + (0xFD1B, 'M', u'حى'), + (0xFD1C, 'M', u'حي'), + (0xFD1D, 'M', u'جى'), + (0xFD1E, 'M', u'جي'), + (0xFD1F, 'M', u'خى'), + (0xFD20, 'M', u'خي'), + (0xFD21, 'M', u'صى'), + (0xFD22, 'M', u'صي'), + (0xFD23, 'M', u'ضى'), + (0xFD24, 'M', u'ضي'), + (0xFD25, 'M', u'شج'), + (0xFD26, 'M', u'شح'), + (0xFD27, 'M', u'شخ'), + (0xFD28, 'M', u'شم'), + (0xFD29, 'M', u'شر'), + (0xFD2A, 'M', u'سر'), + (0xFD2B, 'M', u'صر'), + (0xFD2C, 'M', u'ضر'), + (0xFD2D, 'M', u'شج'), + (0xFD2E, 'M', u'شح'), + (0xFD2F, 'M', u'شخ'), + (0xFD30, 'M', u'شم'), + (0xFD31, 'M', u'سه'), + (0xFD32, 'M', u'شه'), + (0xFD33, 'M', u'طم'), + (0xFD34, 'M', u'سج'), + (0xFD35, 'M', u'سح'), + (0xFD36, 'M', u'سخ'), + (0xFD37, 'M', u'شج'), + (0xFD38, 'M', u'شح'), + (0xFD39, 'M', u'شخ'), + (0xFD3A, 'M', u'طم'), + (0xFD3B, 'M', u'ظم'), + (0xFD3C, 'M', u'اً'), + (0xFD3E, 'V'), + (0xFD40, 'X'), + (0xFD50, 'M', u'تجم'), + (0xFD51, 'M', u'تحج'), + (0xFD53, 'M', u'تحم'), + (0xFD54, 'M', u'تخم'), + (0xFD55, 'M', u'تمج'), + (0xFD56, 'M', u'تمح'), + (0xFD57, 'M', u'تمخ'), + (0xFD58, 'M', u'جمح'), + (0xFD5A, 'M', u'حمي'), + (0xFD5B, 'M', u'حمى'), + (0xFD5C, 'M', u'سحج'), + (0xFD5D, 'M', u'سجح'), + (0xFD5E, 'M', u'سجى'), + (0xFD5F, 'M', u'سمح'), + (0xFD61, 'M', u'سمج'), + (0xFD62, 'M', u'سمم'), + (0xFD64, 'M', u'صحح'), + (0xFD66, 'M', u'صمم'), + (0xFD67, 'M', u'شحم'), + (0xFD69, 'M', u'شجي'), + (0xFD6A, 'M', u'شمخ'), + (0xFD6C, 'M', u'شمم'), + (0xFD6E, 'M', u'ضحى'), + (0xFD6F, 'M', u'ضخم'), + (0xFD71, 'M', u'طمح'), + (0xFD73, 'M', u'طمم'), + (0xFD74, 'M', u'طمي'), + (0xFD75, 'M', u'عجم'), + (0xFD76, 'M', u'عمم'), + (0xFD78, 'M', u'عمى'), + (0xFD79, 'M', u'غمم'), + (0xFD7A, 'M', u'غمي'), + (0xFD7B, 'M', u'غمى'), + (0xFD7C, 'M', u'Ùخم'), + (0xFD7E, 'M', u'قمح'), + (0xFD7F, 'M', u'قمم'), + (0xFD80, 'M', u'لحم'), + (0xFD81, 'M', u'لحي'), + 
(0xFD82, 'M', u'لحى'), + (0xFD83, 'M', u'لجج'), + (0xFD85, 'M', u'لخم'), + (0xFD87, 'M', u'لمح'), + (0xFD89, 'M', u'محج'), + (0xFD8A, 'M', u'محم'), + (0xFD8B, 'M', u'محي'), + (0xFD8C, 'M', u'مجح'), + (0xFD8D, 'M', u'مجم'), + (0xFD8E, 'M', u'مخج'), + (0xFD8F, 'M', u'مخم'), + (0xFD90, 'X'), + (0xFD92, 'M', u'مجخ'), + (0xFD93, 'M', u'همج'), + (0xFD94, 'M', u'همم'), + (0xFD95, 'M', u'نحم'), + (0xFD96, 'M', u'نحى'), + (0xFD97, 'M', u'نجم'), + (0xFD99, 'M', u'نجى'), + (0xFD9A, 'M', u'نمي'), + (0xFD9B, 'M', u'نمى'), + (0xFD9C, 'M', u'يمم'), + (0xFD9E, 'M', u'بخي'), + (0xFD9F, 'M', u'تجي'), + (0xFDA0, 'M', u'تجى'), + (0xFDA1, 'M', u'تخي'), + (0xFDA2, 'M', u'تخى'), + (0xFDA3, 'M', u'تمي'), + (0xFDA4, 'M', u'تمى'), + (0xFDA5, 'M', u'جمي'), + (0xFDA6, 'M', u'جحى'), + (0xFDA7, 'M', u'جمى'), + (0xFDA8, 'M', u'سخى'), + (0xFDA9, 'M', u'صحي'), + (0xFDAA, 'M', u'شحي'), + (0xFDAB, 'M', u'ضحي'), + (0xFDAC, 'M', u'لجي'), + (0xFDAD, 'M', u'لمي'), + (0xFDAE, 'M', u'يحي'), + (0xFDAF, 'M', u'يجي'), + (0xFDB0, 'M', u'يمي'), + (0xFDB1, 'M', u'ممي'), + (0xFDB2, 'M', u'قمي'), + (0xFDB3, 'M', u'نحي'), + (0xFDB4, 'M', u'قمح'), + (0xFDB5, 'M', u'لحم'), + (0xFDB6, 'M', u'عمي'), + (0xFDB7, 'M', u'كمي'), + (0xFDB8, 'M', u'نجح'), + (0xFDB9, 'M', u'مخي'), + (0xFDBA, 'M', u'لجم'), + (0xFDBB, 'M', u'كمم'), + (0xFDBC, 'M', u'لجم'), + (0xFDBD, 'M', u'نجح'), + (0xFDBE, 'M', u'جحي'), + (0xFDBF, 'M', u'حجي'), + (0xFDC0, 'M', u'مجي'), + (0xFDC1, 'M', u'Ùمي'), + (0xFDC2, 'M', u'بحي'), + (0xFDC3, 'M', u'كمم'), + (0xFDC4, 'M', u'عجم'), + (0xFDC5, 'M', u'صمم'), + (0xFDC6, 'M', u'سخي'), + (0xFDC7, 'M', u'نجي'), + (0xFDC8, 'X'), + (0xFDF0, 'M', u'صلے'), + (0xFDF1, 'M', u'قلے'), + (0xFDF2, 'M', u'الله'), + (0xFDF3, 'M', u'اكبر'), + (0xFDF4, 'M', u'محمد'), + (0xFDF5, 'M', u'صلعم'), + (0xFDF6, 'M', u'رسول'), + (0xFDF7, 'M', u'عليه'), + (0xFDF8, 'M', u'وسلم'), + (0xFDF9, 'M', u'صلى'), + (0xFDFA, '3', u'صلى الله عليه وسلم'), + (0xFDFB, '3', u'جل جلاله'), + (0xFDFC, 'M', u'ریال'), + (0xFDFD, 'V'), + (0xFDFE, 'X'), + (0xFE00, 'I'), + (0xFE10, '3', u','), + (0xFE11, 'M', u'ã€'), + (0xFE12, 'X'), + (0xFE13, '3', u':'), + (0xFE14, '3', u';'), + (0xFE15, '3', u'!'), + (0xFE16, '3', u'?'), + (0xFE17, 'M', u'〖'), + (0xFE18, 'M', u'〗'), + (0xFE19, 'X'), + (0xFE20, 'V'), + (0xFE27, 'X'), + (0xFE31, 'M', u'—'), + (0xFE32, 'M', u'–'), + (0xFE33, '3', u'_'), + (0xFE35, '3', u'('), + (0xFE36, '3', u')'), + (0xFE37, '3', u'{'), + (0xFE38, '3', u'}'), + (0xFE39, 'M', u'〔'), + (0xFE3A, 'M', u'〕'), + (0xFE3B, 'M', u'ã€'), + (0xFE3C, 'M', u'】'), + (0xFE3D, 'M', u'《'), + (0xFE3E, 'M', u'》'), + (0xFE3F, 'M', u'〈'), + (0xFE40, 'M', u'〉'), + (0xFE41, 'M', u'「'), + (0xFE42, 'M', u'ã€'), + (0xFE43, 'M', u'『'), + (0xFE44, 'M', u'ã€'), + (0xFE45, 'V'), + (0xFE47, '3', u'['), + (0xFE48, '3', u']'), + (0xFE49, '3', u' Ì…'), + (0xFE4D, '3', u'_'), + (0xFE50, '3', u','), + (0xFE51, 'M', u'ã€'), + (0xFE52, 'X'), + (0xFE54, '3', u';'), + (0xFE55, '3', u':'), + (0xFE56, '3', u'?'), + (0xFE57, '3', u'!'), + (0xFE58, 'M', u'—'), + (0xFE59, '3', u'('), + (0xFE5A, '3', u')'), + (0xFE5B, '3', u'{'), + (0xFE5C, '3', u'}'), + (0xFE5D, 'M', u'〔'), + (0xFE5E, 'M', u'〕'), + (0xFE5F, '3', u'#'), + (0xFE60, '3', u'&'), + (0xFE61, '3', u'*'), + (0xFE62, '3', u'+'), + (0xFE63, 'M', u'-'), + (0xFE64, '3', u'<'), + (0xFE65, '3', u'>'), + (0xFE66, '3', u'='), + (0xFE67, 'X'), + (0xFE68, '3', u'\\'), + (0xFE69, '3', u'$'), + (0xFE6A, '3', u'%'), + (0xFE6B, '3', u'@'), + (0xFE6C, 'X'), + (0xFE70, '3', u' Ù‹'), + (0xFE71, 'M', u'ـً'), + (0xFE72, '3', u' ÙŒ'), + (0xFE73, 'V'), + (0xFE74, '3', u' 
Ù'), + (0xFE75, 'X'), + (0xFE76, '3', u' ÙŽ'), + (0xFE77, 'M', u'Ù€ÙŽ'), + (0xFE78, '3', u' Ù'), + (0xFE79, 'M', u'Ù€Ù'), + (0xFE7A, '3', u' Ù'), + (0xFE7B, 'M', u'Ù€Ù'), + (0xFE7C, '3', u' Ù‘'), + (0xFE7D, 'M', u'ـّ'), + (0xFE7E, '3', u' Ù’'), + (0xFE7F, 'M', u'ـْ'), + (0xFE80, 'M', u'Ø¡'), + (0xFE81, 'M', u'Ø¢'), + (0xFE83, 'M', u'Ø£'), + (0xFE85, 'M', u'ؤ'), + (0xFE87, 'M', u'Ø¥'), + (0xFE89, 'M', u'ئ'), + (0xFE8D, 'M', u'ا'), + (0xFE8F, 'M', u'ب'), + (0xFE93, 'M', u'Ø©'), + (0xFE95, 'M', u'ت'), + (0xFE99, 'M', u'Ø«'), + (0xFE9D, 'M', u'ج'), + (0xFEA1, 'M', u'Ø­'), + (0xFEA5, 'M', u'Ø®'), + (0xFEA9, 'M', u'د'), + (0xFEAB, 'M', u'Ø°'), + (0xFEAD, 'M', u'ر'), + (0xFEAF, 'M', u'ز'), + (0xFEB1, 'M', u'س'), + (0xFEB5, 'M', u'Ø´'), + (0xFEB9, 'M', u'ص'), + (0xFEBD, 'M', u'ض'), + (0xFEC1, 'M', u'Ø·'), + (0xFEC5, 'M', u'ظ'), + (0xFEC9, 'M', u'ع'), + (0xFECD, 'M', u'غ'), + (0xFED1, 'M', u'Ù'), + (0xFED5, 'M', u'Ù‚'), + (0xFED9, 'M', u'Ùƒ'), + (0xFEDD, 'M', u'Ù„'), + (0xFEE1, 'M', u'Ù…'), + (0xFEE5, 'M', u'Ù†'), + (0xFEE9, 'M', u'Ù‡'), + (0xFEED, 'M', u'Ùˆ'), + (0xFEEF, 'M', u'Ù‰'), + (0xFEF1, 'M', u'ÙŠ'), + (0xFEF5, 'M', u'لآ'), + (0xFEF7, 'M', u'لأ'), + (0xFEF9, 'M', u'لإ'), + (0xFEFB, 'M', u'لا'), + (0xFEFD, 'X'), + (0xFEFF, 'I'), + (0xFF00, 'X'), + (0xFF01, '3', u'!'), + (0xFF02, '3', u'"'), + (0xFF03, '3', u'#'), + (0xFF04, '3', u'$'), + (0xFF05, '3', u'%'), + (0xFF06, '3', u'&'), + (0xFF07, '3', u'\''), + (0xFF08, '3', u'('), + (0xFF09, '3', u')'), + (0xFF0A, '3', u'*'), + (0xFF0B, '3', u'+'), + (0xFF0C, '3', u','), + (0xFF0D, 'M', u'-'), + (0xFF0E, 'M', u'.'), + (0xFF0F, '3', u'/'), + (0xFF10, 'M', u'0'), + (0xFF11, 'M', u'1'), + (0xFF12, 'M', u'2'), + (0xFF13, 'M', u'3'), + (0xFF14, 'M', u'4'), + (0xFF15, 'M', u'5'), + (0xFF16, 'M', u'6'), + (0xFF17, 'M', u'7'), + (0xFF18, 'M', u'8'), + (0xFF19, 'M', u'9'), + (0xFF1A, '3', u':'), + (0xFF1B, '3', u';'), + (0xFF1C, '3', u'<'), + (0xFF1D, '3', u'='), + (0xFF1E, '3', u'>'), + (0xFF1F, '3', u'?'), + (0xFF20, '3', u'@'), + (0xFF21, 'M', u'a'), + (0xFF22, 'M', u'b'), + (0xFF23, 'M', u'c'), + (0xFF24, 'M', u'd'), + (0xFF25, 'M', u'e'), + (0xFF26, 'M', u'f'), + (0xFF27, 'M', u'g'), + (0xFF28, 'M', u'h'), + (0xFF29, 'M', u'i'), + (0xFF2A, 'M', u'j'), + (0xFF2B, 'M', u'k'), + (0xFF2C, 'M', u'l'), + (0xFF2D, 'M', u'm'), + (0xFF2E, 'M', u'n'), + (0xFF2F, 'M', u'o'), + (0xFF30, 'M', u'p'), + (0xFF31, 'M', u'q'), + (0xFF32, 'M', u'r'), + (0xFF33, 'M', u's'), + (0xFF34, 'M', u't'), + (0xFF35, 'M', u'u'), + (0xFF36, 'M', u'v'), + (0xFF37, 'M', u'w'), + (0xFF38, 'M', u'x'), + (0xFF39, 'M', u'y'), + (0xFF3A, 'M', u'z'), + (0xFF3B, '3', u'['), + (0xFF3C, '3', u'\\'), + (0xFF3D, '3', u']'), + (0xFF3E, '3', u'^'), + (0xFF3F, '3', u'_'), + (0xFF40, '3', u'`'), + (0xFF41, 'M', u'a'), + (0xFF42, 'M', u'b'), + (0xFF43, 'M', u'c'), + (0xFF44, 'M', u'd'), + (0xFF45, 'M', u'e'), + (0xFF46, 'M', u'f'), + (0xFF47, 'M', u'g'), + (0xFF48, 'M', u'h'), + (0xFF49, 'M', u'i'), + (0xFF4A, 'M', u'j'), + (0xFF4B, 'M', u'k'), + (0xFF4C, 'M', u'l'), + (0xFF4D, 'M', u'm'), + (0xFF4E, 'M', u'n'), + (0xFF4F, 'M', u'o'), + (0xFF50, 'M', u'p'), + (0xFF51, 'M', u'q'), + (0xFF52, 'M', u'r'), + (0xFF53, 'M', u's'), + (0xFF54, 'M', u't'), + (0xFF55, 'M', u'u'), + (0xFF56, 'M', u'v'), + (0xFF57, 'M', u'w'), + (0xFF58, 'M', u'x'), + (0xFF59, 'M', u'y'), + (0xFF5A, 'M', u'z'), + (0xFF5B, '3', u'{'), + (0xFF5C, '3', u'|'), + (0xFF5D, '3', u'}'), + (0xFF5E, '3', u'~'), + (0xFF5F, 'M', u'⦅'), + (0xFF60, 'M', u'⦆'), + (0xFF61, 'M', u'.'), + (0xFF62, 'M', u'「'), + (0xFF63, 'M', u'ã€'), + (0xFF64, 
'M', u'ã€'), + (0xFF65, 'M', u'・'), + (0xFF66, 'M', u'ヲ'), + (0xFF67, 'M', u'ã‚¡'), + (0xFF68, 'M', u'ã‚£'), + (0xFF69, 'M', u'ã‚¥'), + (0xFF6A, 'M', u'ェ'), + (0xFF6B, 'M', u'ã‚©'), + (0xFF6C, 'M', u'ャ'), + (0xFF6D, 'M', u'ュ'), + (0xFF6E, 'M', u'ョ'), + (0xFF6F, 'M', u'ッ'), + (0xFF70, 'M', u'ー'), + (0xFF71, 'M', u'ã‚¢'), + (0xFF72, 'M', u'イ'), + (0xFF73, 'M', u'ウ'), + (0xFF74, 'M', u'エ'), + (0xFF75, 'M', u'オ'), + (0xFF76, 'M', u'ã‚«'), + (0xFF77, 'M', u'ã‚­'), + (0xFF78, 'M', u'ク'), + (0xFF79, 'M', u'ケ'), + (0xFF7A, 'M', u'コ'), + (0xFF7B, 'M', u'サ'), + (0xFF7C, 'M', u'ã‚·'), + (0xFF7D, 'M', u'ス'), + (0xFF7E, 'M', u'ã‚»'), + (0xFF7F, 'M', u'ソ'), + (0xFF80, 'M', u'ã‚¿'), + (0xFF81, 'M', u'ãƒ'), + (0xFF82, 'M', u'ツ'), + (0xFF83, 'M', u'テ'), + (0xFF84, 'M', u'ト'), + (0xFF85, 'M', u'ナ'), + (0xFF86, 'M', u'ニ'), + (0xFF87, 'M', u'ヌ'), + (0xFF88, 'M', u'ãƒ'), + (0xFF89, 'M', u'ノ'), + (0xFF8A, 'M', u'ãƒ'), + (0xFF8B, 'M', u'ヒ'), + (0xFF8C, 'M', u'フ'), + (0xFF8D, 'M', u'ヘ'), + (0xFF8E, 'M', u'ホ'), + (0xFF8F, 'M', u'マ'), + (0xFF90, 'M', u'ミ'), + (0xFF91, 'M', u'ム'), + (0xFF92, 'M', u'メ'), + (0xFF93, 'M', u'モ'), + (0xFF94, 'M', u'ヤ'), + (0xFF95, 'M', u'ユ'), + (0xFF96, 'M', u'ヨ'), + (0xFF97, 'M', u'ラ'), + (0xFF98, 'M', u'リ'), + (0xFF99, 'M', u'ル'), + (0xFF9A, 'M', u'レ'), + (0xFF9B, 'M', u'ロ'), + (0xFF9C, 'M', u'ワ'), + (0xFF9D, 'M', u'ン'), + (0xFF9E, 'M', u'ã‚™'), + (0xFF9F, 'M', u'ã‚š'), + (0xFFA0, 'X'), + (0xFFA1, 'M', u'á„€'), + (0xFFA2, 'M', u'á„'), + (0xFFA3, 'M', u'ᆪ'), + (0xFFA4, 'M', u'á„‚'), + (0xFFA5, 'M', u'ᆬ'), + (0xFFA6, 'M', u'ᆭ'), + (0xFFA7, 'M', u'ᄃ'), + (0xFFA8, 'M', u'á„„'), + (0xFFA9, 'M', u'á„…'), + (0xFFAA, 'M', u'ᆰ'), + (0xFFAB, 'M', u'ᆱ'), + (0xFFAC, 'M', u'ᆲ'), + (0xFFAD, 'M', u'ᆳ'), + (0xFFAE, 'M', u'ᆴ'), + (0xFFAF, 'M', u'ᆵ'), + (0xFFB0, 'M', u'á„š'), + (0xFFB1, 'M', u'ᄆ'), + (0xFFB2, 'M', u'ᄇ'), + (0xFFB3, 'M', u'ᄈ'), + (0xFFB4, 'M', u'á„¡'), + (0xFFB5, 'M', u'ᄉ'), + (0xFFB6, 'M', u'á„Š'), + (0xFFB7, 'M', u'á„‹'), + (0xFFB8, 'M', u'á„Œ'), + (0xFFB9, 'M', u'á„'), + (0xFFBA, 'M', u'á„Ž'), + (0xFFBB, 'M', u'á„'), + (0xFFBC, 'M', u'á„'), + (0xFFBD, 'M', u'á„‘'), + (0xFFBE, 'M', u'á„’'), + (0xFFBF, 'X'), + (0xFFC2, 'M', u'á…¡'), + (0xFFC3, 'M', u'á…¢'), + (0xFFC4, 'M', u'á…£'), + (0xFFC5, 'M', u'á…¤'), + (0xFFC6, 'M', u'á…¥'), + (0xFFC7, 'M', u'á…¦'), + (0xFFC8, 'X'), + (0xFFCA, 'M', u'á…§'), + (0xFFCB, 'M', u'á…¨'), + (0xFFCC, 'M', u'á…©'), + (0xFFCD, 'M', u'á…ª'), + (0xFFCE, 'M', u'á…«'), + (0xFFCF, 'M', u'á…¬'), + (0xFFD0, 'X'), + (0xFFD2, 'M', u'á…­'), + (0xFFD3, 'M', u'á…®'), + (0xFFD4, 'M', u'á…¯'), + (0xFFD5, 'M', u'á…°'), + (0xFFD6, 'M', u'á…±'), + (0xFFD7, 'M', u'á…²'), + (0xFFD8, 'X'), + (0xFFDA, 'M', u'á…³'), + (0xFFDB, 'M', u'á…´'), + (0xFFDC, 'M', u'á…µ'), + (0xFFDD, 'X'), + (0xFFE0, 'M', u'¢'), + (0xFFE1, 'M', u'£'), + (0xFFE2, 'M', u'¬'), + (0xFFE3, '3', u' Ì„'), + (0xFFE4, 'M', u'¦'), + (0xFFE5, 'M', u'Â¥'), + (0xFFE6, 'M', u'â‚©'), + (0xFFE7, 'X'), + (0xFFE8, 'M', u'│'), + (0xFFE9, 'M', u'â†'), + (0xFFEA, 'M', u'↑'), + (0xFFEB, 'M', u'→'), + (0xFFEC, 'M', u'↓'), + (0xFFED, 'M', u'â– '), + (0xFFEE, 'M', u'â—‹'), + (0xFFEF, 'X'), + (0x10000, 'V'), + (0x1000C, 'X'), + (0x1000D, 'V'), + (0x10027, 'X'), + (0x10028, 'V'), + (0x1003B, 'X'), + (0x1003C, 'V'), + (0x1003E, 'X'), + (0x1003F, 'V'), + (0x1004E, 'X'), + (0x10050, 'V'), + (0x1005E, 'X'), + (0x10080, 'V'), + (0x100FB, 'X'), + (0x10100, 'V'), + (0x10103, 'X'), + (0x10107, 'V'), + (0x10134, 'X'), + (0x10137, 'V'), + (0x1018B, 'X'), + (0x10190, 'V'), + (0x1019C, 'X'), + (0x101D0, 'V'), + (0x101FE, 'X'), + (0x10280, 
'V'), + (0x1029D, 'X'), + (0x102A0, 'V'), + (0x102D1, 'X'), + (0x10300, 'V'), + (0x1031F, 'X'), + (0x10320, 'V'), + (0x10324, 'X'), + (0x10330, 'V'), + (0x1034B, 'X'), + (0x10380, 'V'), + (0x1039E, 'X'), + (0x1039F, 'V'), + (0x103C4, 'X'), + (0x103C8, 'V'), + (0x103D6, 'X'), + (0x10400, 'M', u'ð¨'), + (0x10401, 'M', u'ð©'), + (0x10402, 'M', u'ðª'), + (0x10403, 'M', u'ð«'), + (0x10404, 'M', u'ð¬'), + (0x10405, 'M', u'ð­'), + (0x10406, 'M', u'ð®'), + (0x10407, 'M', u'ð¯'), + (0x10408, 'M', u'ð°'), + (0x10409, 'M', u'ð±'), + (0x1040A, 'M', u'ð²'), + (0x1040B, 'M', u'ð³'), + (0x1040C, 'M', u'ð´'), + (0x1040D, 'M', u'ðµ'), + (0x1040E, 'M', u'ð¶'), + (0x1040F, 'M', u'ð·'), + (0x10410, 'M', u'ð¸'), + (0x10411, 'M', u'ð¹'), + (0x10412, 'M', u'ðº'), + (0x10413, 'M', u'ð»'), + (0x10414, 'M', u'ð¼'), + (0x10415, 'M', u'ð½'), + (0x10416, 'M', u'ð¾'), + (0x10417, 'M', u'ð¿'), + (0x10418, 'M', u'ð‘€'), + (0x10419, 'M', u'ð‘'), + (0x1041A, 'M', u'ð‘‚'), + (0x1041B, 'M', u'ð‘ƒ'), + (0x1041C, 'M', u'ð‘„'), + (0x1041D, 'M', u'ð‘…'), + (0x1041E, 'M', u'ð‘†'), + (0x1041F, 'M', u'ð‘‡'), + (0x10420, 'M', u'ð‘ˆ'), + (0x10421, 'M', u'ð‘‰'), + (0x10422, 'M', u'ð‘Š'), + (0x10423, 'M', u'ð‘‹'), + (0x10424, 'M', u'ð‘Œ'), + (0x10425, 'M', u'ð‘'), + (0x10426, 'M', u'ð‘Ž'), + (0x10427, 'M', u'ð‘'), + (0x10428, 'V'), + (0x1049E, 'X'), + (0x104A0, 'V'), + (0x104AA, 'X'), + (0x10800, 'V'), + (0x10806, 'X'), + (0x10808, 'V'), + (0x10809, 'X'), + (0x1080A, 'V'), + (0x10836, 'X'), + (0x10837, 'V'), + (0x10839, 'X'), + (0x1083C, 'V'), + (0x1083D, 'X'), + (0x1083F, 'V'), + (0x10856, 'X'), + (0x10857, 'V'), + (0x10860, 'X'), + (0x10900, 'V'), + (0x1091C, 'X'), + (0x1091F, 'V'), + (0x1093A, 'X'), + (0x1093F, 'V'), + (0x10940, 'X'), + (0x10980, 'V'), + (0x109B8, 'X'), + (0x109BE, 'V'), + (0x109C0, 'X'), + (0x10A00, 'V'), + (0x10A04, 'X'), + (0x10A05, 'V'), + (0x10A07, 'X'), + (0x10A0C, 'V'), + (0x10A14, 'X'), + (0x10A15, 'V'), + (0x10A18, 'X'), + (0x10A19, 'V'), + (0x10A34, 'X'), + (0x10A38, 'V'), + (0x10A3B, 'X'), + (0x10A3F, 'V'), + (0x10A48, 'X'), + (0x10A50, 'V'), + (0x10A59, 'X'), + (0x10A60, 'V'), + (0x10A80, 'X'), + (0x10B00, 'V'), + (0x10B36, 'X'), + (0x10B39, 'V'), + (0x10B56, 'X'), + (0x10B58, 'V'), + (0x10B73, 'X'), + (0x10B78, 'V'), + (0x10B80, 'X'), + (0x10C00, 'V'), + (0x10C49, 'X'), + (0x10E60, 'V'), + (0x10E7F, 'X'), + (0x11000, 'V'), + (0x1104E, 'X'), + (0x11052, 'V'), + (0x11070, 'X'), + (0x11080, 'V'), + (0x110BD, 'X'), + (0x110BE, 'V'), + (0x110C2, 'X'), + (0x110D0, 'V'), + (0x110E9, 'X'), + (0x110F0, 'V'), + (0x110FA, 'X'), + (0x11100, 'V'), + (0x11135, 'X'), + (0x11136, 'V'), + (0x11144, 'X'), + (0x11180, 'V'), + (0x111C9, 'X'), + (0x111D0, 'V'), + (0x111DA, 'X'), + (0x11680, 'V'), + (0x116B8, 'X'), + (0x116C0, 'V'), + (0x116CA, 'X'), + (0x12000, 'V'), + (0x1236F, 'X'), + (0x12400, 'V'), + (0x12463, 'X'), + (0x12470, 'V'), + (0x12474, 'X'), + (0x13000, 'V'), + (0x1342F, 'X'), + (0x16800, 'V'), + (0x16A39, 'X'), + (0x16F00, 'V'), + (0x16F45, 'X'), + (0x16F50, 'V'), + (0x16F7F, 'X'), + (0x16F8F, 'V'), + (0x16FA0, 'X'), + (0x1B000, 'V'), + (0x1B002, 'X'), + (0x1D000, 'V'), + (0x1D0F6, 'X'), + (0x1D100, 'V'), + (0x1D127, 'X'), + (0x1D129, 'V'), + (0x1D15E, 'M', u'ð…—ð…¥'), + (0x1D15F, 'M', u'ð…˜ð…¥'), + (0x1D160, 'M', u'ð…˜ð…¥ð…®'), + (0x1D161, 'M', u'ð…˜ð…¥ð…¯'), + (0x1D162, 'M', u'ð…˜ð…¥ð…°'), + (0x1D163, 'M', u'ð…˜ð…¥ð…±'), + (0x1D164, 'M', u'ð…˜ð…¥ð…²'), + (0x1D165, 'V'), + (0x1D173, 'X'), + (0x1D17B, 'V'), + (0x1D1BB, 'M', u'ð†¹ð…¥'), + (0x1D1BC, 'M', u'ð†ºð…¥'), + (0x1D1BD, 'M', u'ð†¹ð…¥ð…®'), + (0x1D1BE, 
'M', u'ð†ºð…¥ð…®'), + (0x1D1BF, 'M', u'ð†¹ð…¥ð…¯'), + (0x1D1C0, 'M', u'ð†ºð…¥ð…¯'), + (0x1D1C1, 'V'), + (0x1D1DE, 'X'), + (0x1D200, 'V'), + (0x1D246, 'X'), + (0x1D300, 'V'), + (0x1D357, 'X'), + (0x1D360, 'V'), + (0x1D372, 'X'), + (0x1D400, 'M', u'a'), + (0x1D401, 'M', u'b'), + (0x1D402, 'M', u'c'), + (0x1D403, 'M', u'd'), + (0x1D404, 'M', u'e'), + (0x1D405, 'M', u'f'), + (0x1D406, 'M', u'g'), + (0x1D407, 'M', u'h'), + (0x1D408, 'M', u'i'), + (0x1D409, 'M', u'j'), + (0x1D40A, 'M', u'k'), + (0x1D40B, 'M', u'l'), + (0x1D40C, 'M', u'm'), + (0x1D40D, 'M', u'n'), + (0x1D40E, 'M', u'o'), + (0x1D40F, 'M', u'p'), + (0x1D410, 'M', u'q'), + (0x1D411, 'M', u'r'), + (0x1D412, 'M', u's'), + (0x1D413, 'M', u't'), + (0x1D414, 'M', u'u'), + (0x1D415, 'M', u'v'), + (0x1D416, 'M', u'w'), + (0x1D417, 'M', u'x'), + (0x1D418, 'M', u'y'), + (0x1D419, 'M', u'z'), + (0x1D41A, 'M', u'a'), + (0x1D41B, 'M', u'b'), + (0x1D41C, 'M', u'c'), + (0x1D41D, 'M', u'd'), + (0x1D41E, 'M', u'e'), + (0x1D41F, 'M', u'f'), + (0x1D420, 'M', u'g'), + (0x1D421, 'M', u'h'), + (0x1D422, 'M', u'i'), + (0x1D423, 'M', u'j'), + (0x1D424, 'M', u'k'), + (0x1D425, 'M', u'l'), + (0x1D426, 'M', u'm'), + (0x1D427, 'M', u'n'), + (0x1D428, 'M', u'o'), + (0x1D429, 'M', u'p'), + (0x1D42A, 'M', u'q'), + (0x1D42B, 'M', u'r'), + (0x1D42C, 'M', u's'), + (0x1D42D, 'M', u't'), + (0x1D42E, 'M', u'u'), + (0x1D42F, 'M', u'v'), + (0x1D430, 'M', u'w'), + (0x1D431, 'M', u'x'), + (0x1D432, 'M', u'y'), + (0x1D433, 'M', u'z'), + (0x1D434, 'M', u'a'), + (0x1D435, 'M', u'b'), + (0x1D436, 'M', u'c'), + (0x1D437, 'M', u'd'), + (0x1D438, 'M', u'e'), + (0x1D439, 'M', u'f'), + (0x1D43A, 'M', u'g'), + (0x1D43B, 'M', u'h'), + (0x1D43C, 'M', u'i'), + (0x1D43D, 'M', u'j'), + (0x1D43E, 'M', u'k'), + (0x1D43F, 'M', u'l'), + (0x1D440, 'M', u'm'), + (0x1D441, 'M', u'n'), + (0x1D442, 'M', u'o'), + (0x1D443, 'M', u'p'), + (0x1D444, 'M', u'q'), + (0x1D445, 'M', u'r'), + (0x1D446, 'M', u's'), + (0x1D447, 'M', u't'), + (0x1D448, 'M', u'u'), + (0x1D449, 'M', u'v'), + (0x1D44A, 'M', u'w'), + (0x1D44B, 'M', u'x'), + (0x1D44C, 'M', u'y'), + (0x1D44D, 'M', u'z'), + (0x1D44E, 'M', u'a'), + (0x1D44F, 'M', u'b'), + (0x1D450, 'M', u'c'), + (0x1D451, 'M', u'd'), + (0x1D452, 'M', u'e'), + (0x1D453, 'M', u'f'), + (0x1D454, 'M', u'g'), + (0x1D455, 'X'), + (0x1D456, 'M', u'i'), + (0x1D457, 'M', u'j'), + (0x1D458, 'M', u'k'), + (0x1D459, 'M', u'l'), + (0x1D45A, 'M', u'm'), + (0x1D45B, 'M', u'n'), + (0x1D45C, 'M', u'o'), + (0x1D45D, 'M', u'p'), + (0x1D45E, 'M', u'q'), + (0x1D45F, 'M', u'r'), + (0x1D460, 'M', u's'), + (0x1D461, 'M', u't'), + (0x1D462, 'M', u'u'), + (0x1D463, 'M', u'v'), + (0x1D464, 'M', u'w'), + (0x1D465, 'M', u'x'), + (0x1D466, 'M', u'y'), + (0x1D467, 'M', u'z'), + (0x1D468, 'M', u'a'), + (0x1D469, 'M', u'b'), + (0x1D46A, 'M', u'c'), + (0x1D46B, 'M', u'd'), + (0x1D46C, 'M', u'e'), + (0x1D46D, 'M', u'f'), + (0x1D46E, 'M', u'g'), + (0x1D46F, 'M', u'h'), + (0x1D470, 'M', u'i'), + (0x1D471, 'M', u'j'), + (0x1D472, 'M', u'k'), + (0x1D473, 'M', u'l'), + (0x1D474, 'M', u'm'), + (0x1D475, 'M', u'n'), + (0x1D476, 'M', u'o'), + (0x1D477, 'M', u'p'), + (0x1D478, 'M', u'q'), + (0x1D479, 'M', u'r'), + (0x1D47A, 'M', u's'), + (0x1D47B, 'M', u't'), + (0x1D47C, 'M', u'u'), + (0x1D47D, 'M', u'v'), + (0x1D47E, 'M', u'w'), + (0x1D47F, 'M', u'x'), + (0x1D480, 'M', u'y'), + (0x1D481, 'M', u'z'), + (0x1D482, 'M', u'a'), + (0x1D483, 'M', u'b'), + (0x1D484, 'M', u'c'), + (0x1D485, 'M', u'd'), + (0x1D486, 'M', u'e'), + (0x1D487, 'M', u'f'), + (0x1D488, 'M', u'g'), + (0x1D489, 'M', u'h'), + (0x1D48A, 'M', 
u'i'), + (0x1D48B, 'M', u'j'), + (0x1D48C, 'M', u'k'), + (0x1D48D, 'M', u'l'), + (0x1D48E, 'M', u'm'), + (0x1D48F, 'M', u'n'), + (0x1D490, 'M', u'o'), + (0x1D491, 'M', u'p'), + (0x1D492, 'M', u'q'), + (0x1D493, 'M', u'r'), + (0x1D494, 'M', u's'), + (0x1D495, 'M', u't'), + (0x1D496, 'M', u'u'), + (0x1D497, 'M', u'v'), + (0x1D498, 'M', u'w'), + (0x1D499, 'M', u'x'), + (0x1D49A, 'M', u'y'), + (0x1D49B, 'M', u'z'), + (0x1D49C, 'M', u'a'), + (0x1D49D, 'X'), + (0x1D49E, 'M', u'c'), + (0x1D49F, 'M', u'd'), + (0x1D4A0, 'X'), + (0x1D4A2, 'M', u'g'), + (0x1D4A3, 'X'), + (0x1D4A5, 'M', u'j'), + (0x1D4A6, 'M', u'k'), + (0x1D4A7, 'X'), + (0x1D4A9, 'M', u'n'), + (0x1D4AA, 'M', u'o'), + (0x1D4AB, 'M', u'p'), + (0x1D4AC, 'M', u'q'), + (0x1D4AD, 'X'), + (0x1D4AE, 'M', u's'), + (0x1D4AF, 'M', u't'), + (0x1D4B0, 'M', u'u'), + (0x1D4B1, 'M', u'v'), + (0x1D4B2, 'M', u'w'), + (0x1D4B3, 'M', u'x'), + (0x1D4B4, 'M', u'y'), + (0x1D4B5, 'M', u'z'), + (0x1D4B6, 'M', u'a'), + (0x1D4B7, 'M', u'b'), + (0x1D4B8, 'M', u'c'), + (0x1D4B9, 'M', u'd'), + (0x1D4BA, 'X'), + (0x1D4BB, 'M', u'f'), + (0x1D4BC, 'X'), + (0x1D4BD, 'M', u'h'), + (0x1D4BE, 'M', u'i'), + (0x1D4BF, 'M', u'j'), + (0x1D4C0, 'M', u'k'), + (0x1D4C1, 'M', u'l'), + (0x1D4C2, 'M', u'm'), + (0x1D4C3, 'M', u'n'), + (0x1D4C4, 'X'), + (0x1D4C5, 'M', u'p'), + (0x1D4C6, 'M', u'q'), + (0x1D4C7, 'M', u'r'), + (0x1D4C8, 'M', u's'), + (0x1D4C9, 'M', u't'), + (0x1D4CA, 'M', u'u'), + (0x1D4CB, 'M', u'v'), + (0x1D4CC, 'M', u'w'), + (0x1D4CD, 'M', u'x'), + (0x1D4CE, 'M', u'y'), + (0x1D4CF, 'M', u'z'), + (0x1D4D0, 'M', u'a'), + (0x1D4D1, 'M', u'b'), + (0x1D4D2, 'M', u'c'), + (0x1D4D3, 'M', u'd'), + (0x1D4D4, 'M', u'e'), + (0x1D4D5, 'M', u'f'), + (0x1D4D6, 'M', u'g'), + (0x1D4D7, 'M', u'h'), + (0x1D4D8, 'M', u'i'), + (0x1D4D9, 'M', u'j'), + (0x1D4DA, 'M', u'k'), + (0x1D4DB, 'M', u'l'), + (0x1D4DC, 'M', u'm'), + (0x1D4DD, 'M', u'n'), + (0x1D4DE, 'M', u'o'), + (0x1D4DF, 'M', u'p'), + (0x1D4E0, 'M', u'q'), + (0x1D4E1, 'M', u'r'), + (0x1D4E2, 'M', u's'), + (0x1D4E3, 'M', u't'), + (0x1D4E4, 'M', u'u'), + (0x1D4E5, 'M', u'v'), + (0x1D4E6, 'M', u'w'), + (0x1D4E7, 'M', u'x'), + (0x1D4E8, 'M', u'y'), + (0x1D4E9, 'M', u'z'), + (0x1D4EA, 'M', u'a'), + (0x1D4EB, 'M', u'b'), + (0x1D4EC, 'M', u'c'), + (0x1D4ED, 'M', u'd'), + (0x1D4EE, 'M', u'e'), + (0x1D4EF, 'M', u'f'), + (0x1D4F0, 'M', u'g'), + (0x1D4F1, 'M', u'h'), + (0x1D4F2, 'M', u'i'), + (0x1D4F3, 'M', u'j'), + (0x1D4F4, 'M', u'k'), + (0x1D4F5, 'M', u'l'), + (0x1D4F6, 'M', u'm'), + (0x1D4F7, 'M', u'n'), + (0x1D4F8, 'M', u'o'), + (0x1D4F9, 'M', u'p'), + (0x1D4FA, 'M', u'q'), + (0x1D4FB, 'M', u'r'), + (0x1D4FC, 'M', u's'), + (0x1D4FD, 'M', u't'), + (0x1D4FE, 'M', u'u'), + (0x1D4FF, 'M', u'v'), + (0x1D500, 'M', u'w'), + (0x1D501, 'M', u'x'), + (0x1D502, 'M', u'y'), + (0x1D503, 'M', u'z'), + (0x1D504, 'M', u'a'), + (0x1D505, 'M', u'b'), + (0x1D506, 'X'), + (0x1D507, 'M', u'd'), + (0x1D508, 'M', u'e'), + (0x1D509, 'M', u'f'), + (0x1D50A, 'M', u'g'), + (0x1D50B, 'X'), + (0x1D50D, 'M', u'j'), + (0x1D50E, 'M', u'k'), + (0x1D50F, 'M', u'l'), + (0x1D510, 'M', u'm'), + (0x1D511, 'M', u'n'), + (0x1D512, 'M', u'o'), + (0x1D513, 'M', u'p'), + (0x1D514, 'M', u'q'), + (0x1D515, 'X'), + (0x1D516, 'M', u's'), + (0x1D517, 'M', u't'), + (0x1D518, 'M', u'u'), + (0x1D519, 'M', u'v'), + (0x1D51A, 'M', u'w'), + (0x1D51B, 'M', u'x'), + (0x1D51C, 'M', u'y'), + (0x1D51D, 'X'), + (0x1D51E, 'M', u'a'), + (0x1D51F, 'M', u'b'), + (0x1D520, 'M', u'c'), + (0x1D521, 'M', u'd'), + (0x1D522, 'M', u'e'), + (0x1D523, 'M', u'f'), + (0x1D524, 'M', u'g'), + (0x1D525, 'M', 
u'h'), + (0x1D526, 'M', u'i'), + (0x1D527, 'M', u'j'), + (0x1D528, 'M', u'k'), + (0x1D529, 'M', u'l'), + (0x1D52A, 'M', u'm'), + (0x1D52B, 'M', u'n'), + (0x1D52C, 'M', u'o'), + (0x1D52D, 'M', u'p'), + (0x1D52E, 'M', u'q'), + (0x1D52F, 'M', u'r'), + (0x1D530, 'M', u's'), + (0x1D531, 'M', u't'), + (0x1D532, 'M', u'u'), + (0x1D533, 'M', u'v'), + (0x1D534, 'M', u'w'), + (0x1D535, 'M', u'x'), + (0x1D536, 'M', u'y'), + (0x1D537, 'M', u'z'), + (0x1D538, 'M', u'a'), + (0x1D539, 'M', u'b'), + (0x1D53A, 'X'), + (0x1D53B, 'M', u'd'), + (0x1D53C, 'M', u'e'), + (0x1D53D, 'M', u'f'), + (0x1D53E, 'M', u'g'), + (0x1D53F, 'X'), + (0x1D540, 'M', u'i'), + (0x1D541, 'M', u'j'), + (0x1D542, 'M', u'k'), + (0x1D543, 'M', u'l'), + (0x1D544, 'M', u'm'), + (0x1D545, 'X'), + (0x1D546, 'M', u'o'), + (0x1D547, 'X'), + (0x1D54A, 'M', u's'), + (0x1D54B, 'M', u't'), + (0x1D54C, 'M', u'u'), + (0x1D54D, 'M', u'v'), + (0x1D54E, 'M', u'w'), + (0x1D54F, 'M', u'x'), + (0x1D550, 'M', u'y'), + (0x1D551, 'X'), + (0x1D552, 'M', u'a'), + (0x1D553, 'M', u'b'), + (0x1D554, 'M', u'c'), + (0x1D555, 'M', u'd'), + (0x1D556, 'M', u'e'), + (0x1D557, 'M', u'f'), + (0x1D558, 'M', u'g'), + (0x1D559, 'M', u'h'), + (0x1D55A, 'M', u'i'), + (0x1D55B, 'M', u'j'), + (0x1D55C, 'M', u'k'), + (0x1D55D, 'M', u'l'), + (0x1D55E, 'M', u'm'), + (0x1D55F, 'M', u'n'), + (0x1D560, 'M', u'o'), + (0x1D561, 'M', u'p'), + (0x1D562, 'M', u'q'), + (0x1D563, 'M', u'r'), + (0x1D564, 'M', u's'), + (0x1D565, 'M', u't'), + (0x1D566, 'M', u'u'), + (0x1D567, 'M', u'v'), + (0x1D568, 'M', u'w'), + (0x1D569, 'M', u'x'), + (0x1D56A, 'M', u'y'), + (0x1D56B, 'M', u'z'), + (0x1D56C, 'M', u'a'), + (0x1D56D, 'M', u'b'), + (0x1D56E, 'M', u'c'), + (0x1D56F, 'M', u'd'), + (0x1D570, 'M', u'e'), + (0x1D571, 'M', u'f'), + (0x1D572, 'M', u'g'), + (0x1D573, 'M', u'h'), + (0x1D574, 'M', u'i'), + (0x1D575, 'M', u'j'), + (0x1D576, 'M', u'k'), + (0x1D577, 'M', u'l'), + (0x1D578, 'M', u'm'), + (0x1D579, 'M', u'n'), + (0x1D57A, 'M', u'o'), + (0x1D57B, 'M', u'p'), + (0x1D57C, 'M', u'q'), + (0x1D57D, 'M', u'r'), + (0x1D57E, 'M', u's'), + (0x1D57F, 'M', u't'), + (0x1D580, 'M', u'u'), + (0x1D581, 'M', u'v'), + (0x1D582, 'M', u'w'), + (0x1D583, 'M', u'x'), + (0x1D584, 'M', u'y'), + (0x1D585, 'M', u'z'), + (0x1D586, 'M', u'a'), + (0x1D587, 'M', u'b'), + (0x1D588, 'M', u'c'), + (0x1D589, 'M', u'd'), + (0x1D58A, 'M', u'e'), + (0x1D58B, 'M', u'f'), + (0x1D58C, 'M', u'g'), + (0x1D58D, 'M', u'h'), + (0x1D58E, 'M', u'i'), + (0x1D58F, 'M', u'j'), + (0x1D590, 'M', u'k'), + (0x1D591, 'M', u'l'), + (0x1D592, 'M', u'm'), + (0x1D593, 'M', u'n'), + (0x1D594, 'M', u'o'), + (0x1D595, 'M', u'p'), + (0x1D596, 'M', u'q'), + (0x1D597, 'M', u'r'), + (0x1D598, 'M', u's'), + (0x1D599, 'M', u't'), + (0x1D59A, 'M', u'u'), + (0x1D59B, 'M', u'v'), + (0x1D59C, 'M', u'w'), + (0x1D59D, 'M', u'x'), + (0x1D59E, 'M', u'y'), + (0x1D59F, 'M', u'z'), + (0x1D5A0, 'M', u'a'), + (0x1D5A1, 'M', u'b'), + (0x1D5A2, 'M', u'c'), + (0x1D5A3, 'M', u'd'), + (0x1D5A4, 'M', u'e'), + (0x1D5A5, 'M', u'f'), + (0x1D5A6, 'M', u'g'), + (0x1D5A7, 'M', u'h'), + (0x1D5A8, 'M', u'i'), + (0x1D5A9, 'M', u'j'), + (0x1D5AA, 'M', u'k'), + (0x1D5AB, 'M', u'l'), + (0x1D5AC, 'M', u'm'), + (0x1D5AD, 'M', u'n'), + (0x1D5AE, 'M', u'o'), + (0x1D5AF, 'M', u'p'), + (0x1D5B0, 'M', u'q'), + (0x1D5B1, 'M', u'r'), + (0x1D5B2, 'M', u's'), + (0x1D5B3, 'M', u't'), + (0x1D5B4, 'M', u'u'), + (0x1D5B5, 'M', u'v'), + (0x1D5B6, 'M', u'w'), + (0x1D5B7, 'M', u'x'), + (0x1D5B8, 'M', u'y'), + (0x1D5B9, 'M', u'z'), + (0x1D5BA, 'M', u'a'), + (0x1D5BB, 'M', u'b'), + (0x1D5BC, 'M', u'c'), + 
(0x1D5BD, 'M', u'd'), + (0x1D5BE, 'M', u'e'), + (0x1D5BF, 'M', u'f'), + (0x1D5C0, 'M', u'g'), + (0x1D5C1, 'M', u'h'), + (0x1D5C2, 'M', u'i'), + (0x1D5C3, 'M', u'j'), + (0x1D5C4, 'M', u'k'), + (0x1D5C5, 'M', u'l'), + (0x1D5C6, 'M', u'm'), + (0x1D5C7, 'M', u'n'), + (0x1D5C8, 'M', u'o'), + (0x1D5C9, 'M', u'p'), + (0x1D5CA, 'M', u'q'), + (0x1D5CB, 'M', u'r'), + (0x1D5CC, 'M', u's'), + (0x1D5CD, 'M', u't'), + (0x1D5CE, 'M', u'u'), + (0x1D5CF, 'M', u'v'), + (0x1D5D0, 'M', u'w'), + (0x1D5D1, 'M', u'x'), + (0x1D5D2, 'M', u'y'), + (0x1D5D3, 'M', u'z'), + (0x1D5D4, 'M', u'a'), + (0x1D5D5, 'M', u'b'), + (0x1D5D6, 'M', u'c'), + (0x1D5D7, 'M', u'd'), + (0x1D5D8, 'M', u'e'), + (0x1D5D9, 'M', u'f'), + (0x1D5DA, 'M', u'g'), + (0x1D5DB, 'M', u'h'), + (0x1D5DC, 'M', u'i'), + (0x1D5DD, 'M', u'j'), + (0x1D5DE, 'M', u'k'), + (0x1D5DF, 'M', u'l'), + (0x1D5E0, 'M', u'm'), + (0x1D5E1, 'M', u'n'), + (0x1D5E2, 'M', u'o'), + (0x1D5E3, 'M', u'p'), + (0x1D5E4, 'M', u'q'), + (0x1D5E5, 'M', u'r'), + (0x1D5E6, 'M', u's'), + (0x1D5E7, 'M', u't'), + (0x1D5E8, 'M', u'u'), + (0x1D5E9, 'M', u'v'), + (0x1D5EA, 'M', u'w'), + (0x1D5EB, 'M', u'x'), + (0x1D5EC, 'M', u'y'), + (0x1D5ED, 'M', u'z'), + (0x1D5EE, 'M', u'a'), + (0x1D5EF, 'M', u'b'), + (0x1D5F0, 'M', u'c'), + (0x1D5F1, 'M', u'd'), + (0x1D5F2, 'M', u'e'), + (0x1D5F3, 'M', u'f'), + (0x1D5F4, 'M', u'g'), + (0x1D5F5, 'M', u'h'), + (0x1D5F6, 'M', u'i'), + (0x1D5F7, 'M', u'j'), + (0x1D5F8, 'M', u'k'), + (0x1D5F9, 'M', u'l'), + (0x1D5FA, 'M', u'm'), + (0x1D5FB, 'M', u'n'), + (0x1D5FC, 'M', u'o'), + (0x1D5FD, 'M', u'p'), + (0x1D5FE, 'M', u'q'), + (0x1D5FF, 'M', u'r'), + (0x1D600, 'M', u's'), + (0x1D601, 'M', u't'), + (0x1D602, 'M', u'u'), + (0x1D603, 'M', u'v'), + (0x1D604, 'M', u'w'), + (0x1D605, 'M', u'x'), + (0x1D606, 'M', u'y'), + (0x1D607, 'M', u'z'), + (0x1D608, 'M', u'a'), + (0x1D609, 'M', u'b'), + (0x1D60A, 'M', u'c'), + (0x1D60B, 'M', u'd'), + (0x1D60C, 'M', u'e'), + (0x1D60D, 'M', u'f'), + (0x1D60E, 'M', u'g'), + (0x1D60F, 'M', u'h'), + (0x1D610, 'M', u'i'), + (0x1D611, 'M', u'j'), + (0x1D612, 'M', u'k'), + (0x1D613, 'M', u'l'), + (0x1D614, 'M', u'm'), + (0x1D615, 'M', u'n'), + (0x1D616, 'M', u'o'), + (0x1D617, 'M', u'p'), + (0x1D618, 'M', u'q'), + (0x1D619, 'M', u'r'), + (0x1D61A, 'M', u's'), + (0x1D61B, 'M', u't'), + (0x1D61C, 'M', u'u'), + (0x1D61D, 'M', u'v'), + (0x1D61E, 'M', u'w'), + (0x1D61F, 'M', u'x'), + (0x1D620, 'M', u'y'), + (0x1D621, 'M', u'z'), + (0x1D622, 'M', u'a'), + (0x1D623, 'M', u'b'), + (0x1D624, 'M', u'c'), + (0x1D625, 'M', u'd'), + (0x1D626, 'M', u'e'), + (0x1D627, 'M', u'f'), + (0x1D628, 'M', u'g'), + (0x1D629, 'M', u'h'), + (0x1D62A, 'M', u'i'), + (0x1D62B, 'M', u'j'), + (0x1D62C, 'M', u'k'), + (0x1D62D, 'M', u'l'), + (0x1D62E, 'M', u'm'), + (0x1D62F, 'M', u'n'), + (0x1D630, 'M', u'o'), + (0x1D631, 'M', u'p'), + (0x1D632, 'M', u'q'), + (0x1D633, 'M', u'r'), + (0x1D634, 'M', u's'), + (0x1D635, 'M', u't'), + (0x1D636, 'M', u'u'), + (0x1D637, 'M', u'v'), + (0x1D638, 'M', u'w'), + (0x1D639, 'M', u'x'), + (0x1D63A, 'M', u'y'), + (0x1D63B, 'M', u'z'), + (0x1D63C, 'M', u'a'), + (0x1D63D, 'M', u'b'), + (0x1D63E, 'M', u'c'), + (0x1D63F, 'M', u'd'), + (0x1D640, 'M', u'e'), + (0x1D641, 'M', u'f'), + (0x1D642, 'M', u'g'), + (0x1D643, 'M', u'h'), + (0x1D644, 'M', u'i'), + (0x1D645, 'M', u'j'), + (0x1D646, 'M', u'k'), + (0x1D647, 'M', u'l'), + (0x1D648, 'M', u'm'), + (0x1D649, 'M', u'n'), + (0x1D64A, 'M', u'o'), + (0x1D64B, 'M', u'p'), + (0x1D64C, 'M', u'q'), + (0x1D64D, 'M', u'r'), + (0x1D64E, 'M', u's'), + (0x1D64F, 'M', u't'), + (0x1D650, 'M', u'u'), + 
(0x1D651, 'M', u'v'), + (0x1D652, 'M', u'w'), + (0x1D653, 'M', u'x'), + (0x1D654, 'M', u'y'), + (0x1D655, 'M', u'z'), + (0x1D656, 'M', u'a'), + (0x1D657, 'M', u'b'), + (0x1D658, 'M', u'c'), + (0x1D659, 'M', u'd'), + (0x1D65A, 'M', u'e'), + (0x1D65B, 'M', u'f'), + (0x1D65C, 'M', u'g'), + (0x1D65D, 'M', u'h'), + (0x1D65E, 'M', u'i'), + (0x1D65F, 'M', u'j'), + (0x1D660, 'M', u'k'), + (0x1D661, 'M', u'l'), + (0x1D662, 'M', u'm'), + (0x1D663, 'M', u'n'), + (0x1D664, 'M', u'o'), + (0x1D665, 'M', u'p'), + (0x1D666, 'M', u'q'), + (0x1D667, 'M', u'r'), + (0x1D668, 'M', u's'), + (0x1D669, 'M', u't'), + (0x1D66A, 'M', u'u'), + (0x1D66B, 'M', u'v'), + (0x1D66C, 'M', u'w'), + (0x1D66D, 'M', u'x'), + (0x1D66E, 'M', u'y'), + (0x1D66F, 'M', u'z'), + (0x1D670, 'M', u'a'), + (0x1D671, 'M', u'b'), + (0x1D672, 'M', u'c'), + (0x1D673, 'M', u'd'), + (0x1D674, 'M', u'e'), + (0x1D675, 'M', u'f'), + (0x1D676, 'M', u'g'), + (0x1D677, 'M', u'h'), + (0x1D678, 'M', u'i'), + (0x1D679, 'M', u'j'), + (0x1D67A, 'M', u'k'), + (0x1D67B, 'M', u'l'), + (0x1D67C, 'M', u'm'), + (0x1D67D, 'M', u'n'), + (0x1D67E, 'M', u'o'), + (0x1D67F, 'M', u'p'), + (0x1D680, 'M', u'q'), + (0x1D681, 'M', u'r'), + (0x1D682, 'M', u's'), + (0x1D683, 'M', u't'), + (0x1D684, 'M', u'u'), + (0x1D685, 'M', u'v'), + (0x1D686, 'M', u'w'), + (0x1D687, 'M', u'x'), + (0x1D688, 'M', u'y'), + (0x1D689, 'M', u'z'), + (0x1D68A, 'M', u'a'), + (0x1D68B, 'M', u'b'), + (0x1D68C, 'M', u'c'), + (0x1D68D, 'M', u'd'), + (0x1D68E, 'M', u'e'), + (0x1D68F, 'M', u'f'), + (0x1D690, 'M', u'g'), + (0x1D691, 'M', u'h'), + (0x1D692, 'M', u'i'), + (0x1D693, 'M', u'j'), + (0x1D694, 'M', u'k'), + (0x1D695, 'M', u'l'), + (0x1D696, 'M', u'm'), + (0x1D697, 'M', u'n'), + (0x1D698, 'M', u'o'), + (0x1D699, 'M', u'p'), + (0x1D69A, 'M', u'q'), + (0x1D69B, 'M', u'r'), + (0x1D69C, 'M', u's'), + (0x1D69D, 'M', u't'), + (0x1D69E, 'M', u'u'), + (0x1D69F, 'M', u'v'), + (0x1D6A0, 'M', u'w'), + (0x1D6A1, 'M', u'x'), + (0x1D6A2, 'M', u'y'), + (0x1D6A3, 'M', u'z'), + (0x1D6A4, 'M', u'ı'), + (0x1D6A5, 'M', u'È·'), + (0x1D6A6, 'X'), + (0x1D6A8, 'M', u'α'), + (0x1D6A9, 'M', u'β'), + (0x1D6AA, 'M', u'γ'), + (0x1D6AB, 'M', u'δ'), + (0x1D6AC, 'M', u'ε'), + (0x1D6AD, 'M', u'ζ'), + (0x1D6AE, 'M', u'η'), + (0x1D6AF, 'M', u'θ'), + (0x1D6B0, 'M', u'ι'), + (0x1D6B1, 'M', u'κ'), + (0x1D6B2, 'M', u'λ'), + (0x1D6B3, 'M', u'μ'), + (0x1D6B4, 'M', u'ν'), + (0x1D6B5, 'M', u'ξ'), + (0x1D6B6, 'M', u'ο'), + (0x1D6B7, 'M', u'Ï€'), + (0x1D6B8, 'M', u'Ï'), + (0x1D6B9, 'M', u'θ'), + (0x1D6BA, 'M', u'σ'), + (0x1D6BB, 'M', u'Ï„'), + (0x1D6BC, 'M', u'Ï…'), + (0x1D6BD, 'M', u'φ'), + (0x1D6BE, 'M', u'χ'), + (0x1D6BF, 'M', u'ψ'), + (0x1D6C0, 'M', u'ω'), + (0x1D6C1, 'M', u'∇'), + (0x1D6C2, 'M', u'α'), + (0x1D6C3, 'M', u'β'), + (0x1D6C4, 'M', u'γ'), + (0x1D6C5, 'M', u'δ'), + (0x1D6C6, 'M', u'ε'), + (0x1D6C7, 'M', u'ζ'), + (0x1D6C8, 'M', u'η'), + (0x1D6C9, 'M', u'θ'), + (0x1D6CA, 'M', u'ι'), + (0x1D6CB, 'M', u'κ'), + (0x1D6CC, 'M', u'λ'), + (0x1D6CD, 'M', u'μ'), + (0x1D6CE, 'M', u'ν'), + (0x1D6CF, 'M', u'ξ'), + (0x1D6D0, 'M', u'ο'), + (0x1D6D1, 'M', u'Ï€'), + (0x1D6D2, 'M', u'Ï'), + (0x1D6D3, 'M', u'σ'), + (0x1D6D5, 'M', u'Ï„'), + (0x1D6D6, 'M', u'Ï…'), + (0x1D6D7, 'M', u'φ'), + (0x1D6D8, 'M', u'χ'), + (0x1D6D9, 'M', u'ψ'), + (0x1D6DA, 'M', u'ω'), + (0x1D6DB, 'M', u'∂'), + (0x1D6DC, 'M', u'ε'), + (0x1D6DD, 'M', u'θ'), + (0x1D6DE, 'M', u'κ'), + (0x1D6DF, 'M', u'φ'), + (0x1D6E0, 'M', u'Ï'), + (0x1D6E1, 'M', u'Ï€'), + (0x1D6E2, 'M', u'α'), + (0x1D6E3, 'M', u'β'), + (0x1D6E4, 'M', u'γ'), + (0x1D6E5, 'M', u'δ'), + (0x1D6E6, 'M', u'ε'), + 
(0x1D6E7, 'M', u'ζ'), + (0x1D6E8, 'M', u'η'), + (0x1D6E9, 'M', u'θ'), + (0x1D6EA, 'M', u'ι'), + (0x1D6EB, 'M', u'κ'), + (0x1D6EC, 'M', u'λ'), + (0x1D6ED, 'M', u'μ'), + (0x1D6EE, 'M', u'ν'), + (0x1D6EF, 'M', u'ξ'), + (0x1D6F0, 'M', u'ο'), + (0x1D6F1, 'M', u'Ï€'), + (0x1D6F2, 'M', u'Ï'), + (0x1D6F3, 'M', u'θ'), + (0x1D6F4, 'M', u'σ'), + (0x1D6F5, 'M', u'Ï„'), + (0x1D6F6, 'M', u'Ï…'), + (0x1D6F7, 'M', u'φ'), + (0x1D6F8, 'M', u'χ'), + (0x1D6F9, 'M', u'ψ'), + (0x1D6FA, 'M', u'ω'), + (0x1D6FB, 'M', u'∇'), + (0x1D6FC, 'M', u'α'), + (0x1D6FD, 'M', u'β'), + (0x1D6FE, 'M', u'γ'), + (0x1D6FF, 'M', u'δ'), + (0x1D700, 'M', u'ε'), + (0x1D701, 'M', u'ζ'), + (0x1D702, 'M', u'η'), + (0x1D703, 'M', u'θ'), + (0x1D704, 'M', u'ι'), + (0x1D705, 'M', u'κ'), + (0x1D706, 'M', u'λ'), + (0x1D707, 'M', u'μ'), + (0x1D708, 'M', u'ν'), + (0x1D709, 'M', u'ξ'), + (0x1D70A, 'M', u'ο'), + (0x1D70B, 'M', u'Ï€'), + (0x1D70C, 'M', u'Ï'), + (0x1D70D, 'M', u'σ'), + (0x1D70F, 'M', u'Ï„'), + (0x1D710, 'M', u'Ï…'), + (0x1D711, 'M', u'φ'), + (0x1D712, 'M', u'χ'), + (0x1D713, 'M', u'ψ'), + (0x1D714, 'M', u'ω'), + (0x1D715, 'M', u'∂'), + (0x1D716, 'M', u'ε'), + (0x1D717, 'M', u'θ'), + (0x1D718, 'M', u'κ'), + (0x1D719, 'M', u'φ'), + (0x1D71A, 'M', u'Ï'), + (0x1D71B, 'M', u'Ï€'), + (0x1D71C, 'M', u'α'), + (0x1D71D, 'M', u'β'), + (0x1D71E, 'M', u'γ'), + (0x1D71F, 'M', u'δ'), + (0x1D720, 'M', u'ε'), + (0x1D721, 'M', u'ζ'), + (0x1D722, 'M', u'η'), + (0x1D723, 'M', u'θ'), + (0x1D724, 'M', u'ι'), + (0x1D725, 'M', u'κ'), + (0x1D726, 'M', u'λ'), + (0x1D727, 'M', u'μ'), + (0x1D728, 'M', u'ν'), + (0x1D729, 'M', u'ξ'), + (0x1D72A, 'M', u'ο'), + (0x1D72B, 'M', u'Ï€'), + (0x1D72C, 'M', u'Ï'), + (0x1D72D, 'M', u'θ'), + (0x1D72E, 'M', u'σ'), + (0x1D72F, 'M', u'Ï„'), + (0x1D730, 'M', u'Ï…'), + (0x1D731, 'M', u'φ'), + (0x1D732, 'M', u'χ'), + (0x1D733, 'M', u'ψ'), + (0x1D734, 'M', u'ω'), + (0x1D735, 'M', u'∇'), + (0x1D736, 'M', u'α'), + (0x1D737, 'M', u'β'), + (0x1D738, 'M', u'γ'), + (0x1D739, 'M', u'δ'), + (0x1D73A, 'M', u'ε'), + (0x1D73B, 'M', u'ζ'), + (0x1D73C, 'M', u'η'), + (0x1D73D, 'M', u'θ'), + (0x1D73E, 'M', u'ι'), + (0x1D73F, 'M', u'κ'), + (0x1D740, 'M', u'λ'), + (0x1D741, 'M', u'μ'), + (0x1D742, 'M', u'ν'), + (0x1D743, 'M', u'ξ'), + (0x1D744, 'M', u'ο'), + (0x1D745, 'M', u'Ï€'), + (0x1D746, 'M', u'Ï'), + (0x1D747, 'M', u'σ'), + (0x1D749, 'M', u'Ï„'), + (0x1D74A, 'M', u'Ï…'), + (0x1D74B, 'M', u'φ'), + (0x1D74C, 'M', u'χ'), + (0x1D74D, 'M', u'ψ'), + (0x1D74E, 'M', u'ω'), + (0x1D74F, 'M', u'∂'), + (0x1D750, 'M', u'ε'), + (0x1D751, 'M', u'θ'), + (0x1D752, 'M', u'κ'), + (0x1D753, 'M', u'φ'), + (0x1D754, 'M', u'Ï'), + (0x1D755, 'M', u'Ï€'), + (0x1D756, 'M', u'α'), + (0x1D757, 'M', u'β'), + (0x1D758, 'M', u'γ'), + (0x1D759, 'M', u'δ'), + (0x1D75A, 'M', u'ε'), + (0x1D75B, 'M', u'ζ'), + (0x1D75C, 'M', u'η'), + (0x1D75D, 'M', u'θ'), + (0x1D75E, 'M', u'ι'), + (0x1D75F, 'M', u'κ'), + (0x1D760, 'M', u'λ'), + (0x1D761, 'M', u'μ'), + (0x1D762, 'M', u'ν'), + (0x1D763, 'M', u'ξ'), + (0x1D764, 'M', u'ο'), + (0x1D765, 'M', u'Ï€'), + (0x1D766, 'M', u'Ï'), + (0x1D767, 'M', u'θ'), + (0x1D768, 'M', u'σ'), + (0x1D769, 'M', u'Ï„'), + (0x1D76A, 'M', u'Ï…'), + (0x1D76B, 'M', u'φ'), + (0x1D76C, 'M', u'χ'), + (0x1D76D, 'M', u'ψ'), + (0x1D76E, 'M', u'ω'), + (0x1D76F, 'M', u'∇'), + (0x1D770, 'M', u'α'), + (0x1D771, 'M', u'β'), + (0x1D772, 'M', u'γ'), + (0x1D773, 'M', u'δ'), + (0x1D774, 'M', u'ε'), + (0x1D775, 'M', u'ζ'), + (0x1D776, 'M', u'η'), + (0x1D777, 'M', u'θ'), + (0x1D778, 'M', u'ι'), + (0x1D779, 'M', u'κ'), + (0x1D77A, 'M', u'λ'), + (0x1D77B, 'M', u'μ'), + (0x1D77C, 
'M', u'ν'), + (0x1D77D, 'M', u'ξ'), + (0x1D77E, 'M', u'ο'), + (0x1D77F, 'M', u'Ï€'), + (0x1D780, 'M', u'Ï'), + (0x1D781, 'M', u'σ'), + (0x1D783, 'M', u'Ï„'), + (0x1D784, 'M', u'Ï…'), + (0x1D785, 'M', u'φ'), + (0x1D786, 'M', u'χ'), + (0x1D787, 'M', u'ψ'), + (0x1D788, 'M', u'ω'), + (0x1D789, 'M', u'∂'), + (0x1D78A, 'M', u'ε'), + (0x1D78B, 'M', u'θ'), + (0x1D78C, 'M', u'κ'), + (0x1D78D, 'M', u'φ'), + (0x1D78E, 'M', u'Ï'), + (0x1D78F, 'M', u'Ï€'), + (0x1D790, 'M', u'α'), + (0x1D791, 'M', u'β'), + (0x1D792, 'M', u'γ'), + (0x1D793, 'M', u'δ'), + (0x1D794, 'M', u'ε'), + (0x1D795, 'M', u'ζ'), + (0x1D796, 'M', u'η'), + (0x1D797, 'M', u'θ'), + (0x1D798, 'M', u'ι'), + (0x1D799, 'M', u'κ'), + (0x1D79A, 'M', u'λ'), + (0x1D79B, 'M', u'μ'), + (0x1D79C, 'M', u'ν'), + (0x1D79D, 'M', u'ξ'), + (0x1D79E, 'M', u'ο'), + (0x1D79F, 'M', u'Ï€'), + (0x1D7A0, 'M', u'Ï'), + (0x1D7A1, 'M', u'θ'), + (0x1D7A2, 'M', u'σ'), + (0x1D7A3, 'M', u'Ï„'), + (0x1D7A4, 'M', u'Ï…'), + (0x1D7A5, 'M', u'φ'), + (0x1D7A6, 'M', u'χ'), + (0x1D7A7, 'M', u'ψ'), + (0x1D7A8, 'M', u'ω'), + (0x1D7A9, 'M', u'∇'), + (0x1D7AA, 'M', u'α'), + (0x1D7AB, 'M', u'β'), + (0x1D7AC, 'M', u'γ'), + (0x1D7AD, 'M', u'δ'), + (0x1D7AE, 'M', u'ε'), + (0x1D7AF, 'M', u'ζ'), + (0x1D7B0, 'M', u'η'), + (0x1D7B1, 'M', u'θ'), + (0x1D7B2, 'M', u'ι'), + (0x1D7B3, 'M', u'κ'), + (0x1D7B4, 'M', u'λ'), + (0x1D7B5, 'M', u'μ'), + (0x1D7B6, 'M', u'ν'), + (0x1D7B7, 'M', u'ξ'), + (0x1D7B8, 'M', u'ο'), + (0x1D7B9, 'M', u'Ï€'), + (0x1D7BA, 'M', u'Ï'), + (0x1D7BB, 'M', u'σ'), + (0x1D7BD, 'M', u'Ï„'), + (0x1D7BE, 'M', u'Ï…'), + (0x1D7BF, 'M', u'φ'), + (0x1D7C0, 'M', u'χ'), + (0x1D7C1, 'M', u'ψ'), + (0x1D7C2, 'M', u'ω'), + (0x1D7C3, 'M', u'∂'), + (0x1D7C4, 'M', u'ε'), + (0x1D7C5, 'M', u'θ'), + (0x1D7C6, 'M', u'κ'), + (0x1D7C7, 'M', u'φ'), + (0x1D7C8, 'M', u'Ï'), + (0x1D7C9, 'M', u'Ï€'), + (0x1D7CA, 'M', u'Ï'), + (0x1D7CC, 'X'), + (0x1D7CE, 'M', u'0'), + (0x1D7CF, 'M', u'1'), + (0x1D7D0, 'M', u'2'), + (0x1D7D1, 'M', u'3'), + (0x1D7D2, 'M', u'4'), + (0x1D7D3, 'M', u'5'), + (0x1D7D4, 'M', u'6'), + (0x1D7D5, 'M', u'7'), + (0x1D7D6, 'M', u'8'), + (0x1D7D7, 'M', u'9'), + (0x1D7D8, 'M', u'0'), + (0x1D7D9, 'M', u'1'), + (0x1D7DA, 'M', u'2'), + (0x1D7DB, 'M', u'3'), + (0x1D7DC, 'M', u'4'), + (0x1D7DD, 'M', u'5'), + (0x1D7DE, 'M', u'6'), + (0x1D7DF, 'M', u'7'), + (0x1D7E0, 'M', u'8'), + (0x1D7E1, 'M', u'9'), + (0x1D7E2, 'M', u'0'), + (0x1D7E3, 'M', u'1'), + (0x1D7E4, 'M', u'2'), + (0x1D7E5, 'M', u'3'), + (0x1D7E6, 'M', u'4'), + (0x1D7E7, 'M', u'5'), + (0x1D7E8, 'M', u'6'), + (0x1D7E9, 'M', u'7'), + (0x1D7EA, 'M', u'8'), + (0x1D7EB, 'M', u'9'), + (0x1D7EC, 'M', u'0'), + (0x1D7ED, 'M', u'1'), + (0x1D7EE, 'M', u'2'), + (0x1D7EF, 'M', u'3'), + (0x1D7F0, 'M', u'4'), + (0x1D7F1, 'M', u'5'), + (0x1D7F2, 'M', u'6'), + (0x1D7F3, 'M', u'7'), + (0x1D7F4, 'M', u'8'), + (0x1D7F5, 'M', u'9'), + (0x1D7F6, 'M', u'0'), + (0x1D7F7, 'M', u'1'), + (0x1D7F8, 'M', u'2'), + (0x1D7F9, 'M', u'3'), + (0x1D7FA, 'M', u'4'), + (0x1D7FB, 'M', u'5'), + (0x1D7FC, 'M', u'6'), + (0x1D7FD, 'M', u'7'), + (0x1D7FE, 'M', u'8'), + (0x1D7FF, 'M', u'9'), + (0x1D800, 'X'), + (0x1EE00, 'M', u'ا'), + (0x1EE01, 'M', u'ب'), + (0x1EE02, 'M', u'ج'), + (0x1EE03, 'M', u'د'), + (0x1EE04, 'X'), + (0x1EE05, 'M', u'Ùˆ'), + (0x1EE06, 'M', u'ز'), + (0x1EE07, 'M', u'Ø­'), + (0x1EE08, 'M', u'Ø·'), + (0x1EE09, 'M', u'ÙŠ'), + (0x1EE0A, 'M', u'Ùƒ'), + (0x1EE0B, 'M', u'Ù„'), + (0x1EE0C, 'M', u'Ù…'), + (0x1EE0D, 'M', u'Ù†'), + (0x1EE0E, 'M', u'س'), + (0x1EE0F, 'M', u'ع'), + (0x1EE10, 'M', u'Ù'), + (0x1EE11, 'M', u'ص'), + (0x1EE12, 'M', u'Ù‚'), + (0x1EE13, 
'M', u'ر'), + (0x1EE14, 'M', u'Ø´'), + (0x1EE15, 'M', u'ت'), + (0x1EE16, 'M', u'Ø«'), + (0x1EE17, 'M', u'Ø®'), + (0x1EE18, 'M', u'Ø°'), + (0x1EE19, 'M', u'ض'), + (0x1EE1A, 'M', u'ظ'), + (0x1EE1B, 'M', u'غ'), + (0x1EE1C, 'M', u'Ù®'), + (0x1EE1D, 'M', u'Úº'), + (0x1EE1E, 'M', u'Ú¡'), + (0x1EE1F, 'M', u'Ù¯'), + (0x1EE20, 'X'), + (0x1EE21, 'M', u'ب'), + (0x1EE22, 'M', u'ج'), + (0x1EE23, 'X'), + (0x1EE24, 'M', u'Ù‡'), + (0x1EE25, 'X'), + (0x1EE27, 'M', u'Ø­'), + (0x1EE28, 'X'), + (0x1EE29, 'M', u'ÙŠ'), + (0x1EE2A, 'M', u'Ùƒ'), + (0x1EE2B, 'M', u'Ù„'), + (0x1EE2C, 'M', u'Ù…'), + (0x1EE2D, 'M', u'Ù†'), + (0x1EE2E, 'M', u'س'), + (0x1EE2F, 'M', u'ع'), + (0x1EE30, 'M', u'Ù'), + (0x1EE31, 'M', u'ص'), + (0x1EE32, 'M', u'Ù‚'), + (0x1EE33, 'X'), + (0x1EE34, 'M', u'Ø´'), + (0x1EE35, 'M', u'ت'), + (0x1EE36, 'M', u'Ø«'), + (0x1EE37, 'M', u'Ø®'), + (0x1EE38, 'X'), + (0x1EE39, 'M', u'ض'), + (0x1EE3A, 'X'), + (0x1EE3B, 'M', u'غ'), + (0x1EE3C, 'X'), + (0x1EE42, 'M', u'ج'), + (0x1EE43, 'X'), + (0x1EE47, 'M', u'Ø­'), + (0x1EE48, 'X'), + (0x1EE49, 'M', u'ÙŠ'), + (0x1EE4A, 'X'), + (0x1EE4B, 'M', u'Ù„'), + (0x1EE4C, 'X'), + (0x1EE4D, 'M', u'Ù†'), + (0x1EE4E, 'M', u'س'), + (0x1EE4F, 'M', u'ع'), + (0x1EE50, 'X'), + (0x1EE51, 'M', u'ص'), + (0x1EE52, 'M', u'Ù‚'), + (0x1EE53, 'X'), + (0x1EE54, 'M', u'Ø´'), + (0x1EE55, 'X'), + (0x1EE57, 'M', u'Ø®'), + (0x1EE58, 'X'), + (0x1EE59, 'M', u'ض'), + (0x1EE5A, 'X'), + (0x1EE5B, 'M', u'غ'), + (0x1EE5C, 'X'), + (0x1EE5D, 'M', u'Úº'), + (0x1EE5E, 'X'), + (0x1EE5F, 'M', u'Ù¯'), + (0x1EE60, 'X'), + (0x1EE61, 'M', u'ب'), + (0x1EE62, 'M', u'ج'), + (0x1EE63, 'X'), + (0x1EE64, 'M', u'Ù‡'), + (0x1EE65, 'X'), + (0x1EE67, 'M', u'Ø­'), + (0x1EE68, 'M', u'Ø·'), + (0x1EE69, 'M', u'ÙŠ'), + (0x1EE6A, 'M', u'Ùƒ'), + (0x1EE6B, 'X'), + (0x1EE6C, 'M', u'Ù…'), + (0x1EE6D, 'M', u'Ù†'), + (0x1EE6E, 'M', u'س'), + (0x1EE6F, 'M', u'ع'), + (0x1EE70, 'M', u'Ù'), + (0x1EE71, 'M', u'ص'), + (0x1EE72, 'M', u'Ù‚'), + (0x1EE73, 'X'), + (0x1EE74, 'M', u'Ø´'), + (0x1EE75, 'M', u'ت'), + (0x1EE76, 'M', u'Ø«'), + (0x1EE77, 'M', u'Ø®'), + (0x1EE78, 'X'), + (0x1EE79, 'M', u'ض'), + (0x1EE7A, 'M', u'ظ'), + (0x1EE7B, 'M', u'غ'), + (0x1EE7C, 'M', u'Ù®'), + (0x1EE7D, 'X'), + (0x1EE7E, 'M', u'Ú¡'), + (0x1EE7F, 'X'), + (0x1EE80, 'M', u'ا'), + (0x1EE81, 'M', u'ب'), + (0x1EE82, 'M', u'ج'), + (0x1EE83, 'M', u'د'), + (0x1EE84, 'M', u'Ù‡'), + (0x1EE85, 'M', u'Ùˆ'), + (0x1EE86, 'M', u'ز'), + (0x1EE87, 'M', u'Ø­'), + (0x1EE88, 'M', u'Ø·'), + (0x1EE89, 'M', u'ÙŠ'), + (0x1EE8A, 'X'), + (0x1EE8B, 'M', u'Ù„'), + (0x1EE8C, 'M', u'Ù…'), + (0x1EE8D, 'M', u'Ù†'), + (0x1EE8E, 'M', u'س'), + (0x1EE8F, 'M', u'ع'), + (0x1EE90, 'M', u'Ù'), + (0x1EE91, 'M', u'ص'), + (0x1EE92, 'M', u'Ù‚'), + (0x1EE93, 'M', u'ر'), + (0x1EE94, 'M', u'Ø´'), + (0x1EE95, 'M', u'ت'), + (0x1EE96, 'M', u'Ø«'), + (0x1EE97, 'M', u'Ø®'), + (0x1EE98, 'M', u'Ø°'), + (0x1EE99, 'M', u'ض'), + (0x1EE9A, 'M', u'ظ'), + (0x1EE9B, 'M', u'غ'), + (0x1EE9C, 'X'), + (0x1EEA1, 'M', u'ب'), + (0x1EEA2, 'M', u'ج'), + (0x1EEA3, 'M', u'د'), + (0x1EEA4, 'X'), + (0x1EEA5, 'M', u'Ùˆ'), + (0x1EEA6, 'M', u'ز'), + (0x1EEA7, 'M', u'Ø­'), + (0x1EEA8, 'M', u'Ø·'), + (0x1EEA9, 'M', u'ÙŠ'), + (0x1EEAA, 'X'), + (0x1EEAB, 'M', u'Ù„'), + (0x1EEAC, 'M', u'Ù…'), + (0x1EEAD, 'M', u'Ù†'), + (0x1EEAE, 'M', u'س'), + (0x1EEAF, 'M', u'ع'), + (0x1EEB0, 'M', u'Ù'), + (0x1EEB1, 'M', u'ص'), + (0x1EEB2, 'M', u'Ù‚'), + (0x1EEB3, 'M', u'ر'), + (0x1EEB4, 'M', u'Ø´'), + (0x1EEB5, 'M', u'ت'), + (0x1EEB6, 'M', u'Ø«'), + (0x1EEB7, 'M', u'Ø®'), + (0x1EEB8, 'M', u'Ø°'), + (0x1EEB9, 'M', u'ض'), + (0x1EEBA, 'M', u'ظ'), + (0x1EEBB, 
'M', u'غ'), + (0x1EEBC, 'X'), + (0x1EEF0, 'V'), + (0x1EEF2, 'X'), + (0x1F000, 'V'), + (0x1F02C, 'X'), + (0x1F030, 'V'), + (0x1F094, 'X'), + (0x1F0A0, 'V'), + (0x1F0AF, 'X'), + (0x1F0B1, 'V'), + (0x1F0BF, 'X'), + (0x1F0C1, 'V'), + (0x1F0D0, 'X'), + (0x1F0D1, 'V'), + (0x1F0E0, 'X'), + (0x1F101, '3', u'0,'), + (0x1F102, '3', u'1,'), + (0x1F103, '3', u'2,'), + (0x1F104, '3', u'3,'), + (0x1F105, '3', u'4,'), + (0x1F106, '3', u'5,'), + (0x1F107, '3', u'6,'), + (0x1F108, '3', u'7,'), + (0x1F109, '3', u'8,'), + (0x1F10A, '3', u'9,'), + (0x1F10B, 'X'), + (0x1F110, '3', u'(a)'), + (0x1F111, '3', u'(b)'), + (0x1F112, '3', u'(c)'), + (0x1F113, '3', u'(d)'), + (0x1F114, '3', u'(e)'), + (0x1F115, '3', u'(f)'), + (0x1F116, '3', u'(g)'), + (0x1F117, '3', u'(h)'), + (0x1F118, '3', u'(i)'), + (0x1F119, '3', u'(j)'), + (0x1F11A, '3', u'(k)'), + (0x1F11B, '3', u'(l)'), + (0x1F11C, '3', u'(m)'), + (0x1F11D, '3', u'(n)'), + (0x1F11E, '3', u'(o)'), + (0x1F11F, '3', u'(p)'), + (0x1F120, '3', u'(q)'), + (0x1F121, '3', u'(r)'), + (0x1F122, '3', u'(s)'), + (0x1F123, '3', u'(t)'), + (0x1F124, '3', u'(u)'), + (0x1F125, '3', u'(v)'), + (0x1F126, '3', u'(w)'), + (0x1F127, '3', u'(x)'), + (0x1F128, '3', u'(y)'), + (0x1F129, '3', u'(z)'), + (0x1F12A, 'M', u'〔s〕'), + (0x1F12B, 'M', u'c'), + (0x1F12C, 'M', u'r'), + (0x1F12D, 'M', u'cd'), + (0x1F12E, 'M', u'wz'), + (0x1F12F, 'X'), + (0x1F130, 'M', u'a'), + (0x1F131, 'M', u'b'), + (0x1F132, 'M', u'c'), + (0x1F133, 'M', u'd'), + (0x1F134, 'M', u'e'), + (0x1F135, 'M', u'f'), + (0x1F136, 'M', u'g'), + (0x1F137, 'M', u'h'), + (0x1F138, 'M', u'i'), + (0x1F139, 'M', u'j'), + (0x1F13A, 'M', u'k'), + (0x1F13B, 'M', u'l'), + (0x1F13C, 'M', u'm'), + (0x1F13D, 'M', u'n'), + (0x1F13E, 'M', u'o'), + (0x1F13F, 'M', u'p'), + (0x1F140, 'M', u'q'), + (0x1F141, 'M', u'r'), + (0x1F142, 'M', u's'), + (0x1F143, 'M', u't'), + (0x1F144, 'M', u'u'), + (0x1F145, 'M', u'v'), + (0x1F146, 'M', u'w'), + (0x1F147, 'M', u'x'), + (0x1F148, 'M', u'y'), + (0x1F149, 'M', u'z'), + (0x1F14A, 'M', u'hv'), + (0x1F14B, 'M', u'mv'), + (0x1F14C, 'M', u'sd'), + (0x1F14D, 'M', u'ss'), + (0x1F14E, 'M', u'ppv'), + (0x1F14F, 'M', u'wc'), + (0x1F150, 'V'), + (0x1F16A, 'M', u'mc'), + (0x1F16B, 'M', u'md'), + (0x1F16C, 'X'), + (0x1F170, 'V'), + (0x1F190, 'M', u'dj'), + (0x1F191, 'V'), + (0x1F19B, 'X'), + (0x1F1E6, 'V'), + (0x1F200, 'M', u'ã»ã‹'), + (0x1F201, 'M', u'ココ'), + (0x1F202, 'M', u'サ'), + (0x1F203, 'X'), + (0x1F210, 'M', u'手'), + (0x1F211, 'M', u'å­—'), + (0x1F212, 'M', u'åŒ'), + (0x1F213, 'M', u'デ'), + (0x1F214, 'M', u'二'), + (0x1F215, 'M', u'多'), + (0x1F216, 'M', u'解'), + (0x1F217, 'M', u'天'), + (0x1F218, 'M', u'交'), + (0x1F219, 'M', u'映'), + (0x1F21A, 'M', u'ç„¡'), + (0x1F21B, 'M', u'æ–™'), + (0x1F21C, 'M', u'å‰'), + (0x1F21D, 'M', u'後'), + (0x1F21E, 'M', u'å†'), + (0x1F21F, 'M', u'æ–°'), + (0x1F220, 'M', u'åˆ'), + (0x1F221, 'M', u'終'), + (0x1F222, 'M', u'生'), + (0x1F223, 'M', u'販'), + (0x1F224, 'M', u'声'), + (0x1F225, 'M', u'å¹'), + (0x1F226, 'M', u'æ¼”'), + (0x1F227, 'M', u'投'), + (0x1F228, 'M', u'æ•'), + (0x1F229, 'M', u'一'), + (0x1F22A, 'M', u'三'), + (0x1F22B, 'M', u'éŠ'), + (0x1F22C, 'M', u'å·¦'), + (0x1F22D, 'M', u'中'), + (0x1F22E, 'M', u'å³'), + (0x1F22F, 'M', u'指'), + (0x1F230, 'M', u'èµ°'), + (0x1F231, 'M', u'打'), + (0x1F232, 'M', u'ç¦'), + (0x1F233, 'M', u'空'), + (0x1F234, 'M', u'åˆ'), + (0x1F235, 'M', u'満'), + (0x1F236, 'M', u'有'), + (0x1F237, 'M', u'月'), + (0x1F238, 'M', u'申'), + (0x1F239, 'M', u'割'), + (0x1F23A, 'M', u'å–¶'), + (0x1F23B, 'X'), + (0x1F240, 'M', u'〔本〕'), + (0x1F241, 'M', u'〔三〕'), + 
(0x1F242, 'M', u'〔二〕'), + (0x1F243, 'M', u'〔安〕'), + (0x1F244, 'M', u'〔点〕'), + (0x1F245, 'M', u'〔打〕'), + (0x1F246, 'M', u'〔盗〕'), + (0x1F247, 'M', u'〔å‹ã€•'), + (0x1F248, 'M', u'〔敗〕'), + (0x1F249, 'X'), + (0x1F250, 'M', u'å¾—'), + (0x1F251, 'M', u'å¯'), + (0x1F252, 'X'), + (0x1F300, 'V'), + (0x1F321, 'X'), + (0x1F330, 'V'), + (0x1F336, 'X'), + (0x1F337, 'V'), + (0x1F37D, 'X'), + (0x1F380, 'V'), + (0x1F394, 'X'), + (0x1F3A0, 'V'), + (0x1F3C5, 'X'), + (0x1F3C6, 'V'), + (0x1F3CB, 'X'), + (0x1F3E0, 'V'), + (0x1F3F1, 'X'), + (0x1F400, 'V'), + (0x1F43F, 'X'), + (0x1F440, 'V'), + (0x1F441, 'X'), + (0x1F442, 'V'), + (0x1F4F8, 'X'), + (0x1F4F9, 'V'), + (0x1F4FD, 'X'), + (0x1F500, 'V'), + (0x1F53E, 'X'), + (0x1F540, 'V'), + (0x1F544, 'X'), + (0x1F550, 'V'), + (0x1F568, 'X'), + (0x1F5FB, 'V'), + (0x1F641, 'X'), + (0x1F645, 'V'), + (0x1F650, 'X'), + (0x1F680, 'V'), + (0x1F6C6, 'X'), + (0x1F700, 'V'), + (0x1F774, 'X'), + (0x20000, 'V'), + (0x2A6D7, 'X'), + (0x2A700, 'V'), + (0x2B735, 'X'), + (0x2B740, 'V'), + (0x2B81E, 'X'), + (0x2F800, 'M', u'丽'), + (0x2F801, 'M', u'丸'), + (0x2F802, 'M', u'ä¹'), + (0x2F803, 'M', u'ð „¢'), + (0x2F804, 'M', u'ä½ '), + (0x2F805, 'M', u'ä¾®'), + (0x2F806, 'M', u'ä¾»'), + (0x2F807, 'M', u'倂'), + (0x2F808, 'M', u'åº'), + (0x2F809, 'M', u'å‚™'), + (0x2F80A, 'M', u'僧'), + (0x2F80B, 'M', u'åƒ'), + (0x2F80C, 'M', u'ã’ž'), + (0x2F80D, 'M', u'𠘺'), + (0x2F80E, 'M', u'å…'), + (0x2F80F, 'M', u'å…”'), + (0x2F810, 'M', u'å…¤'), + (0x2F811, 'M', u'å…·'), + (0x2F812, 'M', u'𠔜'), + (0x2F813, 'M', u'ã’¹'), + (0x2F814, 'M', u'å…§'), + (0x2F815, 'M', u'å†'), + (0x2F816, 'M', u'ð •‹'), + (0x2F817, 'M', u'冗'), + (0x2F818, 'M', u'冤'), + (0x2F819, 'M', u'仌'), + (0x2F81A, 'M', u'冬'), + (0x2F81B, 'M', u'况'), + (0x2F81C, 'M', u'𩇟'), + (0x2F81D, 'M', u'凵'), + (0x2F81E, 'M', u'刃'), + (0x2F81F, 'M', u'ã“Ÿ'), + (0x2F820, 'M', u'刻'), + (0x2F821, 'M', u'剆'), + (0x2F822, 'M', u'割'), + (0x2F823, 'M', u'剷'), + (0x2F824, 'M', u'㔕'), + (0x2F825, 'M', u'勇'), + (0x2F826, 'M', u'勉'), + (0x2F827, 'M', u'勤'), + (0x2F828, 'M', u'勺'), + (0x2F829, 'M', u'包'), + (0x2F82A, 'M', u'匆'), + (0x2F82B, 'M', u'北'), + (0x2F82C, 'M', u'å‰'), + (0x2F82D, 'M', u'å‘'), + (0x2F82E, 'M', u'åš'), + (0x2F82F, 'M', u'å³'), + (0x2F830, 'M', u'å½'), + (0x2F831, 'M', u'å¿'), + (0x2F834, 'M', u'𠨬'), + (0x2F835, 'M', u'ç°'), + (0x2F836, 'M', u'åŠ'), + (0x2F837, 'M', u'åŸ'), + (0x2F838, 'M', u'ð ­£'), + (0x2F839, 'M', u'å«'), + (0x2F83A, 'M', u'å±'), + (0x2F83B, 'M', u'å†'), + (0x2F83C, 'M', u'å’ž'), + (0x2F83D, 'M', u'å¸'), + (0x2F83E, 'M', u'呈'), + (0x2F83F, 'M', u'周'), + (0x2F840, 'M', u'å’¢'), + (0x2F841, 'M', u'哶'), + (0x2F842, 'M', u'å”'), + (0x2F843, 'M', u'å•“'), + (0x2F844, 'M', u'å•£'), + (0x2F845, 'M', u'å–„'), + (0x2F847, 'M', u'å–™'), + (0x2F848, 'M', u'å–«'), + (0x2F849, 'M', u'å–³'), + (0x2F84A, 'M', u'å—‚'), + (0x2F84B, 'M', u'圖'), + (0x2F84C, 'M', u'嘆'), + (0x2F84D, 'M', u'圗'), + (0x2F84E, 'M', u'噑'), + (0x2F84F, 'M', u'å™´'), + (0x2F850, 'M', u'切'), + (0x2F851, 'M', u'壮'), + (0x2F852, 'M', u'城'), + (0x2F853, 'M', u'埴'), + (0x2F854, 'M', u'å '), + (0x2F855, 'M', u'åž‹'), + (0x2F856, 'M', u'å ²'), + (0x2F857, 'M', u'å ±'), + (0x2F858, 'M', u'墬'), + (0x2F859, 'M', u'𡓤'), + (0x2F85A, 'M', u'売'), + (0x2F85B, 'M', u'壷'), + (0x2F85C, 'M', u'夆'), + (0x2F85D, 'M', u'多'), + (0x2F85E, 'M', u'夢'), + (0x2F85F, 'M', u'奢'), + (0x2F860, 'M', u'𡚨'), + (0x2F861, 'M', u'𡛪'), + (0x2F862, 'M', u'姬'), + (0x2F863, 'M', u'娛'), + (0x2F864, 'M', u'娧'), + (0x2F865, 'M', u'姘'), + (0x2F866, 'M', u'婦'), + (0x2F867, 'M', u'ã›®'), + (0x2F868, 'X'), + 
(0x2F869, 'M', u'嬈'), + (0x2F86A, 'M', u'嬾'), + (0x2F86C, 'M', u'𡧈'), + (0x2F86D, 'M', u'寃'), + (0x2F86E, 'M', u'寘'), + (0x2F86F, 'M', u'寧'), + (0x2F870, 'M', u'寳'), + (0x2F871, 'M', u'𡬘'), + (0x2F872, 'M', u'寿'), + (0x2F873, 'M', u'å°†'), + (0x2F874, 'X'), + (0x2F875, 'M', u'å°¢'), + (0x2F876, 'M', u'ãž'), + (0x2F877, 'M', u'å± '), + (0x2F878, 'M', u'å±®'), + (0x2F879, 'M', u'å³€'), + (0x2F87A, 'M', u'å²'), + (0x2F87B, 'M', u'ð¡·¤'), + (0x2F87C, 'M', u'嵃'), + (0x2F87D, 'M', u'ð¡·¦'), + (0x2F87E, 'M', u'åµ®'), + (0x2F87F, 'M', u'嵫'), + (0x2F880, 'M', u'åµ¼'), + (0x2F881, 'M', u'å·¡'), + (0x2F882, 'M', u'å·¢'), + (0x2F883, 'M', u'ã ¯'), + (0x2F884, 'M', u'å·½'), + (0x2F885, 'M', u'帨'), + (0x2F886, 'M', u'帽'), + (0x2F887, 'M', u'幩'), + (0x2F888, 'M', u'ã¡¢'), + (0x2F889, 'M', u'𢆃'), + (0x2F88A, 'M', u'㡼'), + (0x2F88B, 'M', u'庰'), + (0x2F88C, 'M', u'庳'), + (0x2F88D, 'M', u'庶'), + (0x2F88E, 'M', u'廊'), + (0x2F88F, 'M', u'𪎒'), + (0x2F890, 'M', u'廾'), + (0x2F891, 'M', u'𢌱'), + (0x2F893, 'M', u'èˆ'), + (0x2F894, 'M', u'å¼¢'), + (0x2F896, 'M', u'㣇'), + (0x2F897, 'M', u'𣊸'), + (0x2F898, 'M', u'𦇚'), + (0x2F899, 'M', u'å½¢'), + (0x2F89A, 'M', u'彫'), + (0x2F89B, 'M', u'㣣'), + (0x2F89C, 'M', u'徚'), + (0x2F89D, 'M', u'å¿'), + (0x2F89E, 'M', u'å¿—'), + (0x2F89F, 'M', u'忹'), + (0x2F8A0, 'M', u'æ‚'), + (0x2F8A1, 'M', u'㤺'), + (0x2F8A2, 'M', u'㤜'), + (0x2F8A3, 'M', u'æ‚”'), + (0x2F8A4, 'M', u'𢛔'), + (0x2F8A5, 'M', u'惇'), + (0x2F8A6, 'M', u'æ…ˆ'), + (0x2F8A7, 'M', u'æ…Œ'), + (0x2F8A8, 'M', u'æ…Ž'), + (0x2F8A9, 'M', u'æ…Œ'), + (0x2F8AA, 'M', u'æ…º'), + (0x2F8AB, 'M', u'憎'), + (0x2F8AC, 'M', u'憲'), + (0x2F8AD, 'M', u'憤'), + (0x2F8AE, 'M', u'憯'), + (0x2F8AF, 'M', u'懞'), + (0x2F8B0, 'M', u'懲'), + (0x2F8B1, 'M', u'懶'), + (0x2F8B2, 'M', u'æˆ'), + (0x2F8B3, 'M', u'戛'), + (0x2F8B4, 'M', u'æ‰'), + (0x2F8B5, 'M', u'抱'), + (0x2F8B6, 'M', u'æ‹”'), + (0x2F8B7, 'M', u'æ'), + (0x2F8B8, 'M', u'𢬌'), + (0x2F8B9, 'M', u'挽'), + (0x2F8BA, 'M', u'拼'), + (0x2F8BB, 'M', u'æ¨'), + (0x2F8BC, 'M', u'掃'), + (0x2F8BD, 'M', u'æ¤'), + (0x2F8BE, 'M', u'𢯱'), + (0x2F8BF, 'M', u'æ¢'), + (0x2F8C0, 'M', u'æ…'), + (0x2F8C1, 'M', u'掩'), + (0x2F8C2, 'M', u'㨮'), + (0x2F8C3, 'M', u'æ‘©'), + (0x2F8C4, 'M', u'摾'), + (0x2F8C5, 'M', u'æ’'), + (0x2F8C6, 'M', u'æ‘·'), + (0x2F8C7, 'M', u'㩬'), + (0x2F8C8, 'M', u'æ•'), + (0x2F8C9, 'M', u'敬'), + (0x2F8CA, 'M', u'𣀊'), + (0x2F8CB, 'M', u'æ—£'), + (0x2F8CC, 'M', u'書'), + (0x2F8CD, 'M', u'晉'), + (0x2F8CE, 'M', u'㬙'), + (0x2F8CF, 'M', u'æš‘'), + (0x2F8D0, 'M', u'㬈'), + (0x2F8D1, 'M', u'㫤'), + (0x2F8D2, 'M', u'冒'), + (0x2F8D3, 'M', u'冕'), + (0x2F8D4, 'M', u'最'), + (0x2F8D5, 'M', u'æšœ'), + (0x2F8D6, 'M', u'è‚­'), + (0x2F8D7, 'M', u'ä™'), + (0x2F8D8, 'M', u'朗'), + (0x2F8D9, 'M', u'望'), + (0x2F8DA, 'M', u'朡'), + (0x2F8DB, 'M', u'æž'), + (0x2F8DC, 'M', u'æ“'), + (0x2F8DD, 'M', u'ð£ƒ'), + (0x2F8DE, 'M', u'ã­‰'), + (0x2F8DF, 'M', u'柺'), + (0x2F8E0, 'M', u'æž…'), + (0x2F8E1, 'M', u'æ¡’'), + (0x2F8E2, 'M', u'梅'), + (0x2F8E3, 'M', u'𣑭'), + (0x2F8E4, 'M', u'梎'), + (0x2F8E5, 'M', u'æ Ÿ'), + (0x2F8E6, 'M', u'椔'), + (0x2F8E7, 'M', u'ã®'), + (0x2F8E8, 'M', u'楂'), + (0x2F8E9, 'M', u'榣'), + (0x2F8EA, 'M', u'槪'), + (0x2F8EB, 'M', u'檨'), + (0x2F8EC, 'M', u'𣚣'), + (0x2F8ED, 'M', u'æ«›'), + (0x2F8EE, 'M', u'ã°˜'), + (0x2F8EF, 'M', u'次'), + (0x2F8F0, 'M', u'𣢧'), + (0x2F8F1, 'M', u'æ­”'), + (0x2F8F2, 'M', u'㱎'), + (0x2F8F3, 'M', u'æ­²'), + (0x2F8F4, 'M', u'殟'), + (0x2F8F5, 'M', u'殺'), + (0x2F8F6, 'M', u'æ®»'), + (0x2F8F7, 'M', u'ð£ª'), + (0x2F8F8, 'M', u'ð¡´‹'), + (0x2F8F9, 'M', u'𣫺'), + (0x2F8FA, 'M', u'汎'), + (0x2F8FB, 'M', u'𣲼'), + 
(0x2F8FC, 'M', u'沿'), + (0x2F8FD, 'M', u'æ³'), + (0x2F8FE, 'M', u'汧'), + (0x2F8FF, 'M', u'æ´–'), + (0x2F900, 'M', u'æ´¾'), + (0x2F901, 'M', u'æµ·'), + (0x2F902, 'M', u'æµ'), + (0x2F903, 'M', u'浩'), + (0x2F904, 'M', u'浸'), + (0x2F905, 'M', u'涅'), + (0x2F906, 'M', u'𣴞'), + (0x2F907, 'M', u'æ´´'), + (0x2F908, 'M', u'港'), + (0x2F909, 'M', u'æ¹®'), + (0x2F90A, 'M', u'ã´³'), + (0x2F90B, 'M', u'滋'), + (0x2F90C, 'M', u'滇'), + (0x2F90D, 'M', u'𣻑'), + (0x2F90E, 'M', u'æ·¹'), + (0x2F90F, 'M', u'æ½®'), + (0x2F910, 'M', u'𣽞'), + (0x2F911, 'M', u'𣾎'), + (0x2F912, 'M', u'濆'), + (0x2F913, 'M', u'瀹'), + (0x2F914, 'M', u'瀞'), + (0x2F915, 'M', u'瀛'), + (0x2F916, 'M', u'㶖'), + (0x2F917, 'M', u'çŠ'), + (0x2F918, 'M', u'ç½'), + (0x2F919, 'M', u'ç·'), + (0x2F91A, 'M', u'ç‚­'), + (0x2F91B, 'M', u'𠔥'), + (0x2F91C, 'M', u'ç……'), + (0x2F91D, 'M', u'𤉣'), + (0x2F91E, 'M', u'熜'), + (0x2F91F, 'X'), + (0x2F920, 'M', u'爨'), + (0x2F921, 'M', u'爵'), + (0x2F922, 'M', u'ç‰'), + (0x2F923, 'M', u'𤘈'), + (0x2F924, 'M', u'犀'), + (0x2F925, 'M', u'犕'), + (0x2F926, 'M', u'𤜵'), + (0x2F927, 'M', u'𤠔'), + (0x2F928, 'M', u'çº'), + (0x2F929, 'M', u'王'), + (0x2F92A, 'M', u'㺬'), + (0x2F92B, 'M', u'玥'), + (0x2F92C, 'M', u'㺸'), + (0x2F92E, 'M', u'瑇'), + (0x2F92F, 'M', u'ç‘œ'), + (0x2F930, 'M', u'瑱'), + (0x2F931, 'M', u'ç’…'), + (0x2F932, 'M', u'ç“Š'), + (0x2F933, 'M', u'ã¼›'), + (0x2F934, 'M', u'甤'), + (0x2F935, 'M', u'𤰶'), + (0x2F936, 'M', u'甾'), + (0x2F937, 'M', u'𤲒'), + (0x2F938, 'M', u'ç•°'), + (0x2F939, 'M', u'𢆟'), + (0x2F93A, 'M', u'ç˜'), + (0x2F93B, 'M', u'𤾡'), + (0x2F93C, 'M', u'𤾸'), + (0x2F93D, 'M', u'ð¥„'), + (0x2F93E, 'M', u'㿼'), + (0x2F93F, 'M', u'䀈'), + (0x2F940, 'M', u'ç›´'), + (0x2F941, 'M', u'𥃳'), + (0x2F942, 'M', u'𥃲'), + (0x2F943, 'M', u'𥄙'), + (0x2F944, 'M', u'𥄳'), + (0x2F945, 'M', u'眞'), + (0x2F946, 'M', u'真'), + (0x2F948, 'M', u'çŠ'), + (0x2F949, 'M', u'䀹'), + (0x2F94A, 'M', u'çž‹'), + (0x2F94B, 'M', u'ä†'), + (0x2F94C, 'M', u'ä‚–'), + (0x2F94D, 'M', u'ð¥'), + (0x2F94E, 'M', u'ç¡Ž'), + (0x2F94F, 'M', u'碌'), + (0x2F950, 'M', u'磌'), + (0x2F951, 'M', u'䃣'), + (0x2F952, 'M', u'𥘦'), + (0x2F953, 'M', u'祖'), + (0x2F954, 'M', u'𥚚'), + (0x2F955, 'M', u'𥛅'), + (0x2F956, 'M', u'ç¦'), + (0x2F957, 'M', u'秫'), + (0x2F958, 'M', u'䄯'), + (0x2F959, 'M', u'ç©€'), + (0x2F95A, 'M', u'ç©Š'), + (0x2F95B, 'M', u'ç©'), + (0x2F95C, 'M', u'𥥼'), + (0x2F95D, 'M', u'𥪧'), + (0x2F95F, 'X'), + (0x2F960, 'M', u'䈂'), + (0x2F961, 'M', u'𥮫'), + (0x2F962, 'M', u'篆'), + (0x2F963, 'M', u'築'), + (0x2F964, 'M', u'䈧'), + (0x2F965, 'M', u'𥲀'), + (0x2F966, 'M', u'ç³’'), + (0x2F967, 'M', u'䊠'), + (0x2F968, 'M', u'糨'), + (0x2F969, 'M', u'ç³£'), + (0x2F96A, 'M', u'ç´€'), + (0x2F96B, 'M', u'𥾆'), + (0x2F96C, 'M', u'çµ£'), + (0x2F96D, 'M', u'äŒ'), + (0x2F96E, 'M', u'ç·‡'), + (0x2F96F, 'M', u'縂'), + (0x2F970, 'M', u'ç¹…'), + (0x2F971, 'M', u'䌴'), + (0x2F972, 'M', u'𦈨'), + (0x2F973, 'M', u'𦉇'), + (0x2F974, 'M', u'ä™'), + (0x2F975, 'M', u'𦋙'), + (0x2F976, 'M', u'罺'), + (0x2F977, 'M', u'𦌾'), + (0x2F978, 'M', u'羕'), + (0x2F979, 'M', u'翺'), + (0x2F97A, 'M', u'者'), + (0x2F97B, 'M', u'𦓚'), + (0x2F97C, 'M', u'𦔣'), + (0x2F97D, 'M', u'è '), + (0x2F97E, 'M', u'𦖨'), + (0x2F97F, 'M', u'è°'), + (0x2F980, 'M', u'ð£Ÿ'), + (0x2F981, 'M', u'ä•'), + (0x2F982, 'M', u'育'), + (0x2F983, 'M', u'脃'), + (0x2F984, 'M', u'ä‹'), + (0x2F985, 'M', u'脾'), + (0x2F986, 'M', u'媵'), + (0x2F987, 'M', u'𦞧'), + (0x2F988, 'M', u'𦞵'), + (0x2F989, 'M', u'𣎓'), + (0x2F98A, 'M', u'𣎜'), + (0x2F98B, 'M', u'èˆ'), + (0x2F98C, 'M', u'舄'), + (0x2F98D, 'M', u'辞'), + (0x2F98E, 'M', u'ä‘«'), + (0x2F98F, 'M', u'芑'), + 
(0x2F990, 'M', u'芋'), + (0x2F991, 'M', u'èŠ'), + (0x2F992, 'M', u'劳'), + (0x2F993, 'M', u'花'), + (0x2F994, 'M', u'芳'), + (0x2F995, 'M', u'芽'), + (0x2F996, 'M', u'苦'), + (0x2F997, 'M', u'𦬼'), + (0x2F998, 'M', u'è‹¥'), + (0x2F999, 'M', u'èŒ'), + (0x2F99A, 'M', u'è£'), + (0x2F99B, 'M', u'莭'), + (0x2F99C, 'M', u'茣'), + (0x2F99D, 'M', u'莽'), + (0x2F99E, 'M', u'è§'), + (0x2F99F, 'M', u'è‘—'), + (0x2F9A0, 'M', u'è“'), + (0x2F9A1, 'M', u'èŠ'), + (0x2F9A2, 'M', u'èŒ'), + (0x2F9A3, 'M', u'èœ'), + (0x2F9A4, 'M', u'𦰶'), + (0x2F9A5, 'M', u'𦵫'), + (0x2F9A6, 'M', u'𦳕'), + (0x2F9A7, 'M', u'䔫'), + (0x2F9A8, 'M', u'蓱'), + (0x2F9A9, 'M', u'蓳'), + (0x2F9AA, 'M', u'è”–'), + (0x2F9AB, 'M', u'ð§Š'), + (0x2F9AC, 'M', u'蕤'), + (0x2F9AD, 'M', u'𦼬'), + (0x2F9AE, 'M', u'ä•'), + (0x2F9AF, 'M', u'ä•¡'), + (0x2F9B0, 'M', u'𦾱'), + (0x2F9B1, 'M', u'𧃒'), + (0x2F9B2, 'M', u'ä•«'), + (0x2F9B3, 'M', u'è™'), + (0x2F9B4, 'M', u'虜'), + (0x2F9B5, 'M', u'虧'), + (0x2F9B6, 'M', u'虩'), + (0x2F9B7, 'M', u'èš©'), + (0x2F9B8, 'M', u'蚈'), + (0x2F9B9, 'M', u'蜎'), + (0x2F9BA, 'M', u'蛢'), + (0x2F9BB, 'M', u'è¹'), + (0x2F9BC, 'M', u'蜨'), + (0x2F9BD, 'M', u'è«'), + (0x2F9BE, 'M', u'螆'), + (0x2F9BF, 'X'), + (0x2F9C0, 'M', u'蟡'), + (0x2F9C1, 'M', u'è '), + (0x2F9C2, 'M', u'ä—¹'), + (0x2F9C3, 'M', u'è¡ '), + (0x2F9C4, 'M', u'è¡£'), + (0x2F9C5, 'M', u'𧙧'), + (0x2F9C6, 'M', u'裗'), + (0x2F9C7, 'M', u'裞'), + (0x2F9C8, 'M', u'䘵'), + (0x2F9C9, 'M', u'裺'), + (0x2F9CA, 'M', u'ã’»'), + (0x2F9CB, 'M', u'𧢮'), + (0x2F9CC, 'M', u'𧥦'), + (0x2F9CD, 'M', u'äš¾'), + (0x2F9CE, 'M', u'䛇'), + (0x2F9CF, 'M', u'誠'), + (0x2F9D0, 'M', u'è«­'), + (0x2F9D1, 'M', u'變'), + (0x2F9D2, 'M', u'豕'), + (0x2F9D3, 'M', u'𧲨'), + (0x2F9D4, 'M', u'貫'), + (0x2F9D5, 'M', u'è³'), + (0x2F9D6, 'M', u'è´›'), + (0x2F9D7, 'M', u'èµ·'), + (0x2F9D8, 'M', u'𧼯'), + (0x2F9D9, 'M', u'ð  „'), + (0x2F9DA, 'M', u'è·‹'), + (0x2F9DB, 'M', u'趼'), + (0x2F9DC, 'M', u'è·°'), + (0x2F9DD, 'M', u'𠣞'), + (0x2F9DE, 'M', u'è»”'), + (0x2F9DF, 'M', u'輸'), + (0x2F9E0, 'M', u'𨗒'), + (0x2F9E1, 'M', u'𨗭'), + (0x2F9E2, 'M', u'é‚”'), + (0x2F9E3, 'M', u'郱'), + (0x2F9E4, 'M', u'é„‘'), + (0x2F9E5, 'M', u'𨜮'), + (0x2F9E6, 'M', u'é„›'), + (0x2F9E7, 'M', u'鈸'), + (0x2F9E8, 'M', u'é‹—'), + (0x2F9E9, 'M', u'鋘'), + (0x2F9EA, 'M', u'鉼'), + (0x2F9EB, 'M', u'é¹'), + (0x2F9EC, 'M', u'é•'), + (0x2F9ED, 'M', u'𨯺'), + (0x2F9EE, 'M', u'é–‹'), + (0x2F9EF, 'M', u'䦕'), + (0x2F9F0, 'M', u'é–·'), + (0x2F9F1, 'M', u'𨵷'), + (0x2F9F2, 'M', u'䧦'), + (0x2F9F3, 'M', u'雃'), + (0x2F9F4, 'M', u'嶲'), + (0x2F9F5, 'M', u'霣'), + (0x2F9F6, 'M', u'ð©……'), + (0x2F9F7, 'M', u'𩈚'), + (0x2F9F8, 'M', u'ä©®'), + (0x2F9F9, 'M', u'䩶'), + (0x2F9FA, 'M', u'韠'), + (0x2F9FB, 'M', u'ð©Š'), + (0x2F9FC, 'M', u'䪲'), + (0x2F9FD, 'M', u'ð©’–'), + (0x2F9FE, 'M', u'é ‹'), + (0x2FA00, 'M', u'é ©'), + (0x2FA01, 'M', u'ð©–¶'), + (0x2FA02, 'M', u'飢'), + (0x2FA03, 'M', u'䬳'), + (0x2FA04, 'M', u'餩'), + (0x2FA05, 'M', u'馧'), + (0x2FA06, 'M', u'駂'), + (0x2FA07, 'M', u'駾'), + (0x2FA08, 'M', u'䯎'), + (0x2FA09, 'M', u'𩬰'), + (0x2FA0A, 'M', u'鬒'), + (0x2FA0B, 'M', u'é±€'), + (0x2FA0C, 'M', u'é³½'), + (0x2FA0D, 'M', u'䳎'), + (0x2FA0E, 'M', u'ä³­'), + (0x2FA0F, 'M', u'鵧'), + (0x2FA10, 'M', u'𪃎'), + (0x2FA11, 'M', u'䳸'), + (0x2FA12, 'M', u'𪄅'), + (0x2FA13, 'M', u'𪈎'), + (0x2FA14, 'M', u'𪊑'), + (0x2FA15, 'M', u'麻'), + (0x2FA16, 'M', u'äµ–'), + (0x2FA17, 'M', u'黹'), + (0x2FA18, 'M', u'黾'), + (0x2FA19, 'M', u'é¼…'), + (0x2FA1A, 'M', u'é¼'), + (0x2FA1B, 'M', u'é¼–'), + (0x2FA1C, 'M', u'é¼»'), + (0x2FA1D, 'M', u'𪘀'), + (0x2FA1E, 'X'), + (0xE0100, 'I'), + (0xE01F0, 'X'), +) diff --git 
a/Lib/site-packages/lxml-3.5.0.dist-info/DESCRIPTION.rst b/Lib/site-packages/lxml-3.5.0.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..ca4775a --- /dev/null +++ b/Lib/site-packages/lxml-3.5.0.dist-info/DESCRIPTION.rst @@ -0,0 +1,46 @@ +lxml is a Pythonic, mature binding for the libxml2 and libxslt libraries. It +provides safe and convenient access to these libraries using the ElementTree +API. + +It extends the ElementTree API significantly to offer support for XPath, +RelaxNG, XML Schema, XSLT, C14N and much more. + +To contact the project, go to the `project home page +`_ or see our bug tracker at +https://launchpad.net/lxml + +In case you want to use the current in-development version of lxml, +you can get it from the github repository at +https://github.com/lxml/lxml . Note that this requires Cython to +build the sources, see the build instructions on the project home +page. To the same end, running ``easy_install lxml==dev`` will +install lxml from +https://github.com/lxml/lxml/tarball/master#egg=lxml-dev if you have +an appropriate version of Cython installed. + + +After an official release of a new stable series, bug fixes may become +available at +https://github.com/lxml/lxml/tree/lxml-3.5 . +Running ``easy_install lxml==3.5bugfix`` will install +the unreleased branch state from +https://github.com/lxml/lxml/tarball/lxml-3.5#egg=lxml-3.5bugfix +as soon as a maintenance branch has been established. Note that this +requires Cython to be installed at an appropriate version for the build. + +3.5.0 (2015-11-13) +================== + +Bugs fixed +---------- + +* Unicode string results failed XPath queries in PyPy. + +* LP#1497051: HTML target parser failed to terminate on exceptions + and continued parsing instead. + +* Deprecated API usage in doctestcompare. + + + + diff --git a/Lib/site-packages/lxml-3.5.0.dist-info/INSTALLER b/Lib/site-packages/lxml-3.5.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/Lib/site-packages/lxml-3.5.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/Lib/site-packages/lxml-3.5.0.dist-info/METADATA b/Lib/site-packages/lxml-3.5.0.dist-info/METADATA new file mode 100644 index 0000000..84053a8 --- /dev/null +++ b/Lib/site-packages/lxml-3.5.0.dist-info/METADATA @@ -0,0 +1,82 @@ +Metadata-Version: 2.0 +Name: lxml +Version: 3.5.0 +Summary: Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API. 
+Home-page: http://lxml.de/ +Author: lxml dev team +Author-email: lxml-dev@lxml.de +License: UNKNOWN +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Information Technology +Classifier: License :: OSI Approved :: BSD License +Classifier: Programming Language :: Cython +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.2 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: C +Classifier: Operating System :: OS Independent +Classifier: Topic :: Text Processing :: Markup :: HTML +Classifier: Topic :: Text Processing :: Markup :: XML +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Provides-Extra: cssselect +Requires-Dist: cssselect (>=0.7); extra == 'cssselect' +Provides-Extra: html5 +Requires-Dist: html5lib; extra == 'html5' +Provides-Extra: htmlsoup +Requires-Dist: BeautifulSoup4; extra == 'htmlsoup' +Provides-Extra: source +Requires-Dist: Cython (>=0.20); extra == 'source' + +lxml is a Pythonic, mature binding for the libxml2 and libxslt libraries. It +provides safe and convenient access to these libraries using the ElementTree +API. + +It extends the ElementTree API significantly to offer support for XPath, +RelaxNG, XML Schema, XSLT, C14N and much more. + +To contact the project, go to the `project home page +`_ or see our bug tracker at +https://launchpad.net/lxml + +In case you want to use the current in-development version of lxml, +you can get it from the github repository at +https://github.com/lxml/lxml . Note that this requires Cython to +build the sources, see the build instructions on the project home +page. To the same end, running ``easy_install lxml==dev`` will +install lxml from +https://github.com/lxml/lxml/tarball/master#egg=lxml-dev if you have +an appropriate version of Cython installed. + + +After an official release of a new stable series, bug fixes may become +available at +https://github.com/lxml/lxml/tree/lxml-3.5 . +Running ``easy_install lxml==3.5bugfix`` will install +the unreleased branch state from +https://github.com/lxml/lxml/tarball/lxml-3.5#egg=lxml-3.5bugfix +as soon as a maintenance branch has been established. Note that this +requires Cython to be installed at an appropriate version for the build. + +3.5.0 (2015-11-13) +================== + +Bugs fixed +---------- + +* Unicode string results failed XPath queries in PyPy. + +* LP#1497051: HTML target parser failed to terminate on exceptions + and continued parsing instead. + +* Deprecated API usage in doctestcompare. 
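For reference, the ElementTree-style API with the XPath extension described above looks roughly like this in use (a minimal illustrative sketch; only ``lxml.etree`` itself is assumed, and the XML snippet and tag names are invented for the example)::

    from lxml import etree

    # Parse a small, made-up document.
    root = etree.fromstring("<root><item n='1'>a</item><item n='2'>b</item></root>")

    # ElementTree-style access.
    print([el.text for el in root.findall("item")])   # ['a', 'b']

    # XPath support layered on top of the ElementTree API.
    print(root.xpath("//item[@n='2']/text()"))        # ['b']

    # Serialize back to XML.
    print(etree.tostring(root, pretty_print=True).decode())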
+ + + + diff --git a/Lib/site-packages/lxml-3.5.0.dist-info/RECORD b/Lib/site-packages/lxml-3.5.0.dist-info/RECORD new file mode 100644 index 0000000..24d7e47 --- /dev/null +++ b/Lib/site-packages/lxml-3.5.0.dist-info/RECORD @@ -0,0 +1,85 @@ +lxml/ElementInclude.py,sha256=v2Zp-fy190Jvn2Qx7ArC3pjtFEqgQGIBr2J8H87nj04,7641 +lxml/__init__.py,sha256=lYLRn8M0_xdnN2bhEnZ2L7mlY5zM3gwz1kgLILLxC34,551 +lxml/_elementpath.py,sha256=FPpSgh-8KwZUzJQ0Ac2_L0l7i5HnVs5CaMriqDJAPZM,9490 +lxml/builder.py,sha256=2AuTtaN2F8noBWZ9eEfxOlVdztfLGYQGUtfK2uOu--g,8152 +lxml/cssselect.py,sha256=ADTqox2BUhZI_28K26Dnd-rPqvwL1A7KpXwDetXZLfA,3366 +lxml/doctestcompare.py,sha256=c1_02F7p5KE9epBcOJD0OA81v7ANrDyEr2P6A2irB5A,18387 +lxml/etree.cp35-win32.pyd,sha256=pKhpPyJ1YCp_AHWeScIyI50bfZQ0cwSEawm6i3FACwo,2172416 +lxml/lxml.etree.h,sha256=U-TabJyPaqamlrsjCMezjHlVLcTLzqag0Qk1EX_0mlA,8902 +lxml/lxml.etree_api.h,sha256=qy4Y24d0Xi0D3CImAp5Rm2XrPyJW1IRgLa-5b4nPHHQ,17450 +lxml/objectify.cp35-win32.pyd,sha256=mYKoc5Mrdd6x7FfAifQ3FVomHjPI58c8OHaMoaRYKcU,632320 +lxml/pyclasslookup.py,sha256=gLD1HM2HtITYYiGzjEOewSwbB7XkVx_NZv_quCt79Oc,92 +lxml/sax.py,sha256=LHiQ02sYvfKwecxcsRsoZjuH-O6FrQu2PvtK_b_12Mk,8531 +lxml/usedoctest.py,sha256=qRgZKQVcAZcl-zN0AIXVJnOsETUXz2nPXkxuzs1lGgk,230 +lxml/html/ElementSoup.py,sha256=9NQNksJmEr0M2DqzaKhXWdALLo0btwVwjgTHnkfGfvk,319 +lxml/html/__init__.py,sha256=UKxX64g9kNlHmpkS7sTo2tOWPMrhjF9wVqx_kRwiSLI,64571 +lxml/html/_diffcommand.py,sha256=FgOLayf6pz3qmN2EJM9aU6Zrvbi-ecGCPpKKd3jv4MI,2084 +lxml/html/_html5builder.py,sha256=cASxN0Tks3_vqCA_sXa1oCx_McyRL6VpuRLA1T-B58o,3246 +lxml/html/_setmixin.py,sha256=Rt1Pb_GFWp_5BD5GioZBaUPUZbVwPruDPJJMR7O7ntE,1111 +lxml/html/builder.py,sha256=1NuLqKPSaat75wCGufVuv1jIeBLuQCWsJVA_T2XjjUI,4310 +lxml/html/clean.py,sha256=2jHDo-j0eZXIJXbvQkog5XXDo_rtRDWHpeAsRHAsZzk,26084 +lxml/html/defs.py,sha256=fDYcHB4hJ8ncoGqfCIv9s6sZ9JYDEIRBqsNFBYEh2_g,4250 +lxml/html/diff.py,sha256=VwoIaJgvRWTouUmA7FYURwo_JlDce4MkkS0-g36-TVs,30500 +lxml/html/formfill.py,sha256=TcIlKqnGez5L6vfItobqiRlCS5bTaqglgDV5C_56VhM,9699 +lxml/html/html5parser.py,sha256=2MbS_4yTP-Tm8HvHiRm5xaGv9qLsmvS1RAD398--7Cs,6498 +lxml/html/soupparser.py,sha256=KMfefzgk5OLg_US0Rt2jHjJmWafj40I6OTDYV_WPvSE,10012 +lxml/html/usedoctest.py,sha256=tPlmVz4KK1GRKV5DJLrdVECeqsT9PlDzSqqTodVi5s0,249 +lxml/includes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +lxml/includes/c14n.pxd,sha256=pGf910mVH9IdBb7r_aE-J59axIQcqFH4Sx_Tm0PA1m0,1123 +lxml/includes/config.pxd,sha256=H6Mrl8It21hzRI2hzMId9W48QqkYYkoLT4dniLNmdTw,96 +lxml/includes/dtdvalid.pxd,sha256=Rf2vRBbM4O1AOiIsUk_5M7pV3Dz309sS7Ccd2zGFHT0,671 +lxml/includes/etree_defs.h,sha256=cao5QR96gTItCTignEEoGoowC-sZS9vSl0IY7HnSMRk,13748 +lxml/includes/etreepublic.pxd,sha256=d6uN90usIgY_y00OYZqOn4xpk0yuLTmfxlgHlH0fI74,9985 +lxml/includes/htmlparser.pxd,sha256=Va2qbs5zVokERn57HbDY__CiBQOoCS4uI9wEfCnT6zk,2868 +lxml/includes/lxml-version.h,sha256=28xt2kRQ3PzgXmolrD0i3rfaTHmpwlkDPALDcy2TU_0,74 +lxml/includes/relaxng.pxd,sha256=12yapjqDZLF_HTlcuSXSoQpPGK1NU7fj7gzS1EF8kZw,2669 +lxml/includes/schematron.pxd,sha256=5_PUpLHTzzYZ_d-8d2OjKLdwtLIdOm7C20HFUAX8hD4,1640 +lxml/includes/tree.pxd,sha256=Ly-j3oXLo0jyS7nwtoDZKukjt5RydwnctGCFHamQjeA,19759 +lxml/includes/uri.pxd,sha256=5wPtpGU1JtdmpZMTzR8EswazihP3dxkns6Fgo9NWOt8,139 +lxml/includes/xinclude.pxd,sha256=onXD71LVdAbXjUj82_SDtSixNsNh8xbu6Nd9x0V3bmM,852 +lxml/includes/xmlerror.pxd,sha256=8OehYSACGPbrDoUp5arNf7QV_yZqIJOvX8K7wYgOibE,57916 +lxml/includes/xmlparser.pxd,sha256=VG9WadjfcdHUZ3wcstG3iO_pnhDoA2OICAi_lg1C1MQ,10824 
+lxml/includes/xmlschema.pxd,sha256=yYQFrIKAQ_feenENV24X2AZyBIYGBltRDm9qB7CYMww,1696 +lxml/includes/xpath.pxd,sha256=tKYAcwpbSRq8qrsZ2ISVYvEaLnCV9GadNC5o_f8Ua_g,5794 +lxml/includes/xslt.pxd,sha256=bAg5a44I0aqVcYZ6sBcsyXddTNiuMI7UjHWlO8FccyU,7631 +lxml/isoschematron/__init__.py,sha256=FzrpeBJets_w6BH3MW1VbwTfThS35XL_Hk0bGr-NcOQ,12408 +lxml/isoschematron/resources/rng/iso-schematron.rng,sha256=cFvf7ObbuHh52exAqMlTBlGu-5iuOH2pE0WERkdC1Tk,15572 +lxml/isoschematron/resources/xsl/RNG2Schtrn.xsl,sha256=ObebsB8Wt-d3uIA_U5NU85TpnQ3PxPX38TdOAqosMac,3172 +lxml/isoschematron/resources/xsl/XSD2Schtrn.xsl,sha256=QweRrIIM-zFcgg98GXA2CaWfIbgVE0XKEeYSfvv67A0,4563 +lxml/isoschematron/resources/xsl/iso-schematron-xslt1/iso_abstract_expand.xsl,sha256=0SdZY7oqbDgbuIjB9NpUHLXbMmFHVkxIEUZwTa3iRMM,10917 +lxml/isoschematron/resources/xsl/iso-schematron-xslt1/iso_dsdl_include.xsl,sha256=x42QJ-dxQ1waPzydsCoQnp2Xj15y53nW43O7BuoDRHk,39957 +lxml/isoschematron/resources/xsl/iso-schematron-xslt1/iso_schematron_message.xsl,sha256=Tr9BnO6pzjVWwhqJfm10UlvAy95EgfSCz2iMlrVGT6Q,2015 +lxml/isoschematron/resources/xsl/iso-schematron-xslt1/iso_schematron_skeleton_for_xslt1.xsl,sha256=ue8q_88X4e_jsJizo31GRNBxNhdxkEE9fY20oq0Iqwk,71764 +lxml/isoschematron/resources/xsl/iso-schematron-xslt1/iso_svrl_for_xslt1.xsl,sha256=BBAdsVSi5zAzeGepuN6gS1saQINDqITXKplmmj4dTWg,20382 +lxml/isoschematron/resources/xsl/iso-schematron-xslt1/readme.txt,sha256=AVTFPZpEKzuHr7OvQZmhaU3LvwKz06AJw8mT_pNh2yI,3144 +lxml-3.5.0.dist-info/DESCRIPTION.rst,sha256=EPfgejczVWfMqhdWp2Wuc8OMyGXAR6lrTYfmjuVWGUE,1541 +lxml-3.5.0.dist-info/METADATA,sha256=QGRDbFLBUsEeTGXmS_zcFC3iLI_dJfttkb8BIRYd590,3018 +lxml-3.5.0.dist-info/RECORD,, +lxml-3.5.0.dist-info/WHEEL,sha256=c3SRX9YjUgBRRFoaDDirVICw0lM0GonS9hcBq3MYzIc,101 +lxml-3.5.0.dist-info/metadata.json,sha256=ByuBabP24VYY3OZKLqS-7huMMWdAoC6H2LUI9RuabHE,1490 +lxml-3.5.0.dist-info/top_level.txt,sha256=NjD988wqaKq512nshNdLt-uDxsjkp4Bh51m6N-dhUrk,5 +c:\users\j\oml\platform\win32\Lib\site-packages\lxml-3.5.0.dist-info\INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +lxml/html/__pycache__/html5parser.cpython-35.pyc,, +lxml/html/__pycache__/diff.cpython-35.pyc,, +lxml/html/__pycache__/defs.cpython-35.pyc,, +lxml/__pycache__/sax.cpython-35.pyc,, +lxml/__pycache__/usedoctest.cpython-35.pyc,, +lxml/__pycache__/__init__.cpython-35.pyc,, +lxml/html/__pycache__/_diffcommand.cpython-35.pyc,, +lxml/html/__pycache__/_html5builder.cpython-35.pyc,, +lxml/html/__pycache__/_setmixin.cpython-35.pyc,, +lxml/html/__pycache__/clean.cpython-35.pyc,, +lxml/__pycache__/ElementInclude.cpython-35.pyc,, +lxml/__pycache__/cssselect.cpython-35.pyc,, +lxml/__pycache__/pyclasslookup.cpython-35.pyc,, +lxml/html/__pycache__/soupparser.cpython-35.pyc,, +lxml/html/__pycache__/formfill.cpython-35.pyc,, +lxml/isoschematron/__pycache__/__init__.cpython-35.pyc,, +lxml/html/__pycache__/usedoctest.cpython-35.pyc,, +lxml/html/__pycache__/ElementSoup.cpython-35.pyc,, +lxml/__pycache__/_elementpath.cpython-35.pyc,, +lxml/html/__pycache__/builder.cpython-35.pyc,, +lxml/__pycache__/builder.cpython-35.pyc,, +lxml/__pycache__/doctestcompare.cpython-35.pyc,, +lxml/includes/__pycache__/__init__.cpython-35.pyc,, +lxml/html/__pycache__/__init__.cpython-35.pyc,, diff --git a/Lib/site-packages/lxml-3.5.0.dist-info/WHEEL b/Lib/site-packages/lxml-3.5.0.dist-info/WHEEL new file mode 100644 index 0000000..754b5c7 --- /dev/null +++ b/Lib/site-packages/lxml-3.5.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.26.0) +Root-Is-Purelib: false 
+Tag: cp35-none-win32 + diff --git a/Lib/site-packages/lxml-3.5.0.dist-info/metadata.json b/Lib/site-packages/lxml-3.5.0.dist-info/metadata.json new file mode 100644 index 0000000..e00ea39 --- /dev/null +++ b/Lib/site-packages/lxml-3.5.0.dist-info/metadata.json @@ -0,0 +1 @@ +{"generator": "bdist_wheel (0.26.0)", "summary": "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API.", "classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Intended Audience :: Information Technology", "License :: OSI Approved :: BSD License", "Programming Language :: Cython", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: C", "Operating System :: OS Independent", "Topic :: Text Processing :: Markup :: HTML", "Topic :: Text Processing :: Markup :: XML", "Topic :: Software Development :: Libraries :: Python Modules"], "extensions": {"python.details": {"project_urls": {"Home": "http://lxml.de/"}, "contacts": [{"email": "lxml-dev@lxml.de", "name": "lxml dev team", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}}}, "metadata_version": "2.0", "name": "lxml", "extras": ["cssselect", "html5", "htmlsoup", "source"], "run_requires": [{"requires": ["BeautifulSoup4"], "extra": "htmlsoup"}, {"requires": ["Cython (>=0.20)"], "extra": "source"}, {"requires": ["cssselect (>=0.7)"], "extra": "cssselect"}, {"requires": ["html5lib"], "extra": "html5"}], "version": "3.5.0"} \ No newline at end of file diff --git a/Lib/site-packages/lxml-3.5.0.dist-info/top_level.txt b/Lib/site-packages/lxml-3.5.0.dist-info/top_level.txt new file mode 100644 index 0000000..ab90481 --- /dev/null +++ b/Lib/site-packages/lxml-3.5.0.dist-info/top_level.txt @@ -0,0 +1 @@ +lxml diff --git a/Lib/site-packages/lxml/ElementInclude.py b/Lib/site-packages/lxml/ElementInclude.py new file mode 100644 index 0000000..f780670 --- /dev/null +++ b/Lib/site-packages/lxml/ElementInclude.py @@ -0,0 +1,223 @@ +# +# ElementTree +# $Id: ElementInclude.py 1862 2004-06-18 07:31:02Z Fredrik $ +# +# limited xinclude support for element trees +# +# history: +# 2003-08-15 fl created +# 2003-11-14 fl fixed default loader +# +# Copyright (c) 2003-2004 by Fredrik Lundh. All rights reserved. +# +# fredrik@pythonware.com +# http://www.pythonware.com +# +# -------------------------------------------------------------------- +# The ElementTree toolkit is +# +# Copyright (c) 1999-2004 by Fredrik Lundh +# +# By obtaining, using, and/or copying this software and/or its +# associated documentation, you agree that you have read, understood, +# and will comply with the following terms and conditions: +# +# Permission to use, copy, modify, and distribute this software and +# its associated documentation for any purpose and without fee is +# hereby granted, provided that the above copyright notice appears in +# all copies, and that both that copyright notice and this permission +# notice appear in supporting documentation, and that the name of +# Secret Labs AB or the author not be used in advertising or publicity +# pertaining to distribution of the software without specific, written +# prior permission. 
+# +# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD +# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT- +# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR +# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY +# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS +# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE +# OF THIS SOFTWARE. +# -------------------------------------------------------------------- + +""" +Limited XInclude support for the ElementTree package. + +While lxml.etree has full support for XInclude (see +`etree.ElementTree.xinclude()`), this module provides a simpler, pure +Python, ElementTree compatible implementation that supports a simple +form of custom URL resolvers. +""" + +from lxml import etree +import copy +try: + from urlparse import urljoin + from urllib2 import urlopen +except ImportError: + # Python 3 + from urllib.parse import urljoin + from urllib.request import urlopen + +try: + set +except NameError: + # Python 2.3 + from sets import Set as set + +XINCLUDE = "{http://www.w3.org/2001/XInclude}" + +XINCLUDE_INCLUDE = XINCLUDE + "include" +XINCLUDE_FALLBACK = XINCLUDE + "fallback" + +## +# Fatal include error. + +class FatalIncludeError(etree.LxmlSyntaxError): + pass + +## +# ET compatible default loader. +# This loader reads an included resource from disk. +# +# @param href Resource reference. +# @param parse Parse mode. Either "xml" or "text". +# @param encoding Optional text encoding. +# @return The expanded resource. If the parse mode is "xml", this +# is an ElementTree instance. If the parse mode is "text", this +# is a Unicode string. If the loader fails, it can return None +# or raise an IOError exception. +# @throws IOError If the loader fails to load the resource. + +def default_loader(href, parse, encoding=None): + file = open(href, 'rb') + if parse == "xml": + data = etree.parse(file).getroot() + else: + data = file.read() + if not encoding: + encoding = 'utf-8' + data = data.decode(encoding) + file.close() + return data + +## +# Default loader used by lxml.etree - handles custom resolvers properly +# + +def _lxml_default_loader(href, parse, encoding=None, parser=None): + if parse == "xml": + data = etree.parse(href, parser).getroot() + else: + if "://" in href: + f = urlopen(href) + else: + f = open(href, 'rb') + data = f.read() + f.close() + if not encoding: + encoding = 'utf-8' + data = data.decode(encoding) + return data + +## +# Wrapper for ET compatibility - drops the parser + +def _wrap_et_loader(loader): + def load(href, parse, encoding=None, parser=None): + return loader(href, parse, encoding) + return load + + +## +# Expand XInclude directives. +# +# @param elem Root element. +# @param loader Optional resource loader. If omitted, it defaults +# to {@link default_loader}. If given, it should be a callable +# that implements the same interface as default_loader. +# @throws FatalIncludeError If the function fails to include a given +# resource, or if the tree contains malformed XInclude elements. +# @throws IOError If the function fails to load a given resource. 
+# @returns the node or its replacement if it was an XInclude node + +def include(elem, loader=None, base_url=None): + if base_url is None: + if hasattr(elem, 'getroot'): + tree = elem + elem = elem.getroot() + else: + tree = elem.getroottree() + if hasattr(tree, 'docinfo'): + base_url = tree.docinfo.URL + elif hasattr(elem, 'getroot'): + elem = elem.getroot() + _include(elem, loader, base_url=base_url) + +def _include(elem, loader=None, _parent_hrefs=None, base_url=None): + if loader is not None: + load_include = _wrap_et_loader(loader) + else: + load_include = _lxml_default_loader + + if _parent_hrefs is None: + _parent_hrefs = set() + + parser = elem.getroottree().parser + + include_elements = list( + elem.iter('{http://www.w3.org/2001/XInclude}*')) + + for e in include_elements: + if e.tag == XINCLUDE_INCLUDE: + # process xinclude directive + href = urljoin(base_url, e.get("href")) + parse = e.get("parse", "xml") + parent = e.getparent() + if parse == "xml": + if href in _parent_hrefs: + raise FatalIncludeError( + "recursive include of %r detected" % href + ) + _parent_hrefs.add(href) + node = load_include(href, parse, parser=parser) + if node is None: + raise FatalIncludeError( + "cannot load %r as %r" % (href, parse) + ) + node = _include(node, loader, _parent_hrefs) + if e.tail: + node.tail = (node.tail or "") + e.tail + if parent is None: + return node # replaced the root node! + parent.replace(e, node) + elif parse == "text": + text = load_include(href, parse, encoding=e.get("encoding")) + if text is None: + raise FatalIncludeError( + "cannot load %r as %r" % (href, parse) + ) + predecessor = e.getprevious() + if predecessor is not None: + predecessor.tail = (predecessor.tail or "") + text + elif parent is None: + return text # replaced the root node! + else: + parent.text = (parent.text or "") + text + (e.tail or "") + parent.remove(e) + else: + raise FatalIncludeError( + "unknown parse type in xi:include tag (%r)" % parse + ) + elif e.tag == XINCLUDE_FALLBACK: + parent = e.getparent() + if parent is not None and parent.tag != XINCLUDE_INCLUDE: + raise FatalIncludeError( + "xi:fallback tag must be child of xi:include (%r)" % e.tag + ) + else: + raise FatalIncludeError( + "Invalid element found in XInclude namespace (%r)" % e.tag + ) + return elem diff --git a/Lib/site-packages/lxml/__init__.py b/Lib/site-packages/lxml/__init__.py new file mode 100644 index 0000000..07cbe3a --- /dev/null +++ b/Lib/site-packages/lxml/__init__.py @@ -0,0 +1,20 @@ +# this is a package + +def get_include(): + """ + Returns a list of header include paths (for lxml itself, libxml2 + and libxslt) needed to compile C code against lxml if it was built + with statically linked libraries. 
+ """ + import os + lxml_path = __path__[0] + include_path = os.path.join(lxml_path, 'includes') + includes = [include_path, lxml_path] + + for name in os.listdir(include_path): + path = os.path.join(include_path, name) + if os.path.isdir(path): + includes.append(path) + + return includes + diff --git a/Lib/site-packages/lxml/_elementpath.py b/Lib/site-packages/lxml/_elementpath.py new file mode 100644 index 0000000..e560de4 --- /dev/null +++ b/Lib/site-packages/lxml/_elementpath.py @@ -0,0 +1,315 @@ +# +# ElementTree +# $Id: ElementPath.py 3375 2008-02-13 08:05:08Z fredrik $ +# +# limited xpath support for element trees +# +# history: +# 2003-05-23 fl created +# 2003-05-28 fl added support for // etc +# 2003-08-27 fl fixed parsing of periods in element names +# 2007-09-10 fl new selection engine +# 2007-09-12 fl fixed parent selector +# 2007-09-13 fl added iterfind; changed findall to return a list +# 2007-11-30 fl added namespaces support +# 2009-10-30 fl added child element value filter +# +# Copyright (c) 2003-2009 by Fredrik Lundh. All rights reserved. +# +# fredrik@pythonware.com +# http://www.pythonware.com +# +# -------------------------------------------------------------------- +# The ElementTree toolkit is +# +# Copyright (c) 1999-2009 by Fredrik Lundh +# +# By obtaining, using, and/or copying this software and/or its +# associated documentation, you agree that you have read, understood, +# and will comply with the following terms and conditions: +# +# Permission to use, copy, modify, and distribute this software and +# its associated documentation for any purpose and without fee is +# hereby granted, provided that the above copyright notice appears in +# all copies, and that both that copyright notice and this permission +# notice appear in supporting documentation, and that the name of +# Secret Labs AB or the author not be used in advertising or publicity +# pertaining to distribution of the software without specific, written +# prior permission. +# +# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD +# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT- +# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR +# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY +# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS +# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE +# OF THIS SOFTWARE. +# -------------------------------------------------------------------- + +## +# Implementation module for XPath support. There's usually no reason +# to import this module directly; the ElementTree does this for +# you, if needed. 
+## + +import re + +xpath_tokenizer_re = re.compile( + "(" + "'[^']*'|\"[^\"]*\"|" + "::|" + "//?|" + "\.\.|" + "\(\)|" + "[/.*:\[\]\(\)@=])|" + "((?:\{[^}]+\})?[^/\[\]\(\)@=\s]+)|" + "\s+" + ) + +def xpath_tokenizer(pattern, namespaces=None): + for token in xpath_tokenizer_re.findall(pattern): + tag = token[1] + if tag and tag[0] != "{" and ":" in tag: + try: + prefix, uri = tag.split(":", 1) + if not namespaces: + raise KeyError + yield token[0], "{%s}%s" % (namespaces[prefix], uri) + except KeyError: + raise SyntaxError("prefix %r not found in prefix map" % prefix) + else: + yield token + + +def prepare_child(next, token): + tag = token[1] + def select(result): + for elem in result: + for e in elem.iterchildren(tag): + yield e + return select + +def prepare_star(next, token): + def select(result): + for elem in result: + for e in elem.iterchildren('*'): + yield e + return select + +def prepare_self(next, token): + def select(result): + return result + return select + +def prepare_descendant(next, token): + token = next() + if token[0] == "*": + tag = "*" + elif not token[0]: + tag = token[1] + else: + raise SyntaxError("invalid descendant") + def select(result): + for elem in result: + for e in elem.iterdescendants(tag): + yield e + return select + +def prepare_parent(next, token): + def select(result): + for elem in result: + parent = elem.getparent() + if parent is not None: + yield parent + return select + +def prepare_predicate(next, token): + # FIXME: replace with real parser!!! refs: + # http://effbot.org/zone/simple-iterator-parser.htm + # http://javascript.crockford.com/tdop/tdop.html + signature = [] + predicate = [] + while 1: + token = next() + if token[0] == "]": + break + if token[0] and token[0][:1] in "'\"": + token = "'", token[0][1:-1] + signature.append(token[0] or "-") + predicate.append(token[1]) + signature = "".join(signature) + # use signature to determine predicate type + if signature == "@-": + # [@attribute] predicate + key = predicate[1] + def select(result): + for elem in result: + if elem.get(key) is not None: + yield elem + return select + if signature == "@-='": + # [@attribute='value'] + key = predicate[1] + value = predicate[-1] + def select(result): + for elem in result: + if elem.get(key) == value: + yield elem + return select + if signature == "-" and not re.match("-?\d+$", predicate[0]): + # [tag] + tag = predicate[0] + def select(result): + for elem in result: + for _ in elem.iterchildren(tag): + yield elem + break + return select + if signature == "-='" and not re.match("-?\d+$", predicate[0]): + # [tag='value'] + tag = predicate[0] + value = predicate[-1] + def select(result): + for elem in result: + for e in elem.iterchildren(tag): + if "".join(e.itertext()) == value: + yield elem + break + return select + if signature == "-" or signature == "-()" or signature == "-()-": + # [index] or [last()] or [last()-index] + if signature == "-": + # [index] + index = int(predicate[0]) - 1 + if index < 0: + if index == -1: + raise SyntaxError( + "indices in path predicates are 1-based, not 0-based") + else: + raise SyntaxError("path index >= 1 expected") + else: + if predicate[0] != "last": + raise SyntaxError("unsupported function") + if signature == "-()-": + try: + index = int(predicate[2]) - 1 + except ValueError: + raise SyntaxError("unsupported expression") + else: + index = -1 + def select(result): + for elem in result: + parent = elem.getparent() + if parent is None: + continue + try: + # FIXME: what if the selector is "*" ? 
+ elems = list(parent.iterchildren(elem.tag)) + if elems[index] is elem: + yield elem + except IndexError: + pass + return select + raise SyntaxError("invalid predicate") + +ops = { + "": prepare_child, + "*": prepare_star, + ".": prepare_self, + "..": prepare_parent, + "//": prepare_descendant, + "[": prepare_predicate, +} + + +# -------------------------------------------------------------------- + +_cache = {} + + +def _build_path_iterator(path, namespaces): + """compile selector pattern""" + if namespaces and (None in namespaces or '' in namespaces): + raise ValueError("empty namespace prefix is not supported in ElementPath") + if path[-1:] == "/": + path += "*" # implicit all (FIXME: keep this?) + cache_key = (path, namespaces and tuple(sorted(namespaces.items())) or None) + try: + return _cache[cache_key] + except KeyError: + pass + if len(_cache) > 100: + _cache.clear() + + if path[:1] == "/": + raise SyntaxError("cannot use absolute path on element") + stream = iter(xpath_tokenizer(path, namespaces)) + try: + _next = stream.next + except AttributeError: + # Python 3 + _next = stream.__next__ + try: + token = _next() + except StopIteration: + raise SyntaxError("empty path expression") + selector = [] + while 1: + try: + selector.append(ops[token[0]](_next, token)) + except StopIteration: + raise SyntaxError("invalid path") + try: + token = _next() + if token[0] == "/": + token = _next() + except StopIteration: + break + _cache[cache_key] = selector + return selector + + +## +# Iterate over the matching nodes + +def iterfind(elem, path, namespaces=None): + selector = _build_path_iterator(path, namespaces) + result = iter((elem,)) + for select in selector: + result = select(result) + return result + + +## +# Find first matching object. + +def find(elem, path, namespaces=None): + it = iterfind(elem, path, namespaces) + try: + try: + _next = it.next + except AttributeError: + return next(it) + else: + return _next() + except StopIteration: + return None + + +## +# Find all matching objects. + +def findall(elem, path, namespaces=None): + return list(iterfind(elem, path, namespaces)) + + +## +# Find text for first matching object. + +def findtext(elem, path, default=None, namespaces=None): + el = find(elem, path, namespaces) + if el is None: + return default + else: + return el.text or '' diff --git a/Lib/site-packages/lxml/builder.py b/Lib/site-packages/lxml/builder.py new file mode 100644 index 0000000..0bb3eff --- /dev/null +++ b/Lib/site-packages/lxml/builder.py @@ -0,0 +1,246 @@ +# +# Element generator factory by Fredrik Lundh. 
+# +# Source: +# http://online.effbot.org/2006_11_01_archive.htm#et-builder +# http://effbot.python-hosting.com/file/stuff/sandbox/elementlib/builder.py +# +# -------------------------------------------------------------------- +# The ElementTree toolkit is +# +# Copyright (c) 1999-2004 by Fredrik Lundh +# +# By obtaining, using, and/or copying this software and/or its +# associated documentation, you agree that you have read, understood, +# and will comply with the following terms and conditions: +# +# Permission to use, copy, modify, and distribute this software and +# its associated documentation for any purpose and without fee is +# hereby granted, provided that the above copyright notice appears in +# all copies, and that both that copyright notice and this permission +# notice appear in supporting documentation, and that the name of +# Secret Labs AB or the author not be used in advertising or publicity +# pertaining to distribution of the software without specific, written +# prior permission. +# +# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD +# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT- +# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR +# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY +# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS +# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE +# OF THIS SOFTWARE. +# -------------------------------------------------------------------- + +""" +The ``E`` Element factory for generating XML documents. +""" + +import lxml.etree as ET + +try: + from functools import partial +except ImportError: + # fake it for pre-2.5 releases + def partial(func, tag): + return lambda *args, **kwargs: func(tag, *args, **kwargs) + +try: + callable +except NameError: + # Python 3 + def callable(f): + return hasattr(f, '__call__') + +try: + basestring +except NameError: + basestring = str + +try: + unicode +except NameError: + unicode = str + + +class ElementMaker(object): + """Element generator factory. + + Unlike the ordinary Element factory, the E factory allows you to pass in + more than just a tag and some optional attributes; you can also pass in + text and other elements. The text is added as either text or tail + attributes, and elements are inserted at the right spot. 
Some small + examples:: + + >>> from lxml import etree as ET + >>> from lxml.builder import E + + >>> ET.tostring(E("tag")) + '<tag/>' + >>> ET.tostring(E("tag", "text")) + '<tag>text</tag>' + >>> ET.tostring(E("tag", "text", key="value")) + '<tag key="value">text</tag>' + >>> ET.tostring(E("tag", E("subtag", "text"), "tail")) + '<tag><subtag>text</subtag>tail</tag>' + + For simple tags, the factory also allows you to write ``E.tag(...)`` instead + of ``E('tag', ...)``:: + + >>> ET.tostring(E.tag()) + '<tag/>' + >>> ET.tostring(E.tag("text")) + '<tag>text</tag>' + >>> ET.tostring(E.tag(E.subtag("text"), "tail")) + '<tag><subtag>text</subtag>tail</tag>' + + Here's a somewhat larger example; this shows how to generate HTML + documents, using a mix of prepared factory functions for inline elements, + nested ``E.tag`` calls, and embedded XHTML fragments:: + + # some common inline elements + A = E.a + I = E.i + B = E.b + + def CLASS(v): + # helper function, 'class' is a reserved word + return {'class': v} + + page = ( + E.html( + E.head( + E.title("This is a sample document") + ), + E.body( + E.h1("Hello!", CLASS("title")), + E.p("This is a paragraph with ", B("bold"), " text in it!"), + E.p("This is another paragraph, with a ", + A("link", href="http://www.python.org"), "."), + E.p("Here are some reservered characters: <spam&egg>."), + ET.XML("<p>And finally, here is an embedded XHTML fragment.</p>
"), + ) + ) + ) + + print ET.tostring(page) + + Here's a prettyprinted version of the output from the above script:: + + <html> + <head> + <title>This is a sample document</title> + </head> + <body> + <h1 class="title">Hello!</h1> + <p>This is a paragraph with <b>bold</b> text in it!</p> + <p>This is another paragraph, with <a href="http://www.python.org">link</a>.</p> + <p>Here are some reservered characters: &lt;spam&amp;egg&gt;.</p> + <p>And finally, here is an embedded XHTML fragment.</p> + </body> + </html>
+ + + + For namespace support, you can pass a namespace map (``nsmap``) + and/or a specific target ``namespace`` to the ElementMaker class:: + + >>> E = ElementMaker(namespace="http://my.ns/") + >>> print(ET.tostring( E.test )) + + + >>> E = ElementMaker(namespace="http://my.ns/", nsmap={'p':'http://my.ns/'}) + >>> print(ET.tostring( E.test )) + + """ + + def __init__(self, typemap=None, + namespace=None, nsmap=None, makeelement=None): + if namespace is not None: + self._namespace = '{' + namespace + '}' + else: + self._namespace = None + + if nsmap: + self._nsmap = dict(nsmap) + else: + self._nsmap = None + + if makeelement is not None: + assert callable(makeelement) + self._makeelement = makeelement + else: + self._makeelement = ET.Element + + # initialize type map for this element factory + + if typemap: + typemap = typemap.copy() + else: + typemap = {} + + def add_text(elem, item): + try: + elem[-1].tail = (elem[-1].tail or "") + item + except IndexError: + elem.text = (elem.text or "") + item + + def add_cdata(elem, cdata): + if elem.text: + raise ValueError("Can't add a CDATA section. Element already has some text: %r" % elem.text) + elem.text = cdata + + if str not in typemap: + typemap[str] = add_text + if unicode not in typemap: + typemap[unicode] = add_text + if ET.CDATA not in typemap: + typemap[ET.CDATA] = add_cdata + + def add_dict(elem, item): + attrib = elem.attrib + for k, v in item.items(): + if isinstance(v, basestring): + attrib[k] = v + else: + attrib[k] = typemap[type(v)](None, v) + if dict not in typemap: + typemap[dict] = add_dict + + self._typemap = typemap + + def __call__(self, tag, *children, **attrib): + get = self._typemap.get + + if self._namespace is not None and tag[0] != '{': + tag = self._namespace + tag + elem = self._makeelement(tag, nsmap=self._nsmap) + if attrib: + get(dict)(elem, attrib) + + for item in children: + if callable(item): + item = item() + t = get(type(item)) + if t is None: + if ET.iselement(item): + elem.append(item) + continue + for basetype in type(item).__mro__: + # See if the typemap knows of any of this type's bases. + t = get(basetype) + if t is not None: + break + else: + raise TypeError("bad argument type: %s(%r)" % + (type(item).__name__, item)) + v = t(elem, item) + if v: + get(type(v))(elem, v) + + return elem + + def __getattr__(self, tag): + return partial(self, tag) + +# create factory object +E = ElementMaker() diff --git a/Lib/site-packages/lxml/cssselect.py b/Lib/site-packages/lxml/cssselect.py new file mode 100644 index 0000000..586a142 --- /dev/null +++ b/Lib/site-packages/lxml/cssselect.py @@ -0,0 +1,102 @@ +"""CSS Selectors based on XPath. + +This module supports selecting XML/HTML tags based on CSS selectors. +See the `CSSSelector` class for details. + +This is a thin wrapper around cssselect 0.7 or later. +""" + +from __future__ import absolute_import + +from . import etree +try: + import cssselect as external_cssselect +except ImportError: + raise ImportError( + 'cssselect does not seem to be installed. ' + 'See http://packages.python.org/cssselect/') + + +SelectorSyntaxError = external_cssselect.SelectorSyntaxError +ExpressionError = external_cssselect.ExpressionError +SelectorError = external_cssselect.SelectorError + + +__all__ = ['SelectorSyntaxError', 'ExpressionError', 'SelectorError', + 'CSSSelector'] + + +class LxmlTranslator(external_cssselect.GenericTranslator): + """ + A custom CSS selector to XPath translator with lxml-specific extensions. 
+ """ + def xpath_contains_function(self, xpath, function): + # Defined there, removed in later drafts: + # http://www.w3.org/TR/2001/CR-css3-selectors-20011113/#content-selectors + if function.argument_types() not in (['STRING'], ['IDENT']): + raise ExpressionError( + "Expected a single string or ident for :contains(), got %r" + % function.arguments) + value = function.arguments[0].value + return xpath.add_condition( + 'contains(__lxml_internal_css:lower-case(string(.)), %s)' + % self.xpath_literal(value.lower())) + + +class LxmlHTMLTranslator(LxmlTranslator, external_cssselect.HTMLTranslator): + """ + lxml extensions + HTML support. + """ + + +def _make_lower_case(context, s): + return s.lower() + +ns = etree.FunctionNamespace('http://codespeak.net/lxml/css/') +ns.prefix = '__lxml_internal_css' +ns['lower-case'] = _make_lower_case + + +class CSSSelector(etree.XPath): + """A CSS selector. + + Usage:: + + >>> from lxml import etree, cssselect + >>> select = cssselect.CSSSelector("a tag > child") + + >>> root = etree.XML("TEXT") + >>> [ el.tag for el in select(root) ] + ['child'] + + To use CSS namespaces, you need to pass a prefix-to-namespace + mapping as ``namespaces`` keyword argument:: + + >>> rdfns = 'http://www.w3.org/1999/02/22-rdf-syntax-ns#' + >>> select_ns = cssselect.CSSSelector('root > rdf|Description', + ... namespaces={'rdf': rdfns}) + + >>> rdf = etree.XML(( + ... '' + ... 'blah' + ... '') % rdfns) + >>> [(el.tag, el.text) for el in select_ns(rdf)] + [('{http://www.w3.org/1999/02/22-rdf-syntax-ns#}Description', 'blah')] + + """ + def __init__(self, css, namespaces=None, translator='xml'): + if translator == 'xml': + translator = LxmlTranslator() + elif translator == 'html': + translator = LxmlHTMLTranslator() + elif translator == 'xhtml': + translator = LxmlHTMLTranslator(xhtml=True) + path = translator.css_to_xpath(css) + etree.XPath.__init__(self, path, namespaces=namespaces) + self.css = css + + def __repr__(self): + return '<%s %s for %r>' % ( + self.__class__.__name__, + hex(abs(id(self)))[2:], + self.css) diff --git a/Lib/site-packages/lxml/doctestcompare.py b/Lib/site-packages/lxml/doctestcompare.py new file mode 100644 index 0000000..eb7c7f9 --- /dev/null +++ b/Lib/site-packages/lxml/doctestcompare.py @@ -0,0 +1,508 @@ +""" +lxml-based doctest output comparison. + +Note: normally, you should just import the `lxml.usedoctest` and +`lxml.html.usedoctest` modules from within a doctest, instead of this +one:: + + >>> import lxml.usedoctest # for XML output + + >>> import lxml.html.usedoctest # for HTML output + +To use this module directly, you must call ``lxmldoctest.install()``, +which will cause doctest to use this in all subsequent calls. + +This changes the way output is checked and comparisons are made for +XML or HTML-like content. + +XML or HTML content is noticed because the example starts with ``<`` +(it's HTML if it starts with ```` or include an ``any`` +attribute in the tag. An ``any`` tag matches any tag, while the +attribute matches any and all attributes. + +When a match fails, the reformatted example and gotten text is +displayed (indented), and a rough diff-like output is given. Anything +marked with ``+`` is in the output but wasn't supposed to be, and +similarly ``-`` means its in the example but wasn't in the output. 
+ +You can disable parsing on one line with ``# doctest:+NOPARSE_MARKUP`` +""" + +from lxml import etree +import sys +import re +import doctest +try: + from html import escape as html_escape +except ImportError: + from cgi import escape as html_escape + +__all__ = ['PARSE_HTML', 'PARSE_XML', 'NOPARSE_MARKUP', 'LXMLOutputChecker', + 'LHTMLOutputChecker', 'install', 'temp_install'] + +try: + _basestring = basestring +except NameError: + _basestring = (str, bytes) + +_IS_PYTHON_3 = sys.version_info[0] >= 3 + +PARSE_HTML = doctest.register_optionflag('PARSE_HTML') +PARSE_XML = doctest.register_optionflag('PARSE_XML') +NOPARSE_MARKUP = doctest.register_optionflag('NOPARSE_MARKUP') + +OutputChecker = doctest.OutputChecker + +def strip(v): + if v is None: + return None + else: + return v.strip() + +def norm_whitespace(v): + return _norm_whitespace_re.sub(' ', v) + +_html_parser = etree.HTMLParser(recover=False, remove_blank_text=True) + +def html_fromstring(html): + return etree.fromstring(html, _html_parser) + +# We use this to distinguish repr()s from elements: +_repr_re = re.compile(r'^<[^>]+ (at|object) ') +_norm_whitespace_re = re.compile(r'[ \t\n][ \t\n]+') + +class LXMLOutputChecker(OutputChecker): + + empty_tags = ( + 'param', 'img', 'area', 'br', 'basefont', 'input', + 'base', 'meta', 'link', 'col') + + def get_default_parser(self): + return etree.XML + + def check_output(self, want, got, optionflags): + alt_self = getattr(self, '_temp_override_self', None) + if alt_self is not None: + super_method = self._temp_call_super_check_output + self = alt_self + else: + super_method = OutputChecker.check_output + parser = self.get_parser(want, got, optionflags) + if not parser: + return super_method( + self, want, got, optionflags) + try: + want_doc = parser(want) + except etree.XMLSyntaxError: + return False + try: + got_doc = parser(got) + except etree.XMLSyntaxError: + return False + return self.compare_docs(want_doc, got_doc) + + def get_parser(self, want, got, optionflags): + parser = None + if NOPARSE_MARKUP & optionflags: + return None + if PARSE_HTML & optionflags: + parser = html_fromstring + elif PARSE_XML & optionflags: + parser = etree.XML + elif (want.strip().lower().startswith('' % el.tag + return '<%s %s>' % (el.tag, ' '.join(attrs)) + + def format_end_tag(self, el): + if isinstance(el, etree.CommentBase): + # FIXME: probably PIs should be handled specially too? 
+ return '-->' + return '' % el.tag + + def collect_diff(self, want, got, html, indent): + parts = [] + if not len(want) and not len(got): + parts.append(' '*indent) + parts.append(self.collect_diff_tag(want, got)) + if not self.html_empty_tag(got, html): + parts.append(self.collect_diff_text(want.text, got.text)) + parts.append(self.collect_diff_end_tag(want, got)) + parts.append(self.collect_diff_text(want.tail, got.tail)) + parts.append('\n') + return ''.join(parts) + parts.append(' '*indent) + parts.append(self.collect_diff_tag(want, got)) + parts.append('\n') + if strip(want.text) or strip(got.text): + parts.append(' '*indent) + parts.append(self.collect_diff_text(want.text, got.text)) + parts.append('\n') + want_children = list(want) + got_children = list(got) + while want_children or got_children: + if not want_children: + parts.append(self.format_doc(got_children.pop(0), html, indent+2, '+')) + continue + if not got_children: + parts.append(self.format_doc(want_children.pop(0), html, indent+2, '-')) + continue + parts.append(self.collect_diff( + want_children.pop(0), got_children.pop(0), html, indent+2)) + parts.append(' '*indent) + parts.append(self.collect_diff_end_tag(want, got)) + parts.append('\n') + if strip(want.tail) or strip(got.tail): + parts.append(' '*indent) + parts.append(self.collect_diff_text(want.tail, got.tail)) + parts.append('\n') + return ''.join(parts) + + def collect_diff_tag(self, want, got): + if not self.tag_compare(want.tag, got.tag): + tag = '%s (got: %s)' % (want.tag, got.tag) + else: + tag = got.tag + attrs = [] + any = want.tag == 'any' or 'any' in want.attrib + for name, value in sorted(got.attrib.items()): + if name not in want.attrib and not any: + attrs.append('+%s="%s"' % (name, self.format_text(value, False))) + else: + if name in want.attrib: + text = self.collect_diff_text(want.attrib[name], value, False) + else: + text = self.format_text(value, False) + attrs.append('%s="%s"' % (name, text)) + if not any: + for name, value in sorted(want.attrib.items()): + if name in got.attrib: + continue + attrs.append('-%s="%s"' % (name, self.format_text(value, False))) + if attrs: + tag = '<%s %s>' % (tag, ' '.join(attrs)) + else: + tag = '<%s>' % tag + return tag + + def collect_diff_end_tag(self, want, got): + if want.tag != got.tag: + tag = '%s (got: %s)' % (want.tag, got.tag) + else: + tag = got.tag + return '' % tag + + def collect_diff_text(self, want, got, strip=True): + if self.text_compare(want, got, strip): + if not got: + return '' + return self.format_text(got, strip) + text = '%s (got: %s)' % (want, got) + return self.format_text(text, strip) + +class LHTMLOutputChecker(LXMLOutputChecker): + def get_default_parser(self): + return html_fromstring + +def install(html=False): + """ + Install doctestcompare for all future doctests. + + If html is true, then by default the HTML parser will be used; + otherwise the XML parser is used. + """ + if html: + doctest.OutputChecker = LHTMLOutputChecker + else: + doctest.OutputChecker = LXMLOutputChecker + +def temp_install(html=False, del_module=None): + """ + Use this *inside* a doctest to enable this checker for this + doctest only. + + If html is true, then by default the HTML parser will be used; + otherwise the XML parser is used. 
+ """ + if html: + Checker = LHTMLOutputChecker + else: + Checker = LXMLOutputChecker + frame = _find_doctest_frame() + dt_self = frame.f_locals['self'] + checker = Checker() + old_checker = dt_self._checker + dt_self._checker = checker + # The unfortunate thing is that there is a local variable 'check' + # in the function that runs the doctests, that is a bound method + # into the output checker. We have to update that. We can't + # modify the frame, so we have to modify the object in place. The + # only way to do this is to actually change the func_code + # attribute of the method. We change it, and then wait for + # __record_outcome to be run, which signals the end of the __run + # method, at which point we restore the previous check_output + # implementation. + if _IS_PYTHON_3: + check_func = frame.f_locals['check'].__func__ + checker_check_func = checker.check_output.__func__ + else: + check_func = frame.f_locals['check'].im_func + checker_check_func = checker.check_output.im_func + # Because we can't patch up func_globals, this is the only global + # in check_output that we care about: + doctest.etree = etree + _RestoreChecker(dt_self, old_checker, checker, + check_func, checker_check_func, + del_module) + +class _RestoreChecker(object): + def __init__(self, dt_self, old_checker, new_checker, check_func, clone_func, + del_module): + self.dt_self = dt_self + self.checker = old_checker + self.checker._temp_call_super_check_output = self.call_super + self.checker._temp_override_self = new_checker + self.check_func = check_func + self.clone_func = clone_func + self.del_module = del_module + self.install_clone() + self.install_dt_self() + def install_clone(self): + if _IS_PYTHON_3: + self.func_code = self.check_func.__code__ + self.func_globals = self.check_func.__globals__ + self.check_func.__code__ = self.clone_func.__code__ + else: + self.func_code = self.check_func.func_code + self.func_globals = self.check_func.func_globals + self.check_func.func_code = self.clone_func.func_code + def uninstall_clone(self): + if _IS_PYTHON_3: + self.check_func.__code__ = self.func_code + else: + self.check_func.func_code = self.func_code + def install_dt_self(self): + self.prev_func = self.dt_self._DocTestRunner__record_outcome + self.dt_self._DocTestRunner__record_outcome = self + def uninstall_dt_self(self): + self.dt_self._DocTestRunner__record_outcome = self.prev_func + def uninstall_module(self): + if self.del_module: + import sys + del sys.modules[self.del_module] + if '.' in self.del_module: + package, module = self.del_module.rsplit('.', 1) + package_mod = sys.modules[package] + delattr(package_mod, module) + def __call__(self, *args, **kw): + self.uninstall_clone() + self.uninstall_dt_self() + del self.checker._temp_override_self + del self.checker._temp_call_super_check_output + result = self.prev_func(*args, **kw) + self.uninstall_module() + return result + def call_super(self, *args, **kw): + self.uninstall_clone() + try: + return self.check_func(*args, **kw) + finally: + self.install_clone() + +def _find_doctest_frame(): + import sys + frame = sys._getframe(1) + while frame: + l = frame.f_locals + if 'BOOM' in l: + # Sign of doctest + return frame + frame = frame.f_back + raise LookupError( + "Could not find doctest (only use this function *inside* a doctest)") + +__test__ = { + 'basic': ''' + >>> temp_install() + >>> print """stuff""" + ... 
+ >>> print """""" + + + + >>> print """blahblahblah""" # doctest: +NOPARSE_MARKUP, +ELLIPSIS + ...foo /> + '''} + +if __name__ == '__main__': + import doctest + doctest.testmod() + + diff --git a/Lib/site-packages/lxml/html/ElementSoup.py b/Lib/site-packages/lxml/html/ElementSoup.py new file mode 100644 index 0000000..8e4fde1 --- /dev/null +++ b/Lib/site-packages/lxml/html/ElementSoup.py @@ -0,0 +1,10 @@ +__doc__ = """Legacy interface to the BeautifulSoup HTML parser. +""" + +__all__ = ["parse", "convert_tree"] + +from soupparser import convert_tree, parse as _parse + +def parse(file, beautifulsoup=None, makeelement=None): + root = _parse(file, beautifulsoup=beautifulsoup, makeelement=makeelement) + return root.getroot() diff --git a/Lib/site-packages/lxml/html/__init__.py b/Lib/site-packages/lxml/html/__init__.py new file mode 100644 index 0000000..1a1d7d2 --- /dev/null +++ b/Lib/site-packages/lxml/html/__init__.py @@ -0,0 +1,1923 @@ +# Copyright (c) 2004 Ian Bicking. All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in +# the documentation and/or other materials provided with the +# distribution. +# +# 3. Neither the name of Ian Bicking nor the names of its contributors may +# be used to endorse or promote products derived from this software +# without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL IAN BICKING OR +# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +"""The ``lxml.html`` tool set for HTML handling. +""" + +from __future__ import absolute_import + +__all__ = [ + 'document_fromstring', 'fragment_fromstring', 'fragments_fromstring', 'fromstring', + 'tostring', 'Element', 'defs', 'open_in_browser', 'submit_form', + 'find_rel_links', 'find_class', 'make_links_absolute', + 'resolve_base_href', 'iterlinks', 'rewrite_links', 'open_in_browser', 'parse'] + + +import copy +import sys +import re +from functools import partial + +try: + # while unnecessary, importing from 'collections.abc' is the right way to do it + from collections.abc import MutableMapping, MutableSet +except ImportError: + from collections import MutableMapping, MutableSet + +from .. import etree +from . 
import defs +from ._setmixin import SetMixin + +try: + from urlparse import urljoin +except ImportError: + # Python 3 + from urllib.parse import urljoin + +try: + unicode +except NameError: + # Python 3 + unicode = str +try: + basestring +except NameError: + # Python 3 + basestring = (str, bytes) + + +def __fix_docstring(s): + if not s: + return s + if sys.version_info[0] >= 3: + sub = re.compile(r"^(\s*)u'", re.M).sub + else: + sub = re.compile(r"^(\s*)b'", re.M).sub + return sub(r"\1'", s) + + +XHTML_NAMESPACE = "http://www.w3.org/1999/xhtml" + +_rel_links_xpath = etree.XPath("descendant-or-self::a[@rel]|descendant-or-self::x:a[@rel]", + namespaces={'x':XHTML_NAMESPACE}) +_options_xpath = etree.XPath("descendant-or-self::option|descendant-or-self::x:option", + namespaces={'x':XHTML_NAMESPACE}) +_forms_xpath = etree.XPath("descendant-or-self::form|descendant-or-self::x:form", + namespaces={'x':XHTML_NAMESPACE}) +#_class_xpath = etree.XPath(r"descendant-or-self::*[regexp:match(@class, concat('\b', $class_name, '\b'))]", {'regexp': 'http://exslt.org/regular-expressions'}) +_class_xpath = etree.XPath("descendant-or-self::*[@class and contains(concat(' ', normalize-space(@class), ' '), concat(' ', $class_name, ' '))]") +_id_xpath = etree.XPath("descendant-or-self::*[@id=$id]") +_collect_string_content = etree.XPath("string()") +_iter_css_urls = re.compile(r'url\(('+'["][^"]*["]|'+"['][^']*[']|"+r'[^)]*)\)', re.I).finditer +_iter_css_imports = re.compile(r'@import "(.*?)"').finditer +_label_xpath = etree.XPath("//label[@for=$id]|//x:label[@for=$id]", + namespaces={'x':XHTML_NAMESPACE}) +_archive_re = re.compile(r'[^ ]+') +_parse_meta_refresh_url = re.compile( + r'[^;=]*;\s*(?:url\s*=\s*)?(?P.*)$', re.I).search + + +def _unquote_match(s, pos): + if s[:1] == '"' and s[-1:] == '"' or s[:1] == "'" and s[-1:] == "'": + return s[1:-1], pos+1 + else: + return s,pos + + +def _transform_result(typ, result): + """Convert the result back into the input type. + """ + if issubclass(typ, bytes): + return tostring(result, encoding='utf-8') + elif issubclass(typ, unicode): + return tostring(result, encoding='unicode') + else: + return result + + +def _nons(tag): + if isinstance(tag, basestring): + if tag[0] == '{' and tag[1:len(XHTML_NAMESPACE)+1] == XHTML_NAMESPACE: + return tag.split('}')[-1] + return tag + + +class Classes(MutableSet): + """Provides access to an element's class attribute as a set-like collection. + Usage:: + + >>> el = fromstring('') + >>> classes = el.classes # or: classes = Classes(el.attrib) + >>> classes |= ['block', 'paragraph'] + >>> el.get('class') + 'hidden large block paragraph' + >>> classes.toggle('hidden') + False + >>> el.get('class') + 'large block paragraph' + >>> classes -= ('some', 'classes', 'block') + >>> el.get('class') + 'large paragraph' + """ + def __init__(self, attributes): + self._attributes = attributes + self._get_class_value = partial(attributes.get, 'class', '') + + def add(self, value): + """ + Add a class. + + This has no effect if the class is already present. + """ + if not value or re.search(r'\s', value): + raise ValueError("Invalid class name: %r" % value) + classes = self._get_class_value().split() + if value in classes: + return + classes.append(value) + self._attributes['class'] = ' '.join(classes) + + def discard(self, value): + """ + Remove a class if it is currently present. + + If the class is not present, do nothing. 
+ """ + if not value or re.search(r'\s', value): + raise ValueError("Invalid class name: %r" % value) + classes = [name for name in self._get_class_value().split() + if name != value] + if classes: + self._attributes['class'] = ' '.join(classes) + elif 'class' in self._attributes: + del self._attributes['class'] + + def remove(self, value): + """ + Remove a class; it must currently be present. + + If the class is not present, raise a KeyError. + """ + if not value or re.search(r'\s', value): + raise ValueError("Invalid class name: %r" % value) + super(Classes, self).remove(value) + + def __contains__(self, name): + classes = self._get_class_value() + return name in classes and name in classes.split() + + def __iter__(self): + return iter(self._get_class_value().split()) + + def __len__(self): + return len(self._get_class_value().split()) + + # non-standard methods + + def update(self, values): + """ + Add all names from 'values'. + """ + classes = self._get_class_value().split() + extended = False + for value in values: + if value not in classes: + classes.append(value) + extended = True + if extended: + self._attributes['class'] = ' '.join(classes) + + def toggle(self, value): + """ + Add a class name if it isn't there yet, or remove it if it exists. + + Returns true if the class was added (and is now enabled) and + false if it was removed (and is now disabled). + """ + if not value or re.search(r'\s', value): + raise ValueError("Invalid class name: %r" % value) + classes = self._get_class_value().split() + try: + classes.remove(value) + enabled = False + except ValueError: + classes.append(value) + enabled = True + if classes: + self._attributes['class'] = ' '.join(classes) + else: + del self._attributes['class'] + return enabled + + +class HtmlMixin(object): + + @property + def classes(self): + """ + A set-like wrapper around the 'class' attribute. + """ + return Classes(self.attrib) + + @classes.setter + def classes(self, classes): + assert isinstance(classes, Classes) # only allow "el.classes |= ..." etc. + value = classes._get_class_value() + if value: + self.set('class', value) + elif self.get('class') is not None: + del self.attrib['class'] + + @property + def base_url(self): + """ + Returns the base URL, given when the page was parsed. + + Use with ``urlparse.urljoin(el.base_url, href)`` to get + absolute URLs. + """ + return self.getroottree().docinfo.URL + + @property + def forms(self): + """ + Return a list of all the forms + """ + return _forms_xpath(self) + + @property + def body(self): + """ + Return the element. Can be called from a child element + to get the document's head. + """ + return self.xpath('//body|//x:body', namespaces={'x':XHTML_NAMESPACE})[0] + + @property + def head(self): + """ + Returns the element. Can be called from a child + element to get the document's head. + """ + return self.xpath('//head|//x:head', namespaces={'x':XHTML_NAMESPACE})[0] + + @property + def label(self): + """ + Get or set any