split platform
This commit is contained in:
commit
8c9b09577d
2261 changed files with 676163 additions and 0 deletions
23
lib/python3.5/site-packages/cryptography/__about__.py
Normal file
23
lib/python3.5/site-packages/cryptography/__about__.py
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
__all__ = [
|
||||
"__title__", "__summary__", "__uri__", "__version__", "__author__",
|
||||
"__email__", "__license__", "__copyright__",
|
||||
]
|
||||
|
||||
__title__ = "cryptography"
|
||||
__summary__ = ("cryptography is a package which provides cryptographic recipes"
|
||||
" and primitives to Python developers.")
|
||||
__uri__ = "https://github.com/pyca/cryptography"
|
||||
|
||||
__version__ = "1.1.1"
|
||||
|
||||
__author__ = "The cryptography developers"
|
||||
__email__ = "cryptography-dev@python.org"
|
||||
|
||||
__license__ = "BSD or Apache License, Version 2.0"
|
||||
__copyright__ = "Copyright 2013-2015 {0}".format(__author__)
|
||||
26
lib/python3.5/site-packages/cryptography/__init__.py
Normal file
26
lib/python3.5/site-packages/cryptography/__init__.py
Normal file
|
|
@ -0,0 +1,26 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
from cryptography.__about__ import (
|
||||
__author__, __copyright__, __email__, __license__, __summary__, __title__,
|
||||
__uri__, __version__
|
||||
)
|
||||
|
||||
|
||||
__all__ = [
|
||||
"__title__", "__summary__", "__uri__", "__version__", "__author__",
|
||||
"__email__", "__license__", "__copyright__",
|
||||
]
|
||||
|
||||
if sys.version_info[:2] == (2, 6):
|
||||
warnings.warn(
|
||||
"Python 2.6 is no longer supported by the Python core team, please "
|
||||
"upgrade your Python.",
|
||||
DeprecationWarning
|
||||
)
|
||||
70
lib/python3.5/site-packages/cryptography/exceptions.py
Normal file
70
lib/python3.5/site-packages/cryptography/exceptions.py
Normal file
|
|
@ -0,0 +1,70 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from enum import Enum
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.hazmat.primitives import twofactor
|
||||
|
||||
|
||||
class _Reasons(Enum):
|
||||
BACKEND_MISSING_INTERFACE = 0
|
||||
UNSUPPORTED_HASH = 1
|
||||
UNSUPPORTED_CIPHER = 2
|
||||
UNSUPPORTED_PADDING = 3
|
||||
UNSUPPORTED_MGF = 4
|
||||
UNSUPPORTED_PUBLIC_KEY_ALGORITHM = 5
|
||||
UNSUPPORTED_ELLIPTIC_CURVE = 6
|
||||
UNSUPPORTED_SERIALIZATION = 7
|
||||
UNSUPPORTED_X509 = 8
|
||||
UNSUPPORTED_EXCHANGE_ALGORITHM = 9
|
||||
|
||||
|
||||
class UnsupportedAlgorithm(Exception):
|
||||
def __init__(self, message, reason=None):
|
||||
super(UnsupportedAlgorithm, self).__init__(message)
|
||||
self._reason = reason
|
||||
|
||||
|
||||
class AlreadyFinalized(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class AlreadyUpdated(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class NotYetFinalized(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class InvalidTag(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class InvalidSignature(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class InternalError(Exception):
|
||||
def __init__(self, msg, err_code):
|
||||
super(InternalError, self).__init__(msg)
|
||||
self.err_code = err_code
|
||||
|
||||
|
||||
class InvalidKey(Exception):
|
||||
pass
|
||||
|
||||
|
||||
InvalidToken = utils.deprecated(
|
||||
twofactor.InvalidToken,
|
||||
__name__,
|
||||
(
|
||||
"The InvalidToken exception has moved to the "
|
||||
"cryptography.hazmat.primitives.twofactor module"
|
||||
),
|
||||
utils.DeprecatedIn09
|
||||
)
|
||||
141
lib/python3.5/site-packages/cryptography/fernet.py
Normal file
141
lib/python3.5/site-packages/cryptography/fernet.py
Normal file
|
|
@ -0,0 +1,141 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import base64
|
||||
import binascii
|
||||
import os
|
||||
import struct
|
||||
import time
|
||||
|
||||
import six
|
||||
|
||||
from cryptography.exceptions import InvalidSignature
|
||||
from cryptography.hazmat.backends import default_backend
|
||||
from cryptography.hazmat.primitives import hashes, padding
|
||||
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
|
||||
from cryptography.hazmat.primitives.hmac import HMAC
|
||||
|
||||
|
||||
class InvalidToken(Exception):
|
||||
pass
|
||||
|
||||
|
||||
_MAX_CLOCK_SKEW = 60
|
||||
|
||||
|
||||
class Fernet(object):
|
||||
def __init__(self, key, backend=None):
|
||||
if backend is None:
|
||||
backend = default_backend()
|
||||
|
||||
key = base64.urlsafe_b64decode(key)
|
||||
if len(key) != 32:
|
||||
raise ValueError(
|
||||
"Fernet key must be 32 url-safe base64-encoded bytes."
|
||||
)
|
||||
|
||||
self._signing_key = key[:16]
|
||||
self._encryption_key = key[16:]
|
||||
self._backend = backend
|
||||
|
||||
@classmethod
|
||||
def generate_key(cls):
|
||||
return base64.urlsafe_b64encode(os.urandom(32))
|
||||
|
||||
def encrypt(self, data):
|
||||
current_time = int(time.time())
|
||||
iv = os.urandom(16)
|
||||
return self._encrypt_from_parts(data, current_time, iv)
|
||||
|
||||
def _encrypt_from_parts(self, data, current_time, iv):
|
||||
if not isinstance(data, bytes):
|
||||
raise TypeError("data must be bytes.")
|
||||
|
||||
padder = padding.PKCS7(algorithms.AES.block_size).padder()
|
||||
padded_data = padder.update(data) + padder.finalize()
|
||||
encryptor = Cipher(
|
||||
algorithms.AES(self._encryption_key), modes.CBC(iv), self._backend
|
||||
).encryptor()
|
||||
ciphertext = encryptor.update(padded_data) + encryptor.finalize()
|
||||
|
||||
basic_parts = (
|
||||
b"\x80" + struct.pack(">Q", current_time) + iv + ciphertext
|
||||
)
|
||||
|
||||
h = HMAC(self._signing_key, hashes.SHA256(), backend=self._backend)
|
||||
h.update(basic_parts)
|
||||
hmac = h.finalize()
|
||||
return base64.urlsafe_b64encode(basic_parts + hmac)
|
||||
|
||||
def decrypt(self, token, ttl=None):
|
||||
if not isinstance(token, bytes):
|
||||
raise TypeError("token must be bytes.")
|
||||
|
||||
current_time = int(time.time())
|
||||
|
||||
try:
|
||||
data = base64.urlsafe_b64decode(token)
|
||||
except (TypeError, binascii.Error):
|
||||
raise InvalidToken
|
||||
|
||||
if not data or six.indexbytes(data, 0) != 0x80:
|
||||
raise InvalidToken
|
||||
|
||||
try:
|
||||
timestamp, = struct.unpack(">Q", data[1:9])
|
||||
except struct.error:
|
||||
raise InvalidToken
|
||||
if ttl is not None:
|
||||
if timestamp + ttl < current_time:
|
||||
raise InvalidToken
|
||||
if current_time + _MAX_CLOCK_SKEW < timestamp:
|
||||
raise InvalidToken
|
||||
h = HMAC(self._signing_key, hashes.SHA256(), backend=self._backend)
|
||||
h.update(data[:-32])
|
||||
try:
|
||||
h.verify(data[-32:])
|
||||
except InvalidSignature:
|
||||
raise InvalidToken
|
||||
|
||||
iv = data[9:25]
|
||||
ciphertext = data[25:-32]
|
||||
decryptor = Cipher(
|
||||
algorithms.AES(self._encryption_key), modes.CBC(iv), self._backend
|
||||
).decryptor()
|
||||
plaintext_padded = decryptor.update(ciphertext)
|
||||
try:
|
||||
plaintext_padded += decryptor.finalize()
|
||||
except ValueError:
|
||||
raise InvalidToken
|
||||
unpadder = padding.PKCS7(algorithms.AES.block_size).unpadder()
|
||||
|
||||
unpadded = unpadder.update(plaintext_padded)
|
||||
try:
|
||||
unpadded += unpadder.finalize()
|
||||
except ValueError:
|
||||
raise InvalidToken
|
||||
return unpadded
|
||||
|
||||
|
||||
class MultiFernet(object):
|
||||
def __init__(self, fernets):
|
||||
fernets = list(fernets)
|
||||
if not fernets:
|
||||
raise ValueError(
|
||||
"MultiFernet requires at least one Fernet instance"
|
||||
)
|
||||
self._fernets = fernets
|
||||
|
||||
def encrypt(self, msg):
|
||||
return self._fernets[0].encrypt(msg)
|
||||
|
||||
def decrypt(self, msg, ttl=None):
|
||||
for f in self._fernets:
|
||||
try:
|
||||
return f.decrypt(msg, ttl)
|
||||
except InvalidToken:
|
||||
pass
|
||||
raise InvalidToken
|
||||
|
|
@ -0,0 +1,5 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
|
@ -0,0 +1,42 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import pkg_resources
|
||||
|
||||
from cryptography.hazmat.backends.multibackend import MultiBackend
|
||||
|
||||
|
||||
_available_backends_list = None
|
||||
|
||||
|
||||
def _available_backends():
|
||||
global _available_backends_list
|
||||
|
||||
if _available_backends_list is None:
|
||||
_available_backends_list = [
|
||||
# setuptools 11.3 deprecated support for the require parameter to
|
||||
# load(), and introduced the new resolve() method instead.
|
||||
# This can be removed if/when we can assume setuptools>=11.3. At
|
||||
# some point we may wish to add a warning, to push people along,
|
||||
# but at present this would result in too many warnings.
|
||||
ep.resolve() if hasattr(ep, "resolve") else ep.load(require=False)
|
||||
for ep in pkg_resources.iter_entry_points(
|
||||
"cryptography.backends"
|
||||
)
|
||||
]
|
||||
|
||||
return _available_backends_list
|
||||
|
||||
_default_backend = None
|
||||
|
||||
|
||||
def default_backend():
|
||||
global _default_backend
|
||||
|
||||
if _default_backend is None:
|
||||
_default_backend = MultiBackend(_available_backends())
|
||||
|
||||
return _default_backend
|
||||
|
|
@ -0,0 +1,10 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from cryptography.hazmat.backends.commoncrypto.backend import backend
|
||||
|
||||
|
||||
__all__ = ["backend"]
|
||||
|
|
@ -0,0 +1,245 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from collections import namedtuple
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.exceptions import InternalError
|
||||
from cryptography.hazmat.backends.commoncrypto.ciphers import (
|
||||
_CipherContext, _GCMCipherContext
|
||||
)
|
||||
from cryptography.hazmat.backends.commoncrypto.hashes import _HashContext
|
||||
from cryptography.hazmat.backends.commoncrypto.hmac import _HMACContext
|
||||
from cryptography.hazmat.backends.interfaces import (
|
||||
CipherBackend, HMACBackend, HashBackend, PBKDF2HMACBackend
|
||||
)
|
||||
from cryptography.hazmat.bindings.commoncrypto.binding import Binding
|
||||
from cryptography.hazmat.primitives.ciphers.algorithms import (
|
||||
AES, ARC4, Blowfish, CAST5, TripleDES
|
||||
)
|
||||
from cryptography.hazmat.primitives.ciphers.modes import (
|
||||
CBC, CFB, CFB8, CTR, ECB, GCM, OFB
|
||||
)
|
||||
|
||||
|
||||
HashMethods = namedtuple(
|
||||
"HashMethods", ["ctx", "hash_init", "hash_update", "hash_final"]
|
||||
)
|
||||
|
||||
|
||||
@utils.register_interface(CipherBackend)
|
||||
@utils.register_interface(HashBackend)
|
||||
@utils.register_interface(HMACBackend)
|
||||
@utils.register_interface(PBKDF2HMACBackend)
|
||||
class Backend(object):
|
||||
"""
|
||||
CommonCrypto API wrapper.
|
||||
"""
|
||||
name = "commoncrypto"
|
||||
|
||||
def __init__(self):
|
||||
self._binding = Binding()
|
||||
self._ffi = self._binding.ffi
|
||||
self._lib = self._binding.lib
|
||||
|
||||
self._cipher_registry = {}
|
||||
self._register_default_ciphers()
|
||||
self._hash_mapping = {
|
||||
"md5": HashMethods(
|
||||
"CC_MD5_CTX *", self._lib.CC_MD5_Init,
|
||||
self._lib.CC_MD5_Update, self._lib.CC_MD5_Final
|
||||
),
|
||||
"sha1": HashMethods(
|
||||
"CC_SHA1_CTX *", self._lib.CC_SHA1_Init,
|
||||
self._lib.CC_SHA1_Update, self._lib.CC_SHA1_Final
|
||||
),
|
||||
"sha224": HashMethods(
|
||||
"CC_SHA256_CTX *", self._lib.CC_SHA224_Init,
|
||||
self._lib.CC_SHA224_Update, self._lib.CC_SHA224_Final
|
||||
),
|
||||
"sha256": HashMethods(
|
||||
"CC_SHA256_CTX *", self._lib.CC_SHA256_Init,
|
||||
self._lib.CC_SHA256_Update, self._lib.CC_SHA256_Final
|
||||
),
|
||||
"sha384": HashMethods(
|
||||
"CC_SHA512_CTX *", self._lib.CC_SHA384_Init,
|
||||
self._lib.CC_SHA384_Update, self._lib.CC_SHA384_Final
|
||||
),
|
||||
"sha512": HashMethods(
|
||||
"CC_SHA512_CTX *", self._lib.CC_SHA512_Init,
|
||||
self._lib.CC_SHA512_Update, self._lib.CC_SHA512_Final
|
||||
),
|
||||
}
|
||||
|
||||
self._supported_hmac_algorithms = {
|
||||
"md5": self._lib.kCCHmacAlgMD5,
|
||||
"sha1": self._lib.kCCHmacAlgSHA1,
|
||||
"sha224": self._lib.kCCHmacAlgSHA224,
|
||||
"sha256": self._lib.kCCHmacAlgSHA256,
|
||||
"sha384": self._lib.kCCHmacAlgSHA384,
|
||||
"sha512": self._lib.kCCHmacAlgSHA512,
|
||||
}
|
||||
|
||||
self._supported_pbkdf2_hmac_algorithms = {
|
||||
"sha1": self._lib.kCCPRFHmacAlgSHA1,
|
||||
"sha224": self._lib.kCCPRFHmacAlgSHA224,
|
||||
"sha256": self._lib.kCCPRFHmacAlgSHA256,
|
||||
"sha384": self._lib.kCCPRFHmacAlgSHA384,
|
||||
"sha512": self._lib.kCCPRFHmacAlgSHA512,
|
||||
}
|
||||
|
||||
def hash_supported(self, algorithm):
|
||||
return algorithm.name in self._hash_mapping
|
||||
|
||||
def hmac_supported(self, algorithm):
|
||||
return algorithm.name in self._supported_hmac_algorithms
|
||||
|
||||
def create_hash_ctx(self, algorithm):
|
||||
return _HashContext(self, algorithm)
|
||||
|
||||
def create_hmac_ctx(self, key, algorithm):
|
||||
return _HMACContext(self, key, algorithm)
|
||||
|
||||
def cipher_supported(self, cipher, mode):
|
||||
return (type(cipher), type(mode)) in self._cipher_registry
|
||||
|
||||
def create_symmetric_encryption_ctx(self, cipher, mode):
|
||||
if isinstance(mode, GCM):
|
||||
return _GCMCipherContext(
|
||||
self, cipher, mode, self._lib.kCCEncrypt
|
||||
)
|
||||
else:
|
||||
return _CipherContext(self, cipher, mode, self._lib.kCCEncrypt)
|
||||
|
||||
def create_symmetric_decryption_ctx(self, cipher, mode):
|
||||
if isinstance(mode, GCM):
|
||||
return _GCMCipherContext(
|
||||
self, cipher, mode, self._lib.kCCDecrypt
|
||||
)
|
||||
else:
|
||||
return _CipherContext(self, cipher, mode, self._lib.kCCDecrypt)
|
||||
|
||||
def pbkdf2_hmac_supported(self, algorithm):
|
||||
return algorithm.name in self._supported_pbkdf2_hmac_algorithms
|
||||
|
||||
def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations,
|
||||
key_material):
|
||||
alg_enum = self._supported_pbkdf2_hmac_algorithms[algorithm.name]
|
||||
buf = self._ffi.new("char[]", length)
|
||||
res = self._lib.CCKeyDerivationPBKDF(
|
||||
self._lib.kCCPBKDF2,
|
||||
key_material,
|
||||
len(key_material),
|
||||
salt,
|
||||
len(salt),
|
||||
alg_enum,
|
||||
iterations,
|
||||
buf,
|
||||
length
|
||||
)
|
||||
self._check_cipher_response(res)
|
||||
|
||||
return self._ffi.buffer(buf)[:]
|
||||
|
||||
def _register_cipher_adapter(self, cipher_cls, cipher_const, mode_cls,
|
||||
mode_const):
|
||||
if (cipher_cls, mode_cls) in self._cipher_registry:
|
||||
raise ValueError("Duplicate registration for: {0} {1}.".format(
|
||||
cipher_cls, mode_cls)
|
||||
)
|
||||
self._cipher_registry[cipher_cls, mode_cls] = (cipher_const,
|
||||
mode_const)
|
||||
|
||||
def _register_default_ciphers(self):
|
||||
for mode_cls, mode_const in [
|
||||
(CBC, self._lib.kCCModeCBC),
|
||||
(ECB, self._lib.kCCModeECB),
|
||||
(CFB, self._lib.kCCModeCFB),
|
||||
(CFB8, self._lib.kCCModeCFB8),
|
||||
(OFB, self._lib.kCCModeOFB),
|
||||
(CTR, self._lib.kCCModeCTR),
|
||||
(GCM, self._lib.kCCModeGCM),
|
||||
]:
|
||||
self._register_cipher_adapter(
|
||||
AES,
|
||||
self._lib.kCCAlgorithmAES128,
|
||||
mode_cls,
|
||||
mode_const
|
||||
)
|
||||
for mode_cls, mode_const in [
|
||||
(CBC, self._lib.kCCModeCBC),
|
||||
(ECB, self._lib.kCCModeECB),
|
||||
(CFB, self._lib.kCCModeCFB),
|
||||
(CFB8, self._lib.kCCModeCFB8),
|
||||
(OFB, self._lib.kCCModeOFB),
|
||||
]:
|
||||
self._register_cipher_adapter(
|
||||
TripleDES,
|
||||
self._lib.kCCAlgorithm3DES,
|
||||
mode_cls,
|
||||
mode_const
|
||||
)
|
||||
for mode_cls, mode_const in [
|
||||
(CBC, self._lib.kCCModeCBC),
|
||||
(ECB, self._lib.kCCModeECB),
|
||||
(CFB, self._lib.kCCModeCFB),
|
||||
(OFB, self._lib.kCCModeOFB)
|
||||
]:
|
||||
self._register_cipher_adapter(
|
||||
Blowfish,
|
||||
self._lib.kCCAlgorithmBlowfish,
|
||||
mode_cls,
|
||||
mode_const
|
||||
)
|
||||
for mode_cls, mode_const in [
|
||||
(CBC, self._lib.kCCModeCBC),
|
||||
(ECB, self._lib.kCCModeECB),
|
||||
(CFB, self._lib.kCCModeCFB),
|
||||
(OFB, self._lib.kCCModeOFB),
|
||||
(CTR, self._lib.kCCModeCTR)
|
||||
]:
|
||||
self._register_cipher_adapter(
|
||||
CAST5,
|
||||
self._lib.kCCAlgorithmCAST,
|
||||
mode_cls,
|
||||
mode_const
|
||||
)
|
||||
self._register_cipher_adapter(
|
||||
ARC4,
|
||||
self._lib.kCCAlgorithmRC4,
|
||||
type(None),
|
||||
self._lib.kCCModeRC4
|
||||
)
|
||||
|
||||
def _check_cipher_response(self, response):
|
||||
if response == self._lib.kCCSuccess:
|
||||
return
|
||||
elif response == self._lib.kCCAlignmentError:
|
||||
# This error is not currently triggered due to a bug filed as
|
||||
# rdar://15589470
|
||||
raise ValueError(
|
||||
"The length of the provided data is not a multiple of "
|
||||
"the block length."
|
||||
)
|
||||
else:
|
||||
raise InternalError(
|
||||
"The backend returned an unknown error, consider filing a bug."
|
||||
" Code: {0}.".format(response),
|
||||
response
|
||||
)
|
||||
|
||||
def _release_cipher_ctx(self, ctx):
|
||||
"""
|
||||
Called by the garbage collector and used to safely dereference and
|
||||
release the context.
|
||||
"""
|
||||
if ctx[0] != self._ffi.NULL:
|
||||
res = self._lib.CCCryptorRelease(ctx[0])
|
||||
self._check_cipher_response(res)
|
||||
ctx[0] = self._ffi.NULL
|
||||
|
||||
|
||||
backend = Backend()
|
||||
|
|
@ -0,0 +1,193 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.exceptions import (
|
||||
InvalidTag, UnsupportedAlgorithm, _Reasons
|
||||
)
|
||||
from cryptography.hazmat.primitives import ciphers, constant_time
|
||||
from cryptography.hazmat.primitives.ciphers import modes
|
||||
from cryptography.hazmat.primitives.ciphers.modes import (
|
||||
CFB, CFB8, CTR, OFB
|
||||
)
|
||||
|
||||
|
||||
@utils.register_interface(ciphers.CipherContext)
|
||||
class _CipherContext(object):
|
||||
def __init__(self, backend, cipher, mode, operation):
|
||||
self._backend = backend
|
||||
self._cipher = cipher
|
||||
self._mode = mode
|
||||
self._operation = operation
|
||||
# There is a bug in CommonCrypto where block ciphers do not raise
|
||||
# kCCAlignmentError when finalizing if you supply non-block aligned
|
||||
# data. To work around this we need to keep track of the block
|
||||
# alignment ourselves, but only for alg+mode combos that require
|
||||
# block alignment. OFB, CFB, and CTR make a block cipher algorithm
|
||||
# into a stream cipher so we don't need to track them (and thus their
|
||||
# block size is effectively 1 byte just like OpenSSL/CommonCrypto
|
||||
# treat RC4 and other stream cipher block sizes).
|
||||
# This bug has been filed as rdar://15589470
|
||||
self._bytes_processed = 0
|
||||
if (isinstance(cipher, ciphers.BlockCipherAlgorithm) and not
|
||||
isinstance(mode, (OFB, CFB, CFB8, CTR))):
|
||||
self._byte_block_size = cipher.block_size // 8
|
||||
else:
|
||||
self._byte_block_size = 1
|
||||
|
||||
registry = self._backend._cipher_registry
|
||||
try:
|
||||
cipher_enum, mode_enum = registry[type(cipher), type(mode)]
|
||||
except KeyError:
|
||||
raise UnsupportedAlgorithm(
|
||||
"cipher {0} in {1} mode is not supported "
|
||||
"by this backend.".format(
|
||||
cipher.name, mode.name if mode else mode),
|
||||
_Reasons.UNSUPPORTED_CIPHER
|
||||
)
|
||||
|
||||
ctx = self._backend._ffi.new("CCCryptorRef *")
|
||||
ctx = self._backend._ffi.gc(ctx, self._backend._release_cipher_ctx)
|
||||
|
||||
if isinstance(mode, modes.ModeWithInitializationVector):
|
||||
iv_nonce = mode.initialization_vector
|
||||
elif isinstance(mode, modes.ModeWithNonce):
|
||||
iv_nonce = mode.nonce
|
||||
else:
|
||||
iv_nonce = self._backend._ffi.NULL
|
||||
|
||||
if isinstance(mode, CTR):
|
||||
mode_option = self._backend._lib.kCCModeOptionCTR_BE
|
||||
else:
|
||||
mode_option = 0
|
||||
|
||||
res = self._backend._lib.CCCryptorCreateWithMode(
|
||||
operation,
|
||||
mode_enum, cipher_enum,
|
||||
self._backend._lib.ccNoPadding, iv_nonce,
|
||||
cipher.key, len(cipher.key),
|
||||
self._backend._ffi.NULL, 0, 0, mode_option, ctx)
|
||||
self._backend._check_cipher_response(res)
|
||||
|
||||
self._ctx = ctx
|
||||
|
||||
def update(self, data):
|
||||
# Count bytes processed to handle block alignment.
|
||||
self._bytes_processed += len(data)
|
||||
buf = self._backend._ffi.new(
|
||||
"unsigned char[]", len(data) + self._byte_block_size - 1)
|
||||
outlen = self._backend._ffi.new("size_t *")
|
||||
res = self._backend._lib.CCCryptorUpdate(
|
||||
self._ctx[0], data, len(data), buf,
|
||||
len(data) + self._byte_block_size - 1, outlen)
|
||||
self._backend._check_cipher_response(res)
|
||||
return self._backend._ffi.buffer(buf)[:outlen[0]]
|
||||
|
||||
def finalize(self):
|
||||
# Raise error if block alignment is wrong.
|
||||
if self._bytes_processed % self._byte_block_size:
|
||||
raise ValueError(
|
||||
"The length of the provided data is not a multiple of "
|
||||
"the block length."
|
||||
)
|
||||
buf = self._backend._ffi.new("unsigned char[]", self._byte_block_size)
|
||||
outlen = self._backend._ffi.new("size_t *")
|
||||
res = self._backend._lib.CCCryptorFinal(
|
||||
self._ctx[0], buf, len(buf), outlen)
|
||||
self._backend._check_cipher_response(res)
|
||||
self._backend._release_cipher_ctx(self._ctx)
|
||||
return self._backend._ffi.buffer(buf)[:outlen[0]]
|
||||
|
||||
|
||||
@utils.register_interface(ciphers.AEADCipherContext)
|
||||
@utils.register_interface(ciphers.AEADEncryptionContext)
|
||||
class _GCMCipherContext(object):
|
||||
def __init__(self, backend, cipher, mode, operation):
|
||||
self._backend = backend
|
||||
self._cipher = cipher
|
||||
self._mode = mode
|
||||
self._operation = operation
|
||||
self._tag = None
|
||||
|
||||
registry = self._backend._cipher_registry
|
||||
try:
|
||||
cipher_enum, mode_enum = registry[type(cipher), type(mode)]
|
||||
except KeyError:
|
||||
raise UnsupportedAlgorithm(
|
||||
"cipher {0} in {1} mode is not supported "
|
||||
"by this backend.".format(
|
||||
cipher.name, mode.name if mode else mode),
|
||||
_Reasons.UNSUPPORTED_CIPHER
|
||||
)
|
||||
|
||||
ctx = self._backend._ffi.new("CCCryptorRef *")
|
||||
ctx = self._backend._ffi.gc(ctx, self._backend._release_cipher_ctx)
|
||||
|
||||
self._ctx = ctx
|
||||
|
||||
res = self._backend._lib.CCCryptorCreateWithMode(
|
||||
operation,
|
||||
mode_enum, cipher_enum,
|
||||
self._backend._lib.ccNoPadding,
|
||||
self._backend._ffi.NULL,
|
||||
cipher.key, len(cipher.key),
|
||||
self._backend._ffi.NULL, 0, 0, 0, self._ctx)
|
||||
self._backend._check_cipher_response(res)
|
||||
|
||||
res = self._backend._lib.CCCryptorGCMAddIV(
|
||||
self._ctx[0],
|
||||
mode.initialization_vector,
|
||||
len(mode.initialization_vector)
|
||||
)
|
||||
self._backend._check_cipher_response(res)
|
||||
# CommonCrypto has a bug where calling update without at least one
|
||||
# call to authenticate_additional_data will result in null byte output
|
||||
# for ciphertext. The following empty byte string call prevents the
|
||||
# issue, which is present in at least 10.8 and 10.9.
|
||||
# Filed as rdar://18314544
|
||||
self.authenticate_additional_data(b"")
|
||||
|
||||
def update(self, data):
|
||||
buf = self._backend._ffi.new("unsigned char[]", len(data))
|
||||
args = (self._ctx[0], data, len(data), buf)
|
||||
if self._operation == self._backend._lib.kCCEncrypt:
|
||||
res = self._backend._lib.CCCryptorGCMEncrypt(*args)
|
||||
else:
|
||||
res = self._backend._lib.CCCryptorGCMDecrypt(*args)
|
||||
|
||||
self._backend._check_cipher_response(res)
|
||||
return self._backend._ffi.buffer(buf)[:]
|
||||
|
||||
def finalize(self):
|
||||
# CommonCrypto has a yet another bug where you must make at least one
|
||||
# call to update. If you pass just AAD and call finalize without a call
|
||||
# to update you'll get null bytes for tag. The following update call
|
||||
# prevents this issue, which is present in at least 10.8 and 10.9.
|
||||
# Filed as rdar://18314580
|
||||
self.update(b"")
|
||||
tag_size = self._cipher.block_size // 8
|
||||
tag_buf = self._backend._ffi.new("unsigned char[]", tag_size)
|
||||
tag_len = self._backend._ffi.new("size_t *", tag_size)
|
||||
res = self._backend._lib.CCCryptorGCMFinal(
|
||||
self._ctx[0], tag_buf, tag_len
|
||||
)
|
||||
self._backend._check_cipher_response(res)
|
||||
self._backend._release_cipher_ctx(self._ctx)
|
||||
self._tag = self._backend._ffi.buffer(tag_buf)[:]
|
||||
if (self._operation == self._backend._lib.kCCDecrypt and
|
||||
not constant_time.bytes_eq(
|
||||
self._tag[:len(self._mode.tag)], self._mode.tag
|
||||
)):
|
||||
raise InvalidTag
|
||||
return b""
|
||||
|
||||
def authenticate_additional_data(self, data):
|
||||
res = self._backend._lib.CCCryptorGCMAddAAD(
|
||||
self._ctx[0], data, len(data)
|
||||
)
|
||||
self._backend._check_cipher_response(res)
|
||||
|
||||
tag = utils.read_only_property("_tag")
|
||||
|
|
@ -0,0 +1,55 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
|
||||
from cryptography.hazmat.primitives import hashes
|
||||
|
||||
|
||||
@utils.register_interface(hashes.HashContext)
|
||||
class _HashContext(object):
|
||||
def __init__(self, backend, algorithm, ctx=None):
|
||||
self._algorithm = algorithm
|
||||
self._backend = backend
|
||||
|
||||
if ctx is None:
|
||||
try:
|
||||
methods = self._backend._hash_mapping[self.algorithm.name]
|
||||
except KeyError:
|
||||
raise UnsupportedAlgorithm(
|
||||
"{0} is not a supported hash on this backend.".format(
|
||||
algorithm.name),
|
||||
_Reasons.UNSUPPORTED_HASH
|
||||
)
|
||||
ctx = self._backend._ffi.new(methods.ctx)
|
||||
res = methods.hash_init(ctx)
|
||||
assert res == 1
|
||||
|
||||
self._ctx = ctx
|
||||
|
||||
algorithm = utils.read_only_property("_algorithm")
|
||||
|
||||
def copy(self):
|
||||
methods = self._backend._hash_mapping[self.algorithm.name]
|
||||
new_ctx = self._backend._ffi.new(methods.ctx)
|
||||
# CommonCrypto has no APIs for copying hashes, so we have to copy the
|
||||
# underlying struct.
|
||||
new_ctx[0] = self._ctx[0]
|
||||
|
||||
return _HashContext(self._backend, self.algorithm, ctx=new_ctx)
|
||||
|
||||
def update(self, data):
|
||||
methods = self._backend._hash_mapping[self.algorithm.name]
|
||||
res = methods.hash_update(self._ctx, data, len(data))
|
||||
assert res == 1
|
||||
|
||||
def finalize(self):
|
||||
methods = self._backend._hash_mapping[self.algorithm.name]
|
||||
buf = self._backend._ffi.new("unsigned char[]",
|
||||
self.algorithm.digest_size)
|
||||
res = methods.hash_final(buf, self._ctx)
|
||||
assert res == 1
|
||||
return self._backend._ffi.buffer(buf)[:]
|
||||
|
|
@ -0,0 +1,59 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.exceptions import (
|
||||
InvalidSignature, UnsupportedAlgorithm, _Reasons
|
||||
)
|
||||
from cryptography.hazmat.primitives import constant_time, hashes, interfaces
|
||||
|
||||
|
||||
@utils.register_interface(interfaces.MACContext)
|
||||
@utils.register_interface(hashes.HashContext)
|
||||
class _HMACContext(object):
|
||||
def __init__(self, backend, key, algorithm, ctx=None):
|
||||
self._algorithm = algorithm
|
||||
self._backend = backend
|
||||
if ctx is None:
|
||||
ctx = self._backend._ffi.new("CCHmacContext *")
|
||||
try:
|
||||
alg = self._backend._supported_hmac_algorithms[algorithm.name]
|
||||
except KeyError:
|
||||
raise UnsupportedAlgorithm(
|
||||
"{0} is not a supported HMAC hash on this backend.".format(
|
||||
algorithm.name),
|
||||
_Reasons.UNSUPPORTED_HASH
|
||||
)
|
||||
|
||||
self._backend._lib.CCHmacInit(ctx, alg, key, len(key))
|
||||
|
||||
self._ctx = ctx
|
||||
self._key = key
|
||||
|
||||
algorithm = utils.read_only_property("_algorithm")
|
||||
|
||||
def copy(self):
|
||||
copied_ctx = self._backend._ffi.new("CCHmacContext *")
|
||||
# CommonCrypto has no APIs for copying HMACs, so we have to copy the
|
||||
# underlying struct.
|
||||
copied_ctx[0] = self._ctx[0]
|
||||
return _HMACContext(
|
||||
self._backend, self._key, self.algorithm, ctx=copied_ctx
|
||||
)
|
||||
|
||||
def update(self, data):
|
||||
self._backend._lib.CCHmacUpdate(self._ctx, data, len(data))
|
||||
|
||||
def finalize(self):
|
||||
buf = self._backend._ffi.new("unsigned char[]",
|
||||
self.algorithm.digest_size)
|
||||
self._backend._lib.CCHmacFinal(self._ctx, buf)
|
||||
return self._backend._ffi.buffer(buf)[:]
|
||||
|
||||
def verify(self, signature):
|
||||
digest = self.finalize()
|
||||
if not constant_time.bytes_eq(digest, signature):
|
||||
raise InvalidSignature("Signature did not match digest.")
|
||||
|
|
@ -0,0 +1,345 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import abc
|
||||
|
||||
import six
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class CipherBackend(object):
|
||||
@abc.abstractmethod
|
||||
def cipher_supported(self, cipher, mode):
|
||||
"""
|
||||
Return True if the given cipher and mode are supported.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def create_symmetric_encryption_ctx(self, cipher, mode):
|
||||
"""
|
||||
Get a CipherContext that can be used for encryption.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def create_symmetric_decryption_ctx(self, cipher, mode):
|
||||
"""
|
||||
Get a CipherContext that can be used for decryption.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class HashBackend(object):
|
||||
@abc.abstractmethod
|
||||
def hash_supported(self, algorithm):
|
||||
"""
|
||||
Return True if the hash algorithm is supported by this backend.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def create_hash_ctx(self, algorithm):
|
||||
"""
|
||||
Create a HashContext for calculating a message digest.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class HMACBackend(object):
|
||||
@abc.abstractmethod
|
||||
def hmac_supported(self, algorithm):
|
||||
"""
|
||||
Return True if the hash algorithm is supported for HMAC by this
|
||||
backend.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def create_hmac_ctx(self, key, algorithm):
|
||||
"""
|
||||
Create a MACContext for calculating a message authentication code.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class CMACBackend(object):
|
||||
@abc.abstractmethod
|
||||
def cmac_algorithm_supported(self, algorithm):
|
||||
"""
|
||||
Returns True if the block cipher is supported for CMAC by this backend
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def create_cmac_ctx(self, algorithm):
|
||||
"""
|
||||
Create a MACContext for calculating a message authentication code.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class PBKDF2HMACBackend(object):
|
||||
@abc.abstractmethod
|
||||
def pbkdf2_hmac_supported(self, algorithm):
|
||||
"""
|
||||
Return True if the hash algorithm is supported for PBKDF2 by this
|
||||
backend.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations,
|
||||
key_material):
|
||||
"""
|
||||
Return length bytes derived from provided PBKDF2 parameters.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class RSABackend(object):
|
||||
@abc.abstractmethod
|
||||
def generate_rsa_private_key(self, public_exponent, key_size):
|
||||
"""
|
||||
Generate an RSAPrivateKey instance with public_exponent and a modulus
|
||||
of key_size bits.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def rsa_padding_supported(self, padding):
|
||||
"""
|
||||
Returns True if the backend supports the given padding options.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def generate_rsa_parameters_supported(self, public_exponent, key_size):
|
||||
"""
|
||||
Returns True if the backend supports the given parameters for key
|
||||
generation.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def load_rsa_private_numbers(self, numbers):
|
||||
"""
|
||||
Returns an RSAPrivateKey provider.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def load_rsa_public_numbers(self, numbers):
|
||||
"""
|
||||
Returns an RSAPublicKey provider.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class DSABackend(object):
|
||||
@abc.abstractmethod
|
||||
def generate_dsa_parameters(self, key_size):
|
||||
"""
|
||||
Generate a DSAParameters instance with a modulus of key_size bits.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def generate_dsa_private_key(self, parameters):
|
||||
"""
|
||||
Generate a DSAPrivateKey instance with parameters as a DSAParameters
|
||||
object.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def generate_dsa_private_key_and_parameters(self, key_size):
|
||||
"""
|
||||
Generate a DSAPrivateKey instance using key size only.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def dsa_hash_supported(self, algorithm):
|
||||
"""
|
||||
Return True if the hash algorithm is supported by the backend for DSA.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def dsa_parameters_supported(self, p, q, g):
|
||||
"""
|
||||
Return True if the parameters are supported by the backend for DSA.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def load_dsa_private_numbers(self, numbers):
|
||||
"""
|
||||
Returns a DSAPrivateKey provider.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def load_dsa_public_numbers(self, numbers):
|
||||
"""
|
||||
Returns a DSAPublicKey provider.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def load_dsa_parameter_numbers(self, numbers):
|
||||
"""
|
||||
Returns a DSAParameters provider.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class EllipticCurveBackend(object):
|
||||
@abc.abstractmethod
|
||||
def elliptic_curve_signature_algorithm_supported(
|
||||
self, signature_algorithm, curve
|
||||
):
|
||||
"""
|
||||
Returns True if the backend supports the named elliptic curve with the
|
||||
specified signature algorithm.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def elliptic_curve_supported(self, curve):
|
||||
"""
|
||||
Returns True if the backend supports the named elliptic curve.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def generate_elliptic_curve_private_key(self, curve):
|
||||
"""
|
||||
Return an object conforming to the EllipticCurvePrivateKey interface.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def load_elliptic_curve_public_numbers(self, numbers):
|
||||
"""
|
||||
Return an EllipticCurvePublicKey provider using the given numbers.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def load_elliptic_curve_private_numbers(self, numbers):
|
||||
"""
|
||||
Return an EllipticCurvePrivateKey provider using the given numbers.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def elliptic_curve_exchange_algorithm_supported(self, algorithm, curve):
|
||||
"""
|
||||
Returns whether the exchange algorithm is supported by this backend.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class PEMSerializationBackend(object):
|
||||
@abc.abstractmethod
|
||||
def load_pem_private_key(self, data, password):
|
||||
"""
|
||||
Loads a private key from PEM encoded data, using the provided password
|
||||
if the data is encrypted.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def load_pem_public_key(self, data):
|
||||
"""
|
||||
Loads a public key from PEM encoded data.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class DERSerializationBackend(object):
|
||||
@abc.abstractmethod
|
||||
def load_der_private_key(self, data, password):
|
||||
"""
|
||||
Loads a private key from DER encoded data. Uses the provided password
|
||||
if the data is encrypted.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def load_der_public_key(self, data):
|
||||
"""
|
||||
Loads a public key from DER encoded data.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class X509Backend(object):
|
||||
@abc.abstractmethod
|
||||
def load_pem_x509_certificate(self, data):
|
||||
"""
|
||||
Load an X.509 certificate from PEM encoded data.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def load_der_x509_certificate(self, data):
|
||||
"""
|
||||
Load an X.509 certificate from DER encoded data.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def load_der_x509_csr(self, data):
|
||||
"""
|
||||
Load an X.509 CSR from DER encoded data.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def load_pem_x509_csr(self, data):
|
||||
"""
|
||||
Load an X.509 CSR from PEM encoded data.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def create_x509_csr(self, builder, private_key, algorithm):
|
||||
"""
|
||||
Create and sign an X.509 CSR from a CSR builder object.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def create_x509_certificate(self, builder, private_key, algorithm):
|
||||
"""
|
||||
Create and sign an X.509 certificate from a CertificateBuilder object.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class DHBackend(object):
|
||||
@abc.abstractmethod
|
||||
def generate_dh_parameters(self, key_size):
|
||||
"""
|
||||
Generate a DHParameters instance with a modulus of key_size bits.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def generate_dh_private_key(self, parameters):
|
||||
"""
|
||||
Generate a DHPrivateKey instance with parameters as a DHParameters
|
||||
object.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def generate_dh_private_key_and_parameters(self, key_size):
|
||||
"""
|
||||
Generate a DHPrivateKey instance using key size only.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def load_dh_private_numbers(self, numbers):
|
||||
"""
|
||||
Returns a DHPrivateKey provider.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def load_dh_public_numbers(self, numbers):
|
||||
"""
|
||||
Returns a DHPublicKey provider.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def load_dh_parameter_numbers(self, numbers):
|
||||
"""
|
||||
Returns a DHParameters provider.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def dh_exchange_algorithm_supported(self, exchange_algorithm):
|
||||
"""
|
||||
Returns whether the exchange algorithm is supported by this backend.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def dh_parameters_supported(self, p, g):
|
||||
"""
|
||||
Returns whether the backend supports DH with these parameter values.
|
||||
"""
|
||||
|
|
@ -0,0 +1,386 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
|
||||
from cryptography.hazmat.backends.interfaces import (
|
||||
CMACBackend, CipherBackend, DERSerializationBackend, DSABackend,
|
||||
EllipticCurveBackend, HMACBackend, HashBackend, PBKDF2HMACBackend,
|
||||
PEMSerializationBackend, RSABackend, X509Backend
|
||||
)
|
||||
|
||||
|
||||
@utils.register_interface(CMACBackend)
|
||||
@utils.register_interface(CipherBackend)
|
||||
@utils.register_interface(DERSerializationBackend)
|
||||
@utils.register_interface(HashBackend)
|
||||
@utils.register_interface(HMACBackend)
|
||||
@utils.register_interface(PBKDF2HMACBackend)
|
||||
@utils.register_interface(RSABackend)
|
||||
@utils.register_interface(DSABackend)
|
||||
@utils.register_interface(EllipticCurveBackend)
|
||||
@utils.register_interface(PEMSerializationBackend)
|
||||
@utils.register_interface(X509Backend)
|
||||
class MultiBackend(object):
|
||||
name = "multibackend"
|
||||
|
||||
def __init__(self, backends):
|
||||
self._backends = backends
|
||||
|
||||
def _filtered_backends(self, interface):
|
||||
for b in self._backends:
|
||||
if isinstance(b, interface):
|
||||
yield b
|
||||
|
||||
def cipher_supported(self, cipher, mode):
|
||||
return any(
|
||||
b.cipher_supported(cipher, mode)
|
||||
for b in self._filtered_backends(CipherBackend)
|
||||
)
|
||||
|
||||
def create_symmetric_encryption_ctx(self, cipher, mode):
|
||||
for b in self._filtered_backends(CipherBackend):
|
||||
try:
|
||||
return b.create_symmetric_encryption_ctx(cipher, mode)
|
||||
except UnsupportedAlgorithm:
|
||||
pass
|
||||
raise UnsupportedAlgorithm(
|
||||
"cipher {0} in {1} mode is not supported by this backend.".format(
|
||||
cipher.name, mode.name if mode else mode),
|
||||
_Reasons.UNSUPPORTED_CIPHER
|
||||
)
|
||||
|
||||
def create_symmetric_decryption_ctx(self, cipher, mode):
|
||||
for b in self._filtered_backends(CipherBackend):
|
||||
try:
|
||||
return b.create_symmetric_decryption_ctx(cipher, mode)
|
||||
except UnsupportedAlgorithm:
|
||||
pass
|
||||
raise UnsupportedAlgorithm(
|
||||
"cipher {0} in {1} mode is not supported by this backend.".format(
|
||||
cipher.name, mode.name if mode else mode),
|
||||
_Reasons.UNSUPPORTED_CIPHER
|
||||
)
|
||||
|
||||
def hash_supported(self, algorithm):
|
||||
return any(
|
||||
b.hash_supported(algorithm)
|
||||
for b in self._filtered_backends(HashBackend)
|
||||
)
|
||||
|
||||
def create_hash_ctx(self, algorithm):
|
||||
for b in self._filtered_backends(HashBackend):
|
||||
try:
|
||||
return b.create_hash_ctx(algorithm)
|
||||
except UnsupportedAlgorithm:
|
||||
pass
|
||||
raise UnsupportedAlgorithm(
|
||||
"{0} is not a supported hash on this backend.".format(
|
||||
algorithm.name),
|
||||
_Reasons.UNSUPPORTED_HASH
|
||||
)
|
||||
|
||||
def hmac_supported(self, algorithm):
|
||||
return any(
|
||||
b.hmac_supported(algorithm)
|
||||
for b in self._filtered_backends(HMACBackend)
|
||||
)
|
||||
|
||||
def create_hmac_ctx(self, key, algorithm):
|
||||
for b in self._filtered_backends(HMACBackend):
|
||||
try:
|
||||
return b.create_hmac_ctx(key, algorithm)
|
||||
except UnsupportedAlgorithm:
|
||||
pass
|
||||
raise UnsupportedAlgorithm(
|
||||
"{0} is not a supported hash on this backend.".format(
|
||||
algorithm.name),
|
||||
_Reasons.UNSUPPORTED_HASH
|
||||
)
|
||||
|
||||
def pbkdf2_hmac_supported(self, algorithm):
|
||||
return any(
|
||||
b.pbkdf2_hmac_supported(algorithm)
|
||||
for b in self._filtered_backends(PBKDF2HMACBackend)
|
||||
)
|
||||
|
||||
def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations,
|
||||
key_material):
|
||||
for b in self._filtered_backends(PBKDF2HMACBackend):
|
||||
try:
|
||||
return b.derive_pbkdf2_hmac(
|
||||
algorithm, length, salt, iterations, key_material
|
||||
)
|
||||
except UnsupportedAlgorithm:
|
||||
pass
|
||||
raise UnsupportedAlgorithm(
|
||||
"{0} is not a supported hash on this backend.".format(
|
||||
algorithm.name),
|
||||
_Reasons.UNSUPPORTED_HASH
|
||||
)
|
||||
|
||||
def generate_rsa_private_key(self, public_exponent, key_size):
|
||||
for b in self._filtered_backends(RSABackend):
|
||||
return b.generate_rsa_private_key(public_exponent, key_size)
|
||||
raise UnsupportedAlgorithm("RSA is not supported by the backend.",
|
||||
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
||||
|
||||
def generate_rsa_parameters_supported(self, public_exponent, key_size):
|
||||
for b in self._filtered_backends(RSABackend):
|
||||
return b.generate_rsa_parameters_supported(
|
||||
public_exponent, key_size
|
||||
)
|
||||
raise UnsupportedAlgorithm("RSA is not supported by the backend.",
|
||||
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
||||
|
||||
def rsa_padding_supported(self, padding):
|
||||
for b in self._filtered_backends(RSABackend):
|
||||
return b.rsa_padding_supported(padding)
|
||||
raise UnsupportedAlgorithm("RSA is not supported by the backend.",
|
||||
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
||||
|
||||
def load_rsa_private_numbers(self, numbers):
|
||||
for b in self._filtered_backends(RSABackend):
|
||||
return b.load_rsa_private_numbers(numbers)
|
||||
|
||||
raise UnsupportedAlgorithm("RSA is not supported by the backend",
|
||||
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
||||
|
||||
def load_rsa_public_numbers(self, numbers):
|
||||
for b in self._filtered_backends(RSABackend):
|
||||
return b.load_rsa_public_numbers(numbers)
|
||||
|
||||
raise UnsupportedAlgorithm("RSA is not supported by the backend",
|
||||
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
||||
|
||||
def generate_dsa_parameters(self, key_size):
|
||||
for b in self._filtered_backends(DSABackend):
|
||||
return b.generate_dsa_parameters(key_size)
|
||||
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
|
||||
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
||||
|
||||
def generate_dsa_private_key(self, parameters):
|
||||
for b in self._filtered_backends(DSABackend):
|
||||
return b.generate_dsa_private_key(parameters)
|
||||
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
|
||||
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
||||
|
||||
def generate_dsa_private_key_and_parameters(self, key_size):
|
||||
for b in self._filtered_backends(DSABackend):
|
||||
return b.generate_dsa_private_key_and_parameters(key_size)
|
||||
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
|
||||
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
||||
|
||||
def dsa_hash_supported(self, algorithm):
|
||||
for b in self._filtered_backends(DSABackend):
|
||||
return b.dsa_hash_supported(algorithm)
|
||||
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
|
||||
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
||||
|
||||
def dsa_parameters_supported(self, p, q, g):
|
||||
for b in self._filtered_backends(DSABackend):
|
||||
return b.dsa_parameters_supported(p, q, g)
|
||||
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
|
||||
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
||||
|
||||
def load_dsa_public_numbers(self, numbers):
|
||||
for b in self._filtered_backends(DSABackend):
|
||||
return b.load_dsa_public_numbers(numbers)
|
||||
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
|
||||
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
||||
|
||||
def load_dsa_private_numbers(self, numbers):
|
||||
for b in self._filtered_backends(DSABackend):
|
||||
return b.load_dsa_private_numbers(numbers)
|
||||
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
|
||||
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
||||
|
||||
def load_dsa_parameter_numbers(self, numbers):
|
||||
for b in self._filtered_backends(DSABackend):
|
||||
return b.load_dsa_parameter_numbers(numbers)
|
||||
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
|
||||
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
||||
|
||||
def cmac_algorithm_supported(self, algorithm):
|
||||
return any(
|
||||
b.cmac_algorithm_supported(algorithm)
|
||||
for b in self._filtered_backends(CMACBackend)
|
||||
)
|
||||
|
||||
def create_cmac_ctx(self, algorithm):
|
||||
for b in self._filtered_backends(CMACBackend):
|
||||
try:
|
||||
return b.create_cmac_ctx(algorithm)
|
||||
except UnsupportedAlgorithm:
|
||||
pass
|
||||
raise UnsupportedAlgorithm("This backend does not support CMAC.",
|
||||
_Reasons.UNSUPPORTED_CIPHER)
|
||||
|
||||
def elliptic_curve_supported(self, curve):
|
||||
return any(
|
||||
b.elliptic_curve_supported(curve)
|
||||
for b in self._filtered_backends(EllipticCurveBackend)
|
||||
)
|
||||
|
||||
def elliptic_curve_signature_algorithm_supported(
|
||||
self, signature_algorithm, curve
|
||||
):
|
||||
return any(
|
||||
b.elliptic_curve_signature_algorithm_supported(
|
||||
signature_algorithm, curve
|
||||
)
|
||||
for b in self._filtered_backends(EllipticCurveBackend)
|
||||
)
|
||||
|
||||
def generate_elliptic_curve_private_key(self, curve):
|
||||
for b in self._filtered_backends(EllipticCurveBackend):
|
||||
try:
|
||||
return b.generate_elliptic_curve_private_key(curve)
|
||||
except UnsupportedAlgorithm:
|
||||
continue
|
||||
|
||||
raise UnsupportedAlgorithm(
|
||||
"This backend does not support this elliptic curve.",
|
||||
_Reasons.UNSUPPORTED_ELLIPTIC_CURVE
|
||||
)
|
||||
|
||||
def load_elliptic_curve_private_numbers(self, numbers):
|
||||
for b in self._filtered_backends(EllipticCurveBackend):
|
||||
try:
|
||||
return b.load_elliptic_curve_private_numbers(numbers)
|
||||
except UnsupportedAlgorithm:
|
||||
continue
|
||||
|
||||
raise UnsupportedAlgorithm(
|
||||
"This backend does not support this elliptic curve.",
|
||||
_Reasons.UNSUPPORTED_ELLIPTIC_CURVE
|
||||
)
|
||||
|
||||
def load_elliptic_curve_public_numbers(self, numbers):
|
||||
for b in self._filtered_backends(EllipticCurveBackend):
|
||||
try:
|
||||
return b.load_elliptic_curve_public_numbers(numbers)
|
||||
except UnsupportedAlgorithm:
|
||||
continue
|
||||
|
||||
raise UnsupportedAlgorithm(
|
||||
"This backend does not support this elliptic curve.",
|
||||
_Reasons.UNSUPPORTED_ELLIPTIC_CURVE
|
||||
)
|
||||
|
||||
def elliptic_curve_exchange_algorithm_supported(self, algorithm, curve):
|
||||
return any(
|
||||
b.elliptic_curve_exchange_algorithm_supported(algorithm, curve)
|
||||
for b in self._filtered_backends(EllipticCurveBackend)
|
||||
)
|
||||
|
||||
def load_pem_private_key(self, data, password):
|
||||
for b in self._filtered_backends(PEMSerializationBackend):
|
||||
return b.load_pem_private_key(data, password)
|
||||
|
||||
raise UnsupportedAlgorithm(
|
||||
"This backend does not support this key serialization.",
|
||||
_Reasons.UNSUPPORTED_SERIALIZATION
|
||||
)
|
||||
|
||||
def load_pem_public_key(self, data):
|
||||
for b in self._filtered_backends(PEMSerializationBackend):
|
||||
return b.load_pem_public_key(data)
|
||||
|
||||
raise UnsupportedAlgorithm(
|
||||
"This backend does not support this key serialization.",
|
||||
_Reasons.UNSUPPORTED_SERIALIZATION
|
||||
)
|
||||
|
||||
def load_der_private_key(self, data, password):
|
||||
for b in self._filtered_backends(DERSerializationBackend):
|
||||
return b.load_der_private_key(data, password)
|
||||
|
||||
raise UnsupportedAlgorithm(
|
||||
"This backend does not support this key serialization.",
|
||||
_Reasons.UNSUPPORTED_SERIALIZATION
|
||||
)
|
||||
|
||||
def load_der_public_key(self, data):
|
||||
for b in self._filtered_backends(DERSerializationBackend):
|
||||
return b.load_der_public_key(data)
|
||||
|
||||
raise UnsupportedAlgorithm(
|
||||
"This backend does not support this key serialization.",
|
||||
_Reasons.UNSUPPORTED_SERIALIZATION
|
||||
)
|
||||
|
||||
def load_pem_x509_certificate(self, data):
|
||||
for b in self._filtered_backends(X509Backend):
|
||||
return b.load_pem_x509_certificate(data)
|
||||
|
||||
raise UnsupportedAlgorithm(
|
||||
"This backend does not support X.509.",
|
||||
_Reasons.UNSUPPORTED_X509
|
||||
)
|
||||
|
||||
def load_der_x509_certificate(self, data):
|
||||
for b in self._filtered_backends(X509Backend):
|
||||
return b.load_der_x509_certificate(data)
|
||||
|
||||
raise UnsupportedAlgorithm(
|
||||
"This backend does not support X.509.",
|
||||
_Reasons.UNSUPPORTED_X509
|
||||
)
|
||||
|
||||
def load_pem_x509_crl(self, data):
|
||||
for b in self._filtered_backends(X509Backend):
|
||||
return b.load_pem_x509_crl(data)
|
||||
|
||||
raise UnsupportedAlgorithm(
|
||||
"This backend does not support X.509.",
|
||||
_Reasons.UNSUPPORTED_X509
|
||||
)
|
||||
|
||||
def load_der_x509_crl(self, data):
|
||||
for b in self._filtered_backends(X509Backend):
|
||||
return b.load_der_x509_crl(data)
|
||||
|
||||
raise UnsupportedAlgorithm(
|
||||
"This backend does not support X.509.",
|
||||
_Reasons.UNSUPPORTED_X509
|
||||
)
|
||||
|
||||
def load_der_x509_csr(self, data):
|
||||
for b in self._filtered_backends(X509Backend):
|
||||
return b.load_der_x509_csr(data)
|
||||
|
||||
raise UnsupportedAlgorithm(
|
||||
"This backend does not support X.509.",
|
||||
_Reasons.UNSUPPORTED_X509
|
||||
)
|
||||
|
||||
def load_pem_x509_csr(self, data):
|
||||
for b in self._filtered_backends(X509Backend):
|
||||
return b.load_pem_x509_csr(data)
|
||||
|
||||
raise UnsupportedAlgorithm(
|
||||
"This backend does not support X.509.",
|
||||
_Reasons.UNSUPPORTED_X509
|
||||
)
|
||||
|
||||
def create_x509_csr(self, builder, private_key, algorithm):
|
||||
for b in self._filtered_backends(X509Backend):
|
||||
return b.create_x509_csr(builder, private_key, algorithm)
|
||||
|
||||
raise UnsupportedAlgorithm(
|
||||
"This backend does not support X.509.",
|
||||
_Reasons.UNSUPPORTED_X509
|
||||
)
|
||||
|
||||
def create_x509_certificate(self, builder, private_key, algorithm):
|
||||
for b in self._filtered_backends(X509Backend):
|
||||
return b.create_x509_certificate(builder, private_key, algorithm)
|
||||
|
||||
raise UnsupportedAlgorithm(
|
||||
"This backend does not support X.509.",
|
||||
_Reasons.UNSUPPORTED_X509
|
||||
)
|
||||
|
|
@ -0,0 +1,10 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from cryptography.hazmat.backends.openssl.backend import backend
|
||||
|
||||
|
||||
__all__ = ["backend"]
|
||||
File diff suppressed because it is too large
Load diff
|
|
@ -0,0 +1,213 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.exceptions import InvalidTag, UnsupportedAlgorithm, _Reasons
|
||||
from cryptography.hazmat.primitives import ciphers
|
||||
from cryptography.hazmat.primitives.ciphers import modes
|
||||
|
||||
|
||||
@utils.register_interface(ciphers.CipherContext)
|
||||
@utils.register_interface(ciphers.AEADCipherContext)
|
||||
@utils.register_interface(ciphers.AEADEncryptionContext)
|
||||
class _CipherContext(object):
|
||||
_ENCRYPT = 1
|
||||
_DECRYPT = 0
|
||||
|
||||
def __init__(self, backend, cipher, mode, operation):
|
||||
self._backend = backend
|
||||
self._cipher = cipher
|
||||
self._mode = mode
|
||||
self._operation = operation
|
||||
self._tag = None
|
||||
|
||||
if isinstance(self._cipher, ciphers.BlockCipherAlgorithm):
|
||||
self._block_size = self._cipher.block_size
|
||||
else:
|
||||
self._block_size = 1
|
||||
|
||||
ctx = self._backend._lib.EVP_CIPHER_CTX_new()
|
||||
ctx = self._backend._ffi.gc(
|
||||
ctx, self._backend._lib.EVP_CIPHER_CTX_free
|
||||
)
|
||||
|
||||
registry = self._backend._cipher_registry
|
||||
try:
|
||||
adapter = registry[type(cipher), type(mode)]
|
||||
except KeyError:
|
||||
raise UnsupportedAlgorithm(
|
||||
"cipher {0} in {1} mode is not supported "
|
||||
"by this backend.".format(
|
||||
cipher.name, mode.name if mode else mode),
|
||||
_Reasons.UNSUPPORTED_CIPHER
|
||||
)
|
||||
|
||||
evp_cipher = adapter(self._backend, cipher, mode)
|
||||
if evp_cipher == self._backend._ffi.NULL:
|
||||
raise UnsupportedAlgorithm(
|
||||
"cipher {0} in {1} mode is not supported "
|
||||
"by this backend.".format(
|
||||
cipher.name, mode.name if mode else mode),
|
||||
_Reasons.UNSUPPORTED_CIPHER
|
||||
)
|
||||
|
||||
if isinstance(mode, modes.ModeWithInitializationVector):
|
||||
iv_nonce = mode.initialization_vector
|
||||
elif isinstance(mode, modes.ModeWithNonce):
|
||||
iv_nonce = mode.nonce
|
||||
else:
|
||||
iv_nonce = self._backend._ffi.NULL
|
||||
# begin init with cipher and operation type
|
||||
res = self._backend._lib.EVP_CipherInit_ex(ctx, evp_cipher,
|
||||
self._backend._ffi.NULL,
|
||||
self._backend._ffi.NULL,
|
||||
self._backend._ffi.NULL,
|
||||
operation)
|
||||
self._backend.openssl_assert(res != 0)
|
||||
# set the key length to handle variable key ciphers
|
||||
res = self._backend._lib.EVP_CIPHER_CTX_set_key_length(
|
||||
ctx, len(cipher.key)
|
||||
)
|
||||
self._backend.openssl_assert(res != 0)
|
||||
if isinstance(mode, modes.GCM):
|
||||
res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
|
||||
ctx, self._backend._lib.EVP_CTRL_GCM_SET_IVLEN,
|
||||
len(iv_nonce), self._backend._ffi.NULL
|
||||
)
|
||||
self._backend.openssl_assert(res != 0)
|
||||
if operation == self._DECRYPT:
|
||||
res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
|
||||
ctx, self._backend._lib.EVP_CTRL_GCM_SET_TAG,
|
||||
len(mode.tag), mode.tag
|
||||
)
|
||||
self._backend.openssl_assert(res != 0)
|
||||
|
||||
# pass key/iv
|
||||
res = self._backend._lib.EVP_CipherInit_ex(
|
||||
ctx,
|
||||
self._backend._ffi.NULL,
|
||||
self._backend._ffi.NULL,
|
||||
cipher.key,
|
||||
iv_nonce,
|
||||
operation
|
||||
)
|
||||
self._backend.openssl_assert(res != 0)
|
||||
# We purposely disable padding here as it's handled higher up in the
|
||||
# API.
|
||||
self._backend._lib.EVP_CIPHER_CTX_set_padding(ctx, 0)
|
||||
self._ctx = ctx
|
||||
|
||||
def update(self, data):
|
||||
# OpenSSL 0.9.8e has an assertion in its EVP code that causes it
|
||||
# to SIGABRT if you call update with an empty byte string. This can be
|
||||
# removed when we drop support for 0.9.8e (CentOS/RHEL 5). This branch
|
||||
# should be taken only when length is zero and mode is not GCM because
|
||||
# AES GCM can return improper tag values if you don't call update
|
||||
# with empty plaintext when authenticating AAD for ...reasons.
|
||||
if len(data) == 0 and not isinstance(self._mode, modes.GCM):
|
||||
return b""
|
||||
|
||||
buf = self._backend._ffi.new("unsigned char[]",
|
||||
len(data) + self._block_size - 1)
|
||||
outlen = self._backend._ffi.new("int *")
|
||||
res = self._backend._lib.EVP_CipherUpdate(self._ctx, buf, outlen, data,
|
||||
len(data))
|
||||
self._backend.openssl_assert(res != 0)
|
||||
return self._backend._ffi.buffer(buf)[:outlen[0]]
|
||||
|
||||
def finalize(self):
|
||||
# OpenSSL 1.0.1 on Ubuntu 12.04 (and possibly other distributions)
|
||||
# appears to have a bug where you must make at least one call to update
|
||||
# even if you are only using authenticate_additional_data or the
|
||||
# GCM tag will be wrong. An (empty) call to update resolves this
|
||||
# and is harmless for all other versions of OpenSSL.
|
||||
if isinstance(self._mode, modes.GCM):
|
||||
self.update(b"")
|
||||
|
||||
buf = self._backend._ffi.new("unsigned char[]", self._block_size)
|
||||
outlen = self._backend._ffi.new("int *")
|
||||
res = self._backend._lib.EVP_CipherFinal_ex(self._ctx, buf, outlen)
|
||||
if res == 0:
|
||||
errors = self._backend._consume_errors()
|
||||
|
||||
if not errors and isinstance(self._mode, modes.GCM):
|
||||
raise InvalidTag
|
||||
|
||||
self._backend.openssl_assert(
|
||||
errors[0][1:] == (
|
||||
self._backend._lib.ERR_LIB_EVP,
|
||||
self._backend._lib.EVP_F_EVP_ENCRYPTFINAL_EX,
|
||||
self._backend._lib.EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH
|
||||
) or errors[0][1:] == (
|
||||
self._backend._lib.ERR_LIB_EVP,
|
||||
self._backend._lib.EVP_F_EVP_DECRYPTFINAL_EX,
|
||||
self._backend._lib.EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH
|
||||
)
|
||||
)
|
||||
raise ValueError(
|
||||
"The length of the provided data is not a multiple of "
|
||||
"the block length."
|
||||
)
|
||||
|
||||
if (isinstance(self._mode, modes.GCM) and
|
||||
self._operation == self._ENCRYPT):
|
||||
block_byte_size = self._block_size // 8
|
||||
tag_buf = self._backend._ffi.new(
|
||||
"unsigned char[]", block_byte_size
|
||||
)
|
||||
res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
|
||||
self._ctx, self._backend._lib.EVP_CTRL_GCM_GET_TAG,
|
||||
block_byte_size, tag_buf
|
||||
)
|
||||
self._backend.openssl_assert(res != 0)
|
||||
self._tag = self._backend._ffi.buffer(tag_buf)[:]
|
||||
|
||||
res = self._backend._lib.EVP_CIPHER_CTX_cleanup(self._ctx)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
return self._backend._ffi.buffer(buf)[:outlen[0]]
|
||||
|
||||
def authenticate_additional_data(self, data):
|
||||
outlen = self._backend._ffi.new("int *")
|
||||
res = self._backend._lib.EVP_CipherUpdate(
|
||||
self._ctx, self._backend._ffi.NULL, outlen, data, len(data)
|
||||
)
|
||||
self._backend.openssl_assert(res != 0)
|
||||
|
||||
tag = utils.read_only_property("_tag")
|
||||
|
||||
|
||||
@utils.register_interface(ciphers.CipherContext)
|
||||
class _AESCTRCipherContext(object):
|
||||
"""
|
||||
This is needed to provide support for AES CTR mode in OpenSSL 0.9.8. It can
|
||||
be removed when we drop 0.9.8 support (RHEL5 extended life ends 2020).
|
||||
"""
|
||||
def __init__(self, backend, cipher, mode):
|
||||
self._backend = backend
|
||||
|
||||
self._key = self._backend._ffi.new("AES_KEY *")
|
||||
res = self._backend._lib.AES_set_encrypt_key(
|
||||
cipher.key, len(cipher.key) * 8, self._key
|
||||
)
|
||||
self._backend.openssl_assert(res == 0)
|
||||
self._ecount = self._backend._ffi.new("char[]", 16)
|
||||
self._nonce = self._backend._ffi.new("char[16]", mode.nonce)
|
||||
self._num = self._backend._ffi.new("unsigned int *", 0)
|
||||
|
||||
def update(self, data):
|
||||
buf = self._backend._ffi.new("unsigned char[]", len(data))
|
||||
self._backend._lib.AES_ctr128_encrypt(
|
||||
data, buf, len(data), self._key, self._nonce,
|
||||
self._ecount, self._num
|
||||
)
|
||||
return self._backend._ffi.buffer(buf)[:]
|
||||
|
||||
def finalize(self):
|
||||
self._key = None
|
||||
self._ecount = None
|
||||
self._nonce = None
|
||||
self._num = None
|
||||
return b""
|
||||
|
|
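# --- Illustrative usage sketch (editor's note, not part of this commit) ------
# The _CipherContext above backs the symmetric Cipher API; a typical AES-GCM
# round trip through that API looks roughly like this (key and nonce are
# random placeholders).
import os

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

key = os.urandom(32)
iv = os.urandom(12)

encryptor = Cipher(
    algorithms.AES(key), modes.GCM(iv), default_backend()
).encryptor()
encryptor.authenticate_additional_data(b"associated data")
ciphertext = encryptor.update(b"secret message") + encryptor.finalize()
tag = encryptor.tag  # gathered in finalize() via EVP_CTRL_GCM_GET_TAG above

decryptor = Cipher(
    algorithms.AES(key), modes.GCM(iv, tag), default_backend()
).decryptor()
decryptor.authenticate_additional_data(b"associated data")
assert decryptor.update(ciphertext) + decryptor.finalize() == b"secret message"
# ------------------------------------------------------------------------------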
@@ -0,0 +1,80 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.exceptions import (
|
||||
InvalidSignature, UnsupportedAlgorithm, _Reasons
|
||||
)
|
||||
from cryptography.hazmat.primitives import constant_time, interfaces
|
||||
from cryptography.hazmat.primitives.ciphers.modes import CBC
|
||||
|
||||
|
||||
@utils.register_interface(interfaces.MACContext)
|
||||
class _CMACContext(object):
|
||||
def __init__(self, backend, algorithm, ctx=None):
|
||||
if not backend.cmac_algorithm_supported(algorithm):
|
||||
raise UnsupportedAlgorithm("This backend does not support CMAC.",
|
||||
_Reasons.UNSUPPORTED_CIPHER)
|
||||
|
||||
self._backend = backend
|
||||
self._key = algorithm.key
|
||||
self._algorithm = algorithm
|
||||
self._output_length = algorithm.block_size // 8
|
||||
|
||||
if ctx is None:
|
||||
registry = self._backend._cipher_registry
|
||||
adapter = registry[type(algorithm), CBC]
|
||||
|
||||
evp_cipher = adapter(self._backend, algorithm, CBC)
|
||||
|
||||
ctx = self._backend._lib.CMAC_CTX_new()
|
||||
|
||||
self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
|
||||
ctx = self._backend._ffi.gc(ctx, self._backend._lib.CMAC_CTX_free)
|
||||
|
||||
self._backend._lib.CMAC_Init(
|
||||
ctx, self._key, len(self._key),
|
||||
evp_cipher, self._backend._ffi.NULL
|
||||
)
|
||||
|
||||
self._ctx = ctx
|
||||
|
||||
algorithm = utils.read_only_property("_algorithm")
|
||||
|
||||
def update(self, data):
|
||||
res = self._backend._lib.CMAC_Update(self._ctx, data, len(data))
|
||||
self._backend.openssl_assert(res == 1)
|
||||
|
||||
def finalize(self):
|
||||
buf = self._backend._ffi.new("unsigned char[]", self._output_length)
|
||||
length = self._backend._ffi.new("size_t *", self._output_length)
|
||||
res = self._backend._lib.CMAC_Final(
|
||||
self._ctx, buf, length
|
||||
)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
|
||||
self._ctx = None
|
||||
|
||||
return self._backend._ffi.buffer(buf)[:]
|
||||
|
||||
def copy(self):
|
||||
copied_ctx = self._backend._lib.CMAC_CTX_new()
|
||||
copied_ctx = self._backend._ffi.gc(
|
||||
copied_ctx, self._backend._lib.CMAC_CTX_free
|
||||
)
|
||||
res = self._backend._lib.CMAC_CTX_copy(
|
||||
copied_ctx, self._ctx
|
||||
)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
return _CMACContext(
|
||||
self._backend, self._algorithm, ctx=copied_ctx
|
||||
)
|
||||
|
||||
def verify(self, signature):
|
||||
digest = self.finalize()
|
||||
if not constant_time.bytes_eq(digest, signature):
|
||||
raise InvalidSignature("Signature did not match digest.")
|
||||
|
|
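# --- Illustrative usage sketch (editor's note, not part of this commit) ------
# The _CMACContext above backs the CMAC primitive; typical use (the key is a
# random placeholder):
import os

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import cmac
from cryptography.hazmat.primitives.ciphers import algorithms

key = os.urandom(16)

c = cmac.CMAC(algorithms.AES(key), default_backend())
c.update(b"message to authenticate")
tag = c.finalize()

verifier = cmac.CMAC(algorithms.AES(key), default_backend())
verifier.update(b"message to authenticate")
verifier.verify(tag)  # constant-time compare; raises InvalidSignature on mismatch
# ------------------------------------------------------------------------------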
@@ -0,0 +1,218 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.exceptions import InvalidSignature
|
||||
from cryptography.hazmat.backends.openssl.utils import _truncate_digest
|
||||
from cryptography.hazmat.primitives import hashes, serialization
|
||||
from cryptography.hazmat.primitives.asymmetric import (
|
||||
AsymmetricSignatureContext, AsymmetricVerificationContext, dsa
|
||||
)
|
||||
|
||||
|
||||
def _truncate_digest_for_dsa(dsa_cdata, digest, backend):
|
||||
"""
|
||||
This function truncates digests that are longer than a given DSA
|
||||
key's length so they can be signed. OpenSSL does this for us in
|
||||
1.0.0c+ and it isn't needed in 0.9.8, but that leaves us with three
|
||||
releases (1.0.0, 1.0.0a, and 1.0.0b) where this is a problem. This
|
||||
truncation is not required in 0.9.8 because DSA is limited to SHA-1.
|
||||
"""
|
||||
|
||||
order_bits = backend._lib.BN_num_bits(dsa_cdata.q)
|
||||
return _truncate_digest(digest, order_bits)
|
||||
|
||||
|
||||
@utils.register_interface(AsymmetricVerificationContext)
|
||||
class _DSAVerificationContext(object):
|
||||
def __init__(self, backend, public_key, signature, algorithm):
|
||||
self._backend = backend
|
||||
self._public_key = public_key
|
||||
self._signature = signature
|
||||
self._algorithm = algorithm
|
||||
|
||||
self._hash_ctx = hashes.Hash(self._algorithm, self._backend)
|
||||
|
||||
def update(self, data):
|
||||
self._hash_ctx.update(data)
|
||||
|
||||
def verify(self):
|
||||
data_to_verify = self._hash_ctx.finalize()
|
||||
|
||||
data_to_verify = _truncate_digest_for_dsa(
|
||||
self._public_key._dsa_cdata, data_to_verify, self._backend
|
||||
)
|
||||
|
||||
# The first parameter passed to DSA_verify is unused by OpenSSL but
|
||||
# must be an integer.
|
||||
res = self._backend._lib.DSA_verify(
|
||||
0, data_to_verify, len(data_to_verify), self._signature,
|
||||
len(self._signature), self._public_key._dsa_cdata)
|
||||
|
||||
if res != 1:
|
||||
self._backend._consume_errors()
|
||||
raise InvalidSignature
|
||||
|
||||
|
||||
@utils.register_interface(AsymmetricSignatureContext)
|
||||
class _DSASignatureContext(object):
|
||||
def __init__(self, backend, private_key, algorithm):
|
||||
self._backend = backend
|
||||
self._private_key = private_key
|
||||
self._algorithm = algorithm
|
||||
self._hash_ctx = hashes.Hash(self._algorithm, self._backend)
|
||||
|
||||
def update(self, data):
|
||||
self._hash_ctx.update(data)
|
||||
|
||||
def finalize(self):
|
||||
data_to_sign = self._hash_ctx.finalize()
|
||||
data_to_sign = _truncate_digest_for_dsa(
|
||||
self._private_key._dsa_cdata, data_to_sign, self._backend
|
||||
)
|
||||
sig_buf_len = self._backend._lib.DSA_size(self._private_key._dsa_cdata)
|
||||
sig_buf = self._backend._ffi.new("unsigned char[]", sig_buf_len)
|
||||
buflen = self._backend._ffi.new("unsigned int *")
|
||||
|
||||
# The first parameter passed to DSA_sign is unused by OpenSSL but
|
||||
# must be an integer.
|
||||
res = self._backend._lib.DSA_sign(
|
||||
0, data_to_sign, len(data_to_sign), sig_buf,
|
||||
buflen, self._private_key._dsa_cdata)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
self._backend.openssl_assert(buflen[0])
|
||||
|
||||
return self._backend._ffi.buffer(sig_buf)[:buflen[0]]
|
||||
|
||||
|
||||
@utils.register_interface(dsa.DSAParametersWithNumbers)
|
||||
class _DSAParameters(object):
|
||||
def __init__(self, backend, dsa_cdata):
|
||||
self._backend = backend
|
||||
self._dsa_cdata = dsa_cdata
|
||||
|
||||
def parameter_numbers(self):
|
||||
return dsa.DSAParameterNumbers(
|
||||
p=self._backend._bn_to_int(self._dsa_cdata.p),
|
||||
q=self._backend._bn_to_int(self._dsa_cdata.q),
|
||||
g=self._backend._bn_to_int(self._dsa_cdata.g)
|
||||
)
|
||||
|
||||
def generate_private_key(self):
|
||||
return self._backend.generate_dsa_private_key(self)
|
||||
|
||||
|
||||
@utils.register_interface(dsa.DSAPrivateKeyWithSerialization)
|
||||
class _DSAPrivateKey(object):
|
||||
def __init__(self, backend, dsa_cdata, evp_pkey):
|
||||
self._backend = backend
|
||||
self._dsa_cdata = dsa_cdata
|
||||
self._evp_pkey = evp_pkey
|
||||
self._key_size = self._backend._lib.BN_num_bits(self._dsa_cdata.p)
|
||||
|
||||
key_size = utils.read_only_property("_key_size")
|
||||
|
||||
def signer(self, signature_algorithm):
|
||||
return _DSASignatureContext(self._backend, self, signature_algorithm)
|
||||
|
||||
def private_numbers(self):
|
||||
return dsa.DSAPrivateNumbers(
|
||||
public_numbers=dsa.DSAPublicNumbers(
|
||||
parameter_numbers=dsa.DSAParameterNumbers(
|
||||
p=self._backend._bn_to_int(self._dsa_cdata.p),
|
||||
q=self._backend._bn_to_int(self._dsa_cdata.q),
|
||||
g=self._backend._bn_to_int(self._dsa_cdata.g)
|
||||
),
|
||||
y=self._backend._bn_to_int(self._dsa_cdata.pub_key)
|
||||
),
|
||||
x=self._backend._bn_to_int(self._dsa_cdata.priv_key)
|
||||
)
|
||||
|
||||
def public_key(self):
|
||||
dsa_cdata = self._backend._lib.DSA_new()
|
||||
self._backend.openssl_assert(dsa_cdata != self._backend._ffi.NULL)
|
||||
dsa_cdata = self._backend._ffi.gc(
|
||||
dsa_cdata, self._backend._lib.DSA_free
|
||||
)
|
||||
dsa_cdata.p = self._backend._lib.BN_dup(self._dsa_cdata.p)
|
||||
dsa_cdata.q = self._backend._lib.BN_dup(self._dsa_cdata.q)
|
||||
dsa_cdata.g = self._backend._lib.BN_dup(self._dsa_cdata.g)
|
||||
dsa_cdata.pub_key = self._backend._lib.BN_dup(self._dsa_cdata.pub_key)
|
||||
evp_pkey = self._backend._dsa_cdata_to_evp_pkey(dsa_cdata)
|
||||
return _DSAPublicKey(self._backend, dsa_cdata, evp_pkey)
|
||||
|
||||
def parameters(self):
|
||||
dsa_cdata = self._backend._lib.DSA_new()
|
||||
self._backend.openssl_assert(dsa_cdata != self._backend._ffi.NULL)
|
||||
dsa_cdata = self._backend._ffi.gc(
|
||||
dsa_cdata, self._backend._lib.DSA_free
|
||||
)
|
||||
dsa_cdata.p = self._backend._lib.BN_dup(self._dsa_cdata.p)
|
||||
dsa_cdata.q = self._backend._lib.BN_dup(self._dsa_cdata.q)
|
||||
dsa_cdata.g = self._backend._lib.BN_dup(self._dsa_cdata.g)
|
||||
return _DSAParameters(self._backend, dsa_cdata)
|
||||
|
||||
def private_bytes(self, encoding, format, encryption_algorithm):
|
||||
return self._backend._private_key_bytes(
|
||||
encoding,
|
||||
format,
|
||||
encryption_algorithm,
|
||||
self._evp_pkey,
|
||||
self._dsa_cdata
|
||||
)
|
||||
|
||||
|
||||
@utils.register_interface(dsa.DSAPublicKeyWithSerialization)
|
||||
class _DSAPublicKey(object):
|
||||
def __init__(self, backend, dsa_cdata, evp_pkey):
|
||||
self._backend = backend
|
||||
self._dsa_cdata = dsa_cdata
|
||||
self._evp_pkey = evp_pkey
|
||||
self._key_size = self._backend._lib.BN_num_bits(self._dsa_cdata.p)
|
||||
|
||||
key_size = utils.read_only_property("_key_size")
|
||||
|
||||
def verifier(self, signature, signature_algorithm):
|
||||
if not isinstance(signature, bytes):
|
||||
raise TypeError("signature must be bytes.")
|
||||
|
||||
return _DSAVerificationContext(
|
||||
self._backend, self, signature, signature_algorithm
|
||||
)
|
||||
|
||||
def public_numbers(self):
|
||||
return dsa.DSAPublicNumbers(
|
||||
parameter_numbers=dsa.DSAParameterNumbers(
|
||||
p=self._backend._bn_to_int(self._dsa_cdata.p),
|
||||
q=self._backend._bn_to_int(self._dsa_cdata.q),
|
||||
g=self._backend._bn_to_int(self._dsa_cdata.g)
|
||||
),
|
||||
y=self._backend._bn_to_int(self._dsa_cdata.pub_key)
|
||||
)
|
||||
|
||||
def parameters(self):
|
||||
dsa_cdata = self._backend._lib.DSA_new()
|
||||
self._backend.openssl_assert(dsa_cdata != self._backend._ffi.NULL)
|
||||
dsa_cdata = self._backend._ffi.gc(
|
||||
dsa_cdata, self._backend._lib.DSA_free
|
||||
)
|
||||
dsa_cdata.p = self._backend._lib.BN_dup(self._dsa_cdata.p)
|
||||
dsa_cdata.q = self._backend._lib.BN_dup(self._dsa_cdata.q)
|
||||
dsa_cdata.g = self._backend._lib.BN_dup(self._dsa_cdata.g)
|
||||
return _DSAParameters(self._backend, dsa_cdata)
|
||||
|
||||
def public_bytes(self, encoding, format):
|
||||
if format is serialization.PublicFormat.PKCS1:
|
||||
raise ValueError(
|
||||
"DSA public keys do not support PKCS1 serialization"
|
||||
)
|
||||
|
||||
return self._backend._public_key_bytes(
|
||||
encoding,
|
||||
format,
|
||||
self._evp_pkey,
|
||||
None
|
||||
)
|
||||
|
|
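# --- Illustrative usage sketch (editor's note, not part of this commit) ------
# The _DSAPrivateKey/_DSAPublicKey signer()/verifier() contexts above are used
# through the public API roughly like this:
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import dsa

private_key = dsa.generate_private_key(key_size=2048, backend=default_backend())

signer = private_key.signer(hashes.SHA256())
signer.update(b"data to sign")
signature = signer.finalize()

verifier = private_key.public_key().verifier(signature, hashes.SHA256())
verifier.update(b"data to sign")
verifier.verify()  # raises InvalidSignature if the signature does not match
# ------------------------------------------------------------------------------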
@@ -0,0 +1,299 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.exceptions import (
|
||||
InvalidSignature, UnsupportedAlgorithm, _Reasons
|
||||
)
|
||||
from cryptography.hazmat.backends.openssl.utils import _truncate_digest
|
||||
from cryptography.hazmat.primitives import hashes, serialization
|
||||
from cryptography.hazmat.primitives.asymmetric import (
|
||||
AsymmetricSignatureContext, AsymmetricVerificationContext, ec
|
||||
)
|
||||
|
||||
|
||||
def _truncate_digest_for_ecdsa(ec_key_cdata, digest, backend):
|
||||
"""
|
||||
This function truncates digests that are longer than a given elliptic
|
||||
curve key's length so they can be signed. Since elliptic curve keys are
|
||||
much shorter than RSA keys many digests (e.g. SHA-512) may require
|
||||
truncation.
|
||||
"""
|
||||
|
||||
_lib = backend._lib
|
||||
_ffi = backend._ffi
|
||||
|
||||
group = _lib.EC_KEY_get0_group(ec_key_cdata)
|
||||
|
||||
with backend._tmp_bn_ctx() as bn_ctx:
|
||||
order = _lib.BN_CTX_get(bn_ctx)
|
||||
backend.openssl_assert(order != _ffi.NULL)
|
||||
|
||||
res = _lib.EC_GROUP_get_order(group, order, bn_ctx)
|
||||
backend.openssl_assert(res == 1)
|
||||
|
||||
order_bits = _lib.BN_num_bits(order)
|
||||
|
||||
return _truncate_digest(digest, order_bits)
|
||||
|
||||
|
||||
def _ec_key_curve_sn(backend, ec_key):
|
||||
group = backend._lib.EC_KEY_get0_group(ec_key)
|
||||
backend.openssl_assert(group != backend._ffi.NULL)
|
||||
|
||||
nid = backend._lib.EC_GROUP_get_curve_name(group)
|
||||
# The following check is to find EC keys with unnamed curves and raise
|
||||
# an error for now.
|
||||
if nid == backend._lib.NID_undef:
|
||||
raise NotImplementedError(
|
||||
"ECDSA certificates with unnamed curves are unsupported "
|
||||
"at this time"
|
||||
)
|
||||
|
||||
curve_name = backend._lib.OBJ_nid2sn(nid)
|
||||
backend.openssl_assert(curve_name != backend._ffi.NULL)
|
||||
|
||||
sn = backend._ffi.string(curve_name).decode('ascii')
|
||||
return sn
|
||||
|
||||
|
||||
def _mark_asn1_named_ec_curve(backend, ec_cdata):
|
||||
"""
|
||||
Set the named curve flag on the EC_KEY. This causes OpenSSL to
|
||||
serialize EC keys along with their curve OID which makes
|
||||
deserialization easier.
|
||||
"""
|
||||
|
||||
backend._lib.EC_KEY_set_asn1_flag(
|
||||
ec_cdata, backend._lib.OPENSSL_EC_NAMED_CURVE
|
||||
)
|
||||
|
||||
|
||||
def _sn_to_elliptic_curve(backend, sn):
|
||||
try:
|
||||
return ec._CURVE_TYPES[sn]()
|
||||
except KeyError:
|
||||
raise UnsupportedAlgorithm(
|
||||
"{0} is not a supported elliptic curve".format(sn),
|
||||
_Reasons.UNSUPPORTED_ELLIPTIC_CURVE
|
||||
)
|
||||
|
||||
|
||||
@utils.register_interface(AsymmetricSignatureContext)
|
||||
class _ECDSASignatureContext(object):
|
||||
def __init__(self, backend, private_key, algorithm):
|
||||
self._backend = backend
|
||||
self._private_key = private_key
|
||||
self._digest = hashes.Hash(algorithm, backend)
|
||||
|
||||
def update(self, data):
|
||||
self._digest.update(data)
|
||||
|
||||
def finalize(self):
|
||||
ec_key = self._private_key._ec_key
|
||||
|
||||
digest = self._digest.finalize()
|
||||
|
||||
digest = _truncate_digest_for_ecdsa(ec_key, digest, self._backend)
|
||||
|
||||
max_size = self._backend._lib.ECDSA_size(ec_key)
|
||||
self._backend.openssl_assert(max_size > 0)
|
||||
|
||||
sigbuf = self._backend._ffi.new("char[]", max_size)
|
||||
siglen_ptr = self._backend._ffi.new("unsigned int[]", 1)
|
||||
res = self._backend._lib.ECDSA_sign(
|
||||
0,
|
||||
digest,
|
||||
len(digest),
|
||||
sigbuf,
|
||||
siglen_ptr,
|
||||
ec_key
|
||||
)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
return self._backend._ffi.buffer(sigbuf)[:siglen_ptr[0]]
|
||||
|
||||
|
||||
@utils.register_interface(AsymmetricVerificationContext)
|
||||
class _ECDSAVerificationContext(object):
|
||||
def __init__(self, backend, public_key, signature, algorithm):
|
||||
self._backend = backend
|
||||
self._public_key = public_key
|
||||
self._signature = signature
|
||||
self._digest = hashes.Hash(algorithm, backend)
|
||||
|
||||
def update(self, data):
|
||||
self._digest.update(data)
|
||||
|
||||
def verify(self):
|
||||
ec_key = self._public_key._ec_key
|
||||
|
||||
digest = self._digest.finalize()
|
||||
|
||||
digest = _truncate_digest_for_ecdsa(ec_key, digest, self._backend)
|
||||
|
||||
res = self._backend._lib.ECDSA_verify(
|
||||
0,
|
||||
digest,
|
||||
len(digest),
|
||||
self._signature,
|
||||
len(self._signature),
|
||||
ec_key
|
||||
)
|
||||
if res != 1:
|
||||
self._backend._consume_errors()
|
||||
raise InvalidSignature
|
||||
return True
|
||||
|
||||
|
||||
@utils.register_interface(ec.EllipticCurvePrivateKeyWithSerialization)
|
||||
class _EllipticCurvePrivateKey(object):
|
||||
def __init__(self, backend, ec_key_cdata, evp_pkey):
|
||||
self._backend = backend
|
||||
_mark_asn1_named_ec_curve(backend, ec_key_cdata)
|
||||
self._ec_key = ec_key_cdata
|
||||
self._evp_pkey = evp_pkey
|
||||
|
||||
sn = _ec_key_curve_sn(backend, ec_key_cdata)
|
||||
self._curve = _sn_to_elliptic_curve(backend, sn)
|
||||
|
||||
curve = utils.read_only_property("_curve")
|
||||
|
||||
def signer(self, signature_algorithm):
|
||||
if isinstance(signature_algorithm, ec.ECDSA):
|
||||
return _ECDSASignatureContext(
|
||||
self._backend, self, signature_algorithm.algorithm
|
||||
)
|
||||
else:
|
||||
raise UnsupportedAlgorithm(
|
||||
"Unsupported elliptic curve signature algorithm.",
|
||||
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
||||
|
||||
def exchange(self, algorithm, peer_public_key):
|
||||
if not (
|
||||
self._backend.elliptic_curve_exchange_algorithm_supported(
|
||||
algorithm, self.curve
|
||||
)
|
||||
):
|
||||
raise UnsupportedAlgorithm(
|
||||
"This backend does not support the ECDH algorithm.",
|
||||
_Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM
|
||||
)
|
||||
|
||||
group = self._backend._lib.EC_KEY_get0_group(self._ec_key)
|
||||
z_len = (self._backend._lib.EC_GROUP_get_degree(group) + 7) // 8
|
||||
self._backend.openssl_assert(z_len > 0)
|
||||
z_buf = self._backend._ffi.new("uint8_t[]", z_len)
|
||||
peer_key = self._backend._lib.EC_KEY_get0_public_key(
|
||||
peer_public_key._ec_key
|
||||
)
|
||||
|
||||
r = self._backend._lib.ECDH_compute_key(
|
||||
z_buf, z_len, peer_key, self._ec_key, self._backend._ffi.NULL
|
||||
)
|
||||
self._backend.openssl_assert(r > 0)
|
||||
return self._backend._ffi.buffer(z_buf)[:z_len]
|
||||
|
||||
def public_key(self):
|
||||
group = self._backend._lib.EC_KEY_get0_group(self._ec_key)
|
||||
self._backend.openssl_assert(group != self._backend._ffi.NULL)
|
||||
|
||||
curve_nid = self._backend._lib.EC_GROUP_get_curve_name(group)
|
||||
|
||||
public_ec_key = self._backend._lib.EC_KEY_new_by_curve_name(curve_nid)
|
||||
self._backend.openssl_assert(public_ec_key != self._backend._ffi.NULL)
|
||||
public_ec_key = self._backend._ffi.gc(
|
||||
public_ec_key, self._backend._lib.EC_KEY_free
|
||||
)
|
||||
|
||||
point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key)
|
||||
self._backend.openssl_assert(point != self._backend._ffi.NULL)
|
||||
|
||||
res = self._backend._lib.EC_KEY_set_public_key(public_ec_key, point)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
|
||||
evp_pkey = self._backend._ec_cdata_to_evp_pkey(public_ec_key)
|
||||
|
||||
return _EllipticCurvePublicKey(self._backend, public_ec_key, evp_pkey)
|
||||
|
||||
def private_numbers(self):
|
||||
bn = self._backend._lib.EC_KEY_get0_private_key(self._ec_key)
|
||||
private_value = self._backend._bn_to_int(bn)
|
||||
return ec.EllipticCurvePrivateNumbers(
|
||||
private_value=private_value,
|
||||
public_numbers=self.public_key().public_numbers()
|
||||
)
|
||||
|
||||
def private_bytes(self, encoding, format, encryption_algorithm):
|
||||
return self._backend._private_key_bytes(
|
||||
encoding,
|
||||
format,
|
||||
encryption_algorithm,
|
||||
self._evp_pkey,
|
||||
self._ec_key
|
||||
)
|
||||
|
||||
|
||||
@utils.register_interface(ec.EllipticCurvePublicKeyWithSerialization)
|
||||
class _EllipticCurvePublicKey(object):
|
||||
def __init__(self, backend, ec_key_cdata, evp_pkey):
|
||||
self._backend = backend
|
||||
_mark_asn1_named_ec_curve(backend, ec_key_cdata)
|
||||
self._ec_key = ec_key_cdata
|
||||
self._evp_pkey = evp_pkey
|
||||
|
||||
sn = _ec_key_curve_sn(backend, ec_key_cdata)
|
||||
self._curve = _sn_to_elliptic_curve(backend, sn)
|
||||
|
||||
curve = utils.read_only_property("_curve")
|
||||
|
||||
def verifier(self, signature, signature_algorithm):
|
||||
if not isinstance(signature, bytes):
|
||||
raise TypeError("signature must be bytes.")
|
||||
|
||||
if isinstance(signature_algorithm, ec.ECDSA):
|
||||
return _ECDSAVerificationContext(
|
||||
self._backend, self, signature, signature_algorithm.algorithm
|
||||
)
|
||||
else:
|
||||
raise UnsupportedAlgorithm(
|
||||
"Unsupported elliptic curve signature algorithm.",
|
||||
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
||||
|
||||
def public_numbers(self):
|
||||
set_func, get_func, group = (
|
||||
self._backend._ec_key_determine_group_get_set_funcs(self._ec_key)
|
||||
)
|
||||
point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key)
|
||||
self._backend.openssl_assert(point != self._backend._ffi.NULL)
|
||||
|
||||
with self._backend._tmp_bn_ctx() as bn_ctx:
|
||||
bn_x = self._backend._lib.BN_CTX_get(bn_ctx)
|
||||
bn_y = self._backend._lib.BN_CTX_get(bn_ctx)
|
||||
|
||||
res = get_func(group, point, bn_x, bn_y, bn_ctx)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
|
||||
x = self._backend._bn_to_int(bn_x)
|
||||
y = self._backend._bn_to_int(bn_y)
|
||||
|
||||
return ec.EllipticCurvePublicNumbers(
|
||||
x=x,
|
||||
y=y,
|
||||
curve=self._curve
|
||||
)
|
||||
|
||||
def public_bytes(self, encoding, format):
|
||||
if format is serialization.PublicFormat.PKCS1:
|
||||
raise ValueError(
|
||||
"EC public keys do not support PKCS1 serialization"
|
||||
)
|
||||
|
||||
return self._backend._public_key_bytes(
|
||||
encoding,
|
||||
format,
|
||||
self._evp_pkey,
|
||||
None
|
||||
)
|
||||
|
|
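# --- Illustrative usage sketch (editor's note, not part of this commit) ------
# The exchange() method above implements ECDH; key agreement between two
# freshly generated P-256 keys looks roughly like this:
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import ec

backend = default_backend()
private_key = ec.generate_private_key(ec.SECP256R1(), backend)
peer_private_key = ec.generate_private_key(ec.SECP256R1(), backend)

shared_secret = private_key.exchange(ec.ECDH(), peer_private_key.public_key())
peer_secret = peer_private_key.exchange(ec.ECDH(), private_key.public_key())
assert shared_secret == peer_secret  # both sides derive the same Z value
# ------------------------------------------------------------------------------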
@@ -0,0 +1,62 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

from __future__ import absolute_import, division, print_function


from cryptography import utils
from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
from cryptography.hazmat.primitives import hashes


@utils.register_interface(hashes.HashContext)
class _HashContext(object):
    def __init__(self, backend, algorithm, ctx=None):
        self._algorithm = algorithm

        self._backend = backend

        if ctx is None:
            ctx = self._backend._lib.EVP_MD_CTX_create()
            ctx = self._backend._ffi.gc(ctx,
                                        self._backend._lib.EVP_MD_CTX_destroy)
            evp_md = self._backend._lib.EVP_get_digestbyname(
                algorithm.name.encode("ascii"))
            if evp_md == self._backend._ffi.NULL:
                raise UnsupportedAlgorithm(
                    "{0} is not a supported hash on this backend.".format(
                        algorithm.name),
                    _Reasons.UNSUPPORTED_HASH
                )
            res = self._backend._lib.EVP_DigestInit_ex(ctx, evp_md,
                                                       self._backend._ffi.NULL)
            self._backend.openssl_assert(res != 0)

        self._ctx = ctx

    algorithm = utils.read_only_property("_algorithm")

    def copy(self):
        copied_ctx = self._backend._lib.EVP_MD_CTX_create()
        copied_ctx = self._backend._ffi.gc(
            copied_ctx, self._backend._lib.EVP_MD_CTX_destroy
        )
        res = self._backend._lib.EVP_MD_CTX_copy_ex(copied_ctx, self._ctx)
        self._backend.openssl_assert(res != 0)
        return _HashContext(self._backend, self.algorithm, ctx=copied_ctx)

    def update(self, data):
        res = self._backend._lib.EVP_DigestUpdate(self._ctx, data, len(data))
        self._backend.openssl_assert(res != 0)

    def finalize(self):
        buf = self._backend._ffi.new("unsigned char[]",
                                     self._backend._lib.EVP_MAX_MD_SIZE)
        outlen = self._backend._ffi.new("unsigned int *")
        res = self._backend._lib.EVP_DigestFinal_ex(self._ctx, buf, outlen)
        self._backend.openssl_assert(res != 0)
        self._backend.openssl_assert(outlen[0] == self.algorithm.digest_size)
        res = self._backend._lib.EVP_MD_CTX_cleanup(self._ctx)
        self._backend.openssl_assert(res == 1)
        return self._backend._ffi.buffer(buf)[:outlen[0]]
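# --- Illustrative usage sketch (editor's note, not part of this commit) ------
# The _HashContext above backs hashes.Hash; computing a SHA-256 digest through
# the public API:
import binascii

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes

digest = hashes.Hash(hashes.SHA256(), default_backend())
digest.update(b"abc")
print(binascii.hexlify(digest.finalize()))
# b'ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad'
# ------------------------------------------------------------------------------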
@@ -0,0 +1,81 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.exceptions import (
|
||||
InvalidSignature, UnsupportedAlgorithm, _Reasons
|
||||
)
|
||||
from cryptography.hazmat.primitives import constant_time, hashes, interfaces
|
||||
|
||||
|
||||
@utils.register_interface(interfaces.MACContext)
|
||||
@utils.register_interface(hashes.HashContext)
|
||||
class _HMACContext(object):
|
||||
def __init__(self, backend, key, algorithm, ctx=None):
|
||||
self._algorithm = algorithm
|
||||
self._backend = backend
|
||||
|
||||
if ctx is None:
|
||||
ctx = self._backend._ffi.new("HMAC_CTX *")
|
||||
self._backend._lib.HMAC_CTX_init(ctx)
|
||||
ctx = self._backend._ffi.gc(
|
||||
ctx, self._backend._lib.HMAC_CTX_cleanup
|
||||
)
|
||||
evp_md = self._backend._lib.EVP_get_digestbyname(
|
||||
algorithm.name.encode('ascii'))
|
||||
if evp_md == self._backend._ffi.NULL:
|
||||
raise UnsupportedAlgorithm(
|
||||
"{0} is not a supported hash on this backend.".format(
|
||||
algorithm.name),
|
||||
_Reasons.UNSUPPORTED_HASH
|
||||
)
|
||||
res = self._backend._lib.Cryptography_HMAC_Init_ex(
|
||||
ctx, key, len(key), evp_md, self._backend._ffi.NULL
|
||||
)
|
||||
self._backend.openssl_assert(res != 0)
|
||||
|
||||
self._ctx = ctx
|
||||
self._key = key
|
||||
|
||||
algorithm = utils.read_only_property("_algorithm")
|
||||
|
||||
def copy(self):
|
||||
copied_ctx = self._backend._ffi.new("HMAC_CTX *")
|
||||
self._backend._lib.HMAC_CTX_init(copied_ctx)
|
||||
copied_ctx = self._backend._ffi.gc(
|
||||
copied_ctx, self._backend._lib.HMAC_CTX_cleanup
|
||||
)
|
||||
res = self._backend._lib.Cryptography_HMAC_CTX_copy(
|
||||
copied_ctx, self._ctx
|
||||
)
|
||||
self._backend.openssl_assert(res != 0)
|
||||
return _HMACContext(
|
||||
self._backend, self._key, self.algorithm, ctx=copied_ctx
|
||||
)
|
||||
|
||||
def update(self, data):
|
||||
res = self._backend._lib.Cryptography_HMAC_Update(
|
||||
self._ctx, data, len(data)
|
||||
)
|
||||
self._backend.openssl_assert(res != 0)
|
||||
|
||||
def finalize(self):
|
||||
buf = self._backend._ffi.new("unsigned char[]",
|
||||
self._backend._lib.EVP_MAX_MD_SIZE)
|
||||
outlen = self._backend._ffi.new("unsigned int *")
|
||||
res = self._backend._lib.Cryptography_HMAC_Final(
|
||||
self._ctx, buf, outlen
|
||||
)
|
||||
self._backend.openssl_assert(res != 0)
|
||||
self._backend.openssl_assert(outlen[0] == self.algorithm.digest_size)
|
||||
self._backend._lib.HMAC_CTX_cleanup(self._ctx)
|
||||
return self._backend._ffi.buffer(buf)[:outlen[0]]
|
||||
|
||||
def verify(self, signature):
|
||||
digest = self.finalize()
|
||||
if not constant_time.bytes_eq(digest, signature):
|
||||
raise InvalidSignature("Signature did not match digest.")
|
||||
|
|
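# --- Illustrative usage sketch (editor's note, not part of this commit) ------
# The _HMACContext above backs hmac.HMAC; tagging and verifying a message (the
# key is a random placeholder):
import os

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, hmac

key = os.urandom(32)

h = hmac.HMAC(key, hashes.SHA256(), default_backend())
h.update(b"message to authenticate")
tag = h.finalize()

verifier = hmac.HMAC(key, hashes.SHA256(), default_backend())
verifier.update(b"message to authenticate")
verifier.verify(tag)  # constant-time compare; raises InvalidSignature on mismatch
# ------------------------------------------------------------------------------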
@@ -0,0 +1,604 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import math
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.exceptions import (
|
||||
AlreadyFinalized, InvalidSignature, UnsupportedAlgorithm, _Reasons
|
||||
)
|
||||
from cryptography.hazmat.primitives import hashes
|
||||
from cryptography.hazmat.primitives.asymmetric import (
|
||||
AsymmetricSignatureContext, AsymmetricVerificationContext, rsa
|
||||
)
|
||||
from cryptography.hazmat.primitives.asymmetric.padding import (
|
||||
AsymmetricPadding, MGF1, OAEP, PKCS1v15, PSS
|
||||
)
|
||||
from cryptography.hazmat.primitives.asymmetric.rsa import (
|
||||
RSAPrivateKeyWithSerialization, RSAPublicKeyWithSerialization
|
||||
)
|
||||
|
||||
|
||||
def _get_rsa_pss_salt_length(pss, key_size, digest_size):
|
||||
salt = pss._salt_length
|
||||
|
||||
if salt is MGF1.MAX_LENGTH or salt is PSS.MAX_LENGTH:
|
||||
# bit length - 1 per RFC 3447
|
||||
emlen = int(math.ceil((key_size - 1) / 8.0))
|
||||
salt_length = emlen - digest_size - 2
|
||||
assert salt_length >= 0
|
||||
return salt_length
|
||||
else:
|
||||
return salt
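# Worked example (editor's note, not part of the original module): for a
# 2048-bit key and a 32-byte (SHA-256) digest with PSS.MAX_LENGTH,
#   emlen       = ceil((2048 - 1) / 8) = 256
#   salt_length = 256 - 32 - 2 = 222 bytes.
# A fixed integer salt length is simply returned unchanged.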
|
||||
|
||||
|
||||
def _enc_dec_rsa(backend, key, data, padding):
|
||||
if not isinstance(padding, AsymmetricPadding):
|
||||
raise TypeError("Padding must be an instance of AsymmetricPadding.")
|
||||
|
||||
if isinstance(padding, PKCS1v15):
|
||||
padding_enum = backend._lib.RSA_PKCS1_PADDING
|
||||
elif isinstance(padding, OAEP):
|
||||
padding_enum = backend._lib.RSA_PKCS1_OAEP_PADDING
|
||||
if not isinstance(padding._mgf, MGF1):
|
||||
raise UnsupportedAlgorithm(
|
||||
"Only MGF1 is supported by this backend.",
|
||||
_Reasons.UNSUPPORTED_MGF
|
||||
)
|
||||
|
||||
if not isinstance(padding._mgf._algorithm, hashes.SHA1):
|
||||
raise UnsupportedAlgorithm(
|
||||
"This backend supports only SHA1 inside MGF1 when "
|
||||
"using OAEP.",
|
||||
_Reasons.UNSUPPORTED_HASH
|
||||
)
|
||||
|
||||
if padding._label is not None and padding._label != b"":
|
||||
raise ValueError("This backend does not support OAEP labels.")
|
||||
|
||||
if not isinstance(padding._algorithm, hashes.SHA1):
|
||||
raise UnsupportedAlgorithm(
|
||||
"This backend only supports SHA1 when using OAEP.",
|
||||
_Reasons.UNSUPPORTED_HASH
|
||||
)
|
||||
else:
|
||||
raise UnsupportedAlgorithm(
|
||||
"{0} is not supported by this backend.".format(
|
||||
padding.name
|
||||
),
|
||||
_Reasons.UNSUPPORTED_PADDING
|
||||
)
|
||||
|
||||
if backend._lib.Cryptography_HAS_PKEY_CTX:
|
||||
return _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum)
|
||||
else:
|
||||
return _enc_dec_rsa_098(backend, key, data, padding_enum)
|
||||
|
||||
|
||||
def _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum):
|
||||
if isinstance(key, _RSAPublicKey):
|
||||
init = backend._lib.EVP_PKEY_encrypt_init
|
||||
crypt = backend._lib.Cryptography_EVP_PKEY_encrypt
|
||||
else:
|
||||
init = backend._lib.EVP_PKEY_decrypt_init
|
||||
crypt = backend._lib.Cryptography_EVP_PKEY_decrypt
|
||||
|
||||
pkey_ctx = backend._lib.EVP_PKEY_CTX_new(
|
||||
key._evp_pkey, backend._ffi.NULL
|
||||
)
|
||||
backend.openssl_assert(pkey_ctx != backend._ffi.NULL)
|
||||
pkey_ctx = backend._ffi.gc(pkey_ctx, backend._lib.EVP_PKEY_CTX_free)
|
||||
res = init(pkey_ctx)
|
||||
backend.openssl_assert(res == 1)
|
||||
res = backend._lib.EVP_PKEY_CTX_set_rsa_padding(
|
||||
pkey_ctx, padding_enum)
|
||||
backend.openssl_assert(res > 0)
|
||||
buf_size = backend._lib.EVP_PKEY_size(key._evp_pkey)
|
||||
backend.openssl_assert(buf_size > 0)
|
||||
outlen = backend._ffi.new("size_t *", buf_size)
|
||||
buf = backend._ffi.new("char[]", buf_size)
|
||||
res = crypt(pkey_ctx, buf, outlen, data, len(data))
|
||||
if res <= 0:
|
||||
_handle_rsa_enc_dec_error(backend, key)
|
||||
|
||||
return backend._ffi.buffer(buf)[:outlen[0]]
|
||||
|
||||
|
||||
def _enc_dec_rsa_098(backend, key, data, padding_enum):
|
||||
if isinstance(key, _RSAPublicKey):
|
||||
crypt = backend._lib.RSA_public_encrypt
|
||||
else:
|
||||
crypt = backend._lib.RSA_private_decrypt
|
||||
|
||||
key_size = backend._lib.RSA_size(key._rsa_cdata)
|
||||
backend.openssl_assert(key_size > 0)
|
||||
buf = backend._ffi.new("unsigned char[]", key_size)
|
||||
res = crypt(len(data), data, buf, key._rsa_cdata, padding_enum)
|
||||
if res < 0:
|
||||
_handle_rsa_enc_dec_error(backend, key)
|
||||
|
||||
return backend._ffi.buffer(buf)[:res]
|
||||
|
||||
|
||||
def _handle_rsa_enc_dec_error(backend, key):
|
||||
errors = backend._consume_errors()
|
||||
assert errors
|
||||
assert errors[0].lib == backend._lib.ERR_LIB_RSA
|
||||
if isinstance(key, _RSAPublicKey):
|
||||
assert (errors[0].reason ==
|
||||
backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE)
|
||||
raise ValueError(
|
||||
"Data too long for key size. Encrypt less data or use a "
|
||||
"larger key size."
|
||||
)
|
||||
else:
|
||||
decoding_errors = [
|
||||
backend._lib.RSA_R_BLOCK_TYPE_IS_NOT_01,
|
||||
backend._lib.RSA_R_BLOCK_TYPE_IS_NOT_02,
|
||||
]
|
||||
if backend._lib.Cryptography_HAS_RSA_R_PKCS_DECODING_ERROR:
|
||||
decoding_errors.append(backend._lib.RSA_R_PKCS_DECODING_ERROR)
|
||||
|
||||
assert errors[0].reason in decoding_errors
|
||||
raise ValueError("Decryption failed.")
|
||||
|
||||
|
||||
@utils.register_interface(AsymmetricSignatureContext)
|
||||
class _RSASignatureContext(object):
|
||||
def __init__(self, backend, private_key, padding, algorithm):
|
||||
self._backend = backend
|
||||
self._private_key = private_key
|
||||
|
||||
if not isinstance(padding, AsymmetricPadding):
|
||||
raise TypeError("Expected provider of AsymmetricPadding.")
|
||||
|
||||
self._pkey_size = self._backend._lib.EVP_PKEY_size(
|
||||
self._private_key._evp_pkey
|
||||
)
|
||||
self._backend.openssl_assert(self._pkey_size > 0)
|
||||
|
||||
if isinstance(padding, PKCS1v15):
|
||||
if self._backend._lib.Cryptography_HAS_PKEY_CTX:
|
||||
self._finalize_method = self._finalize_pkey_ctx
|
||||
self._padding_enum = self._backend._lib.RSA_PKCS1_PADDING
|
||||
else:
|
||||
self._finalize_method = self._finalize_pkcs1
|
||||
elif isinstance(padding, PSS):
|
||||
if not isinstance(padding._mgf, MGF1):
|
||||
raise UnsupportedAlgorithm(
|
||||
"Only MGF1 is supported by this backend.",
|
||||
_Reasons.UNSUPPORTED_MGF
|
||||
)
|
||||
|
||||
# Size of key in bytes - 2 is the maximum
|
||||
# PSS signature length (salt length is checked later)
|
||||
if self._pkey_size - algorithm.digest_size - 2 < 0:
|
||||
raise ValueError("Digest too large for key size. Use a larger "
|
||||
"key.")
|
||||
|
||||
if not self._backend._mgf1_hash_supported(padding._mgf._algorithm):
|
||||
raise UnsupportedAlgorithm(
|
||||
"When OpenSSL is older than 1.0.1 then only SHA1 is "
|
||||
"supported with MGF1.",
|
||||
_Reasons.UNSUPPORTED_HASH
|
||||
)
|
||||
|
||||
if self._backend._lib.Cryptography_HAS_PKEY_CTX:
|
||||
self._finalize_method = self._finalize_pkey_ctx
|
||||
self._padding_enum = self._backend._lib.RSA_PKCS1_PSS_PADDING
|
||||
else:
|
||||
self._finalize_method = self._finalize_pss
|
||||
else:
|
||||
raise UnsupportedAlgorithm(
|
||||
"{0} is not supported by this backend.".format(padding.name),
|
||||
_Reasons.UNSUPPORTED_PADDING
|
||||
)
|
||||
|
||||
self._padding = padding
|
||||
self._algorithm = algorithm
|
||||
self._hash_ctx = hashes.Hash(self._algorithm, self._backend)
|
||||
|
||||
def update(self, data):
|
||||
self._hash_ctx.update(data)
|
||||
|
||||
def finalize(self):
|
||||
evp_md = self._backend._lib.EVP_get_digestbyname(
|
||||
self._algorithm.name.encode("ascii"))
|
||||
self._backend.openssl_assert(evp_md != self._backend._ffi.NULL)
|
||||
|
||||
return self._finalize_method(evp_md)
|
||||
|
||||
def _finalize_pkey_ctx(self, evp_md):
|
||||
pkey_ctx = self._backend._lib.EVP_PKEY_CTX_new(
|
||||
self._private_key._evp_pkey, self._backend._ffi.NULL
|
||||
)
|
||||
self._backend.openssl_assert(pkey_ctx != self._backend._ffi.NULL)
|
||||
pkey_ctx = self._backend._ffi.gc(pkey_ctx,
|
||||
self._backend._lib.EVP_PKEY_CTX_free)
|
||||
res = self._backend._lib.EVP_PKEY_sign_init(pkey_ctx)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
res = self._backend._lib.EVP_PKEY_CTX_set_signature_md(
|
||||
pkey_ctx, evp_md)
|
||||
self._backend.openssl_assert(res > 0)
|
||||
|
||||
res = self._backend._lib.EVP_PKEY_CTX_set_rsa_padding(
|
||||
pkey_ctx, self._padding_enum)
|
||||
self._backend.openssl_assert(res > 0)
|
||||
if isinstance(self._padding, PSS):
|
||||
res = self._backend._lib.EVP_PKEY_CTX_set_rsa_pss_saltlen(
|
||||
pkey_ctx,
|
||||
_get_rsa_pss_salt_length(
|
||||
self._padding,
|
||||
self._private_key.key_size,
|
||||
self._hash_ctx.algorithm.digest_size
|
||||
)
|
||||
)
|
||||
self._backend.openssl_assert(res > 0)
|
||||
|
||||
if self._backend._lib.Cryptography_HAS_MGF1_MD:
|
||||
# MGF1 MD is configurable in OpenSSL 1.0.1+
|
||||
mgf1_md = self._backend._lib.EVP_get_digestbyname(
|
||||
self._padding._mgf._algorithm.name.encode("ascii"))
|
||||
self._backend.openssl_assert(
|
||||
mgf1_md != self._backend._ffi.NULL
|
||||
)
|
||||
res = self._backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md(
|
||||
pkey_ctx, mgf1_md
|
||||
)
|
||||
self._backend.openssl_assert(res > 0)
|
||||
data_to_sign = self._hash_ctx.finalize()
|
||||
buflen = self._backend._ffi.new("size_t *")
|
||||
res = self._backend._lib.EVP_PKEY_sign(
|
||||
pkey_ctx,
|
||||
self._backend._ffi.NULL,
|
||||
buflen,
|
||||
data_to_sign,
|
||||
len(data_to_sign)
|
||||
)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
buf = self._backend._ffi.new("unsigned char[]", buflen[0])
|
||||
res = self._backend._lib.EVP_PKEY_sign(
|
||||
pkey_ctx, buf, buflen, data_to_sign, len(data_to_sign))
|
||||
if res != 1:
|
||||
errors = self._backend._consume_errors()
|
||||
assert errors[0].lib == self._backend._lib.ERR_LIB_RSA
|
||||
reason = None
|
||||
if (errors[0].reason ==
|
||||
self._backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE):
|
||||
reason = ("Salt length too long for key size. Try using "
|
||||
"MAX_LENGTH instead.")
|
||||
else:
|
||||
assert (errors[0].reason ==
|
||||
self._backend._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY)
|
||||
reason = "Digest too large for key size. Use a larger key."
|
||||
assert reason is not None
|
||||
raise ValueError(reason)
|
||||
|
||||
return self._backend._ffi.buffer(buf)[:]
|
||||
|
||||
def _finalize_pkcs1(self, evp_md):
|
||||
if self._hash_ctx._ctx is None:
|
||||
raise AlreadyFinalized("Context has already been finalized.")
|
||||
|
||||
sig_buf = self._backend._ffi.new("char[]", self._pkey_size)
|
||||
sig_len = self._backend._ffi.new("unsigned int *")
|
||||
res = self._backend._lib.EVP_SignFinal(
|
||||
self._hash_ctx._ctx._ctx,
|
||||
sig_buf,
|
||||
sig_len,
|
||||
self._private_key._evp_pkey
|
||||
)
|
||||
self._hash_ctx.finalize()
|
||||
if res == 0:
|
||||
errors = self._backend._consume_errors()
|
||||
assert errors[0].lib == self._backend._lib.ERR_LIB_RSA
|
||||
assert (errors[0].reason ==
|
||||
self._backend._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY)
|
||||
raise ValueError("Digest too large for key size. Use a larger "
|
||||
"key.")
|
||||
|
||||
return self._backend._ffi.buffer(sig_buf)[:sig_len[0]]
|
||||
|
||||
def _finalize_pss(self, evp_md):
|
||||
data_to_sign = self._hash_ctx.finalize()
|
||||
padded = self._backend._ffi.new("unsigned char[]", self._pkey_size)
|
||||
res = self._backend._lib.RSA_padding_add_PKCS1_PSS(
|
||||
self._private_key._rsa_cdata,
|
||||
padded,
|
||||
data_to_sign,
|
||||
evp_md,
|
||||
_get_rsa_pss_salt_length(
|
||||
self._padding,
|
||||
self._private_key.key_size,
|
||||
len(data_to_sign)
|
||||
)
|
||||
)
|
||||
if res != 1:
|
||||
errors = self._backend._consume_errors()
|
||||
assert errors[0].lib == self._backend._lib.ERR_LIB_RSA
|
||||
assert (errors[0].reason ==
|
||||
self._backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE)
|
||||
raise ValueError("Salt length too long for key size. Try using "
|
||||
"MAX_LENGTH instead.")
|
||||
|
||||
sig_buf = self._backend._ffi.new("char[]", self._pkey_size)
|
||||
sig_len = self._backend._lib.RSA_private_encrypt(
|
||||
self._pkey_size,
|
||||
padded,
|
||||
sig_buf,
|
||||
self._private_key._rsa_cdata,
|
||||
self._backend._lib.RSA_NO_PADDING
|
||||
)
|
||||
self._backend.openssl_assert(sig_len != -1)
|
||||
return self._backend._ffi.buffer(sig_buf)[:sig_len]
|
||||
|
||||
|
||||
@utils.register_interface(AsymmetricVerificationContext)
|
||||
class _RSAVerificationContext(object):
|
||||
def __init__(self, backend, public_key, signature, padding, algorithm):
|
||||
self._backend = backend
|
||||
self._public_key = public_key
|
||||
self._signature = signature
|
||||
|
||||
if not isinstance(padding, AsymmetricPadding):
|
||||
raise TypeError("Expected provider of AsymmetricPadding.")
|
||||
|
||||
self._pkey_size = self._backend._lib.EVP_PKEY_size(
|
||||
self._public_key._evp_pkey
|
||||
)
|
||||
self._backend.openssl_assert(self._pkey_size > 0)
|
||||
|
||||
if isinstance(padding, PKCS1v15):
|
||||
if self._backend._lib.Cryptography_HAS_PKEY_CTX:
|
||||
self._verify_method = self._verify_pkey_ctx
|
||||
self._padding_enum = self._backend._lib.RSA_PKCS1_PADDING
|
||||
else:
|
||||
self._verify_method = self._verify_pkcs1
|
||||
elif isinstance(padding, PSS):
|
||||
if not isinstance(padding._mgf, MGF1):
|
||||
raise UnsupportedAlgorithm(
|
||||
"Only MGF1 is supported by this backend.",
|
||||
_Reasons.UNSUPPORTED_MGF
|
||||
)
|
||||
|
||||
# Size of key in bytes - 2 is the maximum
|
||||
# PSS signature length (salt length is checked later)
|
||||
if self._pkey_size - algorithm.digest_size - 2 < 0:
|
||||
raise ValueError(
|
||||
"Digest too large for key size. Check that you have the "
|
||||
"correct key and digest algorithm."
|
||||
)
|
||||
|
||||
if not self._backend._mgf1_hash_supported(padding._mgf._algorithm):
|
||||
raise UnsupportedAlgorithm(
|
||||
"When OpenSSL is older than 1.0.1 then only SHA1 is "
|
||||
"supported with MGF1.",
|
||||
_Reasons.UNSUPPORTED_HASH
|
||||
)
|
||||
|
||||
if self._backend._lib.Cryptography_HAS_PKEY_CTX:
|
||||
self._verify_method = self._verify_pkey_ctx
|
||||
self._padding_enum = self._backend._lib.RSA_PKCS1_PSS_PADDING
|
||||
else:
|
||||
self._verify_method = self._verify_pss
|
||||
else:
|
||||
raise UnsupportedAlgorithm(
|
||||
"{0} is not supported by this backend.".format(padding.name),
|
||||
_Reasons.UNSUPPORTED_PADDING
|
||||
)
|
||||
|
||||
self._padding = padding
|
||||
self._algorithm = algorithm
|
||||
self._hash_ctx = hashes.Hash(self._algorithm, self._backend)
|
||||
|
||||
def update(self, data):
|
||||
self._hash_ctx.update(data)
|
||||
|
||||
def verify(self):
|
||||
evp_md = self._backend._lib.EVP_get_digestbyname(
|
||||
self._algorithm.name.encode("ascii"))
|
||||
self._backend.openssl_assert(evp_md != self._backend._ffi.NULL)
|
||||
|
||||
self._verify_method(evp_md)
|
||||
|
||||
def _verify_pkey_ctx(self, evp_md):
|
||||
pkey_ctx = self._backend._lib.EVP_PKEY_CTX_new(
|
||||
self._public_key._evp_pkey, self._backend._ffi.NULL
|
||||
)
|
||||
self._backend.openssl_assert(pkey_ctx != self._backend._ffi.NULL)
|
||||
pkey_ctx = self._backend._ffi.gc(pkey_ctx,
|
||||
self._backend._lib.EVP_PKEY_CTX_free)
|
||||
res = self._backend._lib.EVP_PKEY_verify_init(pkey_ctx)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
res = self._backend._lib.EVP_PKEY_CTX_set_signature_md(
|
||||
pkey_ctx, evp_md)
|
||||
self._backend.openssl_assert(res > 0)
|
||||
|
||||
res = self._backend._lib.EVP_PKEY_CTX_set_rsa_padding(
|
||||
pkey_ctx, self._padding_enum)
|
||||
self._backend.openssl_assert(res > 0)
|
||||
if isinstance(self._padding, PSS):
|
||||
res = self._backend._lib.EVP_PKEY_CTX_set_rsa_pss_saltlen(
|
||||
pkey_ctx,
|
||||
_get_rsa_pss_salt_length(
|
||||
self._padding,
|
||||
self._public_key.key_size,
|
||||
self._hash_ctx.algorithm.digest_size
|
||||
)
|
||||
)
|
||||
self._backend.openssl_assert(res > 0)
|
||||
if self._backend._lib.Cryptography_HAS_MGF1_MD:
|
||||
# MGF1 MD is configurable in OpenSSL 1.0.1+
|
||||
mgf1_md = self._backend._lib.EVP_get_digestbyname(
|
||||
self._padding._mgf._algorithm.name.encode("ascii"))
|
||||
self._backend.openssl_assert(
|
||||
mgf1_md != self._backend._ffi.NULL
|
||||
)
|
||||
res = self._backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md(
|
||||
pkey_ctx, mgf1_md
|
||||
)
|
||||
self._backend.openssl_assert(res > 0)
|
||||
|
||||
data_to_verify = self._hash_ctx.finalize()
|
||||
res = self._backend._lib.EVP_PKEY_verify(
|
||||
pkey_ctx,
|
||||
self._signature,
|
||||
len(self._signature),
|
||||
data_to_verify,
|
||||
len(data_to_verify)
|
||||
)
|
||||
# The previous call can return negative numbers in the event of an
|
||||
# error. This is not a signature failure but we need to fail if it
|
||||
# occurs.
|
||||
self._backend.openssl_assert(res >= 0)
|
||||
if res == 0:
|
||||
errors = self._backend._consume_errors()
|
||||
assert errors
|
||||
raise InvalidSignature
|
||||
|
||||
def _verify_pkcs1(self, evp_md):
|
||||
if self._hash_ctx._ctx is None:
|
||||
raise AlreadyFinalized("Context has already been finalized.")
|
||||
|
||||
res = self._backend._lib.EVP_VerifyFinal(
|
||||
self._hash_ctx._ctx._ctx,
|
||||
self._signature,
|
||||
len(self._signature),
|
||||
self._public_key._evp_pkey
|
||||
)
|
||||
self._hash_ctx.finalize()
|
||||
# The previous call can return negative numbers in the event of an
|
||||
# error. This is not a signature failure but we need to fail if it
|
||||
# occurs.
|
||||
self._backend.openssl_assert(res >= 0)
|
||||
if res == 0:
|
||||
errors = self._backend._consume_errors()
|
||||
assert errors
|
||||
raise InvalidSignature
|
||||
|
||||
def _verify_pss(self, evp_md):
|
||||
buf = self._backend._ffi.new("unsigned char[]", self._pkey_size)
|
||||
res = self._backend._lib.RSA_public_decrypt(
|
||||
len(self._signature),
|
||||
self._signature,
|
||||
buf,
|
||||
self._public_key._rsa_cdata,
|
||||
self._backend._lib.RSA_NO_PADDING
|
||||
)
|
||||
if res != self._pkey_size:
|
||||
errors = self._backend._consume_errors()
|
||||
assert errors
|
||||
raise InvalidSignature
|
||||
|
||||
data_to_verify = self._hash_ctx.finalize()
|
||||
res = self._backend._lib.RSA_verify_PKCS1_PSS(
|
||||
self._public_key._rsa_cdata,
|
||||
data_to_verify,
|
||||
evp_md,
|
||||
buf,
|
||||
_get_rsa_pss_salt_length(
|
||||
self._padding,
|
||||
self._public_key.key_size,
|
||||
len(data_to_verify)
|
||||
)
|
||||
)
|
||||
if res != 1:
|
||||
errors = self._backend._consume_errors()
|
||||
assert errors
|
||||
raise InvalidSignature
|
||||
|
||||
|
||||
@utils.register_interface(RSAPrivateKeyWithSerialization)
|
||||
class _RSAPrivateKey(object):
|
||||
def __init__(self, backend, rsa_cdata, evp_pkey):
|
||||
self._backend = backend
|
||||
self._rsa_cdata = rsa_cdata
|
||||
self._evp_pkey = evp_pkey
|
||||
|
||||
self._key_size = self._backend._lib.BN_num_bits(self._rsa_cdata.n)
|
||||
|
||||
key_size = utils.read_only_property("_key_size")
|
||||
|
||||
def signer(self, padding, algorithm):
|
||||
return _RSASignatureContext(self._backend, self, padding, algorithm)
|
||||
|
||||
def decrypt(self, ciphertext, padding):
|
||||
key_size_bytes = int(math.ceil(self.key_size / 8.0))
|
||||
if key_size_bytes != len(ciphertext):
|
||||
raise ValueError("Ciphertext length must be equal to key size.")
|
||||
|
||||
return _enc_dec_rsa(self._backend, self, ciphertext, padding)
|
||||
|
||||
def public_key(self):
|
||||
ctx = self._backend._lib.RSA_new()
|
||||
self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
|
||||
ctx = self._backend._ffi.gc(ctx, self._backend._lib.RSA_free)
|
||||
ctx.e = self._backend._lib.BN_dup(self._rsa_cdata.e)
|
||||
ctx.n = self._backend._lib.BN_dup(self._rsa_cdata.n)
|
||||
res = self._backend._lib.RSA_blinding_on(ctx, self._backend._ffi.NULL)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
evp_pkey = self._backend._rsa_cdata_to_evp_pkey(ctx)
|
||||
return _RSAPublicKey(self._backend, ctx, evp_pkey)
|
||||
|
||||
def private_numbers(self):
|
||||
return rsa.RSAPrivateNumbers(
|
||||
p=self._backend._bn_to_int(self._rsa_cdata.p),
|
||||
q=self._backend._bn_to_int(self._rsa_cdata.q),
|
||||
d=self._backend._bn_to_int(self._rsa_cdata.d),
|
||||
dmp1=self._backend._bn_to_int(self._rsa_cdata.dmp1),
|
||||
dmq1=self._backend._bn_to_int(self._rsa_cdata.dmq1),
|
||||
iqmp=self._backend._bn_to_int(self._rsa_cdata.iqmp),
|
||||
public_numbers=rsa.RSAPublicNumbers(
|
||||
e=self._backend._bn_to_int(self._rsa_cdata.e),
|
||||
n=self._backend._bn_to_int(self._rsa_cdata.n),
|
||||
)
|
||||
)
|
||||
|
||||
def private_bytes(self, encoding, format, encryption_algorithm):
|
||||
return self._backend._private_key_bytes(
|
||||
encoding,
|
||||
format,
|
||||
encryption_algorithm,
|
||||
self._evp_pkey,
|
||||
self._rsa_cdata
|
||||
)
|
||||
|
||||
|
||||
@utils.register_interface(RSAPublicKeyWithSerialization)
|
||||
class _RSAPublicKey(object):
|
||||
def __init__(self, backend, rsa_cdata, evp_pkey):
|
||||
self._backend = backend
|
||||
self._rsa_cdata = rsa_cdata
|
||||
self._evp_pkey = evp_pkey
|
||||
|
||||
self._key_size = self._backend._lib.BN_num_bits(self._rsa_cdata.n)
|
||||
|
||||
key_size = utils.read_only_property("_key_size")
|
||||
|
||||
def verifier(self, signature, padding, algorithm):
|
||||
if not isinstance(signature, bytes):
|
||||
raise TypeError("signature must be bytes.")
|
||||
|
||||
return _RSAVerificationContext(
|
||||
self._backend, self, signature, padding, algorithm
|
||||
)
|
||||
|
||||
def encrypt(self, plaintext, padding):
|
||||
return _enc_dec_rsa(self._backend, self, plaintext, padding)
|
||||
|
||||
def public_numbers(self):
|
||||
return rsa.RSAPublicNumbers(
|
||||
e=self._backend._bn_to_int(self._rsa_cdata.e),
|
||||
n=self._backend._bn_to_int(self._rsa_cdata.n),
|
||||
)
|
||||
|
||||
def public_bytes(self, encoding, format):
|
||||
return self._backend._public_key_bytes(
|
||||
encoding,
|
||||
format,
|
||||
self._evp_pkey,
|
||||
self._rsa_cdata
|
||||
)
|
||||
|
|
@@ -0,0 +1,26 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import six
|
||||
|
||||
|
||||
def _truncate_digest(digest, order_bits):
|
||||
digest_len = len(digest)
|
||||
|
||||
if 8 * digest_len > order_bits:
|
||||
digest_len = (order_bits + 7) // 8
|
||||
digest = digest[:digest_len]
|
||||
|
||||
if 8 * digest_len > order_bits:
|
||||
rshift = 8 - (order_bits & 0x7)
|
||||
assert 0 < rshift < 8
|
||||
|
||||
mask = 0xFF >> rshift << rshift
|
||||
|
||||
# Set the bottom rshift bits to 0
|
||||
digest = digest[:-1] + six.int2byte(six.indexbytes(digest, -1) & mask)
|
||||
|
||||
return digest
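# Illustrative, self-contained demonstration assuming a hypothetical 191-bit
# group order: a 32-byte SHA-256 digest is first shortened to ceil(191 / 8)
# bytes and then bit-masked before being handed to (EC)DSA sign/verify.
if __name__ == "__main__":
    import hashlib

    digest = hashlib.sha256(b"example message").digest()  # 256 bits
    truncated = _truncate_digest(digest, 191)
    # ceil(191 / 8) == 24 bytes survive the byte-level truncation ...
    assert len(truncated) == 24
    # ... and rshift == 8 - (191 & 7) == 1, so the lowest bit of the final
    # byte has been cleared by the mask.
    assert six.indexbytes(truncated, -1) & 0x01 == 0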
|
||||
|
|
@@ -0,0 +1,940 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import datetime
|
||||
import ipaddress
|
||||
|
||||
from email.utils import parseaddr
|
||||
|
||||
import idna
|
||||
|
||||
import six
|
||||
|
||||
from six.moves import urllib_parse
|
||||
|
||||
from cryptography import utils, x509
|
||||
from cryptography.exceptions import UnsupportedAlgorithm
|
||||
from cryptography.hazmat.primitives import hashes, serialization
|
||||
from cryptography.x509.oid import (
|
||||
CRLExtensionOID, CertificatePoliciesOID, ExtensionOID
|
||||
)
|
||||
|
||||
|
||||
def _obj2txt(backend, obj):
|
||||
# Set to 80 on the recommendation of
|
||||
# https://www.openssl.org/docs/crypto/OBJ_nid2ln.html#return_values
|
||||
buf_len = 80
|
||||
buf = backend._ffi.new("char[]", buf_len)
|
||||
res = backend._lib.OBJ_obj2txt(buf, buf_len, obj, 1)
|
||||
backend.openssl_assert(res > 0)
|
||||
return backend._ffi.buffer(buf, res)[:].decode()
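# Note for readers: passing 1 as the final (no_name) argument to OBJ_obj2txt
# forces the dotted-decimal form, so e.g. a commonName ASN1_OBJECT is
# returned as u"2.5.4.3" rather than its short name.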
|
||||
|
||||
|
||||
def _decode_x509_name_entry(backend, x509_name_entry):
|
||||
obj = backend._lib.X509_NAME_ENTRY_get_object(x509_name_entry)
|
||||
backend.openssl_assert(obj != backend._ffi.NULL)
|
||||
data = backend._lib.X509_NAME_ENTRY_get_data(x509_name_entry)
|
||||
backend.openssl_assert(data != backend._ffi.NULL)
|
||||
value = backend._asn1_string_to_utf8(data)
|
||||
oid = _obj2txt(backend, obj)
|
||||
|
||||
return x509.NameAttribute(x509.ObjectIdentifier(oid), value)
|
||||
|
||||
|
||||
def _decode_x509_name(backend, x509_name):
|
||||
count = backend._lib.X509_NAME_entry_count(x509_name)
|
||||
attributes = []
|
||||
for x in range(count):
|
||||
entry = backend._lib.X509_NAME_get_entry(x509_name, x)
|
||||
attributes.append(_decode_x509_name_entry(backend, entry))
|
||||
|
||||
return x509.Name(attributes)
|
||||
|
||||
|
||||
def _decode_general_names(backend, gns):
|
||||
num = backend._lib.sk_GENERAL_NAME_num(gns)
|
||||
names = []
|
||||
for i in range(num):
|
||||
gn = backend._lib.sk_GENERAL_NAME_value(gns, i)
|
||||
backend.openssl_assert(gn != backend._ffi.NULL)
|
||||
names.append(_decode_general_name(backend, gn))
|
||||
|
||||
return names
|
||||
|
||||
|
||||
def _decode_general_name(backend, gn):
|
||||
if gn.type == backend._lib.GEN_DNS:
|
||||
data = backend._asn1_string_to_bytes(gn.d.dNSName)
|
||||
if not data:
|
||||
decoded = u""
|
||||
elif data.startswith(b"*."):
|
||||
# This is a wildcard name. We need to remove the leading wildcard,
|
||||
# IDNA decode, then re-add the wildcard. Wildcard characters should
|
||||
# always be left-most (RFC 2595 section 2.4).
|
||||
decoded = u"*." + idna.decode(data[2:])
|
||||
else:
|
||||
# Not a wildcard, decode away. If the string has a * in it anywhere
|
||||
# the name is invalid and idna.decode will raise an InvalidCodePoint.
|
||||
decoded = idna.decode(data)
|
||||
if data.startswith(b"."):
|
||||
# idna strips leading periods. Name constraints can have that
|
||||
# so we need to re-add it. Sigh.
|
||||
decoded = u"." + decoded
|
||||
|
||||
return x509.DNSName(decoded)
|
||||
elif gn.type == backend._lib.GEN_URI:
|
||||
data = backend._asn1_string_to_ascii(gn.d.uniformResourceIdentifier)
|
||||
parsed = urllib_parse.urlparse(data)
|
||||
if parsed.hostname:
|
||||
hostname = idna.decode(parsed.hostname)
|
||||
else:
|
||||
hostname = ""
|
||||
if parsed.port:
|
||||
netloc = hostname + u":" + six.text_type(parsed.port)
|
||||
else:
|
||||
netloc = hostname
|
||||
|
||||
# Note that building a URL in this fashion means it should be
|
||||
# semantically indistinguishable from the original but is not
|
||||
# guaranteed to be exactly the same.
|
||||
uri = urllib_parse.urlunparse((
|
||||
parsed.scheme,
|
||||
netloc,
|
||||
parsed.path,
|
||||
parsed.params,
|
||||
parsed.query,
|
||||
parsed.fragment
|
||||
))
|
||||
return x509.UniformResourceIdentifier(uri)
|
||||
elif gn.type == backend._lib.GEN_RID:
|
||||
oid = _obj2txt(backend, gn.d.registeredID)
|
||||
return x509.RegisteredID(x509.ObjectIdentifier(oid))
|
||||
elif gn.type == backend._lib.GEN_IPADD:
|
||||
data = backend._asn1_string_to_bytes(gn.d.iPAddress)
|
||||
data_len = len(data)
|
||||
if data_len == 8 or data_len == 32:
|
||||
# This is an IPv4 or IPv6 Network and not a single IP. This
|
||||
# type of data appears in Name Constraints. Unfortunately,
|
||||
# ipaddress doesn't support packed bytes + netmask. Additionally,
|
||||
# IPv6Network can only handle CIDR rather than the full 16 byte
|
||||
# netmask. To handle this we convert the netmask to integer, then
|
||||
# find the first 0 bit, which will be the prefix. If another 1
|
||||
# bit is present after that the netmask is invalid.
|
||||
base = ipaddress.ip_address(data[:data_len // 2])
|
||||
netmask = ipaddress.ip_address(data[data_len // 2:])
|
||||
bits = bin(int(netmask))[2:]
|
||||
prefix = bits.find('0')
|
||||
# If no 0 bits are found it is a /32 or /128
|
||||
if prefix == -1:
|
||||
prefix = len(bits)
|
||||
|
||||
if "1" in bits[prefix:]:
|
||||
raise ValueError("Invalid netmask")
|
||||
|
||||
ip = ipaddress.ip_network(base.exploded + u"/{0}".format(prefix))
|
||||
else:
|
||||
ip = ipaddress.ip_address(data)
|
||||
|
||||
return x509.IPAddress(ip)
|
||||
elif gn.type == backend._lib.GEN_DIRNAME:
|
||||
return x509.DirectoryName(
|
||||
_decode_x509_name(backend, gn.d.directoryName)
|
||||
)
|
||||
elif gn.type == backend._lib.GEN_EMAIL:
|
||||
data = backend._asn1_string_to_ascii(gn.d.rfc822Name)
|
||||
name, address = parseaddr(data)
|
||||
parts = address.split(u"@")
|
||||
if name or not address:
|
||||
# parseaddr has found a name (e.g. Name <email>) or the entire
|
||||
# value is an empty string.
|
||||
raise ValueError("Invalid rfc822name value")
|
||||
elif len(parts) == 1:
|
||||
# Single label email name. This is valid for local delivery. No
|
||||
# IDNA decoding can be done since there is no domain component.
|
||||
return x509.RFC822Name(address)
|
||||
else:
|
||||
# A normal email of the form user@domain.com. Let's attempt to
|
||||
# decode the domain component and return the entire address.
|
||||
return x509.RFC822Name(
|
||||
parts[0] + u"@" + idna.decode(parts[1])
|
||||
)
|
||||
elif gn.type == backend._lib.GEN_OTHERNAME:
|
||||
type_id = _obj2txt(backend, gn.d.otherName.type_id)
|
||||
value = backend._asn1_to_der(gn.d.otherName.value)
|
||||
return x509.OtherName(x509.ObjectIdentifier(type_id), value)
|
||||
else:
|
||||
# x400Address or ediPartyName
|
||||
raise x509.UnsupportedGeneralNameType(
|
||||
"{0} is not a supported type".format(
|
||||
x509._GENERAL_NAMES.get(gn.type, gn.type)
|
||||
),
|
||||
gn.type
|
||||
)
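# Illustrative example of the GEN_IPADD branch above (Python 3 byte literals
# shown): an 8-byte "address + netmask" value such as
# b"\xc0\xa8\x00\x00\xff\xff\xff\x00" from a Name Constraints extension is
# decoded by counting the leading one-bits of the mask:
#
#   base = ipaddress.ip_address(b"\xc0\xa8\x00\x00")     # 192.168.0.0
#   netmask = ipaddress.ip_address(b"\xff\xff\xff\x00")  # 255.255.255.0
#   bits = bin(int(netmask))[2:]   # 24 ones followed by 8 zeros
#   prefix = bits.find("0")        # -> 24
#   ipaddress.ip_network(u"192.168.0.0/24")              # the decoded value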
|
||||
|
||||
|
||||
def _decode_ocsp_no_check(backend, ext):
|
||||
return x509.OCSPNoCheck()
|
||||
|
||||
|
||||
class _X509ExtensionParser(object):
|
||||
def __init__(self, ext_count, get_ext, handlers, unsupported_exts=None):
|
||||
self.ext_count = ext_count
|
||||
self.get_ext = get_ext
|
||||
self.handlers = handlers
|
||||
self.unsupported_exts = unsupported_exts
|
||||
|
||||
def parse(self, backend, x509_obj):
|
||||
extensions = []
|
||||
seen_oids = set()
|
||||
for i in range(self.ext_count(backend, x509_obj)):
|
||||
ext = self.get_ext(backend, x509_obj, i)
|
||||
backend.openssl_assert(ext != backend._ffi.NULL)
|
||||
crit = backend._lib.X509_EXTENSION_get_critical(ext)
|
||||
critical = crit == 1
|
||||
oid = x509.ObjectIdentifier(_obj2txt(backend, ext.object))
|
||||
if oid in seen_oids:
|
||||
raise x509.DuplicateExtension(
|
||||
"Duplicate {0} extension found".format(oid), oid
|
||||
)
|
||||
try:
|
||||
handler = self.handlers[oid]
|
||||
except KeyError:
|
||||
if critical:
|
||||
raise x509.UnsupportedExtension(
|
||||
"Critical extension {0} is not currently supported"
|
||||
.format(oid), oid
|
||||
)
|
||||
else:
|
||||
# For extensions which are not supported by OpenSSL we pass the
|
||||
# extension object directly to the parsing routine so it can
|
||||
# be decoded manually.
|
||||
if self.unsupported_exts and oid in self.unsupported_exts:
|
||||
ext_data = ext
|
||||
else:
|
||||
ext_data = backend._lib.X509V3_EXT_d2i(ext)
|
||||
if ext_data == backend._ffi.NULL:
|
||||
backend._consume_errors()
|
||||
raise ValueError(
|
||||
"The {0} extension is invalid and can't be "
|
||||
"parsed".format(oid)
|
||||
)
|
||||
|
||||
value = handler(backend, ext_data)
|
||||
extensions.append(x509.Extension(oid, critical, value))
|
||||
|
||||
seen_oids.add(oid)
|
||||
|
||||
return x509.Extensions(extensions)
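# Note for readers: the concrete parsers are instantiated at the bottom of
# this module (_CERTIFICATE_EXTENSION_PARSER, _CSR_EXTENSION_PARSER and
# _REVOKED_CERTIFICATE_EXTENSION_PARSER). They differ only in how extensions
# are counted and fetched from the underlying OpenSSL object; the handler
# tables map each OID to one of the _decode_* functions below.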
|
||||
|
||||
|
||||
@utils.register_interface(x509.Certificate)
|
||||
class _Certificate(object):
|
||||
def __init__(self, backend, x509):
|
||||
self._backend = backend
|
||||
self._x509 = x509
|
||||
|
||||
def __repr__(self):
|
||||
return "<Certificate(subject={0}, ...)>".format(self.subject)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, x509.Certificate):
|
||||
return NotImplemented
|
||||
|
||||
res = self._backend._lib.X509_cmp(self._x509, other._x509)
|
||||
return res == 0
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
def __hash__(self):
|
||||
return hash(self.public_bytes(serialization.Encoding.DER))
|
||||
|
||||
def fingerprint(self, algorithm):
|
||||
h = hashes.Hash(algorithm, self._backend)
|
||||
h.update(self.public_bytes(serialization.Encoding.DER))
|
||||
return h.finalize()
|
||||
|
||||
@property
|
||||
def version(self):
|
||||
version = self._backend._lib.X509_get_version(self._x509)
|
||||
if version == 0:
|
||||
return x509.Version.v1
|
||||
elif version == 2:
|
||||
return x509.Version.v3
|
||||
else:
|
||||
raise x509.InvalidVersion(
|
||||
"{0} is not a valid X509 version".format(version), version
|
||||
)
|
||||
|
||||
@property
|
||||
def serial(self):
|
||||
asn1_int = self._backend._lib.X509_get_serialNumber(self._x509)
|
||||
self._backend.openssl_assert(asn1_int != self._backend._ffi.NULL)
|
||||
return self._backend._asn1_integer_to_int(asn1_int)
|
||||
|
||||
def public_key(self):
|
||||
pkey = self._backend._lib.X509_get_pubkey(self._x509)
|
||||
if pkey == self._backend._ffi.NULL:
|
||||
# Remove errors from the stack.
|
||||
self._backend._consume_errors()
|
||||
raise ValueError("Certificate public key is of an unknown type")
|
||||
|
||||
pkey = self._backend._ffi.gc(pkey, self._backend._lib.EVP_PKEY_free)
|
||||
|
||||
return self._backend._evp_pkey_to_public_key(pkey)
|
||||
|
||||
@property
|
||||
def not_valid_before(self):
|
||||
asn1_time = self._backend._lib.X509_get_notBefore(self._x509)
|
||||
return self._backend._parse_asn1_time(asn1_time)
|
||||
|
||||
@property
|
||||
def not_valid_after(self):
|
||||
asn1_time = self._backend._lib.X509_get_notAfter(self._x509)
|
||||
return self._backend._parse_asn1_time(asn1_time)
|
||||
|
||||
@property
|
||||
def issuer(self):
|
||||
issuer = self._backend._lib.X509_get_issuer_name(self._x509)
|
||||
self._backend.openssl_assert(issuer != self._backend._ffi.NULL)
|
||||
return _decode_x509_name(self._backend, issuer)
|
||||
|
||||
@property
|
||||
def subject(self):
|
||||
subject = self._backend._lib.X509_get_subject_name(self._x509)
|
||||
self._backend.openssl_assert(subject != self._backend._ffi.NULL)
|
||||
return _decode_x509_name(self._backend, subject)
|
||||
|
||||
@property
|
||||
def signature_hash_algorithm(self):
|
||||
oid = _obj2txt(self._backend, self._x509.sig_alg.algorithm)
|
||||
try:
|
||||
return x509._SIG_OIDS_TO_HASH[oid]
|
||||
except KeyError:
|
||||
raise UnsupportedAlgorithm(
|
||||
"Signature algorithm OID:{0} not recognized".format(oid)
|
||||
)
|
||||
|
||||
@property
|
||||
def extensions(self):
|
||||
return _CERTIFICATE_EXTENSION_PARSER.parse(self._backend, self._x509)
|
||||
|
||||
def public_bytes(self, encoding):
|
||||
bio = self._backend._create_mem_bio()
|
||||
if encoding is serialization.Encoding.PEM:
|
||||
res = self._backend._lib.PEM_write_bio_X509(bio, self._x509)
|
||||
elif encoding is serialization.Encoding.DER:
|
||||
res = self._backend._lib.i2d_X509_bio(bio, self._x509)
|
||||
else:
|
||||
raise TypeError("encoding must be an item from the Encoding enum")
|
||||
|
||||
self._backend.openssl_assert(res == 1)
|
||||
return self._backend._read_mem_bio(bio)
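# Illustrative usage sketch, assuming `pem_data` holds a PEM-encoded
# certificate; _Certificate objects are normally created through the public
# loaders rather than instantiated directly:
#
#   from cryptography.hazmat.backends import default_backend
#   from cryptography.hazmat.primitives import hashes
#
#   cert = x509.load_pem_x509_certificate(pem_data, default_backend())
#   cert.subject                       # decoded via _decode_x509_name above
#   cert.serial                        # an int
#   cert.fingerprint(hashes.SHA256())  # hash over the DER encoding
#   cert.extensions                    # _CERTIFICATE_EXTENSION_PARSER output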
|
||||
|
||||
|
||||
def _decode_certificate_policies(backend, cp):
|
||||
cp = backend._ffi.cast("Cryptography_STACK_OF_POLICYINFO *", cp)
|
||||
cp = backend._ffi.gc(cp, backend._lib.sk_POLICYINFO_free)
|
||||
num = backend._lib.sk_POLICYINFO_num(cp)
|
||||
certificate_policies = []
|
||||
for i in range(num):
|
||||
qualifiers = None
|
||||
pi = backend._lib.sk_POLICYINFO_value(cp, i)
|
||||
oid = x509.ObjectIdentifier(_obj2txt(backend, pi.policyid))
|
||||
if pi.qualifiers != backend._ffi.NULL:
|
||||
qnum = backend._lib.sk_POLICYQUALINFO_num(pi.qualifiers)
|
||||
qualifiers = []
|
||||
for j in range(qnum):
|
||||
pqi = backend._lib.sk_POLICYQUALINFO_value(
|
||||
pi.qualifiers, j
|
||||
)
|
||||
pqualid = x509.ObjectIdentifier(
|
||||
_obj2txt(backend, pqi.pqualid)
|
||||
)
|
||||
if pqualid == CertificatePoliciesOID.CPS_QUALIFIER:
|
||||
cpsuri = backend._ffi.buffer(
|
||||
pqi.d.cpsuri.data, pqi.d.cpsuri.length
|
||||
)[:].decode('ascii')
|
||||
qualifiers.append(cpsuri)
|
||||
else:
|
||||
assert pqualid == CertificatePoliciesOID.CPS_USER_NOTICE
|
||||
user_notice = _decode_user_notice(
|
||||
backend, pqi.d.usernotice
|
||||
)
|
||||
qualifiers.append(user_notice)
|
||||
|
||||
certificate_policies.append(
|
||||
x509.PolicyInformation(oid, qualifiers)
|
||||
)
|
||||
|
||||
return x509.CertificatePolicies(certificate_policies)
|
||||
|
||||
|
||||
def _decode_user_notice(backend, un):
|
||||
explicit_text = None
|
||||
notice_reference = None
|
||||
|
||||
if un.exptext != backend._ffi.NULL:
|
||||
explicit_text = backend._asn1_string_to_utf8(un.exptext)
|
||||
|
||||
if un.noticeref != backend._ffi.NULL:
|
||||
organization = backend._asn1_string_to_utf8(un.noticeref.organization)
|
||||
|
||||
num = backend._lib.sk_ASN1_INTEGER_num(
|
||||
un.noticeref.noticenos
|
||||
)
|
||||
notice_numbers = []
|
||||
for i in range(num):
|
||||
asn1_int = backend._lib.sk_ASN1_INTEGER_value(
|
||||
un.noticeref.noticenos, i
|
||||
)
|
||||
notice_num = backend._asn1_integer_to_int(asn1_int)
|
||||
notice_numbers.append(notice_num)
|
||||
|
||||
notice_reference = x509.NoticeReference(
|
||||
organization, notice_numbers
|
||||
)
|
||||
|
||||
return x509.UserNotice(notice_reference, explicit_text)
|
||||
|
||||
|
||||
def _decode_basic_constraints(backend, bc_st):
|
||||
basic_constraints = backend._ffi.cast("BASIC_CONSTRAINTS *", bc_st)
|
||||
basic_constraints = backend._ffi.gc(
|
||||
basic_constraints, backend._lib.BASIC_CONSTRAINTS_free
|
||||
)
|
||||
# The byte representation of an ASN.1 boolean true is \xff. OpenSSL
|
||||
# chooses to just map this to its ordinal value, so true is 255 and
|
||||
# false is 0.
|
||||
ca = basic_constraints.ca == 255
|
||||
if basic_constraints.pathlen == backend._ffi.NULL:
|
||||
path_length = None
|
||||
else:
|
||||
path_length = backend._asn1_integer_to_int(basic_constraints.pathlen)
|
||||
|
||||
return x509.BasicConstraints(ca, path_length)
|
||||
|
||||
|
||||
def _decode_subject_key_identifier(backend, asn1_string):
|
||||
asn1_string = backend._ffi.cast("ASN1_OCTET_STRING *", asn1_string)
|
||||
asn1_string = backend._ffi.gc(
|
||||
asn1_string, backend._lib.ASN1_OCTET_STRING_free
|
||||
)
|
||||
return x509.SubjectKeyIdentifier(
|
||||
backend._ffi.buffer(asn1_string.data, asn1_string.length)[:]
|
||||
)
|
||||
|
||||
|
||||
def _decode_authority_key_identifier(backend, akid):
|
||||
akid = backend._ffi.cast("AUTHORITY_KEYID *", akid)
|
||||
akid = backend._ffi.gc(akid, backend._lib.AUTHORITY_KEYID_free)
|
||||
key_identifier = None
|
||||
authority_cert_issuer = None
|
||||
authority_cert_serial_number = None
|
||||
|
||||
if akid.keyid != backend._ffi.NULL:
|
||||
key_identifier = backend._ffi.buffer(
|
||||
akid.keyid.data, akid.keyid.length
|
||||
)[:]
|
||||
|
||||
if akid.issuer != backend._ffi.NULL:
|
||||
authority_cert_issuer = _decode_general_names(
|
||||
backend, akid.issuer
|
||||
)
|
||||
|
||||
if akid.serial != backend._ffi.NULL:
|
||||
authority_cert_serial_number = backend._asn1_integer_to_int(
|
||||
akid.serial
|
||||
)
|
||||
|
||||
return x509.AuthorityKeyIdentifier(
|
||||
key_identifier, authority_cert_issuer, authority_cert_serial_number
|
||||
)
|
||||
|
||||
|
||||
def _decode_authority_information_access(backend, aia):
|
||||
aia = backend._ffi.cast("Cryptography_STACK_OF_ACCESS_DESCRIPTION *", aia)
|
||||
aia = backend._ffi.gc(aia, backend._lib.sk_ACCESS_DESCRIPTION_free)
|
||||
num = backend._lib.sk_ACCESS_DESCRIPTION_num(aia)
|
||||
access_descriptions = []
|
||||
for i in range(num):
|
||||
ad = backend._lib.sk_ACCESS_DESCRIPTION_value(aia, i)
|
||||
backend.openssl_assert(ad.method != backend._ffi.NULL)
|
||||
oid = x509.ObjectIdentifier(_obj2txt(backend, ad.method))
|
||||
backend.openssl_assert(ad.location != backend._ffi.NULL)
|
||||
gn = _decode_general_name(backend, ad.location)
|
||||
access_descriptions.append(x509.AccessDescription(oid, gn))
|
||||
|
||||
return x509.AuthorityInformationAccess(access_descriptions)
|
||||
|
||||
|
||||
def _decode_key_usage(backend, bit_string):
|
||||
bit_string = backend._ffi.cast("ASN1_BIT_STRING *", bit_string)
|
||||
bit_string = backend._ffi.gc(bit_string, backend._lib.ASN1_BIT_STRING_free)
|
||||
get_bit = backend._lib.ASN1_BIT_STRING_get_bit
|
||||
digital_signature = get_bit(bit_string, 0) == 1
|
||||
content_commitment = get_bit(bit_string, 1) == 1
|
||||
key_encipherment = get_bit(bit_string, 2) == 1
|
||||
data_encipherment = get_bit(bit_string, 3) == 1
|
||||
key_agreement = get_bit(bit_string, 4) == 1
|
||||
key_cert_sign = get_bit(bit_string, 5) == 1
|
||||
crl_sign = get_bit(bit_string, 6) == 1
|
||||
encipher_only = get_bit(bit_string, 7) == 1
|
||||
decipher_only = get_bit(bit_string, 8) == 1
|
||||
return x509.KeyUsage(
|
||||
digital_signature,
|
||||
content_commitment,
|
||||
key_encipherment,
|
||||
data_encipherment,
|
||||
key_agreement,
|
||||
key_cert_sign,
|
||||
crl_sign,
|
||||
encipher_only,
|
||||
decipher_only
|
||||
)
|
||||
|
||||
|
||||
def _decode_general_names_extension(backend, gns):
|
||||
gns = backend._ffi.cast("GENERAL_NAMES *", gns)
|
||||
gns = backend._ffi.gc(gns, backend._lib.GENERAL_NAMES_free)
|
||||
general_names = _decode_general_names(backend, gns)
|
||||
return general_names
|
||||
|
||||
|
||||
def _decode_subject_alt_name(backend, ext):
|
||||
return x509.SubjectAlternativeName(
|
||||
_decode_general_names_extension(backend, ext)
|
||||
)
|
||||
|
||||
|
||||
def _decode_issuer_alt_name(backend, ext):
|
||||
return x509.IssuerAlternativeName(
|
||||
_decode_general_names_extension(backend, ext)
|
||||
)
|
||||
|
||||
|
||||
def _decode_name_constraints(backend, nc):
|
||||
nc = backend._ffi.cast("NAME_CONSTRAINTS *", nc)
|
||||
nc = backend._ffi.gc(nc, backend._lib.NAME_CONSTRAINTS_free)
|
||||
permitted = _decode_general_subtrees(backend, nc.permittedSubtrees)
|
||||
excluded = _decode_general_subtrees(backend, nc.excludedSubtrees)
|
||||
return x509.NameConstraints(
|
||||
permitted_subtrees=permitted, excluded_subtrees=excluded
|
||||
)
|
||||
|
||||
|
||||
def _decode_general_subtrees(backend, stack_subtrees):
|
||||
if stack_subtrees == backend._ffi.NULL:
|
||||
return None
|
||||
|
||||
num = backend._lib.sk_GENERAL_SUBTREE_num(stack_subtrees)
|
||||
subtrees = []
|
||||
|
||||
for i in range(num):
|
||||
obj = backend._lib.sk_GENERAL_SUBTREE_value(stack_subtrees, i)
|
||||
backend.openssl_assert(obj != backend._ffi.NULL)
|
||||
name = _decode_general_name(backend, obj.base)
|
||||
subtrees.append(name)
|
||||
|
||||
return subtrees
|
||||
|
||||
|
||||
def _decode_extended_key_usage(backend, sk):
|
||||
sk = backend._ffi.cast("Cryptography_STACK_OF_ASN1_OBJECT *", sk)
|
||||
sk = backend._ffi.gc(sk, backend._lib.sk_ASN1_OBJECT_free)
|
||||
num = backend._lib.sk_ASN1_OBJECT_num(sk)
|
||||
ekus = []
|
||||
|
||||
for i in range(num):
|
||||
obj = backend._lib.sk_ASN1_OBJECT_value(sk, i)
|
||||
backend.openssl_assert(obj != backend._ffi.NULL)
|
||||
oid = x509.ObjectIdentifier(_obj2txt(backend, obj))
|
||||
ekus.append(oid)
|
||||
|
||||
return x509.ExtendedKeyUsage(ekus)
|
||||
|
||||
|
||||
_DISTPOINT_TYPE_FULLNAME = 0
|
||||
_DISTPOINT_TYPE_RELATIVENAME = 1
|
||||
|
||||
|
||||
def _decode_crl_distribution_points(backend, cdps):
|
||||
cdps = backend._ffi.cast("Cryptography_STACK_OF_DIST_POINT *", cdps)
|
||||
cdps = backend._ffi.gc(cdps, backend._lib.sk_DIST_POINT_free)
|
||||
num = backend._lib.sk_DIST_POINT_num(cdps)
|
||||
|
||||
dist_points = []
|
||||
for i in range(num):
|
||||
full_name = None
|
||||
relative_name = None
|
||||
crl_issuer = None
|
||||
reasons = None
|
||||
cdp = backend._lib.sk_DIST_POINT_value(cdps, i)
|
||||
if cdp.reasons != backend._ffi.NULL:
|
||||
# We will check each bit from RFC 5280
|
||||
# ReasonFlags ::= BIT STRING {
|
||||
# unused (0),
|
||||
# keyCompromise (1),
|
||||
# cACompromise (2),
|
||||
# affiliationChanged (3),
|
||||
# superseded (4),
|
||||
# cessationOfOperation (5),
|
||||
# certificateHold (6),
|
||||
# privilegeWithdrawn (7),
|
||||
# aACompromise (8) }
|
||||
reasons = []
|
||||
get_bit = backend._lib.ASN1_BIT_STRING_get_bit
|
||||
if get_bit(cdp.reasons, 1):
|
||||
reasons.append(x509.ReasonFlags.key_compromise)
|
||||
|
||||
if get_bit(cdp.reasons, 2):
|
||||
reasons.append(x509.ReasonFlags.ca_compromise)
|
||||
|
||||
if get_bit(cdp.reasons, 3):
|
||||
reasons.append(x509.ReasonFlags.affiliation_changed)
|
||||
|
||||
if get_bit(cdp.reasons, 4):
|
||||
reasons.append(x509.ReasonFlags.superseded)
|
||||
|
||||
if get_bit(cdp.reasons, 5):
|
||||
reasons.append(x509.ReasonFlags.cessation_of_operation)
|
||||
|
||||
if get_bit(cdp.reasons, 6):
|
||||
reasons.append(x509.ReasonFlags.certificate_hold)
|
||||
|
||||
if get_bit(cdp.reasons, 7):
|
||||
reasons.append(x509.ReasonFlags.privilege_withdrawn)
|
||||
|
||||
if get_bit(cdp.reasons, 8):
|
||||
reasons.append(x509.ReasonFlags.aa_compromise)
|
||||
|
||||
reasons = frozenset(reasons)
|
||||
|
||||
if cdp.CRLissuer != backend._ffi.NULL:
|
||||
crl_issuer = _decode_general_names(backend, cdp.CRLissuer)
|
||||
|
||||
# Certificates may have a crl_issuer/reasons and no distribution
|
||||
# point so make sure it's not null.
|
||||
if cdp.distpoint != backend._ffi.NULL:
|
||||
# Type 0 is fullName, there is no #define for it in the code.
|
||||
if cdp.distpoint.type == _DISTPOINT_TYPE_FULLNAME:
|
||||
full_name = _decode_general_names(
|
||||
backend, cdp.distpoint.name.fullname
|
||||
)
|
||||
# OpenSSL code doesn't test for a specific type for
|
||||
# relativename; everything that isn't fullname is considered
|
||||
# relativename.
|
||||
else:
|
||||
rns = cdp.distpoint.name.relativename
|
||||
rnum = backend._lib.sk_X509_NAME_ENTRY_num(rns)
|
||||
attributes = []
|
||||
for i in range(rnum):
|
||||
rn = backend._lib.sk_X509_NAME_ENTRY_value(
|
||||
rns, i
|
||||
)
|
||||
backend.openssl_assert(rn != backend._ffi.NULL)
|
||||
attributes.append(
|
||||
_decode_x509_name_entry(backend, rn)
|
||||
)
|
||||
|
||||
relative_name = x509.Name(attributes)
|
||||
|
||||
dist_points.append(
|
||||
x509.DistributionPoint(
|
||||
full_name, relative_name, reasons, crl_issuer
|
||||
)
|
||||
)
|
||||
|
||||
return x509.CRLDistributionPoints(dist_points)
|
||||
|
||||
|
||||
def _decode_inhibit_any_policy(backend, asn1_int):
|
||||
asn1_int = backend._ffi.cast("ASN1_INTEGER *", asn1_int)
|
||||
asn1_int = backend._ffi.gc(asn1_int, backend._lib.ASN1_INTEGER_free)
|
||||
skip_certs = backend._asn1_integer_to_int(asn1_int)
|
||||
return x509.InhibitAnyPolicy(skip_certs)
|
||||
|
||||
|
||||
_CRL_REASON_CODE_TO_ENUM = {
|
||||
0: x509.ReasonFlags.unspecified,
|
||||
1: x509.ReasonFlags.key_compromise,
|
||||
2: x509.ReasonFlags.ca_compromise,
|
||||
3: x509.ReasonFlags.affiliation_changed,
|
||||
4: x509.ReasonFlags.superseded,
|
||||
5: x509.ReasonFlags.cessation_of_operation,
|
||||
6: x509.ReasonFlags.certificate_hold,
|
||||
8: x509.ReasonFlags.remove_from_crl,
|
||||
9: x509.ReasonFlags.privilege_withdrawn,
|
||||
10: x509.ReasonFlags.aa_compromise,
|
||||
}
|
||||
|
||||
|
||||
def _decode_crl_reason(backend, enum):
|
||||
enum = backend._ffi.cast("ASN1_ENUMERATED *", enum)
|
||||
enum = backend._ffi.gc(enum, backend._lib.ASN1_ENUMERATED_free)
|
||||
code = backend._lib.ASN1_ENUMERATED_get(enum)
|
||||
|
||||
try:
|
||||
return _CRL_REASON_CODE_TO_ENUM[code]
|
||||
except KeyError:
|
||||
raise ValueError("Unsupported reason code: {0}".format(code))
|
||||
|
||||
|
||||
def _decode_invalidity_date(backend, inv_date):
|
||||
generalized_time = backend._ffi.cast(
|
||||
"ASN1_GENERALIZEDTIME *", inv_date
|
||||
)
|
||||
generalized_time = backend._ffi.gc(
|
||||
generalized_time, backend._lib.ASN1_GENERALIZEDTIME_free
|
||||
)
|
||||
time = backend._ffi.string(
|
||||
backend._lib.ASN1_STRING_data(
|
||||
backend._ffi.cast("ASN1_STRING *", generalized_time)
|
||||
)
|
||||
).decode("ascii")
|
||||
return datetime.datetime.strptime(time, "%Y%m%d%H%M%SZ")
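# Example: the GENERALIZEDTIME string "20150523000000Z" decodes to
# datetime.datetime(2015, 5, 23, 0, 0).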
|
||||
|
||||
|
||||
def _decode_cert_issuer(backend, ext):
|
||||
"""
|
||||
This handler decodes the CertificateIssuer entry extension directly
|
||||
from the X509_EXTENSION object. This is necessary because this entry
|
||||
extension is not directly supported by OpenSSL 0.9.8.
|
||||
"""
|
||||
|
||||
data_ptr_ptr = backend._ffi.new("const unsigned char **")
|
||||
data_ptr_ptr[0] = ext.value.data
|
||||
gns = backend._lib.d2i_GENERAL_NAMES(
|
||||
backend._ffi.NULL, data_ptr_ptr, ext.value.length
|
||||
)
|
||||
|
||||
# Check that the result of d2i_GENERAL_NAMES() is valid. Usually this is covered
|
||||
# in _X509ExtensionParser but since we are responsible for decoding this
|
||||
# entry extension ourselves, we have to do it here.
|
||||
if gns == backend._ffi.NULL:
|
||||
backend._consume_errors()
|
||||
raise ValueError(
|
||||
"The {0} extension is corrupted and can't be parsed".format(
|
||||
CRLExtensionOID.CERTIFICATE_ISSUER))
|
||||
|
||||
gns = backend._ffi.gc(gns, backend._lib.GENERAL_NAMES_free)
|
||||
return x509.GeneralNames(_decode_general_names(backend, gns))
|
||||
|
||||
|
||||
@utils.register_interface(x509.RevokedCertificate)
|
||||
class _RevokedCertificate(object):
|
||||
def __init__(self, backend, x509_revoked):
|
||||
self._backend = backend
|
||||
self._x509_revoked = x509_revoked
|
||||
|
||||
@property
|
||||
def serial_number(self):
|
||||
asn1_int = self._x509_revoked.serialNumber
|
||||
self._backend.openssl_assert(asn1_int != self._backend._ffi.NULL)
|
||||
return self._backend._asn1_integer_to_int(asn1_int)
|
||||
|
||||
@property
|
||||
def revocation_date(self):
|
||||
return self._backend._parse_asn1_time(
|
||||
self._x509_revoked.revocationDate)
|
||||
|
||||
@property
|
||||
def extensions(self):
|
||||
return _REVOKED_CERTIFICATE_EXTENSION_PARSER.parse(
|
||||
self._backend, self._x509_revoked
|
||||
)
|
||||
|
||||
|
||||
@utils.register_interface(x509.CertificateRevocationList)
|
||||
class _CertificateRevocationList(object):
|
||||
def __init__(self, backend, x509_crl):
|
||||
self._backend = backend
|
||||
self._x509_crl = x509_crl
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, x509.CertificateRevocationList):
|
||||
return NotImplemented
|
||||
|
||||
res = self._backend._lib.X509_CRL_cmp(self._x509_crl, other._x509_crl)
|
||||
return res == 0
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
def fingerprint(self, algorithm):
|
||||
h = hashes.Hash(algorithm, self._backend)
|
||||
bio = self._backend._create_mem_bio()
|
||||
res = self._backend._lib.i2d_X509_CRL_bio(
|
||||
bio, self._x509_crl
|
||||
)
|
||||
self._backend.openssl_assert(res == 1)
|
||||
der = self._backend._read_mem_bio(bio)
|
||||
h.update(der)
|
||||
return h.finalize()
|
||||
|
||||
@property
|
||||
def signature_hash_algorithm(self):
|
||||
oid = _obj2txt(self._backend, self._x509_crl.sig_alg.algorithm)
|
||||
try:
|
||||
return x509._SIG_OIDS_TO_HASH[oid]
|
||||
except KeyError:
|
||||
raise UnsupportedAlgorithm(
|
||||
"Signature algorithm OID:{0} not recognized".format(oid)
|
||||
)
|
||||
|
||||
@property
|
||||
def issuer(self):
|
||||
issuer = self._backend._lib.X509_CRL_get_issuer(self._x509_crl)
|
||||
self._backend.openssl_assert(issuer != self._backend._ffi.NULL)
|
||||
return _decode_x509_name(self._backend, issuer)
|
||||
|
||||
@property
|
||||
def next_update(self):
|
||||
nu = self._backend._lib.X509_CRL_get_nextUpdate(self._x509_crl)
|
||||
self._backend.openssl_assert(nu != self._backend._ffi.NULL)
|
||||
return self._backend._parse_asn1_time(nu)
|
||||
|
||||
@property
|
||||
def last_update(self):
|
||||
lu = self._backend._lib.X509_CRL_get_lastUpdate(self._x509_crl)
|
||||
self._backend.openssl_assert(lu != self._backend._ffi.NULL)
|
||||
return self._backend._parse_asn1_time(lu)
|
||||
|
||||
def _revoked_certificates(self):
|
||||
revoked = self._backend._lib.X509_CRL_get_REVOKED(self._x509_crl)
|
||||
self._backend.openssl_assert(revoked != self._backend._ffi.NULL)
|
||||
|
||||
num = self._backend._lib.sk_X509_REVOKED_num(revoked)
|
||||
revoked_list = []
|
||||
for i in range(num):
|
||||
r = self._backend._lib.sk_X509_REVOKED_value(revoked, i)
|
||||
self._backend.openssl_assert(r != self._backend._ffi.NULL)
|
||||
revoked_list.append(_RevokedCertificate(self._backend, r))
|
||||
|
||||
return revoked_list
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self._revoked_certificates())
|
||||
|
||||
def __getitem__(self, idx):
|
||||
return self._revoked_certificates()[idx]
|
||||
|
||||
def __len__(self):
|
||||
return len(self._revoked_certificates())
|
||||
|
||||
@property
|
||||
def extensions(self):
|
||||
raise NotImplementedError()
|
||||
|
||||
|
||||
@utils.register_interface(x509.CertificateSigningRequest)
|
||||
class _CertificateSigningRequest(object):
|
||||
def __init__(self, backend, x509_req):
|
||||
self._backend = backend
|
||||
self._x509_req = x509_req
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, _CertificateSigningRequest):
|
||||
return NotImplemented
|
||||
|
||||
self_bytes = self.public_bytes(serialization.Encoding.DER)
|
||||
other_bytes = other.public_bytes(serialization.Encoding.DER)
|
||||
return self_bytes == other_bytes
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
def __hash__(self):
|
||||
return hash(self.public_bytes(serialization.Encoding.DER))
|
||||
|
||||
def public_key(self):
|
||||
pkey = self._backend._lib.X509_REQ_get_pubkey(self._x509_req)
|
||||
self._backend.openssl_assert(pkey != self._backend._ffi.NULL)
|
||||
pkey = self._backend._ffi.gc(pkey, self._backend._lib.EVP_PKEY_free)
|
||||
return self._backend._evp_pkey_to_public_key(pkey)
|
||||
|
||||
@property
|
||||
def subject(self):
|
||||
subject = self._backend._lib.X509_REQ_get_subject_name(self._x509_req)
|
||||
self._backend.openssl_assert(subject != self._backend._ffi.NULL)
|
||||
return _decode_x509_name(self._backend, subject)
|
||||
|
||||
@property
|
||||
def signature_hash_algorithm(self):
|
||||
oid = _obj2txt(self._backend, self._x509_req.sig_alg.algorithm)
|
||||
try:
|
||||
return x509._SIG_OIDS_TO_HASH[oid]
|
||||
except KeyError:
|
||||
raise UnsupportedAlgorithm(
|
||||
"Signature algorithm OID:{0} not recognized".format(oid)
|
||||
)
|
||||
|
||||
@property
|
||||
def extensions(self):
|
||||
x509_exts = self._backend._lib.X509_REQ_get_extensions(self._x509_req)
|
||||
return _CSR_EXTENSION_PARSER.parse(self._backend, x509_exts)
|
||||
|
||||
def public_bytes(self, encoding):
|
||||
bio = self._backend._create_mem_bio()
|
||||
if encoding is serialization.Encoding.PEM:
|
||||
res = self._backend._lib.PEM_write_bio_X509_REQ(
|
||||
bio, self._x509_req
|
||||
)
|
||||
elif encoding is serialization.Encoding.DER:
|
||||
res = self._backend._lib.i2d_X509_REQ_bio(bio, self._x509_req)
|
||||
else:
|
||||
raise TypeError("encoding must be an item from the Encoding enum")
|
||||
|
||||
self._backend.openssl_assert(res == 1)
|
||||
return self._backend._read_mem_bio(bio)
|
||||
|
||||
|
||||
_EXTENSION_HANDLERS = {
|
||||
ExtensionOID.BASIC_CONSTRAINTS: _decode_basic_constraints,
|
||||
ExtensionOID.SUBJECT_KEY_IDENTIFIER: _decode_subject_key_identifier,
|
||||
ExtensionOID.KEY_USAGE: _decode_key_usage,
|
||||
ExtensionOID.SUBJECT_ALTERNATIVE_NAME: _decode_subject_alt_name,
|
||||
ExtensionOID.EXTENDED_KEY_USAGE: _decode_extended_key_usage,
|
||||
ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _decode_authority_key_identifier,
|
||||
ExtensionOID.AUTHORITY_INFORMATION_ACCESS: (
|
||||
_decode_authority_information_access
|
||||
),
|
||||
ExtensionOID.CERTIFICATE_POLICIES: _decode_certificate_policies,
|
||||
ExtensionOID.CRL_DISTRIBUTION_POINTS: _decode_crl_distribution_points,
|
||||
ExtensionOID.OCSP_NO_CHECK: _decode_ocsp_no_check,
|
||||
ExtensionOID.INHIBIT_ANY_POLICY: _decode_inhibit_any_policy,
|
||||
ExtensionOID.ISSUER_ALTERNATIVE_NAME: _decode_issuer_alt_name,
|
||||
ExtensionOID.NAME_CONSTRAINTS: _decode_name_constraints,
|
||||
}
|
||||
|
||||
_REVOKED_EXTENSION_HANDLERS = {
|
||||
CRLExtensionOID.CRL_REASON: _decode_crl_reason,
|
||||
CRLExtensionOID.INVALIDITY_DATE: _decode_invalidity_date,
|
||||
CRLExtensionOID.CERTIFICATE_ISSUER: _decode_cert_issuer,
|
||||
}
|
||||
|
||||
_REVOKED_UNSUPPORTED_EXTENSIONS = set([
|
||||
CRLExtensionOID.CERTIFICATE_ISSUER,
|
||||
])
|
||||
|
||||
_CERTIFICATE_EXTENSION_PARSER = _X509ExtensionParser(
|
||||
ext_count=lambda backend, x: backend._lib.X509_get_ext_count(x),
|
||||
get_ext=lambda backend, x, i: backend._lib.X509_get_ext(x, i),
|
||||
handlers=_EXTENSION_HANDLERS
|
||||
)
|
||||
|
||||
_CSR_EXTENSION_PARSER = _X509ExtensionParser(
|
||||
ext_count=lambda backend, x: backend._lib.sk_X509_EXTENSION_num(x),
|
||||
get_ext=lambda backend, x, i: backend._lib.sk_X509_EXTENSION_value(x, i),
|
||||
handlers=_EXTENSION_HANDLERS
|
||||
)
|
||||
|
||||
_REVOKED_CERTIFICATE_EXTENSION_PARSER = _X509ExtensionParser(
|
||||
ext_count=lambda backend, x: backend._lib.X509_REVOKED_get_ext_count(x),
|
||||
get_ext=lambda backend, x, i: backend._lib.X509_REVOKED_get_ext(x, i),
|
||||
handlers=_REVOKED_EXTENSION_HANDLERS,
|
||||
unsupported_exts=_REVOKED_UNSUPPORTED_EXTENSIONS
|
||||
)
|
||||
|
|
@@ -0,0 +1,5 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
|
@@ -0,0 +1,5 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
|
@@ -0,0 +1,15 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from cryptography.hazmat.bindings._commoncrypto import ffi, lib
|
||||
|
||||
|
||||
class Binding(object):
|
||||
"""
|
||||
CommonCrypto API wrapper.
|
||||
"""
|
||||
lib = lib
|
||||
ffi = ffi
|
||||
|
|
@@ -0,0 +1,5 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
|
@@ -0,0 +1,423 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
# This is a temporary copy of all the CONDITIONAL_NAMES from _cffi_src so
|
||||
# we can loop over them and delete them at runtime. It will be removed when
|
||||
# cffi supports #if in cdef
|
||||
|
||||
CONDITIONAL_NAMES = {
|
||||
"Cryptography_HAS_AES_WRAP": [
|
||||
"AES_wrap_key",
|
||||
"AES_unwrap_key",
|
||||
],
|
||||
"Cryptography_HAS_CMAC": [
|
||||
"CMAC_CTX_new",
|
||||
"CMAC_Init",
|
||||
"CMAC_Update",
|
||||
"CMAC_Final",
|
||||
"CMAC_CTX_copy",
|
||||
"CMAC_CTX_free",
|
||||
],
|
||||
"Cryptography_HAS_CMS": [
|
||||
"BIO_new_CMS",
|
||||
"i2d_CMS_bio_stream",
|
||||
"PEM_write_bio_CMS_stream",
|
||||
"CMS_final",
|
||||
"CMS_sign",
|
||||
"CMS_verify",
|
||||
"CMS_encrypt",
|
||||
"CMS_decrypt",
|
||||
"CMS_add1_signer",
|
||||
"CMS_TEXT",
|
||||
"CMS_NOCERTS",
|
||||
"CMS_NO_CONTENT_VERIFY",
|
||||
"CMS_NO_ATTR_VERIFY",
|
||||
"CMS_NOSIGS",
|
||||
"CMS_NOINTERN",
|
||||
"CMS_NO_SIGNER_CERT_VERIFY",
|
||||
"CMS_NOVERIFY",
|
||||
"CMS_DETACHED",
|
||||
"CMS_BINARY",
|
||||
"CMS_NOATTR",
|
||||
"CMS_NOSMIMECAP",
|
||||
"CMS_NOOLDMIMETYPE",
|
||||
"CMS_CRLFEOL",
|
||||
"CMS_STREAM",
|
||||
"CMS_NOCRL",
|
||||
"CMS_PARTIAL",
|
||||
"CMS_REUSE_DIGEST",
|
||||
"CMS_USE_KEYID",
|
||||
"CMS_DEBUG_DECRYPT",
|
||||
],
|
||||
"Cryptography_HAS_CMS_BIO_FUNCTIONS": [
|
||||
"BIO_new_CMS",
|
||||
"i2d_CMS_bio_stream",
|
||||
"PEM_write_bio_CMS_stream",
|
||||
],
|
||||
"Cryptography_HAS_EC": [
|
||||
"OPENSSL_EC_NAMED_CURVE",
|
||||
"EC_GROUP_new",
|
||||
"EC_GROUP_free",
|
||||
"EC_GROUP_clear_free",
|
||||
"EC_GROUP_new_curve_GFp",
|
||||
"EC_GROUP_new_by_curve_name",
|
||||
"EC_GROUP_set_curve_GFp",
|
||||
"EC_GROUP_get_curve_GFp",
|
||||
"EC_GROUP_method_of",
|
||||
"EC_GROUP_get0_generator",
|
||||
"EC_GROUP_get_curve_name",
|
||||
"EC_GROUP_get_degree",
|
||||
"EC_GROUP_set_asn1_flag",
|
||||
"EC_GROUP_set_point_conversion_form",
|
||||
"EC_KEY_new",
|
||||
"EC_KEY_free",
|
||||
"EC_get_builtin_curves",
|
||||
"EC_KEY_new_by_curve_name",
|
||||
"EC_KEY_copy",
|
||||
"EC_KEY_dup",
|
||||
"EC_KEY_up_ref",
|
||||
"EC_KEY_set_group",
|
||||
"EC_KEY_get0_private_key",
|
||||
"EC_KEY_set_private_key",
|
||||
"EC_KEY_set_public_key",
|
||||
"EC_KEY_get_enc_flags",
|
||||
"EC_KEY_set_enc_flags",
|
||||
"EC_KEY_set_conv_form",
|
||||
"EC_KEY_get_key_method_data",
|
||||
"EC_KEY_insert_key_method_data",
|
||||
"EC_KEY_set_asn1_flag",
|
||||
"EC_KEY_precompute_mult",
|
||||
"EC_KEY_generate_key",
|
||||
"EC_KEY_check_key",
|
||||
"EC_POINT_new",
|
||||
"EC_POINT_free",
|
||||
"EC_POINT_clear_free",
|
||||
"EC_POINT_copy",
|
||||
"EC_POINT_dup",
|
||||
"EC_POINT_method_of",
|
||||
"EC_POINT_set_to_infinity",
|
||||
"EC_POINT_set_Jprojective_coordinates_GFp",
|
||||
"EC_POINT_get_Jprojective_coordinates_GFp",
|
||||
"EC_POINT_set_affine_coordinates_GFp",
|
||||
"EC_POINT_get_affine_coordinates_GFp",
|
||||
"EC_POINT_set_compressed_coordinates_GFp",
|
||||
"EC_POINT_point2oct",
|
||||
"EC_POINT_oct2point",
|
||||
"EC_POINT_point2bn",
|
||||
"EC_POINT_bn2point",
|
||||
"EC_POINT_point2hex",
|
||||
"EC_POINT_hex2point",
|
||||
"EC_POINT_add",
|
||||
"EC_POINT_dbl",
|
||||
"EC_POINT_invert",
|
||||
"EC_POINT_is_at_infinity",
|
||||
"EC_POINT_is_on_curve",
|
||||
"EC_POINT_cmp",
|
||||
"EC_POINT_make_affine",
|
||||
"EC_POINTs_make_affine",
|
||||
"EC_POINTs_mul",
|
||||
"EC_POINT_mul",
|
||||
"EC_GROUP_precompute_mult",
|
||||
"EC_GROUP_have_precompute_mult",
|
||||
"EC_GFp_simple_method",
|
||||
"EC_GFp_mont_method",
|
||||
"EC_GFp_nist_method",
|
||||
"EC_METHOD_get_field_type",
|
||||
"EVP_PKEY_assign_EC_KEY",
|
||||
"EVP_PKEY_get1_EC_KEY",
|
||||
"EVP_PKEY_set1_EC_KEY",
|
||||
"PEM_write_bio_ECPrivateKey",
|
||||
"i2d_EC_PUBKEY",
|
||||
"d2i_EC_PUBKEY",
|
||||
"d2i_EC_PUBKEY_bio",
|
||||
"i2d_EC_PUBKEY_bio",
|
||||
"d2i_ECPrivateKey",
|
||||
"d2i_ECPrivateKey_bio",
|
||||
"i2d_ECPrivateKey",
|
||||
"i2d_ECPrivateKey_bio",
|
||||
"i2o_ECPublicKey",
|
||||
"o2i_ECPublicKey",
|
||||
"SSL_CTX_set_tmp_ecdh",
|
||||
],
|
||||
|
||||
"Cryptography_HAS_EC_1_0_1": [
|
||||
"EC_KEY_get_flags",
|
||||
"EC_KEY_set_flags",
|
||||
"EC_KEY_clear_flags",
|
||||
"EC_KEY_set_public_key_affine_coordinates",
|
||||
],
|
||||
|
||||
"Cryptography_HAS_EC_NISTP_64_GCC_128": [
|
||||
"EC_GFp_nistp224_method",
|
||||
"EC_GFp_nistp256_method",
|
||||
"EC_GFp_nistp521_method",
|
||||
],
|
||||
|
||||
"Cryptography_HAS_EC2M": [
|
||||
"EC_GF2m_simple_method",
|
||||
"EC_POINT_set_affine_coordinates_GF2m",
|
||||
"EC_POINT_get_affine_coordinates_GF2m",
|
||||
"EC_POINT_set_compressed_coordinates_GF2m",
|
||||
"EC_GROUP_set_curve_GF2m",
|
||||
"EC_GROUP_get_curve_GF2m",
|
||||
"EC_GROUP_new_curve_GF2m",
|
||||
],
|
||||
|
||||
"Cryptography_HAS_EC_1_0_2": [
|
||||
"EC_curve_nid2nist",
|
||||
],
|
||||
"Cryptography_HAS_ECDH": [
|
||||
"ECDH_compute_key",
|
||||
"ECDH_get_ex_new_index",
|
||||
"ECDH_set_ex_data",
|
||||
"ECDH_get_ex_data",
|
||||
],
|
||||
"Cryptography_HAS_ECDSA": [
|
||||
"ECDSA_SIG_new",
|
||||
"ECDSA_SIG_free",
|
||||
"i2d_ECDSA_SIG",
|
||||
"d2i_ECDSA_SIG",
|
||||
"ECDSA_do_sign",
|
||||
"ECDSA_do_sign_ex",
|
||||
"ECDSA_do_verify",
|
||||
"ECDSA_sign_setup",
|
||||
"ECDSA_sign",
|
||||
"ECDSA_sign_ex",
|
||||
"ECDSA_verify",
|
||||
"ECDSA_size",
|
||||
"ECDSA_OpenSSL",
|
||||
"ECDSA_set_default_method",
|
||||
"ECDSA_get_default_method",
|
||||
"ECDSA_set_method",
|
||||
"ECDSA_get_ex_new_index",
|
||||
"ECDSA_set_ex_data",
|
||||
"ECDSA_get_ex_data",
|
||||
],
|
||||
"Cryptography_HAS_ENGINE_CRYPTODEV": [
|
||||
"ENGINE_load_cryptodev"
|
||||
],
|
||||
"Cryptography_HAS_REMOVE_THREAD_STATE": [
|
||||
"ERR_remove_thread_state"
|
||||
],
|
||||
"Cryptography_HAS_098H_ERROR_CODES": [
|
||||
"ASN1_F_B64_READ_ASN1",
|
||||
"ASN1_F_B64_WRITE_ASN1",
|
||||
"ASN1_F_SMIME_READ_ASN1",
|
||||
"ASN1_F_SMIME_TEXT",
|
||||
"ASN1_R_NO_CONTENT_TYPE",
|
||||
"ASN1_R_NO_MULTIPART_BODY_FAILURE",
|
||||
"ASN1_R_NO_MULTIPART_BOUNDARY",
|
||||
],
|
||||
"Cryptography_HAS_098C_CAMELLIA_CODES": [
|
||||
"EVP_F_CAMELLIA_INIT_KEY",
|
||||
"EVP_R_CAMELLIA_KEY_SETUP_FAILED"
|
||||
],
|
||||
"Cryptography_HAS_EC_CODES": [
|
||||
"EC_R_UNKNOWN_GROUP",
|
||||
"EC_F_EC_GROUP_NEW_BY_CURVE_NAME"
|
||||
],
|
||||
"Cryptography_HAS_RSA_R_PKCS_DECODING_ERROR": [
|
||||
"RSA_R_PKCS_DECODING_ERROR"
|
||||
],
|
||||
"Cryptography_HAS_GCM": [
|
||||
"EVP_CTRL_GCM_GET_TAG",
|
||||
"EVP_CTRL_GCM_SET_TAG",
|
||||
"EVP_CTRL_GCM_SET_IVLEN",
|
||||
],
|
||||
"Cryptography_HAS_PBKDF2_HMAC": [
|
||||
"PKCS5_PBKDF2_HMAC"
|
||||
],
|
||||
"Cryptography_HAS_PKEY_CTX": [
|
||||
"EVP_PKEY_CTX_new",
|
||||
"EVP_PKEY_CTX_new_id",
|
||||
"EVP_PKEY_CTX_dup",
|
||||
"EVP_PKEY_CTX_free",
|
||||
"EVP_PKEY_sign",
|
||||
"EVP_PKEY_sign_init",
|
||||
"EVP_PKEY_verify",
|
||||
"EVP_PKEY_verify_init",
|
||||
"Cryptography_EVP_PKEY_encrypt",
|
||||
"EVP_PKEY_encrypt_init",
|
||||
"Cryptography_EVP_PKEY_decrypt",
|
||||
"EVP_PKEY_decrypt_init",
|
||||
"EVP_PKEY_CTX_set_signature_md",
|
||||
"EVP_PKEY_id",
|
||||
"EVP_PKEY_CTX_set_rsa_padding",
|
||||
"EVP_PKEY_CTX_set_rsa_pss_saltlen",
|
||||
],
|
||||
"Cryptography_HAS_ECDSA_SHA2_NIDS": [
|
||||
"NID_ecdsa_with_SHA224",
|
||||
"NID_ecdsa_with_SHA256",
|
||||
"NID_ecdsa_with_SHA384",
|
||||
"NID_ecdsa_with_SHA512",
|
||||
],
|
||||
"Cryptography_HAS_EGD": [
|
||||
"RAND_egd",
|
||||
"RAND_egd_bytes",
|
||||
"RAND_query_egd_bytes",
|
||||
],
|
||||
"Cryptography_HAS_PSS_PADDING": [
|
||||
"RSA_PKCS1_PSS_PADDING",
|
||||
],
|
||||
"Cryptography_HAS_MGF1_MD": [
|
||||
"EVP_PKEY_CTX_set_rsa_mgf1_md",
|
||||
],
|
||||
"Cryptography_HAS_TLSv1_1": [
|
||||
"SSL_OP_NO_TLSv1_1",
|
||||
"TLSv1_1_method",
|
||||
"TLSv1_1_server_method",
|
||||
"TLSv1_1_client_method",
|
||||
],
|
||||
|
||||
"Cryptography_HAS_TLSv1_2": [
|
||||
"SSL_OP_NO_TLSv1_2",
|
||||
"TLSv1_2_method",
|
||||
"TLSv1_2_server_method",
|
||||
"TLSv1_2_client_method",
|
||||
],
|
||||
|
||||
"Cryptography_HAS_SSL2": [
|
||||
"SSLv2_method",
|
||||
"SSLv2_client_method",
|
||||
"SSLv2_server_method",
|
||||
],
|
||||
|
||||
"Cryptography_HAS_SSL3_METHOD": [
|
||||
"SSLv3_method",
|
||||
"SSLv3_client_method",
|
||||
"SSLv3_server_method",
|
||||
],
|
||||
|
||||
"Cryptography_HAS_TLSEXT_HOSTNAME": [
|
||||
"SSL_set_tlsext_host_name",
|
||||
"SSL_get_servername",
|
||||
"SSL_CTX_set_tlsext_servername_callback",
|
||||
],
|
||||
|
||||
"Cryptography_HAS_TLSEXT_STATUS_REQ_CB": [
|
||||
"SSL_CTX_set_tlsext_status_cb",
|
||||
"SSL_CTX_set_tlsext_status_arg"
|
||||
],
|
||||
|
||||
"Cryptography_HAS_STATUS_REQ_OCSP_RESP": [
|
||||
"SSL_set_tlsext_status_ocsp_resp",
|
||||
"SSL_get_tlsext_status_ocsp_resp",
|
||||
],
|
||||
|
||||
"Cryptography_HAS_TLSEXT_STATUS_REQ_TYPE": [
|
||||
"SSL_set_tlsext_status_type",
|
||||
],
|
||||
|
||||
"Cryptography_HAS_RELEASE_BUFFERS": [
|
||||
"SSL_MODE_RELEASE_BUFFERS",
|
||||
],
|
||||
|
||||
"Cryptography_HAS_OP_NO_COMPRESSION": [
|
||||
"SSL_OP_NO_COMPRESSION",
|
||||
],
|
||||
|
||||
"Cryptography_HAS_SSL_OP_MSIE_SSLV2_RSA_PADDING": [
|
||||
"SSL_OP_MSIE_SSLV2_RSA_PADDING",
|
||||
],
|
||||
|
||||
"Cryptography_HAS_SSL_OP_NO_TICKET": [
|
||||
"SSL_OP_NO_TICKET",
|
||||
],
|
||||
|
||||
"Cryptography_HAS_SSL_SET_SSL_CTX": [
|
||||
"SSL_set_SSL_CTX",
|
||||
"TLSEXT_NAMETYPE_host_name",
|
||||
],
|
||||
|
||||
"Cryptography_HAS_NETBSD_D1_METH": [
|
||||
"DTLSv1_method",
|
||||
],
|
||||
|
||||
"Cryptography_HAS_NEXTPROTONEG": [
|
||||
"SSL_CTX_set_next_protos_advertised_cb",
|
||||
"SSL_CTX_set_next_proto_select_cb",
|
||||
"SSL_select_next_proto",
|
||||
"SSL_get0_next_proto_negotiated",
|
||||
],
|
||||
|
||||
"Cryptography_HAS_SECURE_RENEGOTIATION": [
|
||||
"SSL_OP_ALLOW_UNSAFE_LEGACY_RENEGOTIATION",
|
||||
"SSL_OP_LEGACY_SERVER_CONNECT",
|
||||
"SSL_get_secure_renegotiation_support",
|
||||
],
|
||||
|
||||
"Cryptography_HAS_ALPN": [
|
||||
"SSL_CTX_set_alpn_protos",
|
||||
"SSL_set_alpn_protos",
|
||||
"SSL_CTX_set_alpn_select_cb",
|
||||
"SSL_get0_alpn_selected",
|
||||
],
|
||||
|
||||
"Cryptography_HAS_COMPRESSION": [
|
||||
"SSL_get_current_compression",
|
||||
"SSL_get_current_expansion",
|
||||
"SSL_COMP_get_name",
|
||||
],
|
||||
|
||||
"Cryptography_HAS_GET_SERVER_TMP_KEY": [
|
||||
"SSL_get_server_tmp_key",
|
||||
],
|
||||
|
||||
"Cryptography_HAS_SSL_CTX_SET_CLIENT_CERT_ENGINE": [
|
||||
"SSL_CTX_set_client_cert_engine",
|
||||
],
|
||||
"Cryptography_HAS_102_VERIFICATION_ERROR_CODES": [
|
||||
'X509_V_ERR_SUITE_B_INVALID_VERSION',
|
||||
'X509_V_ERR_SUITE_B_INVALID_ALGORITHM',
|
||||
'X509_V_ERR_SUITE_B_INVALID_CURVE',
|
||||
'X509_V_ERR_SUITE_B_INVALID_SIGNATURE_ALGORITHM',
|
||||
'X509_V_ERR_SUITE_B_LOS_NOT_ALLOWED',
|
||||
'X509_V_ERR_SUITE_B_CANNOT_SIGN_P_384_WITH_P_256',
|
||||
'X509_V_ERR_HOSTNAME_MISMATCH',
|
||||
'X509_V_ERR_EMAIL_MISMATCH',
|
||||
'X509_V_ERR_IP_ADDRESS_MISMATCH'
|
||||
],
|
||||
"Cryptography_HAS_102_VERIFICATION_PARAMS": [
|
||||
"X509_V_FLAG_SUITEB_128_LOS_ONLY",
|
||||
"X509_V_FLAG_SUITEB_192_LOS",
|
||||
"X509_V_FLAG_SUITEB_128_LOS",
|
||||
"X509_VERIFY_PARAM_set1_host",
|
||||
"X509_VERIFY_PARAM_set1_email",
|
||||
"X509_VERIFY_PARAM_set1_ip",
|
||||
"X509_VERIFY_PARAM_set1_ip_asc",
|
||||
"X509_VERIFY_PARAM_set_hostflags",
|
||||
],
|
||||
"Cryptography_HAS_X509_V_FLAG_TRUSTED_FIRST": [
|
||||
"X509_V_FLAG_TRUSTED_FIRST",
|
||||
],
|
||||
"Cryptography_HAS_X509_V_FLAG_PARTIAL_CHAIN": [
|
||||
"X509_V_FLAG_PARTIAL_CHAIN",
|
||||
],
|
||||
"Cryptography_HAS_100_VERIFICATION_ERROR_CODES": [
|
||||
'X509_V_ERR_DIFFERENT_CRL_SCOPE',
|
||||
'X509_V_ERR_UNSUPPORTED_EXTENSION_FEATURE',
|
||||
'X509_V_ERR_UNNESTED_RESOURCE',
|
||||
'X509_V_ERR_PERMITTED_VIOLATION',
|
||||
'X509_V_ERR_EXCLUDED_VIOLATION',
|
||||
'X509_V_ERR_SUBTREE_MINMAX',
|
||||
'X509_V_ERR_UNSUPPORTED_CONSTRAINT_TYPE',
|
||||
'X509_V_ERR_UNSUPPORTED_CONSTRAINT_SYNTAX',
|
||||
'X509_V_ERR_UNSUPPORTED_NAME_SYNTAX',
|
||||
'X509_V_ERR_CRL_PATH_VALIDATION_ERROR',
|
||||
],
|
||||
"Cryptography_HAS_100_VERIFICATION_PARAMS": [
|
||||
"Cryptography_HAS_100_VERIFICATION_PARAMS",
|
||||
"X509_V_FLAG_EXTENDED_CRL_SUPPORT",
|
||||
"X509_V_FLAG_USE_DELTAS",
|
||||
],
|
||||
"Cryptography_HAS_X509_V_FLAG_CHECK_SS_SIGNATURE": [
|
||||
"X509_V_FLAG_CHECK_SS_SIGNATURE",
|
||||
],
|
||||
"Cryptography_HAS_SET_CERT_CB": [
|
||||
"SSL_CTX_set_cert_cb",
|
||||
"SSL_set_cert_cb",
|
||||
],
|
||||
}
|
||||
|
|
@@ -0,0 +1,182 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import collections
|
||||
import os
|
||||
import threading
|
||||
import types
|
||||
|
||||
from cryptography.exceptions import InternalError
|
||||
from cryptography.hazmat.bindings._openssl import ffi, lib
|
||||
from cryptography.hazmat.bindings.openssl._conditional import CONDITIONAL_NAMES
|
||||
|
||||
|
||||
_OpenSSLError = collections.namedtuple("_OpenSSLError",
|
||||
["code", "lib", "func", "reason"])
|
||||
|
||||
|
||||
def _consume_errors(lib):
|
||||
errors = []
|
||||
while True:
|
||||
code = lib.ERR_get_error()
|
||||
if code == 0:
|
||||
break
|
||||
|
||||
err_lib = lib.ERR_GET_LIB(code)
|
||||
err_func = lib.ERR_GET_FUNC(code)
|
||||
err_reason = lib.ERR_GET_REASON(code)
|
||||
|
||||
errors.append(_OpenSSLError(code, err_lib, err_func, err_reason))
|
||||
return errors
|
||||
|
||||
|
||||
def _openssl_assert(lib, ok):
|
||||
if not ok:
|
||||
errors = _consume_errors(lib)
|
||||
raise InternalError(
|
||||
"Unknown OpenSSL error. Please file an issue at https://github.com"
|
||||
"/pyca/cryptography/issues with information on how to reproduce "
|
||||
"this. ({0!r})".format(errors),
|
||||
errors
|
||||
)
|
||||
|
||||
|
||||
@ffi.callback("int (*)(unsigned char *, int)", error=-1)
|
||||
def _osrandom_rand_bytes(buf, size):
|
||||
signed = ffi.cast("char *", buf)
|
||||
result = os.urandom(size)
|
||||
signed[0:size] = result
|
||||
return 1
|
||||
|
||||
|
||||
@ffi.callback("int (*)(void)")
|
||||
def _osrandom_rand_status():
|
||||
return 1
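# Note for readers: the two callbacks above back the custom RAND_METHOD
# declared as _osrandom_method below. Every request for random bytes is
# served directly from os.urandom(), and the status callback always reports
# the generator as seeded; _register_osrandom_engine() wraps this method in
# an OpenSSL ENGINE so that the library's RNG draws from the operating
# system CSPRNG.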
|
||||
|
||||
|
||||
def build_conditional_library(lib, conditional_names):
|
||||
conditional_lib = types.ModuleType("lib")
|
||||
excluded_names = set()
|
||||
for condition, names in conditional_names.items():
|
||||
if not getattr(lib, condition):
|
||||
excluded_names |= set(names)
|
||||
|
||||
for attr in dir(lib):
|
||||
if attr not in excluded_names:
|
||||
setattr(conditional_lib, attr, getattr(lib, attr))
|
||||
|
||||
return conditional_lib
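# Illustrative sketch of the effect of the filtering above: symbols guarded
# by a Cryptography_HAS_* flag simply do not exist on the conditional module
# when the flag evaluated to 0 at compile time.
#
#   lib = build_conditional_library(lib, CONDITIONAL_NAMES)
#   if lib.Cryptography_HAS_ALPN:
#       lib.SSL_CTX_set_alpn_protos              # present
#   else:
#       getattr(lib, "SSL_CTX_set_alpn_protos")  # raises AttributeError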
|
||||
|
||||
|
||||
class Binding(object):
|
||||
"""
|
||||
OpenSSL API wrapper.
|
||||
"""
|
||||
lib = None
|
||||
ffi = ffi
|
||||
_lib_loaded = False
|
||||
_locks = None
|
||||
_lock_cb_handle = None
|
||||
_init_lock = threading.Lock()
|
||||
_lock_init_lock = threading.Lock()
|
||||
|
||||
_osrandom_engine_id = ffi.new("const char[]", b"osrandom")
|
||||
_osrandom_engine_name = ffi.new("const char[]", b"osrandom_engine")
|
||||
_osrandom_method = ffi.new(
|
||||
"RAND_METHOD *",
|
||||
dict(bytes=_osrandom_rand_bytes, pseudorand=_osrandom_rand_bytes,
|
||||
status=_osrandom_rand_status)
|
||||
)
|
||||
|
||||
def __init__(self):
|
||||
self._ensure_ffi_initialized()
|
||||
|
||||
@classmethod
|
||||
def _register_osrandom_engine(cls):
|
||||
_openssl_assert(cls.lib, cls.lib.ERR_peek_error() == 0)
|
||||
|
||||
engine = cls.lib.ENGINE_new()
|
||||
_openssl_assert(cls.lib, engine != cls.ffi.NULL)
|
||||
try:
|
||||
result = cls.lib.ENGINE_set_id(engine, cls._osrandom_engine_id)
|
||||
_openssl_assert(cls.lib, result == 1)
|
||||
result = cls.lib.ENGINE_set_name(engine, cls._osrandom_engine_name)
|
||||
_openssl_assert(cls.lib, result == 1)
|
||||
result = cls.lib.ENGINE_set_RAND(engine, cls._osrandom_method)
|
||||
_openssl_assert(cls.lib, result == 1)
|
||||
result = cls.lib.ENGINE_add(engine)
|
||||
if result != 1:
|
||||
errors = _consume_errors(cls.lib)
|
||||
_openssl_assert(
|
||||
cls.lib,
|
||||
errors[0].reason == cls.lib.ENGINE_R_CONFLICTING_ENGINE_ID
|
||||
)
|
||||
|
||||
finally:
|
||||
result = cls.lib.ENGINE_free(engine)
|
||||
_openssl_assert(cls.lib, result == 1)
|
||||
|
||||
@classmethod
|
||||
def _ensure_ffi_initialized(cls):
|
||||
with cls._init_lock:
|
||||
if not cls._lib_loaded:
|
||||
cls.lib = build_conditional_library(lib, CONDITIONAL_NAMES)
|
||||
cls._lib_loaded = True
|
||||
# initialize the SSL library
|
||||
cls.lib.SSL_library_init()
|
||||
# adds all ciphers/digests for EVP
|
||||
cls.lib.OpenSSL_add_all_algorithms()
|
||||
# loads error strings for libcrypto and libssl functions
|
||||
cls.lib.SSL_load_error_strings()
|
||||
cls._register_osrandom_engine()
|
||||
|
||||
@classmethod
|
||||
def init_static_locks(cls):
|
||||
with cls._lock_init_lock:
|
||||
cls._ensure_ffi_initialized()
|
||||
|
||||
if not cls._lock_cb_handle:
|
||||
cls._lock_cb_handle = cls.ffi.callback(
|
||||
"void(int, int, const char *, int)",
|
||||
cls._lock_cb
|
||||
)
|
||||
|
||||
# Use Python's implementation if available; importing _ssl triggers
|
||||
# the setup for this.
|
||||
__import__("_ssl")
|
||||
|
||||
if cls.lib.CRYPTO_get_locking_callback() != cls.ffi.NULL:
|
||||
return
|
||||
|
||||
# If nothing else has set up a locking callback already, we set up
|
||||
# our own
|
||||
num_locks = cls.lib.CRYPTO_num_locks()
|
||||
cls._locks = [threading.Lock() for n in range(num_locks)]
|
||||
|
||||
cls.lib.CRYPTO_set_locking_callback(cls._lock_cb_handle)
|
||||
|
||||
@classmethod
|
||||
def _lock_cb(cls, mode, n, file, line):
|
||||
lock = cls._locks[n]
|
||||
|
||||
if mode & cls.lib.CRYPTO_LOCK:
|
||||
lock.acquire()
|
||||
elif mode & cls.lib.CRYPTO_UNLOCK:
|
||||
lock.release()
|
||||
else:
|
||||
raise RuntimeError(
|
||||
"Unknown lock mode {0}: lock={1}, file={2}, line={3}.".format(
|
||||
mode, n, file, line
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
# OpenSSL is not thread safe until the locks are initialized. We call this
|
||||
# method in module scope so that it executes with the import lock. On
|
||||
# Pythons < 3.4 this import lock is a global lock, which can prevent a race
|
||||
# condition registering the OpenSSL locks. On Python 3.4+ the import lock
|
||||
# is per module so this approach will not work.
|
||||
Binding.init_static_locks()
|
||||
|
|
@@ -0,0 +1,5 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
|
@@ -0,0 +1,40 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import abc
|
||||
|
||||
import six
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class AsymmetricSignatureContext(object):
|
||||
@abc.abstractmethod
|
||||
def update(self, data):
|
||||
"""
|
||||
Processes the provided bytes and returns nothing.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def finalize(self):
|
||||
"""
|
||||
Returns the signature as bytes.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class AsymmetricVerificationContext(object):
|
||||
@abc.abstractmethod
|
||||
def update(self, data):
|
||||
"""
|
||||
Processes the provided bytes and returns nothing.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def verify(self):
|
||||
"""
|
||||
Raises an exception if the bytes provided to update do not match the
|
||||
signature or the signature does not match the public key.
|
||||
"""
|
||||
|
|
@ -0,0 +1,166 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import abc
|
||||
|
||||
import six
|
||||
|
||||
from cryptography import utils
|
||||
|
||||
|
||||
class DHPrivateNumbers(object):
|
||||
def __init__(self, x, public_numbers):
|
||||
if not isinstance(x, six.integer_types):
|
||||
raise TypeError("x must be an integer.")
|
||||
|
||||
if not isinstance(public_numbers, DHPublicNumbers):
|
||||
raise TypeError("public_numbers must be an instance of "
|
||||
"DHPublicNumbers.")
|
||||
|
||||
self._x = x
|
||||
self._public_numbers = public_numbers
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, DHPrivateNumbers):
|
||||
return NotImplemented
|
||||
|
||||
return (
|
||||
self._x == other._x and
|
||||
self._public_numbers == other._public_numbers
|
||||
)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
public_numbers = utils.read_only_property("_public_numbers")
|
||||
x = utils.read_only_property("_x")
|
||||
|
||||
|
||||
class DHPublicNumbers(object):
|
||||
def __init__(self, y, parameter_numbers):
|
||||
if not isinstance(y, six.integer_types):
|
||||
raise TypeError("y must be an integer.")
|
||||
|
||||
if not isinstance(parameter_numbers, DHParameterNumbers):
|
||||
raise TypeError(
|
||||
"parameters must be an instance of DHParameterNumbers.")
|
||||
|
||||
self._y = y
|
||||
self._parameter_numbers = parameter_numbers
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, DHPublicNumbers):
|
||||
return NotImplemented
|
||||
|
||||
return (
|
||||
self._y == other._y and
|
||||
self._parameter_numbers == other._parameter_numbers
|
||||
)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
y = utils.read_only_property("_y")
|
||||
parameter_numbers = utils.read_only_property("_parameter_numbers")
|
||||
|
||||
|
||||
class DHParameterNumbers(object):
|
||||
def __init__(self, p, g):
|
||||
if (
|
||||
not isinstance(p, six.integer_types) or
|
||||
not isinstance(g, six.integer_types)
|
||||
):
|
||||
raise TypeError("p and g must be integers")
|
||||
|
||||
self._p = p
|
||||
self._g = g
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, DHParameterNumbers):
|
||||
return NotImplemented
|
||||
|
||||
return (
|
||||
self._p == other._p and
|
||||
self._g == other._g
|
||||
)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
p = utils.read_only_property("_p")
|
||||
g = utils.read_only_property("_g")
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class DHParameters(object):
|
||||
@abc.abstractmethod
|
||||
def generate_private_key(self):
|
||||
"""
|
||||
Generates and returns a DHPrivateKey.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class DHParametersWithSerialization(DHParameters):
|
||||
@abc.abstractmethod
|
||||
def parameter_numbers(self):
|
||||
"""
|
||||
Returns a DHParameterNumbers.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class DHPrivateKey(object):
|
||||
@abc.abstractproperty
|
||||
def key_size(self):
|
||||
"""
|
||||
The bit length of the prime modulus.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def public_key(self):
|
||||
"""
|
||||
The DHPublicKey associated with this private key.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def parameters(self):
|
||||
"""
|
||||
The DHParameters object associated with this private key.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class DHPrivateKeyWithSerialization(DHPrivateKey):
|
||||
@abc.abstractmethod
|
||||
def private_numbers(self):
|
||||
"""
|
||||
Returns a DHPrivateNumbers.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class DHPublicKey(object):
|
||||
@abc.abstractproperty
|
||||
def key_size(self):
|
||||
"""
|
||||
The bit length of the prime modulus.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def parameters(self):
|
||||
"""
|
||||
The DHParameters object associated with this public key.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class DHPublicKeyWithSerialization(DHPublicKey):
|
||||
@abc.abstractmethod
|
||||
def public_numbers(self):
|
||||
"""
|
||||
Returns a DHPublicNumbers.
|
||||
"""
|
||||
|
|
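# A minimal sketch of how the DH number containers above fit together, assuming
# the upstream module layout. The p, g, x, y values are tiny placeholders for
# illustration only; real parameters must be large, cryptographically sound.
from cryptography.hazmat.primitives.asymmetric import dh

parameter_numbers = dh.DHParameterNumbers(p=23, g=5)
public_numbers = dh.DHPublicNumbers(y=8, parameter_numbers=parameter_numbers)
private_numbers = dh.DHPrivateNumbers(x=6, public_numbers=public_numbers)

assert private_numbers.public_numbers.parameter_numbers.p == 23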
@ -0,0 +1,229 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import abc
|
||||
|
||||
import six
|
||||
|
||||
from cryptography import utils
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class DSAParameters(object):
|
||||
@abc.abstractmethod
|
||||
def generate_private_key(self):
|
||||
"""
|
||||
Generates and returns a DSAPrivateKey.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class DSAParametersWithNumbers(DSAParameters):
|
||||
@abc.abstractmethod
|
||||
def parameter_numbers(self):
|
||||
"""
|
||||
Returns a DSAParameterNumbers.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class DSAPrivateKey(object):
|
||||
@abc.abstractproperty
|
||||
def key_size(self):
|
||||
"""
|
||||
The bit length of the prime modulus.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def public_key(self):
|
||||
"""
|
||||
The DSAPublicKey associated with this private key.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def parameters(self):
|
||||
"""
|
||||
The DSAParameters object associated with this private key.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def signer(self, signature_algorithm):
|
||||
"""
|
||||
Returns an AsymmetricSignatureContext used for signing data.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class DSAPrivateKeyWithSerialization(DSAPrivateKey):
|
||||
@abc.abstractmethod
|
||||
def private_numbers(self):
|
||||
"""
|
||||
Returns a DSAPrivateNumbers.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def private_bytes(self, encoding, format, encryption_algorithm):
|
||||
"""
|
||||
Returns the key serialized as bytes.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class DSAPublicKey(object):
|
||||
@abc.abstractproperty
|
||||
def key_size(self):
|
||||
"""
|
||||
The bit length of the prime modulus.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def parameters(self):
|
||||
"""
|
||||
The DSAParameters object associated with this public key.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def verifier(self, signature, signature_algorithm):
|
||||
"""
|
||||
Returns an AsymmetricVerificationContext used for verifying data.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def public_numbers(self):
|
||||
"""
|
||||
Returns a DSAPublicNumbers.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def public_bytes(self, encoding, format):
|
||||
"""
|
||||
Returns the key serialized as bytes.
|
||||
"""
|
||||
|
||||
|
||||
DSAPublicKeyWithSerialization = DSAPublicKey
|
||||
|
||||
|
||||
def generate_parameters(key_size, backend):
|
||||
return backend.generate_dsa_parameters(key_size)
|
||||
|
||||
|
||||
def generate_private_key(key_size, backend):
|
||||
return backend.generate_dsa_private_key_and_parameters(key_size)
|
||||
|
||||
|
||||
def _check_dsa_parameters(parameters):
|
||||
if utils.bit_length(parameters.p) not in [1024, 2048, 3072]:
|
||||
raise ValueError("p must be exactly 1024, 2048, or 3072 bits long")
|
||||
if utils.bit_length(parameters.q) not in [160, 256]:
|
||||
raise ValueError("q must be exactly 160 or 256 bits long")
|
||||
|
||||
if not (1 < parameters.g < parameters.p):
|
||||
raise ValueError("g, p don't satisfy 1 < g < p.")
|
||||
|
||||
|
||||
def _check_dsa_private_numbers(numbers):
|
||||
parameters = numbers.public_numbers.parameter_numbers
|
||||
_check_dsa_parameters(parameters)
|
||||
if numbers.x <= 0 or numbers.x >= parameters.q:
|
||||
raise ValueError("x must be > 0 and < q.")
|
||||
|
||||
if numbers.public_numbers.y != pow(parameters.g, numbers.x, parameters.p):
|
||||
raise ValueError("y must be equal to (g ** x % p).")
|
||||
|
||||
|
||||
class DSAParameterNumbers(object):
|
||||
def __init__(self, p, q, g):
|
||||
if (
|
||||
not isinstance(p, six.integer_types) or
|
||||
not isinstance(q, six.integer_types) or
|
||||
not isinstance(g, six.integer_types)
|
||||
):
|
||||
raise TypeError(
|
||||
"DSAParameterNumbers p, q, and g arguments must be integers."
|
||||
)
|
||||
|
||||
self._p = p
|
||||
self._q = q
|
||||
self._g = g
|
||||
|
||||
p = utils.read_only_property("_p")
|
||||
q = utils.read_only_property("_q")
|
||||
g = utils.read_only_property("_g")
|
||||
|
||||
def parameters(self, backend):
|
||||
return backend.load_dsa_parameter_numbers(self)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, DSAParameterNumbers):
|
||||
return NotImplemented
|
||||
|
||||
return self.p == other.p and self.q == other.q and self.g == other.g
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
|
||||
class DSAPublicNumbers(object):
|
||||
def __init__(self, y, parameter_numbers):
|
||||
if not isinstance(y, six.integer_types):
|
||||
raise TypeError("DSAPublicNumbers y argument must be an integer.")
|
||||
|
||||
if not isinstance(parameter_numbers, DSAParameterNumbers):
|
||||
raise TypeError(
|
||||
"parameter_numbers must be a DSAParameterNumbers instance."
|
||||
)
|
||||
|
||||
self._y = y
|
||||
self._parameter_numbers = parameter_numbers
|
||||
|
||||
y = utils.read_only_property("_y")
|
||||
parameter_numbers = utils.read_only_property("_parameter_numbers")
|
||||
|
||||
def public_key(self, backend):
|
||||
return backend.load_dsa_public_numbers(self)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, DSAPublicNumbers):
|
||||
return NotImplemented
|
||||
|
||||
return (
|
||||
self.y == other.y and
|
||||
self.parameter_numbers == other.parameter_numbers
|
||||
)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
|
||||
class DSAPrivateNumbers(object):
|
||||
def __init__(self, x, public_numbers):
|
||||
if not isinstance(x, six.integer_types):
|
||||
raise TypeError("DSAPrivateNumbers x argument must be an integer.")
|
||||
|
||||
if not isinstance(public_numbers, DSAPublicNumbers):
|
||||
raise TypeError(
|
||||
"public_numbers must be a DSAPublicNumbers instance."
|
||||
)
|
||||
self._public_numbers = public_numbers
|
||||
self._x = x
|
||||
|
||||
x = utils.read_only_property("_x")
|
||||
public_numbers = utils.read_only_property("_public_numbers")
|
||||
|
||||
def private_key(self, backend):
|
||||
return backend.load_dsa_private_numbers(self)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, DSAPrivateNumbers):
|
||||
return NotImplemented
|
||||
|
||||
return (
|
||||
self.x == other.x and self.public_numbers == other.public_numbers
|
||||
)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
|
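# A minimal sign/verify sketch against the DSA interfaces above, assuming
# default_backend() from this package implements DSABackend.
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import dsa

private_key = dsa.generate_private_key(key_size=1024, backend=default_backend())

signer = private_key.signer(hashes.SHA256())
signer.update(b"message to sign")
signature = signer.finalize()

verifier = private_key.public_key().verifier(signature, hashes.SHA256())
verifier.update(b"message to sign")
verifier.verify()  # raises InvalidSignature on mismatch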
@ -0,0 +1,346 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import abc
|
||||
|
||||
import six
|
||||
|
||||
from cryptography import utils
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class EllipticCurve(object):
|
||||
@abc.abstractproperty
|
||||
def name(self):
|
||||
"""
|
||||
The name of the curve. e.g. secp256r1.
|
||||
"""
|
||||
|
||||
@abc.abstractproperty
|
||||
def key_size(self):
|
||||
"""
|
||||
The bit length of the base point of the curve.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class EllipticCurveSignatureAlgorithm(object):
|
||||
@abc.abstractproperty
|
||||
def algorithm(self):
|
||||
"""
|
||||
The digest algorithm used with this signature.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class EllipticCurvePrivateKey(object):
|
||||
@abc.abstractmethod
|
||||
def signer(self, signature_algorithm):
|
||||
"""
|
||||
Returns an AsymmetricSignatureContext used for signing data.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def exchange(self, algorithm, peer_public_key):
|
||||
"""
|
||||
Performs a key exchange operation using the provided algorithm with the
|
||||
provided peer's public key.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def public_key(self):
|
||||
"""
|
||||
The EllipticCurvePublicKey for this private key.
|
||||
"""
|
||||
|
||||
@abc.abstractproperty
|
||||
def curve(self):
|
||||
"""
|
||||
The EllipticCurve that this key is on.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class EllipticCurvePrivateKeyWithSerialization(EllipticCurvePrivateKey):
|
||||
@abc.abstractmethod
|
||||
def private_numbers(self):
|
||||
"""
|
||||
Returns an EllipticCurvePrivateNumbers.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def private_bytes(self, encoding, format, encryption_algorithm):
|
||||
"""
|
||||
Returns the key serialized as bytes.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class EllipticCurvePublicKey(object):
|
||||
@abc.abstractmethod
|
||||
def verifier(self, signature, signature_algorithm):
|
||||
"""
|
||||
Returns an AsymmetricVerificationContext used for verifying data.
|
||||
"""
|
||||
|
||||
@abc.abstractproperty
|
||||
def curve(self):
|
||||
"""
|
||||
The EllipticCurve that this key is on.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def public_numbers(self):
|
||||
"""
|
||||
Returns an EllipticCurvePublicNumbers.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def public_bytes(self, encoding, format):
|
||||
"""
|
||||
Returns the key serialized as bytes.
|
||||
"""
|
||||
|
||||
|
||||
EllipticCurvePublicKeyWithSerialization = EllipticCurvePublicKey
|
||||
|
||||
|
||||
@utils.register_interface(EllipticCurve)
|
||||
class SECT571R1(object):
|
||||
name = "sect571r1"
|
||||
key_size = 571
|
||||
|
||||
|
||||
@utils.register_interface(EllipticCurve)
|
||||
class SECT409R1(object):
|
||||
name = "sect409r1"
|
||||
key_size = 409
|
||||
|
||||
|
||||
@utils.register_interface(EllipticCurve)
|
||||
class SECT283R1(object):
|
||||
name = "sect283r1"
|
||||
key_size = 283
|
||||
|
||||
|
||||
@utils.register_interface(EllipticCurve)
|
||||
class SECT233R1(object):
|
||||
name = "sect233r1"
|
||||
key_size = 233
|
||||
|
||||
|
||||
@utils.register_interface(EllipticCurve)
|
||||
class SECT163R2(object):
|
||||
name = "sect163r2"
|
||||
key_size = 163
|
||||
|
||||
|
||||
@utils.register_interface(EllipticCurve)
|
||||
class SECT571K1(object):
|
||||
name = "sect571k1"
|
||||
key_size = 571
|
||||
|
||||
|
||||
@utils.register_interface(EllipticCurve)
|
||||
class SECT409K1(object):
|
||||
name = "sect409k1"
|
||||
key_size = 409
|
||||
|
||||
|
||||
@utils.register_interface(EllipticCurve)
|
||||
class SECT283K1(object):
|
||||
name = "sect283k1"
|
||||
key_size = 283
|
||||
|
||||
|
||||
@utils.register_interface(EllipticCurve)
|
||||
class SECT233K1(object):
|
||||
name = "sect233k1"
|
||||
key_size = 233
|
||||
|
||||
|
||||
@utils.register_interface(EllipticCurve)
|
||||
class SECT163K1(object):
|
||||
name = "sect163k1"
|
||||
key_size = 163
|
||||
|
||||
|
||||
@utils.register_interface(EllipticCurve)
|
||||
class SECP521R1(object):
|
||||
name = "secp521r1"
|
||||
key_size = 521
|
||||
|
||||
|
||||
@utils.register_interface(EllipticCurve)
|
||||
class SECP384R1(object):
|
||||
name = "secp384r1"
|
||||
key_size = 384
|
||||
|
||||
|
||||
@utils.register_interface(EllipticCurve)
|
||||
class SECP256R1(object):
|
||||
name = "secp256r1"
|
||||
key_size = 256
|
||||
|
||||
|
||||
@utils.register_interface(EllipticCurve)
|
||||
class SECP256K1(object):
|
||||
name = "secp256k1"
|
||||
key_size = 256
|
||||
|
||||
|
||||
@utils.register_interface(EllipticCurve)
|
||||
class SECP224R1(object):
|
||||
name = "secp224r1"
|
||||
key_size = 224
|
||||
|
||||
|
||||
@utils.register_interface(EllipticCurve)
|
||||
class SECP192R1(object):
|
||||
name = "secp192r1"
|
||||
key_size = 192
|
||||
|
||||
|
||||
_CURVE_TYPES = {
|
||||
"prime192v1": SECP192R1,
|
||||
"prime256v1": SECP256R1,
|
||||
|
||||
"secp192r1": SECP192R1,
|
||||
"secp224r1": SECP224R1,
|
||||
"secp256r1": SECP256R1,
|
||||
"secp384r1": SECP384R1,
|
||||
"secp521r1": SECP521R1,
|
||||
"secp256k1": SECP256K1,
|
||||
|
||||
"sect163k1": SECT163K1,
|
||||
"sect233k1": SECT233K1,
|
||||
"sect283k1": SECT283K1,
|
||||
"sect409k1": SECT409K1,
|
||||
"sect571k1": SECT571K1,
|
||||
|
||||
"sect163r2": SECT163R2,
|
||||
"sect233r1": SECT233R1,
|
||||
"sect283r1": SECT283R1,
|
||||
"sect409r1": SECT409R1,
|
||||
"sect571r1": SECT571R1,
|
||||
}
|
||||
|
||||
|
||||
@utils.register_interface(EllipticCurveSignatureAlgorithm)
|
||||
class ECDSA(object):
|
||||
def __init__(self, algorithm):
|
||||
self._algorithm = algorithm
|
||||
|
||||
algorithm = utils.read_only_property("_algorithm")
|
||||
|
||||
|
||||
def generate_private_key(curve, backend):
|
||||
return backend.generate_elliptic_curve_private_key(curve)
|
||||
|
||||
|
||||
class EllipticCurvePublicNumbers(object):
|
||||
def __init__(self, x, y, curve):
|
||||
if (
|
||||
not isinstance(x, six.integer_types) or
|
||||
not isinstance(y, six.integer_types)
|
||||
):
|
||||
raise TypeError("x and y must be integers.")
|
||||
|
||||
if not isinstance(curve, EllipticCurve):
|
||||
raise TypeError("curve must provide the EllipticCurve interface.")
|
||||
|
||||
self._y = y
|
||||
self._x = x
|
||||
self._curve = curve
|
||||
|
||||
def public_key(self, backend):
|
||||
return backend.load_elliptic_curve_public_numbers(self)
|
||||
|
||||
def encode_point(self):
|
||||
# key_size is in bits. Convert to bytes and round up
|
||||
byte_length = (self.curve.key_size + 7) // 8
|
||||
return (
|
||||
b'\x04' + utils.int_to_bytes(self.x, byte_length) +
|
||||
utils.int_to_bytes(self.y, byte_length)
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def from_encoded_point(cls, curve, data):
|
||||
if not isinstance(curve, EllipticCurve):
|
||||
raise TypeError("curve must be an EllipticCurve instance")
|
||||
|
||||
if data.startswith(b'\x04'):
|
||||
# key_size is in bits. Convert to bytes and round up
|
||||
byte_length = (curve.key_size + 7) // 8
|
||||
if len(data) == 2 * byte_length + 1:
|
||||
x = utils.int_from_bytes(data[1:byte_length + 1], 'big')
|
||||
y = utils.int_from_bytes(data[byte_length + 1:], 'big')
|
||||
return cls(x, y, curve)
|
||||
else:
|
||||
raise ValueError('Invalid elliptic curve point data length')
|
||||
else:
|
||||
raise ValueError('Unsupported elliptic curve point type')
|
||||
|
||||
curve = utils.read_only_property("_curve")
|
||||
x = utils.read_only_property("_x")
|
||||
y = utils.read_only_property("_y")
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, EllipticCurvePublicNumbers):
|
||||
return NotImplemented
|
||||
|
||||
return (
|
||||
self.x == other.x and
|
||||
self.y == other.y and
|
||||
self.curve.name == other.curve.name and
|
||||
self.curve.key_size == other.curve.key_size
|
||||
)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
def __repr__(self):
|
||||
return (
|
||||
"<EllipticCurvePublicNumbers(curve={0.curve.name}, x={0.x}, "
|
||||
"y={0.y}>".format(self)
|
||||
)
|
||||
|
||||
|
||||
class EllipticCurvePrivateNumbers(object):
|
||||
def __init__(self, private_value, public_numbers):
|
||||
if not isinstance(private_value, six.integer_types):
|
||||
raise TypeError("private_value must be an integer.")
|
||||
|
||||
if not isinstance(public_numbers, EllipticCurvePublicNumbers):
|
||||
raise TypeError(
|
||||
"public_numbers must be an EllipticCurvePublicNumbers "
|
||||
"instance."
|
||||
)
|
||||
|
||||
self._private_value = private_value
|
||||
self._public_numbers = public_numbers
|
||||
|
||||
def private_key(self, backend):
|
||||
return backend.load_elliptic_curve_private_numbers(self)
|
||||
|
||||
private_value = utils.read_only_property("_private_value")
|
||||
public_numbers = utils.read_only_property("_public_numbers")
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, EllipticCurvePrivateNumbers):
|
||||
return NotImplemented
|
||||
|
||||
return (
|
||||
self.private_value == other.private_value and
|
||||
self.public_numbers == other.public_numbers
|
||||
)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
|
||||
class ECDH(object):
|
||||
pass
|
||||
|
|
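# A minimal ECDSA signing sketch plus an ECDH exchange against the interfaces
# above, assuming default_backend() supports the chosen curve.
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec

private_key = ec.generate_private_key(ec.SECP256R1(), default_backend())

signer = private_key.signer(ec.ECDSA(hashes.SHA256()))
signer.update(b"data to authenticate")
signature = signer.finalize()

verifier = private_key.public_key().verifier(
    signature, ec.ECDSA(hashes.SHA256())
)
verifier.update(b"data to authenticate")
verifier.verify()

# ECDH: derive a shared secret using a peer's public key.
peer_key = ec.generate_private_key(ec.SECP256R1(), default_backend())
shared_secret = private_key.exchange(ec.ECDH(), peer_key.public_key())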
@ -0,0 +1,67 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import abc
|
||||
|
||||
import six
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.hazmat.primitives import hashes
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class AsymmetricPadding(object):
|
||||
@abc.abstractproperty
|
||||
def name(self):
|
||||
"""
|
||||
A string naming this padding (e.g. "PSS", "PKCS1").
|
||||
"""
|
||||
|
||||
|
||||
@utils.register_interface(AsymmetricPadding)
|
||||
class PKCS1v15(object):
|
||||
name = "EMSA-PKCS1-v1_5"
|
||||
|
||||
|
||||
@utils.register_interface(AsymmetricPadding)
|
||||
class PSS(object):
|
||||
MAX_LENGTH = object()
|
||||
name = "EMSA-PSS"
|
||||
|
||||
def __init__(self, mgf, salt_length):
|
||||
self._mgf = mgf
|
||||
|
||||
if (not isinstance(salt_length, six.integer_types) and
|
||||
salt_length is not self.MAX_LENGTH):
|
||||
raise TypeError("salt_length must be an integer.")
|
||||
|
||||
if salt_length is not self.MAX_LENGTH and salt_length < 0:
|
||||
raise ValueError("salt_length must be zero or greater.")
|
||||
|
||||
self._salt_length = salt_length
|
||||
|
||||
|
||||
@utils.register_interface(AsymmetricPadding)
|
||||
class OAEP(object):
|
||||
name = "EME-OAEP"
|
||||
|
||||
def __init__(self, mgf, algorithm, label):
|
||||
if not isinstance(algorithm, hashes.HashAlgorithm):
|
||||
raise TypeError("Expected instance of hashes.HashAlgorithm.")
|
||||
|
||||
self._mgf = mgf
|
||||
self._algorithm = algorithm
|
||||
self._label = label
|
||||
|
||||
|
||||
class MGF1(object):
|
||||
MAX_LENGTH = object()
|
||||
|
||||
def __init__(self, algorithm):
|
||||
if not isinstance(algorithm, hashes.HashAlgorithm):
|
||||
raise TypeError("Expected instance of hashes.HashAlgorithm.")
|
||||
|
||||
self._algorithm = algorithm
|
||||
|
|
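# A minimal sketch constructing the padding objects above as they are typically
# passed to RSA sign/verify and encrypt/decrypt calls elsewhere in the package.
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding

pss = padding.PSS(
    mgf=padding.MGF1(hashes.SHA256()),
    salt_length=padding.PSS.MAX_LENGTH,
)
oaep = padding.OAEP(
    mgf=padding.MGF1(algorithm=hashes.SHA1()),
    algorithm=hashes.SHA1(),
    label=None,
)
pkcs1 = padding.PKCS1v15()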
@ -0,0 +1,352 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import abc
|
||||
from fractions import gcd
|
||||
|
||||
import six
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
|
||||
from cryptography.hazmat.backends.interfaces import RSABackend
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class RSAPrivateKey(object):
|
||||
@abc.abstractmethod
|
||||
def signer(self, padding, algorithm):
|
||||
"""
|
||||
Returns an AsymmetricSignatureContext used for signing data.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def decrypt(self, ciphertext, padding):
|
||||
"""
|
||||
Decrypts the provided ciphertext.
|
||||
"""
|
||||
|
||||
@abc.abstractproperty
|
||||
def key_size(self):
|
||||
"""
|
||||
The bit length of the public modulus.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def public_key(self):
|
||||
"""
|
||||
The RSAPublicKey associated with this private key.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class RSAPrivateKeyWithSerialization(RSAPrivateKey):
|
||||
@abc.abstractmethod
|
||||
def private_numbers(self):
|
||||
"""
|
||||
Returns an RSAPrivateNumbers.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def private_bytes(self, encoding, format, encryption_algorithm):
|
||||
"""
|
||||
Returns the key serialized as bytes.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class RSAPublicKey(object):
|
||||
@abc.abstractmethod
|
||||
def verifier(self, signature, padding, algorithm):
|
||||
"""
|
||||
Returns an AsymmetricVerificationContext used for verifying signatures.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def encrypt(self, plaintext, padding):
|
||||
"""
|
||||
Encrypts the given plaintext.
|
||||
"""
|
||||
|
||||
@abc.abstractproperty
|
||||
def key_size(self):
|
||||
"""
|
||||
The bit length of the public modulus.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def public_numbers(self):
|
||||
"""
|
||||
Returns an RSAPublicNumbers
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def public_bytes(self, encoding, format):
|
||||
"""
|
||||
Returns the key serialized as bytes.
|
||||
"""
|
||||
|
||||
|
||||
RSAPublicKeyWithSerialization = RSAPublicKey
|
||||
|
||||
|
||||
def generate_private_key(public_exponent, key_size, backend):
|
||||
if not isinstance(backend, RSABackend):
|
||||
raise UnsupportedAlgorithm(
|
||||
"Backend object does not implement RSABackend.",
|
||||
_Reasons.BACKEND_MISSING_INTERFACE
|
||||
)
|
||||
|
||||
_verify_rsa_parameters(public_exponent, key_size)
|
||||
return backend.generate_rsa_private_key(public_exponent, key_size)
|
||||
|
||||
|
||||
def _verify_rsa_parameters(public_exponent, key_size):
|
||||
if public_exponent < 3:
|
||||
raise ValueError("public_exponent must be >= 3.")
|
||||
|
||||
if public_exponent & 1 == 0:
|
||||
raise ValueError("public_exponent must be odd.")
|
||||
|
||||
if key_size < 512:
|
||||
raise ValueError("key_size must be at least 512-bits.")
|
||||
|
||||
|
||||
def _check_private_key_components(p, q, private_exponent, dmp1, dmq1, iqmp,
|
||||
public_exponent, modulus):
|
||||
if modulus < 3:
|
||||
raise ValueError("modulus must be >= 3.")
|
||||
|
||||
if p >= modulus:
|
||||
raise ValueError("p must be < modulus.")
|
||||
|
||||
if q >= modulus:
|
||||
raise ValueError("q must be < modulus.")
|
||||
|
||||
if dmp1 >= modulus:
|
||||
raise ValueError("dmp1 must be < modulus.")
|
||||
|
||||
if dmq1 >= modulus:
|
||||
raise ValueError("dmq1 must be < modulus.")
|
||||
|
||||
if iqmp >= modulus:
|
||||
raise ValueError("iqmp must be < modulus.")
|
||||
|
||||
if private_exponent >= modulus:
|
||||
raise ValueError("private_exponent must be < modulus.")
|
||||
|
||||
if public_exponent < 3 or public_exponent >= modulus:
|
||||
raise ValueError("public_exponent must be >= 3 and < modulus.")
|
||||
|
||||
if public_exponent & 1 == 0:
|
||||
raise ValueError("public_exponent must be odd.")
|
||||
|
||||
if dmp1 & 1 == 0:
|
||||
raise ValueError("dmp1 must be odd.")
|
||||
|
||||
if dmq1 & 1 == 0:
|
||||
raise ValueError("dmq1 must be odd.")
|
||||
|
||||
if p * q != modulus:
|
||||
raise ValueError("p*q must equal modulus.")
|
||||
|
||||
|
||||
def _check_public_key_components(e, n):
|
||||
if n < 3:
|
||||
raise ValueError("n must be >= 3.")
|
||||
|
||||
if e < 3 or e >= n:
|
||||
raise ValueError("e must be >= 3 and < n.")
|
||||
|
||||
if e & 1 == 0:
|
||||
raise ValueError("e must be odd.")
|
||||
|
||||
|
||||
def _modinv(e, m):
|
||||
"""
|
||||
Modular Multiplicative Inverse. Returns x such that: (x*e) mod m == 1
|
||||
"""
|
||||
x1, y1, x2, y2 = 1, 0, 0, 1
|
||||
a, b = e, m
|
||||
while b > 0:
|
||||
q, r = divmod(a, b)
|
||||
xn, yn = x1 - q * x2, y1 - q * y2
|
||||
a, b, x1, y1, x2, y2 = b, r, x2, y2, xn, yn
|
||||
return x1 % m
|
||||
|
||||
|
||||
def rsa_crt_iqmp(p, q):
|
||||
"""
|
||||
Compute the CRT (q ** -1) % p value from RSA primes p and q.
|
||||
"""
|
||||
return _modinv(q, p)
|
||||
|
||||
|
||||
def rsa_crt_dmp1(private_exponent, p):
|
||||
"""
|
||||
Compute the CRT private_exponent % (p - 1) value from the RSA
|
||||
private_exponent and p.
|
||||
"""
|
||||
return private_exponent % (p - 1)
|
||||
|
||||
|
||||
def rsa_crt_dmq1(private_exponent, q):
|
||||
"""
|
||||
Compute the CRT private_exponent % (q - 1) value from the RSA
|
||||
private_exponent and q.
|
||||
"""
|
||||
return private_exponent % (q - 1)
|
||||
|
||||
|
||||
# Controls the number of iterations rsa_recover_prime_factors will perform
|
||||
# to obtain the prime factors. Each iteration increments by 2 so the actual
|
||||
# maximum attempts is half this number.
|
||||
_MAX_RECOVERY_ATTEMPTS = 1000
|
||||
|
||||
|
||||
def rsa_recover_prime_factors(n, e, d):
|
||||
"""
|
||||
Compute factors p and q from the private exponent d. We assume that n has
|
||||
no more than two factors. This function is adapted from code in PyCrypto.
|
||||
"""
|
||||
# See 8.2.2(i) in Handbook of Applied Cryptography.
|
||||
ktot = d * e - 1
|
||||
# The quantity d*e-1 is a multiple of phi(n), even,
|
||||
# and can be represented as t*2^s.
|
||||
t = ktot
|
||||
while t % 2 == 0:
|
||||
t = t // 2
|
||||
# Cycle through all multiplicative inverses in Zn.
|
||||
# The algorithm is non-deterministic, but there is a 50% chance
|
||||
# any candidate a leads to successful factoring.
|
||||
# See "Digitalized Signatures and Public Key Functions as Intractable
|
||||
# as Factorization", M. Rabin, 1979
|
||||
spotted = False
|
||||
a = 2
|
||||
while not spotted and a < _MAX_RECOVERY_ATTEMPTS:
|
||||
k = t
|
||||
# Cycle through all values a^{t*2^i}=a^k
|
||||
while k < ktot:
|
||||
cand = pow(a, k, n)
|
||||
# Check if a^k is a non-trivial root of unity (mod n)
|
||||
if cand != 1 and cand != (n - 1) and pow(cand, 2, n) == 1:
|
||||
# We have found a number such that (cand-1)(cand+1)=0 (mod n).
|
||||
# Either of the terms divides n.
|
||||
p = gcd(cand + 1, n)
|
||||
spotted = True
|
||||
break
|
||||
k *= 2
|
||||
# This value was not any good... let's try another!
|
||||
a += 2
|
||||
if not spotted:
|
||||
raise ValueError("Unable to compute factors p and q from exponent d.")
|
||||
# Found !
|
||||
q, r = divmod(n, p)
|
||||
assert r == 0
|
||||
|
||||
return (p, q)
|
||||
|
||||
|
||||
class RSAPrivateNumbers(object):
|
||||
def __init__(self, p, q, d, dmp1, dmq1, iqmp,
|
||||
public_numbers):
|
||||
if (
|
||||
not isinstance(p, six.integer_types) or
|
||||
not isinstance(q, six.integer_types) or
|
||||
not isinstance(d, six.integer_types) or
|
||||
not isinstance(dmp1, six.integer_types) or
|
||||
not isinstance(dmq1, six.integer_types) or
|
||||
not isinstance(iqmp, six.integer_types)
|
||||
):
|
||||
raise TypeError(
|
||||
"RSAPrivateNumbers p, q, d, dmp1, dmq1, iqmp arguments must"
|
||||
" all be an integers."
|
||||
)
|
||||
|
||||
if not isinstance(public_numbers, RSAPublicNumbers):
|
||||
raise TypeError(
|
||||
"RSAPrivateNumbers public_numbers must be an RSAPublicNumbers"
|
||||
" instance."
|
||||
)
|
||||
|
||||
self._p = p
|
||||
self._q = q
|
||||
self._d = d
|
||||
self._dmp1 = dmp1
|
||||
self._dmq1 = dmq1
|
||||
self._iqmp = iqmp
|
||||
self._public_numbers = public_numbers
|
||||
|
||||
p = utils.read_only_property("_p")
|
||||
q = utils.read_only_property("_q")
|
||||
d = utils.read_only_property("_d")
|
||||
dmp1 = utils.read_only_property("_dmp1")
|
||||
dmq1 = utils.read_only_property("_dmq1")
|
||||
iqmp = utils.read_only_property("_iqmp")
|
||||
public_numbers = utils.read_only_property("_public_numbers")
|
||||
|
||||
def private_key(self, backend):
|
||||
return backend.load_rsa_private_numbers(self)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, RSAPrivateNumbers):
|
||||
return NotImplemented
|
||||
|
||||
return (
|
||||
self.p == other.p and
|
||||
self.q == other.q and
|
||||
self.d == other.d and
|
||||
self.dmp1 == other.dmp1 and
|
||||
self.dmq1 == other.dmq1 and
|
||||
self.iqmp == other.iqmp and
|
||||
self.public_numbers == other.public_numbers
|
||||
)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
def __hash__(self):
|
||||
return hash((
|
||||
self.p,
|
||||
self.q,
|
||||
self.d,
|
||||
self.dmp1,
|
||||
self.dmq1,
|
||||
self.iqmp,
|
||||
self.public_numbers,
|
||||
))
|
||||
|
||||
|
||||
class RSAPublicNumbers(object):
|
||||
def __init__(self, e, n):
|
||||
if (
|
||||
not isinstance(e, six.integer_types) or
|
||||
not isinstance(n, six.integer_types)
|
||||
):
|
||||
raise TypeError("RSAPublicNumbers arguments must be integers.")
|
||||
|
||||
self._e = e
|
||||
self._n = n
|
||||
|
||||
e = utils.read_only_property("_e")
|
||||
n = utils.read_only_property("_n")
|
||||
|
||||
def public_key(self, backend):
|
||||
return backend.load_rsa_public_numbers(self)
|
||||
|
||||
def __repr__(self):
|
||||
return "<RSAPublicNumbers(e={0.e}, n={0.n})>".format(self)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, RSAPublicNumbers):
|
||||
return NotImplemented
|
||||
|
||||
return self.e == other.e and self.n == other.n
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
def __hash__(self):
|
||||
return hash((self.e, self.n))
|
||||
|
|
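# A minimal RSA sketch tying the interfaces above to the padding module,
# assuming default_backend() implements RSABackend.
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding, rsa

private_key = rsa.generate_private_key(
    public_exponent=65537, key_size=2048, backend=default_backend()
)
public_key = private_key.public_key()

oaep = padding.OAEP(
    mgf=padding.MGF1(algorithm=hashes.SHA1()),
    algorithm=hashes.SHA1(),
    label=None,
)
ciphertext = public_key.encrypt(b"encrypted data", oaep)
assert private_key.decrypt(ciphertext, oaep) == b"encrypted data"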
@ -0,0 +1,73 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import warnings
|
||||
|
||||
from pyasn1.codec.der import decoder, encoder
|
||||
from pyasn1.error import PyAsn1Error
|
||||
from pyasn1.type import namedtype, univ
|
||||
|
||||
import six
|
||||
|
||||
from cryptography import utils
|
||||
|
||||
|
||||
class _DSSSigValue(univ.Sequence):
|
||||
componentType = namedtype.NamedTypes(
|
||||
namedtype.NamedType('r', univ.Integer()),
|
||||
namedtype.NamedType('s', univ.Integer())
|
||||
)
|
||||
|
||||
|
||||
def decode_rfc6979_signature(signature):
|
||||
warnings.warn(
|
||||
"decode_rfc6979_signature is deprecated and will "
|
||||
"be removed in a future version, use decode_dss_signature instead "
|
||||
"instead.",
|
||||
utils.DeprecatedIn10,
|
||||
stacklevel=2
|
||||
)
|
||||
return decode_dss_signature(signature)
|
||||
|
||||
|
||||
def decode_dss_signature(signature):
|
||||
try:
|
||||
data, remaining = decoder.decode(signature, asn1Spec=_DSSSigValue())
|
||||
except PyAsn1Error:
|
||||
raise ValueError("Invalid signature data. Unable to decode ASN.1")
|
||||
|
||||
if remaining:
|
||||
raise ValueError(
|
||||
"The signature contains bytes after the end of the ASN.1 sequence."
|
||||
)
|
||||
|
||||
r = int(data.getComponentByName('r'))
|
||||
s = int(data.getComponentByName('s'))
|
||||
return (r, s)
|
||||
|
||||
|
||||
def encode_rfc6979_signature(r, s):
|
||||
warnings.warn(
|
||||
"encode_rfc6979_signature is deprecated and will "
|
||||
"be removed in a future version, use encode_dss_signature instead "
|
||||
"instead.",
|
||||
utils.DeprecatedIn10,
|
||||
stacklevel=2
|
||||
)
|
||||
return encode_dss_signature(r, s)
|
||||
|
||||
|
||||
def encode_dss_signature(r, s):
|
||||
if (
|
||||
not isinstance(r, six.integer_types) or
|
||||
not isinstance(s, six.integer_types)
|
||||
):
|
||||
raise ValueError("Both r and s must be integers")
|
||||
|
||||
sig = _DSSSigValue()
|
||||
sig.setComponentByName('r', r)
|
||||
sig.setComponentByName('s', s)
|
||||
return encoder.encode(sig)
|
||||
|
|
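# A minimal round-trip sketch for the (r, s) helpers above, assuming the
# upstream module layout cryptography.hazmat.primitives.asymmetric.utils.
from cryptography.hazmat.primitives.asymmetric.utils import (
    decode_dss_signature, encode_dss_signature
)

sig = encode_dss_signature(r=12345, s=67890)  # DER SEQUENCE of two INTEGERs
assert decode_dss_signature(sig) == (12345, 67890)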
@ -0,0 +1,20 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from cryptography.hazmat.primitives.ciphers.base import (
|
||||
AEADCipherContext, AEADEncryptionContext, BlockCipherAlgorithm, Cipher,
|
||||
CipherAlgorithm, CipherContext
|
||||
)
|
||||
|
||||
|
||||
__all__ = [
|
||||
"Cipher",
|
||||
"CipherAlgorithm",
|
||||
"BlockCipherAlgorithm",
|
||||
"CipherContext",
|
||||
"AEADCipherContext",
|
||||
"AEADEncryptionContext",
|
||||
]
|
||||
|
|
@ -0,0 +1,140 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.hazmat.primitives.ciphers import (
|
||||
BlockCipherAlgorithm, CipherAlgorithm
|
||||
)
|
||||
|
||||
|
||||
def _verify_key_size(algorithm, key):
|
||||
# Verify that the key size matches the expected key size
|
||||
if len(key) * 8 not in algorithm.key_sizes:
|
||||
raise ValueError("Invalid key size ({0}) for {1}.".format(
|
||||
len(key) * 8, algorithm.name
|
||||
))
|
||||
return key
|
||||
|
||||
|
||||
@utils.register_interface(BlockCipherAlgorithm)
|
||||
@utils.register_interface(CipherAlgorithm)
|
||||
class AES(object):
|
||||
name = "AES"
|
||||
block_size = 128
|
||||
key_sizes = frozenset([128, 192, 256])
|
||||
|
||||
def __init__(self, key):
|
||||
self.key = _verify_key_size(self, key)
|
||||
|
||||
@property
|
||||
def key_size(self):
|
||||
return len(self.key) * 8
|
||||
|
||||
|
||||
@utils.register_interface(BlockCipherAlgorithm)
|
||||
@utils.register_interface(CipherAlgorithm)
|
||||
class Camellia(object):
|
||||
name = "camellia"
|
||||
block_size = 128
|
||||
key_sizes = frozenset([128, 192, 256])
|
||||
|
||||
def __init__(self, key):
|
||||
self.key = _verify_key_size(self, key)
|
||||
|
||||
@property
|
||||
def key_size(self):
|
||||
return len(self.key) * 8
|
||||
|
||||
|
||||
@utils.register_interface(BlockCipherAlgorithm)
|
||||
@utils.register_interface(CipherAlgorithm)
|
||||
class TripleDES(object):
|
||||
name = "3DES"
|
||||
block_size = 64
|
||||
key_sizes = frozenset([64, 128, 192])
|
||||
|
||||
def __init__(self, key):
|
||||
if len(key) == 8:
|
||||
key += key + key
|
||||
elif len(key) == 16:
|
||||
key += key[:8]
|
||||
self.key = _verify_key_size(self, key)
|
||||
|
||||
@property
|
||||
def key_size(self):
|
||||
return len(self.key) * 8
|
||||
|
||||
|
||||
@utils.register_interface(BlockCipherAlgorithm)
|
||||
@utils.register_interface(CipherAlgorithm)
|
||||
class Blowfish(object):
|
||||
name = "Blowfish"
|
||||
block_size = 64
|
||||
key_sizes = frozenset(range(32, 449, 8))
|
||||
|
||||
def __init__(self, key):
|
||||
self.key = _verify_key_size(self, key)
|
||||
|
||||
@property
|
||||
def key_size(self):
|
||||
return len(self.key) * 8
|
||||
|
||||
|
||||
@utils.register_interface(BlockCipherAlgorithm)
|
||||
@utils.register_interface(CipherAlgorithm)
|
||||
class CAST5(object):
|
||||
name = "CAST5"
|
||||
block_size = 64
|
||||
key_sizes = frozenset(range(40, 129, 8))
|
||||
|
||||
def __init__(self, key):
|
||||
self.key = _verify_key_size(self, key)
|
||||
|
||||
@property
|
||||
def key_size(self):
|
||||
return len(self.key) * 8
|
||||
|
||||
|
||||
@utils.register_interface(CipherAlgorithm)
|
||||
class ARC4(object):
|
||||
name = "RC4"
|
||||
key_sizes = frozenset([40, 56, 64, 80, 128, 192, 256])
|
||||
|
||||
def __init__(self, key):
|
||||
self.key = _verify_key_size(self, key)
|
||||
|
||||
@property
|
||||
def key_size(self):
|
||||
return len(self.key) * 8
|
||||
|
||||
|
||||
@utils.register_interface(CipherAlgorithm)
|
||||
class IDEA(object):
|
||||
name = "IDEA"
|
||||
block_size = 64
|
||||
key_sizes = frozenset([128])
|
||||
|
||||
def __init__(self, key):
|
||||
self.key = _verify_key_size(self, key)
|
||||
|
||||
@property
|
||||
def key_size(self):
|
||||
return len(self.key) * 8
|
||||
|
||||
|
||||
@utils.register_interface(BlockCipherAlgorithm)
|
||||
@utils.register_interface(CipherAlgorithm)
|
||||
class SEED(object):
|
||||
name = "SEED"
|
||||
block_size = 128
|
||||
key_sizes = frozenset([128])
|
||||
|
||||
def __init__(self, key):
|
||||
self.key = _verify_key_size(self, key)
|
||||
|
||||
@property
|
||||
def key_size(self):
|
||||
return len(self.key) * 8
|
||||
|
|
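# A small sketch of the key-size validation performed above: key_sizes are
# expressed in bits while keys themselves are byte strings.
import os

from cryptography.hazmat.primitives.ciphers import algorithms

aes = algorithms.AES(os.urandom(32))  # 256-bit key
assert aes.key_size == 256

try:
    algorithms.AES(os.urandom(31))    # 248 bits: rejected by _verify_key_size
except ValueError as exc:
    print(exc)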
@ -0,0 +1,203 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import abc
|
||||
|
||||
import six
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.exceptions import (
|
||||
AlreadyFinalized, AlreadyUpdated, NotYetFinalized, UnsupportedAlgorithm,
|
||||
_Reasons
|
||||
)
|
||||
from cryptography.hazmat.backends.interfaces import CipherBackend
|
||||
from cryptography.hazmat.primitives.ciphers import modes
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class CipherAlgorithm(object):
|
||||
@abc.abstractproperty
|
||||
def name(self):
|
||||
"""
|
||||
A string naming this mode (e.g. "AES", "Camellia").
|
||||
"""
|
||||
|
||||
@abc.abstractproperty
|
||||
def key_size(self):
|
||||
"""
|
||||
The size of the key being used as an integer in bits (e.g. 128, 256).
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class BlockCipherAlgorithm(object):
|
||||
@abc.abstractproperty
|
||||
def block_size(self):
|
||||
"""
|
||||
The size of a block as an integer in bits (e.g. 64, 128).
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class CipherContext(object):
|
||||
@abc.abstractmethod
|
||||
def update(self, data):
|
||||
"""
|
||||
Processes the provided bytes through the cipher and returns the results
|
||||
as bytes.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def finalize(self):
|
||||
"""
|
||||
Returns the results of processing the final block as bytes.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class AEADCipherContext(object):
|
||||
@abc.abstractmethod
|
||||
def authenticate_additional_data(self, data):
|
||||
"""
|
||||
Authenticates the provided bytes.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class AEADEncryptionContext(object):
|
||||
@abc.abstractproperty
|
||||
def tag(self):
|
||||
"""
|
||||
Returns tag bytes. This is only available after encryption is
|
||||
finalized.
|
||||
"""
|
||||
|
||||
|
||||
class Cipher(object):
|
||||
def __init__(self, algorithm, mode, backend):
|
||||
if not isinstance(backend, CipherBackend):
|
||||
raise UnsupportedAlgorithm(
|
||||
"Backend object does not implement CipherBackend.",
|
||||
_Reasons.BACKEND_MISSING_INTERFACE
|
||||
)
|
||||
|
||||
if not isinstance(algorithm, CipherAlgorithm):
|
||||
raise TypeError("Expected interface of CipherAlgorithm.")
|
||||
|
||||
if mode is not None:
|
||||
mode.validate_for_algorithm(algorithm)
|
||||
|
||||
self.algorithm = algorithm
|
||||
self.mode = mode
|
||||
self._backend = backend
|
||||
|
||||
def encryptor(self):
|
||||
if isinstance(self.mode, modes.ModeWithAuthenticationTag):
|
||||
if self.mode.tag is not None:
|
||||
raise ValueError(
|
||||
"Authentication tag must be None when encrypting."
|
||||
)
|
||||
ctx = self._backend.create_symmetric_encryption_ctx(
|
||||
self.algorithm, self.mode
|
||||
)
|
||||
return self._wrap_ctx(ctx, encrypt=True)
|
||||
|
||||
def decryptor(self):
|
||||
if isinstance(self.mode, modes.ModeWithAuthenticationTag):
|
||||
if self.mode.tag is None:
|
||||
raise ValueError(
|
||||
"Authentication tag must be provided when decrypting."
|
||||
)
|
||||
ctx = self._backend.create_symmetric_decryption_ctx(
|
||||
self.algorithm, self.mode
|
||||
)
|
||||
return self._wrap_ctx(ctx, encrypt=False)
|
||||
|
||||
def _wrap_ctx(self, ctx, encrypt):
|
||||
if isinstance(self.mode, modes.ModeWithAuthenticationTag):
|
||||
if encrypt:
|
||||
return _AEADEncryptionContext(ctx)
|
||||
else:
|
||||
return _AEADCipherContext(ctx)
|
||||
else:
|
||||
return _CipherContext(ctx)
|
||||
|
||||
|
||||
@utils.register_interface(CipherContext)
|
||||
class _CipherContext(object):
|
||||
def __init__(self, ctx):
|
||||
self._ctx = ctx
|
||||
|
||||
def update(self, data):
|
||||
if self._ctx is None:
|
||||
raise AlreadyFinalized("Context was already finalized.")
|
||||
return self._ctx.update(data)
|
||||
|
||||
def finalize(self):
|
||||
if self._ctx is None:
|
||||
raise AlreadyFinalized("Context was already finalized.")
|
||||
data = self._ctx.finalize()
|
||||
self._ctx = None
|
||||
return data
|
||||
|
||||
|
||||
@utils.register_interface(AEADCipherContext)
|
||||
@utils.register_interface(CipherContext)
|
||||
class _AEADCipherContext(object):
|
||||
def __init__(self, ctx):
|
||||
self._ctx = ctx
|
||||
self._bytes_processed = 0
|
||||
self._aad_bytes_processed = 0
|
||||
self._tag = None
|
||||
self._updated = False
|
||||
|
||||
def update(self, data):
|
||||
if self._ctx is None:
|
||||
raise AlreadyFinalized("Context was already finalized.")
|
||||
self._updated = True
|
||||
self._bytes_processed += len(data)
|
||||
if self._bytes_processed > self._ctx._mode._MAX_ENCRYPTED_BYTES:
|
||||
raise ValueError(
|
||||
"{0} has a maximum encrypted byte limit of {1}".format(
|
||||
self._ctx._mode.name, self._ctx._mode._MAX_ENCRYPTED_BYTES
|
||||
)
|
||||
)
|
||||
|
||||
return self._ctx.update(data)
|
||||
|
||||
def finalize(self):
|
||||
if self._ctx is None:
|
||||
raise AlreadyFinalized("Context was already finalized.")
|
||||
data = self._ctx.finalize()
|
||||
self._tag = self._ctx.tag
|
||||
self._ctx = None
|
||||
return data
|
||||
|
||||
def authenticate_additional_data(self, data):
|
||||
if self._ctx is None:
|
||||
raise AlreadyFinalized("Context was already finalized.")
|
||||
if self._updated:
|
||||
raise AlreadyUpdated("Update has been called on this context.")
|
||||
|
||||
self._aad_bytes_processed += len(data)
|
||||
if self._aad_bytes_processed > self._ctx._mode._MAX_AAD_BYTES:
|
||||
raise ValueError(
|
||||
"{0} has a maximum AAD byte limit of {0}".format(
|
||||
self._ctx._mode.name, self._ctx._mode._MAX_AAD_BYTES
|
||||
)
|
||||
)
|
||||
|
||||
self._ctx.authenticate_additional_data(data)
|
||||
|
||||
|
||||
@utils.register_interface(AEADEncryptionContext)
|
||||
class _AEADEncryptionContext(_AEADCipherContext):
|
||||
@property
|
||||
def tag(self):
|
||||
if self._ctx is not None:
|
||||
raise NotYetFinalized("You must finalize encryption before "
|
||||
"getting the tag.")
|
||||
return self._tag
|
||||
|
|
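# A minimal symmetric encryption sketch using the Cipher wrapper above,
# assuming default_backend() implements CipherBackend. The 16-byte message
# matches the AES block size, so no separate padding layer is needed here.
import os

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

key = os.urandom(32)
iv = os.urandom(16)
cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend=default_backend())

encryptor = cipher.encryptor()
ct = encryptor.update(b"a secret message") + encryptor.finalize()

decryptor = cipher.decryptor()
assert decryptor.update(ct) + decryptor.finalize() == b"a secret message"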
@ -0,0 +1,164 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import abc
|
||||
|
||||
import six
|
||||
|
||||
from cryptography import utils
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class Mode(object):
|
||||
@abc.abstractproperty
|
||||
def name(self):
|
||||
"""
|
||||
A string naming this mode (e.g. "ECB", "CBC").
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def validate_for_algorithm(self, algorithm):
|
||||
"""
|
||||
Checks that all the necessary invariants of this (mode, algorithm)
|
||||
combination are met.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class ModeWithInitializationVector(object):
|
||||
@abc.abstractproperty
|
||||
def initialization_vector(self):
|
||||
"""
|
||||
The value of the initialization vector for this mode as bytes.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class ModeWithNonce(object):
|
||||
@abc.abstractproperty
|
||||
def nonce(self):
|
||||
"""
|
||||
The value of the nonce for this mode as bytes.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class ModeWithAuthenticationTag(object):
|
||||
@abc.abstractproperty
|
||||
def tag(self):
|
||||
"""
|
||||
The value of the tag supplied to the constructor of this mode.
|
||||
"""
|
||||
|
||||
|
||||
def _check_iv_length(self, algorithm):
|
||||
if len(self.initialization_vector) * 8 != algorithm.block_size:
|
||||
raise ValueError("Invalid IV size ({0}) for {1}.".format(
|
||||
len(self.initialization_vector), self.name
|
||||
))
|
||||
|
||||
|
||||
@utils.register_interface(Mode)
|
||||
@utils.register_interface(ModeWithInitializationVector)
|
||||
class CBC(object):
|
||||
name = "CBC"
|
||||
|
||||
def __init__(self, initialization_vector):
|
||||
self._initialization_vector = initialization_vector
|
||||
|
||||
initialization_vector = utils.read_only_property("_initialization_vector")
|
||||
validate_for_algorithm = _check_iv_length
|
||||
|
||||
|
||||
@utils.register_interface(Mode)
|
||||
class ECB(object):
|
||||
name = "ECB"
|
||||
|
||||
def validate_for_algorithm(self, algorithm):
|
||||
pass
|
||||
|
||||
|
||||
@utils.register_interface(Mode)
|
||||
@utils.register_interface(ModeWithInitializationVector)
|
||||
class OFB(object):
|
||||
name = "OFB"
|
||||
|
||||
def __init__(self, initialization_vector):
|
||||
self._initialization_vector = initialization_vector
|
||||
|
||||
initialization_vector = utils.read_only_property("_initialization_vector")
|
||||
validate_for_algorithm = _check_iv_length
|
||||
|
||||
|
||||
@utils.register_interface(Mode)
|
||||
@utils.register_interface(ModeWithInitializationVector)
|
||||
class CFB(object):
|
||||
name = "CFB"
|
||||
|
||||
def __init__(self, initialization_vector):
|
||||
self._initialization_vector = initialization_vector
|
||||
|
||||
initialization_vector = utils.read_only_property("_initialization_vector")
|
||||
validate_for_algorithm = _check_iv_length
|
||||
|
||||
|
||||
@utils.register_interface(Mode)
|
||||
@utils.register_interface(ModeWithInitializationVector)
|
||||
class CFB8(object):
|
||||
name = "CFB8"
|
||||
|
||||
def __init__(self, initialization_vector):
|
||||
self._initialization_vector = initialization_vector
|
||||
|
||||
initialization_vector = utils.read_only_property("_initialization_vector")
|
||||
validate_for_algorithm = _check_iv_length
|
||||
|
||||
|
||||
@utils.register_interface(Mode)
|
||||
@utils.register_interface(ModeWithNonce)
|
||||
class CTR(object):
|
||||
name = "CTR"
|
||||
|
||||
def __init__(self, nonce):
|
||||
self._nonce = nonce
|
||||
|
||||
nonce = utils.read_only_property("_nonce")
|
||||
|
||||
def validate_for_algorithm(self, algorithm):
|
||||
if len(self.nonce) * 8 != algorithm.block_size:
|
||||
raise ValueError("Invalid nonce size ({0}) for {1}.".format(
|
||||
len(self.nonce), self.name
|
||||
))
|
||||
|
||||
|
||||
@utils.register_interface(Mode)
|
||||
@utils.register_interface(ModeWithInitializationVector)
|
||||
@utils.register_interface(ModeWithAuthenticationTag)
|
||||
class GCM(object):
|
||||
name = "GCM"
|
||||
_MAX_ENCRYPTED_BYTES = (2 ** 39 - 256) // 8
|
||||
_MAX_AAD_BYTES = (2 ** 64) // 8
|
||||
|
||||
def __init__(self, initialization_vector, tag=None, min_tag_length=16):
|
||||
# len(initialization_vector) must be in [1, 2 ** 64), but it's impossible
|
||||
# to actually construct a bytes object that large, so we don't check
|
||||
# for it
|
||||
if min_tag_length < 4:
|
||||
raise ValueError("min_tag_length must be >= 4")
|
||||
if tag is not None and len(tag) < min_tag_length:
|
||||
raise ValueError(
|
||||
"Authentication tag must be {0} bytes or longer.".format(
|
||||
min_tag_length)
|
||||
)
|
||||
|
||||
self._initialization_vector = initialization_vector
|
||||
self._tag = tag
|
||||
|
||||
tag = utils.read_only_property("_tag")
|
||||
initialization_vector = utils.read_only_property("_initialization_vector")
|
||||
|
||||
def validate_for_algorithm(self, algorithm):
|
||||
pass
|
||||
|
|
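# A minimal AEAD sketch with the GCM mode above: the tag is produced on
# encryption and must be supplied back to GCM() for decryption, where
# finalize() verifies it. Assumes default_backend() supports GCM.
import os

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

key = os.urandom(32)
iv = os.urandom(12)

encryptor = Cipher(
    algorithms.AES(key), modes.GCM(iv), default_backend()
).encryptor()
encryptor.authenticate_additional_data(b"associated data")
ct = encryptor.update(b"secret payload") + encryptor.finalize()
tag = encryptor.tag

decryptor = Cipher(
    algorithms.AES(key), modes.GCM(iv, tag), default_backend()
).decryptor()
decryptor.authenticate_additional_data(b"associated data")
assert decryptor.update(ct) + decryptor.finalize() == b"secret payload"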
@ -0,0 +1,66 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.exceptions import (
|
||||
AlreadyFinalized, UnsupportedAlgorithm, _Reasons
|
||||
)
|
||||
from cryptography.hazmat.backends.interfaces import CMACBackend
|
||||
from cryptography.hazmat.primitives import ciphers, interfaces
|
||||
|
||||
|
||||
@utils.register_interface(interfaces.MACContext)
|
||||
class CMAC(object):
|
||||
def __init__(self, algorithm, backend, ctx=None):
|
||||
if not isinstance(backend, CMACBackend):
|
||||
raise UnsupportedAlgorithm(
|
||||
"Backend object does not implement CMACBackend.",
|
||||
_Reasons.BACKEND_MISSING_INTERFACE
|
||||
)
|
||||
|
||||
if not isinstance(algorithm, ciphers.BlockCipherAlgorithm):
|
||||
raise TypeError(
|
||||
"Expected instance of BlockCipherAlgorithm."
|
||||
)
|
||||
self._algorithm = algorithm
|
||||
|
||||
self._backend = backend
|
||||
if ctx is None:
|
||||
self._ctx = self._backend.create_cmac_ctx(self._algorithm)
|
||||
else:
|
||||
self._ctx = ctx
|
||||
|
||||
def update(self, data):
|
||||
if self._ctx is None:
|
||||
raise AlreadyFinalized("Context was already finalized.")
|
||||
if not isinstance(data, bytes):
|
||||
raise TypeError("data must be bytes.")
|
||||
self._ctx.update(data)
|
||||
|
||||
def finalize(self):
|
||||
if self._ctx is None:
|
||||
raise AlreadyFinalized("Context was already finalized.")
|
||||
digest = self._ctx.finalize()
|
||||
self._ctx = None
|
||||
return digest
|
||||
|
||||
def verify(self, signature):
|
||||
if not isinstance(signature, bytes):
|
||||
raise TypeError("signature must be bytes.")
|
||||
if self._ctx is None:
|
||||
raise AlreadyFinalized("Context was already finalized.")
|
||||
|
||||
ctx, self._ctx = self._ctx, None
|
||||
ctx.verify(signature)
|
||||
|
||||
def copy(self):
|
||||
if self._ctx is None:
|
||||
raise AlreadyFinalized("Context was already finalized.")
|
||||
return CMAC(
|
||||
self._algorithm,
|
||||
backend=self._backend,
|
||||
ctx=self._ctx.copy()
|
||||
)
|
||||
|
|
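# A minimal CMAC sketch for the class above; AES is the usual block cipher
# choice, and verify() is the constant-time alternative to comparing digests.
import os

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import cmac
from cryptography.hazmat.primitives.ciphers import algorithms

key = os.urandom(16)
c = cmac.CMAC(algorithms.AES(key), backend=default_backend())
c.update(b"message to authenticate")
mac = c.finalize()

check = cmac.CMAC(algorithms.AES(key), backend=default_backend())
check.update(b"message to authenticate")
check.verify(mac)  # raises InvalidSignature if the MAC does not match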
@ -0,0 +1,26 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import hmac
|
||||
|
||||
from cryptography.hazmat.bindings._constant_time import lib
|
||||
|
||||
|
||||
if hasattr(hmac, "compare_digest"):
|
||||
def bytes_eq(a, b):
|
||||
if not isinstance(a, bytes) or not isinstance(b, bytes):
|
||||
raise TypeError("a and b must be bytes.")
|
||||
|
||||
return hmac.compare_digest(a, b)
|
||||
|
||||
else:
|
||||
def bytes_eq(a, b):
|
||||
if not isinstance(a, bytes) or not isinstance(b, bytes):
|
||||
raise TypeError("a and b must be bytes.")
|
||||
|
||||
return lib.Cryptography_constant_time_bytes_eq(
|
||||
a, len(a), b, len(b)
|
||||
) == 1
|
||||
|
|
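# A minimal sketch of bytes_eq above: a timing-independent comparison for
# MACs and other secrets, in place of the short-circuiting == operator.
from cryptography.hazmat.primitives import constant_time

assert constant_time.bytes_eq(b"secret", b"secret")
assert not constant_time.bytes_eq(b"secret", b"secre7")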
@ -0,0 +1,163 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import abc
|
||||
|
||||
import six
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.exceptions import (
|
||||
AlreadyFinalized, UnsupportedAlgorithm, _Reasons
|
||||
)
|
||||
from cryptography.hazmat.backends.interfaces import HashBackend
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class HashAlgorithm(object):
|
||||
@abc.abstractproperty
|
||||
def name(self):
|
||||
"""
|
||||
A string naming this algorithm (e.g. "sha256", "md5").
|
||||
"""
|
||||
|
||||
@abc.abstractproperty
|
||||
def digest_size(self):
|
||||
"""
|
||||
The size of the resulting digest in bytes.
|
||||
"""
|
||||
|
||||
@abc.abstractproperty
|
||||
def block_size(self):
|
||||
"""
|
||||
The internal block size of the hash algorithm in bytes.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class HashContext(object):
|
||||
@abc.abstractproperty
|
||||
def algorithm(self):
|
||||
"""
|
||||
A HashAlgorithm that will be used by this context.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def update(self, data):
|
||||
"""
|
||||
Processes the provided bytes through the hash.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def finalize(self):
|
||||
"""
|
||||
Finalizes the hash context and returns the hash digest as bytes.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def copy(self):
|
||||
"""
|
||||
Return a HashContext that is a copy of the current context.
|
||||
"""
|
||||
|
||||
|
||||
@utils.register_interface(HashContext)
|
||||
class Hash(object):
|
||||
def __init__(self, algorithm, backend, ctx=None):
|
||||
if not isinstance(backend, HashBackend):
|
||||
raise UnsupportedAlgorithm(
|
||||
"Backend object does not implement HashBackend.",
|
||||
_Reasons.BACKEND_MISSING_INTERFACE
|
||||
)
|
||||
|
||||
if not isinstance(algorithm, HashAlgorithm):
|
||||
raise TypeError("Expected instance of hashes.HashAlgorithm.")
|
||||
self._algorithm = algorithm
|
||||
|
||||
self._backend = backend
|
||||
|
||||
if ctx is None:
|
||||
self._ctx = self._backend.create_hash_ctx(self.algorithm)
|
||||
else:
|
||||
self._ctx = ctx
|
||||
|
||||
algorithm = utils.read_only_property("_algorithm")
|
||||
|
||||
def update(self, data):
|
||||
if self._ctx is None:
|
||||
raise AlreadyFinalized("Context was already finalized.")
|
||||
if not isinstance(data, bytes):
|
||||
raise TypeError("data must be bytes.")
|
||||
self._ctx.update(data)
|
||||
|
||||
def copy(self):
|
||||
if self._ctx is None:
|
||||
raise AlreadyFinalized("Context was already finalized.")
|
||||
return Hash(
|
||||
self.algorithm, backend=self._backend, ctx=self._ctx.copy()
|
||||
)
|
||||
|
||||
def finalize(self):
|
||||
if self._ctx is None:
|
||||
raise AlreadyFinalized("Context was already finalized.")
|
||||
digest = self._ctx.finalize()
|
||||
self._ctx = None
|
||||
return digest
|
||||
|
||||
|
||||
@utils.register_interface(HashAlgorithm)
|
||||
class SHA1(object):
|
||||
name = "sha1"
|
||||
digest_size = 20
|
||||
block_size = 64
|
||||
|
||||
|
||||
@utils.register_interface(HashAlgorithm)
|
||||
class SHA224(object):
|
||||
name = "sha224"
|
||||
digest_size = 28
|
||||
block_size = 64
|
||||
|
||||
|
||||
@utils.register_interface(HashAlgorithm)
|
||||
class SHA256(object):
|
||||
name = "sha256"
|
||||
digest_size = 32
|
||||
block_size = 64
|
||||
|
||||
|
||||
@utils.register_interface(HashAlgorithm)
|
||||
class SHA384(object):
|
||||
name = "sha384"
|
||||
digest_size = 48
|
||||
block_size = 128
|
||||
|
||||
|
||||
@utils.register_interface(HashAlgorithm)
|
||||
class SHA512(object):
|
||||
name = "sha512"
|
||||
digest_size = 64
|
||||
block_size = 128
|
||||
|
||||
|
||||
@utils.register_interface(HashAlgorithm)
|
||||
class RIPEMD160(object):
|
||||
name = "ripemd160"
|
||||
digest_size = 20
|
||||
block_size = 64
|
||||
|
||||
|
||||
@utils.register_interface(HashAlgorithm)
|
||||
class Whirlpool(object):
|
||||
name = "whirlpool"
|
||||
digest_size = 64
|
||||
block_size = 64
|
||||
|
||||
|
||||
@utils.register_interface(HashAlgorithm)
|
||||
class MD5(object):
|
||||
name = "md5"
|
||||
digest_size = 16
|
||||
block_size = 64
|
||||
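A minimal sketch of the Hash context above, assuming the OpenSSL default_backend is available.

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes

digest = hashes.Hash(hashes.SHA256(), backend=default_backend())
digest.update(b"abc")
digest.update(b"123")      # update() may be called any number of times before finalize()
value = digest.finalize()  # 32 bytes, equal to SHA-256 of b"abc123"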
|
|
@ -0,0 +1,69 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.exceptions import (
|
||||
AlreadyFinalized, UnsupportedAlgorithm, _Reasons
|
||||
)
|
||||
from cryptography.hazmat.backends.interfaces import HMACBackend
|
||||
from cryptography.hazmat.primitives import hashes, interfaces
|
||||
|
||||
|
||||
@utils.register_interface(interfaces.MACContext)
|
||||
@utils.register_interface(hashes.HashContext)
|
||||
class HMAC(object):
|
||||
def __init__(self, key, algorithm, backend, ctx=None):
|
||||
if not isinstance(backend, HMACBackend):
|
||||
raise UnsupportedAlgorithm(
|
||||
"Backend object does not implement HMACBackend.",
|
||||
_Reasons.BACKEND_MISSING_INTERFACE
|
||||
)
|
||||
|
||||
if not isinstance(algorithm, hashes.HashAlgorithm):
|
||||
raise TypeError("Expected instance of hashes.HashAlgorithm.")
|
||||
self._algorithm = algorithm
|
||||
|
||||
self._backend = backend
|
||||
self._key = key
|
||||
if ctx is None:
|
||||
self._ctx = self._backend.create_hmac_ctx(key, self.algorithm)
|
||||
else:
|
||||
self._ctx = ctx
|
||||
|
||||
algorithm = utils.read_only_property("_algorithm")
|
||||
|
||||
def update(self, data):
|
||||
if self._ctx is None:
|
||||
raise AlreadyFinalized("Context was already finalized.")
|
||||
if not isinstance(data, bytes):
|
||||
raise TypeError("data must be bytes.")
|
||||
self._ctx.update(data)
|
||||
|
||||
def copy(self):
|
||||
if self._ctx is None:
|
||||
raise AlreadyFinalized("Context was already finalized.")
|
||||
return HMAC(
|
||||
self._key,
|
||||
self.algorithm,
|
||||
backend=self._backend,
|
||||
ctx=self._ctx.copy()
|
||||
)
|
||||
|
||||
def finalize(self):
|
||||
if self._ctx is None:
|
||||
raise AlreadyFinalized("Context was already finalized.")
|
||||
digest = self._ctx.finalize()
|
||||
self._ctx = None
|
||||
return digest
|
||||
|
||||
def verify(self, signature):
|
||||
if not isinstance(signature, bytes):
|
||||
raise TypeError("signature must be bytes.")
|
||||
if self._ctx is None:
|
||||
raise AlreadyFinalized("Context was already finalized.")
|
||||
|
||||
ctx, self._ctx = self._ctx, None
|
||||
ctx.verify(signature)
|
||||
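A minimal sign-and-verify sketch for the HMAC context above, assuming the OpenSSL default_backend; the key and payload are placeholders.

import os

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, hmac

key = os.urandom(32)
h = hmac.HMAC(key, hashes.SHA256(), backend=default_backend())
h.update(b"payload")
tag = h.finalize()

# The receiver verifies with the same key; verify() raises on mismatch.
h = hmac.HMAC(key, hashes.SHA256(), backend=default_backend())
h.update(b"payload")
h.verify(tag)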
|
|
@ -0,0 +1,37 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import abc
|
||||
|
||||
import six
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class MACContext(object):
|
||||
@abc.abstractmethod
|
||||
def update(self, data):
|
||||
"""
|
||||
Processes the provided bytes.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def finalize(self):
|
||||
"""
|
||||
Returns the message authentication code as bytes.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def copy(self):
|
||||
"""
|
||||
Return a MACContext that is a copy of the current context.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def verify(self, signature):
|
||||
"""
|
||||
Checks if the generated message authentication code matches the
|
||||
signature.
|
||||
"""
|
||||
|
|
@ -0,0 +1,26 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import abc
|
||||
|
||||
import six
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class KeyDerivationFunction(object):
|
||||
@abc.abstractmethod
|
||||
def derive(self, key_material):
|
||||
"""
|
||||
Deterministically generates and returns a new key based on the existing
|
||||
key material.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def verify(self, key_material, expected_key):
|
||||
"""
|
||||
Checks whether the key generated by the key material matches the
|
||||
expected derived key. Raises an exception if they do not match.
|
||||
"""
|
||||
|
|
@ -0,0 +1,125 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import struct
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.exceptions import (
|
||||
AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons
|
||||
)
|
||||
from cryptography.hazmat.backends.interfaces import HMACBackend
|
||||
from cryptography.hazmat.backends.interfaces import HashBackend
|
||||
from cryptography.hazmat.primitives import constant_time, hashes, hmac
|
||||
from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
|
||||
|
||||
|
||||
def _int_to_u32be(n):
|
||||
return struct.pack('>I', n)
|
||||
|
||||
|
||||
def _common_args_checks(algorithm, length, otherinfo):
|
||||
max_length = algorithm.digest_size * (2 ** 32 - 1)
|
||||
if length > max_length:
|
||||
raise ValueError(
|
||||
"Can not derive keys larger than {0} bits.".format(
|
||||
max_length
|
||||
))
|
||||
if not (otherinfo is None or isinstance(otherinfo, bytes)):
|
||||
raise TypeError("otherinfo must be bytes.")
|
||||
|
||||
|
||||
def _concatkdf_derive(key_material, length, auxfn, otherinfo):
|
||||
if not isinstance(key_material, bytes):
|
||||
raise TypeError("key_material must be bytes.")
|
||||
|
||||
output = [b""]
|
||||
outlen = 0
|
||||
counter = 1
|
||||
|
||||
while (length > outlen):
|
||||
h = auxfn()
|
||||
h.update(_int_to_u32be(counter))
|
||||
h.update(key_material)
|
||||
h.update(otherinfo)
|
||||
output.append(h.finalize())
|
||||
outlen += len(output[-1])
|
||||
counter += 1
|
||||
|
||||
return b"".join(output)[:length]
|
||||
|
||||
|
||||
@utils.register_interface(KeyDerivationFunction)
|
||||
class ConcatKDFHash(object):
|
||||
def __init__(self, algorithm, length, otherinfo, backend):
|
||||
|
||||
_common_args_checks(algorithm, length, otherinfo)
|
||||
self._algorithm = algorithm
|
||||
self._length = length
|
||||
self._otherinfo = otherinfo
|
||||
if self._otherinfo is None:
|
||||
self._otherinfo = b""
|
||||
|
||||
if not isinstance(backend, HashBackend):
|
||||
raise UnsupportedAlgorithm(
|
||||
"Backend object does not implement HashBackend.",
|
||||
_Reasons.BACKEND_MISSING_INTERFACE
|
||||
)
|
||||
self._backend = backend
|
||||
self._used = False
|
||||
|
||||
def _hash(self):
|
||||
return hashes.Hash(self._algorithm, self._backend)
|
||||
|
||||
def derive(self, key_material):
|
||||
if self._used:
|
||||
raise AlreadyFinalized
|
||||
self._used = True
|
||||
return _concatkdf_derive(key_material, self._length,
|
||||
self._hash, self._otherinfo)
|
||||
|
||||
def verify(self, key_material, expected_key):
|
||||
if not constant_time.bytes_eq(self.derive(key_material), expected_key):
|
||||
raise InvalidKey
|
||||
|
||||
|
||||
@utils.register_interface(KeyDerivationFunction)
|
||||
class ConcatKDFHMAC(object):
|
||||
def __init__(self, algorithm, length, salt, otherinfo, backend):
|
||||
|
||||
_common_args_checks(algorithm, length, otherinfo)
|
||||
self._algorithm = algorithm
|
||||
self._length = length
|
||||
self._otherinfo = otherinfo
|
||||
if self._otherinfo is None:
|
||||
self._otherinfo = b""
|
||||
|
||||
if not (salt is None or isinstance(salt, bytes)):
|
||||
raise TypeError("salt must be bytes.")
|
||||
if salt is None:
|
||||
salt = b"\x00" * algorithm.block_size
|
||||
self._salt = salt
|
||||
|
||||
if not isinstance(backend, HMACBackend):
|
||||
raise UnsupportedAlgorithm(
|
||||
"Backend object does not implement HMACBackend.",
|
||||
_Reasons.BACKEND_MISSING_INTERFACE
|
||||
)
|
||||
self._backend = backend
|
||||
self._used = False
|
||||
|
||||
def _hmac(self):
|
||||
return hmac.HMAC(self._salt, self._algorithm, self._backend)
|
||||
|
||||
def derive(self, key_material):
|
||||
if self._used:
|
||||
raise AlreadyFinalized
|
||||
self._used = True
|
||||
return _concatkdf_derive(key_material, self._length,
|
||||
self._hmac, self._otherinfo)
|
||||
|
||||
def verify(self, key_material, expected_key):
|
||||
if not constant_time.bytes_eq(self.derive(key_material), expected_key):
|
||||
raise InvalidKey
|
||||
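A usage sketch for ConcatKDFHash, assuming the OpenSSL default_backend; the shared secret and otherinfo values are placeholders.

import os

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.concatkdf import ConcatKDFHash

shared_secret = os.urandom(32)   # e.g. the output of a key agreement
otherinfo = b"concatkdf-example"
ckdf = ConcatKDFHash(hashes.SHA256(), length=32, otherinfo=otherinfo,
                     backend=default_backend())
key = ckdf.derive(shared_secret)

# Each instance is single-use, so verify() needs a fresh object.
ckdf = ConcatKDFHash(hashes.SHA256(), length=32, otherinfo=otherinfo,
                     backend=default_backend())
ckdf.verify(shared_secret, key)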
|
|
@ -0,0 +1,116 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import six
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.exceptions import (
|
||||
AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons
|
||||
)
|
||||
from cryptography.hazmat.backends.interfaces import HMACBackend
|
||||
from cryptography.hazmat.primitives import constant_time, hmac
|
||||
from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
|
||||
|
||||
|
||||
@utils.register_interface(KeyDerivationFunction)
|
||||
class HKDF(object):
|
||||
def __init__(self, algorithm, length, salt, info, backend):
|
||||
if not isinstance(backend, HMACBackend):
|
||||
raise UnsupportedAlgorithm(
|
||||
"Backend object does not implement HMACBackend.",
|
||||
_Reasons.BACKEND_MISSING_INTERFACE
|
||||
)
|
||||
|
||||
self._algorithm = algorithm
|
||||
|
||||
if not (salt is None or isinstance(salt, bytes)):
|
||||
raise TypeError("salt must be bytes.")
|
||||
|
||||
if salt is None:
|
||||
salt = b"\x00" * (self._algorithm.digest_size // 8)
|
||||
|
||||
self._salt = salt
|
||||
|
||||
self._backend = backend
|
||||
|
||||
self._hkdf_expand = HKDFExpand(self._algorithm, length, info, backend)
|
||||
|
||||
def _extract(self, key_material):
|
||||
h = hmac.HMAC(self._salt, self._algorithm, backend=self._backend)
|
||||
h.update(key_material)
|
||||
return h.finalize()
|
||||
|
||||
def derive(self, key_material):
|
||||
if not isinstance(key_material, bytes):
|
||||
raise TypeError("key_material must be bytes.")
|
||||
|
||||
return self._hkdf_expand.derive(self._extract(key_material))
|
||||
|
||||
def verify(self, key_material, expected_key):
|
||||
if not constant_time.bytes_eq(self.derive(key_material), expected_key):
|
||||
raise InvalidKey
|
||||
|
||||
|
||||
@utils.register_interface(KeyDerivationFunction)
|
||||
class HKDFExpand(object):
|
||||
def __init__(self, algorithm, length, info, backend):
|
||||
if not isinstance(backend, HMACBackend):
|
||||
raise UnsupportedAlgorithm(
|
||||
"Backend object does not implement HMACBackend.",
|
||||
_Reasons.BACKEND_MISSING_INTERFACE
|
||||
)
|
||||
|
||||
self._algorithm = algorithm
|
||||
|
||||
self._backend = backend
|
||||
|
||||
max_length = 255 * (algorithm.digest_size // 8)
|
||||
|
||||
if length > max_length:
|
||||
raise ValueError(
|
||||
"Can not derive keys larger than {0} octets.".format(
|
||||
max_length
|
||||
))
|
||||
|
||||
self._length = length
|
||||
|
||||
if not (info is None or isinstance(info, bytes)):
|
||||
raise TypeError("info must be bytes.")
|
||||
|
||||
if info is None:
|
||||
info = b""
|
||||
|
||||
self._info = info
|
||||
|
||||
self._used = False
|
||||
|
||||
def _expand(self, key_material):
|
||||
output = [b""]
|
||||
counter = 1
|
||||
|
||||
while (self._algorithm.digest_size // 8) * len(output) < self._length:
|
||||
h = hmac.HMAC(key_material, self._algorithm, backend=self._backend)
|
||||
h.update(output[-1])
|
||||
h.update(self._info)
|
||||
h.update(six.int2byte(counter))
|
||||
output.append(h.finalize())
|
||||
counter += 1
|
||||
|
||||
return b"".join(output)[:self._length]
|
||||
|
||||
def derive(self, key_material):
|
||||
if not isinstance(key_material, bytes):
|
||||
raise TypeError("key_material must be bytes.")
|
||||
|
||||
if self._used:
|
||||
raise AlreadyFinalized
|
||||
|
||||
self._used = True
|
||||
return self._expand(key_material)
|
||||
|
||||
def verify(self, key_material, expected_key):
|
||||
if not constant_time.bytes_eq(self.derive(key_material), expected_key):
|
||||
raise InvalidKey
|
||||
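A usage sketch for HKDF, assuming the OpenSSL default_backend; the input key material, salt, and info are placeholders.

import os

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.hkdf import HKDF

input_key_material = os.urandom(32)
salt = os.urandom(16)
hkdf = HKDF(hashes.SHA256(), length=32, salt=salt, info=b"handshake data",
            backend=default_backend())
key = hkdf.derive(input_key_material)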
|
|
@ -0,0 +1,58 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.exceptions import (
|
||||
AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons
|
||||
)
|
||||
from cryptography.hazmat.backends.interfaces import PBKDF2HMACBackend
|
||||
from cryptography.hazmat.primitives import constant_time
|
||||
from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
|
||||
|
||||
|
||||
@utils.register_interface(KeyDerivationFunction)
|
||||
class PBKDF2HMAC(object):
|
||||
def __init__(self, algorithm, length, salt, iterations, backend):
|
||||
if not isinstance(backend, PBKDF2HMACBackend):
|
||||
raise UnsupportedAlgorithm(
|
||||
"Backend object does not implement PBKDF2HMACBackend.",
|
||||
_Reasons.BACKEND_MISSING_INTERFACE
|
||||
)
|
||||
|
||||
if not backend.pbkdf2_hmac_supported(algorithm):
|
||||
raise UnsupportedAlgorithm(
|
||||
"{0} is not supported for PBKDF2 by this backend.".format(
|
||||
algorithm.name),
|
||||
_Reasons.UNSUPPORTED_HASH
|
||||
)
|
||||
self._used = False
|
||||
self._algorithm = algorithm
|
||||
self._length = length
|
||||
if not isinstance(salt, bytes):
|
||||
raise TypeError("salt must be bytes.")
|
||||
self._salt = salt
|
||||
self._iterations = iterations
|
||||
self._backend = backend
|
||||
|
||||
def derive(self, key_material):
|
||||
if self._used:
|
||||
raise AlreadyFinalized("PBKDF2 instances can only be used once.")
|
||||
self._used = True
|
||||
|
||||
if not isinstance(key_material, bytes):
|
||||
raise TypeError("key_material must be bytes.")
|
||||
return self._backend.derive_pbkdf2_hmac(
|
||||
self._algorithm,
|
||||
self._length,
|
||||
self._salt,
|
||||
self._iterations,
|
||||
key_material
|
||||
)
|
||||
|
||||
def verify(self, key_material, expected_key):
|
||||
derived_key = self.derive(key_material)
|
||||
if not constant_time.bytes_eq(derived_key, expected_key):
|
||||
raise InvalidKey("Keys do not match.")
|
||||
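A usage sketch for PBKDF2HMAC, assuming the OpenSSL default_backend; the password, salt, and iteration count are illustrative.

import os

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC

salt = os.urandom(16)                 # store alongside the derived key
kdf = PBKDF2HMAC(hashes.SHA256(), length=32, salt=salt, iterations=100000,
                 backend=default_backend())
key = kdf.derive(b"correct horse battery staple")

# Checking a password later uses a fresh instance with the stored salt.
kdf = PBKDF2HMAC(hashes.SHA256(), length=32, salt=salt, iterations=100000,
                 backend=default_backend())
kdf.verify(b"correct horse battery staple", key)   # raises InvalidKey on mismatch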
|
|
@ -0,0 +1,70 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import struct
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.exceptions import (
|
||||
AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons
|
||||
)
|
||||
from cryptography.hazmat.backends.interfaces import HashBackend
|
||||
from cryptography.hazmat.primitives import constant_time, hashes
|
||||
from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
|
||||
|
||||
|
||||
def _int_to_u32be(n):
|
||||
return struct.pack('>I', n)
|
||||
|
||||
|
||||
@utils.register_interface(KeyDerivationFunction)
|
||||
class X963KDF(object):
|
||||
def __init__(self, algorithm, length, sharedinfo, backend):
|
||||
|
||||
max_len = algorithm.digest_size * (2 ** 32 - 1)
|
||||
if length > max_len:
|
||||
raise ValueError(
|
||||
"Can not derive keys larger than {0} bits.".format(max_len))
|
||||
if not (sharedinfo is None or isinstance(sharedinfo, bytes)):
|
||||
raise TypeError("sharedinfo must be bytes.")
|
||||
self._algorithm = algorithm
|
||||
self._length = length
|
||||
self._sharedinfo = sharedinfo
|
||||
|
||||
if not isinstance(backend, HashBackend):
|
||||
raise UnsupportedAlgorithm(
|
||||
"Backend object does not implement HashBackend.",
|
||||
_Reasons.BACKEND_MISSING_INTERFACE
|
||||
)
|
||||
self._backend = backend
|
||||
self._used = False
|
||||
|
||||
def derive(self, key_material):
|
||||
if self._used:
|
||||
raise AlreadyFinalized
|
||||
self._used = True
|
||||
|
||||
if not isinstance(key_material, bytes):
|
||||
raise TypeError("key_material must be bytes.")
|
||||
|
||||
output = [b""]
|
||||
outlen = 0
|
||||
counter = 1
|
||||
|
||||
while self._length > outlen:
|
||||
h = hashes.Hash(self._algorithm, self._backend)
|
||||
h.update(key_material)
|
||||
h.update(_int_to_u32be(counter))
|
||||
if self._sharedinfo is not None:
|
||||
h.update(self._sharedinfo)
|
||||
output.append(h.finalize())
|
||||
outlen += len(output[-1])
|
||||
counter += 1
|
||||
|
||||
return b"".join(output)[:self._length]
|
||||
|
||||
def verify(self, key_material, expected_key):
|
||||
if not constant_time.bytes_eq(self.derive(key_material), expected_key):
|
||||
raise InvalidKey
|
||||
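A usage sketch for X963KDF, assuming the OpenSSL default_backend; the shared secret and sharedinfo are placeholders.

import os

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.x963kdf import X963KDF

shared_secret = os.urandom(32)
xkdf = X963KDF(hashes.SHA256(), length=32, sharedinfo=b"ANSI X9.63 example",
               backend=default_backend())
key = xkdf.derive(shared_secret)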
|
|
@ -0,0 +1,85 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import struct
|
||||
|
||||
from cryptography.hazmat.primitives.ciphers import Cipher
|
||||
from cryptography.hazmat.primitives.ciphers.algorithms import AES
|
||||
from cryptography.hazmat.primitives.ciphers.modes import ECB
|
||||
from cryptography.hazmat.primitives.constant_time import bytes_eq
|
||||
|
||||
|
||||
def aes_key_wrap(wrapping_key, key_to_wrap, backend):
|
||||
if len(wrapping_key) not in [16, 24, 32]:
|
||||
raise ValueError("The wrapping key must be a valid AES key length")
|
||||
|
||||
if len(key_to_wrap) < 16:
|
||||
raise ValueError("The key to wrap must be at least 16 bytes")
|
||||
|
||||
if len(key_to_wrap) % 8 != 0:
|
||||
raise ValueError("The key to wrap must be a multiple of 8 bytes")
|
||||
|
||||
# RFC 3394 Key Wrap - 2.2.1 (index method)
|
||||
encryptor = Cipher(AES(wrapping_key), ECB(), backend).encryptor()
|
||||
a = b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6"
|
||||
r = [key_to_wrap[i:i + 8] for i in range(0, len(key_to_wrap), 8)]
|
||||
n = len(r)
|
||||
for j in range(6):
|
||||
for i in range(n):
|
||||
# every encryption operation is a discrete 16 byte chunk (because
|
||||
# AES has a 128-bit block size) and since we're using ECB it is
|
||||
# safe to reuse the encryptor for the entire operation
|
||||
b = encryptor.update(a + r[i])
|
||||
# pack/unpack are safe as these are always 64-bit chunks
|
||||
a = struct.pack(
|
||||
">Q", struct.unpack(">Q", b[:8])[0] ^ ((n * j) + i + 1)
|
||||
)
|
||||
r[i] = b[-8:]
|
||||
|
||||
assert encryptor.finalize() == b""
|
||||
|
||||
return a + b"".join(r)
|
||||
|
||||
|
||||
def aes_key_unwrap(wrapping_key, wrapped_key, backend):
|
||||
if len(wrapped_key) < 24:
|
||||
raise ValueError("Must be at least 24 bytes")
|
||||
|
||||
if len(wrapped_key) % 8 != 0:
|
||||
raise ValueError("The wrapped key must be a multiple of 8 bytes")
|
||||
|
||||
if len(wrapping_key) not in [16, 24, 32]:
|
||||
raise ValueError("The wrapping key must be a valid AES key length")
|
||||
|
||||
# Implement RFC 3394 Key Unwrap - 2.2.2 (index method)
|
||||
decryptor = Cipher(AES(wrapping_key), ECB(), backend).decryptor()
|
||||
aiv = b"\xa6\xa6\xa6\xa6\xa6\xa6\xa6\xa6"
|
||||
|
||||
r = [wrapped_key[i:i + 8] for i in range(0, len(wrapped_key), 8)]
|
||||
a = r.pop(0)
|
||||
n = len(r)
|
||||
for j in reversed(range(6)):
|
||||
for i in reversed(range(n)):
|
||||
# pack/unpack are safe as these are always 64-bit chunks
|
||||
atr = struct.pack(
|
||||
">Q", struct.unpack(">Q", a)[0] ^ ((n * j) + i + 1)
|
||||
) + r[i]
|
||||
# every decryption operation is a discrete 16 byte chunk so
|
||||
# it is safe to reuse the decryptor for the entire operation
|
||||
b = decryptor.update(atr)
|
||||
a = b[:8]
|
||||
r[i] = b[-8:]
|
||||
|
||||
assert decryptor.finalize() == b""
|
||||
|
||||
if not bytes_eq(a, aiv):
|
||||
raise InvalidUnwrap()
|
||||
|
||||
return b"".join(r)
|
||||
|
||||
|
||||
class InvalidUnwrap(Exception):
|
||||
pass
|
||||
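A round-trip sketch for the RFC 3394 helpers above, assuming the OpenSSL default_backend; the keys are illustrative.

import os

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.keywrap import aes_key_unwrap, aes_key_wrap

kek = os.urandom(16)                      # key-encrypting key
key_to_wrap = os.urandom(32)
wrapped = aes_key_wrap(kek, key_to_wrap, default_backend())   # 8 bytes longer than the input
assert aes_key_unwrap(kek, wrapped, default_backend()) == key_to_wrap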
|
|
@ -0,0 +1,124 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import abc
|
||||
|
||||
import six
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.exceptions import AlreadyFinalized
|
||||
from cryptography.hazmat.bindings._padding import lib
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class PaddingContext(object):
|
||||
@abc.abstractmethod
|
||||
def update(self, data):
|
||||
"""
|
||||
Pads the provided bytes and returns any available data as bytes.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def finalize(self):
|
||||
"""
|
||||
Finalize the padding, returns bytes.
|
||||
"""
|
||||
|
||||
|
||||
class PKCS7(object):
|
||||
def __init__(self, block_size):
|
||||
if not (0 <= block_size < 256):
|
||||
raise ValueError("block_size must be in range(0, 256).")
|
||||
|
||||
if block_size % 8 != 0:
|
||||
raise ValueError("block_size must be a multiple of 8.")
|
||||
|
||||
self.block_size = block_size
|
||||
|
||||
def padder(self):
|
||||
return _PKCS7PaddingContext(self.block_size)
|
||||
|
||||
def unpadder(self):
|
||||
return _PKCS7UnpaddingContext(self.block_size)
|
||||
|
||||
|
||||
@utils.register_interface(PaddingContext)
|
||||
class _PKCS7PaddingContext(object):
|
||||
def __init__(self, block_size):
|
||||
self.block_size = block_size
|
||||
# TODO: more copies than necessary, we should use zero-buffer (#193)
|
||||
self._buffer = b""
|
||||
|
||||
def update(self, data):
|
||||
if self._buffer is None:
|
||||
raise AlreadyFinalized("Context was already finalized.")
|
||||
|
||||
if not isinstance(data, bytes):
|
||||
raise TypeError("data must be bytes.")
|
||||
|
||||
self._buffer += data
|
||||
|
||||
finished_blocks = len(self._buffer) // (self.block_size // 8)
|
||||
|
||||
result = self._buffer[:finished_blocks * (self.block_size // 8)]
|
||||
self._buffer = self._buffer[finished_blocks * (self.block_size // 8):]
|
||||
|
||||
return result
|
||||
|
||||
def finalize(self):
|
||||
if self._buffer is None:
|
||||
raise AlreadyFinalized("Context was already finalized.")
|
||||
|
||||
pad_size = self.block_size // 8 - len(self._buffer)
|
||||
result = self._buffer + six.int2byte(pad_size) * pad_size
|
||||
self._buffer = None
|
||||
return result
|
||||
|
||||
|
||||
@utils.register_interface(PaddingContext)
|
||||
class _PKCS7UnpaddingContext(object):
|
||||
def __init__(self, block_size):
|
||||
self.block_size = block_size
|
||||
# TODO: more copies than necessary, we should use zero-buffer (#193)
|
||||
self._buffer = b""
|
||||
|
||||
def update(self, data):
|
||||
if self._buffer is None:
|
||||
raise AlreadyFinalized("Context was already finalized.")
|
||||
|
||||
if not isinstance(data, bytes):
|
||||
raise TypeError("data must be bytes.")
|
||||
|
||||
self._buffer += data
|
||||
|
||||
finished_blocks = max(
|
||||
len(self._buffer) // (self.block_size // 8) - 1,
|
||||
0
|
||||
)
|
||||
|
||||
result = self._buffer[:finished_blocks * (self.block_size // 8)]
|
||||
self._buffer = self._buffer[finished_blocks * (self.block_size // 8):]
|
||||
|
||||
return result
|
||||
|
||||
def finalize(self):
|
||||
if self._buffer is None:
|
||||
raise AlreadyFinalized("Context was already finalized.")
|
||||
|
||||
if len(self._buffer) != self.block_size // 8:
|
||||
raise ValueError("Invalid padding bytes.")
|
||||
|
||||
valid = lib.Cryptography_check_pkcs7_padding(
|
||||
self._buffer, self.block_size // 8
|
||||
)
|
||||
|
||||
if not valid:
|
||||
raise ValueError("Invalid padding bytes.")
|
||||
|
||||
pad_size = six.indexbytes(self._buffer, -1)
|
||||
res = self._buffer[:-pad_size]
|
||||
self._buffer = None
|
||||
return res
|
||||
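A pad/unpad round-trip sketch for the PKCS7 padding above; 128 is the AES block size in bits.

from cryptography.hazmat.primitives import padding

padder = padding.PKCS7(128).padder()
padded = padder.update(b"11111111112222222222") + padder.finalize()

unpadder = padding.PKCS7(128).unpadder()
data = unpadder.update(padded) + unpadder.finalize()
assert data == b"11111111112222222222"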
|
|
@ -0,0 +1,188 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import abc
|
||||
import base64
|
||||
import struct
|
||||
from enum import Enum
|
||||
|
||||
import six
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.exceptions import UnsupportedAlgorithm
|
||||
from cryptography.hazmat.primitives.asymmetric import dsa, ec, rsa
|
||||
|
||||
|
||||
def load_pem_private_key(data, password, backend):
|
||||
return backend.load_pem_private_key(data, password)
|
||||
|
||||
|
||||
def load_pem_public_key(data, backend):
|
||||
return backend.load_pem_public_key(data)
|
||||
|
||||
|
||||
def load_der_private_key(data, password, backend):
|
||||
return backend.load_der_private_key(data, password)
|
||||
|
||||
|
||||
def load_der_public_key(data, backend):
|
||||
return backend.load_der_public_key(data)
|
||||
|
||||
|
||||
def load_ssh_public_key(data, backend):
|
||||
key_parts = data.split(b' ', 2)
|
||||
|
||||
if len(key_parts) < 2:
|
||||
raise ValueError(
|
||||
'Key is not in the proper format or contains extra data.')
|
||||
|
||||
key_type = key_parts[0]
|
||||
|
||||
if key_type == b'ssh-rsa':
|
||||
loader = _load_ssh_rsa_public_key
|
||||
elif key_type == b'ssh-dss':
|
||||
loader = _load_ssh_dss_public_key
|
||||
elif key_type in [
|
||||
b'ecdsa-sha2-nistp256', b'ecdsa-sha2-nistp384', b'ecdsa-sha2-nistp521',
|
||||
]:
|
||||
loader = _load_ssh_ecdsa_public_key
|
||||
else:
|
||||
raise UnsupportedAlgorithm('Key type is not supported.')
|
||||
|
||||
key_body = key_parts[1]
|
||||
|
||||
try:
|
||||
decoded_data = base64.b64decode(key_body)
|
||||
except TypeError:
|
||||
raise ValueError('Key is not in the proper format.')
|
||||
|
||||
inner_key_type, rest = _read_next_string(decoded_data)
|
||||
|
||||
if inner_key_type != key_type:
|
||||
raise ValueError(
|
||||
'Key header and key body contain different key type values.'
|
||||
)
|
||||
|
||||
return loader(key_type, rest, backend)
|
||||
|
||||
|
||||
def _load_ssh_rsa_public_key(key_type, decoded_data, backend):
|
||||
e, rest = _read_next_mpint(decoded_data)
|
||||
n, rest = _read_next_mpint(rest)
|
||||
|
||||
if rest:
|
||||
raise ValueError('Key body contains extra bytes.')
|
||||
|
||||
return rsa.RSAPublicNumbers(e, n).public_key(backend)
|
||||
|
||||
|
||||
def _load_ssh_dss_public_key(key_type, decoded_data, backend):
|
||||
p, rest = _read_next_mpint(decoded_data)
|
||||
q, rest = _read_next_mpint(rest)
|
||||
g, rest = _read_next_mpint(rest)
|
||||
y, rest = _read_next_mpint(rest)
|
||||
|
||||
if rest:
|
||||
raise ValueError('Key body contains extra bytes.')
|
||||
|
||||
parameter_numbers = dsa.DSAParameterNumbers(p, q, g)
|
||||
public_numbers = dsa.DSAPublicNumbers(y, parameter_numbers)
|
||||
|
||||
return public_numbers.public_key(backend)
|
||||
|
||||
|
||||
def _load_ssh_ecdsa_public_key(expected_key_type, decoded_data, backend):
|
||||
curve_name, rest = _read_next_string(decoded_data)
|
||||
data, rest = _read_next_string(rest)
|
||||
|
||||
if expected_key_type != b"ecdsa-sha2-" + curve_name:
|
||||
raise ValueError(
|
||||
'Key header and key body contain different key type values.'
|
||||
)
|
||||
|
||||
if rest:
|
||||
raise ValueError('Key body contains extra bytes.')
|
||||
|
||||
curve = {
|
||||
b"nistp256": ec.SECP256R1,
|
||||
b"nistp384": ec.SECP384R1,
|
||||
b"nistp521": ec.SECP521R1,
|
||||
}[curve_name]()
|
||||
|
||||
if six.indexbytes(data, 0) != 4:
|
||||
raise NotImplementedError(
|
||||
"Compressed elliptic curve points are not supported"
|
||||
)
|
||||
|
||||
# key_size is in bits, and sometimes it's not evenly divisible by 8, so we
|
||||
# add 7 to round up the number of bytes.
|
||||
if len(data) != 1 + 2 * ((curve.key_size + 7) // 8):
|
||||
raise ValueError("Malformed key bytes")
|
||||
|
||||
x = utils.int_from_bytes(
|
||||
data[1:1 + (curve.key_size + 7) // 8], byteorder='big'
|
||||
)
|
||||
y = utils.int_from_bytes(
|
||||
data[1 + (curve.key_size + 7) // 8:], byteorder='big'
|
||||
)
|
||||
return ec.EllipticCurvePublicNumbers(x, y, curve).public_key(backend)
|
||||
|
||||
|
||||
def _read_next_string(data):
|
||||
"""
|
||||
Retrieves the next RFC 4251 string value from the data.
|
||||
|
||||
While the RFC calls these strings, in Python they are bytes objects.
|
||||
"""
|
||||
str_len, = struct.unpack('>I', data[:4])
|
||||
return data[4:4 + str_len], data[4 + str_len:]
|
||||
|
||||
|
||||
def _read_next_mpint(data):
|
||||
"""
|
||||
Reads the next mpint from the data.
|
||||
|
||||
Currently, all mpints are interpreted as unsigned.
|
||||
"""
|
||||
mpint_data, rest = _read_next_string(data)
|
||||
|
||||
return (
|
||||
utils.int_from_bytes(mpint_data, byteorder='big', signed=False), rest
|
||||
)
|
||||
|
||||
|
||||
class Encoding(Enum):
|
||||
PEM = "PEM"
|
||||
DER = "DER"
|
||||
|
||||
|
||||
class PrivateFormat(Enum):
|
||||
PKCS8 = "PKCS8"
|
||||
TraditionalOpenSSL = "TraditionalOpenSSL"
|
||||
|
||||
|
||||
class PublicFormat(Enum):
|
||||
SubjectPublicKeyInfo = "X.509 subjectPublicKeyInfo with PKCS#1"
|
||||
PKCS1 = "Raw PKCS#1"
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class KeySerializationEncryption(object):
|
||||
pass
|
||||
|
||||
|
||||
@utils.register_interface(KeySerializationEncryption)
|
||||
class BestAvailableEncryption(object):
|
||||
def __init__(self, password):
|
||||
if not isinstance(password, bytes) or len(password) == 0:
|
||||
raise ValueError("Password must be 1 or more bytes.")
|
||||
|
||||
self.password = password
|
||||
|
||||
|
||||
@utils.register_interface(KeySerializationEncryption)
|
||||
class NoEncryption(object):
|
||||
pass
|
||||
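A usage sketch for the PEM loaders above, assuming the OpenSSL default_backend; the file names are hypothetical placeholders.

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization

with open("public_key.pem", "rb") as f:      # hypothetical path
    pem_data = f.read()
public_key = serialization.load_pem_public_key(pem_data, backend=default_backend())

# Private keys work the same way; password is None for unencrypted keys.
with open("private_key.pem", "rb") as f:     # hypothetical path
    private_key = serialization.load_pem_private_key(
        f.read(), password=None, backend=default_backend()
    )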
|
|
@ -0,0 +1,9 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
|
||||
class InvalidToken(Exception):
|
||||
pass
|
||||
|
|
@ -0,0 +1,67 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import struct
|
||||
|
||||
import six
|
||||
|
||||
from cryptography.exceptions import (
|
||||
UnsupportedAlgorithm, _Reasons
|
||||
)
|
||||
from cryptography.hazmat.backends.interfaces import HMACBackend
|
||||
from cryptography.hazmat.primitives import constant_time, hmac
|
||||
from cryptography.hazmat.primitives.hashes import SHA1, SHA256, SHA512
|
||||
from cryptography.hazmat.primitives.twofactor import InvalidToken
|
||||
from cryptography.hazmat.primitives.twofactor.utils import _generate_uri
|
||||
|
||||
|
||||
class HOTP(object):
|
||||
def __init__(self, key, length, algorithm, backend):
|
||||
if not isinstance(backend, HMACBackend):
|
||||
raise UnsupportedAlgorithm(
|
||||
"Backend object does not implement HMACBackend.",
|
||||
_Reasons.BACKEND_MISSING_INTERFACE
|
||||
)
|
||||
|
||||
if len(key) < 16:
|
||||
raise ValueError("Key length has to be at least 128 bits.")
|
||||
|
||||
if not isinstance(length, six.integer_types):
|
||||
raise TypeError("Length parameter must be an integer type.")
|
||||
|
||||
if length < 6 or length > 8:
|
||||
raise ValueError("Length of HOTP has to be between 6 to 8.")
|
||||
|
||||
if not isinstance(algorithm, (SHA1, SHA256, SHA512)):
|
||||
raise TypeError("Algorithm must be SHA1, SHA256 or SHA512.")
|
||||
|
||||
self._key = key
|
||||
self._length = length
|
||||
self._algorithm = algorithm
|
||||
self._backend = backend
|
||||
|
||||
def generate(self, counter):
|
||||
truncated_value = self._dynamic_truncate(counter)
|
||||
hotp = truncated_value % (10 ** self._length)
|
||||
return "{0:0{1}}".format(hotp, self._length).encode()
|
||||
|
||||
def verify(self, hotp, counter):
|
||||
if not constant_time.bytes_eq(self.generate(counter), hotp):
|
||||
raise InvalidToken("Supplied HOTP value does not match.")
|
||||
|
||||
def _dynamic_truncate(self, counter):
|
||||
ctx = hmac.HMAC(self._key, self._algorithm, self._backend)
|
||||
ctx.update(struct.pack(">Q", counter))
|
||||
hmac_value = ctx.finalize()
|
||||
|
||||
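# RFC 4226 dynamic truncation: the low four bits of the last HMAC byte select
# a 4-byte window, whose most significant bit is masked off.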
offset = six.indexbytes(hmac_value, len(hmac_value) - 1) & 0b1111
|
||||
p = hmac_value[offset:offset + 4]
|
||||
return struct.unpack(">I", p)[0] & 0x7fffffff
|
||||
|
||||
def get_provisioning_uri(self, account_name, counter, issuer):
|
||||
return _generate_uri(self, "hotp", account_name, issuer, [
|
||||
("counter", int(counter)),
|
||||
])
|
||||
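A usage sketch for HOTP, assuming the OpenSSL default_backend; the shared secret is a placeholder.

import os

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.hashes import SHA1
from cryptography.hazmat.primitives.twofactor.hotp import HOTP

secret = os.urandom(20)                      # shared with the authenticator
hotp = HOTP(secret, 6, SHA1(), default_backend())
token = hotp.generate(0)                     # 6 ASCII digits for counter value 0
hotp.verify(token, 0)                        # raises InvalidToken on mismatch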
|
|
@ -0,0 +1,39 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from cryptography.exceptions import (
|
||||
UnsupportedAlgorithm, _Reasons
|
||||
)
|
||||
from cryptography.hazmat.backends.interfaces import HMACBackend
|
||||
from cryptography.hazmat.primitives import constant_time
|
||||
from cryptography.hazmat.primitives.twofactor import InvalidToken
|
||||
from cryptography.hazmat.primitives.twofactor.hotp import HOTP
|
||||
from cryptography.hazmat.primitives.twofactor.utils import _generate_uri
|
||||
|
||||
|
||||
class TOTP(object):
|
||||
def __init__(self, key, length, algorithm, time_step, backend):
|
||||
if not isinstance(backend, HMACBackend):
|
||||
raise UnsupportedAlgorithm(
|
||||
"Backend object does not implement HMACBackend.",
|
||||
_Reasons.BACKEND_MISSING_INTERFACE
|
||||
)
|
||||
|
||||
self._time_step = time_step
|
||||
self._hotp = HOTP(key, length, algorithm, backend)
|
||||
|
||||
def generate(self, time):
|
||||
counter = int(time / self._time_step)
|
||||
return self._hotp.generate(counter)
|
||||
|
||||
def verify(self, totp, time):
|
||||
if not constant_time.bytes_eq(self.generate(time), totp):
|
||||
raise InvalidToken("Supplied TOTP value does not match.")
|
||||
|
||||
def get_provisioning_uri(self, account_name, issuer):
|
||||
return _generate_uri(self._hotp, "totp", account_name, issuer, [
|
||||
("period", int(self._time_step)),
|
||||
])
|
||||
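A usage sketch for TOTP, assuming the OpenSSL default_backend; the shared secret and the 30-second time step are illustrative.

import os
import time

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.hashes import SHA1
from cryptography.hazmat.primitives.twofactor.totp import TOTP

secret = os.urandom(20)
totp = TOTP(secret, 6, SHA1(), 30, default_backend())   # 30-second time step
now = time.time()
token = totp.generate(now)
totp.verify(token, now)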
|
|
@ -0,0 +1,30 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import base64
|
||||
|
||||
from six.moves.urllib.parse import quote, urlencode
|
||||
|
||||
|
||||
def _generate_uri(hotp, type_name, account_name, issuer, extra_parameters):
|
||||
parameters = [
|
||||
("digits", hotp._length),
|
||||
("secret", base64.b32encode(hotp._key)),
|
||||
("algorithm", hotp._algorithm.name.upper()),
|
||||
]
|
||||
|
||||
if issuer is not None:
|
||||
parameters.append(("issuer", issuer))
|
||||
|
||||
parameters.extend(extra_parameters)
|
||||
|
||||
uriparts = {
|
||||
"type": type_name,
|
||||
"label": ("%s:%s" % (quote(issuer), quote(account_name)) if issuer
|
||||
else quote(account_name)),
|
||||
"parameters": urlencode(parameters),
|
||||
}
|
||||
return "otpauth://{type}/{label}?{parameters}".format(**uriparts)
|
||||
127
lib/python3.5/site-packages/cryptography/utils.py
Normal file
|
|
@ -0,0 +1,127 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import abc
|
||||
import binascii
|
||||
import inspect
|
||||
import struct
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
|
||||
DeprecatedIn09 = DeprecationWarning
|
||||
DeprecatedIn10 = PendingDeprecationWarning
|
||||
|
||||
|
||||
def read_only_property(name):
|
||||
return property(lambda self: getattr(self, name))
|
||||
|
||||
|
||||
def register_interface(iface):
|
||||
def register_decorator(klass):
|
||||
verify_interface(iface, klass)
|
||||
iface.register(klass)
|
||||
return klass
|
||||
return register_decorator
|
||||
|
||||
|
||||
if hasattr(int, "from_bytes"):
|
||||
int_from_bytes = int.from_bytes
|
||||
else:
|
||||
def int_from_bytes(data, byteorder, signed=False):
|
||||
assert byteorder == 'big'
|
||||
assert not signed
|
||||
|
||||
if len(data) % 4 != 0:
|
||||
data = (b'\x00' * (4 - (len(data) % 4))) + data
|
||||
|
||||
result = 0
|
||||
|
||||
while len(data) > 0:
|
||||
digit, = struct.unpack('>I', data[:4])
|
||||
result = (result << 32) + digit
|
||||
data = data[4:]
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def int_to_bytes(integer, length=None):
|
||||
hex_string = '%x' % integer
|
||||
if length is None:
|
||||
n = len(hex_string)
|
||||
else:
|
||||
n = length * 2
|
||||
return binascii.unhexlify(hex_string.zfill(n + (n & 1)))
|
||||
|
||||
|
||||
class InterfaceNotImplemented(Exception):
|
||||
pass
|
||||
|
||||
|
||||
if hasattr(inspect, "signature"):
|
||||
signature = inspect.signature
|
||||
else:
|
||||
signature = inspect.getargspec
|
||||
|
||||
|
||||
def verify_interface(iface, klass):
|
||||
for method in iface.__abstractmethods__:
|
||||
if not hasattr(klass, method):
|
||||
raise InterfaceNotImplemented(
|
||||
"{0} is missing a {1!r} method".format(klass, method)
|
||||
)
|
||||
if isinstance(getattr(iface, method), abc.abstractproperty):
|
||||
# Can't properly verify these yet.
|
||||
continue
|
||||
sig = signature(getattr(iface, method))
|
||||
actual = signature(getattr(klass, method))
|
||||
if sig != actual:
|
||||
raise InterfaceNotImplemented(
|
||||
"{0}.{1}'s signature differs from the expected. Expected: "
|
||||
"{2!r}. Received: {3!r}".format(
|
||||
klass, method, sig, actual
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
if sys.version_info >= (2, 7):
|
||||
def bit_length(x):
|
||||
return x.bit_length()
|
||||
else:
|
||||
def bit_length(x):
|
||||
return len(bin(x)) - (2 + (x <= 0))
|
||||
|
||||
|
||||
class _DeprecatedValue(object):
|
||||
def __init__(self, value, message, warning_class):
|
||||
self.value = value
|
||||
self.message = message
|
||||
self.warning_class = warning_class
|
||||
|
||||
|
||||
class _ModuleWithDeprecations(object):
|
||||
def __init__(self, module):
|
||||
self.__dict__["_module"] = module
|
||||
|
||||
def __getattr__(self, attr):
|
||||
obj = getattr(self._module, attr)
|
||||
if isinstance(obj, _DeprecatedValue):
|
||||
warnings.warn(obj.message, obj.warning_class, stacklevel=2)
|
||||
obj = obj.value
|
||||
return obj
|
||||
|
||||
def __setattr__(self, attr, value):
|
||||
setattr(self._module, attr, value)
|
||||
|
||||
def __dir__(self):
|
||||
return ["_module"] + dir(self._module)
|
||||
|
||||
|
||||
def deprecated(value, module_name, message, warning_class):
|
||||
module = sys.modules[module_name]
|
||||
if not isinstance(module, _ModuleWithDeprecations):
|
||||
sys.modules[module_name] = module = _ModuleWithDeprecations(module)
|
||||
return _DeprecatedValue(value, message, warning_class)
|
||||
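A short sketch of the integer helpers above.

from cryptography import utils

assert utils.int_to_bytes(65537) == b"\x01\x00\x01"
assert utils.int_from_bytes(b"\x01\x00\x01", byteorder="big") == 65537
assert utils.bit_length(65537) == 17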
163
lib/python3.5/site-packages/cryptography/x509/__init__.py
Normal file
|
|
@ -0,0 +1,163 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from cryptography.x509.base import (
|
||||
Certificate, CertificateBuilder, CertificateRevocationList,
|
||||
CertificateSigningRequest, CertificateSigningRequestBuilder,
|
||||
InvalidVersion, RevokedCertificate,
|
||||
Version, load_der_x509_certificate, load_der_x509_crl, load_der_x509_csr,
|
||||
load_pem_x509_certificate, load_pem_x509_crl, load_pem_x509_csr,
|
||||
)
|
||||
from cryptography.x509.extensions import (
|
||||
AccessDescription, AuthorityInformationAccess,
|
||||
AuthorityKeyIdentifier, BasicConstraints, CRLDistributionPoints,
|
||||
CertificatePolicies, DistributionPoint, DuplicateExtension,
|
||||
ExtendedKeyUsage, Extension, ExtensionNotFound, ExtensionType, Extensions,
|
||||
GeneralNames, InhibitAnyPolicy, IssuerAlternativeName, KeyUsage,
|
||||
NameConstraints, NoticeReference, OCSPNoCheck, PolicyInformation,
|
||||
ReasonFlags, SubjectAlternativeName, SubjectKeyIdentifier,
|
||||
UnsupportedExtension, UserNotice
|
||||
)
|
||||
from cryptography.x509.general_name import (
|
||||
DNSName, DirectoryName, GeneralName, IPAddress, OtherName, RFC822Name,
|
||||
RegisteredID, UniformResourceIdentifier, UnsupportedGeneralNameType,
|
||||
_GENERAL_NAMES
|
||||
)
|
||||
from cryptography.x509.name import Name, NameAttribute
|
||||
from cryptography.x509.oid import (
|
||||
AuthorityInformationAccessOID, CRLExtensionOID, CertificatePoliciesOID,
|
||||
ExtendedKeyUsageOID, ExtensionOID, NameOID, ObjectIdentifier,
|
||||
SignatureAlgorithmOID, _SIG_OIDS_TO_HASH
|
||||
)
|
||||
|
||||
|
||||
OID_AUTHORITY_INFORMATION_ACCESS = ExtensionOID.AUTHORITY_INFORMATION_ACCESS
|
||||
OID_AUTHORITY_KEY_IDENTIFIER = ExtensionOID.AUTHORITY_KEY_IDENTIFIER
|
||||
OID_BASIC_CONSTRAINTS = ExtensionOID.BASIC_CONSTRAINTS
|
||||
OID_CERTIFICATE_POLICIES = ExtensionOID.CERTIFICATE_POLICIES
|
||||
OID_CRL_DISTRIBUTION_POINTS = ExtensionOID.CRL_DISTRIBUTION_POINTS
|
||||
OID_EXTENDED_KEY_USAGE = ExtensionOID.EXTENDED_KEY_USAGE
|
||||
OID_FRESHEST_CRL = ExtensionOID.FRESHEST_CRL
|
||||
OID_INHIBIT_ANY_POLICY = ExtensionOID.INHIBIT_ANY_POLICY
|
||||
OID_ISSUER_ALTERNATIVE_NAME = ExtensionOID.ISSUER_ALTERNATIVE_NAME
|
||||
OID_KEY_USAGE = ExtensionOID.KEY_USAGE
|
||||
OID_NAME_CONSTRAINTS = ExtensionOID.NAME_CONSTRAINTS
|
||||
OID_OCSP_NO_CHECK = ExtensionOID.OCSP_NO_CHECK
|
||||
OID_POLICY_CONSTRAINTS = ExtensionOID.POLICY_CONSTRAINTS
|
||||
OID_POLICY_MAPPINGS = ExtensionOID.POLICY_MAPPINGS
|
||||
OID_SUBJECT_ALTERNATIVE_NAME = ExtensionOID.SUBJECT_ALTERNATIVE_NAME
|
||||
OID_SUBJECT_DIRECTORY_ATTRIBUTES = ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES
|
||||
OID_SUBJECT_INFORMATION_ACCESS = ExtensionOID.SUBJECT_INFORMATION_ACCESS
|
||||
OID_SUBJECT_KEY_IDENTIFIER = ExtensionOID.SUBJECT_KEY_IDENTIFIER
|
||||
|
||||
OID_DSA_WITH_SHA1 = SignatureAlgorithmOID.DSA_WITH_SHA1
|
||||
OID_DSA_WITH_SHA224 = SignatureAlgorithmOID.DSA_WITH_SHA224
|
||||
OID_DSA_WITH_SHA256 = SignatureAlgorithmOID.DSA_WITH_SHA256
|
||||
OID_ECDSA_WITH_SHA1 = SignatureAlgorithmOID.ECDSA_WITH_SHA1
|
||||
OID_ECDSA_WITH_SHA224 = SignatureAlgorithmOID.ECDSA_WITH_SHA224
|
||||
OID_ECDSA_WITH_SHA256 = SignatureAlgorithmOID.ECDSA_WITH_SHA256
|
||||
OID_ECDSA_WITH_SHA384 = SignatureAlgorithmOID.ECDSA_WITH_SHA384
|
||||
OID_ECDSA_WITH_SHA512 = SignatureAlgorithmOID.ECDSA_WITH_SHA512
|
||||
OID_RSA_WITH_MD5 = SignatureAlgorithmOID.RSA_WITH_MD5
|
||||
OID_RSA_WITH_SHA1 = SignatureAlgorithmOID.RSA_WITH_SHA1
|
||||
OID_RSA_WITH_SHA224 = SignatureAlgorithmOID.RSA_WITH_SHA224
|
||||
OID_RSA_WITH_SHA256 = SignatureAlgorithmOID.RSA_WITH_SHA256
|
||||
OID_RSA_WITH_SHA384 = SignatureAlgorithmOID.RSA_WITH_SHA384
|
||||
OID_RSA_WITH_SHA512 = SignatureAlgorithmOID.RSA_WITH_SHA512
|
||||
|
||||
OID_COMMON_NAME = NameOID.COMMON_NAME
|
||||
OID_COUNTRY_NAME = NameOID.COUNTRY_NAME
|
||||
OID_DOMAIN_COMPONENT = NameOID.DOMAIN_COMPONENT
|
||||
OID_DN_QUALIFIER = NameOID.DN_QUALIFIER
|
||||
OID_EMAIL_ADDRESS = NameOID.EMAIL_ADDRESS
|
||||
OID_GENERATION_QUALIFIER = NameOID.GENERATION_QUALIFIER
|
||||
OID_GIVEN_NAME = NameOID.GIVEN_NAME
|
||||
OID_LOCALITY_NAME = NameOID.LOCALITY_NAME
|
||||
OID_ORGANIZATIONAL_UNIT_NAME = NameOID.ORGANIZATIONAL_UNIT_NAME
|
||||
OID_ORGANIZATION_NAME = NameOID.ORGANIZATION_NAME
|
||||
OID_PSEUDONYM = NameOID.PSEUDONYM
|
||||
OID_SERIAL_NUMBER = NameOID.SERIAL_NUMBER
|
||||
OID_STATE_OR_PROVINCE_NAME = NameOID.STATE_OR_PROVINCE_NAME
|
||||
OID_SURNAME = NameOID.SURNAME
|
||||
OID_TITLE = NameOID.TITLE
|
||||
|
||||
OID_CLIENT_AUTH = ExtendedKeyUsageOID.CLIENT_AUTH
|
||||
OID_CODE_SIGNING = ExtendedKeyUsageOID.CODE_SIGNING
|
||||
OID_EMAIL_PROTECTION = ExtendedKeyUsageOID.EMAIL_PROTECTION
|
||||
OID_OCSP_SIGNING = ExtendedKeyUsageOID.OCSP_SIGNING
|
||||
OID_SERVER_AUTH = ExtendedKeyUsageOID.SERVER_AUTH
|
||||
OID_TIME_STAMPING = ExtendedKeyUsageOID.TIME_STAMPING
|
||||
|
||||
OID_ANY_POLICY = CertificatePoliciesOID.ANY_POLICY
|
||||
OID_CPS_QUALIFIER = CertificatePoliciesOID.CPS_QUALIFIER
|
||||
OID_CPS_USER_NOTICE = CertificatePoliciesOID.CPS_USER_NOTICE
|
||||
|
||||
OID_CERTIFICATE_ISSUER = CRLExtensionOID.CERTIFICATE_ISSUER
|
||||
OID_CRL_REASON = CRLExtensionOID.CRL_REASON
|
||||
OID_INVALIDITY_DATE = CRLExtensionOID.INVALIDITY_DATE
|
||||
|
||||
OID_CA_ISSUERS = AuthorityInformationAccessOID.CA_ISSUERS
|
||||
OID_OCSP = AuthorityInformationAccessOID.OCSP
|
||||
|
||||
|
||||
__all__ = [
|
||||
"load_pem_x509_certificate",
|
||||
"load_der_x509_certificate",
|
||||
"load_pem_x509_csr",
|
||||
"load_der_x509_csr",
|
||||
"load_pem_x509_crl",
|
||||
"load_der_x509_crl",
|
||||
"InvalidVersion",
|
||||
"DuplicateExtension",
|
||||
"UnsupportedExtension",
|
||||
"ExtensionNotFound",
|
||||
"UnsupportedGeneralNameType",
|
||||
"NameAttribute",
|
||||
"Name",
|
||||
"ObjectIdentifier",
|
||||
"ExtensionType",
|
||||
"Extensions",
|
||||
"Extension",
|
||||
"ExtendedKeyUsage",
|
||||
"OCSPNoCheck",
|
||||
"BasicConstraints",
|
||||
"KeyUsage",
|
||||
"AuthorityInformationAccess",
|
||||
"AccessDescription",
|
||||
"CertificatePolicies",
|
||||
"PolicyInformation",
|
||||
"UserNotice",
|
||||
"NoticeReference",
|
||||
"SubjectKeyIdentifier",
|
||||
"NameConstraints",
|
||||
"CRLDistributionPoints",
|
||||
"DistributionPoint",
|
||||
"ReasonFlags",
|
||||
"InhibitAnyPolicy",
|
||||
"SubjectAlternativeName",
|
||||
"IssuerAlternativeName",
|
||||
"AuthorityKeyIdentifier",
|
||||
"GeneralNames",
|
||||
"GeneralName",
|
||||
"RFC822Name",
|
||||
"DNSName",
|
||||
"UniformResourceIdentifier",
|
||||
"RegisteredID",
|
||||
"DirectoryName",
|
||||
"IPAddress",
|
||||
"OtherName",
|
||||
"Certificate",
|
||||
"CertificateRevocationList",
|
||||
"CertificateSigningRequest",
|
||||
"RevokedCertificate",
|
||||
"CertificateSigningRequestBuilder",
|
||||
"CertificateBuilder",
|
||||
"Version",
|
||||
"_SIG_OIDS_TO_HASH",
|
||||
"OID_CA_ISSUERS",
|
||||
"OID_OCSP",
|
||||
"_GENERAL_NAMES",
|
||||
]
|
||||
467
lib/python3.5/site-packages/cryptography/x509/base.py
Normal file
|
|
@ -0,0 +1,467 @@
|
|||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import abc
|
||||
import datetime
|
||||
from enum import Enum
|
||||
|
||||
import six
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.hazmat.primitives.asymmetric import dsa, ec, rsa
|
||||
from cryptography.x509.extensions import Extension, ExtensionType
|
||||
from cryptography.x509.name import Name
|
||||
|
||||
|
||||
_UNIX_EPOCH = datetime.datetime(1970, 1, 1)
|
||||
|
||||
|
||||
class Version(Enum):
|
||||
v1 = 0
|
||||
v3 = 2
|
||||
|
||||
|
||||
def load_pem_x509_certificate(data, backend):
|
||||
return backend.load_pem_x509_certificate(data)
|
||||
|
||||
|
||||
def load_der_x509_certificate(data, backend):
|
||||
return backend.load_der_x509_certificate(data)
|
||||
|
||||
|
||||
def load_pem_x509_csr(data, backend):
|
||||
return backend.load_pem_x509_csr(data)
|
||||
|
||||
|
||||
def load_der_x509_csr(data, backend):
|
||||
return backend.load_der_x509_csr(data)
|
||||
|
||||
|
||||
def load_pem_x509_crl(data, backend):
|
||||
return backend.load_pem_x509_crl(data)
|
||||
|
||||
|
||||
def load_der_x509_crl(data, backend):
|
||||
return backend.load_der_x509_crl(data)
|
||||
|
||||
|
||||
class InvalidVersion(Exception):
|
||||
def __init__(self, msg, parsed_version):
|
||||
super(InvalidVersion, self).__init__(msg)
|
||||
self.parsed_version = parsed_version
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class Certificate(object):
|
||||
@abc.abstractmethod
|
||||
def fingerprint(self, algorithm):
|
||||
"""
|
||||
Returns bytes using digest passed.
|
||||
"""
|
||||
|
||||
@abc.abstractproperty
|
||||
def serial(self):
|
||||
"""
|
||||
Returns certificate serial number
|
||||
"""
|
||||
|
||||
@abc.abstractproperty
|
||||
def version(self):
|
||||
"""
|
||||
Returns the certificate version
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def public_key(self):
|
||||
"""
|
||||
Returns the public key
|
||||
"""
|
||||
|
||||
@abc.abstractproperty
|
||||
def not_valid_before(self):
|
||||
"""
|
||||
Not before time (represented as UTC datetime)
|
||||
"""
|
||||
|
||||
@abc.abstractproperty
|
||||
def not_valid_after(self):
|
||||
"""
|
||||
Not after time (represented as UTC datetime)
|
||||
"""
|
||||
|
||||
@abc.abstractproperty
|
||||
def issuer(self):
|
||||
"""
|
||||
Returns the issuer name object.
|
||||
"""
|
||||
|
||||
@abc.abstractproperty
|
||||
def subject(self):
|
||||
"""
|
||||
Returns the subject name object.
|
||||
"""
|
||||
|
||||
@abc.abstractproperty
|
||||
def signature_hash_algorithm(self):
|
||||
"""
|
||||
Returns a HashAlgorithm corresponding to the type of the digest signed
|
||||
in the certificate.
|
||||
"""
|
||||
|
||||
@abc.abstractproperty
|
||||
def extensions(self):
|
||||
"""
|
||||
Returns an Extensions object.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def __eq__(self, other):
|
||||
"""
|
||||
Checks equality.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def __ne__(self, other):
|
||||
"""
|
||||
Checks not equal.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def __hash__(self):
|
||||
"""
|
||||
Computes a hash.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def public_bytes(self, encoding):
|
||||
"""
|
||||
Serializes the certificate to PEM or DER format.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class CertificateRevocationList(object):
|
||||
|
||||
@abc.abstractmethod
|
||||
def fingerprint(self, algorithm):
|
||||
"""
|
||||
Returns bytes using digest passed.
|
||||
"""
|
||||
|
||||
@abc.abstractproperty
|
||||
def signature_hash_algorithm(self):
|
||||
"""
|
||||
Returns a HashAlgorithm corresponding to the type of the digest signed
|
||||
in the certificate.
|
||||
"""
|
||||
|
||||
@abc.abstractproperty
|
||||
def issuer(self):
|
||||
"""
|
||||
Returns the X509Name with the issuer of this CRL.
|
||||
"""
|
||||
|
||||
@abc.abstractproperty
|
||||
def next_update(self):
|
||||
"""
|
||||
Returns the date of next update for this CRL.
|
||||
"""
|
||||
|
||||
@abc.abstractproperty
|
||||
def last_update(self):
|
||||
"""
|
||||
Returns the date of last update for this CRL.
|
||||
"""
|
||||
|
||||
@abc.abstractproperty
|
||||
def extensions(self):
|
||||
"""
|
||||
Returns an Extensions object containing a list of CRL extensions.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def __eq__(self, other):
|
||||
"""
|
||||
Checks equality.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def __ne__(self, other):
|
||||
"""
|
||||
Checks not equal.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class CertificateSigningRequest(object):
|
||||
@abc.abstractmethod
|
||||
def __eq__(self, other):
|
||||
"""
|
||||
Checks equality.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def __ne__(self, other):
|
||||
"""
|
||||
Checks not equal.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def __hash__(self):
|
||||
"""
|
||||
Computes a hash.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def public_key(self):
|
||||
"""
|
||||
Returns the public key
|
||||
"""
|
||||
|
||||
@abc.abstractproperty
|
||||
def subject(self):
|
||||
"""
|
||||
Returns the subject name object.
|
||||
"""
|
||||
|
||||
@abc.abstractproperty
|
||||
def signature_hash_algorithm(self):
|
||||
"""
|
||||
Returns a HashAlgorithm corresponding to the type of the digest signed
|
||||
in the certificate.
|
||||
"""
|
||||
|
||||
@abc.abstractproperty
|
||||
def extensions(self):
|
||||
"""
|
||||
Returns the extensions in the signing request.
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def public_bytes(self, encoding):
|
||||
"""
|
||||
Encodes the request to PEM or DER format.
|
||||
"""
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class RevokedCertificate(object):
|
||||
@abc.abstractproperty
|
||||
def serial_number(self):
|
||||
"""
|
||||
Returns the serial number of the revoked certificate.
|
||||
"""
|
||||
|
||||
@abc.abstractproperty
|
||||
def revocation_date(self):
|
||||
"""
|
||||
Returns the date of when this certificate was revoked.
|
||||
"""
|
||||
|
||||
@abc.abstractproperty
|
||||
def extensions(self):
|
||||
"""
|
||||
Returns an Extensions object containing a list of Revoked extensions.
|
||||
"""
|
||||
|
||||
|
||||
class CertificateSigningRequestBuilder(object):
|
||||
def __init__(self, subject_name=None, extensions=[]):
|
||||
"""
|
||||
Creates an empty X.509 certificate request (v1).
|
||||
"""
|
||||
self._subject_name = subject_name
|
||||
self._extensions = extensions
|
||||
|
||||
def subject_name(self, name):
|
||||
"""
|
||||
Sets the certificate requestor's distinguished name.
|
||||
"""
|
||||
if not isinstance(name, Name):
|
||||
raise TypeError('Expecting x509.Name object.')
|
||||
if self._subject_name is not None:
|
||||
raise ValueError('The subject name may only be set once.')
|
||||
return CertificateSigningRequestBuilder(name, self._extensions)
|
||||
|
||||
def add_extension(self, extension, critical):
|
||||
"""
|
||||
Adds an X.509 extension to the certificate request.
|
||||
"""
|
||||
if not isinstance(extension, ExtensionType):
|
||||
raise TypeError("extension must be an ExtensionType")
|
||||
|
||||
extension = Extension(extension.oid, critical, extension)
|
||||
|
||||
# TODO: This is quadratic in the number of extensions
|
||||
for e in self._extensions:
|
||||
if e.oid == extension.oid:
|
||||
raise ValueError('This extension has already been set.')
|
||||
return CertificateSigningRequestBuilder(
|
||||
self._subject_name, self._extensions + [extension]
|
||||
)
|
||||
|
||||
def sign(self, private_key, algorithm, backend):
|
||||
"""
|
||||
Signs the request using the requestor's private key.
|
||||
"""
|
||||
if self._subject_name is None:
|
||||
raise ValueError("A CertificateSigningRequest must have a subject")
|
||||
return backend.create_x509_csr(self, private_key, algorithm)
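A minimal usage sketch of the builder above (illustrative only, not part of this file); it assumes an RSA key and the default OpenSSL backend, with placeholder names and values.
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.x509.oid import NameOID

# Generate a throwaway key and build a CSR for a single DNS name.
private_key = rsa.generate_private_key(
    public_exponent=65537, key_size=2048, backend=default_backend()
)
csr = x509.CertificateSigningRequestBuilder().subject_name(
    x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, u"example.com")])
).add_extension(
    x509.SubjectAlternativeName([x509.DNSName(u"example.com")]), critical=False
).sign(private_key, hashes.SHA256(), default_backend())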
|
||||
|
||||
|
||||
class CertificateBuilder(object):
|
||||
def __init__(self, issuer_name=None, subject_name=None,
|
||||
public_key=None, serial_number=None, not_valid_before=None,
|
||||
not_valid_after=None, extensions=[]):
|
||||
self._version = Version.v3
|
||||
self._issuer_name = issuer_name
|
||||
self._subject_name = subject_name
|
||||
self._public_key = public_key
|
||||
self._serial_number = serial_number
|
||||
self._not_valid_before = not_valid_before
|
||||
self._not_valid_after = not_valid_after
|
||||
self._extensions = extensions
|
||||
|
||||
def issuer_name(self, name):
|
||||
"""
|
||||
Sets the CA's distinguished name.
|
||||
"""
|
||||
if not isinstance(name, Name):
|
||||
raise TypeError('Expecting x509.Name object.')
|
||||
if self._issuer_name is not None:
|
||||
raise ValueError('The issuer name may only be set once.')
|
||||
return CertificateBuilder(
|
||||
name, self._subject_name, self._public_key,
|
||||
self._serial_number, self._not_valid_before,
|
||||
self._not_valid_after, self._extensions
|
||||
)
|
||||
|
||||
def subject_name(self, name):
|
||||
"""
|
||||
Sets the requestor's distinguished name.
|
||||
"""
|
||||
if not isinstance(name, Name):
|
||||
raise TypeError('Expecting x509.Name object.')
|
||||
if self._subject_name is not None:
|
||||
raise ValueError('The subject name may only be set once.')
|
||||
return CertificateBuilder(
|
||||
self._issuer_name, name, self._public_key,
|
||||
self._serial_number, self._not_valid_before,
|
||||
self._not_valid_after, self._extensions
|
||||
)
|
||||
|
||||
def public_key(self, key):
|
||||
"""
|
||||
Sets the requestor's public key (as found in the signing request).
|
||||
"""
|
||||
if not isinstance(key, (dsa.DSAPublicKey, rsa.RSAPublicKey,
|
||||
ec.EllipticCurvePublicKey)):
|
||||
raise TypeError('Expecting one of DSAPublicKey, RSAPublicKey,'
|
||||
' or EllipticCurvePublicKey.')
|
||||
if self._public_key is not None:
|
||||
raise ValueError('The public key may only be set once.')
|
||||
return CertificateBuilder(
|
||||
self._issuer_name, self._subject_name, key,
|
||||
self._serial_number, self._not_valid_before,
|
||||
self._not_valid_after, self._extensions
|
||||
)
|
||||
|
||||
def serial_number(self, number):
|
||||
"""
|
||||
Sets the certificate serial number.
|
||||
"""
|
||||
if not isinstance(number, six.integer_types):
|
||||
raise TypeError('Serial number must be of integral type.')
|
||||
if self._serial_number is not None:
|
||||
raise ValueError('The serial number may only be set once.')
|
||||
if number < 0:
|
||||
raise ValueError('The serial number should be non-negative.')
|
||||
if utils.bit_length(number) > 160: # As defined in RFC 5280
|
||||
raise ValueError('The serial number should not be more than 160 '
|
||||
'bits.')
|
||||
return CertificateBuilder(
|
||||
self._issuer_name, self._subject_name,
|
||||
self._public_key, number, self._not_valid_before,
|
||||
self._not_valid_after, self._extensions
|
||||
)
|
||||
|
||||
def not_valid_before(self, time):
|
||||
"""
|
||||
Sets the certificate activation time.
|
||||
"""
|
||||
if not isinstance(time, datetime.datetime):
|
||||
raise TypeError('Expecting datetime object.')
|
||||
if self._not_valid_before is not None:
|
||||
raise ValueError('The not valid before may only be set once.')
|
||||
if time <= _UNIX_EPOCH:
|
||||
raise ValueError('The not valid before date must be after the unix'
|
||||
' epoch (1970 January 1).')
|
||||
return CertificateBuilder(
|
||||
self._issuer_name, self._subject_name,
|
||||
self._public_key, self._serial_number, time,
|
||||
self._not_valid_after, self._extensions
|
||||
)
|
||||
|
||||
def not_valid_after(self, time):
|
||||
"""
|
||||
Sets the certificate expiration time.
|
||||
"""
|
||||
if not isinstance(time, datetime.datetime):
|
||||
raise TypeError('Expecting datetime object.')
|
||||
if self._not_valid_after is not None:
|
||||
raise ValueError('The not valid after may only be set once.')
|
||||
if time <= _UNIX_EPOCH:
|
||||
raise ValueError('The not valid after date must be after the unix'
|
||||
' epoch (1970 January 1).')
|
||||
return CertificateBuilder(
|
||||
self._issuer_name, self._subject_name,
|
||||
self._public_key, self._serial_number, self._not_valid_before,
|
||||
time, self._extensions
|
||||
)
|
||||
|
||||
def add_extension(self, extension, critical):
|
||||
"""
|
||||
Adds an X.509 extension to the certificate.
|
||||
"""
|
||||
if not isinstance(extension, ExtensionType):
|
||||
raise TypeError("extension must be an ExtensionType")
|
||||
|
||||
extension = Extension(extension.oid, critical, extension)
|
||||
|
||||
# TODO: This is quadratic in the number of extensions
|
||||
for e in self._extensions:
|
||||
if e.oid == extension.oid:
|
||||
raise ValueError('This extension has already been set.')
|
||||
|
||||
return CertificateBuilder(
|
||||
self._issuer_name, self._subject_name,
|
||||
self._public_key, self._serial_number, self._not_valid_before,
|
||||
self._not_valid_after, self._extensions + [extension]
|
||||
)
|
||||
|
||||
def sign(self, private_key, algorithm, backend):
|
||||
"""
|
||||
Signs the certificate using the CA's private key.
|
||||
"""
|
||||
if self._subject_name is None:
|
||||
raise ValueError("A certificate must have a subject name")
|
||||
|
||||
if self._issuer_name is None:
|
||||
raise ValueError("A certificate must have an issuer name")
|
||||
|
||||
if self._serial_number is None:
|
||||
raise ValueError("A certificate must have a serial number")
|
||||
|
||||
if self._not_valid_before is None:
|
||||
raise ValueError("A certificate must have a not valid before time")
|
||||
|
||||
if self._not_valid_after is None:
|
||||
raise ValueError("A certificate must have a not valid after time")
|
||||
|
||||
if self._public_key is None:
|
||||
raise ValueError("A certificate must have a public key")
|
||||
|
||||
return backend.create_x509_certificate(self, private_key, algorithm)
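For orientation, a hedged sketch of producing a self-signed certificate with the builder above; the key, serial number, dates and name are placeholders, and the default OpenSSL backend is assumed.
import datetime

from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.x509.oid import NameOID

key = rsa.generate_private_key(
    public_exponent=65537, key_size=2048, backend=default_backend()
)
name = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, u"example.com")])
now = datetime.datetime.utcnow()
# Subject and issuer are the same name, so the result is self-signed.
cert = x509.CertificateBuilder().subject_name(name).issuer_name(
    name
).public_key(key.public_key()).serial_number(1000).not_valid_before(
    now
).not_valid_after(now + datetime.timedelta(days=30)).sign(
    key, hashes.SHA256(), default_backend()
)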
|
||||
924
lib/python3.5/site-packages/cryptography/x509/extensions.py
Normal file
@@ -0,0 +1,924 @@
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import abc
|
||||
import hashlib
|
||||
import ipaddress
|
||||
from enum import Enum
|
||||
|
||||
from pyasn1.codec.der import decoder
|
||||
from pyasn1.type import namedtype, univ
|
||||
|
||||
import six
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.hazmat.primitives import constant_time, serialization
|
||||
from cryptography.x509.general_name import GeneralName, IPAddress, OtherName
|
||||
from cryptography.x509.name import Name
|
||||
from cryptography.x509.oid import (
|
||||
AuthorityInformationAccessOID, ExtensionOID, ObjectIdentifier
|
||||
)
|
||||
|
||||
|
||||
class _SubjectPublicKeyInfo(univ.Sequence):
|
||||
componentType = namedtype.NamedTypes(
|
||||
namedtype.NamedType('algorithm', univ.Sequence()),
|
||||
namedtype.NamedType('subjectPublicKey', univ.BitString())
|
||||
)
|
||||
|
||||
|
||||
def _key_identifier_from_public_key(public_key):
|
||||
# This is a very slow way to do this.
|
||||
serialized = public_key.public_bytes(
|
||||
serialization.Encoding.DER,
|
||||
serialization.PublicFormat.SubjectPublicKeyInfo
|
||||
)
|
||||
spki, remaining = decoder.decode(
|
||||
serialized, asn1Spec=_SubjectPublicKeyInfo()
|
||||
)
|
||||
assert not remaining
|
||||
# the univ.BitString object is a tuple of bits. We need bytes and
|
||||
# pyasn1 really doesn't want to give them to us. To get it we'll
|
||||
# build an integer and convert that to bytes.
|
||||
bits = 0
|
||||
for bit in spki.getComponentByName("subjectPublicKey"):
|
||||
bits = bits << 1 | bit
|
||||
|
||||
data = utils.int_to_bytes(bits)
|
||||
return hashlib.sha1(data).digest()
|
||||
|
||||
|
||||
class DuplicateExtension(Exception):
|
||||
def __init__(self, msg, oid):
|
||||
super(DuplicateExtension, self).__init__(msg)
|
||||
self.oid = oid
|
||||
|
||||
|
||||
class UnsupportedExtension(Exception):
|
||||
def __init__(self, msg, oid):
|
||||
super(UnsupportedExtension, self).__init__(msg)
|
||||
self.oid = oid
|
||||
|
||||
|
||||
class ExtensionNotFound(Exception):
|
||||
def __init__(self, msg, oid):
|
||||
super(ExtensionNotFound, self).__init__(msg)
|
||||
self.oid = oid
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class ExtensionType(object):
|
||||
@abc.abstractproperty
|
||||
def oid(self):
|
||||
"""
|
||||
Returns the oid associated with the given extension type.
|
||||
"""
|
||||
|
||||
|
||||
class Extensions(object):
|
||||
def __init__(self, extensions):
|
||||
self._extensions = extensions
|
||||
|
||||
def get_extension_for_oid(self, oid):
|
||||
for ext in self:
|
||||
if ext.oid == oid:
|
||||
return ext
|
||||
|
||||
raise ExtensionNotFound("No {0} extension was found".format(oid), oid)
|
||||
|
||||
def get_extension_for_class(self, extclass):
|
||||
for ext in self:
|
||||
if isinstance(ext.value, extclass):
|
||||
return ext
|
||||
|
||||
raise ExtensionNotFound(
|
||||
"No {0} extension was found".format(extclass), extclass.oid
|
||||
)
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self._extensions)
|
||||
|
||||
def __len__(self):
|
||||
return len(self._extensions)
|
||||
|
||||
def __repr__(self):
|
||||
return (
|
||||
"<Extensions({0})>".format(self._extensions)
|
||||
)
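A small helper sketch (hypothetical, not part of this module) showing how the lookup methods above are typically used against a certificate's extensions property; "cert" is assumed to be an x509.Certificate loaded elsewhere.
from cryptography import x509
from cryptography.x509.oid import ExtensionOID


def describe_basic_constraints(cert):
    # Returns (critical, ca, path_length) or None if the extension is absent.
    try:
        ext = cert.extensions.get_extension_for_oid(
            ExtensionOID.BASIC_CONSTRAINTS
        )
    except x509.ExtensionNotFound:
        return None
    return (ext.critical, ext.value.ca, ext.value.path_length)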
|
||||
|
||||
|
||||
@utils.register_interface(ExtensionType)
|
||||
class AuthorityKeyIdentifier(object):
|
||||
oid = ExtensionOID.AUTHORITY_KEY_IDENTIFIER
|
||||
|
||||
def __init__(self, key_identifier, authority_cert_issuer,
|
||||
authority_cert_serial_number):
|
||||
if authority_cert_issuer or authority_cert_serial_number:
|
||||
if not authority_cert_issuer or not authority_cert_serial_number:
|
||||
raise ValueError(
|
||||
"authority_cert_issuer and authority_cert_serial_number "
|
||||
"must both be present or both None"
|
||||
)
|
||||
|
||||
if not all(
|
||||
isinstance(x, GeneralName) for x in authority_cert_issuer
|
||||
):
|
||||
raise TypeError(
|
||||
"authority_cert_issuer must be a list of GeneralName "
|
||||
"objects"
|
||||
)
|
||||
|
||||
if not isinstance(authority_cert_serial_number, six.integer_types):
|
||||
raise TypeError(
|
||||
"authority_cert_serial_number must be an integer"
|
||||
)
|
||||
|
||||
self._key_identifier = key_identifier
|
||||
self._authority_cert_issuer = authority_cert_issuer
|
||||
self._authority_cert_serial_number = authority_cert_serial_number
|
||||
|
||||
@classmethod
|
||||
def from_issuer_public_key(cls, public_key):
|
||||
digest = _key_identifier_from_public_key(public_key)
|
||||
return cls(
|
||||
key_identifier=digest,
|
||||
authority_cert_issuer=None,
|
||||
authority_cert_serial_number=None
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return (
|
||||
"<AuthorityKeyIdentifier(key_identifier={0.key_identifier!r}, "
|
||||
"authority_cert_issuer={0.authority_cert_issuer}, "
|
||||
"authority_cert_serial_number={0.authority_cert_serial_number}"
|
||||
")>".format(self)
|
||||
)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, AuthorityKeyIdentifier):
|
||||
return NotImplemented
|
||||
|
||||
return (
|
||||
self.key_identifier == other.key_identifier and
|
||||
self.authority_cert_issuer == other.authority_cert_issuer and
|
||||
self.authority_cert_serial_number ==
|
||||
other.authority_cert_serial_number
|
||||
)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
key_identifier = utils.read_only_property("_key_identifier")
|
||||
authority_cert_issuer = utils.read_only_property("_authority_cert_issuer")
|
||||
authority_cert_serial_number = utils.read_only_property(
|
||||
"_authority_cert_serial_number"
|
||||
)
|
||||
|
||||
|
||||
@utils.register_interface(ExtensionType)
|
||||
class SubjectKeyIdentifier(object):
|
||||
oid = ExtensionOID.SUBJECT_KEY_IDENTIFIER
|
||||
|
||||
def __init__(self, digest):
|
||||
self._digest = digest
|
||||
|
||||
@classmethod
|
||||
def from_public_key(cls, public_key):
|
||||
return cls(_key_identifier_from_public_key(public_key))
|
||||
|
||||
digest = utils.read_only_property("_digest")
|
||||
|
||||
def __repr__(self):
|
||||
return "<SubjectKeyIdentifier(digest={0!r})>".format(self.digest)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, SubjectKeyIdentifier):
|
||||
return NotImplemented
|
||||
|
||||
return constant_time.bytes_eq(self.digest, other.digest)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
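As a hedged sketch: both key-identifier extensions above derive their value from the same SHA-1 helper (_key_identifier_from_public_key), so a subject key identifier and an authority key identifier built from the same key agree.
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import rsa

key = rsa.generate_private_key(
    public_exponent=65537, key_size=2048, backend=default_backend()
)
ski = x509.SubjectKeyIdentifier.from_public_key(key.public_key())
aki = x509.AuthorityKeyIdentifier.from_issuer_public_key(key.public_key())
assert ski.digest == aki.key_identifier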
|
||||
|
||||
|
||||
@utils.register_interface(ExtensionType)
|
||||
class AuthorityInformationAccess(object):
|
||||
oid = ExtensionOID.AUTHORITY_INFORMATION_ACCESS
|
||||
|
||||
def __init__(self, descriptions):
|
||||
if not all(isinstance(x, AccessDescription) for x in descriptions):
|
||||
raise TypeError(
|
||||
"Every item in the descriptions list must be an "
|
||||
"AccessDescription"
|
||||
)
|
||||
|
||||
self._descriptions = descriptions
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self._descriptions)
|
||||
|
||||
def __len__(self):
|
||||
return len(self._descriptions)
|
||||
|
||||
def __repr__(self):
|
||||
return "<AuthorityInformationAccess({0})>".format(self._descriptions)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, AuthorityInformationAccess):
|
||||
return NotImplemented
|
||||
|
||||
return self._descriptions == other._descriptions
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
|
||||
class AccessDescription(object):
|
||||
def __init__(self, access_method, access_location):
|
||||
if not (access_method == AuthorityInformationAccessOID.OCSP or
|
||||
access_method == AuthorityInformationAccessOID.CA_ISSUERS):
|
||||
raise ValueError(
|
||||
"access_method must be OID_OCSP or OID_CA_ISSUERS"
|
||||
)
|
||||
|
||||
if not isinstance(access_location, GeneralName):
|
||||
raise TypeError("access_location must be a GeneralName")
|
||||
|
||||
self._access_method = access_method
|
||||
self._access_location = access_location
|
||||
|
||||
def __repr__(self):
|
||||
return (
|
||||
"<AccessDescription(access_method={0.access_method}, access_locati"
|
||||
"on={0.access_location})>".format(self)
|
||||
)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, AccessDescription):
|
||||
return NotImplemented
|
||||
|
||||
return (
|
||||
self.access_method == other.access_method and
|
||||
self.access_location == other.access_location
|
||||
)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
access_method = utils.read_only_property("_access_method")
|
||||
access_location = utils.read_only_property("_access_location")
|
||||
|
||||
|
||||
@utils.register_interface(ExtensionType)
|
||||
class BasicConstraints(object):
|
||||
oid = ExtensionOID.BASIC_CONSTRAINTS
|
||||
|
||||
def __init__(self, ca, path_length):
|
||||
if not isinstance(ca, bool):
|
||||
raise TypeError("ca must be a boolean value")
|
||||
|
||||
if path_length is not None and not ca:
|
||||
raise ValueError("path_length must be None when ca is False")
|
||||
|
||||
if (
|
||||
path_length is not None and
|
||||
(not isinstance(path_length, six.integer_types) or path_length < 0)
|
||||
):
|
||||
raise TypeError(
|
||||
"path_length must be a non-negative integer or None"
|
||||
)
|
||||
|
||||
self._ca = ca
|
||||
self._path_length = path_length
|
||||
|
||||
ca = utils.read_only_property("_ca")
|
||||
path_length = utils.read_only_property("_path_length")
|
||||
|
||||
def __repr__(self):
|
||||
return ("<BasicConstraints(ca={0.ca}, "
|
||||
"path_length={0.path_length})>").format(self)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, BasicConstraints):
|
||||
return NotImplemented
|
||||
|
||||
return self.ca == other.ca and self.path_length == other.path_length
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
|
||||
@utils.register_interface(ExtensionType)
|
||||
class CRLDistributionPoints(object):
|
||||
oid = ExtensionOID.CRL_DISTRIBUTION_POINTS
|
||||
|
||||
def __init__(self, distribution_points):
|
||||
if not all(
|
||||
isinstance(x, DistributionPoint) for x in distribution_points
|
||||
):
|
||||
raise TypeError(
|
||||
"distribution_points must be a list of DistributionPoint "
|
||||
"objects"
|
||||
)
|
||||
|
||||
self._distribution_points = distribution_points
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self._distribution_points)
|
||||
|
||||
def __len__(self):
|
||||
return len(self._distribution_points)
|
||||
|
||||
def __repr__(self):
|
||||
return "<CRLDistributionPoints({0})>".format(self._distribution_points)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, CRLDistributionPoints):
|
||||
return NotImplemented
|
||||
|
||||
return self._distribution_points == other._distribution_points
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
|
||||
class DistributionPoint(object):
|
||||
def __init__(self, full_name, relative_name, reasons, crl_issuer):
|
||||
if full_name and relative_name:
|
||||
raise ValueError(
|
||||
"You cannot provide both full_name and relative_name, at "
|
||||
"least one must be None."
|
||||
)
|
||||
|
||||
if full_name and not all(
|
||||
isinstance(x, GeneralName) for x in full_name
|
||||
):
|
||||
raise TypeError(
|
||||
"full_name must be a list of GeneralName objects"
|
||||
)
|
||||
|
||||
if relative_name and not isinstance(relative_name, Name):
|
||||
raise TypeError("relative_name must be a Name")
|
||||
|
||||
if crl_issuer and not all(
|
||||
isinstance(x, GeneralName) for x in crl_issuer
|
||||
):
|
||||
raise TypeError(
|
||||
"crl_issuer must be None or a list of general names"
|
||||
)
|
||||
|
||||
if reasons and (not isinstance(reasons, frozenset) or not all(
|
||||
isinstance(x, ReasonFlags) for x in reasons
|
||||
)):
|
||||
raise TypeError("reasons must be None or frozenset of ReasonFlags")
|
||||
|
||||
if reasons and (
|
||||
ReasonFlags.unspecified in reasons or
|
||||
ReasonFlags.remove_from_crl in reasons
|
||||
):
|
||||
raise ValueError(
|
||||
"unspecified and remove_from_crl are not valid reasons in a "
|
||||
"DistributionPoint"
|
||||
)
|
||||
|
||||
if reasons and not crl_issuer and not (full_name or relative_name):
|
||||
raise ValueError(
|
||||
"You must supply crl_issuer, full_name, or relative_name when "
|
||||
"reasons is not None"
|
||||
)
|
||||
|
||||
self._full_name = full_name
|
||||
self._relative_name = relative_name
|
||||
self._reasons = reasons
|
||||
self._crl_issuer = crl_issuer
|
||||
|
||||
def __repr__(self):
|
||||
return (
|
||||
"<DistributionPoint(full_name={0.full_name}, relative_name={0.rela"
|
||||
"tive_name}, reasons={0.reasons}, crl_issuer={0.crl_is"
|
||||
"suer})>".format(self)
|
||||
)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, DistributionPoint):
|
||||
return NotImplemented
|
||||
|
||||
return (
|
||||
self.full_name == other.full_name and
|
||||
self.relative_name == other.relative_name and
|
||||
self.reasons == other.reasons and
|
||||
self.crl_issuer == other.crl_issuer
|
||||
)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
full_name = utils.read_only_property("_full_name")
|
||||
relative_name = utils.read_only_property("_relative_name")
|
||||
reasons = utils.read_only_property("_reasons")
|
||||
crl_issuer = utils.read_only_property("_crl_issuer")
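A hedged construction sketch for the two classes above, using a single distribution point with a placeholder CRL URL.
from cryptography import x509

point = x509.DistributionPoint(
    full_name=[x509.UniformResourceIdentifier(u"http://crl.example.com/ca.crl")],
    relative_name=None,
    reasons=None,
    crl_issuer=None,
)
crl_points = x509.CRLDistributionPoints([point])
assert len(crl_points) == 1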
|
||||
|
||||
|
||||
class ReasonFlags(Enum):
|
||||
unspecified = "unspecified"
|
||||
key_compromise = "keyCompromise"
|
||||
ca_compromise = "cACompromise"
|
||||
affiliation_changed = "affiliationChanged"
|
||||
superseded = "superseded"
|
||||
cessation_of_operation = "cessationOfOperation"
|
||||
certificate_hold = "certificateHold"
|
||||
privilege_withdrawn = "privilegeWithdrawn"
|
||||
aa_compromise = "aACompromise"
|
||||
remove_from_crl = "removeFromCRL"
|
||||
|
||||
|
||||
@utils.register_interface(ExtensionType)
|
||||
class CertificatePolicies(object):
|
||||
oid = ExtensionOID.CERTIFICATE_POLICIES
|
||||
|
||||
def __init__(self, policies):
|
||||
if not all(isinstance(x, PolicyInformation) for x in policies):
|
||||
raise TypeError(
|
||||
"Every item in the policies list must be a "
|
||||
"PolicyInformation"
|
||||
)
|
||||
|
||||
self._policies = policies
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self._policies)
|
||||
|
||||
def __len__(self):
|
||||
return len(self._policies)
|
||||
|
||||
def __repr__(self):
|
||||
return "<CertificatePolicies({0})>".format(self._policies)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, CertificatePolicies):
|
||||
return NotImplemented
|
||||
|
||||
return self._policies == other._policies
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
|
||||
class PolicyInformation(object):
|
||||
def __init__(self, policy_identifier, policy_qualifiers):
|
||||
if not isinstance(policy_identifier, ObjectIdentifier):
|
||||
raise TypeError("policy_identifier must be an ObjectIdentifier")
|
||||
|
||||
self._policy_identifier = policy_identifier
|
||||
if policy_qualifiers and not all(
|
||||
isinstance(
|
||||
x, (six.text_type, UserNotice)
|
||||
) for x in policy_qualifiers
|
||||
):
|
||||
raise TypeError(
|
||||
"policy_qualifiers must be a list of strings and/or UserNotice"
|
||||
" objects or None"
|
||||
)
|
||||
|
||||
self._policy_qualifiers = policy_qualifiers
|
||||
|
||||
def __repr__(self):
|
||||
return (
|
||||
"<PolicyInformation(policy_identifier={0.policy_identifier}, polic"
|
||||
"y_qualifiers={0.policy_qualifiers})>".format(self)
|
||||
)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, PolicyInformation):
|
||||
return NotImplemented
|
||||
|
||||
return (
|
||||
self.policy_identifier == other.policy_identifier and
|
||||
self.policy_qualifiers == other.policy_qualifiers
|
||||
)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
policy_identifier = utils.read_only_property("_policy_identifier")
|
||||
policy_qualifiers = utils.read_only_property("_policy_qualifiers")
|
||||
|
||||
|
||||
class UserNotice(object):
|
||||
def __init__(self, notice_reference, explicit_text):
|
||||
if notice_reference and not isinstance(
|
||||
notice_reference, NoticeReference
|
||||
):
|
||||
raise TypeError(
|
||||
"notice_reference must be None or a NoticeReference"
|
||||
)
|
||||
|
||||
self._notice_reference = notice_reference
|
||||
self._explicit_text = explicit_text
|
||||
|
||||
def __repr__(self):
|
||||
return (
|
||||
"<UserNotice(notice_reference={0.notice_reference}, explicit_text="
|
||||
"{0.explicit_text!r})>".format(self)
|
||||
)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, UserNotice):
|
||||
return NotImplemented
|
||||
|
||||
return (
|
||||
self.notice_reference == other.notice_reference and
|
||||
self.explicit_text == other.explicit_text
|
||||
)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
notice_reference = utils.read_only_property("_notice_reference")
|
||||
explicit_text = utils.read_only_property("_explicit_text")
|
||||
|
||||
|
||||
class NoticeReference(object):
|
||||
def __init__(self, organization, notice_numbers):
|
||||
self._organization = organization
|
||||
if not isinstance(notice_numbers, list) or not all(
|
||||
isinstance(x, int) for x in notice_numbers
|
||||
):
|
||||
raise TypeError(
|
||||
"notice_numbers must be a list of integers"
|
||||
)
|
||||
|
||||
self._notice_numbers = notice_numbers
|
||||
|
||||
def __repr__(self):
|
||||
return (
|
||||
"<NoticeReference(organization={0.organization!r}, notice_numbers="
|
||||
"{0.notice_numbers})>".format(self)
|
||||
)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, NoticeReference):
|
||||
return NotImplemented
|
||||
|
||||
return (
|
||||
self.organization == other.organization and
|
||||
self.notice_numbers == other.notice_numbers
|
||||
)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
organization = utils.read_only_property("_organization")
|
||||
notice_numbers = utils.read_only_property("_notice_numbers")
|
||||
|
||||
|
||||
@utils.register_interface(ExtensionType)
|
||||
class ExtendedKeyUsage(object):
|
||||
oid = ExtensionOID.EXTENDED_KEY_USAGE
|
||||
|
||||
def __init__(self, usages):
|
||||
if not all(isinstance(x, ObjectIdentifier) for x in usages):
|
||||
raise TypeError(
|
||||
"Every item in the usages list must be an ObjectIdentifier"
|
||||
)
|
||||
|
||||
self._usages = usages
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self._usages)
|
||||
|
||||
def __len__(self):
|
||||
return len(self._usages)
|
||||
|
||||
def __repr__(self):
|
||||
return "<ExtendedKeyUsage({0})>".format(self._usages)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, ExtendedKeyUsage):
|
||||
return NotImplemented
|
||||
|
||||
return self._usages == other._usages
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
|
||||
@utils.register_interface(ExtensionType)
|
||||
class OCSPNoCheck(object):
|
||||
oid = ExtensionOID.OCSP_NO_CHECK
|
||||
|
||||
|
||||
@utils.register_interface(ExtensionType)
|
||||
class InhibitAnyPolicy(object):
|
||||
oid = ExtensionOID.INHIBIT_ANY_POLICY
|
||||
|
||||
def __init__(self, skip_certs):
|
||||
if not isinstance(skip_certs, six.integer_types):
|
||||
raise TypeError("skip_certs must be an integer")
|
||||
|
||||
if skip_certs < 0:
|
||||
raise ValueError("skip_certs must be a non-negative integer")
|
||||
|
||||
self._skip_certs = skip_certs
|
||||
|
||||
def __repr__(self):
|
||||
return "<InhibitAnyPolicy(skip_certs={0.skip_certs})>".format(self)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, InhibitAnyPolicy):
|
||||
return NotImplemented
|
||||
|
||||
return self.skip_certs == other.skip_certs
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
skip_certs = utils.read_only_property("_skip_certs")
|
||||
|
||||
|
||||
@utils.register_interface(ExtensionType)
|
||||
class KeyUsage(object):
|
||||
oid = ExtensionOID.KEY_USAGE
|
||||
|
||||
def __init__(self, digital_signature, content_commitment, key_encipherment,
|
||||
data_encipherment, key_agreement, key_cert_sign, crl_sign,
|
||||
encipher_only, decipher_only):
|
||||
if not key_agreement and (encipher_only or decipher_only):
|
||||
raise ValueError(
|
||||
"encipher_only and decipher_only can only be true when "
|
||||
"key_agreement is true"
|
||||
)
|
||||
|
||||
self._digital_signature = digital_signature
|
||||
self._content_commitment = content_commitment
|
||||
self._key_encipherment = key_encipherment
|
||||
self._data_encipherment = data_encipherment
|
||||
self._key_agreement = key_agreement
|
||||
self._key_cert_sign = key_cert_sign
|
||||
self._crl_sign = crl_sign
|
||||
self._encipher_only = encipher_only
|
||||
self._decipher_only = decipher_only
|
||||
|
||||
digital_signature = utils.read_only_property("_digital_signature")
|
||||
content_commitment = utils.read_only_property("_content_commitment")
|
||||
key_encipherment = utils.read_only_property("_key_encipherment")
|
||||
data_encipherment = utils.read_only_property("_data_encipherment")
|
||||
key_agreement = utils.read_only_property("_key_agreement")
|
||||
key_cert_sign = utils.read_only_property("_key_cert_sign")
|
||||
crl_sign = utils.read_only_property("_crl_sign")
|
||||
|
||||
@property
|
||||
def encipher_only(self):
|
||||
if not self.key_agreement:
|
||||
raise ValueError(
|
||||
"encipher_only is undefined unless key_agreement is true"
|
||||
)
|
||||
else:
|
||||
return self._encipher_only
|
||||
|
||||
@property
|
||||
def decipher_only(self):
|
||||
if not self.key_agreement:
|
||||
raise ValueError(
|
||||
"decipher_only is undefined unless key_agreement is true"
|
||||
)
|
||||
else:
|
||||
return self._decipher_only
|
||||
|
||||
def __repr__(self):
|
||||
try:
|
||||
encipher_only = self.encipher_only
|
||||
decipher_only = self.decipher_only
|
||||
except ValueError:
|
||||
encipher_only = None
|
||||
decipher_only = None
|
||||
|
||||
return ("<KeyUsage(digital_signature={0.digital_signature}, "
|
||||
"content_commitment={0.content_commitment}, "
|
||||
"key_encipherment={0.key_encipherment}, "
|
||||
"data_encipherment={0.data_encipherment}, "
|
||||
"key_agreement={0.key_agreement}, "
|
||||
"key_cert_sign={0.key_cert_sign}, crl_sign={0.crl_sign}, "
|
||||
"encipher_only={1}, decipher_only={2})>").format(
|
||||
self, encipher_only, decipher_only)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, KeyUsage):
|
||||
return NotImplemented
|
||||
|
||||
return (
|
||||
self.digital_signature == other.digital_signature and
|
||||
self.content_commitment == other.content_commitment and
|
||||
self.key_encipherment == other.key_encipherment and
|
||||
self.data_encipherment == other.data_encipherment and
|
||||
self.key_agreement == other.key_agreement and
|
||||
self.key_cert_sign == other.key_cert_sign and
|
||||
self.crl_sign == other.crl_sign and
|
||||
self._encipher_only == other._encipher_only and
|
||||
self._decipher_only == other._decipher_only
|
||||
)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
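A hedged sketch of a KeyUsage value typical for a CA certificate; encipher_only and decipher_only must stay False here because key_agreement is False, as enforced by the constructor above.
from cryptography import x509

ca_key_usage = x509.KeyUsage(
    digital_signature=True,
    content_commitment=False,
    key_encipherment=False,
    data_encipherment=False,
    key_agreement=False,
    key_cert_sign=True,
    crl_sign=True,
    encipher_only=False,
    decipher_only=False,
)
assert ca_key_usage.key_cert_sign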
|
||||
|
||||
|
||||
@utils.register_interface(ExtensionType)
|
||||
class NameConstraints(object):
|
||||
oid = ExtensionOID.NAME_CONSTRAINTS
|
||||
|
||||
def __init__(self, permitted_subtrees, excluded_subtrees):
|
||||
if permitted_subtrees is not None:
|
||||
if not all(
|
||||
isinstance(x, GeneralName) for x in permitted_subtrees
|
||||
):
|
||||
raise TypeError(
|
||||
"permitted_subtrees must be a list of GeneralName objects "
|
||||
"or None"
|
||||
)
|
||||
|
||||
self._validate_ip_name(permitted_subtrees)
|
||||
|
||||
if excluded_subtrees is not None:
|
||||
if not all(
|
||||
isinstance(x, GeneralName) for x in excluded_subtrees
|
||||
):
|
||||
raise TypeError(
|
||||
"excluded_subtrees must be a list of GeneralName objects "
|
||||
"or None"
|
||||
)
|
||||
|
||||
self._validate_ip_name(excluded_subtrees)
|
||||
|
||||
if permitted_subtrees is None and excluded_subtrees is None:
|
||||
raise ValueError(
|
||||
"At least one of permitted_subtrees and excluded_subtrees "
|
||||
"must not be None"
|
||||
)
|
||||
|
||||
self._permitted_subtrees = permitted_subtrees
|
||||
self._excluded_subtrees = excluded_subtrees
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, NameConstraints):
|
||||
return NotImplemented
|
||||
|
||||
return (
|
||||
self.excluded_subtrees == other.excluded_subtrees and
|
||||
self.permitted_subtrees == other.permitted_subtrees
|
||||
)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
def _validate_ip_name(self, tree):
|
||||
if any(isinstance(name, IPAddress) and not isinstance(
|
||||
name.value, (ipaddress.IPv4Network, ipaddress.IPv6Network)
|
||||
) for name in tree):
|
||||
raise TypeError(
|
||||
"IPAddress name constraints must be an IPv4Network or"
|
||||
" IPv6Network object"
|
||||
)
|
||||
|
||||
def __repr__(self):
|
||||
return (
|
||||
u"<NameConstraints(permitted_subtrees={0.permitted_subtrees}, "
|
||||
u"excluded_subtrees={0.excluded_subtrees})>".format(self)
|
||||
)
|
||||
|
||||
permitted_subtrees = utils.read_only_property("_permitted_subtrees")
|
||||
excluded_subtrees = utils.read_only_property("_excluded_subtrees")
|
||||
|
||||
|
||||
class Extension(object):
|
||||
def __init__(self, oid, critical, value):
|
||||
if not isinstance(oid, ObjectIdentifier):
|
||||
raise TypeError(
|
||||
"oid argument must be an ObjectIdentifier instance."
|
||||
)
|
||||
|
||||
if not isinstance(critical, bool):
|
||||
raise TypeError("critical must be a boolean value")
|
||||
|
||||
self._oid = oid
|
||||
self._critical = critical
|
||||
self._value = value
|
||||
|
||||
oid = utils.read_only_property("_oid")
|
||||
critical = utils.read_only_property("_critical")
|
||||
value = utils.read_only_property("_value")
|
||||
|
||||
def __repr__(self):
|
||||
return ("<Extension(oid={0.oid}, critical={0.critical}, "
|
||||
"value={0.value})>").format(self)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, Extension):
|
||||
return NotImplemented
|
||||
|
||||
return (
|
||||
self.oid == other.oid and
|
||||
self.critical == other.critical and
|
||||
self.value == other.value
|
||||
)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
|
||||
class GeneralNames(object):
|
||||
def __init__(self, general_names):
|
||||
if not all(isinstance(x, GeneralName) for x in general_names):
|
||||
raise TypeError(
|
||||
"Every item in the general_names list must be an "
|
||||
"object conforming to the GeneralName interface"
|
||||
)
|
||||
|
||||
self._general_names = general_names
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self._general_names)
|
||||
|
||||
def __len__(self):
|
||||
return len(self._general_names)
|
||||
|
||||
def get_values_for_type(self, type):
|
||||
# Return the value of each GeneralName, except for OtherName instances
|
||||
# which we return directly because it has two important properties not
|
||||
# just one value.
|
||||
objs = (i for i in self if isinstance(i, type))
|
||||
if type != OtherName:
|
||||
objs = (i.value for i in objs)
|
||||
return list(objs)
|
||||
|
||||
def __repr__(self):
|
||||
return "<GeneralNames({0})>".format(self._general_names)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, GeneralNames):
|
||||
return NotImplemented
|
||||
|
||||
return self._general_names == other._general_names
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
|
||||
@utils.register_interface(ExtensionType)
|
||||
class SubjectAlternativeName(object):
|
||||
oid = ExtensionOID.SUBJECT_ALTERNATIVE_NAME
|
||||
|
||||
def __init__(self, general_names):
|
||||
self._general_names = GeneralNames(general_names)
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self._general_names)
|
||||
|
||||
def __len__(self):
|
||||
return len(self._general_names)
|
||||
|
||||
def get_values_for_type(self, type):
|
||||
return self._general_names.get_values_for_type(type)
|
||||
|
||||
def __repr__(self):
|
||||
return "<SubjectAlternativeName({0})>".format(self._general_names)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, SubjectAlternativeName):
|
||||
return NotImplemented
|
||||
|
||||
return self._general_names == other._general_names
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
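A quick sketch (illustrative values) of get_values_for_type on the class above, which unwraps each general name to its value, except for OtherName instances which are returned whole.
from cryptography import x509

san = x509.SubjectAlternativeName([
    x509.DNSName(u"example.com"),
    x509.DNSName(u"www.example.com"),
])
assert san.get_values_for_type(x509.DNSName) == [
    u"example.com", u"www.example.com"
]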
|
||||
|
||||
|
||||
@utils.register_interface(ExtensionType)
|
||||
class IssuerAlternativeName(object):
|
||||
oid = ExtensionOID.ISSUER_ALTERNATIVE_NAME
|
||||
|
||||
def __init__(self, general_names):
|
||||
self._general_names = GeneralNames(general_names)
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self._general_names)
|
||||
|
||||
def __len__(self):
|
||||
return len(self._general_names)
|
||||
|
||||
def get_values_for_type(self, type):
|
||||
return self._general_names.get_values_for_type(type)
|
||||
|
||||
def __repr__(self):
|
||||
return "<IssuerAlternativeName({0})>".format(self._general_names)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, IssuerAlternativeName):
|
||||
return NotImplemented
|
||||
|
||||
return self._general_names == other._general_names
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
265
lib/python3.5/site-packages/cryptography/x509/general_name.py
Normal file
@@ -0,0 +1,265 @@
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import abc
|
||||
import ipaddress
|
||||
from email.utils import parseaddr
|
||||
|
||||
import idna
|
||||
|
||||
import six
|
||||
|
||||
from six.moves import urllib_parse
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.x509.name import Name
|
||||
from cryptography.x509.oid import ObjectIdentifier
|
||||
|
||||
|
||||
_GENERAL_NAMES = {
|
||||
0: "otherName",
|
||||
1: "rfc822Name",
|
||||
2: "dNSName",
|
||||
3: "x400Address",
|
||||
4: "directoryName",
|
||||
5: "ediPartyName",
|
||||
6: "uniformResourceIdentifier",
|
||||
7: "iPAddress",
|
||||
8: "registeredID",
|
||||
}
|
||||
|
||||
|
||||
class UnsupportedGeneralNameType(Exception):
|
||||
def __init__(self, msg, type):
|
||||
super(UnsupportedGeneralNameType, self).__init__(msg)
|
||||
self.type = type
|
||||
|
||||
|
||||
@six.add_metaclass(abc.ABCMeta)
|
||||
class GeneralName(object):
|
||||
@abc.abstractproperty
|
||||
def value(self):
|
||||
"""
|
||||
Return the value of the object
|
||||
"""
|
||||
|
||||
|
||||
@utils.register_interface(GeneralName)
|
||||
class RFC822Name(object):
|
||||
def __init__(self, value):
|
||||
if not isinstance(value, six.text_type):
|
||||
raise TypeError("value must be a unicode string")
|
||||
|
||||
name, address = parseaddr(value)
|
||||
parts = address.split(u"@")
|
||||
if name or not address:
|
||||
# parseaddr has found a name (e.g. Name <email>) or the entire
|
||||
# value is an empty string.
|
||||
raise ValueError("Invalid rfc822name value")
|
||||
elif len(parts) == 1:
|
||||
# Single label email name. This is valid for local delivery.
|
||||
# No IDNA encoding needed since there is no domain component.
|
||||
encoded = address.encode("ascii")
|
||||
else:
|
||||
# A normal email of the form user@domain.com. Let's attempt to
|
||||
# encode the domain component and reconstruct the address.
|
||||
encoded = parts[0].encode("ascii") + b"@" + idna.encode(parts[1])
|
||||
|
||||
self._value = value
|
||||
self._encoded = encoded
|
||||
|
||||
value = utils.read_only_property("_value")
|
||||
|
||||
def __repr__(self):
|
||||
return "<RFC822Name(value={0})>".format(self.value)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, RFC822Name):
|
||||
return NotImplemented
|
||||
|
||||
return self.value == other.value
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
|
||||
@utils.register_interface(GeneralName)
|
||||
class DNSName(object):
|
||||
def __init__(self, value):
|
||||
if not isinstance(value, six.text_type):
|
||||
raise TypeError("value must be a unicode string")
|
||||
|
||||
self._value = value
|
||||
|
||||
value = utils.read_only_property("_value")
|
||||
|
||||
def __repr__(self):
|
||||
return "<DNSName(value={0})>".format(self.value)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, DNSName):
|
||||
return NotImplemented
|
||||
|
||||
return self.value == other.value
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
|
||||
@utils.register_interface(GeneralName)
|
||||
class UniformResourceIdentifier(object):
|
||||
def __init__(self, value):
|
||||
if not isinstance(value, six.text_type):
|
||||
raise TypeError("value must be a unicode string")
|
||||
|
||||
parsed = urllib_parse.urlparse(value)
|
||||
if not parsed.hostname:
|
||||
netloc = ""
|
||||
elif parsed.port:
|
||||
netloc = (
|
||||
idna.encode(parsed.hostname) +
|
||||
":{0}".format(parsed.port).encode("ascii")
|
||||
).decode("ascii")
|
||||
else:
|
||||
netloc = idna.encode(parsed.hostname).decode("ascii")
|
||||
|
||||
# Note that building a URL in this fashion means it should be
|
||||
# semantically indistinguishable from the original but is not
|
||||
# guaranteed to be exactly the same.
|
||||
uri = urllib_parse.urlunparse((
|
||||
parsed.scheme,
|
||||
netloc,
|
||||
parsed.path,
|
||||
parsed.params,
|
||||
parsed.query,
|
||||
parsed.fragment
|
||||
)).encode("ascii")
|
||||
|
||||
self._value = value
|
||||
self._encoded = uri
|
||||
|
||||
value = utils.read_only_property("_value")
|
||||
|
||||
def __repr__(self):
|
||||
return "<UniformResourceIdentifier(value={0})>".format(self.value)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, UniformResourceIdentifier):
|
||||
return NotImplemented
|
||||
|
||||
return self.value == other.value
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
|
||||
@utils.register_interface(GeneralName)
|
||||
class DirectoryName(object):
|
||||
def __init__(self, value):
|
||||
if not isinstance(value, Name):
|
||||
raise TypeError("value must be a Name")
|
||||
|
||||
self._value = value
|
||||
|
||||
value = utils.read_only_property("_value")
|
||||
|
||||
def __repr__(self):
|
||||
return "<DirectoryName(value={0})>".format(self.value)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, DirectoryName):
|
||||
return NotImplemented
|
||||
|
||||
return self.value == other.value
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
|
||||
@utils.register_interface(GeneralName)
|
||||
class RegisteredID(object):
|
||||
def __init__(self, value):
|
||||
if not isinstance(value, ObjectIdentifier):
|
||||
raise TypeError("value must be an ObjectIdentifier")
|
||||
|
||||
self._value = value
|
||||
|
||||
value = utils.read_only_property("_value")
|
||||
|
||||
def __repr__(self):
|
||||
return "<RegisteredID(value={0})>".format(self.value)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, RegisteredID):
|
||||
return NotImplemented
|
||||
|
||||
return self.value == other.value
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
|
||||
@utils.register_interface(GeneralName)
|
||||
class IPAddress(object):
|
||||
def __init__(self, value):
|
||||
if not isinstance(
|
||||
value,
|
||||
(
|
||||
ipaddress.IPv4Address,
|
||||
ipaddress.IPv6Address,
|
||||
ipaddress.IPv4Network,
|
||||
ipaddress.IPv6Network
|
||||
)
|
||||
):
|
||||
raise TypeError(
|
||||
"value must be an instance of ipaddress.IPv4Address, "
|
||||
"ipaddress.IPv6Address, ipaddress.IPv4Network, or "
|
||||
"ipaddress.IPv6Network"
|
||||
)
|
||||
|
||||
self._value = value
|
||||
|
||||
value = utils.read_only_property("_value")
|
||||
|
||||
def __repr__(self):
|
||||
return "<IPAddress(value={0})>".format(self.value)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, IPAddress):
|
||||
return NotImplemented
|
||||
|
||||
return self.value == other.value
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
|
||||
@utils.register_interface(GeneralName)
|
||||
class OtherName(object):
|
||||
def __init__(self, type_id, value):
|
||||
if not isinstance(type_id, ObjectIdentifier):
|
||||
raise TypeError("type_id must be an ObjectIdentifier")
|
||||
if not isinstance(value, bytes):
|
||||
raise TypeError("value must be a binary string")
|
||||
|
||||
self._type_id = type_id
|
||||
self._value = value
|
||||
|
||||
type_id = utils.read_only_property("_type_id")
|
||||
value = utils.read_only_property("_value")
|
||||
|
||||
def __repr__(self):
|
||||
return "<OtherName(type_id={0}, value={1!r})>".format(
|
||||
self.type_id, self.value)
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, OtherName):
|
||||
return NotImplemented
|
||||
|
||||
return self.type_id == other.type_id and self.value == other.value
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
78
lib/python3.5/site-packages/cryptography/x509/name.py
Normal file
@@ -0,0 +1,78 @@
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import six
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.x509.oid import ObjectIdentifier
|
||||
|
||||
|
||||
class NameAttribute(object):
|
||||
def __init__(self, oid, value):
|
||||
if not isinstance(oid, ObjectIdentifier):
|
||||
raise TypeError(
|
||||
"oid argument must be an ObjectIdentifier instance."
|
||||
)
|
||||
|
||||
if not isinstance(value, six.text_type):
|
||||
raise TypeError(
|
||||
"value argument must be a text type."
|
||||
)
|
||||
|
||||
self._oid = oid
|
||||
self._value = value
|
||||
|
||||
oid = utils.read_only_property("_oid")
|
||||
value = utils.read_only_property("_value")
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, NameAttribute):
|
||||
return NotImplemented
|
||||
|
||||
return (
|
||||
self.oid == other.oid and
|
||||
self.value == other.value
|
||||
)
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
def __hash__(self):
|
||||
return hash((self.oid, self.value))
|
||||
|
||||
def __repr__(self):
|
||||
return "<NameAttribute(oid={0.oid}, value={0.value!r})>".format(self)
|
||||
|
||||
|
||||
class Name(object):
|
||||
def __init__(self, attributes):
|
||||
self._attributes = attributes
|
||||
|
||||
def get_attributes_for_oid(self, oid):
|
||||
return [i for i in self if i.oid == oid]
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, Name):
|
||||
return NotImplemented
|
||||
|
||||
return self._attributes == other._attributes
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
def __hash__(self):
|
||||
# TODO: this is relatively expensive, if this looks like a bottleneck
|
||||
# for you, consider optimizing!
|
||||
return hash(tuple(self._attributes))
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self._attributes)
|
||||
|
||||
def __len__(self):
|
||||
return len(self._attributes)
|
||||
|
||||
def __repr__(self):
|
||||
return "<Name({0!r})>".format(self._attributes)
|
||||
199
lib/python3.5/site-packages/cryptography/x509/oid.py
Normal file
@@ -0,0 +1,199 @@
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||
# for complete details.
|
||||
|
||||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
from cryptography import utils
|
||||
from cryptography.hazmat.primitives import hashes
|
||||
|
||||
|
||||
class ObjectIdentifier(object):
|
||||
def __init__(self, dotted_string):
|
||||
self._dotted_string = dotted_string
|
||||
|
||||
def __eq__(self, other):
|
||||
if not isinstance(other, ObjectIdentifier):
|
||||
return NotImplemented
|
||||
|
||||
return self.dotted_string == other.dotted_string
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
def __repr__(self):
|
||||
return "<ObjectIdentifier(oid={0}, name={1})>".format(
|
||||
self.dotted_string,
|
||||
self._name
|
||||
)
|
||||
|
||||
def __hash__(self):
|
||||
return hash(self.dotted_string)
|
||||
|
||||
@property
|
||||
def _name(self):
|
||||
return _OID_NAMES.get(self, "Unknown OID")
|
||||
|
||||
dotted_string = utils.read_only_property("_dotted_string")
|
||||
|
||||
|
||||
class ExtensionOID(object):
|
||||
SUBJECT_DIRECTORY_ATTRIBUTES = ObjectIdentifier("2.5.29.9")
|
||||
SUBJECT_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.14")
|
||||
KEY_USAGE = ObjectIdentifier("2.5.29.15")
|
||||
SUBJECT_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.17")
|
||||
ISSUER_ALTERNATIVE_NAME = ObjectIdentifier("2.5.29.18")
|
||||
BASIC_CONSTRAINTS = ObjectIdentifier("2.5.29.19")
|
||||
NAME_CONSTRAINTS = ObjectIdentifier("2.5.29.30")
|
||||
CRL_DISTRIBUTION_POINTS = ObjectIdentifier("2.5.29.31")
|
||||
CERTIFICATE_POLICIES = ObjectIdentifier("2.5.29.32")
|
||||
POLICY_MAPPINGS = ObjectIdentifier("2.5.29.33")
|
||||
AUTHORITY_KEY_IDENTIFIER = ObjectIdentifier("2.5.29.35")
|
||||
POLICY_CONSTRAINTS = ObjectIdentifier("2.5.29.36")
|
||||
EXTENDED_KEY_USAGE = ObjectIdentifier("2.5.29.37")
|
||||
FRESHEST_CRL = ObjectIdentifier("2.5.29.46")
|
||||
INHIBIT_ANY_POLICY = ObjectIdentifier("2.5.29.54")
|
||||
AUTHORITY_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.1")
|
||||
SUBJECT_INFORMATION_ACCESS = ObjectIdentifier("1.3.6.1.5.5.7.1.11")
|
||||
OCSP_NO_CHECK = ObjectIdentifier("1.3.6.1.5.5.7.48.1.5")
|
||||
|
||||
|
||||
class CRLExtensionOID(object):
|
||||
CERTIFICATE_ISSUER = ObjectIdentifier("2.5.29.29")
|
||||
CRL_REASON = ObjectIdentifier("2.5.29.21")
|
||||
INVALIDITY_DATE = ObjectIdentifier("2.5.29.24")
|
||||
|
||||
|
||||
class NameOID(object):
|
||||
COMMON_NAME = ObjectIdentifier("2.5.4.3")
|
||||
COUNTRY_NAME = ObjectIdentifier("2.5.4.6")
|
||||
LOCALITY_NAME = ObjectIdentifier("2.5.4.7")
|
||||
STATE_OR_PROVINCE_NAME = ObjectIdentifier("2.5.4.8")
|
||||
ORGANIZATION_NAME = ObjectIdentifier("2.5.4.10")
|
||||
ORGANIZATIONAL_UNIT_NAME = ObjectIdentifier("2.5.4.11")
|
||||
SERIAL_NUMBER = ObjectIdentifier("2.5.4.5")
|
||||
SURNAME = ObjectIdentifier("2.5.4.4")
|
||||
GIVEN_NAME = ObjectIdentifier("2.5.4.42")
|
||||
TITLE = ObjectIdentifier("2.5.4.12")
|
||||
GENERATION_QUALIFIER = ObjectIdentifier("2.5.4.44")
|
||||
DN_QUALIFIER = ObjectIdentifier("2.5.4.46")
|
||||
PSEUDONYM = ObjectIdentifier("2.5.4.65")
|
||||
DOMAIN_COMPONENT = ObjectIdentifier("0.9.2342.19200300.100.1.25")
|
||||
EMAIL_ADDRESS = ObjectIdentifier("1.2.840.113549.1.9.1")
|
||||
|
||||
|
||||
class SignatureAlgorithmOID(object):
|
||||
RSA_WITH_MD5 = ObjectIdentifier("1.2.840.113549.1.1.4")
|
||||
RSA_WITH_SHA1 = ObjectIdentifier("1.2.840.113549.1.1.5")
|
||||
RSA_WITH_SHA224 = ObjectIdentifier("1.2.840.113549.1.1.14")
|
||||
RSA_WITH_SHA256 = ObjectIdentifier("1.2.840.113549.1.1.11")
|
||||
RSA_WITH_SHA384 = ObjectIdentifier("1.2.840.113549.1.1.12")
|
||||
RSA_WITH_SHA512 = ObjectIdentifier("1.2.840.113549.1.1.13")
|
||||
ECDSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10045.4.1")
|
||||
ECDSA_WITH_SHA224 = ObjectIdentifier("1.2.840.10045.4.3.1")
|
||||
ECDSA_WITH_SHA256 = ObjectIdentifier("1.2.840.10045.4.3.2")
|
||||
ECDSA_WITH_SHA384 = ObjectIdentifier("1.2.840.10045.4.3.3")
|
||||
ECDSA_WITH_SHA512 = ObjectIdentifier("1.2.840.10045.4.3.4")
|
||||
DSA_WITH_SHA1 = ObjectIdentifier("1.2.840.10040.4.3")
|
||||
DSA_WITH_SHA224 = ObjectIdentifier("2.16.840.1.101.3.4.3.1")
|
||||
DSA_WITH_SHA256 = ObjectIdentifier("2.16.840.1.101.3.4.3.2")
|
||||
|
||||
_SIG_OIDS_TO_HASH = {
|
||||
SignatureAlgorithmOID.RSA_WITH_MD5.dotted_string: hashes.MD5(),
|
||||
SignatureAlgorithmOID.RSA_WITH_SHA1.dotted_string: hashes.SHA1(),
|
||||
SignatureAlgorithmOID.RSA_WITH_SHA224.dotted_string: hashes.SHA224(),
|
||||
SignatureAlgorithmOID.RSA_WITH_SHA256.dotted_string: hashes.SHA256(),
|
||||
SignatureAlgorithmOID.RSA_WITH_SHA384.dotted_string: hashes.SHA384(),
|
||||
SignatureAlgorithmOID.RSA_WITH_SHA512.dotted_string: hashes.SHA512(),
|
||||
SignatureAlgorithmOID.ECDSA_WITH_SHA1.dotted_string: hashes.SHA1(),
|
||||
SignatureAlgorithmOID.ECDSA_WITH_SHA224.dotted_string: hashes.SHA224(),
|
||||
SignatureAlgorithmOID.ECDSA_WITH_SHA256.dotted_string: hashes.SHA256(),
|
||||
SignatureAlgorithmOID.ECDSA_WITH_SHA384.dotted_string: hashes.SHA384(),
|
||||
SignatureAlgorithmOID.ECDSA_WITH_SHA512.dotted_string: hashes.SHA512(),
|
||||
SignatureAlgorithmOID.DSA_WITH_SHA1.dotted_string: hashes.SHA1(),
|
||||
SignatureAlgorithmOID.DSA_WITH_SHA224.dotted_string: hashes.SHA224(),
|
||||
SignatureAlgorithmOID.DSA_WITH_SHA256.dotted_string: hashes.SHA256()
|
||||
}
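A brief sketch of what the private map above provides: backends use it to resolve a signature algorithm OID to a HashAlgorithm instance (module-internal API, shown for illustration only using names defined in this file).
oid = SignatureAlgorithmOID.RSA_WITH_SHA256
assert isinstance(_SIG_OIDS_TO_HASH[oid.dotted_string], hashes.SHA256)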
|
||||
|
||||
|
||||
class ExtendedKeyUsageOID(object):
|
||||
SERVER_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.1")
|
||||
CLIENT_AUTH = ObjectIdentifier("1.3.6.1.5.5.7.3.2")
|
||||
CODE_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.3")
|
||||
EMAIL_PROTECTION = ObjectIdentifier("1.3.6.1.5.5.7.3.4")
|
||||
TIME_STAMPING = ObjectIdentifier("1.3.6.1.5.5.7.3.8")
|
||||
OCSP_SIGNING = ObjectIdentifier("1.3.6.1.5.5.7.3.9")
|
||||
|
||||
|
||||
class AuthorityInformationAccessOID(object):
|
||||
CA_ISSUERS = ObjectIdentifier("1.3.6.1.5.5.7.48.2")
|
||||
OCSP = ObjectIdentifier("1.3.6.1.5.5.7.48.1")
|
||||
|
||||
|
||||
class CertificatePoliciesOID(object):
|
||||
CPS_QUALIFIER = ObjectIdentifier("1.3.6.1.5.5.7.2.1")
|
||||
CPS_USER_NOTICE = ObjectIdentifier("1.3.6.1.5.5.7.2.2")
|
||||
ANY_POLICY = ObjectIdentifier("2.5.29.32.0")
|
||||
|
||||
_OID_NAMES = {
|
||||
NameOID.COMMON_NAME: "commonName",
|
||||
NameOID.COUNTRY_NAME: "countryName",
|
||||
NameOID.LOCALITY_NAME: "localityName",
|
||||
NameOID.STATE_OR_PROVINCE_NAME: "stateOrProvinceName",
|
||||
NameOID.ORGANIZATION_NAME: "organizationName",
|
||||
NameOID.ORGANIZATIONAL_UNIT_NAME: "organizationalUnitName",
|
||||
NameOID.SERIAL_NUMBER: "serialNumber",
|
||||
NameOID.SURNAME: "surname",
|
||||
NameOID.GIVEN_NAME: "givenName",
|
||||
NameOID.TITLE: "title",
|
||||
NameOID.GENERATION_QUALIFIER: "generationQualifier",
|
||||
NameOID.DN_QUALIFIER: "dnQualifier",
|
||||
NameOID.PSEUDONYM: "pseudonym",
|
||||
NameOID.DOMAIN_COMPONENT: "domainComponent",
|
||||
NameOID.EMAIL_ADDRESS: "emailAddress",
|
||||
SignatureAlgorithmOID.RSA_WITH_MD5: "md5WithRSAEncryption",
|
||||
SignatureAlgorithmOID.RSA_WITH_SHA1: "sha1WithRSAEncryption",
|
||||
SignatureAlgorithmOID.RSA_WITH_SHA224: "sha224WithRSAEncryption",
|
||||
SignatureAlgorithmOID.RSA_WITH_SHA256: "sha256WithRSAEncryption",
|
||||
SignatureAlgorithmOID.RSA_WITH_SHA384: "sha384WithRSAEncryption",
|
||||
SignatureAlgorithmOID.RSA_WITH_SHA512: "sha512WithRSAEncryption",
|
||||
SignatureAlgorithmOID.ECDSA_WITH_SHA1: "ecdsa-with-SHA1",
|
||||
SignatureAlgorithmOID.ECDSA_WITH_SHA224: "ecdsa-with-SHA224",
|
||||
SignatureAlgorithmOID.ECDSA_WITH_SHA256: "ecdsa-with-SHA256",
|
||||
SignatureAlgorithmOID.ECDSA_WITH_SHA384: "ecdsa-with-SHA384",
|
||||
SignatureAlgorithmOID.ECDSA_WITH_SHA512: "ecdsa-with-SHA512",
|
||||
SignatureAlgorithmOID.DSA_WITH_SHA1: "dsa-with-sha1",
|
||||
SignatureAlgorithmOID.DSA_WITH_SHA224: "dsa-with-sha224",
|
||||
SignatureAlgorithmOID.DSA_WITH_SHA256: "dsa-with-sha256",
|
||||
ExtendedKeyUsageOID.SERVER_AUTH: "serverAuth",
|
||||
ExtendedKeyUsageOID.CLIENT_AUTH: "clientAuth",
|
||||
ExtendedKeyUsageOID.CODE_SIGNING: "codeSigning",
|
||||
ExtendedKeyUsageOID.EMAIL_PROTECTION: "emailProtection",
|
||||
ExtendedKeyUsageOID.TIME_STAMPING: "timeStamping",
|
||||
ExtendedKeyUsageOID.OCSP_SIGNING: "OCSPSigning",
|
||||
ExtensionOID.SUBJECT_DIRECTORY_ATTRIBUTES: "subjectDirectoryAttributes",
|
||||
ExtensionOID.SUBJECT_KEY_IDENTIFIER: "subjectKeyIdentifier",
|
||||
ExtensionOID.KEY_USAGE: "keyUsage",
|
||||
ExtensionOID.SUBJECT_ALTERNATIVE_NAME: "subjectAltName",
|
||||
ExtensionOID.ISSUER_ALTERNATIVE_NAME: "issuerAltName",
|
||||
ExtensionOID.BASIC_CONSTRAINTS: "basicConstraints",
|
||||
CRLExtensionOID.CRL_REASON: "cRLReason",
|
||||
CRLExtensionOID.INVALIDITY_DATE: "invalidityDate",
|
||||
CRLExtensionOID.CERTIFICATE_ISSUER: "certificateIssuer",
|
||||
ExtensionOID.NAME_CONSTRAINTS: "nameConstraints",
|
||||
ExtensionOID.CRL_DISTRIBUTION_POINTS: "cRLDistributionPoints",
|
||||
ExtensionOID.CERTIFICATE_POLICIES: "certificatePolicies",
|
||||
ExtensionOID.POLICY_MAPPINGS: "policyMappings",
|
||||
ExtensionOID.AUTHORITY_KEY_IDENTIFIER: "authorityKeyIdentifier",
|
||||
ExtensionOID.POLICY_CONSTRAINTS: "policyConstraints",
|
||||
ExtensionOID.EXTENDED_KEY_USAGE: "extendedKeyUsage",
|
||||
ExtensionOID.FRESHEST_CRL: "freshestCRL",
|
||||
ExtensionOID.INHIBIT_ANY_POLICY: "inhibitAnyPolicy",
|
||||
ExtensionOID.AUTHORITY_INFORMATION_ACCESS: "authorityInfoAccess",
|
||||
ExtensionOID.SUBJECT_INFORMATION_ACCESS: "subjectInfoAccess",
|
||||
ExtensionOID.OCSP_NO_CHECK: "OCSPNoCheck",
|
||||
AuthorityInformationAccessOID.OCSP: "OCSP",
|
||||
AuthorityInformationAccessOID.CA_ISSUERS: "caIssuers",
|
||||
CertificatePoliciesOID.CPS_QUALIFIER: "id-qt-cps",
|
||||
CertificatePoliciesOID.CPS_USER_NOTICE: "id-qt-unotice",
|
||||
}
|
||||