split platform

j committed on 2016-02-06 15:06:57 +05:30
commit 8c9b09577d
2261 changed files with 676163 additions and 0 deletions

View file

@@ -0,0 +1,42 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import pkg_resources
from cryptography.hazmat.backends.multibackend import MultiBackend
_available_backends_list = None
def _available_backends():
global _available_backends_list
if _available_backends_list is None:
_available_backends_list = [
# setuptools 11.3 deprecated support for the require parameter to
# load(), and introduced the new resolve() method instead.
# This can be removed if/when we can assume setuptools>=11.3. At
# some point we may wish to add a warning, to push people along,
# but at present this would result in too many warnings.
ep.resolve() if hasattr(ep, "resolve") else ep.load(require=False)
for ep in pkg_resources.iter_entry_points(
"cryptography.backends"
)
]
return _available_backends_list
_default_backend = None
def default_backend():
global _default_backend
if _default_backend is None:
_default_backend = MultiBackend(_available_backends())
return _default_backend
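For orientation, a minimal usage sketch (hedged, not part of the file above): default_backend() lazily assembles a MultiBackend from the discovered entry points, and that object is what the primitives layer consumes.

# Hypothetical usage sketch; assumes the standard cryptography primitives API.
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes

backend = default_backend()          # MultiBackend, built on first use
digest = hashes.Hash(hashes.SHA256(), backend)
digest.update(b"hello world")
print(digest.finalize())             # 32-byte SHA-256 digest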

View file

@@ -0,0 +1,10 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography.hazmat.backends.commoncrypto.backend import backend
__all__ = ["backend"]

View file

@@ -0,0 +1,245 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from collections import namedtuple
from cryptography import utils
from cryptography.exceptions import InternalError
from cryptography.hazmat.backends.commoncrypto.ciphers import (
_CipherContext, _GCMCipherContext
)
from cryptography.hazmat.backends.commoncrypto.hashes import _HashContext
from cryptography.hazmat.backends.commoncrypto.hmac import _HMACContext
from cryptography.hazmat.backends.interfaces import (
CipherBackend, HMACBackend, HashBackend, PBKDF2HMACBackend
)
from cryptography.hazmat.bindings.commoncrypto.binding import Binding
from cryptography.hazmat.primitives.ciphers.algorithms import (
AES, ARC4, Blowfish, CAST5, TripleDES
)
from cryptography.hazmat.primitives.ciphers.modes import (
CBC, CFB, CFB8, CTR, ECB, GCM, OFB
)
HashMethods = namedtuple(
"HashMethods", ["ctx", "hash_init", "hash_update", "hash_final"]
)
@utils.register_interface(CipherBackend)
@utils.register_interface(HashBackend)
@utils.register_interface(HMACBackend)
@utils.register_interface(PBKDF2HMACBackend)
class Backend(object):
"""
CommonCrypto API wrapper.
"""
name = "commoncrypto"
def __init__(self):
self._binding = Binding()
self._ffi = self._binding.ffi
self._lib = self._binding.lib
self._cipher_registry = {}
self._register_default_ciphers()
self._hash_mapping = {
"md5": HashMethods(
"CC_MD5_CTX *", self._lib.CC_MD5_Init,
self._lib.CC_MD5_Update, self._lib.CC_MD5_Final
),
"sha1": HashMethods(
"CC_SHA1_CTX *", self._lib.CC_SHA1_Init,
self._lib.CC_SHA1_Update, self._lib.CC_SHA1_Final
),
"sha224": HashMethods(
"CC_SHA256_CTX *", self._lib.CC_SHA224_Init,
self._lib.CC_SHA224_Update, self._lib.CC_SHA224_Final
),
"sha256": HashMethods(
"CC_SHA256_CTX *", self._lib.CC_SHA256_Init,
self._lib.CC_SHA256_Update, self._lib.CC_SHA256_Final
),
"sha384": HashMethods(
"CC_SHA512_CTX *", self._lib.CC_SHA384_Init,
self._lib.CC_SHA384_Update, self._lib.CC_SHA384_Final
),
"sha512": HashMethods(
"CC_SHA512_CTX *", self._lib.CC_SHA512_Init,
self._lib.CC_SHA512_Update, self._lib.CC_SHA512_Final
),
}
self._supported_hmac_algorithms = {
"md5": self._lib.kCCHmacAlgMD5,
"sha1": self._lib.kCCHmacAlgSHA1,
"sha224": self._lib.kCCHmacAlgSHA224,
"sha256": self._lib.kCCHmacAlgSHA256,
"sha384": self._lib.kCCHmacAlgSHA384,
"sha512": self._lib.kCCHmacAlgSHA512,
}
self._supported_pbkdf2_hmac_algorithms = {
"sha1": self._lib.kCCPRFHmacAlgSHA1,
"sha224": self._lib.kCCPRFHmacAlgSHA224,
"sha256": self._lib.kCCPRFHmacAlgSHA256,
"sha384": self._lib.kCCPRFHmacAlgSHA384,
"sha512": self._lib.kCCPRFHmacAlgSHA512,
}
def hash_supported(self, algorithm):
return algorithm.name in self._hash_mapping
def hmac_supported(self, algorithm):
return algorithm.name in self._supported_hmac_algorithms
def create_hash_ctx(self, algorithm):
return _HashContext(self, algorithm)
def create_hmac_ctx(self, key, algorithm):
return _HMACContext(self, key, algorithm)
def cipher_supported(self, cipher, mode):
return (type(cipher), type(mode)) in self._cipher_registry
def create_symmetric_encryption_ctx(self, cipher, mode):
if isinstance(mode, GCM):
return _GCMCipherContext(
self, cipher, mode, self._lib.kCCEncrypt
)
else:
return _CipherContext(self, cipher, mode, self._lib.kCCEncrypt)
def create_symmetric_decryption_ctx(self, cipher, mode):
if isinstance(mode, GCM):
return _GCMCipherContext(
self, cipher, mode, self._lib.kCCDecrypt
)
else:
return _CipherContext(self, cipher, mode, self._lib.kCCDecrypt)
def pbkdf2_hmac_supported(self, algorithm):
return algorithm.name in self._supported_pbkdf2_hmac_algorithms
def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations,
key_material):
alg_enum = self._supported_pbkdf2_hmac_algorithms[algorithm.name]
buf = self._ffi.new("char[]", length)
res = self._lib.CCKeyDerivationPBKDF(
self._lib.kCCPBKDF2,
key_material,
len(key_material),
salt,
len(salt),
alg_enum,
iterations,
buf,
length
)
self._check_cipher_response(res)
return self._ffi.buffer(buf)[:]
def _register_cipher_adapter(self, cipher_cls, cipher_const, mode_cls,
mode_const):
if (cipher_cls, mode_cls) in self._cipher_registry:
raise ValueError("Duplicate registration for: {0} {1}.".format(
cipher_cls, mode_cls)
)
self._cipher_registry[cipher_cls, mode_cls] = (cipher_const,
mode_const)
def _register_default_ciphers(self):
for mode_cls, mode_const in [
(CBC, self._lib.kCCModeCBC),
(ECB, self._lib.kCCModeECB),
(CFB, self._lib.kCCModeCFB),
(CFB8, self._lib.kCCModeCFB8),
(OFB, self._lib.kCCModeOFB),
(CTR, self._lib.kCCModeCTR),
(GCM, self._lib.kCCModeGCM),
]:
self._register_cipher_adapter(
AES,
self._lib.kCCAlgorithmAES128,
mode_cls,
mode_const
)
for mode_cls, mode_const in [
(CBC, self._lib.kCCModeCBC),
(ECB, self._lib.kCCModeECB),
(CFB, self._lib.kCCModeCFB),
(CFB8, self._lib.kCCModeCFB8),
(OFB, self._lib.kCCModeOFB),
]:
self._register_cipher_adapter(
TripleDES,
self._lib.kCCAlgorithm3DES,
mode_cls,
mode_const
)
for mode_cls, mode_const in [
(CBC, self._lib.kCCModeCBC),
(ECB, self._lib.kCCModeECB),
(CFB, self._lib.kCCModeCFB),
(OFB, self._lib.kCCModeOFB)
]:
self._register_cipher_adapter(
Blowfish,
self._lib.kCCAlgorithmBlowfish,
mode_cls,
mode_const
)
for mode_cls, mode_const in [
(CBC, self._lib.kCCModeCBC),
(ECB, self._lib.kCCModeECB),
(CFB, self._lib.kCCModeCFB),
(OFB, self._lib.kCCModeOFB),
(CTR, self._lib.kCCModeCTR)
]:
self._register_cipher_adapter(
CAST5,
self._lib.kCCAlgorithmCAST,
mode_cls,
mode_const
)
self._register_cipher_adapter(
ARC4,
self._lib.kCCAlgorithmRC4,
type(None),
self._lib.kCCModeRC4
)
def _check_cipher_response(self, response):
if response == self._lib.kCCSuccess:
return
elif response == self._lib.kCCAlignmentError:
# This error is not currently triggered due to a bug filed as
# rdar://15589470
raise ValueError(
"The length of the provided data is not a multiple of "
"the block length."
)
else:
raise InternalError(
"The backend returned an unknown error, consider filing a bug."
" Code: {0}.".format(response),
response
)
def _release_cipher_ctx(self, ctx):
"""
Called by the garbage collector and used to safely dereference and
release the context.
"""
if ctx[0] != self._ffi.NULL:
res = self._lib.CCCryptorRelease(ctx[0])
self._check_cipher_response(res)
ctx[0] = self._ffi.NULL
backend = Backend()
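A hedged sketch of how the derive_pbkdf2_hmac() hook above is normally reached: the primitives-layer PBKDF2HMAC class delegates to it. This only runs on OS X, where CommonCrypto is available.

# Hypothetical sketch; OS X only, standard primitives API assumed.
from cryptography.hazmat.backends.commoncrypto.backend import backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC

kdf = PBKDF2HMAC(
    algorithm=hashes.SHA256(),
    length=32,
    salt=b"\x00" * 16,
    iterations=100000,
    backend=backend,
)
key = kdf.derive(b"my passphrase")   # ends up in backend.derive_pbkdf2_hmac()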

View file

@@ -0,0 +1,193 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.exceptions import (
InvalidTag, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.primitives import ciphers, constant_time
from cryptography.hazmat.primitives.ciphers import modes
from cryptography.hazmat.primitives.ciphers.modes import (
CFB, CFB8, CTR, OFB
)
@utils.register_interface(ciphers.CipherContext)
class _CipherContext(object):
def __init__(self, backend, cipher, mode, operation):
self._backend = backend
self._cipher = cipher
self._mode = mode
self._operation = operation
# There is a bug in CommonCrypto where block ciphers do not raise
# kCCAlignmentError when finalizing if you supply non-block aligned
# data. To work around this we need to keep track of the block
# alignment ourselves, but only for alg+mode combos that require
# block alignment. OFB, CFB, and CTR make a block cipher algorithm
# into a stream cipher so we don't need to track them (and thus their
# block size is effectively 1 byte just like OpenSSL/CommonCrypto
# treat RC4 and other stream cipher block sizes).
# This bug has been filed as rdar://15589470
self._bytes_processed = 0
if (isinstance(cipher, ciphers.BlockCipherAlgorithm) and not
isinstance(mode, (OFB, CFB, CFB8, CTR))):
self._byte_block_size = cipher.block_size // 8
else:
self._byte_block_size = 1
registry = self._backend._cipher_registry
try:
cipher_enum, mode_enum = registry[type(cipher), type(mode)]
except KeyError:
raise UnsupportedAlgorithm(
"cipher {0} in {1} mode is not supported "
"by this backend.".format(
cipher.name, mode.name if mode else mode),
_Reasons.UNSUPPORTED_CIPHER
)
ctx = self._backend._ffi.new("CCCryptorRef *")
ctx = self._backend._ffi.gc(ctx, self._backend._release_cipher_ctx)
if isinstance(mode, modes.ModeWithInitializationVector):
iv_nonce = mode.initialization_vector
elif isinstance(mode, modes.ModeWithNonce):
iv_nonce = mode.nonce
else:
iv_nonce = self._backend._ffi.NULL
if isinstance(mode, CTR):
mode_option = self._backend._lib.kCCModeOptionCTR_BE
else:
mode_option = 0
res = self._backend._lib.CCCryptorCreateWithMode(
operation,
mode_enum, cipher_enum,
self._backend._lib.ccNoPadding, iv_nonce,
cipher.key, len(cipher.key),
self._backend._ffi.NULL, 0, 0, mode_option, ctx)
self._backend._check_cipher_response(res)
self._ctx = ctx
def update(self, data):
# Count bytes processed to handle block alignment.
self._bytes_processed += len(data)
buf = self._backend._ffi.new(
"unsigned char[]", len(data) + self._byte_block_size - 1)
outlen = self._backend._ffi.new("size_t *")
res = self._backend._lib.CCCryptorUpdate(
self._ctx[0], data, len(data), buf,
len(data) + self._byte_block_size - 1, outlen)
self._backend._check_cipher_response(res)
return self._backend._ffi.buffer(buf)[:outlen[0]]
def finalize(self):
# Raise error if block alignment is wrong.
if self._bytes_processed % self._byte_block_size:
raise ValueError(
"The length of the provided data is not a multiple of "
"the block length."
)
buf = self._backend._ffi.new("unsigned char[]", self._byte_block_size)
outlen = self._backend._ffi.new("size_t *")
res = self._backend._lib.CCCryptorFinal(
self._ctx[0], buf, len(buf), outlen)
self._backend._check_cipher_response(res)
self._backend._release_cipher_ctx(self._ctx)
return self._backend._ffi.buffer(buf)[:outlen[0]]
@utils.register_interface(ciphers.AEADCipherContext)
@utils.register_interface(ciphers.AEADEncryptionContext)
class _GCMCipherContext(object):
def __init__(self, backend, cipher, mode, operation):
self._backend = backend
self._cipher = cipher
self._mode = mode
self._operation = operation
self._tag = None
registry = self._backend._cipher_registry
try:
cipher_enum, mode_enum = registry[type(cipher), type(mode)]
except KeyError:
raise UnsupportedAlgorithm(
"cipher {0} in {1} mode is not supported "
"by this backend.".format(
cipher.name, mode.name if mode else mode),
_Reasons.UNSUPPORTED_CIPHER
)
ctx = self._backend._ffi.new("CCCryptorRef *")
ctx = self._backend._ffi.gc(ctx, self._backend._release_cipher_ctx)
self._ctx = ctx
res = self._backend._lib.CCCryptorCreateWithMode(
operation,
mode_enum, cipher_enum,
self._backend._lib.ccNoPadding,
self._backend._ffi.NULL,
cipher.key, len(cipher.key),
self._backend._ffi.NULL, 0, 0, 0, self._ctx)
self._backend._check_cipher_response(res)
res = self._backend._lib.CCCryptorGCMAddIV(
self._ctx[0],
mode.initialization_vector,
len(mode.initialization_vector)
)
self._backend._check_cipher_response(res)
# CommonCrypto has a bug where calling update without at least one
# call to authenticate_additional_data will result in null byte output
# for ciphertext. The following empty byte string call prevents the
# issue, which is present in at least 10.8 and 10.9.
# Filed as rdar://18314544
self.authenticate_additional_data(b"")
def update(self, data):
buf = self._backend._ffi.new("unsigned char[]", len(data))
args = (self._ctx[0], data, len(data), buf)
if self._operation == self._backend._lib.kCCEncrypt:
res = self._backend._lib.CCCryptorGCMEncrypt(*args)
else:
res = self._backend._lib.CCCryptorGCMDecrypt(*args)
self._backend._check_cipher_response(res)
return self._backend._ffi.buffer(buf)[:]
def finalize(self):
# CommonCrypto has a yet another bug where you must make at least one
# call to update. If you pass just AAD and call finalize without a call
# to update you'll get null bytes for tag. The following update call
# prevents this issue, which is present in at least 10.8 and 10.9.
# Filed as rdar://18314580
self.update(b"")
tag_size = self._cipher.block_size // 8
tag_buf = self._backend._ffi.new("unsigned char[]", tag_size)
tag_len = self._backend._ffi.new("size_t *", tag_size)
res = self._backend._lib.CCCryptorGCMFinal(
self._ctx[0], tag_buf, tag_len
)
self._backend._check_cipher_response(res)
self._backend._release_cipher_ctx(self._ctx)
self._tag = self._backend._ffi.buffer(tag_buf)[:]
if (self._operation == self._backend._lib.kCCDecrypt and
not constant_time.bytes_eq(
self._tag[:len(self._mode.tag)], self._mode.tag
)):
raise InvalidTag
return b""
def authenticate_additional_data(self, data):
res = self._backend._lib.CCCryptorGCMAddAAD(
self._ctx[0], data, len(data)
)
self._backend._check_cipher_response(res)
tag = utils.read_only_property("_tag")
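For context, a hedged sketch of the code path that exercises _GCMCipherContext, including the empty-AAD workaround applied automatically in __init__ (assumes an OS X host and the standard ciphers API):

# Hypothetical sketch; OS X only.
import os
from cryptography.hazmat.backends.commoncrypto.backend import backend
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

key, iv = os.urandom(32), os.urandom(12)
encryptor = Cipher(algorithms.AES(key), modes.GCM(iv), backend).encryptor()
encryptor.authenticate_additional_data(b"header")
ct = encryptor.update(b"secret") + encryptor.finalize()
tag = encryptor.tag

decryptor = Cipher(
    algorithms.AES(key), modes.GCM(iv, tag), backend
).decryptor()
decryptor.authenticate_additional_data(b"header")
pt = decryptor.update(ct) + decryptor.finalize()  # InvalidTag on mismatch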

View file

@@ -0,0 +1,55 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
from cryptography.hazmat.primitives import hashes
@utils.register_interface(hashes.HashContext)
class _HashContext(object):
def __init__(self, backend, algorithm, ctx=None):
self._algorithm = algorithm
self._backend = backend
if ctx is None:
try:
methods = self._backend._hash_mapping[self.algorithm.name]
except KeyError:
raise UnsupportedAlgorithm(
"{0} is not a supported hash on this backend.".format(
algorithm.name),
_Reasons.UNSUPPORTED_HASH
)
ctx = self._backend._ffi.new(methods.ctx)
res = methods.hash_init(ctx)
assert res == 1
self._ctx = ctx
algorithm = utils.read_only_property("_algorithm")
def copy(self):
methods = self._backend._hash_mapping[self.algorithm.name]
new_ctx = self._backend._ffi.new(methods.ctx)
# CommonCrypto has no APIs for copying hashes, so we have to copy the
# underlying struct.
new_ctx[0] = self._ctx[0]
return _HashContext(self._backend, self.algorithm, ctx=new_ctx)
def update(self, data):
methods = self._backend._hash_mapping[self.algorithm.name]
res = methods.hash_update(self._ctx, data, len(data))
assert res == 1
def finalize(self):
methods = self._backend._hash_mapping[self.algorithm.name]
buf = self._backend._ffi.new("unsigned char[]",
self.algorithm.digest_size)
res = methods.hash_final(buf, self._ctx)
assert res == 1
return self._backend._ffi.buffer(buf)[:]
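A small hedged sketch of the copy() path above, which duplicates the underlying CC_* struct because CommonCrypto has no native copy API:

# Hypothetical sketch; OS X only.
from cryptography.hazmat.backends.commoncrypto.backend import backend
from cryptography.hazmat.primitives import hashes

h = hashes.Hash(hashes.SHA1(), backend)
h.update(b"abc")
fork = h.copy()          # independent _HashContext from here on
fork.update(b"def")
print(h.finalize())      # SHA-1 of b"abc"
print(fork.finalize())   # SHA-1 of b"abcdef"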

View file

@@ -0,0 +1,59 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.exceptions import (
InvalidSignature, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.primitives import constant_time, hashes, interfaces
@utils.register_interface(interfaces.MACContext)
@utils.register_interface(hashes.HashContext)
class _HMACContext(object):
def __init__(self, backend, key, algorithm, ctx=None):
self._algorithm = algorithm
self._backend = backend
if ctx is None:
ctx = self._backend._ffi.new("CCHmacContext *")
try:
alg = self._backend._supported_hmac_algorithms[algorithm.name]
except KeyError:
raise UnsupportedAlgorithm(
"{0} is not a supported HMAC hash on this backend.".format(
algorithm.name),
_Reasons.UNSUPPORTED_HASH
)
self._backend._lib.CCHmacInit(ctx, alg, key, len(key))
self._ctx = ctx
self._key = key
algorithm = utils.read_only_property("_algorithm")
def copy(self):
copied_ctx = self._backend._ffi.new("CCHmacContext *")
# CommonCrypto has no APIs for copying HMACs, so we have to copy the
# underlying struct.
copied_ctx[0] = self._ctx[0]
return _HMACContext(
self._backend, self._key, self.algorithm, ctx=copied_ctx
)
def update(self, data):
self._backend._lib.CCHmacUpdate(self._ctx, data, len(data))
def finalize(self):
buf = self._backend._ffi.new("unsigned char[]",
self.algorithm.digest_size)
self._backend._lib.CCHmacFinal(self._ctx, buf)
return self._backend._ffi.buffer(buf)[:]
def verify(self, signature):
digest = self.finalize()
if not constant_time.bytes_eq(digest, signature):
raise InvalidSignature("Signature did not match digest.")
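And a hedged sketch of the HMAC context in use, including the constant-time verify() above:

# Hypothetical sketch; OS X only.
from cryptography.hazmat.backends.commoncrypto.backend import backend
from cryptography.hazmat.primitives import hashes, hmac

h = hmac.HMAC(b"0" * 32, hashes.SHA256(), backend)
h.update(b"message")
tag = h.finalize()

check = hmac.HMAC(b"0" * 32, hashes.SHA256(), backend)
check.update(b"message")
check.verify(tag)        # a wrong tag raises InvalidSignature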

View file

@@ -0,0 +1,345 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class CipherBackend(object):
@abc.abstractmethod
def cipher_supported(self, cipher, mode):
"""
Return True if the given cipher and mode are supported.
"""
@abc.abstractmethod
def create_symmetric_encryption_ctx(self, cipher, mode):
"""
Get a CipherContext that can be used for encryption.
"""
@abc.abstractmethod
def create_symmetric_decryption_ctx(self, cipher, mode):
"""
Get a CipherContext that can be used for decryption.
"""
@six.add_metaclass(abc.ABCMeta)
class HashBackend(object):
@abc.abstractmethod
def hash_supported(self, algorithm):
"""
Return True if the hash algorithm is supported by this backend.
"""
@abc.abstractmethod
def create_hash_ctx(self, algorithm):
"""
Create a HashContext for calculating a message digest.
"""
@six.add_metaclass(abc.ABCMeta)
class HMACBackend(object):
@abc.abstractmethod
def hmac_supported(self, algorithm):
"""
Return True if the hash algorithm is supported for HMAC by this
backend.
"""
@abc.abstractmethod
def create_hmac_ctx(self, key, algorithm):
"""
Create a MACContext for calculating a message authentication code.
"""
@six.add_metaclass(abc.ABCMeta)
class CMACBackend(object):
@abc.abstractmethod
def cmac_algorithm_supported(self, algorithm):
"""
Returns True if the block cipher is supported for CMAC by this backend
"""
@abc.abstractmethod
def create_cmac_ctx(self, algorithm):
"""
Create a MACContext for calculating a message authentication code.
"""
@six.add_metaclass(abc.ABCMeta)
class PBKDF2HMACBackend(object):
@abc.abstractmethod
def pbkdf2_hmac_supported(self, algorithm):
"""
Return True if the hash algorithm is supported for PBKDF2 by this
backend.
"""
@abc.abstractmethod
def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations,
key_material):
"""
Return length bytes derived from provided PBKDF2 parameters.
"""
@six.add_metaclass(abc.ABCMeta)
class RSABackend(object):
@abc.abstractmethod
def generate_rsa_private_key(self, public_exponent, key_size):
"""
Generate an RSAPrivateKey instance with public_exponent and a modulus
of key_size bits.
"""
@abc.abstractmethod
def rsa_padding_supported(self, padding):
"""
Returns True if the backend supports the given padding options.
"""
@abc.abstractmethod
def generate_rsa_parameters_supported(self, public_exponent, key_size):
"""
Returns True if the backend supports the given parameters for key
generation.
"""
@abc.abstractmethod
def load_rsa_private_numbers(self, numbers):
"""
Returns an RSAPrivateKey provider.
"""
@abc.abstractmethod
def load_rsa_public_numbers(self, numbers):
"""
Returns an RSAPublicKey provider.
"""
@six.add_metaclass(abc.ABCMeta)
class DSABackend(object):
@abc.abstractmethod
def generate_dsa_parameters(self, key_size):
"""
Generate a DSAParameters instance with a modulus of key_size bits.
"""
@abc.abstractmethod
def generate_dsa_private_key(self, parameters):
"""
Generate a DSAPrivateKey instance with parameters as a DSAParameters
object.
"""
@abc.abstractmethod
def generate_dsa_private_key_and_parameters(self, key_size):
"""
Generate a DSAPrivateKey instance using key size only.
"""
@abc.abstractmethod
def dsa_hash_supported(self, algorithm):
"""
Return True if the hash algorithm is supported by the backend for DSA.
"""
@abc.abstractmethod
def dsa_parameters_supported(self, p, q, g):
"""
Return True if the parameters are supported by the backend for DSA.
"""
@abc.abstractmethod
def load_dsa_private_numbers(self, numbers):
"""
Returns a DSAPrivateKey provider.
"""
@abc.abstractmethod
def load_dsa_public_numbers(self, numbers):
"""
Returns a DSAPublicKey provider.
"""
@abc.abstractmethod
def load_dsa_parameter_numbers(self, numbers):
"""
Returns a DSAParameters provider.
"""
@six.add_metaclass(abc.ABCMeta)
class EllipticCurveBackend(object):
@abc.abstractmethod
def elliptic_curve_signature_algorithm_supported(
self, signature_algorithm, curve
):
"""
Returns True if the backend supports the named elliptic curve with the
specified signature algorithm.
"""
@abc.abstractmethod
def elliptic_curve_supported(self, curve):
"""
Returns True if the backend supports the named elliptic curve.
"""
@abc.abstractmethod
def generate_elliptic_curve_private_key(self, curve):
"""
Return an object conforming to the EllipticCurvePrivateKey interface.
"""
@abc.abstractmethod
def load_elliptic_curve_public_numbers(self, numbers):
"""
Return an EllipticCurvePublicKey provider using the given numbers.
"""
@abc.abstractmethod
def load_elliptic_curve_private_numbers(self, numbers):
"""
Return an EllipticCurvePrivateKey provider using the given numbers.
"""
@abc.abstractmethod
def elliptic_curve_exchange_algorithm_supported(self, algorithm, curve):
"""
Returns whether the exchange algorithm is supported by this backend.
"""
@six.add_metaclass(abc.ABCMeta)
class PEMSerializationBackend(object):
@abc.abstractmethod
def load_pem_private_key(self, data, password):
"""
Loads a private key from PEM encoded data, using the provided password
if the data is encrypted.
"""
@abc.abstractmethod
def load_pem_public_key(self, data):
"""
Loads a public key from PEM encoded data.
"""
@six.add_metaclass(abc.ABCMeta)
class DERSerializationBackend(object):
@abc.abstractmethod
def load_der_private_key(self, data, password):
"""
Loads a private key from DER encoded data. Uses the provided password
if the data is encrypted.
"""
@abc.abstractmethod
def load_der_public_key(self, data):
"""
Loads a public key from DER encoded data.
"""
@six.add_metaclass(abc.ABCMeta)
class X509Backend(object):
@abc.abstractmethod
def load_pem_x509_certificate(self, data):
"""
Load an X.509 certificate from PEM encoded data.
"""
@abc.abstractmethod
def load_der_x509_certificate(self, data):
"""
Load an X.509 certificate from DER encoded data.
"""
@abc.abstractmethod
def load_der_x509_csr(self, data):
"""
Load an X.509 CSR from DER encoded data.
"""
@abc.abstractmethod
def load_pem_x509_csr(self, data):
"""
Load an X.509 CSR from PEM encoded data.
"""
@abc.abstractmethod
def create_x509_csr(self, builder, private_key, algorithm):
"""
Create and sign an X.509 CSR from a CSR builder object.
"""
@abc.abstractmethod
def create_x509_certificate(self, builder, private_key, algorithm):
"""
Create and sign an X.509 certificate from a CertificateBuilder object.
"""
@six.add_metaclass(abc.ABCMeta)
class DHBackend(object):
@abc.abstractmethod
def generate_dh_parameters(self, key_size):
"""
Generate a DHParameters instance with a modulus of key_size bits.
"""
@abc.abstractmethod
def generate_dh_private_key(self, parameters):
"""
Generate a DHPrivateKey instance with parameters as a DHParameters
object.
"""
@abc.abstractmethod
def generate_dh_private_key_and_parameters(self, key_size):
"""
Generate a DHPrivateKey instance using key size only.
"""
@abc.abstractmethod
def load_dh_private_numbers(self, numbers):
"""
Returns a DHPrivateKey provider.
"""
@abc.abstractmethod
def load_dh_public_numbers(self, numbers):
"""
Returns a DHPublicKey provider.
"""
@abc.abstractmethod
def load_dh_parameter_numbers(self, numbers):
"""
Returns a DHParameters provider.
"""
@abc.abstractmethod
def dh_exchange_algorithm_supported(self, exchange_algorithm):
"""
Returns whether the exchange algorithm is supported by this backend.
"""
@abc.abstractmethod
def dh_parameters_supported(self, p, g):
"""
Returns whether the backend supports DH with these parameter values.
"""

View file

@@ -0,0 +1,386 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
from cryptography.hazmat.backends.interfaces import (
CMACBackend, CipherBackend, DERSerializationBackend, DSABackend,
EllipticCurveBackend, HMACBackend, HashBackend, PBKDF2HMACBackend,
PEMSerializationBackend, RSABackend, X509Backend
)
@utils.register_interface(CMACBackend)
@utils.register_interface(CipherBackend)
@utils.register_interface(DERSerializationBackend)
@utils.register_interface(HashBackend)
@utils.register_interface(HMACBackend)
@utils.register_interface(PBKDF2HMACBackend)
@utils.register_interface(RSABackend)
@utils.register_interface(DSABackend)
@utils.register_interface(EllipticCurveBackend)
@utils.register_interface(PEMSerializationBackend)
@utils.register_interface(X509Backend)
class MultiBackend(object):
name = "multibackend"
def __init__(self, backends):
self._backends = backends
def _filtered_backends(self, interface):
for b in self._backends:
if isinstance(b, interface):
yield b
def cipher_supported(self, cipher, mode):
return any(
b.cipher_supported(cipher, mode)
for b in self._filtered_backends(CipherBackend)
)
def create_symmetric_encryption_ctx(self, cipher, mode):
for b in self._filtered_backends(CipherBackend):
try:
return b.create_symmetric_encryption_ctx(cipher, mode)
except UnsupportedAlgorithm:
pass
raise UnsupportedAlgorithm(
"cipher {0} in {1} mode is not supported by this backend.".format(
cipher.name, mode.name if mode else mode),
_Reasons.UNSUPPORTED_CIPHER
)
def create_symmetric_decryption_ctx(self, cipher, mode):
for b in self._filtered_backends(CipherBackend):
try:
return b.create_symmetric_decryption_ctx(cipher, mode)
except UnsupportedAlgorithm:
pass
raise UnsupportedAlgorithm(
"cipher {0} in {1} mode is not supported by this backend.".format(
cipher.name, mode.name if mode else mode),
_Reasons.UNSUPPORTED_CIPHER
)
def hash_supported(self, algorithm):
return any(
b.hash_supported(algorithm)
for b in self._filtered_backends(HashBackend)
)
def create_hash_ctx(self, algorithm):
for b in self._filtered_backends(HashBackend):
try:
return b.create_hash_ctx(algorithm)
except UnsupportedAlgorithm:
pass
raise UnsupportedAlgorithm(
"{0} is not a supported hash on this backend.".format(
algorithm.name),
_Reasons.UNSUPPORTED_HASH
)
def hmac_supported(self, algorithm):
return any(
b.hmac_supported(algorithm)
for b in self._filtered_backends(HMACBackend)
)
def create_hmac_ctx(self, key, algorithm):
for b in self._filtered_backends(HMACBackend):
try:
return b.create_hmac_ctx(key, algorithm)
except UnsupportedAlgorithm:
pass
raise UnsupportedAlgorithm(
"{0} is not a supported hash on this backend.".format(
algorithm.name),
_Reasons.UNSUPPORTED_HASH
)
def pbkdf2_hmac_supported(self, algorithm):
return any(
b.pbkdf2_hmac_supported(algorithm)
for b in self._filtered_backends(PBKDF2HMACBackend)
)
def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations,
key_material):
for b in self._filtered_backends(PBKDF2HMACBackend):
try:
return b.derive_pbkdf2_hmac(
algorithm, length, salt, iterations, key_material
)
except UnsupportedAlgorithm:
pass
raise UnsupportedAlgorithm(
"{0} is not a supported hash on this backend.".format(
algorithm.name),
_Reasons.UNSUPPORTED_HASH
)
def generate_rsa_private_key(self, public_exponent, key_size):
for b in self._filtered_backends(RSABackend):
return b.generate_rsa_private_key(public_exponent, key_size)
raise UnsupportedAlgorithm("RSA is not supported by the backend.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def generate_rsa_parameters_supported(self, public_exponent, key_size):
for b in self._filtered_backends(RSABackend):
return b.generate_rsa_parameters_supported(
public_exponent, key_size
)
raise UnsupportedAlgorithm("RSA is not supported by the backend.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def rsa_padding_supported(self, padding):
for b in self._filtered_backends(RSABackend):
return b.rsa_padding_supported(padding)
raise UnsupportedAlgorithm("RSA is not supported by the backend.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def load_rsa_private_numbers(self, numbers):
for b in self._filtered_backends(RSABackend):
return b.load_rsa_private_numbers(numbers)
raise UnsupportedAlgorithm("RSA is not supported by the backend",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def load_rsa_public_numbers(self, numbers):
for b in self._filtered_backends(RSABackend):
return b.load_rsa_public_numbers(numbers)
raise UnsupportedAlgorithm("RSA is not supported by the backend",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def generate_dsa_parameters(self, key_size):
for b in self._filtered_backends(DSABackend):
return b.generate_dsa_parameters(key_size)
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def generate_dsa_private_key(self, parameters):
for b in self._filtered_backends(DSABackend):
return b.generate_dsa_private_key(parameters)
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def generate_dsa_private_key_and_parameters(self, key_size):
for b in self._filtered_backends(DSABackend):
return b.generate_dsa_private_key_and_parameters(key_size)
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def dsa_hash_supported(self, algorithm):
for b in self._filtered_backends(DSABackend):
return b.dsa_hash_supported(algorithm)
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def dsa_parameters_supported(self, p, q, g):
for b in self._filtered_backends(DSABackend):
return b.dsa_parameters_supported(p, q, g)
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def load_dsa_public_numbers(self, numbers):
for b in self._filtered_backends(DSABackend):
return b.load_dsa_public_numbers(numbers)
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def load_dsa_private_numbers(self, numbers):
for b in self._filtered_backends(DSABackend):
return b.load_dsa_private_numbers(numbers)
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def load_dsa_parameter_numbers(self, numbers):
for b in self._filtered_backends(DSABackend):
return b.load_dsa_parameter_numbers(numbers)
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def cmac_algorithm_supported(self, algorithm):
return any(
b.cmac_algorithm_supported(algorithm)
for b in self._filtered_backends(CMACBackend)
)
def create_cmac_ctx(self, algorithm):
for b in self._filtered_backends(CMACBackend):
try:
return b.create_cmac_ctx(algorithm)
except UnsupportedAlgorithm:
pass
raise UnsupportedAlgorithm("This backend does not support CMAC.",
_Reasons.UNSUPPORTED_CIPHER)
def elliptic_curve_supported(self, curve):
return any(
b.elliptic_curve_supported(curve)
for b in self._filtered_backends(EllipticCurveBackend)
)
def elliptic_curve_signature_algorithm_supported(
self, signature_algorithm, curve
):
return any(
b.elliptic_curve_signature_algorithm_supported(
signature_algorithm, curve
)
for b in self._filtered_backends(EllipticCurveBackend)
)
def generate_elliptic_curve_private_key(self, curve):
for b in self._filtered_backends(EllipticCurveBackend):
try:
return b.generate_elliptic_curve_private_key(curve)
except UnsupportedAlgorithm:
continue
raise UnsupportedAlgorithm(
"This backend does not support this elliptic curve.",
_Reasons.UNSUPPORTED_ELLIPTIC_CURVE
)
def load_elliptic_curve_private_numbers(self, numbers):
for b in self._filtered_backends(EllipticCurveBackend):
try:
return b.load_elliptic_curve_private_numbers(numbers)
except UnsupportedAlgorithm:
continue
raise UnsupportedAlgorithm(
"This backend does not support this elliptic curve.",
_Reasons.UNSUPPORTED_ELLIPTIC_CURVE
)
def load_elliptic_curve_public_numbers(self, numbers):
for b in self._filtered_backends(EllipticCurveBackend):
try:
return b.load_elliptic_curve_public_numbers(numbers)
except UnsupportedAlgorithm:
continue
raise UnsupportedAlgorithm(
"This backend does not support this elliptic curve.",
_Reasons.UNSUPPORTED_ELLIPTIC_CURVE
)
def elliptic_curve_exchange_algorithm_supported(self, algorithm, curve):
return any(
b.elliptic_curve_exchange_algorithm_supported(algorithm, curve)
for b in self._filtered_backends(EllipticCurveBackend)
)
def load_pem_private_key(self, data, password):
for b in self._filtered_backends(PEMSerializationBackend):
return b.load_pem_private_key(data, password)
raise UnsupportedAlgorithm(
"This backend does not support this key serialization.",
_Reasons.UNSUPPORTED_SERIALIZATION
)
def load_pem_public_key(self, data):
for b in self._filtered_backends(PEMSerializationBackend):
return b.load_pem_public_key(data)
raise UnsupportedAlgorithm(
"This backend does not support this key serialization.",
_Reasons.UNSUPPORTED_SERIALIZATION
)
def load_der_private_key(self, data, password):
for b in self._filtered_backends(DERSerializationBackend):
return b.load_der_private_key(data, password)
raise UnsupportedAlgorithm(
"This backend does not support this key serialization.",
_Reasons.UNSUPPORTED_SERIALIZATION
)
def load_der_public_key(self, data):
for b in self._filtered_backends(DERSerializationBackend):
return b.load_der_public_key(data)
raise UnsupportedAlgorithm(
"This backend does not support this key serialization.",
_Reasons.UNSUPPORTED_SERIALIZATION
)
def load_pem_x509_certificate(self, data):
for b in self._filtered_backends(X509Backend):
return b.load_pem_x509_certificate(data)
raise UnsupportedAlgorithm(
"This backend does not support X.509.",
_Reasons.UNSUPPORTED_X509
)
def load_der_x509_certificate(self, data):
for b in self._filtered_backends(X509Backend):
return b.load_der_x509_certificate(data)
raise UnsupportedAlgorithm(
"This backend does not support X.509.",
_Reasons.UNSUPPORTED_X509
)
def load_pem_x509_crl(self, data):
for b in self._filtered_backends(X509Backend):
return b.load_pem_x509_crl(data)
raise UnsupportedAlgorithm(
"This backend does not support X.509.",
_Reasons.UNSUPPORTED_X509
)
def load_der_x509_crl(self, data):
for b in self._filtered_backends(X509Backend):
return b.load_der_x509_crl(data)
raise UnsupportedAlgorithm(
"This backend does not support X.509.",
_Reasons.UNSUPPORTED_X509
)
def load_der_x509_csr(self, data):
for b in self._filtered_backends(X509Backend):
return b.load_der_x509_csr(data)
raise UnsupportedAlgorithm(
"This backend does not support X.509.",
_Reasons.UNSUPPORTED_X509
)
def load_pem_x509_csr(self, data):
for b in self._filtered_backends(X509Backend):
return b.load_pem_x509_csr(data)
raise UnsupportedAlgorithm(
"This backend does not support X.509.",
_Reasons.UNSUPPORTED_X509
)
def create_x509_csr(self, builder, private_key, algorithm):
for b in self._filtered_backends(X509Backend):
return b.create_x509_csr(builder, private_key, algorithm)
raise UnsupportedAlgorithm(
"This backend does not support X.509.",
_Reasons.UNSUPPORTED_X509
)
def create_x509_certificate(self, builder, private_key, algorithm):
for b in self._filtered_backends(X509Backend):
return b.create_x509_certificate(builder, private_key, algorithm)
raise UnsupportedAlgorithm(
"This backend does not support X.509.",
_Reasons.UNSUPPORTED_X509
)
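A hedged sketch of composing a MultiBackend by hand instead of relying on the entry-point discovery in backends/__init__.py; the fallback behaviour is simply that the first backend not raising UnsupportedAlgorithm wins:

# Hypothetical sketch; assumes the OpenSSL backend is importable.
from cryptography.hazmat.backends.multibackend import MultiBackend
from cryptography.hazmat.backends.openssl import backend as openssl_backend
from cryptography.hazmat.primitives import hashes

multi = MultiBackend([openssl_backend])
print(multi.hash_supported(hashes.SHA256()))   # True with OpenSSL present
ctx = multi.create_hash_ctx(hashes.SHA256())   # delegated to the OpenSSL backend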

View file

@@ -0,0 +1,10 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography.hazmat.backends.openssl.backend import backend
__all__ = ["backend"]

File diff suppressed because it is too large.

View file

@@ -0,0 +1,213 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.exceptions import InvalidTag, UnsupportedAlgorithm, _Reasons
from cryptography.hazmat.primitives import ciphers
from cryptography.hazmat.primitives.ciphers import modes
@utils.register_interface(ciphers.CipherContext)
@utils.register_interface(ciphers.AEADCipherContext)
@utils.register_interface(ciphers.AEADEncryptionContext)
class _CipherContext(object):
_ENCRYPT = 1
_DECRYPT = 0
def __init__(self, backend, cipher, mode, operation):
self._backend = backend
self._cipher = cipher
self._mode = mode
self._operation = operation
self._tag = None
if isinstance(self._cipher, ciphers.BlockCipherAlgorithm):
self._block_size = self._cipher.block_size
else:
self._block_size = 1
ctx = self._backend._lib.EVP_CIPHER_CTX_new()
ctx = self._backend._ffi.gc(
ctx, self._backend._lib.EVP_CIPHER_CTX_free
)
registry = self._backend._cipher_registry
try:
adapter = registry[type(cipher), type(mode)]
except KeyError:
raise UnsupportedAlgorithm(
"cipher {0} in {1} mode is not supported "
"by this backend.".format(
cipher.name, mode.name if mode else mode),
_Reasons.UNSUPPORTED_CIPHER
)
evp_cipher = adapter(self._backend, cipher, mode)
if evp_cipher == self._backend._ffi.NULL:
raise UnsupportedAlgorithm(
"cipher {0} in {1} mode is not supported "
"by this backend.".format(
cipher.name, mode.name if mode else mode),
_Reasons.UNSUPPORTED_CIPHER
)
if isinstance(mode, modes.ModeWithInitializationVector):
iv_nonce = mode.initialization_vector
elif isinstance(mode, modes.ModeWithNonce):
iv_nonce = mode.nonce
else:
iv_nonce = self._backend._ffi.NULL
# begin init with cipher and operation type
res = self._backend._lib.EVP_CipherInit_ex(ctx, evp_cipher,
self._backend._ffi.NULL,
self._backend._ffi.NULL,
self._backend._ffi.NULL,
operation)
self._backend.openssl_assert(res != 0)
# set the key length to handle variable key ciphers
res = self._backend._lib.EVP_CIPHER_CTX_set_key_length(
ctx, len(cipher.key)
)
self._backend.openssl_assert(res != 0)
if isinstance(mode, modes.GCM):
res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
ctx, self._backend._lib.EVP_CTRL_GCM_SET_IVLEN,
len(iv_nonce), self._backend._ffi.NULL
)
self._backend.openssl_assert(res != 0)
if operation == self._DECRYPT:
res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
ctx, self._backend._lib.EVP_CTRL_GCM_SET_TAG,
len(mode.tag), mode.tag
)
self._backend.openssl_assert(res != 0)
# pass key/iv
res = self._backend._lib.EVP_CipherInit_ex(
ctx,
self._backend._ffi.NULL,
self._backend._ffi.NULL,
cipher.key,
iv_nonce,
operation
)
self._backend.openssl_assert(res != 0)
# We purposely disable padding here as it's handled higher up in the
# API.
self._backend._lib.EVP_CIPHER_CTX_set_padding(ctx, 0)
self._ctx = ctx
def update(self, data):
# OpenSSL 0.9.8e has an assertion in its EVP code that causes it
# to SIGABRT if you call update with an empty byte string. This can be
# removed when we drop support for 0.9.8e (CentOS/RHEL 5). This branch
# should be taken only when length is zero and mode is not GCM because
# AES GCM can return improper tag values if you don't call update
# with empty plaintext when authenticating AAD for ...reasons.
if len(data) == 0 and not isinstance(self._mode, modes.GCM):
return b""
buf = self._backend._ffi.new("unsigned char[]",
len(data) + self._block_size - 1)
outlen = self._backend._ffi.new("int *")
res = self._backend._lib.EVP_CipherUpdate(self._ctx, buf, outlen, data,
len(data))
self._backend.openssl_assert(res != 0)
return self._backend._ffi.buffer(buf)[:outlen[0]]
def finalize(self):
# OpenSSL 1.0.1 on Ubuntu 12.04 (and possibly other distributions)
# appears to have a bug where you must make at least one call to update
# even if you are only using authenticate_additional_data or the
# GCM tag will be wrong. An (empty) call to update resolves this
# and is harmless for all other versions of OpenSSL.
if isinstance(self._mode, modes.GCM):
self.update(b"")
buf = self._backend._ffi.new("unsigned char[]", self._block_size)
outlen = self._backend._ffi.new("int *")
res = self._backend._lib.EVP_CipherFinal_ex(self._ctx, buf, outlen)
if res == 0:
errors = self._backend._consume_errors()
if not errors and isinstance(self._mode, modes.GCM):
raise InvalidTag
self._backend.openssl_assert(
errors[0][1:] == (
self._backend._lib.ERR_LIB_EVP,
self._backend._lib.EVP_F_EVP_ENCRYPTFINAL_EX,
self._backend._lib.EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH
) or errors[0][1:] == (
self._backend._lib.ERR_LIB_EVP,
self._backend._lib.EVP_F_EVP_DECRYPTFINAL_EX,
self._backend._lib.EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH
)
)
raise ValueError(
"The length of the provided data is not a multiple of "
"the block length."
)
if (isinstance(self._mode, modes.GCM) and
self._operation == self._ENCRYPT):
block_byte_size = self._block_size // 8
tag_buf = self._backend._ffi.new(
"unsigned char[]", block_byte_size
)
res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
self._ctx, self._backend._lib.EVP_CTRL_GCM_GET_TAG,
block_byte_size, tag_buf
)
self._backend.openssl_assert(res != 0)
self._tag = self._backend._ffi.buffer(tag_buf)[:]
res = self._backend._lib.EVP_CIPHER_CTX_cleanup(self._ctx)
self._backend.openssl_assert(res == 1)
return self._backend._ffi.buffer(buf)[:outlen[0]]
def authenticate_additional_data(self, data):
outlen = self._backend._ffi.new("int *")
res = self._backend._lib.EVP_CipherUpdate(
self._ctx, self._backend._ffi.NULL, outlen, data, len(data)
)
self._backend.openssl_assert(res != 0)
tag = utils.read_only_property("_tag")
@utils.register_interface(ciphers.CipherContext)
class _AESCTRCipherContext(object):
"""
This is needed to provide support for AES CTR mode in OpenSSL 0.9.8. It can
be removed when we drop 0.9.8 support (RHEL5 extended life ends 2020).
"""
def __init__(self, backend, cipher, mode):
self._backend = backend
self._key = self._backend._ffi.new("AES_KEY *")
res = self._backend._lib.AES_set_encrypt_key(
cipher.key, len(cipher.key) * 8, self._key
)
self._backend.openssl_assert(res == 0)
self._ecount = self._backend._ffi.new("char[]", 16)
self._nonce = self._backend._ffi.new("char[16]", mode.nonce)
self._num = self._backend._ffi.new("unsigned int *", 0)
def update(self, data):
buf = self._backend._ffi.new("unsigned char[]", len(data))
self._backend._lib.AES_ctr128_encrypt(
data, buf, len(data), self._key, self._nonce,
self._ecount, self._num
)
return self._backend._ffi.buffer(buf)[:]
def finalize(self):
self._key = None
self._ecount = None
self._nonce = None
self._num = None
return b""

View file

@@ -0,0 +1,80 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.exceptions import (
InvalidSignature, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.primitives import constant_time, interfaces
from cryptography.hazmat.primitives.ciphers.modes import CBC
@utils.register_interface(interfaces.MACContext)
class _CMACContext(object):
def __init__(self, backend, algorithm, ctx=None):
if not backend.cmac_algorithm_supported(algorithm):
raise UnsupportedAlgorithm("This backend does not support CMAC.",
_Reasons.UNSUPPORTED_CIPHER)
self._backend = backend
self._key = algorithm.key
self._algorithm = algorithm
self._output_length = algorithm.block_size // 8
if ctx is None:
registry = self._backend._cipher_registry
adapter = registry[type(algorithm), CBC]
evp_cipher = adapter(self._backend, algorithm, CBC)
ctx = self._backend._lib.CMAC_CTX_new()
self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
ctx = self._backend._ffi.gc(ctx, self._backend._lib.CMAC_CTX_free)
self._backend._lib.CMAC_Init(
ctx, self._key, len(self._key),
evp_cipher, self._backend._ffi.NULL
)
self._ctx = ctx
algorithm = utils.read_only_property("_algorithm")
def update(self, data):
res = self._backend._lib.CMAC_Update(self._ctx, data, len(data))
self._backend.openssl_assert(res == 1)
def finalize(self):
buf = self._backend._ffi.new("unsigned char[]", self._output_length)
length = self._backend._ffi.new("size_t *", self._output_length)
res = self._backend._lib.CMAC_Final(
self._ctx, buf, length
)
self._backend.openssl_assert(res == 1)
self._ctx = None
return self._backend._ffi.buffer(buf)[:]
def copy(self):
copied_ctx = self._backend._lib.CMAC_CTX_new()
copied_ctx = self._backend._ffi.gc(
copied_ctx, self._backend._lib.CMAC_CTX_free
)
res = self._backend._lib.CMAC_CTX_copy(
copied_ctx, self._ctx
)
self._backend.openssl_assert(res == 1)
return _CMACContext(
self._backend, self._algorithm, ctx=copied_ctx
)
def verify(self, signature):
digest = self.finalize()
if not constant_time.bytes_eq(digest, signature):
raise InvalidSignature("Signature did not match digest.")
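The context above is normally reached through the primitives-layer CMAC class; a hedged sketch:

# Hypothetical sketch of AES-CMAC via the primitives API.
import os
from cryptography.hazmat.backends.openssl import backend
from cryptography.hazmat.primitives.ciphers import algorithms
from cryptography.hazmat.primitives.cmac import CMAC

c = CMAC(algorithms.AES(os.urandom(16)), backend)
c.update(b"message to authenticate")
tag = c.finalize()          # 16 bytes: the AES block size // 8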

View file

@@ -0,0 +1,218 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.backends.openssl.utils import _truncate_digest
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import (
AsymmetricSignatureContext, AsymmetricVerificationContext, dsa
)
def _truncate_digest_for_dsa(dsa_cdata, digest, backend):
"""
This function truncates digests that are longer than a given DSA
key's length so they can be signed. OpenSSL does this for us in
1.0.0c+ and it isn't needed in 0.9.8, but that leaves us with three
releases (1.0.0, 1.0.0a, and 1.0.0b) where this is a problem. This
truncation is not required in 0.9.8 because DSA is limited to SHA-1.
"""
order_bits = backend._lib.BN_num_bits(dsa_cdata.q)
return _truncate_digest(digest, order_bits)
@utils.register_interface(AsymmetricVerificationContext)
class _DSAVerificationContext(object):
def __init__(self, backend, public_key, signature, algorithm):
self._backend = backend
self._public_key = public_key
self._signature = signature
self._algorithm = algorithm
self._hash_ctx = hashes.Hash(self._algorithm, self._backend)
def update(self, data):
self._hash_ctx.update(data)
def verify(self):
data_to_verify = self._hash_ctx.finalize()
data_to_verify = _truncate_digest_for_dsa(
self._public_key._dsa_cdata, data_to_verify, self._backend
)
# The first parameter passed to DSA_verify is unused by OpenSSL but
# must be an integer.
res = self._backend._lib.DSA_verify(
0, data_to_verify, len(data_to_verify), self._signature,
len(self._signature), self._public_key._dsa_cdata)
if res != 1:
self._backend._consume_errors()
raise InvalidSignature
@utils.register_interface(AsymmetricSignatureContext)
class _DSASignatureContext(object):
def __init__(self, backend, private_key, algorithm):
self._backend = backend
self._private_key = private_key
self._algorithm = algorithm
self._hash_ctx = hashes.Hash(self._algorithm, self._backend)
def update(self, data):
self._hash_ctx.update(data)
def finalize(self):
data_to_sign = self._hash_ctx.finalize()
data_to_sign = _truncate_digest_for_dsa(
self._private_key._dsa_cdata, data_to_sign, self._backend
)
sig_buf_len = self._backend._lib.DSA_size(self._private_key._dsa_cdata)
sig_buf = self._backend._ffi.new("unsigned char[]", sig_buf_len)
buflen = self._backend._ffi.new("unsigned int *")
# The first parameter passed to DSA_sign is unused by OpenSSL but
# must be an integer.
res = self._backend._lib.DSA_sign(
0, data_to_sign, len(data_to_sign), sig_buf,
buflen, self._private_key._dsa_cdata)
self._backend.openssl_assert(res == 1)
self._backend.openssl_assert(buflen[0])
return self._backend._ffi.buffer(sig_buf)[:buflen[0]]
@utils.register_interface(dsa.DSAParametersWithNumbers)
class _DSAParameters(object):
def __init__(self, backend, dsa_cdata):
self._backend = backend
self._dsa_cdata = dsa_cdata
def parameter_numbers(self):
return dsa.DSAParameterNumbers(
p=self._backend._bn_to_int(self._dsa_cdata.p),
q=self._backend._bn_to_int(self._dsa_cdata.q),
g=self._backend._bn_to_int(self._dsa_cdata.g)
)
def generate_private_key(self):
return self._backend.generate_dsa_private_key(self)
@utils.register_interface(dsa.DSAPrivateKeyWithSerialization)
class _DSAPrivateKey(object):
def __init__(self, backend, dsa_cdata, evp_pkey):
self._backend = backend
self._dsa_cdata = dsa_cdata
self._evp_pkey = evp_pkey
self._key_size = self._backend._lib.BN_num_bits(self._dsa_cdata.p)
key_size = utils.read_only_property("_key_size")
def signer(self, signature_algorithm):
return _DSASignatureContext(self._backend, self, signature_algorithm)
def private_numbers(self):
return dsa.DSAPrivateNumbers(
public_numbers=dsa.DSAPublicNumbers(
parameter_numbers=dsa.DSAParameterNumbers(
p=self._backend._bn_to_int(self._dsa_cdata.p),
q=self._backend._bn_to_int(self._dsa_cdata.q),
g=self._backend._bn_to_int(self._dsa_cdata.g)
),
y=self._backend._bn_to_int(self._dsa_cdata.pub_key)
),
x=self._backend._bn_to_int(self._dsa_cdata.priv_key)
)
def public_key(self):
dsa_cdata = self._backend._lib.DSA_new()
self._backend.openssl_assert(dsa_cdata != self._backend._ffi.NULL)
dsa_cdata = self._backend._ffi.gc(
dsa_cdata, self._backend._lib.DSA_free
)
dsa_cdata.p = self._backend._lib.BN_dup(self._dsa_cdata.p)
dsa_cdata.q = self._backend._lib.BN_dup(self._dsa_cdata.q)
dsa_cdata.g = self._backend._lib.BN_dup(self._dsa_cdata.g)
dsa_cdata.pub_key = self._backend._lib.BN_dup(self._dsa_cdata.pub_key)
evp_pkey = self._backend._dsa_cdata_to_evp_pkey(dsa_cdata)
return _DSAPublicKey(self._backend, dsa_cdata, evp_pkey)
def parameters(self):
dsa_cdata = self._backend._lib.DSA_new()
self._backend.openssl_assert(dsa_cdata != self._backend._ffi.NULL)
dsa_cdata = self._backend._ffi.gc(
dsa_cdata, self._backend._lib.DSA_free
)
dsa_cdata.p = self._backend._lib.BN_dup(self._dsa_cdata.p)
dsa_cdata.q = self._backend._lib.BN_dup(self._dsa_cdata.q)
dsa_cdata.g = self._backend._lib.BN_dup(self._dsa_cdata.g)
return _DSAParameters(self._backend, dsa_cdata)
def private_bytes(self, encoding, format, encryption_algorithm):
return self._backend._private_key_bytes(
encoding,
format,
encryption_algorithm,
self._evp_pkey,
self._dsa_cdata
)
@utils.register_interface(dsa.DSAPublicKeyWithSerialization)
class _DSAPublicKey(object):
def __init__(self, backend, dsa_cdata, evp_pkey):
self._backend = backend
self._dsa_cdata = dsa_cdata
self._evp_pkey = evp_pkey
self._key_size = self._backend._lib.BN_num_bits(self._dsa_cdata.p)
key_size = utils.read_only_property("_key_size")
def verifier(self, signature, signature_algorithm):
if not isinstance(signature, bytes):
raise TypeError("signature must be bytes.")
return _DSAVerificationContext(
self._backend, self, signature, signature_algorithm
)
def public_numbers(self):
return dsa.DSAPublicNumbers(
parameter_numbers=dsa.DSAParameterNumbers(
p=self._backend._bn_to_int(self._dsa_cdata.p),
q=self._backend._bn_to_int(self._dsa_cdata.q),
g=self._backend._bn_to_int(self._dsa_cdata.g)
),
y=self._backend._bn_to_int(self._dsa_cdata.pub_key)
)
def parameters(self):
dsa_cdata = self._backend._lib.DSA_new()
self._backend.openssl_assert(dsa_cdata != self._backend._ffi.NULL)
dsa_cdata = self._backend._ffi.gc(
dsa_cdata, self._backend._lib.DSA_free
)
dsa_cdata.p = self._backend._lib.BN_dup(self._dsa_cdata.p)
dsa_cdata.q = self._backend._lib.BN_dup(self._dsa_cdata.q)
dsa_cdata.g = self._backend._lib.BN_dup(self._dsa_cdata.g)
return _DSAParameters(self._backend, dsa_cdata)
def public_bytes(self, encoding, format):
if format is serialization.PublicFormat.PKCS1:
raise ValueError(
"DSA public keys do not support PKCS1 serialization"
)
return self._backend._public_key_bytes(
encoding,
format,
self._evp_pkey,
None
)
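Finally, a hedged sketch of the signer()/verifier() flow implemented above; the 1024-bit key size is only for illustration:

# Hypothetical sketch of a DSA sign/verify round trip.
from cryptography.hazmat.backends.openssl import backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import dsa

private_key = dsa.generate_private_key(key_size=1024, backend=backend)
signer = private_key.signer(hashes.SHA256())
signer.update(b"signed payload")
signature = signer.finalize()

verifier = private_key.public_key().verifier(signature, hashes.SHA256())
verifier.update(b"signed payload")
verifier.verify()          # raises InvalidSignature if data or key differ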

View file

@@ -0,0 +1,299 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.exceptions import (
InvalidSignature, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.backends.openssl.utils import _truncate_digest
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import (
AsymmetricSignatureContext, AsymmetricVerificationContext, ec
)
def _truncate_digest_for_ecdsa(ec_key_cdata, digest, backend):
"""
This function truncates digests that are longer than a given elliptic
curve key's length so they can be signed. Since elliptic curve keys are
much shorter than RSA keys many digests (e.g. SHA-512) may require
truncation.
"""
_lib = backend._lib
_ffi = backend._ffi
group = _lib.EC_KEY_get0_group(ec_key_cdata)
with backend._tmp_bn_ctx() as bn_ctx:
order = _lib.BN_CTX_get(bn_ctx)
backend.openssl_assert(order != _ffi.NULL)
res = _lib.EC_GROUP_get_order(group, order, bn_ctx)
backend.openssl_assert(res == 1)
order_bits = _lib.BN_num_bits(order)
return _truncate_digest(digest, order_bits)
def _ec_key_curve_sn(backend, ec_key):
group = backend._lib.EC_KEY_get0_group(ec_key)
backend.openssl_assert(group != backend._ffi.NULL)
nid = backend._lib.EC_GROUP_get_curve_name(group)
# The following check is to find EC keys with unnamed curves and raise
# an error for now.
if nid == backend._lib.NID_undef:
raise NotImplementedError(
"ECDSA certificates with unnamed curves are unsupported "
"at this time"
)
curve_name = backend._lib.OBJ_nid2sn(nid)
backend.openssl_assert(curve_name != backend._ffi.NULL)
sn = backend._ffi.string(curve_name).decode('ascii')
return sn
def _mark_asn1_named_ec_curve(backend, ec_cdata):
"""
Set the named curve flag on the EC_KEY. This causes OpenSSL to
serialize EC keys along with their curve OID which makes
deserialization easier.
"""
backend._lib.EC_KEY_set_asn1_flag(
ec_cdata, backend._lib.OPENSSL_EC_NAMED_CURVE
)
def _sn_to_elliptic_curve(backend, sn):
try:
return ec._CURVE_TYPES[sn]()
except KeyError:
raise UnsupportedAlgorithm(
"{0} is not a supported elliptic curve".format(sn),
_Reasons.UNSUPPORTED_ELLIPTIC_CURVE
)
@utils.register_interface(AsymmetricSignatureContext)
class _ECDSASignatureContext(object):
def __init__(self, backend, private_key, algorithm):
self._backend = backend
self._private_key = private_key
self._digest = hashes.Hash(algorithm, backend)
def update(self, data):
self._digest.update(data)
def finalize(self):
ec_key = self._private_key._ec_key
digest = self._digest.finalize()
digest = _truncate_digest_for_ecdsa(ec_key, digest, self._backend)
max_size = self._backend._lib.ECDSA_size(ec_key)
self._backend.openssl_assert(max_size > 0)
sigbuf = self._backend._ffi.new("char[]", max_size)
siglen_ptr = self._backend._ffi.new("unsigned int[]", 1)
res = self._backend._lib.ECDSA_sign(
0,
digest,
len(digest),
sigbuf,
siglen_ptr,
ec_key
)
self._backend.openssl_assert(res == 1)
return self._backend._ffi.buffer(sigbuf)[:siglen_ptr[0]]
@utils.register_interface(AsymmetricVerificationContext)
class _ECDSAVerificationContext(object):
def __init__(self, backend, public_key, signature, algorithm):
self._backend = backend
self._public_key = public_key
self._signature = signature
self._digest = hashes.Hash(algorithm, backend)
def update(self, data):
self._digest.update(data)
def verify(self):
ec_key = self._public_key._ec_key
digest = self._digest.finalize()
digest = _truncate_digest_for_ecdsa(ec_key, digest, self._backend)
res = self._backend._lib.ECDSA_verify(
0,
digest,
len(digest),
self._signature,
len(self._signature),
ec_key
)
if res != 1:
self._backend._consume_errors()
raise InvalidSignature
return True
@utils.register_interface(ec.EllipticCurvePrivateKeyWithSerialization)
class _EllipticCurvePrivateKey(object):
def __init__(self, backend, ec_key_cdata, evp_pkey):
self._backend = backend
_mark_asn1_named_ec_curve(backend, ec_key_cdata)
self._ec_key = ec_key_cdata
self._evp_pkey = evp_pkey
sn = _ec_key_curve_sn(backend, ec_key_cdata)
self._curve = _sn_to_elliptic_curve(backend, sn)
curve = utils.read_only_property("_curve")
def signer(self, signature_algorithm):
if isinstance(signature_algorithm, ec.ECDSA):
return _ECDSASignatureContext(
self._backend, self, signature_algorithm.algorithm
)
else:
raise UnsupportedAlgorithm(
"Unsupported elliptic curve signature algorithm.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def exchange(self, algorithm, peer_public_key):
if not (
self._backend.elliptic_curve_exchange_algorithm_supported(
algorithm, self.curve
)
):
raise UnsupportedAlgorithm(
"This backend does not support the ECDH algorithm.",
_Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM
)
group = self._backend._lib.EC_KEY_get0_group(self._ec_key)
z_len = (self._backend._lib.EC_GROUP_get_degree(group) + 7) // 8
self._backend.openssl_assert(z_len > 0)
z_buf = self._backend._ffi.new("uint8_t[]", z_len)
peer_key = self._backend._lib.EC_KEY_get0_public_key(
peer_public_key._ec_key
)
r = self._backend._lib.ECDH_compute_key(
z_buf, z_len, peer_key, self._ec_key, self._backend._ffi.NULL
)
self._backend.openssl_assert(r > 0)
return self._backend._ffi.buffer(z_buf)[:z_len]
def public_key(self):
group = self._backend._lib.EC_KEY_get0_group(self._ec_key)
self._backend.openssl_assert(group != self._backend._ffi.NULL)
curve_nid = self._backend._lib.EC_GROUP_get_curve_name(group)
public_ec_key = self._backend._lib.EC_KEY_new_by_curve_name(curve_nid)
self._backend.openssl_assert(public_ec_key != self._backend._ffi.NULL)
public_ec_key = self._backend._ffi.gc(
public_ec_key, self._backend._lib.EC_KEY_free
)
point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key)
self._backend.openssl_assert(point != self._backend._ffi.NULL)
res = self._backend._lib.EC_KEY_set_public_key(public_ec_key, point)
self._backend.openssl_assert(res == 1)
evp_pkey = self._backend._ec_cdata_to_evp_pkey(public_ec_key)
return _EllipticCurvePublicKey(self._backend, public_ec_key, evp_pkey)
def private_numbers(self):
bn = self._backend._lib.EC_KEY_get0_private_key(self._ec_key)
private_value = self._backend._bn_to_int(bn)
return ec.EllipticCurvePrivateNumbers(
private_value=private_value,
public_numbers=self.public_key().public_numbers()
)
def private_bytes(self, encoding, format, encryption_algorithm):
return self._backend._private_key_bytes(
encoding,
format,
encryption_algorithm,
self._evp_pkey,
self._ec_key
)
@utils.register_interface(ec.EllipticCurvePublicKeyWithSerialization)
class _EllipticCurvePublicKey(object):
def __init__(self, backend, ec_key_cdata, evp_pkey):
self._backend = backend
_mark_asn1_named_ec_curve(backend, ec_key_cdata)
self._ec_key = ec_key_cdata
self._evp_pkey = evp_pkey
sn = _ec_key_curve_sn(backend, ec_key_cdata)
self._curve = _sn_to_elliptic_curve(backend, sn)
curve = utils.read_only_property("_curve")
def verifier(self, signature, signature_algorithm):
if not isinstance(signature, bytes):
raise TypeError("signature must be bytes.")
if isinstance(signature_algorithm, ec.ECDSA):
return _ECDSAVerificationContext(
self._backend, self, signature, signature_algorithm.algorithm
)
else:
raise UnsupportedAlgorithm(
"Unsupported elliptic curve signature algorithm.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def public_numbers(self):
set_func, get_func, group = (
self._backend._ec_key_determine_group_get_set_funcs(self._ec_key)
)
point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key)
self._backend.openssl_assert(point != self._backend._ffi.NULL)
with self._backend._tmp_bn_ctx() as bn_ctx:
bn_x = self._backend._lib.BN_CTX_get(bn_ctx)
bn_y = self._backend._lib.BN_CTX_get(bn_ctx)
res = get_func(group, point, bn_x, bn_y, bn_ctx)
self._backend.openssl_assert(res == 1)
x = self._backend._bn_to_int(bn_x)
y = self._backend._bn_to_int(bn_y)
return ec.EllipticCurvePublicNumbers(
x=x,
y=y,
curve=self._curve
)
def public_bytes(self, encoding, format):
if format is serialization.PublicFormat.PKCS1:
raise ValueError(
"EC public keys do not support PKCS1 serialization"
)
return self._backend._public_key_bytes(
encoding,
format,
self._evp_pkey,
None
)
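A hedged sketch of how these classes are exercised through the public ec API; the curve, hash, and message below are illustrative:

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec

private_key = ec.generate_private_key(ec.SECP256R1(), default_backend())

# Sign and verify via the signer()/verifier() contexts implemented above.
signer = private_key.signer(ec.ECDSA(hashes.SHA256()))
signer.update(b"example message")
signature = signer.finalize()

verifier = private_key.public_key().verifier(signature, ec.ECDSA(hashes.SHA256()))
verifier.update(b"example message")
verifier.verify()  # raises InvalidSignature on mismatch

# ECDH key agreement via exchange(); in practice the peer key comes from the
# other party rather than being generated locally.
peer_private_key = ec.generate_private_key(ec.SECP256R1(), default_backend())
shared_secret = private_key.exchange(ec.ECDH(), peer_private_key.public_key())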

View file

@@ -0,0 +1,62 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
from cryptography.hazmat.primitives import hashes
@utils.register_interface(hashes.HashContext)
class _HashContext(object):
def __init__(self, backend, algorithm, ctx=None):
self._algorithm = algorithm
self._backend = backend
if ctx is None:
ctx = self._backend._lib.EVP_MD_CTX_create()
ctx = self._backend._ffi.gc(ctx,
self._backend._lib.EVP_MD_CTX_destroy)
evp_md = self._backend._lib.EVP_get_digestbyname(
algorithm.name.encode("ascii"))
if evp_md == self._backend._ffi.NULL:
raise UnsupportedAlgorithm(
"{0} is not a supported hash on this backend.".format(
algorithm.name),
_Reasons.UNSUPPORTED_HASH
)
res = self._backend._lib.EVP_DigestInit_ex(ctx, evp_md,
self._backend._ffi.NULL)
self._backend.openssl_assert(res != 0)
self._ctx = ctx
algorithm = utils.read_only_property("_algorithm")
def copy(self):
copied_ctx = self._backend._lib.EVP_MD_CTX_create()
copied_ctx = self._backend._ffi.gc(
copied_ctx, self._backend._lib.EVP_MD_CTX_destroy
)
res = self._backend._lib.EVP_MD_CTX_copy_ex(copied_ctx, self._ctx)
self._backend.openssl_assert(res != 0)
return _HashContext(self._backend, self.algorithm, ctx=copied_ctx)
def update(self, data):
res = self._backend._lib.EVP_DigestUpdate(self._ctx, data, len(data))
self._backend.openssl_assert(res != 0)
def finalize(self):
buf = self._backend._ffi.new("unsigned char[]",
self._backend._lib.EVP_MAX_MD_SIZE)
outlen = self._backend._ffi.new("unsigned int *")
res = self._backend._lib.EVP_DigestFinal_ex(self._ctx, buf, outlen)
self._backend.openssl_assert(res != 0)
self._backend.openssl_assert(outlen[0] == self.algorithm.digest_size)
res = self._backend._lib.EVP_MD_CTX_cleanup(self._ctx)
self._backend.openssl_assert(res == 1)
return self._backend._ffi.buffer(buf)[:outlen[0]]
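A minimal sketch of the front-end hashes.Hash API that drives this context; the algorithm and data are illustrative:

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes

digest = hashes.Hash(hashes.SHA256(), backend=default_backend())
digest.update(b"abc")
copied = digest.copy()  # independent copy backed by EVP_MD_CTX_copy_ex above
print(digest.finalize() == copied.finalize())  # True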

View file

@@ -0,0 +1,81 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.exceptions import (
InvalidSignature, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.primitives import constant_time, hashes, interfaces
@utils.register_interface(interfaces.MACContext)
@utils.register_interface(hashes.HashContext)
class _HMACContext(object):
def __init__(self, backend, key, algorithm, ctx=None):
self._algorithm = algorithm
self._backend = backend
if ctx is None:
ctx = self._backend._ffi.new("HMAC_CTX *")
self._backend._lib.HMAC_CTX_init(ctx)
ctx = self._backend._ffi.gc(
ctx, self._backend._lib.HMAC_CTX_cleanup
)
evp_md = self._backend._lib.EVP_get_digestbyname(
algorithm.name.encode('ascii'))
if evp_md == self._backend._ffi.NULL:
raise UnsupportedAlgorithm(
"{0} is not a supported hash on this backend.".format(
algorithm.name),
_Reasons.UNSUPPORTED_HASH
)
res = self._backend._lib.Cryptography_HMAC_Init_ex(
ctx, key, len(key), evp_md, self._backend._ffi.NULL
)
self._backend.openssl_assert(res != 0)
self._ctx = ctx
self._key = key
algorithm = utils.read_only_property("_algorithm")
def copy(self):
copied_ctx = self._backend._ffi.new("HMAC_CTX *")
self._backend._lib.HMAC_CTX_init(copied_ctx)
copied_ctx = self._backend._ffi.gc(
copied_ctx, self._backend._lib.HMAC_CTX_cleanup
)
res = self._backend._lib.Cryptography_HMAC_CTX_copy(
copied_ctx, self._ctx
)
self._backend.openssl_assert(res != 0)
return _HMACContext(
self._backend, self._key, self.algorithm, ctx=copied_ctx
)
def update(self, data):
res = self._backend._lib.Cryptography_HMAC_Update(
self._ctx, data, len(data)
)
self._backend.openssl_assert(res != 0)
def finalize(self):
buf = self._backend._ffi.new("unsigned char[]",
self._backend._lib.EVP_MAX_MD_SIZE)
outlen = self._backend._ffi.new("unsigned int *")
res = self._backend._lib.Cryptography_HMAC_Final(
self._ctx, buf, outlen
)
self._backend.openssl_assert(res != 0)
self._backend.openssl_assert(outlen[0] == self.algorithm.digest_size)
self._backend._lib.HMAC_CTX_cleanup(self._ctx)
return self._backend._ffi.buffer(buf)[:outlen[0]]
def verify(self, signature):
digest = self.finalize()
if not constant_time.bytes_eq(digest, signature):
raise InvalidSignature("Signature did not match digest.")
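A minimal sketch of the front-end hmac.HMAC API that drives this context; the key and message are illustrative placeholders:

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, hmac

key = b"\x00" * 32  # illustrative key; use a securely generated key in practice
h = hmac.HMAC(key, hashes.SHA256(), backend=default_backend())
h.update(b"payload")
tag = h.finalize()

# verify() compares with constant_time.bytes_eq as shown above and raises
# InvalidSignature when the tags differ.
h2 = hmac.HMAC(key, hashes.SHA256(), backend=default_backend())
h2.update(b"payload")
h2.verify(tag)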

View file

@@ -0,0 +1,604 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import math
from cryptography import utils
from cryptography.exceptions import (
AlreadyFinalized, InvalidSignature, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import (
AsymmetricSignatureContext, AsymmetricVerificationContext, rsa
)
from cryptography.hazmat.primitives.asymmetric.padding import (
AsymmetricPadding, MGF1, OAEP, PKCS1v15, PSS
)
from cryptography.hazmat.primitives.asymmetric.rsa import (
RSAPrivateKeyWithSerialization, RSAPublicKeyWithSerialization
)
def _get_rsa_pss_salt_length(pss, key_size, digest_size):
salt = pss._salt_length
if salt is MGF1.MAX_LENGTH or salt is PSS.MAX_LENGTH:
# bit length - 1 per RFC 3447
emlen = int(math.ceil((key_size - 1) / 8.0))
salt_length = emlen - digest_size - 2
assert salt_length >= 0
return salt_length
else:
return salt
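# Worked example (illustrative): for a 2048-bit key and SHA-256
# (digest_size == 32), emlen == int(math.ceil((2048 - 1) / 8.0)) == 256 and
# the maximum salt length is 256 - 32 - 2 == 222 bytes.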
def _enc_dec_rsa(backend, key, data, padding):
if not isinstance(padding, AsymmetricPadding):
raise TypeError("Padding must be an instance of AsymmetricPadding.")
if isinstance(padding, PKCS1v15):
padding_enum = backend._lib.RSA_PKCS1_PADDING
elif isinstance(padding, OAEP):
padding_enum = backend._lib.RSA_PKCS1_OAEP_PADDING
if not isinstance(padding._mgf, MGF1):
raise UnsupportedAlgorithm(
"Only MGF1 is supported by this backend.",
_Reasons.UNSUPPORTED_MGF
)
if not isinstance(padding._mgf._algorithm, hashes.SHA1):
raise UnsupportedAlgorithm(
"This backend supports only SHA1 inside MGF1 when "
"using OAEP.",
_Reasons.UNSUPPORTED_HASH
)
if padding._label is not None and padding._label != b"":
raise ValueError("This backend does not support OAEP labels.")
if not isinstance(padding._algorithm, hashes.SHA1):
raise UnsupportedAlgorithm(
"This backend only supports SHA1 when using OAEP.",
_Reasons.UNSUPPORTED_HASH
)
else:
raise UnsupportedAlgorithm(
"{0} is not supported by this backend.".format(
padding.name
),
_Reasons.UNSUPPORTED_PADDING
)
if backend._lib.Cryptography_HAS_PKEY_CTX:
return _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum)
else:
return _enc_dec_rsa_098(backend, key, data, padding_enum)
def _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum):
if isinstance(key, _RSAPublicKey):
init = backend._lib.EVP_PKEY_encrypt_init
crypt = backend._lib.Cryptography_EVP_PKEY_encrypt
else:
init = backend._lib.EVP_PKEY_decrypt_init
crypt = backend._lib.Cryptography_EVP_PKEY_decrypt
pkey_ctx = backend._lib.EVP_PKEY_CTX_new(
key._evp_pkey, backend._ffi.NULL
)
backend.openssl_assert(pkey_ctx != backend._ffi.NULL)
pkey_ctx = backend._ffi.gc(pkey_ctx, backend._lib.EVP_PKEY_CTX_free)
res = init(pkey_ctx)
backend.openssl_assert(res == 1)
res = backend._lib.EVP_PKEY_CTX_set_rsa_padding(
pkey_ctx, padding_enum)
backend.openssl_assert(res > 0)
buf_size = backend._lib.EVP_PKEY_size(key._evp_pkey)
backend.openssl_assert(buf_size > 0)
outlen = backend._ffi.new("size_t *", buf_size)
buf = backend._ffi.new("char[]", buf_size)
res = crypt(pkey_ctx, buf, outlen, data, len(data))
if res <= 0:
_handle_rsa_enc_dec_error(backend, key)
return backend._ffi.buffer(buf)[:outlen[0]]
def _enc_dec_rsa_098(backend, key, data, padding_enum):
if isinstance(key, _RSAPublicKey):
crypt = backend._lib.RSA_public_encrypt
else:
crypt = backend._lib.RSA_private_decrypt
key_size = backend._lib.RSA_size(key._rsa_cdata)
backend.openssl_assert(key_size > 0)
buf = backend._ffi.new("unsigned char[]", key_size)
res = crypt(len(data), data, buf, key._rsa_cdata, padding_enum)
if res < 0:
_handle_rsa_enc_dec_error(backend, key)
return backend._ffi.buffer(buf)[:res]
def _handle_rsa_enc_dec_error(backend, key):
errors = backend._consume_errors()
assert errors
assert errors[0].lib == backend._lib.ERR_LIB_RSA
if isinstance(key, _RSAPublicKey):
assert (errors[0].reason ==
backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE)
raise ValueError(
"Data too long for key size. Encrypt less data or use a "
"larger key size."
)
else:
decoding_errors = [
backend._lib.RSA_R_BLOCK_TYPE_IS_NOT_01,
backend._lib.RSA_R_BLOCK_TYPE_IS_NOT_02,
]
if backend._lib.Cryptography_HAS_RSA_R_PKCS_DECODING_ERROR:
decoding_errors.append(backend._lib.RSA_R_PKCS_DECODING_ERROR)
assert errors[0].reason in decoding_errors
raise ValueError("Decryption failed.")
@utils.register_interface(AsymmetricSignatureContext)
class _RSASignatureContext(object):
def __init__(self, backend, private_key, padding, algorithm):
self._backend = backend
self._private_key = private_key
if not isinstance(padding, AsymmetricPadding):
raise TypeError("Expected provider of AsymmetricPadding.")
self._pkey_size = self._backend._lib.EVP_PKEY_size(
self._private_key._evp_pkey
)
self._backend.openssl_assert(self._pkey_size > 0)
if isinstance(padding, PKCS1v15):
if self._backend._lib.Cryptography_HAS_PKEY_CTX:
self._finalize_method = self._finalize_pkey_ctx
self._padding_enum = self._backend._lib.RSA_PKCS1_PADDING
else:
self._finalize_method = self._finalize_pkcs1
elif isinstance(padding, PSS):
if not isinstance(padding._mgf, MGF1):
raise UnsupportedAlgorithm(
"Only MGF1 is supported by this backend.",
_Reasons.UNSUPPORTED_MGF
)
# Size of key in bytes - 2 is the maximum
# PSS signature length (salt length is checked later)
if self._pkey_size - algorithm.digest_size - 2 < 0:
raise ValueError("Digest too large for key size. Use a larger "
"key.")
if not self._backend._mgf1_hash_supported(padding._mgf._algorithm):
raise UnsupportedAlgorithm(
"When OpenSSL is older than 1.0.1 then only SHA1 is "
"supported with MGF1.",
_Reasons.UNSUPPORTED_HASH
)
if self._backend._lib.Cryptography_HAS_PKEY_CTX:
self._finalize_method = self._finalize_pkey_ctx
self._padding_enum = self._backend._lib.RSA_PKCS1_PSS_PADDING
else:
self._finalize_method = self._finalize_pss
else:
raise UnsupportedAlgorithm(
"{0} is not supported by this backend.".format(padding.name),
_Reasons.UNSUPPORTED_PADDING
)
self._padding = padding
self._algorithm = algorithm
self._hash_ctx = hashes.Hash(self._algorithm, self._backend)
def update(self, data):
self._hash_ctx.update(data)
def finalize(self):
evp_md = self._backend._lib.EVP_get_digestbyname(
self._algorithm.name.encode("ascii"))
self._backend.openssl_assert(evp_md != self._backend._ffi.NULL)
return self._finalize_method(evp_md)
def _finalize_pkey_ctx(self, evp_md):
pkey_ctx = self._backend._lib.EVP_PKEY_CTX_new(
self._private_key._evp_pkey, self._backend._ffi.NULL
)
self._backend.openssl_assert(pkey_ctx != self._backend._ffi.NULL)
pkey_ctx = self._backend._ffi.gc(pkey_ctx,
self._backend._lib.EVP_PKEY_CTX_free)
res = self._backend._lib.EVP_PKEY_sign_init(pkey_ctx)
self._backend.openssl_assert(res == 1)
res = self._backend._lib.EVP_PKEY_CTX_set_signature_md(
pkey_ctx, evp_md)
self._backend.openssl_assert(res > 0)
res = self._backend._lib.EVP_PKEY_CTX_set_rsa_padding(
pkey_ctx, self._padding_enum)
self._backend.openssl_assert(res > 0)
if isinstance(self._padding, PSS):
res = self._backend._lib.EVP_PKEY_CTX_set_rsa_pss_saltlen(
pkey_ctx,
_get_rsa_pss_salt_length(
self._padding,
self._private_key.key_size,
self._hash_ctx.algorithm.digest_size
)
)
self._backend.openssl_assert(res > 0)
if self._backend._lib.Cryptography_HAS_MGF1_MD:
# MGF1 MD is configurable in OpenSSL 1.0.1+
mgf1_md = self._backend._lib.EVP_get_digestbyname(
self._padding._mgf._algorithm.name.encode("ascii"))
self._backend.openssl_assert(
mgf1_md != self._backend._ffi.NULL
)
res = self._backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md(
pkey_ctx, mgf1_md
)
self._backend.openssl_assert(res > 0)
data_to_sign = self._hash_ctx.finalize()
buflen = self._backend._ffi.new("size_t *")
res = self._backend._lib.EVP_PKEY_sign(
pkey_ctx,
self._backend._ffi.NULL,
buflen,
data_to_sign,
len(data_to_sign)
)
self._backend.openssl_assert(res == 1)
buf = self._backend._ffi.new("unsigned char[]", buflen[0])
res = self._backend._lib.EVP_PKEY_sign(
pkey_ctx, buf, buflen, data_to_sign, len(data_to_sign))
if res != 1:
errors = self._backend._consume_errors()
assert errors[0].lib == self._backend._lib.ERR_LIB_RSA
reason = None
if (errors[0].reason ==
self._backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE):
reason = ("Salt length too long for key size. Try using "
"MAX_LENGTH instead.")
else:
assert (errors[0].reason ==
self._backend._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY)
reason = "Digest too large for key size. Use a larger key."
assert reason is not None
raise ValueError(reason)
return self._backend._ffi.buffer(buf)[:]
def _finalize_pkcs1(self, evp_md):
if self._hash_ctx._ctx is None:
raise AlreadyFinalized("Context has already been finalized.")
sig_buf = self._backend._ffi.new("char[]", self._pkey_size)
sig_len = self._backend._ffi.new("unsigned int *")
res = self._backend._lib.EVP_SignFinal(
self._hash_ctx._ctx._ctx,
sig_buf,
sig_len,
self._private_key._evp_pkey
)
self._hash_ctx.finalize()
if res == 0:
errors = self._backend._consume_errors()
assert errors[0].lib == self._backend._lib.ERR_LIB_RSA
assert (errors[0].reason ==
self._backend._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY)
raise ValueError("Digest too large for key size. Use a larger "
"key.")
return self._backend._ffi.buffer(sig_buf)[:sig_len[0]]
def _finalize_pss(self, evp_md):
data_to_sign = self._hash_ctx.finalize()
padded = self._backend._ffi.new("unsigned char[]", self._pkey_size)
res = self._backend._lib.RSA_padding_add_PKCS1_PSS(
self._private_key._rsa_cdata,
padded,
data_to_sign,
evp_md,
_get_rsa_pss_salt_length(
self._padding,
self._private_key.key_size,
len(data_to_sign)
)
)
if res != 1:
errors = self._backend._consume_errors()
assert errors[0].lib == self._backend._lib.ERR_LIB_RSA
assert (errors[0].reason ==
self._backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE)
raise ValueError("Salt length too long for key size. Try using "
"MAX_LENGTH instead.")
sig_buf = self._backend._ffi.new("char[]", self._pkey_size)
sig_len = self._backend._lib.RSA_private_encrypt(
self._pkey_size,
padded,
sig_buf,
self._private_key._rsa_cdata,
self._backend._lib.RSA_NO_PADDING
)
self._backend.openssl_assert(sig_len != -1)
return self._backend._ffi.buffer(sig_buf)[:sig_len]
@utils.register_interface(AsymmetricVerificationContext)
class _RSAVerificationContext(object):
def __init__(self, backend, public_key, signature, padding, algorithm):
self._backend = backend
self._public_key = public_key
self._signature = signature
if not isinstance(padding, AsymmetricPadding):
raise TypeError("Expected provider of AsymmetricPadding.")
self._pkey_size = self._backend._lib.EVP_PKEY_size(
self._public_key._evp_pkey
)
self._backend.openssl_assert(self._pkey_size > 0)
if isinstance(padding, PKCS1v15):
if self._backend._lib.Cryptography_HAS_PKEY_CTX:
self._verify_method = self._verify_pkey_ctx
self._padding_enum = self._backend._lib.RSA_PKCS1_PADDING
else:
self._verify_method = self._verify_pkcs1
elif isinstance(padding, PSS):
if not isinstance(padding._mgf, MGF1):
raise UnsupportedAlgorithm(
"Only MGF1 is supported by this backend.",
_Reasons.UNSUPPORTED_MGF
)
# Size of key in bytes - 2 is the maximum
# PSS signature length (salt length is checked later)
if self._pkey_size - algorithm.digest_size - 2 < 0:
raise ValueError(
"Digest too large for key size. Check that you have the "
"correct key and digest algorithm."
)
if not self._backend._mgf1_hash_supported(padding._mgf._algorithm):
raise UnsupportedAlgorithm(
"When OpenSSL is older than 1.0.1 then only SHA1 is "
"supported with MGF1.",
_Reasons.UNSUPPORTED_HASH
)
if self._backend._lib.Cryptography_HAS_PKEY_CTX:
self._verify_method = self._verify_pkey_ctx
self._padding_enum = self._backend._lib.RSA_PKCS1_PSS_PADDING
else:
self._verify_method = self._verify_pss
else:
raise UnsupportedAlgorithm(
"{0} is not supported by this backend.".format(padding.name),
_Reasons.UNSUPPORTED_PADDING
)
self._padding = padding
self._algorithm = algorithm
self._hash_ctx = hashes.Hash(self._algorithm, self._backend)
def update(self, data):
self._hash_ctx.update(data)
def verify(self):
evp_md = self._backend._lib.EVP_get_digestbyname(
self._algorithm.name.encode("ascii"))
self._backend.openssl_assert(evp_md != self._backend._ffi.NULL)
self._verify_method(evp_md)
def _verify_pkey_ctx(self, evp_md):
pkey_ctx = self._backend._lib.EVP_PKEY_CTX_new(
self._public_key._evp_pkey, self._backend._ffi.NULL
)
self._backend.openssl_assert(pkey_ctx != self._backend._ffi.NULL)
pkey_ctx = self._backend._ffi.gc(pkey_ctx,
self._backend._lib.EVP_PKEY_CTX_free)
res = self._backend._lib.EVP_PKEY_verify_init(pkey_ctx)
self._backend.openssl_assert(res == 1)
res = self._backend._lib.EVP_PKEY_CTX_set_signature_md(
pkey_ctx, evp_md)
self._backend.openssl_assert(res > 0)
res = self._backend._lib.EVP_PKEY_CTX_set_rsa_padding(
pkey_ctx, self._padding_enum)
self._backend.openssl_assert(res > 0)
if isinstance(self._padding, PSS):
res = self._backend._lib.EVP_PKEY_CTX_set_rsa_pss_saltlen(
pkey_ctx,
_get_rsa_pss_salt_length(
self._padding,
self._public_key.key_size,
self._hash_ctx.algorithm.digest_size
)
)
self._backend.openssl_assert(res > 0)
if self._backend._lib.Cryptography_HAS_MGF1_MD:
# MGF1 MD is configurable in OpenSSL 1.0.1+
mgf1_md = self._backend._lib.EVP_get_digestbyname(
self._padding._mgf._algorithm.name.encode("ascii"))
self._backend.openssl_assert(
mgf1_md != self._backend._ffi.NULL
)
res = self._backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md(
pkey_ctx, mgf1_md
)
self._backend.openssl_assert(res > 0)
data_to_verify = self._hash_ctx.finalize()
res = self._backend._lib.EVP_PKEY_verify(
pkey_ctx,
self._signature,
len(self._signature),
data_to_verify,
len(data_to_verify)
)
# The previous call can return negative numbers in the event of an
# error. This is not a signature failure but we need to fail if it
# occurs.
self._backend.openssl_assert(res >= 0)
if res == 0:
errors = self._backend._consume_errors()
assert errors
raise InvalidSignature
def _verify_pkcs1(self, evp_md):
if self._hash_ctx._ctx is None:
raise AlreadyFinalized("Context has already been finalized.")
res = self._backend._lib.EVP_VerifyFinal(
self._hash_ctx._ctx._ctx,
self._signature,
len(self._signature),
self._public_key._evp_pkey
)
self._hash_ctx.finalize()
# The previous call can return negative numbers in the event of an
# error. This is not a signature failure but we need to fail if it
# occurs.
self._backend.openssl_assert(res >= 0)
if res == 0:
errors = self._backend._consume_errors()
assert errors
raise InvalidSignature
def _verify_pss(self, evp_md):
buf = self._backend._ffi.new("unsigned char[]", self._pkey_size)
res = self._backend._lib.RSA_public_decrypt(
len(self._signature),
self._signature,
buf,
self._public_key._rsa_cdata,
self._backend._lib.RSA_NO_PADDING
)
if res != self._pkey_size:
errors = self._backend._consume_errors()
assert errors
raise InvalidSignature
data_to_verify = self._hash_ctx.finalize()
res = self._backend._lib.RSA_verify_PKCS1_PSS(
self._public_key._rsa_cdata,
data_to_verify,
evp_md,
buf,
_get_rsa_pss_salt_length(
self._padding,
self._public_key.key_size,
len(data_to_verify)
)
)
if res != 1:
errors = self._backend._consume_errors()
assert errors
raise InvalidSignature
@utils.register_interface(RSAPrivateKeyWithSerialization)
class _RSAPrivateKey(object):
def __init__(self, backend, rsa_cdata, evp_pkey):
self._backend = backend
self._rsa_cdata = rsa_cdata
self._evp_pkey = evp_pkey
self._key_size = self._backend._lib.BN_num_bits(self._rsa_cdata.n)
key_size = utils.read_only_property("_key_size")
def signer(self, padding, algorithm):
return _RSASignatureContext(self._backend, self, padding, algorithm)
def decrypt(self, ciphertext, padding):
key_size_bytes = int(math.ceil(self.key_size / 8.0))
if key_size_bytes != len(ciphertext):
raise ValueError("Ciphertext length must be equal to key size.")
return _enc_dec_rsa(self._backend, self, ciphertext, padding)
def public_key(self):
ctx = self._backend._lib.RSA_new()
self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
ctx = self._backend._ffi.gc(ctx, self._backend._lib.RSA_free)
ctx.e = self._backend._lib.BN_dup(self._rsa_cdata.e)
ctx.n = self._backend._lib.BN_dup(self._rsa_cdata.n)
res = self._backend._lib.RSA_blinding_on(ctx, self._backend._ffi.NULL)
self._backend.openssl_assert(res == 1)
evp_pkey = self._backend._rsa_cdata_to_evp_pkey(ctx)
return _RSAPublicKey(self._backend, ctx, evp_pkey)
def private_numbers(self):
return rsa.RSAPrivateNumbers(
p=self._backend._bn_to_int(self._rsa_cdata.p),
q=self._backend._bn_to_int(self._rsa_cdata.q),
d=self._backend._bn_to_int(self._rsa_cdata.d),
dmp1=self._backend._bn_to_int(self._rsa_cdata.dmp1),
dmq1=self._backend._bn_to_int(self._rsa_cdata.dmq1),
iqmp=self._backend._bn_to_int(self._rsa_cdata.iqmp),
public_numbers=rsa.RSAPublicNumbers(
e=self._backend._bn_to_int(self._rsa_cdata.e),
n=self._backend._bn_to_int(self._rsa_cdata.n),
)
)
def private_bytes(self, encoding, format, encryption_algorithm):
return self._backend._private_key_bytes(
encoding,
format,
encryption_algorithm,
self._evp_pkey,
self._rsa_cdata
)
@utils.register_interface(RSAPublicKeyWithSerialization)
class _RSAPublicKey(object):
def __init__(self, backend, rsa_cdata, evp_pkey):
self._backend = backend
self._rsa_cdata = rsa_cdata
self._evp_pkey = evp_pkey
self._key_size = self._backend._lib.BN_num_bits(self._rsa_cdata.n)
key_size = utils.read_only_property("_key_size")
def verifier(self, signature, padding, algorithm):
if not isinstance(signature, bytes):
raise TypeError("signature must be bytes.")
return _RSAVerificationContext(
self._backend, self, signature, padding, algorithm
)
def encrypt(self, plaintext, padding):
return _enc_dec_rsa(self._backend, self, plaintext, padding)
def public_numbers(self):
return rsa.RSAPublicNumbers(
e=self._backend._bn_to_int(self._rsa_cdata.e),
n=self._backend._bn_to_int(self._rsa_cdata.n),
)
def public_bytes(self, encoding, format):
return self._backend._public_key_bytes(
encoding,
format,
self._evp_pkey,
self._rsa_cdata
)
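A hedged sketch of how these classes are reached through the public rsa API; the key size, hashes, and messages below are illustrative. Note that _enc_dec_rsa above only accepts SHA1 for OAEP and its MGF1, so the encryption example uses SHA1:

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding, rsa

private_key = rsa.generate_private_key(
    public_exponent=65537, key_size=2048, backend=default_backend()
)

# PSS signing through the signer() context implemented above.
signer = private_key.signer(
    padding.PSS(
        mgf=padding.MGF1(hashes.SHA256()),
        salt_length=padding.PSS.MAX_LENGTH,
    ),
    hashes.SHA256(),
)
signer.update(b"example message")
signature = signer.finalize()

# OAEP encryption and decryption.
oaep = padding.OAEP(
    mgf=padding.MGF1(algorithm=hashes.SHA1()),
    algorithm=hashes.SHA1(),
    label=None,
)
ciphertext = private_key.public_key().encrypt(b"secret", oaep)
plaintext = private_key.decrypt(ciphertext, oaep)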

View file

@@ -0,0 +1,26 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import six
def _truncate_digest(digest, order_bits):
digest_len = len(digest)
if 8 * digest_len > order_bits:
digest_len = (order_bits + 7) // 8
digest = digest[:digest_len]
if 8 * digest_len > order_bits:
rshift = 8 - (order_bits & 0x7)
assert 0 < rshift < 8
mask = 0xFF >> rshift << rshift
# Set the bottom rshift bits to 0
digest = digest[:-1] + six.int2byte(six.indexbytes(digest, -1) & mask)
return digest
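A standalone sketch of the masking step for a case where the order is not byte-aligned; the 163-bit order (e.g. sect163k1) and all-0xFF digest are illustrative, and this mirrors, but is separate from, _truncate_digest above:

import six


def truncate_digest_sketch(digest, order_bits):
    # Same logic as _truncate_digest above, kept separate for illustration.
    digest_len = len(digest)
    if 8 * digest_len > order_bits:
        digest_len = (order_bits + 7) // 8
        digest = digest[:digest_len]
    if 8 * digest_len > order_bits:
        rshift = 8 - (order_bits & 0x7)
        mask = 0xFF >> rshift << rshift
        digest = digest[:-1] + six.int2byte(six.indexbytes(digest, -1) & mask)
    return digest


# For a 163-bit order and a 32-byte SHA-256 digest: the digest is first cut to
# (163 + 7) // 8 == 21 bytes, then the low 8 - (163 & 7) == 5 bits of the last
# byte are cleared (mask 0xE0), leaving exactly 163 significant bits.
truncated = truncate_digest_sketch(b"\xff" * 32, 163)
assert len(truncated) == 21 and six.indexbytes(truncated, -1) == 0xE0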

View file

@@ -0,0 +1,940 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import datetime
import ipaddress
from email.utils import parseaddr
import idna
import six
from six.moves import urllib_parse
from cryptography import utils, x509
from cryptography.exceptions import UnsupportedAlgorithm
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.x509.oid import (
CRLExtensionOID, CertificatePoliciesOID, ExtensionOID
)
def _obj2txt(backend, obj):
# Set to 80 on the recommendation of
# https://www.openssl.org/docs/crypto/OBJ_nid2ln.html#return_values
buf_len = 80
buf = backend._ffi.new("char[]", buf_len)
res = backend._lib.OBJ_obj2txt(buf, buf_len, obj, 1)
backend.openssl_assert(res > 0)
return backend._ffi.buffer(buf, res)[:].decode()
def _decode_x509_name_entry(backend, x509_name_entry):
obj = backend._lib.X509_NAME_ENTRY_get_object(x509_name_entry)
backend.openssl_assert(obj != backend._ffi.NULL)
data = backend._lib.X509_NAME_ENTRY_get_data(x509_name_entry)
backend.openssl_assert(data != backend._ffi.NULL)
value = backend._asn1_string_to_utf8(data)
oid = _obj2txt(backend, obj)
return x509.NameAttribute(x509.ObjectIdentifier(oid), value)
def _decode_x509_name(backend, x509_name):
count = backend._lib.X509_NAME_entry_count(x509_name)
attributes = []
for x in range(count):
entry = backend._lib.X509_NAME_get_entry(x509_name, x)
attributes.append(_decode_x509_name_entry(backend, entry))
return x509.Name(attributes)
def _decode_general_names(backend, gns):
num = backend._lib.sk_GENERAL_NAME_num(gns)
names = []
for i in range(num):
gn = backend._lib.sk_GENERAL_NAME_value(gns, i)
backend.openssl_assert(gn != backend._ffi.NULL)
names.append(_decode_general_name(backend, gn))
return names
def _decode_general_name(backend, gn):
if gn.type == backend._lib.GEN_DNS:
data = backend._asn1_string_to_bytes(gn.d.dNSName)
if not data:
decoded = u""
elif data.startswith(b"*."):
# This is a wildcard name. We need to remove the leading wildcard,
# IDNA decode, then re-add the wildcard. Wildcard characters should
# always be left-most (RFC 2595 section 2.4).
decoded = u"*." + idna.decode(data[2:])
else:
            # Not a wildcard, decode away. If the string has a * in it
            # anywhere, idna.decode will raise an InvalidCodepoint error.
decoded = idna.decode(data)
if data.startswith(b"."):
# idna strips leading periods. Name constraints can have that
# so we need to re-add it. Sigh.
decoded = u"." + decoded
return x509.DNSName(decoded)
elif gn.type == backend._lib.GEN_URI:
data = backend._asn1_string_to_ascii(gn.d.uniformResourceIdentifier)
parsed = urllib_parse.urlparse(data)
if parsed.hostname:
hostname = idna.decode(parsed.hostname)
else:
hostname = ""
if parsed.port:
netloc = hostname + u":" + six.text_type(parsed.port)
else:
netloc = hostname
# Note that building a URL in this fashion means it should be
# semantically indistinguishable from the original but is not
# guaranteed to be exactly the same.
uri = urllib_parse.urlunparse((
parsed.scheme,
netloc,
parsed.path,
parsed.params,
parsed.query,
parsed.fragment
))
return x509.UniformResourceIdentifier(uri)
elif gn.type == backend._lib.GEN_RID:
oid = _obj2txt(backend, gn.d.registeredID)
return x509.RegisteredID(x509.ObjectIdentifier(oid))
elif gn.type == backend._lib.GEN_IPADD:
data = backend._asn1_string_to_bytes(gn.d.iPAddress)
data_len = len(data)
if data_len == 8 or data_len == 32:
# This is an IPv4 or IPv6 Network and not a single IP. This
# type of data appears in Name Constraints. Unfortunately,
# ipaddress doesn't support packed bytes + netmask. Additionally,
# IPv6Network can only handle CIDR rather than the full 16 byte
# netmask. To handle this we convert the netmask to integer, then
# find the first 0 bit, which will be the prefix. If another 1
# bit is present after that the netmask is invalid.
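            # For example (illustrative), 8 bytes encoding 192.0.2.0 with
            # netmask 255.255.255.0 yield bin(netmask) of 24 one bits followed
            # by 8 zero bits, so prefix == 24 and the result is the network
            # 192.0.2.0/24.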
base = ipaddress.ip_address(data[:data_len // 2])
netmask = ipaddress.ip_address(data[data_len // 2:])
bits = bin(int(netmask))[2:]
prefix = bits.find('0')
# If no 0 bits are found it is a /32 or /128
if prefix == -1:
prefix = len(bits)
if "1" in bits[prefix:]:
raise ValueError("Invalid netmask")
ip = ipaddress.ip_network(base.exploded + u"/{0}".format(prefix))
else:
ip = ipaddress.ip_address(data)
return x509.IPAddress(ip)
elif gn.type == backend._lib.GEN_DIRNAME:
return x509.DirectoryName(
_decode_x509_name(backend, gn.d.directoryName)
)
elif gn.type == backend._lib.GEN_EMAIL:
data = backend._asn1_string_to_ascii(gn.d.rfc822Name)
name, address = parseaddr(data)
parts = address.split(u"@")
if name or not address:
# parseaddr has found a name (e.g. Name <email>) or the entire
# value is an empty string.
raise ValueError("Invalid rfc822name value")
elif len(parts) == 1:
# Single label email name. This is valid for local delivery. No
# IDNA decoding can be done since there is no domain component.
return x509.RFC822Name(address)
else:
# A normal email of the form user@domain.com. Let's attempt to
# decode the domain component and return the entire address.
return x509.RFC822Name(
parts[0] + u"@" + idna.decode(parts[1])
)
elif gn.type == backend._lib.GEN_OTHERNAME:
type_id = _obj2txt(backend, gn.d.otherName.type_id)
value = backend._asn1_to_der(gn.d.otherName.value)
return x509.OtherName(x509.ObjectIdentifier(type_id), value)
else:
# x400Address or ediPartyName
raise x509.UnsupportedGeneralNameType(
"{0} is not a supported type".format(
x509._GENERAL_NAMES.get(gn.type, gn.type)
),
gn.type
)
def _decode_ocsp_no_check(backend, ext):
return x509.OCSPNoCheck()
class _X509ExtensionParser(object):
def __init__(self, ext_count, get_ext, handlers, unsupported_exts=None):
self.ext_count = ext_count
self.get_ext = get_ext
self.handlers = handlers
self.unsupported_exts = unsupported_exts
def parse(self, backend, x509_obj):
extensions = []
seen_oids = set()
for i in range(self.ext_count(backend, x509_obj)):
ext = self.get_ext(backend, x509_obj, i)
backend.openssl_assert(ext != backend._ffi.NULL)
crit = backend._lib.X509_EXTENSION_get_critical(ext)
critical = crit == 1
oid = x509.ObjectIdentifier(_obj2txt(backend, ext.object))
if oid in seen_oids:
raise x509.DuplicateExtension(
"Duplicate {0} extension found".format(oid), oid
)
try:
handler = self.handlers[oid]
except KeyError:
if critical:
raise x509.UnsupportedExtension(
"Critical extension {0} is not currently supported"
.format(oid), oid
)
else:
# For extensions which are not supported by OpenSSL we pass the
# extension object directly to the parsing routine so it can
# be decoded manually.
if self.unsupported_exts and oid in self.unsupported_exts:
ext_data = ext
else:
ext_data = backend._lib.X509V3_EXT_d2i(ext)
if ext_data == backend._ffi.NULL:
backend._consume_errors()
raise ValueError(
"The {0} extension is invalid and can't be "
"parsed".format(oid)
)
value = handler(backend, ext_data)
extensions.append(x509.Extension(oid, critical, value))
seen_oids.add(oid)
return x509.Extensions(extensions)
@utils.register_interface(x509.Certificate)
class _Certificate(object):
def __init__(self, backend, x509):
self._backend = backend
self._x509 = x509
def __repr__(self):
return "<Certificate(subject={0}, ...)>".format(self.subject)
def __eq__(self, other):
if not isinstance(other, x509.Certificate):
return NotImplemented
res = self._backend._lib.X509_cmp(self._x509, other._x509)
return res == 0
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(self.public_bytes(serialization.Encoding.DER))
def fingerprint(self, algorithm):
h = hashes.Hash(algorithm, self._backend)
h.update(self.public_bytes(serialization.Encoding.DER))
return h.finalize()
@property
def version(self):
version = self._backend._lib.X509_get_version(self._x509)
if version == 0:
return x509.Version.v1
elif version == 2:
return x509.Version.v3
else:
raise x509.InvalidVersion(
"{0} is not a valid X509 version".format(version), version
)
@property
def serial(self):
asn1_int = self._backend._lib.X509_get_serialNumber(self._x509)
self._backend.openssl_assert(asn1_int != self._backend._ffi.NULL)
return self._backend._asn1_integer_to_int(asn1_int)
def public_key(self):
pkey = self._backend._lib.X509_get_pubkey(self._x509)
if pkey == self._backend._ffi.NULL:
# Remove errors from the stack.
self._backend._consume_errors()
raise ValueError("Certificate public key is of an unknown type")
pkey = self._backend._ffi.gc(pkey, self._backend._lib.EVP_PKEY_free)
return self._backend._evp_pkey_to_public_key(pkey)
@property
def not_valid_before(self):
asn1_time = self._backend._lib.X509_get_notBefore(self._x509)
return self._backend._parse_asn1_time(asn1_time)
@property
def not_valid_after(self):
asn1_time = self._backend._lib.X509_get_notAfter(self._x509)
return self._backend._parse_asn1_time(asn1_time)
@property
def issuer(self):
issuer = self._backend._lib.X509_get_issuer_name(self._x509)
self._backend.openssl_assert(issuer != self._backend._ffi.NULL)
return _decode_x509_name(self._backend, issuer)
@property
def subject(self):
subject = self._backend._lib.X509_get_subject_name(self._x509)
self._backend.openssl_assert(subject != self._backend._ffi.NULL)
return _decode_x509_name(self._backend, subject)
@property
def signature_hash_algorithm(self):
oid = _obj2txt(self._backend, self._x509.sig_alg.algorithm)
try:
return x509._SIG_OIDS_TO_HASH[oid]
except KeyError:
raise UnsupportedAlgorithm(
"Signature algorithm OID:{0} not recognized".format(oid)
)
@property
def extensions(self):
return _CERTIFICATE_EXTENSION_PARSER.parse(self._backend, self._x509)
def public_bytes(self, encoding):
bio = self._backend._create_mem_bio()
if encoding is serialization.Encoding.PEM:
res = self._backend._lib.PEM_write_bio_X509(bio, self._x509)
elif encoding is serialization.Encoding.DER:
res = self._backend._lib.i2d_X509_bio(bio, self._x509)
else:
raise TypeError("encoding must be an item from the Encoding enum")
self._backend.openssl_assert(res == 1)
return self._backend._read_mem_bio(bio)
def _decode_certificate_policies(backend, cp):
cp = backend._ffi.cast("Cryptography_STACK_OF_POLICYINFO *", cp)
cp = backend._ffi.gc(cp, backend._lib.sk_POLICYINFO_free)
num = backend._lib.sk_POLICYINFO_num(cp)
certificate_policies = []
for i in range(num):
qualifiers = None
pi = backend._lib.sk_POLICYINFO_value(cp, i)
oid = x509.ObjectIdentifier(_obj2txt(backend, pi.policyid))
if pi.qualifiers != backend._ffi.NULL:
qnum = backend._lib.sk_POLICYQUALINFO_num(pi.qualifiers)
qualifiers = []
for j in range(qnum):
pqi = backend._lib.sk_POLICYQUALINFO_value(
pi.qualifiers, j
)
pqualid = x509.ObjectIdentifier(
_obj2txt(backend, pqi.pqualid)
)
if pqualid == CertificatePoliciesOID.CPS_QUALIFIER:
cpsuri = backend._ffi.buffer(
pqi.d.cpsuri.data, pqi.d.cpsuri.length
)[:].decode('ascii')
qualifiers.append(cpsuri)
else:
assert pqualid == CertificatePoliciesOID.CPS_USER_NOTICE
user_notice = _decode_user_notice(
backend, pqi.d.usernotice
)
qualifiers.append(user_notice)
certificate_policies.append(
x509.PolicyInformation(oid, qualifiers)
)
return x509.CertificatePolicies(certificate_policies)
def _decode_user_notice(backend, un):
explicit_text = None
notice_reference = None
if un.exptext != backend._ffi.NULL:
explicit_text = backend._asn1_string_to_utf8(un.exptext)
if un.noticeref != backend._ffi.NULL:
organization = backend._asn1_string_to_utf8(un.noticeref.organization)
num = backend._lib.sk_ASN1_INTEGER_num(
un.noticeref.noticenos
)
notice_numbers = []
for i in range(num):
asn1_int = backend._lib.sk_ASN1_INTEGER_value(
un.noticeref.noticenos, i
)
notice_num = backend._asn1_integer_to_int(asn1_int)
notice_numbers.append(notice_num)
notice_reference = x509.NoticeReference(
organization, notice_numbers
)
return x509.UserNotice(notice_reference, explicit_text)
def _decode_basic_constraints(backend, bc_st):
basic_constraints = backend._ffi.cast("BASIC_CONSTRAINTS *", bc_st)
basic_constraints = backend._ffi.gc(
basic_constraints, backend._lib.BASIC_CONSTRAINTS_free
)
# The byte representation of an ASN.1 boolean true is \xff. OpenSSL
# chooses to just map this to its ordinal value, so true is 255 and
# false is 0.
ca = basic_constraints.ca == 255
if basic_constraints.pathlen == backend._ffi.NULL:
path_length = None
else:
path_length = backend._asn1_integer_to_int(basic_constraints.pathlen)
return x509.BasicConstraints(ca, path_length)
def _decode_subject_key_identifier(backend, asn1_string):
asn1_string = backend._ffi.cast("ASN1_OCTET_STRING *", asn1_string)
asn1_string = backend._ffi.gc(
asn1_string, backend._lib.ASN1_OCTET_STRING_free
)
return x509.SubjectKeyIdentifier(
backend._ffi.buffer(asn1_string.data, asn1_string.length)[:]
)
def _decode_authority_key_identifier(backend, akid):
akid = backend._ffi.cast("AUTHORITY_KEYID *", akid)
akid = backend._ffi.gc(akid, backend._lib.AUTHORITY_KEYID_free)
key_identifier = None
authority_cert_issuer = None
authority_cert_serial_number = None
if akid.keyid != backend._ffi.NULL:
key_identifier = backend._ffi.buffer(
akid.keyid.data, akid.keyid.length
)[:]
if akid.issuer != backend._ffi.NULL:
authority_cert_issuer = _decode_general_names(
backend, akid.issuer
)
if akid.serial != backend._ffi.NULL:
authority_cert_serial_number = backend._asn1_integer_to_int(
akid.serial
)
return x509.AuthorityKeyIdentifier(
key_identifier, authority_cert_issuer, authority_cert_serial_number
)
def _decode_authority_information_access(backend, aia):
aia = backend._ffi.cast("Cryptography_STACK_OF_ACCESS_DESCRIPTION *", aia)
aia = backend._ffi.gc(aia, backend._lib.sk_ACCESS_DESCRIPTION_free)
num = backend._lib.sk_ACCESS_DESCRIPTION_num(aia)
access_descriptions = []
for i in range(num):
ad = backend._lib.sk_ACCESS_DESCRIPTION_value(aia, i)
backend.openssl_assert(ad.method != backend._ffi.NULL)
oid = x509.ObjectIdentifier(_obj2txt(backend, ad.method))
backend.openssl_assert(ad.location != backend._ffi.NULL)
gn = _decode_general_name(backend, ad.location)
access_descriptions.append(x509.AccessDescription(oid, gn))
return x509.AuthorityInformationAccess(access_descriptions)
def _decode_key_usage(backend, bit_string):
bit_string = backend._ffi.cast("ASN1_BIT_STRING *", bit_string)
bit_string = backend._ffi.gc(bit_string, backend._lib.ASN1_BIT_STRING_free)
get_bit = backend._lib.ASN1_BIT_STRING_get_bit
digital_signature = get_bit(bit_string, 0) == 1
content_commitment = get_bit(bit_string, 1) == 1
key_encipherment = get_bit(bit_string, 2) == 1
data_encipherment = get_bit(bit_string, 3) == 1
key_agreement = get_bit(bit_string, 4) == 1
key_cert_sign = get_bit(bit_string, 5) == 1
crl_sign = get_bit(bit_string, 6) == 1
encipher_only = get_bit(bit_string, 7) == 1
decipher_only = get_bit(bit_string, 8) == 1
return x509.KeyUsage(
digital_signature,
content_commitment,
key_encipherment,
data_encipherment,
key_agreement,
key_cert_sign,
crl_sign,
encipher_only,
decipher_only
)
def _decode_general_names_extension(backend, gns):
gns = backend._ffi.cast("GENERAL_NAMES *", gns)
gns = backend._ffi.gc(gns, backend._lib.GENERAL_NAMES_free)
general_names = _decode_general_names(backend, gns)
return general_names
def _decode_subject_alt_name(backend, ext):
return x509.SubjectAlternativeName(
_decode_general_names_extension(backend, ext)
)
def _decode_issuer_alt_name(backend, ext):
return x509.IssuerAlternativeName(
_decode_general_names_extension(backend, ext)
)
def _decode_name_constraints(backend, nc):
nc = backend._ffi.cast("NAME_CONSTRAINTS *", nc)
nc = backend._ffi.gc(nc, backend._lib.NAME_CONSTRAINTS_free)
permitted = _decode_general_subtrees(backend, nc.permittedSubtrees)
excluded = _decode_general_subtrees(backend, nc.excludedSubtrees)
return x509.NameConstraints(
permitted_subtrees=permitted, excluded_subtrees=excluded
)
def _decode_general_subtrees(backend, stack_subtrees):
if stack_subtrees == backend._ffi.NULL:
return None
num = backend._lib.sk_GENERAL_SUBTREE_num(stack_subtrees)
subtrees = []
for i in range(num):
obj = backend._lib.sk_GENERAL_SUBTREE_value(stack_subtrees, i)
backend.openssl_assert(obj != backend._ffi.NULL)
name = _decode_general_name(backend, obj.base)
subtrees.append(name)
return subtrees
def _decode_extended_key_usage(backend, sk):
sk = backend._ffi.cast("Cryptography_STACK_OF_ASN1_OBJECT *", sk)
sk = backend._ffi.gc(sk, backend._lib.sk_ASN1_OBJECT_free)
num = backend._lib.sk_ASN1_OBJECT_num(sk)
ekus = []
for i in range(num):
obj = backend._lib.sk_ASN1_OBJECT_value(sk, i)
backend.openssl_assert(obj != backend._ffi.NULL)
oid = x509.ObjectIdentifier(_obj2txt(backend, obj))
ekus.append(oid)
return x509.ExtendedKeyUsage(ekus)
_DISTPOINT_TYPE_FULLNAME = 0
_DISTPOINT_TYPE_RELATIVENAME = 1
def _decode_crl_distribution_points(backend, cdps):
cdps = backend._ffi.cast("Cryptography_STACK_OF_DIST_POINT *", cdps)
cdps = backend._ffi.gc(cdps, backend._lib.sk_DIST_POINT_free)
num = backend._lib.sk_DIST_POINT_num(cdps)
dist_points = []
for i in range(num):
full_name = None
relative_name = None
crl_issuer = None
reasons = None
cdp = backend._lib.sk_DIST_POINT_value(cdps, i)
if cdp.reasons != backend._ffi.NULL:
# We will check each bit from RFC 5280
# ReasonFlags ::= BIT STRING {
# unused (0),
# keyCompromise (1),
# cACompromise (2),
# affiliationChanged (3),
# superseded (4),
# cessationOfOperation (5),
# certificateHold (6),
# privilegeWithdrawn (7),
# aACompromise (8) }
reasons = []
get_bit = backend._lib.ASN1_BIT_STRING_get_bit
if get_bit(cdp.reasons, 1):
reasons.append(x509.ReasonFlags.key_compromise)
if get_bit(cdp.reasons, 2):
reasons.append(x509.ReasonFlags.ca_compromise)
if get_bit(cdp.reasons, 3):
reasons.append(x509.ReasonFlags.affiliation_changed)
if get_bit(cdp.reasons, 4):
reasons.append(x509.ReasonFlags.superseded)
if get_bit(cdp.reasons, 5):
reasons.append(x509.ReasonFlags.cessation_of_operation)
if get_bit(cdp.reasons, 6):
reasons.append(x509.ReasonFlags.certificate_hold)
if get_bit(cdp.reasons, 7):
reasons.append(x509.ReasonFlags.privilege_withdrawn)
if get_bit(cdp.reasons, 8):
reasons.append(x509.ReasonFlags.aa_compromise)
reasons = frozenset(reasons)
if cdp.CRLissuer != backend._ffi.NULL:
crl_issuer = _decode_general_names(backend, cdp.CRLissuer)
# Certificates may have a crl_issuer/reasons and no distribution
# point so make sure it's not null.
if cdp.distpoint != backend._ffi.NULL:
            # Type 0 is fullName; there is no #define for it in the code.
if cdp.distpoint.type == _DISTPOINT_TYPE_FULLNAME:
full_name = _decode_general_names(
backend, cdp.distpoint.name.fullname
)
            # The OpenSSL code doesn't test for a specific type for
            # relativename; everything that isn't fullname is considered
            # relativename.
else:
rns = cdp.distpoint.name.relativename
rnum = backend._lib.sk_X509_NAME_ENTRY_num(rns)
attributes = []
for i in range(rnum):
rn = backend._lib.sk_X509_NAME_ENTRY_value(
rns, i
)
backend.openssl_assert(rn != backend._ffi.NULL)
attributes.append(
_decode_x509_name_entry(backend, rn)
)
relative_name = x509.Name(attributes)
dist_points.append(
x509.DistributionPoint(
full_name, relative_name, reasons, crl_issuer
)
)
return x509.CRLDistributionPoints(dist_points)
def _decode_inhibit_any_policy(backend, asn1_int):
asn1_int = backend._ffi.cast("ASN1_INTEGER *", asn1_int)
asn1_int = backend._ffi.gc(asn1_int, backend._lib.ASN1_INTEGER_free)
skip_certs = backend._asn1_integer_to_int(asn1_int)
return x509.InhibitAnyPolicy(skip_certs)
_CRL_REASON_CODE_TO_ENUM = {
0: x509.ReasonFlags.unspecified,
1: x509.ReasonFlags.key_compromise,
2: x509.ReasonFlags.ca_compromise,
3: x509.ReasonFlags.affiliation_changed,
4: x509.ReasonFlags.superseded,
5: x509.ReasonFlags.cessation_of_operation,
6: x509.ReasonFlags.certificate_hold,
8: x509.ReasonFlags.remove_from_crl,
9: x509.ReasonFlags.privilege_withdrawn,
10: x509.ReasonFlags.aa_compromise,
}
def _decode_crl_reason(backend, enum):
enum = backend._ffi.cast("ASN1_ENUMERATED *", enum)
enum = backend._ffi.gc(enum, backend._lib.ASN1_ENUMERATED_free)
code = backend._lib.ASN1_ENUMERATED_get(enum)
try:
return _CRL_REASON_CODE_TO_ENUM[code]
except KeyError:
raise ValueError("Unsupported reason code: {0}".format(code))
def _decode_invalidity_date(backend, inv_date):
generalized_time = backend._ffi.cast(
"ASN1_GENERALIZEDTIME *", inv_date
)
generalized_time = backend._ffi.gc(
generalized_time, backend._lib.ASN1_GENERALIZEDTIME_free
)
time = backend._ffi.string(
backend._lib.ASN1_STRING_data(
backend._ffi.cast("ASN1_STRING *", generalized_time)
)
).decode("ascii")
return datetime.datetime.strptime(time, "%Y%m%d%H%M%SZ")
def _decode_cert_issuer(backend, ext):
"""
This handler decodes the CertificateIssuer entry extension directly
from the X509_EXTENSION object. This is necessary because this entry
extension is not directly supported by OpenSSL 0.9.8.
"""
data_ptr_ptr = backend._ffi.new("const unsigned char **")
data_ptr_ptr[0] = ext.value.data
gns = backend._lib.d2i_GENERAL_NAMES(
backend._ffi.NULL, data_ptr_ptr, ext.value.length
)
    # Check that the result of d2i_GENERAL_NAMES() is valid. Usually this is
    # covered in _X509ExtensionParser, but since we are responsible for
    # decoding this entry extension ourselves, we have to do it here.
if gns == backend._ffi.NULL:
backend._consume_errors()
raise ValueError(
"The {0} extension is corrupted and can't be parsed".format(
CRLExtensionOID.CERTIFICATE_ISSUER))
gns = backend._ffi.gc(gns, backend._lib.GENERAL_NAMES_free)
return x509.GeneralNames(_decode_general_names(backend, gns))
@utils.register_interface(x509.RevokedCertificate)
class _RevokedCertificate(object):
def __init__(self, backend, x509_revoked):
self._backend = backend
self._x509_revoked = x509_revoked
@property
def serial_number(self):
asn1_int = self._x509_revoked.serialNumber
self._backend.openssl_assert(asn1_int != self._backend._ffi.NULL)
return self._backend._asn1_integer_to_int(asn1_int)
@property
def revocation_date(self):
return self._backend._parse_asn1_time(
self._x509_revoked.revocationDate)
@property
def extensions(self):
return _REVOKED_CERTIFICATE_EXTENSION_PARSER.parse(
self._backend, self._x509_revoked
)
@utils.register_interface(x509.CertificateRevocationList)
class _CertificateRevocationList(object):
def __init__(self, backend, x509_crl):
self._backend = backend
self._x509_crl = x509_crl
def __eq__(self, other):
if not isinstance(other, x509.CertificateRevocationList):
return NotImplemented
res = self._backend._lib.X509_CRL_cmp(self._x509_crl, other._x509_crl)
return res == 0
def __ne__(self, other):
return not self == other
def fingerprint(self, algorithm):
h = hashes.Hash(algorithm, self._backend)
bio = self._backend._create_mem_bio()
res = self._backend._lib.i2d_X509_CRL_bio(
bio, self._x509_crl
)
self._backend.openssl_assert(res == 1)
der = self._backend._read_mem_bio(bio)
h.update(der)
return h.finalize()
@property
def signature_hash_algorithm(self):
oid = _obj2txt(self._backend, self._x509_crl.sig_alg.algorithm)
try:
return x509._SIG_OIDS_TO_HASH[oid]
except KeyError:
raise UnsupportedAlgorithm(
"Signature algorithm OID:{0} not recognized".format(oid)
)
@property
def issuer(self):
issuer = self._backend._lib.X509_CRL_get_issuer(self._x509_crl)
self._backend.openssl_assert(issuer != self._backend._ffi.NULL)
return _decode_x509_name(self._backend, issuer)
@property
def next_update(self):
nu = self._backend._lib.X509_CRL_get_nextUpdate(self._x509_crl)
self._backend.openssl_assert(nu != self._backend._ffi.NULL)
return self._backend._parse_asn1_time(nu)
@property
def last_update(self):
lu = self._backend._lib.X509_CRL_get_lastUpdate(self._x509_crl)
self._backend.openssl_assert(lu != self._backend._ffi.NULL)
return self._backend._parse_asn1_time(lu)
def _revoked_certificates(self):
revoked = self._backend._lib.X509_CRL_get_REVOKED(self._x509_crl)
self._backend.openssl_assert(revoked != self._backend._ffi.NULL)
num = self._backend._lib.sk_X509_REVOKED_num(revoked)
revoked_list = []
for i in range(num):
r = self._backend._lib.sk_X509_REVOKED_value(revoked, i)
self._backend.openssl_assert(r != self._backend._ffi.NULL)
revoked_list.append(_RevokedCertificate(self._backend, r))
return revoked_list
def __iter__(self):
return iter(self._revoked_certificates())
def __getitem__(self, idx):
return self._revoked_certificates()[idx]
def __len__(self):
return len(self._revoked_certificates())
@property
def extensions(self):
raise NotImplementedError()
@utils.register_interface(x509.CertificateSigningRequest)
class _CertificateSigningRequest(object):
def __init__(self, backend, x509_req):
self._backend = backend
self._x509_req = x509_req
def __eq__(self, other):
if not isinstance(other, _CertificateSigningRequest):
return NotImplemented
self_bytes = self.public_bytes(serialization.Encoding.DER)
other_bytes = other.public_bytes(serialization.Encoding.DER)
return self_bytes == other_bytes
def __ne__(self, other):
return not self == other
def __hash__(self):
return hash(self.public_bytes(serialization.Encoding.DER))
def public_key(self):
pkey = self._backend._lib.X509_REQ_get_pubkey(self._x509_req)
self._backend.openssl_assert(pkey != self._backend._ffi.NULL)
pkey = self._backend._ffi.gc(pkey, self._backend._lib.EVP_PKEY_free)
return self._backend._evp_pkey_to_public_key(pkey)
@property
def subject(self):
subject = self._backend._lib.X509_REQ_get_subject_name(self._x509_req)
self._backend.openssl_assert(subject != self._backend._ffi.NULL)
return _decode_x509_name(self._backend, subject)
@property
def signature_hash_algorithm(self):
oid = _obj2txt(self._backend, self._x509_req.sig_alg.algorithm)
try:
return x509._SIG_OIDS_TO_HASH[oid]
except KeyError:
raise UnsupportedAlgorithm(
"Signature algorithm OID:{0} not recognized".format(oid)
)
@property
def extensions(self):
x509_exts = self._backend._lib.X509_REQ_get_extensions(self._x509_req)
return _CSR_EXTENSION_PARSER.parse(self._backend, x509_exts)
def public_bytes(self, encoding):
bio = self._backend._create_mem_bio()
if encoding is serialization.Encoding.PEM:
res = self._backend._lib.PEM_write_bio_X509_REQ(
bio, self._x509_req
)
elif encoding is serialization.Encoding.DER:
res = self._backend._lib.i2d_X509_REQ_bio(bio, self._x509_req)
else:
raise TypeError("encoding must be an item from the Encoding enum")
self._backend.openssl_assert(res == 1)
return self._backend._read_mem_bio(bio)
_EXTENSION_HANDLERS = {
ExtensionOID.BASIC_CONSTRAINTS: _decode_basic_constraints,
ExtensionOID.SUBJECT_KEY_IDENTIFIER: _decode_subject_key_identifier,
ExtensionOID.KEY_USAGE: _decode_key_usage,
ExtensionOID.SUBJECT_ALTERNATIVE_NAME: _decode_subject_alt_name,
ExtensionOID.EXTENDED_KEY_USAGE: _decode_extended_key_usage,
ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _decode_authority_key_identifier,
ExtensionOID.AUTHORITY_INFORMATION_ACCESS: (
_decode_authority_information_access
),
ExtensionOID.CERTIFICATE_POLICIES: _decode_certificate_policies,
ExtensionOID.CRL_DISTRIBUTION_POINTS: _decode_crl_distribution_points,
ExtensionOID.OCSP_NO_CHECK: _decode_ocsp_no_check,
ExtensionOID.INHIBIT_ANY_POLICY: _decode_inhibit_any_policy,
ExtensionOID.ISSUER_ALTERNATIVE_NAME: _decode_issuer_alt_name,
ExtensionOID.NAME_CONSTRAINTS: _decode_name_constraints,
}
_REVOKED_EXTENSION_HANDLERS = {
CRLExtensionOID.CRL_REASON: _decode_crl_reason,
CRLExtensionOID.INVALIDITY_DATE: _decode_invalidity_date,
CRLExtensionOID.CERTIFICATE_ISSUER: _decode_cert_issuer,
}
_REVOKED_UNSUPPORTED_EXTENSIONS = set([
CRLExtensionOID.CERTIFICATE_ISSUER,
])
_CERTIFICATE_EXTENSION_PARSER = _X509ExtensionParser(
ext_count=lambda backend, x: backend._lib.X509_get_ext_count(x),
get_ext=lambda backend, x, i: backend._lib.X509_get_ext(x, i),
handlers=_EXTENSION_HANDLERS
)
_CSR_EXTENSION_PARSER = _X509ExtensionParser(
ext_count=lambda backend, x: backend._lib.sk_X509_EXTENSION_num(x),
get_ext=lambda backend, x, i: backend._lib.sk_X509_EXTENSION_value(x, i),
handlers=_EXTENSION_HANDLERS
)
_REVOKED_CERTIFICATE_EXTENSION_PARSER = _X509ExtensionParser(
ext_count=lambda backend, x: backend._lib.X509_REVOKED_get_ext_count(x),
get_ext=lambda backend, x, i: backend._lib.X509_REVOKED_get_ext(x, i),
handlers=_REVOKED_EXTENSION_HANDLERS,
unsupported_exts=_REVOKED_UNSUPPORTED_EXTENSIONS
)
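A hedged sketch of how these decoders are reached through the public x509 API; "cert.pem" is a placeholder path used only for illustration:

from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.x509.oid import ExtensionOID

with open("cert.pem", "rb") as f:
    cert = x509.load_pem_x509_certificate(f.read(), default_backend())

print(cert.subject)
print(cert.not_valid_before, cert.not_valid_after)

# Extension access goes through _CERTIFICATE_EXTENSION_PARSER above.
basic_constraints = cert.extensions.get_extension_for_oid(
    ExtensionOID.BASIC_CONSTRAINTS
)
print(basic_constraints.value.ca, basic_constraints.value.path_length)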