update Windows build to Python 3.7

commit ddc59ab92d
Author: j
Date: 2019-01-20 16:05:31 +05:30
5761 changed files with 750298 additions and 213405 deletions

View file

@@ -4,31 +4,6 @@
from __future__ import absolute_import, division, print_function
import pkg_resources
from cryptography.hazmat.backends.multibackend import MultiBackend
_available_backends_list = None
def _available_backends():
global _available_backends_list
if _available_backends_list is None:
_available_backends_list = [
# setuptools 11.3 deprecated support for the require parameter to
# load(), and introduced the new resolve() method instead.
# This can be removed if/when we can assume setuptools>=11.3. At
# some point we may wish to add a warning, to push people along,
# but at present this would result in too many warnings.
ep.resolve() if hasattr(ep, "resolve") else ep.load(require=False)
for ep in pkg_resources.iter_entry_points(
"cryptography.backends"
)
]
return _available_backends_list
_default_backend = None
@@ -37,6 +12,7 @@ def default_backend():
global _default_backend
if _default_backend is None:
_default_backend = MultiBackend(_available_backends())
from cryptography.hazmat.backends.openssl.backend import backend
_default_backend = backend
return _default_backend
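For illustration, a minimal sketch (assumed caller-side usage, not part of this diff) of how default_backend() is typically consumed; after this change it always resolves to the OpenSSL backend:

# Hypothetical usage sketch: default_backend() now always returns the OpenSSL
# backend, so existing callers keep working unchanged.
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes

digest = hashes.Hash(hashes.SHA256(), backend=default_backend())
digest.update(b"abc")
print(digest.finalize().hex())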

View file

@@ -1,10 +0,0 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography.hazmat.backends.commoncrypto.backend import backend
__all__ = ["backend"]

View file

@@ -1,245 +0,0 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from collections import namedtuple
from cryptography import utils
from cryptography.exceptions import InternalError
from cryptography.hazmat.backends.commoncrypto.ciphers import (
_CipherContext, _GCMCipherContext
)
from cryptography.hazmat.backends.commoncrypto.hashes import _HashContext
from cryptography.hazmat.backends.commoncrypto.hmac import _HMACContext
from cryptography.hazmat.backends.interfaces import (
CipherBackend, HMACBackend, HashBackend, PBKDF2HMACBackend
)
from cryptography.hazmat.bindings.commoncrypto.binding import Binding
from cryptography.hazmat.primitives.ciphers.algorithms import (
AES, ARC4, Blowfish, CAST5, TripleDES
)
from cryptography.hazmat.primitives.ciphers.modes import (
CBC, CFB, CFB8, CTR, ECB, GCM, OFB
)
HashMethods = namedtuple(
"HashMethods", ["ctx", "hash_init", "hash_update", "hash_final"]
)
@utils.register_interface(CipherBackend)
@utils.register_interface(HashBackend)
@utils.register_interface(HMACBackend)
@utils.register_interface(PBKDF2HMACBackend)
class Backend(object):
"""
CommonCrypto API wrapper.
"""
name = "commoncrypto"
def __init__(self):
self._binding = Binding()
self._ffi = self._binding.ffi
self._lib = self._binding.lib
self._cipher_registry = {}
self._register_default_ciphers()
self._hash_mapping = {
"md5": HashMethods(
"CC_MD5_CTX *", self._lib.CC_MD5_Init,
self._lib.CC_MD5_Update, self._lib.CC_MD5_Final
),
"sha1": HashMethods(
"CC_SHA1_CTX *", self._lib.CC_SHA1_Init,
self._lib.CC_SHA1_Update, self._lib.CC_SHA1_Final
),
"sha224": HashMethods(
"CC_SHA256_CTX *", self._lib.CC_SHA224_Init,
self._lib.CC_SHA224_Update, self._lib.CC_SHA224_Final
),
"sha256": HashMethods(
"CC_SHA256_CTX *", self._lib.CC_SHA256_Init,
self._lib.CC_SHA256_Update, self._lib.CC_SHA256_Final
),
"sha384": HashMethods(
"CC_SHA512_CTX *", self._lib.CC_SHA384_Init,
self._lib.CC_SHA384_Update, self._lib.CC_SHA384_Final
),
"sha512": HashMethods(
"CC_SHA512_CTX *", self._lib.CC_SHA512_Init,
self._lib.CC_SHA512_Update, self._lib.CC_SHA512_Final
),
}
self._supported_hmac_algorithms = {
"md5": self._lib.kCCHmacAlgMD5,
"sha1": self._lib.kCCHmacAlgSHA1,
"sha224": self._lib.kCCHmacAlgSHA224,
"sha256": self._lib.kCCHmacAlgSHA256,
"sha384": self._lib.kCCHmacAlgSHA384,
"sha512": self._lib.kCCHmacAlgSHA512,
}
self._supported_pbkdf2_hmac_algorithms = {
"sha1": self._lib.kCCPRFHmacAlgSHA1,
"sha224": self._lib.kCCPRFHmacAlgSHA224,
"sha256": self._lib.kCCPRFHmacAlgSHA256,
"sha384": self._lib.kCCPRFHmacAlgSHA384,
"sha512": self._lib.kCCPRFHmacAlgSHA512,
}
def hash_supported(self, algorithm):
return algorithm.name in self._hash_mapping
def hmac_supported(self, algorithm):
return algorithm.name in self._supported_hmac_algorithms
def create_hash_ctx(self, algorithm):
return _HashContext(self, algorithm)
def create_hmac_ctx(self, key, algorithm):
return _HMACContext(self, key, algorithm)
def cipher_supported(self, cipher, mode):
return (type(cipher), type(mode)) in self._cipher_registry
def create_symmetric_encryption_ctx(self, cipher, mode):
if isinstance(mode, GCM):
return _GCMCipherContext(
self, cipher, mode, self._lib.kCCEncrypt
)
else:
return _CipherContext(self, cipher, mode, self._lib.kCCEncrypt)
def create_symmetric_decryption_ctx(self, cipher, mode):
if isinstance(mode, GCM):
return _GCMCipherContext(
self, cipher, mode, self._lib.kCCDecrypt
)
else:
return _CipherContext(self, cipher, mode, self._lib.kCCDecrypt)
def pbkdf2_hmac_supported(self, algorithm):
return algorithm.name in self._supported_pbkdf2_hmac_algorithms
def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations,
key_material):
alg_enum = self._supported_pbkdf2_hmac_algorithms[algorithm.name]
buf = self._ffi.new("char[]", length)
res = self._lib.CCKeyDerivationPBKDF(
self._lib.kCCPBKDF2,
key_material,
len(key_material),
salt,
len(salt),
alg_enum,
iterations,
buf,
length
)
self._check_cipher_response(res)
return self._ffi.buffer(buf)[:]
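For reference, a minimal sketch (assumed usage, not part of this diff) of the backend-agnostic PBKDF2 primitive that previously routed into CCKeyDerivationPBKDF via this method:

# Hypothetical usage sketch of the public PBKDF2HMAC API; the parameters shown
# are illustrative only.
import os
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC

kdf = PBKDF2HMAC(algorithm=hashes.SHA256(), length=32, salt=os.urandom(16),
                 iterations=100000, backend=default_backend())
key = kdf.derive(b"my passphrase")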
def _register_cipher_adapter(self, cipher_cls, cipher_const, mode_cls,
mode_const):
if (cipher_cls, mode_cls) in self._cipher_registry:
raise ValueError("Duplicate registration for: {0} {1}.".format(
cipher_cls, mode_cls)
)
self._cipher_registry[cipher_cls, mode_cls] = (cipher_const,
mode_const)
def _register_default_ciphers(self):
for mode_cls, mode_const in [
(CBC, self._lib.kCCModeCBC),
(ECB, self._lib.kCCModeECB),
(CFB, self._lib.kCCModeCFB),
(CFB8, self._lib.kCCModeCFB8),
(OFB, self._lib.kCCModeOFB),
(CTR, self._lib.kCCModeCTR),
(GCM, self._lib.kCCModeGCM),
]:
self._register_cipher_adapter(
AES,
self._lib.kCCAlgorithmAES128,
mode_cls,
mode_const
)
for mode_cls, mode_const in [
(CBC, self._lib.kCCModeCBC),
(ECB, self._lib.kCCModeECB),
(CFB, self._lib.kCCModeCFB),
(CFB8, self._lib.kCCModeCFB8),
(OFB, self._lib.kCCModeOFB),
]:
self._register_cipher_adapter(
TripleDES,
self._lib.kCCAlgorithm3DES,
mode_cls,
mode_const
)
for mode_cls, mode_const in [
(CBC, self._lib.kCCModeCBC),
(ECB, self._lib.kCCModeECB),
(CFB, self._lib.kCCModeCFB),
(OFB, self._lib.kCCModeOFB)
]:
self._register_cipher_adapter(
Blowfish,
self._lib.kCCAlgorithmBlowfish,
mode_cls,
mode_const
)
for mode_cls, mode_const in [
(CBC, self._lib.kCCModeCBC),
(ECB, self._lib.kCCModeECB),
(CFB, self._lib.kCCModeCFB),
(OFB, self._lib.kCCModeOFB),
(CTR, self._lib.kCCModeCTR)
]:
self._register_cipher_adapter(
CAST5,
self._lib.kCCAlgorithmCAST,
mode_cls,
mode_const
)
self._register_cipher_adapter(
ARC4,
self._lib.kCCAlgorithmRC4,
type(None),
self._lib.kCCModeRC4
)
def _check_cipher_response(self, response):
if response == self._lib.kCCSuccess:
return
elif response == self._lib.kCCAlignmentError:
# This error is not currently triggered due to a bug filed as
# rdar://15589470
raise ValueError(
"The length of the provided data is not a multiple of "
"the block length."
)
else:
raise InternalError(
"The backend returned an unknown error, consider filing a bug."
" Code: {0}.".format(response),
response
)
def _release_cipher_ctx(self, ctx):
"""
Called by the garbage collector and used to safely dereference and
release the context.
"""
if ctx[0] != self._ffi.NULL:
res = self._lib.CCCryptorRelease(ctx[0])
self._check_cipher_response(res)
ctx[0] = self._ffi.NULL
backend = Backend()

View file

@@ -1,193 +0,0 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.exceptions import (
InvalidTag, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.primitives import ciphers, constant_time
from cryptography.hazmat.primitives.ciphers import modes
from cryptography.hazmat.primitives.ciphers.modes import (
CFB, CFB8, CTR, OFB
)
@utils.register_interface(ciphers.CipherContext)
class _CipherContext(object):
def __init__(self, backend, cipher, mode, operation):
self._backend = backend
self._cipher = cipher
self._mode = mode
self._operation = operation
# There is a bug in CommonCrypto where block ciphers do not raise
# kCCAlignmentError when finalizing if you supply non-block aligned
# data. To work around this we need to keep track of the block
# alignment ourselves, but only for alg+mode combos that require
# block alignment. OFB, CFB, and CTR make a block cipher algorithm
# into a stream cipher so we don't need to track them (and thus their
# block size is effectively 1 byte just like OpenSSL/CommonCrypto
# treat RC4 and other stream cipher block sizes).
# This bug has been filed as rdar://15589470
self._bytes_processed = 0
if (isinstance(cipher, ciphers.BlockCipherAlgorithm) and not
isinstance(mode, (OFB, CFB, CFB8, CTR))):
self._byte_block_size = cipher.block_size // 8
else:
self._byte_block_size = 1
registry = self._backend._cipher_registry
try:
cipher_enum, mode_enum = registry[type(cipher), type(mode)]
except KeyError:
raise UnsupportedAlgorithm(
"cipher {0} in {1} mode is not supported "
"by this backend.".format(
cipher.name, mode.name if mode else mode),
_Reasons.UNSUPPORTED_CIPHER
)
ctx = self._backend._ffi.new("CCCryptorRef *")
ctx = self._backend._ffi.gc(ctx, self._backend._release_cipher_ctx)
if isinstance(mode, modes.ModeWithInitializationVector):
iv_nonce = mode.initialization_vector
elif isinstance(mode, modes.ModeWithNonce):
iv_nonce = mode.nonce
else:
iv_nonce = self._backend._ffi.NULL
if isinstance(mode, CTR):
mode_option = self._backend._lib.kCCModeOptionCTR_BE
else:
mode_option = 0
res = self._backend._lib.CCCryptorCreateWithMode(
operation,
mode_enum, cipher_enum,
self._backend._lib.ccNoPadding, iv_nonce,
cipher.key, len(cipher.key),
self._backend._ffi.NULL, 0, 0, mode_option, ctx)
self._backend._check_cipher_response(res)
self._ctx = ctx
def update(self, data):
# Count bytes processed to handle block alignment.
self._bytes_processed += len(data)
buf = self._backend._ffi.new(
"unsigned char[]", len(data) + self._byte_block_size - 1)
outlen = self._backend._ffi.new("size_t *")
res = self._backend._lib.CCCryptorUpdate(
self._ctx[0], data, len(data), buf,
len(data) + self._byte_block_size - 1, outlen)
self._backend._check_cipher_response(res)
return self._backend._ffi.buffer(buf)[:outlen[0]]
def finalize(self):
# Raise error if block alignment is wrong.
if self._bytes_processed % self._byte_block_size:
raise ValueError(
"The length of the provided data is not a multiple of "
"the block length."
)
buf = self._backend._ffi.new("unsigned char[]", self._byte_block_size)
outlen = self._backend._ffi.new("size_t *")
res = self._backend._lib.CCCryptorFinal(
self._ctx[0], buf, len(buf), outlen)
self._backend._check_cipher_response(res)
self._backend._release_cipher_ctx(self._ctx)
return self._backend._ffi.buffer(buf)[:outlen[0]]
@utils.register_interface(ciphers.AEADCipherContext)
@utils.register_interface(ciphers.AEADEncryptionContext)
class _GCMCipherContext(object):
def __init__(self, backend, cipher, mode, operation):
self._backend = backend
self._cipher = cipher
self._mode = mode
self._operation = operation
self._tag = None
registry = self._backend._cipher_registry
try:
cipher_enum, mode_enum = registry[type(cipher), type(mode)]
except KeyError:
raise UnsupportedAlgorithm(
"cipher {0} in {1} mode is not supported "
"by this backend.".format(
cipher.name, mode.name if mode else mode),
_Reasons.UNSUPPORTED_CIPHER
)
ctx = self._backend._ffi.new("CCCryptorRef *")
ctx = self._backend._ffi.gc(ctx, self._backend._release_cipher_ctx)
self._ctx = ctx
res = self._backend._lib.CCCryptorCreateWithMode(
operation,
mode_enum, cipher_enum,
self._backend._lib.ccNoPadding,
self._backend._ffi.NULL,
cipher.key, len(cipher.key),
self._backend._ffi.NULL, 0, 0, 0, self._ctx)
self._backend._check_cipher_response(res)
res = self._backend._lib.CCCryptorGCMAddIV(
self._ctx[0],
mode.initialization_vector,
len(mode.initialization_vector)
)
self._backend._check_cipher_response(res)
# CommonCrypto has a bug where calling update without at least one
# call to authenticate_additional_data will result in null byte output
# for ciphertext. The following empty byte string call prevents the
# issue, which is present in at least 10.8 and 10.9.
# Filed as rdar://18314544
self.authenticate_additional_data(b"")
def update(self, data):
buf = self._backend._ffi.new("unsigned char[]", len(data))
args = (self._ctx[0], data, len(data), buf)
if self._operation == self._backend._lib.kCCEncrypt:
res = self._backend._lib.CCCryptorGCMEncrypt(*args)
else:
res = self._backend._lib.CCCryptorGCMDecrypt(*args)
self._backend._check_cipher_response(res)
return self._backend._ffi.buffer(buf)[:]
def finalize(self):
# CommonCrypto has yet another bug where you must make at least one
# call to update. If you pass just AAD and call finalize without a call
# to update you'll get null bytes for tag. The following update call
# prevents this issue, which is present in at least 10.8 and 10.9.
# Filed as rdar://18314580
self.update(b"")
tag_size = self._cipher.block_size // 8
tag_buf = self._backend._ffi.new("unsigned char[]", tag_size)
tag_len = self._backend._ffi.new("size_t *", tag_size)
res = self._backend._lib.CCCryptorGCMFinal(
self._ctx[0], tag_buf, tag_len
)
self._backend._check_cipher_response(res)
self._backend._release_cipher_ctx(self._ctx)
self._tag = self._backend._ffi.buffer(tag_buf)[:]
if (self._operation == self._backend._lib.kCCDecrypt and
not constant_time.bytes_eq(
self._tag[:len(self._mode.tag)], self._mode.tag
)):
raise InvalidTag
return b""
def authenticate_additional_data(self, data):
res = self._backend._lib.CCCryptorGCMAddAAD(
self._ctx[0], data, len(data)
)
self._backend._check_cipher_response(res)
tag = utils.read_only_property("_tag")

View file

@@ -1,55 +0,0 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
from cryptography.hazmat.primitives import hashes
@utils.register_interface(hashes.HashContext)
class _HashContext(object):
def __init__(self, backend, algorithm, ctx=None):
self._algorithm = algorithm
self._backend = backend
if ctx is None:
try:
methods = self._backend._hash_mapping[self.algorithm.name]
except KeyError:
raise UnsupportedAlgorithm(
"{0} is not a supported hash on this backend.".format(
algorithm.name),
_Reasons.UNSUPPORTED_HASH
)
ctx = self._backend._ffi.new(methods.ctx)
res = methods.hash_init(ctx)
assert res == 1
self._ctx = ctx
algorithm = utils.read_only_property("_algorithm")
def copy(self):
methods = self._backend._hash_mapping[self.algorithm.name]
new_ctx = self._backend._ffi.new(methods.ctx)
# CommonCrypto has no APIs for copying hashes, so we have to copy the
# underlying struct.
new_ctx[0] = self._ctx[0]
return _HashContext(self._backend, self.algorithm, ctx=new_ctx)
def update(self, data):
methods = self._backend._hash_mapping[self.algorithm.name]
res = methods.hash_update(self._ctx, data, len(data))
assert res == 1
def finalize(self):
methods = self._backend._hash_mapping[self.algorithm.name]
buf = self._backend._ffi.new("unsigned char[]",
self.algorithm.digest_size)
res = methods.hash_final(buf, self._ctx)
assert res == 1
return self._backend._ffi.buffer(buf)[:]

View file

@@ -1,59 +0,0 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.exceptions import (
InvalidSignature, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.primitives import constant_time, hashes, interfaces
@utils.register_interface(interfaces.MACContext)
@utils.register_interface(hashes.HashContext)
class _HMACContext(object):
def __init__(self, backend, key, algorithm, ctx=None):
self._algorithm = algorithm
self._backend = backend
if ctx is None:
ctx = self._backend._ffi.new("CCHmacContext *")
try:
alg = self._backend._supported_hmac_algorithms[algorithm.name]
except KeyError:
raise UnsupportedAlgorithm(
"{0} is not a supported HMAC hash on this backend.".format(
algorithm.name),
_Reasons.UNSUPPORTED_HASH
)
self._backend._lib.CCHmacInit(ctx, alg, key, len(key))
self._ctx = ctx
self._key = key
algorithm = utils.read_only_property("_algorithm")
def copy(self):
copied_ctx = self._backend._ffi.new("CCHmacContext *")
# CommonCrypto has no APIs for copying HMACs, so we have to copy the
# underlying struct.
copied_ctx[0] = self._ctx[0]
return _HMACContext(
self._backend, self._key, self.algorithm, ctx=copied_ctx
)
def update(self, data):
self._backend._lib.CCHmacUpdate(self._ctx, data, len(data))
def finalize(self):
buf = self._backend._ffi.new("unsigned char[]",
self.algorithm.digest_size)
self._backend._lib.CCHmacFinal(self._ctx, buf)
return self._backend._ffi.buffer(buf)[:]
def verify(self, signature):
digest = self.finalize()
if not constant_time.bytes_eq(digest, signature):
raise InvalidSignature("Signature did not match digest.")

View file

@@ -221,6 +221,12 @@ class EllipticCurveBackend(object):
Returns whether the exchange algorithm is supported by this backend.
"""
@abc.abstractmethod
def derive_elliptic_curve_private_key(self, private_value, curve):
"""
Compute the private key given the private value and curve.
"""
@six.add_metaclass(abc.ABCMeta)
class PEMSerializationBackend(object):
@@ -237,6 +243,12 @@ class PEMSerializationBackend(object):
Loads a public key from PEM encoded data.
"""
@abc.abstractmethod
def load_pem_parameters(self, data):
"""
Load encryption parameters from PEM encoded data.
"""
@six.add_metaclass(abc.ABCMeta)
class DERSerializationBackend(object):
@@ -253,6 +265,12 @@ class DERSerializationBackend(object):
Loads a public key from DER encoded data.
"""
@abc.abstractmethod
def load_der_parameters(self, data):
"""
Load encryption parameters from DER encoded data.
"""
@six.add_metaclass(abc.ABCMeta)
class X509Backend(object):
@@ -306,13 +324,20 @@ class X509Backend(object):
object.
"""
@abc.abstractmethod
def x509_name_bytes(self, name):
"""
Compute the DER encoded bytes of an X509 Name object.
"""
@six.add_metaclass(abc.ABCMeta)
class DHBackend(object):
@abc.abstractmethod
def generate_dh_parameters(self, key_size):
def generate_dh_parameters(self, generator, key_size):
"""
Generate a DHParameters instance with a modulus of key_size bits.
Using the given generator, often 2 or 5.
"""
@abc.abstractmethod
@@ -323,37 +348,48 @@ class DHBackend(object):
"""
@abc.abstractmethod
def generate_dh_private_key_and_parameters(self, key_size):
def generate_dh_private_key_and_parameters(self, generator, key_size):
"""
Generate a DHPrivateKey instance using key size only.
Using the given generator, often 2 or 5.
"""
@abc.abstractmethod
def load_dh_private_numbers(self, numbers):
"""
Returns a DHPrivateKey provider.
Load a DHPrivateKey from DHPrivateNumbers.
"""
@abc.abstractmethod
def load_dh_public_numbers(self, numbers):
"""
Returns a DHPublicKey provider.
Load a DHPublicKey from DHPublicNumbers.
"""
@abc.abstractmethod
def load_dh_parameter_numbers(self, numbers):
"""
Returns a DHParameters provider.
Load DHParameters from DHParameterNumbers.
"""
@abc.abstractmethod
def dh_exchange_algorithm_supported(self, exchange_algorithm):
"""
Returns whether the exchange algorithm is supported by this backend.
"""
@abc.abstractmethod
def dh_parameters_supported(self, p, g):
def dh_parameters_supported(self, p, g, q=None):
"""
Returns whether the backend supports DH with these parameter values.
"""
@abc.abstractmethod
def dh_x942_serialization_supported(self):
"""
Returns True if the backend supports the serialization of DH objects
with subgroup order (q).
"""
@six.add_metaclass(abc.ABCMeta)
class ScryptBackend(object):
@abc.abstractmethod
def derive_scrypt(self, key_material, salt, length, n, r, p):
"""
Return bytes derived from provided Scrypt parameters.
"""

View file

@@ -1,404 +0,0 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
from cryptography.hazmat.backends.interfaces import (
CMACBackend, CipherBackend, DERSerializationBackend, DSABackend,
EllipticCurveBackend, HMACBackend, HashBackend, PBKDF2HMACBackend,
PEMSerializationBackend, RSABackend, X509Backend
)
@utils.register_interface(CMACBackend)
@utils.register_interface(CipherBackend)
@utils.register_interface(DERSerializationBackend)
@utils.register_interface(HashBackend)
@utils.register_interface(HMACBackend)
@utils.register_interface(PBKDF2HMACBackend)
@utils.register_interface(RSABackend)
@utils.register_interface(DSABackend)
@utils.register_interface(EllipticCurveBackend)
@utils.register_interface(PEMSerializationBackend)
@utils.register_interface(X509Backend)
class MultiBackend(object):
name = "multibackend"
def __init__(self, backends):
self._backends = backends
def _filtered_backends(self, interface):
for b in self._backends:
if isinstance(b, interface):
yield b
def cipher_supported(self, cipher, mode):
return any(
b.cipher_supported(cipher, mode)
for b in self._filtered_backends(CipherBackend)
)
def create_symmetric_encryption_ctx(self, cipher, mode):
for b in self._filtered_backends(CipherBackend):
try:
return b.create_symmetric_encryption_ctx(cipher, mode)
except UnsupportedAlgorithm:
pass
raise UnsupportedAlgorithm(
"cipher {0} in {1} mode is not supported by this backend.".format(
cipher.name, mode.name if mode else mode),
_Reasons.UNSUPPORTED_CIPHER
)
def create_symmetric_decryption_ctx(self, cipher, mode):
for b in self._filtered_backends(CipherBackend):
try:
return b.create_symmetric_decryption_ctx(cipher, mode)
except UnsupportedAlgorithm:
pass
raise UnsupportedAlgorithm(
"cipher {0} in {1} mode is not supported by this backend.".format(
cipher.name, mode.name if mode else mode),
_Reasons.UNSUPPORTED_CIPHER
)
def hash_supported(self, algorithm):
return any(
b.hash_supported(algorithm)
for b in self._filtered_backends(HashBackend)
)
def create_hash_ctx(self, algorithm):
for b in self._filtered_backends(HashBackend):
try:
return b.create_hash_ctx(algorithm)
except UnsupportedAlgorithm:
pass
raise UnsupportedAlgorithm(
"{0} is not a supported hash on this backend.".format(
algorithm.name),
_Reasons.UNSUPPORTED_HASH
)
def hmac_supported(self, algorithm):
return any(
b.hmac_supported(algorithm)
for b in self._filtered_backends(HMACBackend)
)
def create_hmac_ctx(self, key, algorithm):
for b in self._filtered_backends(HMACBackend):
try:
return b.create_hmac_ctx(key, algorithm)
except UnsupportedAlgorithm:
pass
raise UnsupportedAlgorithm(
"{0} is not a supported hash on this backend.".format(
algorithm.name),
_Reasons.UNSUPPORTED_HASH
)
def pbkdf2_hmac_supported(self, algorithm):
return any(
b.pbkdf2_hmac_supported(algorithm)
for b in self._filtered_backends(PBKDF2HMACBackend)
)
def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations,
key_material):
for b in self._filtered_backends(PBKDF2HMACBackend):
try:
return b.derive_pbkdf2_hmac(
algorithm, length, salt, iterations, key_material
)
except UnsupportedAlgorithm:
pass
raise UnsupportedAlgorithm(
"{0} is not a supported hash on this backend.".format(
algorithm.name),
_Reasons.UNSUPPORTED_HASH
)
def generate_rsa_private_key(self, public_exponent, key_size):
for b in self._filtered_backends(RSABackend):
return b.generate_rsa_private_key(public_exponent, key_size)
raise UnsupportedAlgorithm("RSA is not supported by the backend.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def generate_rsa_parameters_supported(self, public_exponent, key_size):
for b in self._filtered_backends(RSABackend):
return b.generate_rsa_parameters_supported(
public_exponent, key_size
)
raise UnsupportedAlgorithm("RSA is not supported by the backend.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def rsa_padding_supported(self, padding):
for b in self._filtered_backends(RSABackend):
return b.rsa_padding_supported(padding)
raise UnsupportedAlgorithm("RSA is not supported by the backend.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def load_rsa_private_numbers(self, numbers):
for b in self._filtered_backends(RSABackend):
return b.load_rsa_private_numbers(numbers)
raise UnsupportedAlgorithm("RSA is not supported by the backend",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def load_rsa_public_numbers(self, numbers):
for b in self._filtered_backends(RSABackend):
return b.load_rsa_public_numbers(numbers)
raise UnsupportedAlgorithm("RSA is not supported by the backend",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def generate_dsa_parameters(self, key_size):
for b in self._filtered_backends(DSABackend):
return b.generate_dsa_parameters(key_size)
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def generate_dsa_private_key(self, parameters):
for b in self._filtered_backends(DSABackend):
return b.generate_dsa_private_key(parameters)
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def generate_dsa_private_key_and_parameters(self, key_size):
for b in self._filtered_backends(DSABackend):
return b.generate_dsa_private_key_and_parameters(key_size)
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def dsa_hash_supported(self, algorithm):
for b in self._filtered_backends(DSABackend):
return b.dsa_hash_supported(algorithm)
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def dsa_parameters_supported(self, p, q, g):
for b in self._filtered_backends(DSABackend):
return b.dsa_parameters_supported(p, q, g)
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def load_dsa_public_numbers(self, numbers):
for b in self._filtered_backends(DSABackend):
return b.load_dsa_public_numbers(numbers)
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def load_dsa_private_numbers(self, numbers):
for b in self._filtered_backends(DSABackend):
return b.load_dsa_private_numbers(numbers)
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def load_dsa_parameter_numbers(self, numbers):
for b in self._filtered_backends(DSABackend):
return b.load_dsa_parameter_numbers(numbers)
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def cmac_algorithm_supported(self, algorithm):
return any(
b.cmac_algorithm_supported(algorithm)
for b in self._filtered_backends(CMACBackend)
)
def create_cmac_ctx(self, algorithm):
for b in self._filtered_backends(CMACBackend):
try:
return b.create_cmac_ctx(algorithm)
except UnsupportedAlgorithm:
pass
raise UnsupportedAlgorithm("This backend does not support CMAC.",
_Reasons.UNSUPPORTED_CIPHER)
def elliptic_curve_supported(self, curve):
return any(
b.elliptic_curve_supported(curve)
for b in self._filtered_backends(EllipticCurveBackend)
)
def elliptic_curve_signature_algorithm_supported(
self, signature_algorithm, curve
):
return any(
b.elliptic_curve_signature_algorithm_supported(
signature_algorithm, curve
)
for b in self._filtered_backends(EllipticCurveBackend)
)
def generate_elliptic_curve_private_key(self, curve):
for b in self._filtered_backends(EllipticCurveBackend):
try:
return b.generate_elliptic_curve_private_key(curve)
except UnsupportedAlgorithm:
continue
raise UnsupportedAlgorithm(
"This backend does not support this elliptic curve.",
_Reasons.UNSUPPORTED_ELLIPTIC_CURVE
)
def load_elliptic_curve_private_numbers(self, numbers):
for b in self._filtered_backends(EllipticCurveBackend):
try:
return b.load_elliptic_curve_private_numbers(numbers)
except UnsupportedAlgorithm:
continue
raise UnsupportedAlgorithm(
"This backend does not support this elliptic curve.",
_Reasons.UNSUPPORTED_ELLIPTIC_CURVE
)
def load_elliptic_curve_public_numbers(self, numbers):
for b in self._filtered_backends(EllipticCurveBackend):
try:
return b.load_elliptic_curve_public_numbers(numbers)
except UnsupportedAlgorithm:
continue
raise UnsupportedAlgorithm(
"This backend does not support this elliptic curve.",
_Reasons.UNSUPPORTED_ELLIPTIC_CURVE
)
def elliptic_curve_exchange_algorithm_supported(self, algorithm, curve):
return any(
b.elliptic_curve_exchange_algorithm_supported(algorithm, curve)
for b in self._filtered_backends(EllipticCurveBackend)
)
def load_pem_private_key(self, data, password):
for b in self._filtered_backends(PEMSerializationBackend):
return b.load_pem_private_key(data, password)
raise UnsupportedAlgorithm(
"This backend does not support this key serialization.",
_Reasons.UNSUPPORTED_SERIALIZATION
)
def load_pem_public_key(self, data):
for b in self._filtered_backends(PEMSerializationBackend):
return b.load_pem_public_key(data)
raise UnsupportedAlgorithm(
"This backend does not support this key serialization.",
_Reasons.UNSUPPORTED_SERIALIZATION
)
def load_der_private_key(self, data, password):
for b in self._filtered_backends(DERSerializationBackend):
return b.load_der_private_key(data, password)
raise UnsupportedAlgorithm(
"This backend does not support this key serialization.",
_Reasons.UNSUPPORTED_SERIALIZATION
)
def load_der_public_key(self, data):
for b in self._filtered_backends(DERSerializationBackend):
return b.load_der_public_key(data)
raise UnsupportedAlgorithm(
"This backend does not support this key serialization.",
_Reasons.UNSUPPORTED_SERIALIZATION
)
def load_pem_x509_certificate(self, data):
for b in self._filtered_backends(X509Backend):
return b.load_pem_x509_certificate(data)
raise UnsupportedAlgorithm(
"This backend does not support X.509.",
_Reasons.UNSUPPORTED_X509
)
def load_der_x509_certificate(self, data):
for b in self._filtered_backends(X509Backend):
return b.load_der_x509_certificate(data)
raise UnsupportedAlgorithm(
"This backend does not support X.509.",
_Reasons.UNSUPPORTED_X509
)
def load_pem_x509_crl(self, data):
for b in self._filtered_backends(X509Backend):
return b.load_pem_x509_crl(data)
raise UnsupportedAlgorithm(
"This backend does not support X.509.",
_Reasons.UNSUPPORTED_X509
)
def load_der_x509_crl(self, data):
for b in self._filtered_backends(X509Backend):
return b.load_der_x509_crl(data)
raise UnsupportedAlgorithm(
"This backend does not support X.509.",
_Reasons.UNSUPPORTED_X509
)
def load_der_x509_csr(self, data):
for b in self._filtered_backends(X509Backend):
return b.load_der_x509_csr(data)
raise UnsupportedAlgorithm(
"This backend does not support X.509.",
_Reasons.UNSUPPORTED_X509
)
def load_pem_x509_csr(self, data):
for b in self._filtered_backends(X509Backend):
return b.load_pem_x509_csr(data)
raise UnsupportedAlgorithm(
"This backend does not support X.509.",
_Reasons.UNSUPPORTED_X509
)
def create_x509_csr(self, builder, private_key, algorithm):
for b in self._filtered_backends(X509Backend):
return b.create_x509_csr(builder, private_key, algorithm)
raise UnsupportedAlgorithm(
"This backend does not support X.509.",
_Reasons.UNSUPPORTED_X509
)
def create_x509_certificate(self, builder, private_key, algorithm):
for b in self._filtered_backends(X509Backend):
return b.create_x509_certificate(builder, private_key, algorithm)
raise UnsupportedAlgorithm(
"This backend does not support X.509.",
_Reasons.UNSUPPORTED_X509
)
def create_x509_crl(self, builder, private_key, algorithm):
for b in self._filtered_backends(X509Backend):
return b.create_x509_crl(builder, private_key, algorithm)
raise UnsupportedAlgorithm(
"This backend does not support X.509.",
_Reasons.UNSUPPORTED_X509
)
def create_x509_revoked_certificate(self, builder):
for b in self._filtered_backends(X509Backend):
return b.create_x509_revoked_certificate(builder)
raise UnsupportedAlgorithm(
"This backend does not support X.509.",
_Reasons.UNSUPPORTED_X509
)

View file

@@ -0,0 +1,159 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography.exceptions import InvalidTag
_ENCRYPT = 1
_DECRYPT = 0
def _aead_cipher_name(cipher):
from cryptography.hazmat.primitives.ciphers.aead import (
AESCCM, AESGCM, ChaCha20Poly1305
)
if isinstance(cipher, ChaCha20Poly1305):
return b"chacha20-poly1305"
elif isinstance(cipher, AESCCM):
return "aes-{0}-ccm".format(len(cipher._key) * 8).encode("ascii")
else:
assert isinstance(cipher, AESGCM)
return "aes-{0}-gcm".format(len(cipher._key) * 8).encode("ascii")
def _aead_setup(backend, cipher_name, key, nonce, tag, tag_len, operation):
evp_cipher = backend._lib.EVP_get_cipherbyname(cipher_name)
backend.openssl_assert(evp_cipher != backend._ffi.NULL)
ctx = backend._lib.EVP_CIPHER_CTX_new()
ctx = backend._ffi.gc(ctx, backend._lib.EVP_CIPHER_CTX_free)
res = backend._lib.EVP_CipherInit_ex(
ctx, evp_cipher,
backend._ffi.NULL,
backend._ffi.NULL,
backend._ffi.NULL,
int(operation == _ENCRYPT)
)
backend.openssl_assert(res != 0)
res = backend._lib.EVP_CIPHER_CTX_set_key_length(ctx, len(key))
backend.openssl_assert(res != 0)
res = backend._lib.EVP_CIPHER_CTX_ctrl(
ctx, backend._lib.EVP_CTRL_AEAD_SET_IVLEN, len(nonce),
backend._ffi.NULL
)
backend.openssl_assert(res != 0)
if operation == _DECRYPT:
res = backend._lib.EVP_CIPHER_CTX_ctrl(
ctx, backend._lib.EVP_CTRL_AEAD_SET_TAG, len(tag), tag
)
backend.openssl_assert(res != 0)
else:
res = backend._lib.EVP_CIPHER_CTX_ctrl(
ctx, backend._lib.EVP_CTRL_AEAD_SET_TAG, tag_len, backend._ffi.NULL
)
res = backend._lib.EVP_CipherInit_ex(
ctx,
backend._ffi.NULL,
backend._ffi.NULL,
key,
nonce,
int(operation == _ENCRYPT)
)
backend.openssl_assert(res != 0)
return ctx
def _set_length(backend, ctx, data_len):
intptr = backend._ffi.new("int *")
res = backend._lib.EVP_CipherUpdate(
ctx,
backend._ffi.NULL,
intptr,
backend._ffi.NULL,
data_len
)
backend.openssl_assert(res != 0)
def _process_aad(backend, ctx, associated_data):
outlen = backend._ffi.new("int *")
res = backend._lib.EVP_CipherUpdate(
ctx, backend._ffi.NULL, outlen, associated_data, len(associated_data)
)
backend.openssl_assert(res != 0)
def _process_data(backend, ctx, data):
outlen = backend._ffi.new("int *")
buf = backend._ffi.new("unsigned char[]", len(data))
res = backend._lib.EVP_CipherUpdate(ctx, buf, outlen, data, len(data))
backend.openssl_assert(res != 0)
return backend._ffi.buffer(buf, outlen[0])[:]
def _encrypt(backend, cipher, nonce, data, associated_data, tag_length):
from cryptography.hazmat.primitives.ciphers.aead import AESCCM
cipher_name = _aead_cipher_name(cipher)
ctx = _aead_setup(
backend, cipher_name, cipher._key, nonce, None, tag_length, _ENCRYPT
)
# CCM requires us to pass the length of the data before processing anything.
# However, calling this with any other AEAD results in an error.
if isinstance(cipher, AESCCM):
_set_length(backend, ctx, len(data))
_process_aad(backend, ctx, associated_data)
processed_data = _process_data(backend, ctx, data)
outlen = backend._ffi.new("int *")
res = backend._lib.EVP_CipherFinal_ex(ctx, backend._ffi.NULL, outlen)
backend.openssl_assert(res != 0)
backend.openssl_assert(outlen[0] == 0)
tag_buf = backend._ffi.new("unsigned char[]", tag_length)
res = backend._lib.EVP_CIPHER_CTX_ctrl(
ctx, backend._lib.EVP_CTRL_AEAD_GET_TAG, tag_length, tag_buf
)
backend.openssl_assert(res != 0)
tag = backend._ffi.buffer(tag_buf)[:]
return processed_data + tag
def _decrypt(backend, cipher, nonce, data, associated_data, tag_length):
from cryptography.hazmat.primitives.ciphers.aead import AESCCM
if len(data) < tag_length:
raise InvalidTag
tag = data[-tag_length:]
data = data[:-tag_length]
cipher_name = _aead_cipher_name(cipher)
ctx = _aead_setup(
backend, cipher_name, cipher._key, nonce, tag, tag_length, _DECRYPT
)
# CCM requires us to pass the length of the data before processing anything.
# However, calling this with any other AEAD results in an error.
if isinstance(cipher, AESCCM):
_set_length(backend, ctx, len(data))
_process_aad(backend, ctx, associated_data)
# CCM has a different error path if the tag doesn't match. Errors are
# raised in Update and Final is irrelevant.
if isinstance(cipher, AESCCM):
outlen = backend._ffi.new("int *")
buf = backend._ffi.new("unsigned char[]", len(data))
res = backend._lib.EVP_CipherUpdate(ctx, buf, outlen, data, len(data))
if res != 1:
backend._consume_errors()
raise InvalidTag
processed_data = backend._ffi.buffer(buf, outlen[0])[:]
else:
processed_data = _process_data(backend, ctx, data)
outlen = backend._ffi.new("int *")
res = backend._lib.EVP_CipherFinal_ex(ctx, backend._ffi.NULL, outlen)
if res == 0:
backend._consume_errors()
raise InvalidTag
return processed_data
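The helpers above back the high-level AEAD classes; a minimal sketch (assumed usage, not part of this diff) of an AESGCM round trip:

# Hypothetical usage sketch of the AESGCM API that drives _encrypt/_decrypt
# above; a wrong tag or mismatched associated data raises InvalidTag.
import os
from cryptography.hazmat.primitives.ciphers.aead import AESGCM

key = AESGCM.generate_key(bit_length=128)
aesgcm = AESGCM(key)
nonce = os.urandom(12)
ct = aesgcm.encrypt(nonce, b"secret message", b"associated data")
assert aesgcm.decrypt(nonce, ct, b"associated data") == b"secret message"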

View file

@@ -13,6 +13,7 @@ from cryptography.hazmat.primitives.ciphers import modes
@utils.register_interface(ciphers.CipherContext)
@utils.register_interface(ciphers.AEADCipherContext)
@utils.register_interface(ciphers.AEADEncryptionContext)
@utils.register_interface(ciphers.AEADDecryptionContext)
class _CipherContext(object):
_ENCRYPT = 1
_DECRYPT = 0
@@ -25,9 +26,9 @@ class _CipherContext(object):
self._tag = None
if isinstance(self._cipher, ciphers.BlockCipherAlgorithm):
self._block_size = self._cipher.block_size
self._block_size_bytes = self._cipher.block_size // 8
else:
self._block_size = 1
self._block_size_bytes = 1
ctx = self._backend._lib.EVP_CIPHER_CTX_new()
ctx = self._backend._ffi.gc(
@@ -56,8 +57,12 @@ class _CipherContext(object):
if isinstance(mode, modes.ModeWithInitializationVector):
iv_nonce = mode.initialization_vector
elif isinstance(mode, modes.ModeWithTweak):
iv_nonce = mode.tweak
elif isinstance(mode, modes.ModeWithNonce):
iv_nonce = mode.nonce
elif isinstance(cipher, modes.ModeWithNonce):
iv_nonce = cipher.nonce
else:
iv_nonce = self._backend._ffi.NULL
# begin init with cipher and operation type
@@ -74,16 +79,26 @@ class _CipherContext(object):
self._backend.openssl_assert(res != 0)
if isinstance(mode, modes.GCM):
res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
ctx, self._backend._lib.EVP_CTRL_GCM_SET_IVLEN,
ctx, self._backend._lib.EVP_CTRL_AEAD_SET_IVLEN,
len(iv_nonce), self._backend._ffi.NULL
)
self._backend.openssl_assert(res != 0)
if operation == self._DECRYPT:
if mode.tag is not None:
res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
ctx, self._backend._lib.EVP_CTRL_GCM_SET_TAG,
ctx, self._backend._lib.EVP_CTRL_AEAD_SET_TAG,
len(mode.tag), mode.tag
)
self._backend.openssl_assert(res != 0)
self._tag = mode.tag
elif (
self._operation == self._DECRYPT and
self._backend._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_102 and
not self._backend._lib.CRYPTOGRAPHY_IS_LIBRESSL
):
raise NotImplementedError(
"delayed passing of GCM tag requires OpenSSL >= 1.0.2."
" To use this feature please update OpenSSL"
)
# pass key/iv
res = self._backend._lib.EVP_CipherInit_ex(
@@ -101,22 +116,25 @@ class _CipherContext(object):
self._ctx = ctx
def update(self, data):
# OpenSSL 0.9.8e has an assertion in its EVP code that causes it
# to SIGABRT if you call update with an empty byte string. This can be
# removed when we drop support for 0.9.8e (CentOS/RHEL 5). This branch
# should be taken only when length is zero and mode is not GCM because
# AES GCM can return improper tag values if you don't call update
# with empty plaintext when authenticating AAD for ...reasons.
if len(data) == 0 and not isinstance(self._mode, modes.GCM):
return b""
buf = bytearray(len(data) + self._block_size_bytes - 1)
n = self.update_into(data, buf)
return bytes(buf[:n])
buf = self._backend._ffi.new("unsigned char[]",
len(data) + self._block_size - 1)
def update_into(self, data, buf):
if len(buf) < (len(data) + self._block_size_bytes - 1):
raise ValueError(
"buffer must be at least {0} bytes for this "
"payload".format(len(data) + self._block_size_bytes - 1)
)
buf = self._backend._ffi.cast(
"unsigned char *", self._backend._ffi.from_buffer(buf)
)
outlen = self._backend._ffi.new("int *")
res = self._backend._lib.EVP_CipherUpdate(self._ctx, buf, outlen, data,
len(data))
res = self._backend._lib.EVP_CipherUpdate(self._ctx, buf, outlen,
data, len(data))
self._backend.openssl_assert(res != 0)
return self._backend._ffi.buffer(buf)[:outlen[0]]
return outlen[0]
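A minimal sketch (assumed usage, not part of this diff) of the update_into path shown above, writing ciphertext into a caller-provided buffer:

# Hypothetical usage sketch: the output buffer must hold at least
# len(data) + block_size_bytes - 1 bytes (15 extra bytes for AES).
import os
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

key, iv = os.urandom(32), os.urandom(16)
encryptor = Cipher(algorithms.AES(key), modes.CTR(iv),
                   backend=default_backend()).encryptor()
data = b"a secret message"
buf = bytearray(len(data) + 15)
written = encryptor.update_into(data, buf)
ciphertext = bytes(buf[:written]) + encryptor.finalize()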
def finalize(self):
# OpenSSL 1.0.1 on Ubuntu 12.04 (and possibly other distributions)
@@ -127,7 +145,16 @@ class _CipherContext(object):
if isinstance(self._mode, modes.GCM):
self.update(b"")
buf = self._backend._ffi.new("unsigned char[]", self._block_size)
if (
self._operation == self._DECRYPT and
isinstance(self._mode, modes.ModeWithAuthenticationTag) and
self.tag is None
):
raise ValueError(
"Authentication tag must be provided when decrypting."
)
buf = self._backend._ffi.new("unsigned char[]", self._block_size_bytes)
outlen = self._backend._ffi.new("int *")
res = self._backend._lib.EVP_CipherFinal_ex(self._ctx, buf, outlen)
if res == 0:
@@ -137,13 +164,8 @@ class _CipherContext(object):
raise InvalidTag
self._backend.openssl_assert(
errors[0][1:] == (
errors[0]._lib_reason_match(
self._backend._lib.ERR_LIB_EVP,
self._backend._lib.EVP_F_EVP_ENCRYPTFINAL_EX,
self._backend._lib.EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH
) or errors[0][1:] == (
self._backend._lib.ERR_LIB_EVP,
self._backend._lib.EVP_F_EVP_DECRYPTFINAL_EX,
self._backend._lib.EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH
)
)
@@ -154,13 +176,12 @@ class _CipherContext(object):
if (isinstance(self._mode, modes.GCM) and
self._operation == self._ENCRYPT):
block_byte_size = self._block_size // 8
tag_buf = self._backend._ffi.new(
"unsigned char[]", block_byte_size
"unsigned char[]", self._block_size_bytes
)
res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
self._ctx, self._backend._lib.EVP_CTRL_GCM_GET_TAG,
block_byte_size, tag_buf
self._ctx, self._backend._lib.EVP_CTRL_AEAD_GET_TAG,
self._block_size_bytes, tag_buf
)
self._backend.openssl_assert(res != 0)
self._tag = self._backend._ffi.buffer(tag_buf)[:]
@@ -169,6 +190,28 @@ class _CipherContext(object):
self._backend.openssl_assert(res == 1)
return self._backend._ffi.buffer(buf)[:outlen[0]]
def finalize_with_tag(self, tag):
if (
self._backend._lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_102 and
not self._backend._lib.CRYPTOGRAPHY_IS_LIBRESSL
):
raise NotImplementedError(
"finalize_with_tag requires OpenSSL >= 1.0.2. To use this "
"method please update OpenSSL"
)
if len(tag) < self._mode._min_tag_length:
raise ValueError(
"Authentication tag must be {0} bytes or longer.".format(
self._mode._min_tag_length)
)
res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
self._ctx, self._backend._lib.EVP_CTRL_AEAD_SET_TAG,
len(tag), tag
)
self._backend.openssl_assert(res != 0)
self._tag = tag
return self.finalize()
def authenticate_additional_data(self, data):
outlen = self._backend._ffi.new("int *")
res = self._backend._lib.EVP_CipherUpdate(
@@ -177,37 +220,3 @@ class _CipherContext(object):
self._backend.openssl_assert(res != 0)
tag = utils.read_only_property("_tag")
@utils.register_interface(ciphers.CipherContext)
class _AESCTRCipherContext(object):
"""
This is needed to provide support for AES CTR mode in OpenSSL 0.9.8. It can
be removed when we drop 0.9.8 support (RHEL5 extended life ends 2020).
"""
def __init__(self, backend, cipher, mode):
self._backend = backend
self._key = self._backend._ffi.new("AES_KEY *")
res = self._backend._lib.AES_set_encrypt_key(
cipher.key, len(cipher.key) * 8, self._key
)
self._backend.openssl_assert(res == 0)
self._ecount = self._backend._ffi.new("char[]", 16)
self._nonce = self._backend._ffi.new("char[16]", mode.nonce)
self._num = self._backend._ffi.new("unsigned int *", 0)
def update(self, data):
buf = self._backend._ffi.new("unsigned char[]", len(data))
self._backend._lib.AES_ctr128_encrypt(
data, buf, len(data), self._key, self._nonce,
self._ecount, self._num
)
return self._backend._ffi.buffer(buf)[:]
def finalize(self):
self._key = None
self._ecount = None
self._nonce = None
self._num = None
return b""

View file

@@ -9,11 +9,11 @@ from cryptography import utils
from cryptography.exceptions import (
InvalidSignature, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.primitives import constant_time, interfaces
from cryptography.hazmat.primitives import constant_time, mac
from cryptography.hazmat.primitives.ciphers.modes import CBC
@utils.register_interface(interfaces.MACContext)
@utils.register_interface(mac.MACContext)
class _CMACContext(object):
def __init__(self, backend, algorithm, ctx=None):
if not backend.cmac_algorithm_supported(algorithm):
@@ -36,10 +36,11 @@ class _CMACContext(object):
self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
ctx = self._backend._ffi.gc(ctx, self._backend._lib.CMAC_CTX_free)
self._backend._lib.CMAC_Init(
res = self._backend._lib.CMAC_Init(
ctx, self._key, len(self._key),
evp_cipher, self._backend._ffi.NULL
)
self._backend.openssl_assert(res == 1)
self._ctx = ctx

View file

@@ -0,0 +1,867 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import datetime
import ipaddress
import asn1crypto.core
import six
from cryptography import x509
from cryptography.x509.extensions import _TLS_FEATURE_TYPE_TO_ENUM
from cryptography.x509.name import _ASN1_TYPE_TO_ENUM
from cryptography.x509.oid import (
CRLEntryExtensionOID, CertificatePoliciesOID, ExtensionOID,
OCSPExtensionOID,
)
class _Integers(asn1crypto.core.SequenceOf):
_child_spec = asn1crypto.core.Integer
def _obj2txt(backend, obj):
# Set to 80 on the recommendation of
# https://www.openssl.org/docs/crypto/OBJ_nid2ln.html#return_values
#
# But OIDs longer than this occur in real life (e.g. Active
# Directory makes some very long OIDs). So we need to detect
# and properly handle the case where the default buffer is not
# big enough.
#
buf_len = 80
buf = backend._ffi.new("char[]", buf_len)
# 'res' is the number of bytes that *would* be written if the
# buffer is large enough. If 'res' > buf_len - 1, we need to
# alloc a big-enough buffer and go again.
res = backend._lib.OBJ_obj2txt(buf, buf_len, obj, 1)
if res > buf_len - 1: # account for terminating null byte
buf_len = res + 1
buf = backend._ffi.new("char[]", buf_len)
res = backend._lib.OBJ_obj2txt(buf, buf_len, obj, 1)
backend.openssl_assert(res > 0)
return backend._ffi.buffer(buf, res)[:].decode()
def _decode_x509_name_entry(backend, x509_name_entry):
obj = backend._lib.X509_NAME_ENTRY_get_object(x509_name_entry)
backend.openssl_assert(obj != backend._ffi.NULL)
data = backend._lib.X509_NAME_ENTRY_get_data(x509_name_entry)
backend.openssl_assert(data != backend._ffi.NULL)
value = _asn1_string_to_utf8(backend, data)
oid = _obj2txt(backend, obj)
type = _ASN1_TYPE_TO_ENUM[data.type]
return x509.NameAttribute(x509.ObjectIdentifier(oid), value, type)
def _decode_x509_name(backend, x509_name):
count = backend._lib.X509_NAME_entry_count(x509_name)
attributes = []
prev_set_id = -1
for x in range(count):
entry = backend._lib.X509_NAME_get_entry(x509_name, x)
attribute = _decode_x509_name_entry(backend, entry)
set_id = backend._lib.Cryptography_X509_NAME_ENTRY_set(entry)
if set_id != prev_set_id:
attributes.append(set([attribute]))
else:
# is in the same RDN as a previous entry
attributes[-1].add(attribute)
prev_set_id = set_id
return x509.Name(x509.RelativeDistinguishedName(rdn) for rdn in attributes)
def _decode_general_names(backend, gns):
num = backend._lib.sk_GENERAL_NAME_num(gns)
names = []
for i in range(num):
gn = backend._lib.sk_GENERAL_NAME_value(gns, i)
backend.openssl_assert(gn != backend._ffi.NULL)
names.append(_decode_general_name(backend, gn))
return names
def _decode_general_name(backend, gn):
if gn.type == backend._lib.GEN_DNS:
# Convert to bytes and then decode to utf8. We don't use
# asn1_string_to_utf8 here because it doesn't properly convert
# utf8 from ia5strings.
data = _asn1_string_to_bytes(backend, gn.d.dNSName).decode("utf8")
# We don't use the constructor for DNSName so we can bypass validation
# This allows us to create DNSName objects that have unicode chars
# when a certificate (against the RFC) contains them.
return x509.DNSName._init_without_validation(data)
elif gn.type == backend._lib.GEN_URI:
# Convert to bytes and then decode to utf8. We don't use
# asn1_string_to_utf8 here because it doesn't properly convert
# utf8 from ia5strings.
data = _asn1_string_to_bytes(
backend, gn.d.uniformResourceIdentifier
).decode("utf8")
# We don't use the constructor for URI so we can bypass validation
# This allows us to create URI objects that have unicode chars
# when a certificate (against the RFC) contains them.
return x509.UniformResourceIdentifier._init_without_validation(data)
elif gn.type == backend._lib.GEN_RID:
oid = _obj2txt(backend, gn.d.registeredID)
return x509.RegisteredID(x509.ObjectIdentifier(oid))
elif gn.type == backend._lib.GEN_IPADD:
data = _asn1_string_to_bytes(backend, gn.d.iPAddress)
data_len = len(data)
if data_len == 8 or data_len == 32:
# This is an IPv4 or IPv6 Network and not a single IP. This
# type of data appears in Name Constraints. Unfortunately,
# ipaddress doesn't support packed bytes + netmask. Additionally,
# IPv6Network can only handle CIDR rather than the full 16 byte
# netmask. To handle this we convert the netmask to integer, then
# find the first 0 bit, which will be the prefix. If another 1
# bit is present after that the netmask is invalid.
base = ipaddress.ip_address(data[:data_len // 2])
netmask = ipaddress.ip_address(data[data_len // 2:])
bits = bin(int(netmask))[2:]
prefix = bits.find('0')
# If no 0 bits are found it is a /32 or /128
if prefix == -1:
prefix = len(bits)
if "1" in bits[prefix:]:
raise ValueError("Invalid netmask")
ip = ipaddress.ip_network(base.exploded + u"/{0}".format(prefix))
else:
ip = ipaddress.ip_address(data)
return x509.IPAddress(ip)
elif gn.type == backend._lib.GEN_DIRNAME:
return x509.DirectoryName(
_decode_x509_name(backend, gn.d.directoryName)
)
elif gn.type == backend._lib.GEN_EMAIL:
# Convert to bytes and then decode to utf8. We don't use
# asn1_string_to_utf8 here because it doesn't properly convert
# utf8 from ia5strings.
data = _asn1_string_to_bytes(backend, gn.d.rfc822Name).decode("utf8")
# We don't use the constructor for RFC822Name so we can bypass
# validation. This allows us to create RFC822Name objects that have
# unicode chars when a certificate (against the RFC) contains them.
return x509.RFC822Name._init_without_validation(data)
elif gn.type == backend._lib.GEN_OTHERNAME:
type_id = _obj2txt(backend, gn.d.otherName.type_id)
value = _asn1_to_der(backend, gn.d.otherName.value)
return x509.OtherName(x509.ObjectIdentifier(type_id), value)
else:
# x400Address or ediPartyName
raise x509.UnsupportedGeneralNameType(
"{0} is not a supported type".format(
x509._GENERAL_NAMES.get(gn.type, gn.type)
),
gn.type
)
def _decode_ocsp_no_check(backend, ext):
return x509.OCSPNoCheck()
def _decode_crl_number(backend, ext):
asn1_int = backend._ffi.cast("ASN1_INTEGER *", ext)
asn1_int = backend._ffi.gc(asn1_int, backend._lib.ASN1_INTEGER_free)
return x509.CRLNumber(_asn1_integer_to_int(backend, asn1_int))
def _decode_delta_crl_indicator(backend, ext):
asn1_int = backend._ffi.cast("ASN1_INTEGER *", ext)
asn1_int = backend._ffi.gc(asn1_int, backend._lib.ASN1_INTEGER_free)
return x509.DeltaCRLIndicator(_asn1_integer_to_int(backend, asn1_int))
class _X509ExtensionParser(object):
def __init__(self, ext_count, get_ext, handlers):
self.ext_count = ext_count
self.get_ext = get_ext
self.handlers = handlers
def parse(self, backend, x509_obj):
extensions = []
seen_oids = set()
for i in range(self.ext_count(backend, x509_obj)):
ext = self.get_ext(backend, x509_obj, i)
backend.openssl_assert(ext != backend._ffi.NULL)
crit = backend._lib.X509_EXTENSION_get_critical(ext)
critical = crit == 1
oid = x509.ObjectIdentifier(
_obj2txt(backend, backend._lib.X509_EXTENSION_get_object(ext))
)
if oid in seen_oids:
raise x509.DuplicateExtension(
"Duplicate {0} extension found".format(oid), oid
)
# These OIDs are only supported in OpenSSL 1.1.0+ but we want
# to support them in all versions of OpenSSL so we decode them
# ourselves.
if oid == ExtensionOID.TLS_FEATURE:
data = backend._lib.X509_EXTENSION_get_data(ext)
parsed = _Integers.load(_asn1_string_to_bytes(backend, data))
value = x509.TLSFeature(
[_TLS_FEATURE_TYPE_TO_ENUM[x.native] for x in parsed]
)
extensions.append(x509.Extension(oid, critical, value))
seen_oids.add(oid)
continue
elif oid == ExtensionOID.PRECERT_POISON:
data = backend._lib.X509_EXTENSION_get_data(ext)
parsed = asn1crypto.core.Null.load(
_asn1_string_to_bytes(backend, data)
)
assert parsed == asn1crypto.core.Null()
extensions.append(x509.Extension(
oid, critical, x509.PrecertPoison()
))
seen_oids.add(oid)
continue
try:
handler = self.handlers[oid]
except KeyError:
# Dump the DER payload into an UnrecognizedExtension object
data = backend._lib.X509_EXTENSION_get_data(ext)
backend.openssl_assert(data != backend._ffi.NULL)
der = backend._ffi.buffer(data.data, data.length)[:]
unrecognized = x509.UnrecognizedExtension(oid, der)
extensions.append(
x509.Extension(oid, critical, unrecognized)
)
else:
ext_data = backend._lib.X509V3_EXT_d2i(ext)
if ext_data == backend._ffi.NULL:
backend._consume_errors()
raise ValueError(
"The {0} extension is invalid and can't be "
"parsed".format(oid)
)
value = handler(backend, ext_data)
extensions.append(x509.Extension(oid, critical, value))
seen_oids.add(oid)
return x509.Extensions(extensions)
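A minimal sketch (assumed usage, not part of this diff) of how the parser above surfaces through the public x509 API when a certificate is loaded:

# Hypothetical usage sketch; "cert.pem" is an illustrative file name.
from cryptography import x509
from cryptography.hazmat.backends import default_backend

with open("cert.pem", "rb") as f:
    cert = x509.load_pem_x509_certificate(f.read(), default_backend())
for ext in cert.extensions:
    print(ext.oid, ext.critical, ext.value)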
def _decode_certificate_policies(backend, cp):
cp = backend._ffi.cast("Cryptography_STACK_OF_POLICYINFO *", cp)
cp = backend._ffi.gc(cp, backend._lib.CERTIFICATEPOLICIES_free)
num = backend._lib.sk_POLICYINFO_num(cp)
certificate_policies = []
for i in range(num):
qualifiers = None
pi = backend._lib.sk_POLICYINFO_value(cp, i)
oid = x509.ObjectIdentifier(_obj2txt(backend, pi.policyid))
if pi.qualifiers != backend._ffi.NULL:
qnum = backend._lib.sk_POLICYQUALINFO_num(pi.qualifiers)
qualifiers = []
for j in range(qnum):
pqi = backend._lib.sk_POLICYQUALINFO_value(
pi.qualifiers, j
)
pqualid = x509.ObjectIdentifier(
_obj2txt(backend, pqi.pqualid)
)
if pqualid == CertificatePoliciesOID.CPS_QUALIFIER:
cpsuri = backend._ffi.buffer(
pqi.d.cpsuri.data, pqi.d.cpsuri.length
)[:].decode('ascii')
qualifiers.append(cpsuri)
else:
assert pqualid == CertificatePoliciesOID.CPS_USER_NOTICE
user_notice = _decode_user_notice(
backend, pqi.d.usernotice
)
qualifiers.append(user_notice)
certificate_policies.append(
x509.PolicyInformation(oid, qualifiers)
)
return x509.CertificatePolicies(certificate_policies)
def _decode_user_notice(backend, un):
explicit_text = None
notice_reference = None
if un.exptext != backend._ffi.NULL:
explicit_text = _asn1_string_to_utf8(backend, un.exptext)
if un.noticeref != backend._ffi.NULL:
organization = _asn1_string_to_utf8(
backend, un.noticeref.organization
)
num = backend._lib.sk_ASN1_INTEGER_num(
un.noticeref.noticenos
)
notice_numbers = []
for i in range(num):
asn1_int = backend._lib.sk_ASN1_INTEGER_value(
un.noticeref.noticenos, i
)
notice_num = _asn1_integer_to_int(backend, asn1_int)
notice_numbers.append(notice_num)
notice_reference = x509.NoticeReference(
organization, notice_numbers
)
return x509.UserNotice(notice_reference, explicit_text)
def _decode_basic_constraints(backend, bc_st):
basic_constraints = backend._ffi.cast("BASIC_CONSTRAINTS *", bc_st)
basic_constraints = backend._ffi.gc(
basic_constraints, backend._lib.BASIC_CONSTRAINTS_free
)
# The byte representation of an ASN.1 boolean true is \xff. OpenSSL
# chooses to just map this to its ordinal value, so true is 255 and
# false is 0.
ca = basic_constraints.ca == 255
path_length = _asn1_integer_to_int_or_none(
backend, basic_constraints.pathlen
)
return x509.BasicConstraints(ca, path_length)
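# Editor's note (purely illustrative): DER encodes BOOLEAN TRUE as the single
# content octet 0xff (tag 0x01, length 0x01), which is why OpenSSL reports the
# CA flag as the integer 255 and the comparison above checks `== 255`.
_demo_der_boolean_true = b"\x01\x01\xff"  # tag, length, content octet
assert bytearray(_demo_der_boolean_true)[2] == 255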
def _decode_subject_key_identifier(backend, asn1_string):
asn1_string = backend._ffi.cast("ASN1_OCTET_STRING *", asn1_string)
asn1_string = backend._ffi.gc(
asn1_string, backend._lib.ASN1_OCTET_STRING_free
)
return x509.SubjectKeyIdentifier(
backend._ffi.buffer(asn1_string.data, asn1_string.length)[:]
)
def _decode_authority_key_identifier(backend, akid):
akid = backend._ffi.cast("AUTHORITY_KEYID *", akid)
akid = backend._ffi.gc(akid, backend._lib.AUTHORITY_KEYID_free)
key_identifier = None
authority_cert_issuer = None
if akid.keyid != backend._ffi.NULL:
key_identifier = backend._ffi.buffer(
akid.keyid.data, akid.keyid.length
)[:]
if akid.issuer != backend._ffi.NULL:
authority_cert_issuer = _decode_general_names(
backend, akid.issuer
)
authority_cert_serial_number = _asn1_integer_to_int_or_none(
backend, akid.serial
)
return x509.AuthorityKeyIdentifier(
key_identifier, authority_cert_issuer, authority_cert_serial_number
)
def _decode_authority_information_access(backend, aia):
aia = backend._ffi.cast("Cryptography_STACK_OF_ACCESS_DESCRIPTION *", aia)
aia = backend._ffi.gc(aia, backend._lib.sk_ACCESS_DESCRIPTION_free)
num = backend._lib.sk_ACCESS_DESCRIPTION_num(aia)
access_descriptions = []
for i in range(num):
ad = backend._lib.sk_ACCESS_DESCRIPTION_value(aia, i)
backend.openssl_assert(ad.method != backend._ffi.NULL)
oid = x509.ObjectIdentifier(_obj2txt(backend, ad.method))
backend.openssl_assert(ad.location != backend._ffi.NULL)
gn = _decode_general_name(backend, ad.location)
access_descriptions.append(x509.AccessDescription(oid, gn))
return x509.AuthorityInformationAccess(access_descriptions)
def _decode_key_usage(backend, bit_string):
bit_string = backend._ffi.cast("ASN1_BIT_STRING *", bit_string)
bit_string = backend._ffi.gc(bit_string, backend._lib.ASN1_BIT_STRING_free)
get_bit = backend._lib.ASN1_BIT_STRING_get_bit
digital_signature = get_bit(bit_string, 0) == 1
content_commitment = get_bit(bit_string, 1) == 1
key_encipherment = get_bit(bit_string, 2) == 1
data_encipherment = get_bit(bit_string, 3) == 1
key_agreement = get_bit(bit_string, 4) == 1
key_cert_sign = get_bit(bit_string, 5) == 1
crl_sign = get_bit(bit_string, 6) == 1
encipher_only = get_bit(bit_string, 7) == 1
decipher_only = get_bit(bit_string, 8) == 1
return x509.KeyUsage(
digital_signature,
content_commitment,
key_encipherment,
data_encipherment,
key_agreement,
key_cert_sign,
crl_sign,
encipher_only,
decipher_only
)
def _decode_general_names_extension(backend, gns):
gns = backend._ffi.cast("GENERAL_NAMES *", gns)
gns = backend._ffi.gc(gns, backend._lib.GENERAL_NAMES_free)
general_names = _decode_general_names(backend, gns)
return general_names
def _decode_subject_alt_name(backend, ext):
return x509.SubjectAlternativeName(
_decode_general_names_extension(backend, ext)
)
def _decode_issuer_alt_name(backend, ext):
return x509.IssuerAlternativeName(
_decode_general_names_extension(backend, ext)
)
def _decode_name_constraints(backend, nc):
nc = backend._ffi.cast("NAME_CONSTRAINTS *", nc)
nc = backend._ffi.gc(nc, backend._lib.NAME_CONSTRAINTS_free)
permitted = _decode_general_subtrees(backend, nc.permittedSubtrees)
excluded = _decode_general_subtrees(backend, nc.excludedSubtrees)
return x509.NameConstraints(
permitted_subtrees=permitted, excluded_subtrees=excluded
)
def _decode_general_subtrees(backend, stack_subtrees):
if stack_subtrees == backend._ffi.NULL:
return None
num = backend._lib.sk_GENERAL_SUBTREE_num(stack_subtrees)
subtrees = []
for i in range(num):
obj = backend._lib.sk_GENERAL_SUBTREE_value(stack_subtrees, i)
backend.openssl_assert(obj != backend._ffi.NULL)
name = _decode_general_name(backend, obj.base)
subtrees.append(name)
return subtrees
def _decode_policy_constraints(backend, pc):
pc = backend._ffi.cast("POLICY_CONSTRAINTS *", pc)
pc = backend._ffi.gc(pc, backend._lib.POLICY_CONSTRAINTS_free)
require_explicit_policy = _asn1_integer_to_int_or_none(
backend, pc.requireExplicitPolicy
)
inhibit_policy_mapping = _asn1_integer_to_int_or_none(
backend, pc.inhibitPolicyMapping
)
return x509.PolicyConstraints(
require_explicit_policy, inhibit_policy_mapping
)
def _decode_extended_key_usage(backend, sk):
sk = backend._ffi.cast("Cryptography_STACK_OF_ASN1_OBJECT *", sk)
sk = backend._ffi.gc(sk, backend._lib.sk_ASN1_OBJECT_free)
num = backend._lib.sk_ASN1_OBJECT_num(sk)
ekus = []
for i in range(num):
obj = backend._lib.sk_ASN1_OBJECT_value(sk, i)
backend.openssl_assert(obj != backend._ffi.NULL)
oid = x509.ObjectIdentifier(_obj2txt(backend, obj))
ekus.append(oid)
return x509.ExtendedKeyUsage(ekus)
_DISTPOINT_TYPE_FULLNAME = 0
_DISTPOINT_TYPE_RELATIVENAME = 1
def _decode_dist_points(backend, cdps):
cdps = backend._ffi.cast("Cryptography_STACK_OF_DIST_POINT *", cdps)
cdps = backend._ffi.gc(cdps, backend._lib.CRL_DIST_POINTS_free)
num = backend._lib.sk_DIST_POINT_num(cdps)
dist_points = []
for i in range(num):
full_name = None
relative_name = None
crl_issuer = None
reasons = None
cdp = backend._lib.sk_DIST_POINT_value(cdps, i)
if cdp.reasons != backend._ffi.NULL:
reasons = _decode_reasons(backend, cdp.reasons)
if cdp.CRLissuer != backend._ffi.NULL:
crl_issuer = _decode_general_names(backend, cdp.CRLissuer)
# Certificates may have a crl_issuer/reasons but no distribution
# point, so make sure it's not NULL.
if cdp.distpoint != backend._ffi.NULL:
full_name, relative_name = _decode_distpoint(
backend, cdp.distpoint
)
dist_points.append(
x509.DistributionPoint(
full_name, relative_name, reasons, crl_issuer
)
)
return dist_points
# ReasonFlags ::= BIT STRING {
# unused (0),
# keyCompromise (1),
# cACompromise (2),
# affiliationChanged (3),
# superseded (4),
# cessationOfOperation (5),
# certificateHold (6),
# privilegeWithdrawn (7),
# aACompromise (8) }
_REASON_BIT_MAPPING = {
1: x509.ReasonFlags.key_compromise,
2: x509.ReasonFlags.ca_compromise,
3: x509.ReasonFlags.affiliation_changed,
4: x509.ReasonFlags.superseded,
5: x509.ReasonFlags.cessation_of_operation,
6: x509.ReasonFlags.certificate_hold,
7: x509.ReasonFlags.privilege_withdrawn,
8: x509.ReasonFlags.aa_compromise,
}
def _decode_reasons(backend, reasons):
# Check each bit position defined by the ReasonFlags BIT STRING in RFC 5280
enum_reasons = []
for bit_position, reason in six.iteritems(_REASON_BIT_MAPPING):
if backend._lib.ASN1_BIT_STRING_get_bit(reasons, bit_position):
enum_reasons.append(reason)
return frozenset(enum_reasons)
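# Editor's note (illustrative sketch): _REASON_BIT_MAPPING follows the RFC 5280
# ReasonFlags bit numbering shown in the comment above. _demo_reasons_from_bits
# is hypothetical; it mirrors _decode_reasons using plain bit positions instead
# of an ASN1_BIT_STRING.
def _demo_reasons_from_bits(set_bits):
    return frozenset(
        reason for bit, reason in _REASON_BIT_MAPPING.items() if bit in set_bits
    )

# Bits 1 (keyCompromise) and 4 (superseded) set:
assert _demo_reasons_from_bits({1, 4}) == frozenset(
    [x509.ReasonFlags.key_compromise, x509.ReasonFlags.superseded]
)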
def _decode_distpoint(backend, distpoint):
if distpoint.type == _DISTPOINT_TYPE_FULLNAME:
full_name = _decode_general_names(backend, distpoint.name.fullname)
return full_name, None
# OpenSSL doesn't test for a specific relativename type;
# everything that isn't fullname is treated as relativename.
# Per RFC 5280:
#
# DistributionPointName ::= CHOICE {
# fullName [0] GeneralNames,
# nameRelativeToCRLIssuer [1] RelativeDistinguishedName }
rns = distpoint.name.relativename
rnum = backend._lib.sk_X509_NAME_ENTRY_num(rns)
attributes = set()
for i in range(rnum):
rn = backend._lib.sk_X509_NAME_ENTRY_value(
rns, i
)
backend.openssl_assert(rn != backend._ffi.NULL)
attributes.add(
_decode_x509_name_entry(backend, rn)
)
relative_name = x509.RelativeDistinguishedName(attributes)
return None, relative_name
def _decode_crl_distribution_points(backend, cdps):
dist_points = _decode_dist_points(backend, cdps)
return x509.CRLDistributionPoints(dist_points)
def _decode_freshest_crl(backend, cdps):
dist_points = _decode_dist_points(backend, cdps)
return x509.FreshestCRL(dist_points)
def _decode_inhibit_any_policy(backend, asn1_int):
asn1_int = backend._ffi.cast("ASN1_INTEGER *", asn1_int)
asn1_int = backend._ffi.gc(asn1_int, backend._lib.ASN1_INTEGER_free)
skip_certs = _asn1_integer_to_int(backend, asn1_int)
return x509.InhibitAnyPolicy(skip_certs)
def _decode_precert_signed_certificate_timestamps(backend, asn1_scts):
from cryptography.hazmat.backends.openssl.x509 import (
_SignedCertificateTimestamp
)
asn1_scts = backend._ffi.cast("Cryptography_STACK_OF_SCT *", asn1_scts)
asn1_scts = backend._ffi.gc(asn1_scts, backend._lib.SCT_LIST_free)
scts = []
for i in range(backend._lib.sk_SCT_num(asn1_scts)):
sct = backend._lib.sk_SCT_value(asn1_scts, i)
scts.append(_SignedCertificateTimestamp(backend, asn1_scts, sct))
return x509.PrecertificateSignedCertificateTimestamps(scts)
# CRLReason ::= ENUMERATED {
# unspecified (0),
# keyCompromise (1),
# cACompromise (2),
# affiliationChanged (3),
# superseded (4),
# cessationOfOperation (5),
# certificateHold (6),
# -- value 7 is not used
# removeFromCRL (8),
# privilegeWithdrawn (9),
# aACompromise (10) }
_CRL_ENTRY_REASON_CODE_TO_ENUM = {
0: x509.ReasonFlags.unspecified,
1: x509.ReasonFlags.key_compromise,
2: x509.ReasonFlags.ca_compromise,
3: x509.ReasonFlags.affiliation_changed,
4: x509.ReasonFlags.superseded,
5: x509.ReasonFlags.cessation_of_operation,
6: x509.ReasonFlags.certificate_hold,
8: x509.ReasonFlags.remove_from_crl,
9: x509.ReasonFlags.privilege_withdrawn,
10: x509.ReasonFlags.aa_compromise,
}
_CRL_ENTRY_REASON_ENUM_TO_CODE = {
x509.ReasonFlags.unspecified: 0,
x509.ReasonFlags.key_compromise: 1,
x509.ReasonFlags.ca_compromise: 2,
x509.ReasonFlags.affiliation_changed: 3,
x509.ReasonFlags.superseded: 4,
x509.ReasonFlags.cessation_of_operation: 5,
x509.ReasonFlags.certificate_hold: 6,
x509.ReasonFlags.remove_from_crl: 8,
x509.ReasonFlags.privilege_withdrawn: 9,
x509.ReasonFlags.aa_compromise: 10
}
def _decode_crl_reason(backend, enum):
enum = backend._ffi.cast("ASN1_ENUMERATED *", enum)
enum = backend._ffi.gc(enum, backend._lib.ASN1_ENUMERATED_free)
code = backend._lib.ASN1_ENUMERATED_get(enum)
try:
return x509.CRLReason(_CRL_ENTRY_REASON_CODE_TO_ENUM[code])
except KeyError:
raise ValueError("Unsupported reason code: {0}".format(code))
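# Editor's note (purely illustrative): reason code 7 is deliberately absent
# from the mapping above (RFC 5280 leaves that value unused), so a CRL entry
# carrying it falls through to the "Unsupported reason code" ValueError.
assert 7 not in _CRL_ENTRY_REASON_CODE_TO_ENUM
assert _CRL_ENTRY_REASON_CODE_TO_ENUM[8] is x509.ReasonFlags.remove_from_crl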
def _decode_invalidity_date(backend, inv_date):
generalized_time = backend._ffi.cast(
"ASN1_GENERALIZEDTIME *", inv_date
)
generalized_time = backend._ffi.gc(
generalized_time, backend._lib.ASN1_GENERALIZEDTIME_free
)
return x509.InvalidityDate(
_parse_asn1_generalized_time(backend, generalized_time)
)
def _decode_cert_issuer(backend, gns):
gns = backend._ffi.cast("GENERAL_NAMES *", gns)
gns = backend._ffi.gc(gns, backend._lib.GENERAL_NAMES_free)
general_names = _decode_general_names(backend, gns)
return x509.CertificateIssuer(general_names)
def _asn1_to_der(backend, asn1_type):
buf = backend._ffi.new("unsigned char **")
res = backend._lib.i2d_ASN1_TYPE(asn1_type, buf)
backend.openssl_assert(res >= 0)
backend.openssl_assert(buf[0] != backend._ffi.NULL)
buf = backend._ffi.gc(
buf, lambda buffer: backend._lib.OPENSSL_free(buffer[0])
)
return backend._ffi.buffer(buf[0], res)[:]
def _asn1_integer_to_int(backend, asn1_int):
bn = backend._lib.ASN1_INTEGER_to_BN(asn1_int, backend._ffi.NULL)
backend.openssl_assert(bn != backend._ffi.NULL)
bn = backend._ffi.gc(bn, backend._lib.BN_free)
return backend._bn_to_int(bn)
def _asn1_integer_to_int_or_none(backend, asn1_int):
if asn1_int == backend._ffi.NULL:
return None
else:
return _asn1_integer_to_int(backend, asn1_int)
def _asn1_string_to_bytes(backend, asn1_string):
return backend._ffi.buffer(asn1_string.data, asn1_string.length)[:]
def _asn1_string_to_ascii(backend, asn1_string):
return _asn1_string_to_bytes(backend, asn1_string).decode("ascii")
def _asn1_string_to_utf8(backend, asn1_string):
buf = backend._ffi.new("unsigned char **")
res = backend._lib.ASN1_STRING_to_UTF8(buf, asn1_string)
if res == -1:
raise ValueError(
"Unsupported ASN1 string type. Type: {0}".format(asn1_string.type)
)
backend.openssl_assert(buf[0] != backend._ffi.NULL)
buf = backend._ffi.gc(
buf, lambda buffer: backend._lib.OPENSSL_free(buffer[0])
)
return backend._ffi.buffer(buf[0], res)[:].decode('utf8')
def _parse_asn1_time(backend, asn1_time):
backend.openssl_assert(asn1_time != backend._ffi.NULL)
generalized_time = backend._lib.ASN1_TIME_to_generalizedtime(
asn1_time, backend._ffi.NULL
)
if generalized_time == backend._ffi.NULL:
raise ValueError(
"Couldn't parse ASN.1 time as generalizedtime {!r}".format(
_asn1_string_to_bytes(backend, asn1_time)
)
)
generalized_time = backend._ffi.gc(
generalized_time, backend._lib.ASN1_GENERALIZEDTIME_free
)
return _parse_asn1_generalized_time(backend, generalized_time)
def _parse_asn1_generalized_time(backend, generalized_time):
time = _asn1_string_to_ascii(
backend, backend._ffi.cast("ASN1_STRING *", generalized_time)
)
return datetime.datetime.strptime(time, "%Y%m%d%H%M%SZ")
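# Editor's note (purely illustrative): a worked example of the strptime format
# above. An ASN.1 GeneralizedTime such as "20190120160531Z" parses to a naive
# UTC datetime; this reuses the module-level datetime import.
assert datetime.datetime.strptime(
    "20190120160531Z", "%Y%m%d%H%M%SZ"
) == datetime.datetime(2019, 1, 20, 16, 5, 31)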
def _decode_nonce(backend, nonce):
nonce = backend._ffi.cast("ASN1_OCTET_STRING *", nonce)
nonce = backend._ffi.gc(nonce, backend._lib.ASN1_OCTET_STRING_free)
return x509.OCSPNonce(_asn1_string_to_bytes(backend, nonce))
_EXTENSION_HANDLERS_NO_SCT = {
ExtensionOID.BASIC_CONSTRAINTS: _decode_basic_constraints,
ExtensionOID.SUBJECT_KEY_IDENTIFIER: _decode_subject_key_identifier,
ExtensionOID.KEY_USAGE: _decode_key_usage,
ExtensionOID.SUBJECT_ALTERNATIVE_NAME: _decode_subject_alt_name,
ExtensionOID.EXTENDED_KEY_USAGE: _decode_extended_key_usage,
ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _decode_authority_key_identifier,
ExtensionOID.AUTHORITY_INFORMATION_ACCESS: (
_decode_authority_information_access
),
ExtensionOID.CERTIFICATE_POLICIES: _decode_certificate_policies,
ExtensionOID.CRL_DISTRIBUTION_POINTS: _decode_crl_distribution_points,
ExtensionOID.FRESHEST_CRL: _decode_freshest_crl,
ExtensionOID.OCSP_NO_CHECK: _decode_ocsp_no_check,
ExtensionOID.INHIBIT_ANY_POLICY: _decode_inhibit_any_policy,
ExtensionOID.ISSUER_ALTERNATIVE_NAME: _decode_issuer_alt_name,
ExtensionOID.NAME_CONSTRAINTS: _decode_name_constraints,
ExtensionOID.POLICY_CONSTRAINTS: _decode_policy_constraints,
}
_EXTENSION_HANDLERS = _EXTENSION_HANDLERS_NO_SCT.copy()
_EXTENSION_HANDLERS[
ExtensionOID.PRECERT_SIGNED_CERTIFICATE_TIMESTAMPS
] = _decode_precert_signed_certificate_timestamps
_REVOKED_EXTENSION_HANDLERS = {
CRLEntryExtensionOID.CRL_REASON: _decode_crl_reason,
CRLEntryExtensionOID.INVALIDITY_DATE: _decode_invalidity_date,
CRLEntryExtensionOID.CERTIFICATE_ISSUER: _decode_cert_issuer,
}
_CRL_EXTENSION_HANDLERS = {
ExtensionOID.CRL_NUMBER: _decode_crl_number,
ExtensionOID.DELTA_CRL_INDICATOR: _decode_delta_crl_indicator,
ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _decode_authority_key_identifier,
ExtensionOID.ISSUER_ALTERNATIVE_NAME: _decode_issuer_alt_name,
ExtensionOID.AUTHORITY_INFORMATION_ACCESS: (
_decode_authority_information_access
),
}
_OCSP_REQ_EXTENSION_HANDLERS = {
OCSPExtensionOID.NONCE: _decode_nonce,
}
_OCSP_BASICRESP_EXTENSION_HANDLERS = {
OCSPExtensionOID.NONCE: _decode_nonce,
}
_CERTIFICATE_EXTENSION_PARSER_NO_SCT = _X509ExtensionParser(
ext_count=lambda backend, x: backend._lib.X509_get_ext_count(x),
get_ext=lambda backend, x, i: backend._lib.X509_get_ext(x, i),
handlers=_EXTENSION_HANDLERS_NO_SCT
)
_CERTIFICATE_EXTENSION_PARSER = _X509ExtensionParser(
ext_count=lambda backend, x: backend._lib.X509_get_ext_count(x),
get_ext=lambda backend, x, i: backend._lib.X509_get_ext(x, i),
handlers=_EXTENSION_HANDLERS
)
_CSR_EXTENSION_PARSER = _X509ExtensionParser(
ext_count=lambda backend, x: backend._lib.sk_X509_EXTENSION_num(x),
get_ext=lambda backend, x, i: backend._lib.sk_X509_EXTENSION_value(x, i),
handlers=_EXTENSION_HANDLERS
)
_REVOKED_CERTIFICATE_EXTENSION_PARSER = _X509ExtensionParser(
ext_count=lambda backend, x: backend._lib.X509_REVOKED_get_ext_count(x),
get_ext=lambda backend, x, i: backend._lib.X509_REVOKED_get_ext(x, i),
handlers=_REVOKED_EXTENSION_HANDLERS,
)
_CRL_EXTENSION_PARSER = _X509ExtensionParser(
ext_count=lambda backend, x: backend._lib.X509_CRL_get_ext_count(x),
get_ext=lambda backend, x, i: backend._lib.X509_CRL_get_ext(x, i),
handlers=_CRL_EXTENSION_HANDLERS,
)
_OCSP_REQ_EXT_PARSER = _X509ExtensionParser(
ext_count=lambda backend, x: backend._lib.OCSP_REQUEST_get_ext_count(x),
get_ext=lambda backend, x, i: backend._lib.OCSP_REQUEST_get_ext(x, i),
handlers=_OCSP_REQ_EXTENSION_HANDLERS,
)
_OCSP_BASICRESP_EXT_PARSER = _X509ExtensionParser(
ext_count=lambda backend, x: backend._lib.OCSP_BASICRESP_get_ext_count(x),
get_ext=lambda backend, x, i: backend._lib.OCSP_BASICRESP_get_ext(x, i),
handlers=_OCSP_BASICRESP_EXTENSION_HANDLERS,
)


@@ -0,0 +1,280 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import dh
def _dh_params_dup(dh_cdata, backend):
lib = backend._lib
ffi = backend._ffi
param_cdata = lib.DHparams_dup(dh_cdata)
backend.openssl_assert(param_cdata != ffi.NULL)
param_cdata = ffi.gc(param_cdata, lib.DH_free)
if lib.CRYPTOGRAPHY_OPENSSL_LESS_THAN_102:
# In OpenSSL versions < 1.0.2 and in LibreSSL, DHparams_dup doesn't copy q
q = ffi.new("BIGNUM **")
lib.DH_get0_pqg(dh_cdata, ffi.NULL, q, ffi.NULL)
q_dup = lib.BN_dup(q[0])
res = lib.DH_set0_pqg(param_cdata, ffi.NULL, q_dup, ffi.NULL)
backend.openssl_assert(res == 1)
return param_cdata
def _dh_cdata_to_parameters(dh_cdata, backend):
param_cdata = _dh_params_dup(dh_cdata, backend)
return _DHParameters(backend, param_cdata)
@utils.register_interface(dh.DHParametersWithSerialization)
class _DHParameters(object):
def __init__(self, backend, dh_cdata):
self._backend = backend
self._dh_cdata = dh_cdata
def parameter_numbers(self):
p = self._backend._ffi.new("BIGNUM **")
g = self._backend._ffi.new("BIGNUM **")
q = self._backend._ffi.new("BIGNUM **")
self._backend._lib.DH_get0_pqg(self._dh_cdata, p, q, g)
self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
self._backend.openssl_assert(g[0] != self._backend._ffi.NULL)
if q[0] == self._backend._ffi.NULL:
q_val = None
else:
q_val = self._backend._bn_to_int(q[0])
return dh.DHParameterNumbers(
p=self._backend._bn_to_int(p[0]),
g=self._backend._bn_to_int(g[0]),
q=q_val
)
def generate_private_key(self):
return self._backend.generate_dh_private_key(self)
def parameter_bytes(self, encoding, format):
if format is not serialization.ParameterFormat.PKCS3:
raise ValueError(
"Only PKCS3 serialization is supported"
)
if not self._backend._lib.Cryptography_HAS_EVP_PKEY_DHX:
q = self._backend._ffi.new("BIGNUM **")
self._backend._lib.DH_get0_pqg(self._dh_cdata,
self._backend._ffi.NULL,
q,
self._backend._ffi.NULL)
if q[0] != self._backend._ffi.NULL:
raise UnsupportedAlgorithm(
"DH X9.42 serialization is not supported",
_Reasons.UNSUPPORTED_SERIALIZATION)
return self._backend._parameter_bytes(
encoding,
format,
self._dh_cdata
)
def _handle_dh_compute_key_error(errors, backend):
lib = backend._lib
backend.openssl_assert(
errors[0]._lib_reason_match(
lib.ERR_LIB_DH, lib.DH_R_INVALID_PUBKEY
)
)
raise ValueError("Public key value is invalid for this exchange.")
def _get_dh_num_bits(backend, dh_cdata):
p = backend._ffi.new("BIGNUM **")
backend._lib.DH_get0_pqg(dh_cdata, p,
backend._ffi.NULL,
backend._ffi.NULL)
backend.openssl_assert(p[0] != backend._ffi.NULL)
return backend._lib.BN_num_bits(p[0])
@utils.register_interface(dh.DHPrivateKeyWithSerialization)
class _DHPrivateKey(object):
def __init__(self, backend, dh_cdata, evp_pkey):
self._backend = backend
self._dh_cdata = dh_cdata
self._evp_pkey = evp_pkey
self._key_size_bytes = self._backend._lib.DH_size(dh_cdata)
@property
def key_size(self):
return _get_dh_num_bits(self._backend, self._dh_cdata)
def private_numbers(self):
p = self._backend._ffi.new("BIGNUM **")
g = self._backend._ffi.new("BIGNUM **")
q = self._backend._ffi.new("BIGNUM **")
self._backend._lib.DH_get0_pqg(self._dh_cdata, p, q, g)
self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
self._backend.openssl_assert(g[0] != self._backend._ffi.NULL)
if q[0] == self._backend._ffi.NULL:
q_val = None
else:
q_val = self._backend._bn_to_int(q[0])
pub_key = self._backend._ffi.new("BIGNUM **")
priv_key = self._backend._ffi.new("BIGNUM **")
self._backend._lib.DH_get0_key(self._dh_cdata, pub_key, priv_key)
self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
self._backend.openssl_assert(priv_key[0] != self._backend._ffi.NULL)
return dh.DHPrivateNumbers(
public_numbers=dh.DHPublicNumbers(
parameter_numbers=dh.DHParameterNumbers(
p=self._backend._bn_to_int(p[0]),
g=self._backend._bn_to_int(g[0]),
q=q_val
),
y=self._backend._bn_to_int(pub_key[0])
),
x=self._backend._bn_to_int(priv_key[0])
)
def exchange(self, peer_public_key):
buf = self._backend._ffi.new("unsigned char[]", self._key_size_bytes)
pub_key = self._backend._ffi.new("BIGNUM **")
self._backend._lib.DH_get0_key(peer_public_key._dh_cdata, pub_key,
self._backend._ffi.NULL)
self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
res = self._backend._lib.DH_compute_key(
buf,
pub_key[0],
self._dh_cdata
)
if res == -1:
errors = self._backend._consume_errors()
return _handle_dh_compute_key_error(errors, self._backend)
else:
self._backend.openssl_assert(res >= 1)
key = self._backend._ffi.buffer(buf)[:res]
pad = self._key_size_bytes - len(key)
if pad > 0:
key = (b"\x00" * pad) + key
return key
def public_key(self):
dh_cdata = _dh_params_dup(self._dh_cdata, self._backend)
pub_key = self._backend._ffi.new("BIGNUM **")
self._backend._lib.DH_get0_key(self._dh_cdata,
pub_key, self._backend._ffi.NULL)
self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
pub_key_dup = self._backend._lib.BN_dup(pub_key[0])
self._backend.openssl_assert(pub_key_dup != self._backend._ffi.NULL)
res = self._backend._lib.DH_set0_key(dh_cdata,
pub_key_dup,
self._backend._ffi.NULL)
self._backend.openssl_assert(res == 1)
evp_pkey = self._backend._dh_cdata_to_evp_pkey(dh_cdata)
return _DHPublicKey(self._backend, dh_cdata, evp_pkey)
def parameters(self):
return _dh_cdata_to_parameters(self._dh_cdata, self._backend)
def private_bytes(self, encoding, format, encryption_algorithm):
if format is not serialization.PrivateFormat.PKCS8:
raise ValueError(
"DH private keys support only PKCS8 serialization"
)
if not self._backend._lib.Cryptography_HAS_EVP_PKEY_DHX:
q = self._backend._ffi.new("BIGNUM **")
self._backend._lib.DH_get0_pqg(self._dh_cdata,
self._backend._ffi.NULL,
q,
self._backend._ffi.NULL)
if q[0] != self._backend._ffi.NULL:
raise UnsupportedAlgorithm(
"DH X9.42 serialization is not supported",
_Reasons.UNSUPPORTED_SERIALIZATION)
return self._backend._private_key_bytes(
encoding,
format,
encryption_algorithm,
self._evp_pkey,
self._dh_cdata
)
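# Editor's note (illustrative sketch, not part of the upstream module):
# DH_compute_key can return a shared secret shorter than DH_size() when the
# value has leading zero bytes, which is why exchange() above left-pads the
# result back to the fixed key size. _demo_pad is hypothetical and only mirrors
# that padding step.
def _demo_pad(shared, key_size_bytes):
    pad = key_size_bytes - len(shared)
    if pad > 0:
        shared = (b"\x00" * pad) + shared
    return shared

assert _demo_pad(b"\x17" * 255, 256) == b"\x00" + (b"\x17" * 255)
assert len(_demo_pad(b"\x17" * 255, 256)) == 256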
@utils.register_interface(dh.DHPublicKeyWithSerialization)
class _DHPublicKey(object):
def __init__(self, backend, dh_cdata, evp_pkey):
self._backend = backend
self._dh_cdata = dh_cdata
self._evp_pkey = evp_pkey
self._key_size_bits = _get_dh_num_bits(self._backend, self._dh_cdata)
@property
def key_size(self):
return self._key_size_bits
def public_numbers(self):
p = self._backend._ffi.new("BIGNUM **")
g = self._backend._ffi.new("BIGNUM **")
q = self._backend._ffi.new("BIGNUM **")
self._backend._lib.DH_get0_pqg(self._dh_cdata, p, q, g)
self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
self._backend.openssl_assert(g[0] != self._backend._ffi.NULL)
if q[0] == self._backend._ffi.NULL:
q_val = None
else:
q_val = self._backend._bn_to_int(q[0])
pub_key = self._backend._ffi.new("BIGNUM **")
self._backend._lib.DH_get0_key(self._dh_cdata,
pub_key, self._backend._ffi.NULL)
self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
return dh.DHPublicNumbers(
parameter_numbers=dh.DHParameterNumbers(
p=self._backend._bn_to_int(p[0]),
g=self._backend._bn_to_int(g[0]),
q=q_val
),
y=self._backend._bn_to_int(pub_key[0])
)
def parameters(self):
return _dh_cdata_to_parameters(self._dh_cdata, self._backend)
def public_bytes(self, encoding, format):
if format is not serialization.PublicFormat.SubjectPublicKeyInfo:
raise ValueError(
"DH public keys support only "
"SubjectPublicKeyInfo serialization"
)
if not self._backend._lib.Cryptography_HAS_EVP_PKEY_DHX:
q = self._backend._ffi.new("BIGNUM **")
self._backend._lib.DH_get0_pqg(self._dh_cdata,
self._backend._ffi.NULL,
q,
self._backend._ffi.NULL)
if q[0] != self._backend._ffi.NULL:
raise UnsupportedAlgorithm(
"DH X9.42 serialization is not supported",
_Reasons.UNSUPPORTED_SERIALIZATION)
return self._backend._public_key_bytes(
encoding,
format,
self,
self._evp_pkey,
None
)


@@ -6,24 +6,42 @@ from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.exceptions import InvalidSignature
from cryptography.hazmat.backends.openssl.utils import _truncate_digest
from cryptography.hazmat.backends.openssl.utils import (
_calculate_digest_and_algorithm, _check_not_prehashed,
_warn_sign_verify_deprecated
)
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import (
AsymmetricSignatureContext, AsymmetricVerificationContext, dsa
)
def _truncate_digest_for_dsa(dsa_cdata, digest, backend):
"""
This function truncates digests that are longer than a given DSA
key's length so they can be signed. OpenSSL does this for us in
1.0.0c+ and it isn't needed in 0.9.8, but that leaves us with three
releases (1.0.0, 1.0.0a, and 1.0.0b) where this is a problem. This
truncation is not required in 0.9.8 because DSA is limited to SHA-1.
"""
def _dsa_sig_sign(backend, private_key, data):
sig_buf_len = backend._lib.DSA_size(private_key._dsa_cdata)
sig_buf = backend._ffi.new("unsigned char[]", sig_buf_len)
buflen = backend._ffi.new("unsigned int *")
order_bits = backend._lib.BN_num_bits(dsa_cdata.q)
return _truncate_digest(digest, order_bits)
# The first parameter passed to DSA_sign is unused by OpenSSL but
# must be an integer.
res = backend._lib.DSA_sign(
0, data, len(data), sig_buf, buflen, private_key._dsa_cdata
)
backend.openssl_assert(res == 1)
backend.openssl_assert(buflen[0])
return backend._ffi.buffer(sig_buf)[:buflen[0]]
def _dsa_sig_verify(backend, public_key, signature, data):
# The first parameter passed to DSA_verify is unused by OpenSSL but
# must be an integer.
res = backend._lib.DSA_verify(
0, data, len(data), signature, len(signature), public_key._dsa_cdata
)
if res != 1:
backend._consume_errors()
raise InvalidSignature
@utils.register_interface(AsymmetricVerificationContext)
@@ -42,20 +60,10 @@ class _DSAVerificationContext(object):
def verify(self):
data_to_verify = self._hash_ctx.finalize()
data_to_verify = _truncate_digest_for_dsa(
self._public_key._dsa_cdata, data_to_verify, self._backend
_dsa_sig_verify(
self._backend, self._public_key, self._signature, data_to_verify
)
# The first parameter passed to DSA_verify is unused by OpenSSL but
# must be an integer.
res = self._backend._lib.DSA_verify(
0, data_to_verify, len(data_to_verify), self._signature,
len(self._signature), self._public_key._dsa_cdata)
if res != 1:
self._backend._consume_errors()
raise InvalidSignature
@utils.register_interface(AsymmetricSignatureContext)
class _DSASignatureContext(object):
@@ -70,22 +78,7 @@ class _DSASignatureContext(object):
def finalize(self):
data_to_sign = self._hash_ctx.finalize()
data_to_sign = _truncate_digest_for_dsa(
self._private_key._dsa_cdata, data_to_sign, self._backend
)
sig_buf_len = self._backend._lib.DSA_size(self._private_key._dsa_cdata)
sig_buf = self._backend._ffi.new("unsigned char[]", sig_buf_len)
buflen = self._backend._ffi.new("unsigned int *")
# The first parameter passed to DSA_sign is unused by OpenSSL but
# must be an integer.
res = self._backend._lib.DSA_sign(
0, data_to_sign, len(data_to_sign), sig_buf,
buflen, self._private_key._dsa_cdata)
self._backend.openssl_assert(res == 1)
self._backend.openssl_assert(buflen[0])
return self._backend._ffi.buffer(sig_buf)[:buflen[0]]
return _dsa_sig_sign(self._backend, self._private_key, data_to_sign)
@utils.register_interface(dsa.DSAParametersWithNumbers)
@@ -95,10 +88,17 @@ class _DSAParameters(object):
self._dsa_cdata = dsa_cdata
def parameter_numbers(self):
p = self._backend._ffi.new("BIGNUM **")
q = self._backend._ffi.new("BIGNUM **")
g = self._backend._ffi.new("BIGNUM **")
self._backend._lib.DSA_get0_pqg(self._dsa_cdata, p, q, g)
self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
self._backend.openssl_assert(q[0] != self._backend._ffi.NULL)
self._backend.openssl_assert(g[0] != self._backend._ffi.NULL)
return dsa.DSAParameterNumbers(
p=self._backend._bn_to_int(self._dsa_cdata.p),
q=self._backend._bn_to_int(self._dsa_cdata.q),
g=self._backend._bn_to_int(self._dsa_cdata.g)
p=self._backend._bn_to_int(p[0]),
q=self._backend._bn_to_int(q[0]),
g=self._backend._bn_to_int(g[0])
)
def generate_private_key(self):
@@ -111,48 +111,71 @@ class _DSAPrivateKey(object):
self._backend = backend
self._dsa_cdata = dsa_cdata
self._evp_pkey = evp_pkey
self._key_size = self._backend._lib.BN_num_bits(self._dsa_cdata.p)
p = self._backend._ffi.new("BIGNUM **")
self._backend._lib.DSA_get0_pqg(
dsa_cdata, p, self._backend._ffi.NULL, self._backend._ffi.NULL
)
self._backend.openssl_assert(p[0] != backend._ffi.NULL)
self._key_size = self._backend._lib.BN_num_bits(p[0])
key_size = utils.read_only_property("_key_size")
def signer(self, signature_algorithm):
_warn_sign_verify_deprecated()
_check_not_prehashed(signature_algorithm)
return _DSASignatureContext(self._backend, self, signature_algorithm)
def private_numbers(self):
p = self._backend._ffi.new("BIGNUM **")
q = self._backend._ffi.new("BIGNUM **")
g = self._backend._ffi.new("BIGNUM **")
pub_key = self._backend._ffi.new("BIGNUM **")
priv_key = self._backend._ffi.new("BIGNUM **")
self._backend._lib.DSA_get0_pqg(self._dsa_cdata, p, q, g)
self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
self._backend.openssl_assert(q[0] != self._backend._ffi.NULL)
self._backend.openssl_assert(g[0] != self._backend._ffi.NULL)
self._backend._lib.DSA_get0_key(self._dsa_cdata, pub_key, priv_key)
self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
self._backend.openssl_assert(priv_key[0] != self._backend._ffi.NULL)
return dsa.DSAPrivateNumbers(
public_numbers=dsa.DSAPublicNumbers(
parameter_numbers=dsa.DSAParameterNumbers(
p=self._backend._bn_to_int(self._dsa_cdata.p),
q=self._backend._bn_to_int(self._dsa_cdata.q),
g=self._backend._bn_to_int(self._dsa_cdata.g)
p=self._backend._bn_to_int(p[0]),
q=self._backend._bn_to_int(q[0]),
g=self._backend._bn_to_int(g[0])
),
y=self._backend._bn_to_int(self._dsa_cdata.pub_key)
y=self._backend._bn_to_int(pub_key[0])
),
x=self._backend._bn_to_int(self._dsa_cdata.priv_key)
x=self._backend._bn_to_int(priv_key[0])
)
def public_key(self):
dsa_cdata = self._backend._lib.DSA_new()
dsa_cdata = self._backend._lib.DSAparams_dup(self._dsa_cdata)
self._backend.openssl_assert(dsa_cdata != self._backend._ffi.NULL)
dsa_cdata = self._backend._ffi.gc(
dsa_cdata, self._backend._lib.DSA_free
)
dsa_cdata.p = self._backend._lib.BN_dup(self._dsa_cdata.p)
dsa_cdata.q = self._backend._lib.BN_dup(self._dsa_cdata.q)
dsa_cdata.g = self._backend._lib.BN_dup(self._dsa_cdata.g)
dsa_cdata.pub_key = self._backend._lib.BN_dup(self._dsa_cdata.pub_key)
pub_key = self._backend._ffi.new("BIGNUM **")
self._backend._lib.DSA_get0_key(
self._dsa_cdata, pub_key, self._backend._ffi.NULL
)
self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
pub_key_dup = self._backend._lib.BN_dup(pub_key[0])
res = self._backend._lib.DSA_set0_key(
dsa_cdata, pub_key_dup, self._backend._ffi.NULL
)
self._backend.openssl_assert(res == 1)
evp_pkey = self._backend._dsa_cdata_to_evp_pkey(dsa_cdata)
return _DSAPublicKey(self._backend, dsa_cdata, evp_pkey)
def parameters(self):
dsa_cdata = self._backend._lib.DSA_new()
dsa_cdata = self._backend._lib.DSAparams_dup(self._dsa_cdata)
self._backend.openssl_assert(dsa_cdata != self._backend._ffi.NULL)
dsa_cdata = self._backend._ffi.gc(
dsa_cdata, self._backend._lib.DSA_free
)
dsa_cdata.p = self._backend._lib.BN_dup(self._dsa_cdata.p)
dsa_cdata.q = self._backend._lib.BN_dup(self._dsa_cdata.q)
dsa_cdata.g = self._backend._lib.BN_dup(self._dsa_cdata.g)
return _DSAParameters(self._backend, dsa_cdata)
def private_bytes(self, encoding, format, encryption_algorithm):
@@ -164,6 +187,12 @@ class _DSAPrivateKey(object):
self._dsa_cdata
)
def sign(self, data, algorithm):
data, algorithm = _calculate_digest_and_algorithm(
self._backend, data, algorithm
)
return _dsa_sig_sign(self._backend, self, data)
@utils.register_interface(dsa.DSAPublicKeyWithSerialization)
class _DSAPublicKey(object):
@@ -171,37 +200,52 @@ class _DSAPublicKey(object):
self._backend = backend
self._dsa_cdata = dsa_cdata
self._evp_pkey = evp_pkey
self._key_size = self._backend._lib.BN_num_bits(self._dsa_cdata.p)
p = self._backend._ffi.new("BIGNUM **")
self._backend._lib.DSA_get0_pqg(
dsa_cdata, p, self._backend._ffi.NULL, self._backend._ffi.NULL
)
self._backend.openssl_assert(p[0] != backend._ffi.NULL)
self._key_size = self._backend._lib.BN_num_bits(p[0])
key_size = utils.read_only_property("_key_size")
def verifier(self, signature, signature_algorithm):
_warn_sign_verify_deprecated()
if not isinstance(signature, bytes):
raise TypeError("signature must be bytes.")
_check_not_prehashed(signature_algorithm)
return _DSAVerificationContext(
self._backend, self, signature, signature_algorithm
)
def public_numbers(self):
p = self._backend._ffi.new("BIGNUM **")
q = self._backend._ffi.new("BIGNUM **")
g = self._backend._ffi.new("BIGNUM **")
pub_key = self._backend._ffi.new("BIGNUM **")
self._backend._lib.DSA_get0_pqg(self._dsa_cdata, p, q, g)
self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
self._backend.openssl_assert(q[0] != self._backend._ffi.NULL)
self._backend.openssl_assert(g[0] != self._backend._ffi.NULL)
self._backend._lib.DSA_get0_key(
self._dsa_cdata, pub_key, self._backend._ffi.NULL
)
self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
return dsa.DSAPublicNumbers(
parameter_numbers=dsa.DSAParameterNumbers(
p=self._backend._bn_to_int(self._dsa_cdata.p),
q=self._backend._bn_to_int(self._dsa_cdata.q),
g=self._backend._bn_to_int(self._dsa_cdata.g)
p=self._backend._bn_to_int(p[0]),
q=self._backend._bn_to_int(q[0]),
g=self._backend._bn_to_int(g[0])
),
y=self._backend._bn_to_int(self._dsa_cdata.pub_key)
y=self._backend._bn_to_int(pub_key[0])
)
def parameters(self):
dsa_cdata = self._backend._lib.DSA_new()
self._backend.openssl_assert(dsa_cdata != self._backend._ffi.NULL)
dsa_cdata = self._backend._lib.DSAparams_dup(self._dsa_cdata)
dsa_cdata = self._backend._ffi.gc(
dsa_cdata, self._backend._lib.DSA_free
)
dsa_cdata.p = self._backend._lib.BN_dup(self._dsa_cdata.p)
dsa_cdata.q = self._backend._lib.BN_dup(self._dsa_cdata.q)
dsa_cdata.g = self._backend._lib.BN_dup(self._dsa_cdata.g)
return _DSAParameters(self._backend, dsa_cdata)
def public_bytes(self, encoding, format):
@@ -213,6 +257,13 @@ class _DSAPublicKey(object):
return self._backend._public_key_bytes(
encoding,
format,
self,
self._evp_pkey,
None
)
def verify(self, signature, data, algorithm):
data, algorithm = _calculate_digest_and_algorithm(
self._backend, data, algorithm
)
return _dsa_sig_verify(self._backend, self, signature, data)


@@ -8,36 +8,21 @@ from cryptography import utils
from cryptography.exceptions import (
InvalidSignature, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.backends.openssl.utils import _truncate_digest
from cryptography.hazmat.backends.openssl.utils import (
_calculate_digest_and_algorithm, _check_not_prehashed,
_warn_sign_verify_deprecated
)
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import (
AsymmetricSignatureContext, AsymmetricVerificationContext, ec
)
def _truncate_digest_for_ecdsa(ec_key_cdata, digest, backend):
"""
This function truncates digests that are longer than a given elliptic
curve key's length so they can be signed. Since elliptic curve keys are
much shorter than RSA keys, many digests (e.g. SHA-512) may require
truncation.
"""
_lib = backend._lib
_ffi = backend._ffi
group = _lib.EC_KEY_get0_group(ec_key_cdata)
with backend._tmp_bn_ctx() as bn_ctx:
order = _lib.BN_CTX_get(bn_ctx)
backend.openssl_assert(order != _ffi.NULL)
res = _lib.EC_GROUP_get_order(group, order, bn_ctx)
backend.openssl_assert(res == 1)
order_bits = _lib.BN_num_bits(order)
return _truncate_digest(digest, order_bits)
def _check_signature_algorithm(signature_algorithm):
if not isinstance(signature_algorithm, ec.ECDSA):
raise UnsupportedAlgorithm(
"Unsupported elliptic curve signature algorithm.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
def _ec_key_curve_sn(backend, ec_key):
@@ -82,6 +67,28 @@ def _sn_to_elliptic_curve(backend, sn):
)
def _ecdsa_sig_sign(backend, private_key, data):
max_size = backend._lib.ECDSA_size(private_key._ec_key)
backend.openssl_assert(max_size > 0)
sigbuf = backend._ffi.new("unsigned char[]", max_size)
siglen_ptr = backend._ffi.new("unsigned int[]", 1)
res = backend._lib.ECDSA_sign(
0, data, len(data), sigbuf, siglen_ptr, private_key._ec_key
)
backend.openssl_assert(res == 1)
return backend._ffi.buffer(sigbuf)[:siglen_ptr[0]]
def _ecdsa_sig_verify(backend, public_key, signature, data):
res = backend._lib.ECDSA_verify(
0, data, len(data), signature, len(signature), public_key._ec_key
)
if res != 1:
backend._consume_errors()
raise InvalidSignature
@utils.register_interface(AsymmetricSignatureContext)
class _ECDSASignatureContext(object):
def __init__(self, backend, private_key, algorithm):
@@ -93,27 +100,9 @@ class _ECDSASignatureContext(object):
self._digest.update(data)
def finalize(self):
ec_key = self._private_key._ec_key
digest = self._digest.finalize()
digest = _truncate_digest_for_ecdsa(ec_key, digest, self._backend)
max_size = self._backend._lib.ECDSA_size(ec_key)
self._backend.openssl_assert(max_size > 0)
sigbuf = self._backend._ffi.new("char[]", max_size)
siglen_ptr = self._backend._ffi.new("unsigned int[]", 1)
res = self._backend._lib.ECDSA_sign(
0,
digest,
len(digest),
sigbuf,
siglen_ptr,
ec_key
)
self._backend.openssl_assert(res == 1)
return self._backend._ffi.buffer(sigbuf)[:siglen_ptr[0]]
return _ecdsa_sig_sign(self._backend, self._private_key, digest)
@utils.register_interface(AsymmetricVerificationContext)
@@ -128,24 +117,10 @@ class _ECDSAVerificationContext(object):
self._digest.update(data)
def verify(self):
ec_key = self._public_key._ec_key
digest = self._digest.finalize()
digest = _truncate_digest_for_ecdsa(ec_key, digest, self._backend)
res = self._backend._lib.ECDSA_verify(
0,
digest,
len(digest),
self._signature,
len(self._signature),
ec_key
_ecdsa_sig_verify(
self._backend, self._public_key, self._signature, digest
)
if res != 1:
self._backend._consume_errors()
raise InvalidSignature
return True
@utils.register_interface(ec.EllipticCurvePrivateKeyWithSerialization)
@@ -161,15 +136,17 @@ class _EllipticCurvePrivateKey(object):
curve = utils.read_only_property("_curve")
@property
def key_size(self):
return self.curve.key_size
def signer(self, signature_algorithm):
if isinstance(signature_algorithm, ec.ECDSA):
return _ECDSASignatureContext(
self._backend, self, signature_algorithm.algorithm
)
else:
raise UnsupportedAlgorithm(
"Unsupported elliptic curve signature algorithm.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
_warn_sign_verify_deprecated()
_check_signature_algorithm(signature_algorithm)
_check_not_prehashed(signature_algorithm.algorithm)
return _ECDSASignatureContext(
self._backend, self, signature_algorithm.algorithm
)
def exchange(self, algorithm, peer_public_key):
if not (
@@ -240,6 +217,13 @@ class _EllipticCurvePrivateKey(object):
self._ec_key
)
def sign(self, data, signature_algorithm):
_check_signature_algorithm(signature_algorithm)
data, algorithm = _calculate_digest_and_algorithm(
self._backend, data, signature_algorithm._algorithm
)
return _ecdsa_sig_sign(self._backend, self, data)
@utils.register_interface(ec.EllipticCurvePublicKeyWithSerialization)
class _EllipticCurvePublicKey(object):
@@ -254,22 +238,24 @@ class _EllipticCurvePublicKey(object):
curve = utils.read_only_property("_curve")
@property
def key_size(self):
return self.curve.key_size
def verifier(self, signature, signature_algorithm):
_warn_sign_verify_deprecated()
if not isinstance(signature, bytes):
raise TypeError("signature must be bytes.")
if isinstance(signature_algorithm, ec.ECDSA):
return _ECDSAVerificationContext(
self._backend, self, signature, signature_algorithm.algorithm
)
else:
raise UnsupportedAlgorithm(
"Unsupported elliptic curve signature algorithm.",
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
_check_signature_algorithm(signature_algorithm)
_check_not_prehashed(signature_algorithm.algorithm)
return _ECDSAVerificationContext(
self._backend, self, signature, signature_algorithm.algorithm
)
def public_numbers(self):
set_func, get_func, group = (
self._backend._ec_key_determine_group_get_set_funcs(self._ec_key)
get_func, group = (
self._backend._ec_key_determine_group_get_func(self._ec_key)
)
point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key)
self._backend.openssl_assert(point != self._backend._ffi.NULL)
@@ -299,6 +285,14 @@ class _EllipticCurvePublicKey(object):
return self._backend._public_key_bytes(
encoding,
format,
self,
self._evp_pkey,
None
)
def verify(self, signature, data, signature_algorithm):
_check_signature_algorithm(signature_algorithm)
data, algorithm = _calculate_digest_and_algorithm(
self._backend, data, signature_algorithm._algorithm
)
_ecdsa_sig_verify(self._backend, self, signature, data)


@@ -0,0 +1,620 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import calendar
import ipaddress
import six
from cryptography import utils, x509
from cryptography.hazmat.backends.openssl.decode_asn1 import (
_CRL_ENTRY_REASON_ENUM_TO_CODE, _DISTPOINT_TYPE_FULLNAME,
_DISTPOINT_TYPE_RELATIVENAME
)
from cryptography.x509.name import _ASN1Type
from cryptography.x509.oid import (
CRLEntryExtensionOID, ExtensionOID, OCSPExtensionOID,
)
def _encode_asn1_int(backend, x):
"""
Converts a Python integer to an ASN1_INTEGER. The returned ASN1_INTEGER
will not be garbage collected (to support adding it to structs that take
ownership of the object). Be sure to register it for GC if it will be
discarded after use.
"""
# Convert Python integer to OpenSSL "bignum" in case value exceeds
# machine's native integer limits (note: `int_to_bn` doesn't automatically
# GC).
i = backend._int_to_bn(x)
i = backend._ffi.gc(i, backend._lib.BN_free)
# Wrap in an ASN.1 integer. Don't GC -- as documented.
i = backend._lib.BN_to_ASN1_INTEGER(i, backend._ffi.NULL)
backend.openssl_assert(i != backend._ffi.NULL)
return i
def _encode_asn1_int_gc(backend, x):
i = _encode_asn1_int(backend, x)
i = backend._ffi.gc(i, backend._lib.ASN1_INTEGER_free)
return i
def _encode_asn1_str(backend, data):
"""
Create an ASN1_OCTET_STRING from a Python byte string.
"""
s = backend._lib.ASN1_OCTET_STRING_new()
res = backend._lib.ASN1_OCTET_STRING_set(s, data, len(data))
backend.openssl_assert(res == 1)
return s
def _encode_asn1_utf8_str(backend, string):
"""
Create an ASN1_UTF8STRING from a Python unicode string.
This object will be an ASN1_STRING with UTF8 type in OpenSSL and
can be decoded with ASN1_STRING_to_UTF8.
"""
s = backend._lib.ASN1_UTF8STRING_new()
res = backend._lib.ASN1_STRING_set(
s, string.encode("utf8"), len(string.encode("utf8"))
)
backend.openssl_assert(res == 1)
return s
def _encode_asn1_str_gc(backend, data):
s = _encode_asn1_str(backend, data)
s = backend._ffi.gc(s, backend._lib.ASN1_OCTET_STRING_free)
return s
def _encode_inhibit_any_policy(backend, inhibit_any_policy):
return _encode_asn1_int_gc(backend, inhibit_any_policy.skip_certs)
def _encode_name(backend, name):
"""
The X509_NAME created will not be gc'd. Use _encode_name_gc if needed.
"""
subject = backend._lib.X509_NAME_new()
for rdn in name.rdns:
set_flag = 0 # 0 creates a new RDN; -1 adds the entry to the previous RDN
for attribute in rdn:
name_entry = _encode_name_entry(backend, attribute)
# X509_NAME_add_entry dups the object so we need to gc this copy
name_entry = backend._ffi.gc(
name_entry, backend._lib.X509_NAME_ENTRY_free
)
res = backend._lib.X509_NAME_add_entry(
subject, name_entry, -1, set_flag)
backend.openssl_assert(res == 1)
set_flag = -1
return subject
def _encode_name_gc(backend, attributes):
subject = _encode_name(backend, attributes)
subject = backend._ffi.gc(subject, backend._lib.X509_NAME_free)
return subject
def _encode_sk_name_entry(backend, attributes):
"""
The sk_X509_NAME_ENTRY created will not be gc'd.
"""
stack = backend._lib.sk_X509_NAME_ENTRY_new_null()
for attribute in attributes:
name_entry = _encode_name_entry(backend, attribute)
res = backend._lib.sk_X509_NAME_ENTRY_push(stack, name_entry)
backend.openssl_assert(res == 1)
return stack
def _encode_name_entry(backend, attribute):
if attribute._type is _ASN1Type.BMPString:
value = attribute.value.encode('utf_16_be')
else:
value = attribute.value.encode('utf8')
obj = _txt2obj_gc(backend, attribute.oid.dotted_string)
name_entry = backend._lib.X509_NAME_ENTRY_create_by_OBJ(
backend._ffi.NULL, obj, attribute._type.value, value, len(value)
)
return name_entry
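# Editor's note (purely illustrative): ASN.1 BMPString is big-endian UTF-16
# (UCS-2), so values flagged as BMPString above are encoded with 'utf_16_be'
# rather than UTF-8.
assert u"abc".encode("utf_16_be") == b"\x00a\x00b\x00c"
assert u"abc".encode("utf8") == b"abc"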
def _encode_crl_number_delta_crl_indicator(backend, ext):
return _encode_asn1_int_gc(backend, ext.crl_number)
def _encode_crl_reason(backend, crl_reason):
asn1enum = backend._lib.ASN1_ENUMERATED_new()
backend.openssl_assert(asn1enum != backend._ffi.NULL)
asn1enum = backend._ffi.gc(asn1enum, backend._lib.ASN1_ENUMERATED_free)
res = backend._lib.ASN1_ENUMERATED_set(
asn1enum, _CRL_ENTRY_REASON_ENUM_TO_CODE[crl_reason.reason]
)
backend.openssl_assert(res == 1)
return asn1enum
def _encode_invalidity_date(backend, invalidity_date):
time = backend._lib.ASN1_GENERALIZEDTIME_set(
backend._ffi.NULL, calendar.timegm(
invalidity_date.invalidity_date.timetuple()
)
)
backend.openssl_assert(time != backend._ffi.NULL)
time = backend._ffi.gc(time, backend._lib.ASN1_GENERALIZEDTIME_free)
return time
def _encode_certificate_policies(backend, certificate_policies):
cp = backend._lib.sk_POLICYINFO_new_null()
backend.openssl_assert(cp != backend._ffi.NULL)
cp = backend._ffi.gc(cp, backend._lib.sk_POLICYINFO_free)
for policy_info in certificate_policies:
pi = backend._lib.POLICYINFO_new()
backend.openssl_assert(pi != backend._ffi.NULL)
res = backend._lib.sk_POLICYINFO_push(cp, pi)
backend.openssl_assert(res >= 1)
oid = _txt2obj(backend, policy_info.policy_identifier.dotted_string)
pi.policyid = oid
if policy_info.policy_qualifiers:
pqis = backend._lib.sk_POLICYQUALINFO_new_null()
backend.openssl_assert(pqis != backend._ffi.NULL)
for qualifier in policy_info.policy_qualifiers:
pqi = backend._lib.POLICYQUALINFO_new()
backend.openssl_assert(pqi != backend._ffi.NULL)
res = backend._lib.sk_POLICYQUALINFO_push(pqis, pqi)
backend.openssl_assert(res >= 1)
if isinstance(qualifier, six.text_type):
pqi.pqualid = _txt2obj(
backend, x509.OID_CPS_QUALIFIER.dotted_string
)
pqi.d.cpsuri = _encode_asn1_str(
backend,
qualifier.encode("ascii"),
)
else:
assert isinstance(qualifier, x509.UserNotice)
pqi.pqualid = _txt2obj(
backend, x509.OID_CPS_USER_NOTICE.dotted_string
)
un = backend._lib.USERNOTICE_new()
backend.openssl_assert(un != backend._ffi.NULL)
pqi.d.usernotice = un
if qualifier.explicit_text:
un.exptext = _encode_asn1_utf8_str(
backend, qualifier.explicit_text
)
un.noticeref = _encode_notice_reference(
backend, qualifier.notice_reference
)
pi.qualifiers = pqis
return cp
def _encode_notice_reference(backend, notice):
if notice is None:
return backend._ffi.NULL
else:
nr = backend._lib.NOTICEREF_new()
backend.openssl_assert(nr != backend._ffi.NULL)
# organization is a required field
nr.organization = _encode_asn1_utf8_str(backend, notice.organization)
notice_stack = backend._lib.sk_ASN1_INTEGER_new_null()
nr.noticenos = notice_stack
for number in notice.notice_numbers:
num = _encode_asn1_int(backend, number)
res = backend._lib.sk_ASN1_INTEGER_push(notice_stack, num)
backend.openssl_assert(res >= 1)
return nr
def _txt2obj(backend, name):
"""
Converts a Python string with an ASN.1 object ID in dotted form to an
ASN1_OBJECT.
"""
name = name.encode('ascii')
obj = backend._lib.OBJ_txt2obj(name, 1)
backend.openssl_assert(obj != backend._ffi.NULL)
return obj
def _txt2obj_gc(backend, name):
obj = _txt2obj(backend, name)
obj = backend._ffi.gc(obj, backend._lib.ASN1_OBJECT_free)
return obj
def _encode_ocsp_nocheck(backend, ext):
# Doesn't need to be GC'd
return backend._lib.ASN1_NULL_new()
def _encode_key_usage(backend, key_usage):
set_bit = backend._lib.ASN1_BIT_STRING_set_bit
ku = backend._lib.ASN1_BIT_STRING_new()
ku = backend._ffi.gc(ku, backend._lib.ASN1_BIT_STRING_free)
res = set_bit(ku, 0, key_usage.digital_signature)
backend.openssl_assert(res == 1)
res = set_bit(ku, 1, key_usage.content_commitment)
backend.openssl_assert(res == 1)
res = set_bit(ku, 2, key_usage.key_encipherment)
backend.openssl_assert(res == 1)
res = set_bit(ku, 3, key_usage.data_encipherment)
backend.openssl_assert(res == 1)
res = set_bit(ku, 4, key_usage.key_agreement)
backend.openssl_assert(res == 1)
res = set_bit(ku, 5, key_usage.key_cert_sign)
backend.openssl_assert(res == 1)
res = set_bit(ku, 6, key_usage.crl_sign)
backend.openssl_assert(res == 1)
if key_usage.key_agreement:
res = set_bit(ku, 7, key_usage.encipher_only)
backend.openssl_assert(res == 1)
res = set_bit(ku, 8, key_usage.decipher_only)
backend.openssl_assert(res == 1)
else:
res = set_bit(ku, 7, 0)
backend.openssl_assert(res == 1)
res = set_bit(ku, 8, 0)
backend.openssl_assert(res == 1)
return ku
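# Editor's note (illustrative sketch): encipher_only/decipher_only are only
# meaningful when key_agreement is set, which is why the else branch above
# forces bits 7 and 8 to zero. The public x509.KeyUsage constructor is expected
# to enforce the same rule. _demo_key_usage is hypothetical.
def _demo_key_usage():
    return x509.KeyUsage(
        digital_signature=True,
        content_commitment=False,
        key_encipherment=True,
        data_encipherment=False,
        key_agreement=False,
        key_cert_sign=False,
        crl_sign=False,
        encipher_only=False,   # must stay False while key_agreement is False
        decipher_only=False,
    )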
def _encode_authority_key_identifier(backend, authority_keyid):
akid = backend._lib.AUTHORITY_KEYID_new()
backend.openssl_assert(akid != backend._ffi.NULL)
akid = backend._ffi.gc(akid, backend._lib.AUTHORITY_KEYID_free)
if authority_keyid.key_identifier is not None:
akid.keyid = _encode_asn1_str(
backend,
authority_keyid.key_identifier,
)
if authority_keyid.authority_cert_issuer is not None:
akid.issuer = _encode_general_names(
backend, authority_keyid.authority_cert_issuer
)
if authority_keyid.authority_cert_serial_number is not None:
akid.serial = _encode_asn1_int(
backend, authority_keyid.authority_cert_serial_number
)
return akid
def _encode_basic_constraints(backend, basic_constraints):
constraints = backend._lib.BASIC_CONSTRAINTS_new()
constraints = backend._ffi.gc(
constraints, backend._lib.BASIC_CONSTRAINTS_free
)
constraints.ca = 255 if basic_constraints.ca else 0
if basic_constraints.ca and basic_constraints.path_length is not None:
constraints.pathlen = _encode_asn1_int(
backend, basic_constraints.path_length
)
return constraints
def _encode_authority_information_access(backend, authority_info_access):
aia = backend._lib.sk_ACCESS_DESCRIPTION_new_null()
backend.openssl_assert(aia != backend._ffi.NULL)
aia = backend._ffi.gc(
aia, backend._lib.sk_ACCESS_DESCRIPTION_free
)
for access_description in authority_info_access:
ad = backend._lib.ACCESS_DESCRIPTION_new()
method = _txt2obj(
backend, access_description.access_method.dotted_string
)
gn = _encode_general_name(backend, access_description.access_location)
ad.method = method
ad.location = gn
res = backend._lib.sk_ACCESS_DESCRIPTION_push(aia, ad)
backend.openssl_assert(res >= 1)
return aia
def _encode_general_names(backend, names):
general_names = backend._lib.GENERAL_NAMES_new()
backend.openssl_assert(general_names != backend._ffi.NULL)
for name in names:
gn = _encode_general_name(backend, name)
res = backend._lib.sk_GENERAL_NAME_push(general_names, gn)
backend.openssl_assert(res != 0)
return general_names
def _encode_alt_name(backend, san):
general_names = _encode_general_names(backend, san)
general_names = backend._ffi.gc(
general_names, backend._lib.GENERAL_NAMES_free
)
return general_names
def _encode_subject_key_identifier(backend, ski):
return _encode_asn1_str_gc(backend, ski.digest)
def _encode_general_name(backend, name):
if isinstance(name, x509.DNSName):
gn = backend._lib.GENERAL_NAME_new()
backend.openssl_assert(gn != backend._ffi.NULL)
gn.type = backend._lib.GEN_DNS
ia5 = backend._lib.ASN1_IA5STRING_new()
backend.openssl_assert(ia5 != backend._ffi.NULL)
# ia5strings are supposed to be ITU T.50 but to allow round-tripping
# of broken certs that encode utf8 we'll encode utf8 here too.
value = name.value.encode("utf8")
res = backend._lib.ASN1_STRING_set(ia5, value, len(value))
backend.openssl_assert(res == 1)
gn.d.dNSName = ia5
elif isinstance(name, x509.RegisteredID):
gn = backend._lib.GENERAL_NAME_new()
backend.openssl_assert(gn != backend._ffi.NULL)
gn.type = backend._lib.GEN_RID
obj = backend._lib.OBJ_txt2obj(
name.value.dotted_string.encode('ascii'), 1
)
backend.openssl_assert(obj != backend._ffi.NULL)
gn.d.registeredID = obj
elif isinstance(name, x509.DirectoryName):
gn = backend._lib.GENERAL_NAME_new()
backend.openssl_assert(gn != backend._ffi.NULL)
dir_name = _encode_name(backend, name.value)
gn.type = backend._lib.GEN_DIRNAME
gn.d.directoryName = dir_name
elif isinstance(name, x509.IPAddress):
gn = backend._lib.GENERAL_NAME_new()
backend.openssl_assert(gn != backend._ffi.NULL)
if isinstance(name.value, ipaddress.IPv4Network):
packed = (
name.value.network_address.packed +
utils.int_to_bytes(((1 << 32) - name.value.num_addresses), 4)
)
elif isinstance(name.value, ipaddress.IPv6Network):
packed = (
name.value.network_address.packed +
utils.int_to_bytes((1 << 128) - name.value.num_addresses, 16)
)
else:
packed = name.value.packed
ipaddr = _encode_asn1_str(backend, packed)
gn.type = backend._lib.GEN_IPADD
gn.d.iPAddress = ipaddr
elif isinstance(name, x509.OtherName):
gn = backend._lib.GENERAL_NAME_new()
backend.openssl_assert(gn != backend._ffi.NULL)
other_name = backend._lib.OTHERNAME_new()
backend.openssl_assert(other_name != backend._ffi.NULL)
type_id = backend._lib.OBJ_txt2obj(
name.type_id.dotted_string.encode('ascii'), 1
)
backend.openssl_assert(type_id != backend._ffi.NULL)
data = backend._ffi.new("unsigned char[]", name.value)
data_ptr_ptr = backend._ffi.new("unsigned char **")
data_ptr_ptr[0] = data
value = backend._lib.d2i_ASN1_TYPE(
backend._ffi.NULL, data_ptr_ptr, len(name.value)
)
if value == backend._ffi.NULL:
backend._consume_errors()
raise ValueError("Invalid ASN.1 data")
other_name.type_id = type_id
other_name.value = value
gn.type = backend._lib.GEN_OTHERNAME
gn.d.otherName = other_name
elif isinstance(name, x509.RFC822Name):
gn = backend._lib.GENERAL_NAME_new()
backend.openssl_assert(gn != backend._ffi.NULL)
# ia5strings are supposed to be ITU T.50 but to allow round-tripping
# of broken certs that encode utf8 we'll encode utf8 here too.
data = name.value.encode("utf8")
asn1_str = _encode_asn1_str(backend, data)
gn.type = backend._lib.GEN_EMAIL
gn.d.rfc822Name = asn1_str
elif isinstance(name, x509.UniformResourceIdentifier):
gn = backend._lib.GENERAL_NAME_new()
backend.openssl_assert(gn != backend._ffi.NULL)
# ia5strings are supposed to be ITU T.50 but to allow round-tripping
# of broken certs that encode utf8 we'll encode utf8 here too.
data = name.value.encode("utf8")
asn1_str = _encode_asn1_str(backend, data)
gn.type = backend._lib.GEN_URI
gn.d.uniformResourceIdentifier = asn1_str
else:
raise ValueError(
"{0} is an unknown GeneralName type".format(name)
)
return gn
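# Editor's note (purely illustrative): a worked example of the netmask
# arithmetic in the IPAddress branch above. For a /24 network there are 2**8
# addresses, so (1 << 32) - num_addresses yields the familiar 255.255.255.0
# mask, and the GeneralName payload is the packed address followed by the mask.
_demo_net = ipaddress.IPv4Network(u"192.0.2.0/24")
_demo_mask = (1 << 32) - _demo_net.num_addresses
assert _demo_mask == 0xFFFFFF00
assert (
    _demo_net.network_address.packed + utils.int_to_bytes(_demo_mask, 4)
) == b"\xc0\x00\x02\x00\xff\xff\xff\x00"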
def _encode_extended_key_usage(backend, extended_key_usage):
eku = backend._lib.sk_ASN1_OBJECT_new_null()
eku = backend._ffi.gc(eku, backend._lib.sk_ASN1_OBJECT_free)
for oid in extended_key_usage:
obj = _txt2obj(backend, oid.dotted_string)
res = backend._lib.sk_ASN1_OBJECT_push(eku, obj)
backend.openssl_assert(res >= 1)
return eku
_CRLREASONFLAGS = {
x509.ReasonFlags.key_compromise: 1,
x509.ReasonFlags.ca_compromise: 2,
x509.ReasonFlags.affiliation_changed: 3,
x509.ReasonFlags.superseded: 4,
x509.ReasonFlags.cessation_of_operation: 5,
x509.ReasonFlags.certificate_hold: 6,
x509.ReasonFlags.privilege_withdrawn: 7,
x509.ReasonFlags.aa_compromise: 8,
}
def _encode_cdps_freshest_crl(backend, cdps):
cdp = backend._lib.sk_DIST_POINT_new_null()
cdp = backend._ffi.gc(cdp, backend._lib.sk_DIST_POINT_free)
for point in cdps:
dp = backend._lib.DIST_POINT_new()
backend.openssl_assert(dp != backend._ffi.NULL)
if point.reasons:
bitmask = backend._lib.ASN1_BIT_STRING_new()
backend.openssl_assert(bitmask != backend._ffi.NULL)
dp.reasons = bitmask
for reason in point.reasons:
res = backend._lib.ASN1_BIT_STRING_set_bit(
bitmask, _CRLREASONFLAGS[reason], 1
)
backend.openssl_assert(res == 1)
if point.full_name:
dpn = backend._lib.DIST_POINT_NAME_new()
backend.openssl_assert(dpn != backend._ffi.NULL)
dpn.type = _DISTPOINT_TYPE_FULLNAME
dpn.name.fullname = _encode_general_names(backend, point.full_name)
dp.distpoint = dpn
if point.relative_name:
dpn = backend._lib.DIST_POINT_NAME_new()
backend.openssl_assert(dpn != backend._ffi.NULL)
dpn.type = _DISTPOINT_TYPE_RELATIVENAME
relativename = _encode_sk_name_entry(backend, point.relative_name)
backend.openssl_assert(relativename != backend._ffi.NULL)
dpn.name.relativename = relativename
dp.distpoint = dpn
if point.crl_issuer:
dp.CRLissuer = _encode_general_names(backend, point.crl_issuer)
res = backend._lib.sk_DIST_POINT_push(cdp, dp)
backend.openssl_assert(res >= 1)
return cdp
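The loop above consumes x509.DistributionPoint values from the public API; a small construction sketch (URI and reason chosen arbitrarily for illustration):

from cryptography import x509

point = x509.DistributionPoint(
    full_name=[x509.UniformResourceIdentifier(u"http://crl.example.com/ca.crl")],
    relative_name=None,
    reasons=frozenset([x509.ReasonFlags.key_compromise]),
    crl_issuer=None,
)
cdps = x509.CRLDistributionPoints([point])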
def _encode_name_constraints(backend, name_constraints):
nc = backend._lib.NAME_CONSTRAINTS_new()
backend.openssl_assert(nc != backend._ffi.NULL)
nc = backend._ffi.gc(nc, backend._lib.NAME_CONSTRAINTS_free)
permitted = _encode_general_subtree(
backend, name_constraints.permitted_subtrees
)
nc.permittedSubtrees = permitted
excluded = _encode_general_subtree(
backend, name_constraints.excluded_subtrees
)
nc.excludedSubtrees = excluded
return nc
def _encode_policy_constraints(backend, policy_constraints):
pc = backend._lib.POLICY_CONSTRAINTS_new()
backend.openssl_assert(pc != backend._ffi.NULL)
pc = backend._ffi.gc(pc, backend._lib.POLICY_CONSTRAINTS_free)
if policy_constraints.require_explicit_policy is not None:
pc.requireExplicitPolicy = _encode_asn1_int(
backend, policy_constraints.require_explicit_policy
)
if policy_constraints.inhibit_policy_mapping is not None:
pc.inhibitPolicyMapping = _encode_asn1_int(
backend, policy_constraints.inhibit_policy_mapping
)
return pc
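Both encoders above receive the corresponding public extension objects; a minimal construction sketch using the public cryptography.x509 API (values are illustrative):

from cryptography import x509

nc = x509.NameConstraints(
    permitted_subtrees=[x509.DNSName(u"example.com")],
    excluded_subtrees=None,
)
pc = x509.PolicyConstraints(
    require_explicit_policy=0,
    inhibit_policy_mapping=None,
)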
def _encode_general_subtree(backend, subtrees):
if subtrees is None:
return backend._ffi.NULL
else:
general_subtrees = backend._lib.sk_GENERAL_SUBTREE_new_null()
for name in subtrees:
gs = backend._lib.GENERAL_SUBTREE_new()
gs.base = _encode_general_name(backend, name)
res = backend._lib.sk_GENERAL_SUBTREE_push(general_subtrees, gs)
assert res >= 1
return general_subtrees
def _encode_nonce(backend, nonce):
return _encode_asn1_str_gc(backend, nonce.nonce)
_EXTENSION_ENCODE_HANDLERS = {
ExtensionOID.BASIC_CONSTRAINTS: _encode_basic_constraints,
ExtensionOID.SUBJECT_KEY_IDENTIFIER: _encode_subject_key_identifier,
ExtensionOID.KEY_USAGE: _encode_key_usage,
ExtensionOID.SUBJECT_ALTERNATIVE_NAME: _encode_alt_name,
ExtensionOID.ISSUER_ALTERNATIVE_NAME: _encode_alt_name,
ExtensionOID.EXTENDED_KEY_USAGE: _encode_extended_key_usage,
ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _encode_authority_key_identifier,
ExtensionOID.CERTIFICATE_POLICIES: _encode_certificate_policies,
ExtensionOID.AUTHORITY_INFORMATION_ACCESS: (
_encode_authority_information_access
),
ExtensionOID.CRL_DISTRIBUTION_POINTS: _encode_cdps_freshest_crl,
ExtensionOID.FRESHEST_CRL: _encode_cdps_freshest_crl,
ExtensionOID.INHIBIT_ANY_POLICY: _encode_inhibit_any_policy,
ExtensionOID.OCSP_NO_CHECK: _encode_ocsp_nocheck,
ExtensionOID.NAME_CONSTRAINTS: _encode_name_constraints,
ExtensionOID.POLICY_CONSTRAINTS: _encode_policy_constraints,
}
_CRL_EXTENSION_ENCODE_HANDLERS = {
ExtensionOID.ISSUER_ALTERNATIVE_NAME: _encode_alt_name,
ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _encode_authority_key_identifier,
ExtensionOID.AUTHORITY_INFORMATION_ACCESS: (
_encode_authority_information_access
),
ExtensionOID.CRL_NUMBER: _encode_crl_number_delta_crl_indicator,
ExtensionOID.DELTA_CRL_INDICATOR: _encode_crl_number_delta_crl_indicator,
}
_CRL_ENTRY_EXTENSION_ENCODE_HANDLERS = {
CRLEntryExtensionOID.CERTIFICATE_ISSUER: _encode_alt_name,
CRLEntryExtensionOID.CRL_REASON: _encode_crl_reason,
CRLEntryExtensionOID.INVALIDITY_DATE: _encode_invalidity_date,
}
_OCSP_REQUEST_EXTENSION_ENCODE_HANDLERS = {
OCSPExtensionOID.NONCE: _encode_nonce,
}
_OCSP_BASICRESP_EXTENSION_ENCODE_HANDLERS = {
OCSPExtensionOID.NONCE: _encode_nonce,
}
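These tables map extension OIDs to encoder callables; the builders pick the handler for each extension's OID and hand it the extension value. An illustrative dispatch loop (the function name and error text are assumptions, not the backend's exact code):

def _encode_extensions_sketch(backend, extensions, handlers):
    # Illustrative only: resolve an encoder per extension OID and let it
    # build the corresponding OpenSSL structure from the extension value.
    for extension in extensions:
        try:
            encode = handlers[extension.oid]
        except KeyError:
            raise NotImplementedError(
                "Extension not yet supported: {0}".format(extension.oid)
            )
        encode(backend, extension.value)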


@ -18,11 +18,11 @@ class _HashContext(object):
self._backend = backend
if ctx is None:
ctx = self._backend._lib.EVP_MD_CTX_create()
ctx = self._backend._ffi.gc(ctx,
self._backend._lib.EVP_MD_CTX_destroy)
evp_md = self._backend._lib.EVP_get_digestbyname(
algorithm.name.encode("ascii"))
ctx = self._backend._lib.Cryptography_EVP_MD_CTX_new()
ctx = self._backend._ffi.gc(
ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free
)
evp_md = self._backend._evp_md_from_algorithm(algorithm)
if evp_md == self._backend._ffi.NULL:
raise UnsupportedAlgorithm(
"{0} is not a supported hash on this backend.".format(
@ -38,9 +38,9 @@ class _HashContext(object):
algorithm = utils.read_only_property("_algorithm")
def copy(self):
copied_ctx = self._backend._lib.EVP_MD_CTX_create()
copied_ctx = self._backend._lib.Cryptography_EVP_MD_CTX_new()
copied_ctx = self._backend._ffi.gc(
copied_ctx, self._backend._lib.EVP_MD_CTX_destroy
copied_ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free
)
res = self._backend._lib.EVP_MD_CTX_copy_ex(copied_ctx, self._ctx)
self._backend.openssl_assert(res != 0)
@ -57,6 +57,4 @@ class _HashContext(object):
res = self._backend._lib.EVP_DigestFinal_ex(self._ctx, buf, outlen)
self._backend.openssl_assert(res != 0)
self._backend.openssl_assert(outlen[0] == self.algorithm.digest_size)
res = self._backend._lib.EVP_MD_CTX_cleanup(self._ctx)
self._backend.openssl_assert(res == 1)
return self._backend._ffi.buffer(buf)[:outlen[0]]
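The context above backs the public hashes.Hash interface of this era (explicit backend argument). A minimal usage sketch, assuming the OpenSSL backend is importable:

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes

digest = hashes.Hash(hashes.SHA256(), backend=default_backend())
digest.update(b"hello ")
clone = digest.copy()              # EVP_MD_CTX_copy_ex under the hood
digest.update(b"world")
print(digest.finalize().hex())     # SHA-256 of b"hello world"
clone.update(b"there")
print(clone.finalize().hex())      # SHA-256 of b"hello there"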


@ -9,10 +9,10 @@ from cryptography import utils
from cryptography.exceptions import (
InvalidSignature, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.primitives import constant_time, hashes, interfaces
from cryptography.hazmat.primitives import constant_time, hashes, mac
@utils.register_interface(interfaces.MACContext)
@utils.register_interface(mac.MACContext)
@utils.register_interface(hashes.HashContext)
class _HMACContext(object):
def __init__(self, backend, key, algorithm, ctx=None):
@ -20,20 +20,19 @@ class _HMACContext(object):
self._backend = backend
if ctx is None:
ctx = self._backend._ffi.new("HMAC_CTX *")
self._backend._lib.HMAC_CTX_init(ctx)
ctx = self._backend._lib.Cryptography_HMAC_CTX_new()
self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
ctx = self._backend._ffi.gc(
ctx, self._backend._lib.HMAC_CTX_cleanup
ctx, self._backend._lib.Cryptography_HMAC_CTX_free
)
evp_md = self._backend._lib.EVP_get_digestbyname(
algorithm.name.encode('ascii'))
evp_md = self._backend._evp_md_from_algorithm(algorithm)
if evp_md == self._backend._ffi.NULL:
raise UnsupportedAlgorithm(
"{0} is not a supported hash on this backend.".format(
"{0} is not a supported hash on this backend".format(
algorithm.name),
_Reasons.UNSUPPORTED_HASH
)
res = self._backend._lib.Cryptography_HMAC_Init_ex(
res = self._backend._lib.HMAC_Init_ex(
ctx, key, len(key), evp_md, self._backend._ffi.NULL
)
self._backend.openssl_assert(res != 0)
@ -44,35 +43,28 @@ class _HMACContext(object):
algorithm = utils.read_only_property("_algorithm")
def copy(self):
copied_ctx = self._backend._ffi.new("HMAC_CTX *")
self._backend._lib.HMAC_CTX_init(copied_ctx)
copied_ctx = self._backend._lib.Cryptography_HMAC_CTX_new()
self._backend.openssl_assert(copied_ctx != self._backend._ffi.NULL)
copied_ctx = self._backend._ffi.gc(
copied_ctx, self._backend._lib.HMAC_CTX_cleanup
)
res = self._backend._lib.Cryptography_HMAC_CTX_copy(
copied_ctx, self._ctx
copied_ctx, self._backend._lib.Cryptography_HMAC_CTX_free
)
res = self._backend._lib.HMAC_CTX_copy(copied_ctx, self._ctx)
self._backend.openssl_assert(res != 0)
return _HMACContext(
self._backend, self._key, self.algorithm, ctx=copied_ctx
)
def update(self, data):
res = self._backend._lib.Cryptography_HMAC_Update(
self._ctx, data, len(data)
)
res = self._backend._lib.HMAC_Update(self._ctx, data, len(data))
self._backend.openssl_assert(res != 0)
def finalize(self):
buf = self._backend._ffi.new("unsigned char[]",
self._backend._lib.EVP_MAX_MD_SIZE)
outlen = self._backend._ffi.new("unsigned int *")
res = self._backend._lib.Cryptography_HMAC_Final(
self._ctx, buf, outlen
)
res = self._backend._lib.HMAC_Final(self._ctx, buf, outlen)
self._backend.openssl_assert(res != 0)
self._backend.openssl_assert(outlen[0] == self.algorithm.digest_size)
self._backend._lib.HMAC_CTX_cleanup(self._ctx)
return self._backend._ffi.buffer(buf)[:outlen[0]]
def verify(self, signature):
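The HMAC context above is exposed through cryptography.hazmat.primitives.hmac.HMAC. A minimal usage sketch, assuming the same explicit-backend API shown in this diff:

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, hmac

key = b"0" * 32
h = hmac.HMAC(key, hashes.SHA256(), backend=default_backend())
h.update(b"message to authenticate")
tag = h.finalize()

verifier = hmac.HMAC(key, hashes.SHA256(), backend=default_backend())
verifier.update(b"message to authenticate")
verifier.verify(tag)               # raises InvalidSignature on mismatch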


@ -0,0 +1,370 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import functools
from cryptography import utils, x509
from cryptography.exceptions import UnsupportedAlgorithm
from cryptography.hazmat.backends.openssl.decode_asn1 import (
_CRL_ENTRY_REASON_CODE_TO_ENUM, _OCSP_BASICRESP_EXT_PARSER,
_OCSP_REQ_EXT_PARSER, _asn1_integer_to_int,
_asn1_string_to_bytes, _decode_x509_name, _obj2txt,
_parse_asn1_generalized_time,
)
from cryptography.hazmat.backends.openssl.x509 import _Certificate
from cryptography.hazmat.primitives import serialization
from cryptography.x509.ocsp import (
OCSPCertStatus, OCSPRequest, OCSPResponse, OCSPResponseStatus,
_CERT_STATUS_TO_ENUM, _OIDS_TO_HASH, _RESPONSE_STATUS_TO_ENUM,
)
def _requires_successful_response(func):
@functools.wraps(func)
def wrapper(self, *args):
if self.response_status != OCSPResponseStatus.SUCCESSFUL:
raise ValueError(
"OCSP response status is not successful so the property "
"has no value"
)
else:
return func(self, *args)
return wrapper
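The decorator above guards every property that only exists on a successful response. A consumer-side sketch, where response_der is a hypothetical variable holding DER bytes fetched from a responder:

from cryptography.x509 import ocsp
from cryptography.x509.ocsp import OCSPResponseStatus

# response_der: DER-encoded OCSP response obtained elsewhere (assumed).
resp = ocsp.load_der_ocsp_response(response_der)
if resp.response_status == OCSPResponseStatus.SUCCESSFUL:
    print(resp.certificate_status, resp.this_update, resp.next_update)
else:
    # Accessing resp.certificate_status here would raise ValueError.
    print(resp.response_status)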
def _issuer_key_hash(backend, cert_id):
key_hash = backend._ffi.new("ASN1_OCTET_STRING **")
res = backend._lib.OCSP_id_get0_info(
backend._ffi.NULL, backend._ffi.NULL,
key_hash, backend._ffi.NULL, cert_id
)
backend.openssl_assert(res == 1)
backend.openssl_assert(key_hash[0] != backend._ffi.NULL)
return _asn1_string_to_bytes(backend, key_hash[0])
def _issuer_name_hash(backend, cert_id):
name_hash = backend._ffi.new("ASN1_OCTET_STRING **")
res = backend._lib.OCSP_id_get0_info(
name_hash, backend._ffi.NULL,
backend._ffi.NULL, backend._ffi.NULL, cert_id
)
backend.openssl_assert(res == 1)
backend.openssl_assert(name_hash[0] != backend._ffi.NULL)
return _asn1_string_to_bytes(backend, name_hash[0])
def _serial_number(backend, cert_id):
num = backend._ffi.new("ASN1_INTEGER **")
res = backend._lib.OCSP_id_get0_info(
backend._ffi.NULL, backend._ffi.NULL,
backend._ffi.NULL, num, cert_id
)
backend.openssl_assert(res == 1)
backend.openssl_assert(num[0] != backend._ffi.NULL)
return _asn1_integer_to_int(backend, num[0])
def _hash_algorithm(backend, cert_id):
asn1obj = backend._ffi.new("ASN1_OBJECT **")
res = backend._lib.OCSP_id_get0_info(
backend._ffi.NULL, asn1obj,
backend._ffi.NULL, backend._ffi.NULL, cert_id
)
backend.openssl_assert(res == 1)
backend.openssl_assert(asn1obj[0] != backend._ffi.NULL)
oid = _obj2txt(backend, asn1obj[0])
try:
return _OIDS_TO_HASH[oid]
except KeyError:
raise UnsupportedAlgorithm(
"Signature algorithm OID: {0} not recognized".format(oid)
)
@utils.register_interface(OCSPResponse)
class _OCSPResponse(object):
def __init__(self, backend, ocsp_response):
self._backend = backend
self._ocsp_response = ocsp_response
status = self._backend._lib.OCSP_response_status(self._ocsp_response)
self._backend.openssl_assert(status in _RESPONSE_STATUS_TO_ENUM)
self._status = _RESPONSE_STATUS_TO_ENUM[status]
if self._status is OCSPResponseStatus.SUCCESSFUL:
basic = self._backend._lib.OCSP_response_get1_basic(
self._ocsp_response
)
self._backend.openssl_assert(basic != self._backend._ffi.NULL)
self._basic = self._backend._ffi.gc(
basic, self._backend._lib.OCSP_BASICRESP_free
)
self._backend.openssl_assert(
self._backend._lib.OCSP_resp_count(self._basic) == 1
)
self._single = self._backend._lib.OCSP_resp_get0(self._basic, 0)
self._backend.openssl_assert(
self._single != self._backend._ffi.NULL
)
self._cert_id = self._backend._lib.OCSP_SINGLERESP_get0_id(
self._single
)
self._backend.openssl_assert(
self._cert_id != self._backend._ffi.NULL
)
response_status = utils.read_only_property("_status")
@property
@_requires_successful_response
def signature_algorithm_oid(self):
alg = self._backend._lib.OCSP_resp_get0_tbs_sigalg(self._basic)
self._backend.openssl_assert(alg != self._backend._ffi.NULL)
oid = _obj2txt(self._backend, alg.algorithm)
return x509.ObjectIdentifier(oid)
@property
@_requires_successful_response
def signature(self):
sig = self._backend._lib.OCSP_resp_get0_signature(self._basic)
self._backend.openssl_assert(sig != self._backend._ffi.NULL)
return _asn1_string_to_bytes(self._backend, sig)
@property
@_requires_successful_response
def tbs_response_bytes(self):
respdata = self._backend._lib.OCSP_resp_get0_respdata(self._basic)
self._backend.openssl_assert(respdata != self._backend._ffi.NULL)
pp = self._backend._ffi.new("unsigned char **")
res = self._backend._lib.i2d_OCSP_RESPDATA(respdata, pp)
self._backend.openssl_assert(pp[0] != self._backend._ffi.NULL)
pp = self._backend._ffi.gc(
pp, lambda pointer: self._backend._lib.OPENSSL_free(pointer[0])
)
self._backend.openssl_assert(res > 0)
return self._backend._ffi.buffer(pp[0], res)[:]
@property
@_requires_successful_response
def certificates(self):
sk_x509 = self._backend._lib.OCSP_resp_get0_certs(self._basic)
num = self._backend._lib.sk_X509_num(sk_x509)
certs = []
for i in range(num):
x509 = self._backend._lib.sk_X509_value(sk_x509, i)
self._backend.openssl_assert(x509 != self._backend._ffi.NULL)
cert = _Certificate(self._backend, x509)
# We need to keep the OCSP response that the certificate came from
# alive until the Certificate object itself goes out of scope, so
# we give it a private reference.
cert._ocsp_resp = self
certs.append(cert)
return certs
@property
@_requires_successful_response
def responder_key_hash(self):
_, asn1_string = self._responder_key_name()
if asn1_string == self._backend._ffi.NULL:
return None
else:
return _asn1_string_to_bytes(self._backend, asn1_string)
@property
@_requires_successful_response
def responder_name(self):
x509_name, _ = self._responder_key_name()
if x509_name == self._backend._ffi.NULL:
return None
else:
return _decode_x509_name(self._backend, x509_name)
def _responder_key_name(self):
asn1_string = self._backend._ffi.new("ASN1_OCTET_STRING **")
x509_name = self._backend._ffi.new("X509_NAME **")
res = self._backend._lib.OCSP_resp_get0_id(
self._basic, asn1_string, x509_name
)
self._backend.openssl_assert(res == 1)
return x509_name[0], asn1_string[0]
@property
@_requires_successful_response
def produced_at(self):
produced_at = self._backend._lib.OCSP_resp_get0_produced_at(
self._basic
)
return _parse_asn1_generalized_time(self._backend, produced_at)
@property
@_requires_successful_response
def certificate_status(self):
status = self._backend._lib.OCSP_single_get0_status(
self._single,
self._backend._ffi.NULL,
self._backend._ffi.NULL,
self._backend._ffi.NULL,
self._backend._ffi.NULL,
)
self._backend.openssl_assert(status in _CERT_STATUS_TO_ENUM)
return _CERT_STATUS_TO_ENUM[status]
@property
@_requires_successful_response
def revocation_time(self):
if self.certificate_status is not OCSPCertStatus.REVOKED:
return None
asn1_time = self._backend._ffi.new("ASN1_GENERALIZEDTIME **")
self._backend._lib.OCSP_single_get0_status(
self._single,
self._backend._ffi.NULL,
asn1_time,
self._backend._ffi.NULL,
self._backend._ffi.NULL,
)
self._backend.openssl_assert(asn1_time[0] != self._backend._ffi.NULL)
return _parse_asn1_generalized_time(self._backend, asn1_time[0])
@property
@_requires_successful_response
def revocation_reason(self):
if self.certificate_status is not OCSPCertStatus.REVOKED:
return None
reason_ptr = self._backend._ffi.new("int *")
self._backend._lib.OCSP_single_get0_status(
self._single,
reason_ptr,
self._backend._ffi.NULL,
self._backend._ffi.NULL,
self._backend._ffi.NULL,
)
# If no reason is encoded OpenSSL returns -1
if reason_ptr[0] == -1:
return None
else:
self._backend.openssl_assert(
reason_ptr[0] in _CRL_ENTRY_REASON_CODE_TO_ENUM
)
return _CRL_ENTRY_REASON_CODE_TO_ENUM[reason_ptr[0]]
@property
@_requires_successful_response
def this_update(self):
asn1_time = self._backend._ffi.new("ASN1_GENERALIZEDTIME **")
self._backend._lib.OCSP_single_get0_status(
self._single,
self._backend._ffi.NULL,
self._backend._ffi.NULL,
asn1_time,
self._backend._ffi.NULL,
)
self._backend.openssl_assert(asn1_time[0] != self._backend._ffi.NULL)
return _parse_asn1_generalized_time(self._backend, asn1_time[0])
@property
@_requires_successful_response
def next_update(self):
asn1_time = self._backend._ffi.new("ASN1_GENERALIZEDTIME **")
self._backend._lib.OCSP_single_get0_status(
self._single,
self._backend._ffi.NULL,
self._backend._ffi.NULL,
self._backend._ffi.NULL,
asn1_time,
)
if asn1_time[0] != self._backend._ffi.NULL:
return _parse_asn1_generalized_time(self._backend, asn1_time[0])
else:
return None
@property
@_requires_successful_response
def issuer_key_hash(self):
return _issuer_key_hash(self._backend, self._cert_id)
@property
@_requires_successful_response
def issuer_name_hash(self):
return _issuer_name_hash(self._backend, self._cert_id)
@property
@_requires_successful_response
def hash_algorithm(self):
return _hash_algorithm(self._backend, self._cert_id)
@property
@_requires_successful_response
def serial_number(self):
return _serial_number(self._backend, self._cert_id)
@utils.cached_property
@_requires_successful_response
def extensions(self):
return _OCSP_BASICRESP_EXT_PARSER.parse(self._backend, self._basic)
def public_bytes(self, encoding):
if encoding is not serialization.Encoding.DER:
raise ValueError(
"The only allowed encoding value is Encoding.DER"
)
bio = self._backend._create_mem_bio_gc()
res = self._backend._lib.i2d_OCSP_RESPONSE_bio(
bio, self._ocsp_response
)
self._backend.openssl_assert(res > 0)
return self._backend._read_mem_bio(bio)
@utils.register_interface(OCSPRequest)
class _OCSPRequest(object):
def __init__(self, backend, ocsp_request):
if backend._lib.OCSP_request_onereq_count(ocsp_request) > 1:
raise NotImplementedError(
'OCSP request contains more than one request'
)
self._backend = backend
self._ocsp_request = ocsp_request
self._request = self._backend._lib.OCSP_request_onereq_get0(
self._ocsp_request, 0
)
self._backend.openssl_assert(self._request != self._backend._ffi.NULL)
self._cert_id = self._backend._lib.OCSP_onereq_get0_id(self._request)
self._backend.openssl_assert(self._cert_id != self._backend._ffi.NULL)
@property
def issuer_key_hash(self):
return _issuer_key_hash(self._backend, self._cert_id)
@property
def issuer_name_hash(self):
return _issuer_name_hash(self._backend, self._cert_id)
@property
def serial_number(self):
return _serial_number(self._backend, self._cert_id)
@property
def hash_algorithm(self):
return _hash_algorithm(self._backend, self._cert_id)
@utils.cached_property
def extensions(self):
return _OCSP_REQ_EXT_PARSER.parse(self._backend, self._ocsp_request)
def public_bytes(self, encoding):
if encoding is not serialization.Encoding.DER:
raise ValueError(
"The only allowed encoding value is Encoding.DER"
)
bio = self._backend._create_mem_bio_gc()
res = self._backend._lib.i2d_OCSP_REQUEST_bio(bio, self._ocsp_request)
self._backend.openssl_assert(res > 0)
return self._backend._read_mem_bio(bio)
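The request wrapper above is what ocsp.OCSPRequestBuilder produces. A minimal build-and-serialize sketch, where cert and issuer are hypothetical, previously loaded x509.Certificate objects:

from cryptography.hazmat.primitives import hashes, serialization
from cryptography.x509 import ocsp

# cert and issuer: x509.Certificate objects loaded elsewhere (assumed).
builder = ocsp.OCSPRequestBuilder()
builder = builder.add_certificate(cert, issuer, hashes.SHA1())
req = builder.build()
der = req.public_bytes(serialization.Encoding.DER)
print(req.serial_number, req.hash_algorithm.name)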


@ -8,29 +8,29 @@ import math
from cryptography import utils
from cryptography.exceptions import (
AlreadyFinalized, InvalidSignature, UnsupportedAlgorithm, _Reasons
InvalidSignature, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.backends.openssl.utils import (
_calculate_digest_and_algorithm, _check_not_prehashed,
_warn_sign_verify_deprecated
)
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import (
AsymmetricSignatureContext, AsymmetricVerificationContext, rsa
)
from cryptography.hazmat.primitives.asymmetric.padding import (
AsymmetricPadding, MGF1, OAEP, PKCS1v15, PSS
AsymmetricPadding, MGF1, OAEP, PKCS1v15, PSS, calculate_max_pss_salt_length
)
from cryptography.hazmat.primitives.asymmetric.rsa import (
RSAPrivateKeyWithSerialization, RSAPublicKeyWithSerialization
)
def _get_rsa_pss_salt_length(pss, key_size, digest_size):
def _get_rsa_pss_salt_length(pss, key, hash_algorithm):
salt = pss._salt_length
if salt is MGF1.MAX_LENGTH or salt is PSS.MAX_LENGTH:
# bit length - 1 per RFC 3447
emlen = int(math.ceil((key_size - 1) / 8.0))
salt_length = emlen - digest_size - 2
assert salt_length >= 0
return salt_length
return calculate_max_pss_salt_length(key, hash_algorithm)
else:
return salt
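calculate_max_pss_salt_length replaces the inline arithmetic removed here: the maximum salt is the PSS encoded-message length minus the digest size minus two. A pure-Python sketch of that computation (function name illustrative):

import math


def max_pss_salt_length(key_size_bits, digest_size_bytes):
    # emlen is the encoded message length in bytes, computed from the
    # modulus bit length minus one per RFC 3447, rounded up to whole bytes.
    emlen = int(math.ceil((key_size_bits - 1) / 8.0))
    return emlen - digest_size_bytes - 2


# A 2048-bit key with SHA-256 leaves room for a 222-byte salt.
assert max_pss_salt_length(2048, 32) == 222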
@ -43,27 +43,20 @@ def _enc_dec_rsa(backend, key, data, padding):
padding_enum = backend._lib.RSA_PKCS1_PADDING
elif isinstance(padding, OAEP):
padding_enum = backend._lib.RSA_PKCS1_OAEP_PADDING
if not isinstance(padding._mgf, MGF1):
raise UnsupportedAlgorithm(
"Only MGF1 is supported by this backend.",
_Reasons.UNSUPPORTED_MGF
)
if not isinstance(padding._mgf._algorithm, hashes.SHA1):
if not backend.rsa_padding_supported(padding):
raise UnsupportedAlgorithm(
"This backend supports only SHA1 inside MGF1 when "
"using OAEP.",
_Reasons.UNSUPPORTED_HASH
"This combination of padding and hash algorithm is not "
"supported by this backend.",
_Reasons.UNSUPPORTED_PADDING
)
if padding._label is not None and padding._label != b"":
raise ValueError("This backend does not support OAEP labels.")
if not isinstance(padding._algorithm, hashes.SHA1):
raise UnsupportedAlgorithm(
"This backend only supports SHA1 when using OAEP.",
_Reasons.UNSUPPORTED_HASH
)
else:
raise UnsupportedAlgorithm(
"{0} is not supported by this backend.".format(
@ -72,19 +65,16 @@ def _enc_dec_rsa(backend, key, data, padding):
_Reasons.UNSUPPORTED_PADDING
)
if backend._lib.Cryptography_HAS_PKEY_CTX:
return _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum)
else:
return _enc_dec_rsa_098(backend, key, data, padding_enum)
return _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum, padding)
def _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum):
def _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum, padding):
if isinstance(key, _RSAPublicKey):
init = backend._lib.EVP_PKEY_encrypt_init
crypt = backend._lib.Cryptography_EVP_PKEY_encrypt
crypt = backend._lib.EVP_PKEY_encrypt
else:
init = backend._lib.EVP_PKEY_decrypt_init
crypt = backend._lib.Cryptography_EVP_PKEY_decrypt
crypt = backend._lib.EVP_PKEY_decrypt
pkey_ctx = backend._lib.EVP_PKEY_CTX_new(
key._evp_pkey, backend._ffi.NULL
@ -98,8 +88,35 @@ def _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum):
backend.openssl_assert(res > 0)
buf_size = backend._lib.EVP_PKEY_size(key._evp_pkey)
backend.openssl_assert(buf_size > 0)
if (
isinstance(padding, OAEP) and
backend._lib.Cryptography_HAS_RSA_OAEP_MD
):
mgf1_md = backend._evp_md_non_null_from_algorithm(
padding._mgf._algorithm)
res = backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md(pkey_ctx, mgf1_md)
backend.openssl_assert(res > 0)
oaep_md = backend._evp_md_non_null_from_algorithm(padding._algorithm)
res = backend._lib.EVP_PKEY_CTX_set_rsa_oaep_md(pkey_ctx, oaep_md)
backend.openssl_assert(res > 0)
if (
isinstance(padding, OAEP) and
padding._label is not None and
len(padding._label) > 0
):
# set0_rsa_oaep_label takes ownership of the char * so we need to
# copy it into some new memory
labelptr = backend._lib.OPENSSL_malloc(len(padding._label))
backend.openssl_assert(labelptr != backend._ffi.NULL)
backend._ffi.memmove(labelptr, padding._label, len(padding._label))
res = backend._lib.EVP_PKEY_CTX_set0_rsa_oaep_label(
pkey_ctx, labelptr, len(padding._label)
)
backend.openssl_assert(res == 1)
outlen = backend._ffi.new("size_t *", buf_size)
buf = backend._ffi.new("char[]", buf_size)
buf = backend._ffi.new("unsigned char[]", buf_size)
res = crypt(pkey_ctx, buf, outlen, data, len(data))
if res <= 0:
_handle_rsa_enc_dec_error(backend, key)
@ -107,25 +124,9 @@ def _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum):
return backend._ffi.buffer(buf)[:outlen[0]]
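The MGF1/OAEP digest and label handling above is driven by the public padding.OAEP parameters. A round-trip sketch, assuming an OpenSSL build new enough for non-SHA1 OAEP digests and labels:

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding, rsa

private_key = rsa.generate_private_key(
    public_exponent=65537, key_size=2048, backend=default_backend()
)
oaep = padding.OAEP(
    mgf=padding.MGF1(algorithm=hashes.SHA256()),
    algorithm=hashes.SHA256(),
    label=b"context label",        # exercised by the set0_rsa_oaep_label path
)
ciphertext = private_key.public_key().encrypt(b"secret", oaep)
assert private_key.decrypt(ciphertext, oaep) == b"secret"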
def _enc_dec_rsa_098(backend, key, data, padding_enum):
if isinstance(key, _RSAPublicKey):
crypt = backend._lib.RSA_public_encrypt
else:
crypt = backend._lib.RSA_private_decrypt
key_size = backend._lib.RSA_size(key._rsa_cdata)
backend.openssl_assert(key_size > 0)
buf = backend._ffi.new("unsigned char[]", key_size)
res = crypt(len(data), data, buf, key._rsa_cdata, padding_enum)
if res < 0:
_handle_rsa_enc_dec_error(backend, key)
return backend._ffi.buffer(buf)[:res]
def _handle_rsa_enc_dec_error(backend, key):
errors = backend._consume_errors()
assert errors
backend.openssl_assert(errors)
assert errors[0].lib == backend._lib.ERR_LIB_RSA
if isinstance(key, _RSAPublicKey):
assert (errors[0].reason ==
@ -139,6 +140,10 @@ def _handle_rsa_enc_dec_error(backend, key):
backend._lib.RSA_R_BLOCK_TYPE_IS_NOT_01,
backend._lib.RSA_R_BLOCK_TYPE_IS_NOT_02,
backend._lib.RSA_R_OAEP_DECODING_ERROR,
# Though this error looks similar to the
# RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE, this occurs on decrypts,
# rather than on encrypts
backend._lib.RSA_R_DATA_TOO_LARGE_FOR_MODULUS,
]
if backend._lib.Cryptography_HAS_RSA_R_PKCS_DECODING_ERROR:
decoding_errors.append(backend._lib.RSA_R_PKCS_DECODING_ERROR)
@ -147,57 +152,133 @@ def _handle_rsa_enc_dec_error(backend, key):
raise ValueError("Decryption failed.")
def _rsa_sig_determine_padding(backend, key, padding, algorithm):
if not isinstance(padding, AsymmetricPadding):
raise TypeError("Expected provider of AsymmetricPadding.")
pkey_size = backend._lib.EVP_PKEY_size(key._evp_pkey)
backend.openssl_assert(pkey_size > 0)
if isinstance(padding, PKCS1v15):
padding_enum = backend._lib.RSA_PKCS1_PADDING
elif isinstance(padding, PSS):
if not isinstance(padding._mgf, MGF1):
raise UnsupportedAlgorithm(
"Only MGF1 is supported by this backend.",
_Reasons.UNSUPPORTED_MGF
)
# Size of key in bytes - 2 is the maximum
# PSS signature length (salt length is checked later)
if pkey_size - algorithm.digest_size - 2 < 0:
raise ValueError("Digest too large for key size. Use a larger "
"key or different digest.")
padding_enum = backend._lib.RSA_PKCS1_PSS_PADDING
else:
raise UnsupportedAlgorithm(
"{0} is not supported by this backend.".format(padding.name),
_Reasons.UNSUPPORTED_PADDING
)
return padding_enum
def _rsa_sig_setup(backend, padding, algorithm, key, data, init_func):
padding_enum = _rsa_sig_determine_padding(backend, key, padding, algorithm)
evp_md = backend._evp_md_non_null_from_algorithm(algorithm)
pkey_ctx = backend._lib.EVP_PKEY_CTX_new(key._evp_pkey, backend._ffi.NULL)
backend.openssl_assert(pkey_ctx != backend._ffi.NULL)
pkey_ctx = backend._ffi.gc(pkey_ctx, backend._lib.EVP_PKEY_CTX_free)
res = init_func(pkey_ctx)
backend.openssl_assert(res == 1)
res = backend._lib.EVP_PKEY_CTX_set_signature_md(pkey_ctx, evp_md)
if res == 0:
backend._consume_errors()
raise UnsupportedAlgorithm(
"{0} is not supported by this backend for RSA signing.".format(
algorithm.name
),
_Reasons.UNSUPPORTED_HASH
)
res = backend._lib.EVP_PKEY_CTX_set_rsa_padding(pkey_ctx, padding_enum)
backend.openssl_assert(res > 0)
if isinstance(padding, PSS):
res = backend._lib.EVP_PKEY_CTX_set_rsa_pss_saltlen(
pkey_ctx, _get_rsa_pss_salt_length(padding, key, algorithm)
)
backend.openssl_assert(res > 0)
mgf1_md = backend._evp_md_non_null_from_algorithm(
padding._mgf._algorithm)
res = backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md(pkey_ctx, mgf1_md)
backend.openssl_assert(res > 0)
return pkey_ctx
def _rsa_sig_sign(backend, padding, algorithm, private_key, data):
pkey_ctx = _rsa_sig_setup(
backend, padding, algorithm, private_key, data,
backend._lib.EVP_PKEY_sign_init
)
buflen = backend._ffi.new("size_t *")
res = backend._lib.EVP_PKEY_sign(
pkey_ctx,
backend._ffi.NULL,
buflen,
data,
len(data)
)
backend.openssl_assert(res == 1)
buf = backend._ffi.new("unsigned char[]", buflen[0])
res = backend._lib.EVP_PKEY_sign(
pkey_ctx, buf, buflen, data, len(data))
if res != 1:
errors = backend._consume_errors()
assert errors[0].lib == backend._lib.ERR_LIB_RSA
reason = None
if (errors[0].reason ==
backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE):
reason = ("Salt length too long for key size. Try using "
"MAX_LENGTH instead.")
else:
assert (errors[0].reason ==
backend._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY)
reason = "Digest too large for key size. Use a larger key."
assert reason is not None
raise ValueError(reason)
return backend._ffi.buffer(buf)[:]
def _rsa_sig_verify(backend, padding, algorithm, public_key, signature, data):
pkey_ctx = _rsa_sig_setup(
backend, padding, algorithm, public_key, data,
backend._lib.EVP_PKEY_verify_init
)
res = backend._lib.EVP_PKEY_verify(
pkey_ctx, signature, len(signature), data, len(data)
)
# The previous call can return negative numbers in the event of an
# error. This is not a signature failure but we need to fail if it
# occurs.
backend.openssl_assert(res >= 0)
if res == 0:
backend._consume_errors()
raise InvalidSignature
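_rsa_sig_sign and _rsa_sig_verify back the public sign()/verify() methods. A minimal PSS sketch, assuming a freshly generated 2048-bit key:

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding, rsa

private_key = rsa.generate_private_key(
    public_exponent=65537, key_size=2048, backend=default_backend()
)
pss = padding.PSS(
    mgf=padding.MGF1(hashes.SHA256()),
    salt_length=padding.PSS.MAX_LENGTH,
)
signature = private_key.sign(b"signed data", pss, hashes.SHA256())
# verify() raises InvalidSignature if the signature does not check out.
private_key.public_key().verify(signature, b"signed data", pss, hashes.SHA256())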
@utils.register_interface(AsymmetricSignatureContext)
class _RSASignatureContext(object):
def __init__(self, backend, private_key, padding, algorithm):
self._backend = backend
self._private_key = private_key
if not isinstance(padding, AsymmetricPadding):
raise TypeError("Expected provider of AsymmetricPadding.")
self._pkey_size = self._backend._lib.EVP_PKEY_size(
self._private_key._evp_pkey
)
self._backend.openssl_assert(self._pkey_size > 0)
if isinstance(padding, PKCS1v15):
if self._backend._lib.Cryptography_HAS_PKEY_CTX:
self._finalize_method = self._finalize_pkey_ctx
self._padding_enum = self._backend._lib.RSA_PKCS1_PADDING
else:
self._finalize_method = self._finalize_pkcs1
elif isinstance(padding, PSS):
if not isinstance(padding._mgf, MGF1):
raise UnsupportedAlgorithm(
"Only MGF1 is supported by this backend.",
_Reasons.UNSUPPORTED_MGF
)
# Size of key in bytes - 2 is the maximum
# PSS signature length (salt length is checked later)
if self._pkey_size - algorithm.digest_size - 2 < 0:
raise ValueError("Digest too large for key size. Use a larger "
"key.")
if not self._backend._mgf1_hash_supported(padding._mgf._algorithm):
raise UnsupportedAlgorithm(
"When OpenSSL is older than 1.0.1 then only SHA1 is "
"supported with MGF1.",
_Reasons.UNSUPPORTED_HASH
)
if self._backend._lib.Cryptography_HAS_PKEY_CTX:
self._finalize_method = self._finalize_pkey_ctx
self._padding_enum = self._backend._lib.RSA_PKCS1_PSS_PADDING
else:
self._finalize_method = self._finalize_pss
else:
raise UnsupportedAlgorithm(
"{0} is not supported by this backend.".format(padding.name),
_Reasons.UNSUPPORTED_PADDING
)
# We now call _rsa_sig_determine_padding in _rsa_sig_setup. However
# we need to make a pointless call to it here so we maintain the
# API of erroring on init with this context if the values are invalid.
_rsa_sig_determine_padding(backend, private_key, padding, algorithm)
self._padding = padding
self._algorithm = algorithm
self._hash_ctx = hashes.Hash(self._algorithm, self._backend)
@ -206,135 +287,13 @@ class _RSASignatureContext(object):
self._hash_ctx.update(data)
def finalize(self):
evp_md = self._backend._lib.EVP_get_digestbyname(
self._algorithm.name.encode("ascii"))
self._backend.openssl_assert(evp_md != self._backend._ffi.NULL)
return self._finalize_method(evp_md)
def _finalize_pkey_ctx(self, evp_md):
pkey_ctx = self._backend._lib.EVP_PKEY_CTX_new(
self._private_key._evp_pkey, self._backend._ffi.NULL
return _rsa_sig_sign(
self._backend,
self._padding,
self._algorithm,
self._private_key,
self._hash_ctx.finalize()
)
self._backend.openssl_assert(pkey_ctx != self._backend._ffi.NULL)
pkey_ctx = self._backend._ffi.gc(pkey_ctx,
self._backend._lib.EVP_PKEY_CTX_free)
res = self._backend._lib.EVP_PKEY_sign_init(pkey_ctx)
self._backend.openssl_assert(res == 1)
res = self._backend._lib.EVP_PKEY_CTX_set_signature_md(
pkey_ctx, evp_md)
self._backend.openssl_assert(res > 0)
res = self._backend._lib.EVP_PKEY_CTX_set_rsa_padding(
pkey_ctx, self._padding_enum)
self._backend.openssl_assert(res > 0)
if isinstance(self._padding, PSS):
res = self._backend._lib.EVP_PKEY_CTX_set_rsa_pss_saltlen(
pkey_ctx,
_get_rsa_pss_salt_length(
self._padding,
self._private_key.key_size,
self._hash_ctx.algorithm.digest_size
)
)
self._backend.openssl_assert(res > 0)
if self._backend._lib.Cryptography_HAS_MGF1_MD:
# MGF1 MD is configurable in OpenSSL 1.0.1+
mgf1_md = self._backend._lib.EVP_get_digestbyname(
self._padding._mgf._algorithm.name.encode("ascii"))
self._backend.openssl_assert(
mgf1_md != self._backend._ffi.NULL
)
res = self._backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md(
pkey_ctx, mgf1_md
)
self._backend.openssl_assert(res > 0)
data_to_sign = self._hash_ctx.finalize()
buflen = self._backend._ffi.new("size_t *")
res = self._backend._lib.EVP_PKEY_sign(
pkey_ctx,
self._backend._ffi.NULL,
buflen,
data_to_sign,
len(data_to_sign)
)
self._backend.openssl_assert(res == 1)
buf = self._backend._ffi.new("unsigned char[]", buflen[0])
res = self._backend._lib.EVP_PKEY_sign(
pkey_ctx, buf, buflen, data_to_sign, len(data_to_sign))
if res != 1:
errors = self._backend._consume_errors()
assert errors[0].lib == self._backend._lib.ERR_LIB_RSA
reason = None
if (errors[0].reason ==
self._backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE):
reason = ("Salt length too long for key size. Try using "
"MAX_LENGTH instead.")
else:
assert (errors[0].reason ==
self._backend._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY)
reason = "Digest too large for key size. Use a larger key."
assert reason is not None
raise ValueError(reason)
return self._backend._ffi.buffer(buf)[:]
def _finalize_pkcs1(self, evp_md):
if self._hash_ctx._ctx is None:
raise AlreadyFinalized("Context has already been finalized.")
sig_buf = self._backend._ffi.new("char[]", self._pkey_size)
sig_len = self._backend._ffi.new("unsigned int *")
res = self._backend._lib.EVP_SignFinal(
self._hash_ctx._ctx._ctx,
sig_buf,
sig_len,
self._private_key._evp_pkey
)
self._hash_ctx.finalize()
if res == 0:
errors = self._backend._consume_errors()
assert errors[0].lib == self._backend._lib.ERR_LIB_RSA
assert (errors[0].reason ==
self._backend._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY)
raise ValueError("Digest too large for key size. Use a larger "
"key.")
return self._backend._ffi.buffer(sig_buf)[:sig_len[0]]
def _finalize_pss(self, evp_md):
data_to_sign = self._hash_ctx.finalize()
padded = self._backend._ffi.new("unsigned char[]", self._pkey_size)
res = self._backend._lib.RSA_padding_add_PKCS1_PSS(
self._private_key._rsa_cdata,
padded,
data_to_sign,
evp_md,
_get_rsa_pss_salt_length(
self._padding,
self._private_key.key_size,
len(data_to_sign)
)
)
if res != 1:
errors = self._backend._consume_errors()
assert errors[0].lib == self._backend._lib.ERR_LIB_RSA
assert (errors[0].reason ==
self._backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE)
raise ValueError("Salt length too long for key size. Try using "
"MAX_LENGTH instead.")
sig_buf = self._backend._ffi.new("char[]", self._pkey_size)
sig_len = self._backend._lib.RSA_private_encrypt(
self._pkey_size,
padded,
sig_buf,
self._private_key._rsa_cdata,
self._backend._lib.RSA_NO_PADDING
)
self._backend.openssl_assert(sig_len != -1)
return self._backend._ffi.buffer(sig_buf)[:sig_len]
@utils.register_interface(AsymmetricVerificationContext)
@ -343,55 +302,13 @@ class _RSAVerificationContext(object):
self._backend = backend
self._public_key = public_key
self._signature = signature
if not isinstance(padding, AsymmetricPadding):
raise TypeError("Expected provider of AsymmetricPadding.")
self._pkey_size = self._backend._lib.EVP_PKEY_size(
self._public_key._evp_pkey
)
self._backend.openssl_assert(self._pkey_size > 0)
if isinstance(padding, PKCS1v15):
if self._backend._lib.Cryptography_HAS_PKEY_CTX:
self._verify_method = self._verify_pkey_ctx
self._padding_enum = self._backend._lib.RSA_PKCS1_PADDING
else:
self._verify_method = self._verify_pkcs1
elif isinstance(padding, PSS):
if not isinstance(padding._mgf, MGF1):
raise UnsupportedAlgorithm(
"Only MGF1 is supported by this backend.",
_Reasons.UNSUPPORTED_MGF
)
# Size of key in bytes - 2 is the maximum
# PSS signature length (salt length is checked later)
if self._pkey_size - algorithm.digest_size - 2 < 0:
raise ValueError(
"Digest too large for key size. Check that you have the "
"correct key and digest algorithm."
)
if not self._backend._mgf1_hash_supported(padding._mgf._algorithm):
raise UnsupportedAlgorithm(
"When OpenSSL is older than 1.0.1 then only SHA1 is "
"supported with MGF1.",
_Reasons.UNSUPPORTED_HASH
)
if self._backend._lib.Cryptography_HAS_PKEY_CTX:
self._verify_method = self._verify_pkey_ctx
self._padding_enum = self._backend._lib.RSA_PKCS1_PSS_PADDING
else:
self._verify_method = self._verify_pss
else:
raise UnsupportedAlgorithm(
"{0} is not supported by this backend.".format(padding.name),
_Reasons.UNSUPPORTED_PADDING
)
self._padding = padding
# We now call _rsa_sig_determine_padding in _rsa_sig_setup. However
# we need to make a pointless call to it here so we maintain the
# API of erroring on init with this context if the values are invalid.
_rsa_sig_determine_padding(backend, public_key, padding, algorithm)
padding = padding
self._algorithm = algorithm
self._hash_ctx = hashes.Hash(self._algorithm, self._backend)
@ -399,117 +316,14 @@ class _RSAVerificationContext(object):
self._hash_ctx.update(data)
def verify(self):
evp_md = self._backend._lib.EVP_get_digestbyname(
self._algorithm.name.encode("ascii"))
self._backend.openssl_assert(evp_md != self._backend._ffi.NULL)
self._verify_method(evp_md)
def _verify_pkey_ctx(self, evp_md):
pkey_ctx = self._backend._lib.EVP_PKEY_CTX_new(
self._public_key._evp_pkey, self._backend._ffi.NULL
)
self._backend.openssl_assert(pkey_ctx != self._backend._ffi.NULL)
pkey_ctx = self._backend._ffi.gc(pkey_ctx,
self._backend._lib.EVP_PKEY_CTX_free)
res = self._backend._lib.EVP_PKEY_verify_init(pkey_ctx)
self._backend.openssl_assert(res == 1)
res = self._backend._lib.EVP_PKEY_CTX_set_signature_md(
pkey_ctx, evp_md)
self._backend.openssl_assert(res > 0)
res = self._backend._lib.EVP_PKEY_CTX_set_rsa_padding(
pkey_ctx, self._padding_enum)
self._backend.openssl_assert(res > 0)
if isinstance(self._padding, PSS):
res = self._backend._lib.EVP_PKEY_CTX_set_rsa_pss_saltlen(
pkey_ctx,
_get_rsa_pss_salt_length(
self._padding,
self._public_key.key_size,
self._hash_ctx.algorithm.digest_size
)
)
self._backend.openssl_assert(res > 0)
if self._backend._lib.Cryptography_HAS_MGF1_MD:
# MGF1 MD is configurable in OpenSSL 1.0.1+
mgf1_md = self._backend._lib.EVP_get_digestbyname(
self._padding._mgf._algorithm.name.encode("ascii"))
self._backend.openssl_assert(
mgf1_md != self._backend._ffi.NULL
)
res = self._backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md(
pkey_ctx, mgf1_md
)
self._backend.openssl_assert(res > 0)
data_to_verify = self._hash_ctx.finalize()
res = self._backend._lib.EVP_PKEY_verify(
pkey_ctx,
return _rsa_sig_verify(
self._backend,
self._padding,
self._algorithm,
self._public_key,
self._signature,
len(self._signature),
data_to_verify,
len(data_to_verify)
self._hash_ctx.finalize()
)
# The previous call can return negative numbers in the event of an
# error. This is not a signature failure but we need to fail if it
# occurs.
self._backend.openssl_assert(res >= 0)
if res == 0:
errors = self._backend._consume_errors()
assert errors
raise InvalidSignature
def _verify_pkcs1(self, evp_md):
if self._hash_ctx._ctx is None:
raise AlreadyFinalized("Context has already been finalized.")
res = self._backend._lib.EVP_VerifyFinal(
self._hash_ctx._ctx._ctx,
self._signature,
len(self._signature),
self._public_key._evp_pkey
)
self._hash_ctx.finalize()
# The previous call can return negative numbers in the event of an
# error. This is not a signature failure but we need to fail if it
# occurs.
self._backend.openssl_assert(res >= 0)
if res == 0:
errors = self._backend._consume_errors()
assert errors
raise InvalidSignature
def _verify_pss(self, evp_md):
buf = self._backend._ffi.new("unsigned char[]", self._pkey_size)
res = self._backend._lib.RSA_public_decrypt(
len(self._signature),
self._signature,
buf,
self._public_key._rsa_cdata,
self._backend._lib.RSA_NO_PADDING
)
if res != self._pkey_size:
errors = self._backend._consume_errors()
assert errors
raise InvalidSignature
data_to_verify = self._hash_ctx.finalize()
res = self._backend._lib.RSA_verify_PKCS1_PSS(
self._public_key._rsa_cdata,
data_to_verify,
evp_md,
buf,
_get_rsa_pss_salt_length(
self._padding,
self._public_key.key_size,
len(data_to_verify)
)
)
if res != 1:
errors = self._backend._consume_errors()
assert errors
raise InvalidSignature
@utils.register_interface(RSAPrivateKeyWithSerialization)
@ -519,11 +333,19 @@ class _RSAPrivateKey(object):
self._rsa_cdata = rsa_cdata
self._evp_pkey = evp_pkey
self._key_size = self._backend._lib.BN_num_bits(self._rsa_cdata.n)
n = self._backend._ffi.new("BIGNUM **")
self._backend._lib.RSA_get0_key(
self._rsa_cdata, n, self._backend._ffi.NULL,
self._backend._ffi.NULL
)
self._backend.openssl_assert(n[0] != self._backend._ffi.NULL)
self._key_size = self._backend._lib.BN_num_bits(n[0])
key_size = utils.read_only_property("_key_size")
def signer(self, padding, algorithm):
_warn_sign_verify_deprecated()
_check_not_prehashed(algorithm)
return _RSASignatureContext(self._backend, self, padding, algorithm)
def decrypt(self, ciphertext, padding):
@ -534,27 +356,46 @@ class _RSAPrivateKey(object):
return _enc_dec_rsa(self._backend, self, ciphertext, padding)
def public_key(self):
ctx = self._backend._lib.RSA_new()
ctx = self._backend._lib.RSAPublicKey_dup(self._rsa_cdata)
self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
ctx = self._backend._ffi.gc(ctx, self._backend._lib.RSA_free)
ctx.e = self._backend._lib.BN_dup(self._rsa_cdata.e)
ctx.n = self._backend._lib.BN_dup(self._rsa_cdata.n)
res = self._backend._lib.RSA_blinding_on(ctx, self._backend._ffi.NULL)
self._backend.openssl_assert(res == 1)
evp_pkey = self._backend._rsa_cdata_to_evp_pkey(ctx)
return _RSAPublicKey(self._backend, ctx, evp_pkey)
def private_numbers(self):
n = self._backend._ffi.new("BIGNUM **")
e = self._backend._ffi.new("BIGNUM **")
d = self._backend._ffi.new("BIGNUM **")
p = self._backend._ffi.new("BIGNUM **")
q = self._backend._ffi.new("BIGNUM **")
dmp1 = self._backend._ffi.new("BIGNUM **")
dmq1 = self._backend._ffi.new("BIGNUM **")
iqmp = self._backend._ffi.new("BIGNUM **")
self._backend._lib.RSA_get0_key(self._rsa_cdata, n, e, d)
self._backend.openssl_assert(n[0] != self._backend._ffi.NULL)
self._backend.openssl_assert(e[0] != self._backend._ffi.NULL)
self._backend.openssl_assert(d[0] != self._backend._ffi.NULL)
self._backend._lib.RSA_get0_factors(self._rsa_cdata, p, q)
self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
self._backend.openssl_assert(q[0] != self._backend._ffi.NULL)
self._backend._lib.RSA_get0_crt_params(
self._rsa_cdata, dmp1, dmq1, iqmp
)
self._backend.openssl_assert(dmp1[0] != self._backend._ffi.NULL)
self._backend.openssl_assert(dmq1[0] != self._backend._ffi.NULL)
self._backend.openssl_assert(iqmp[0] != self._backend._ffi.NULL)
return rsa.RSAPrivateNumbers(
p=self._backend._bn_to_int(self._rsa_cdata.p),
q=self._backend._bn_to_int(self._rsa_cdata.q),
d=self._backend._bn_to_int(self._rsa_cdata.d),
dmp1=self._backend._bn_to_int(self._rsa_cdata.dmp1),
dmq1=self._backend._bn_to_int(self._rsa_cdata.dmq1),
iqmp=self._backend._bn_to_int(self._rsa_cdata.iqmp),
p=self._backend._bn_to_int(p[0]),
q=self._backend._bn_to_int(q[0]),
d=self._backend._bn_to_int(d[0]),
dmp1=self._backend._bn_to_int(dmp1[0]),
dmq1=self._backend._bn_to_int(dmq1[0]),
iqmp=self._backend._bn_to_int(iqmp[0]),
public_numbers=rsa.RSAPublicNumbers(
e=self._backend._bn_to_int(self._rsa_cdata.e),
n=self._backend._bn_to_int(self._rsa_cdata.n),
e=self._backend._bn_to_int(e[0]),
n=self._backend._bn_to_int(n[0]),
)
)
@ -567,6 +408,12 @@ class _RSAPrivateKey(object):
self._rsa_cdata
)
def sign(self, data, padding, algorithm):
data, algorithm = _calculate_digest_and_algorithm(
self._backend, data, algorithm
)
return _rsa_sig_sign(self._backend, padding, algorithm, self, data)
@utils.register_interface(RSAPublicKeyWithSerialization)
class _RSAPublicKey(object):
@ -575,14 +422,22 @@ class _RSAPublicKey(object):
self._rsa_cdata = rsa_cdata
self._evp_pkey = evp_pkey
self._key_size = self._backend._lib.BN_num_bits(self._rsa_cdata.n)
n = self._backend._ffi.new("BIGNUM **")
self._backend._lib.RSA_get0_key(
self._rsa_cdata, n, self._backend._ffi.NULL,
self._backend._ffi.NULL
)
self._backend.openssl_assert(n[0] != self._backend._ffi.NULL)
self._key_size = self._backend._lib.BN_num_bits(n[0])
key_size = utils.read_only_property("_key_size")
def verifier(self, signature, padding, algorithm):
_warn_sign_verify_deprecated()
if not isinstance(signature, bytes):
raise TypeError("signature must be bytes.")
_check_not_prehashed(algorithm)
return _RSAVerificationContext(
self._backend, self, signature, padding, algorithm
)
@ -591,15 +446,31 @@ class _RSAPublicKey(object):
return _enc_dec_rsa(self._backend, self, plaintext, padding)
def public_numbers(self):
n = self._backend._ffi.new("BIGNUM **")
e = self._backend._ffi.new("BIGNUM **")
self._backend._lib.RSA_get0_key(
self._rsa_cdata, n, e, self._backend._ffi.NULL
)
self._backend.openssl_assert(n[0] != self._backend._ffi.NULL)
self._backend.openssl_assert(e[0] != self._backend._ffi.NULL)
return rsa.RSAPublicNumbers(
e=self._backend._bn_to_int(self._rsa_cdata.e),
n=self._backend._bn_to_int(self._rsa_cdata.n),
e=self._backend._bn_to_int(e[0]),
n=self._backend._bn_to_int(n[0]),
)
def public_bytes(self, encoding, format):
return self._backend._public_key_bytes(
encoding,
format,
self,
self._evp_pkey,
self._rsa_cdata
)
def verify(self, signature, data, padding, algorithm):
data, algorithm = _calculate_digest_and_algorithm(
self._backend, data, algorithm
)
return _rsa_sig_verify(
self._backend, padding, algorithm, self, signature, data
)


@ -4,23 +4,42 @@
from __future__ import absolute_import, division, print_function
import six
import warnings
from cryptography import utils
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric.utils import Prehashed
def _truncate_digest(digest, order_bits):
digest_len = len(digest)
def _calculate_digest_and_algorithm(backend, data, algorithm):
if not isinstance(algorithm, Prehashed):
hash_ctx = hashes.Hash(algorithm, backend)
hash_ctx.update(data)
data = hash_ctx.finalize()
else:
algorithm = algorithm._algorithm
if 8 * digest_len > order_bits:
digest_len = (order_bits + 7) // 8
digest = digest[:digest_len]
if len(data) != algorithm.digest_size:
raise ValueError(
"The provided data must be the same length as the hash "
"algorithm's digest size."
)
if 8 * digest_len > order_bits:
rshift = 8 - (order_bits & 0x7)
assert 0 < rshift < 8
return (data, algorithm)
mask = 0xFF >> rshift << rshift
# Set the bottom rshift bits to 0
digest = digest[:-1] + six.int2byte(six.indexbytes(digest, -1) & mask)
def _check_not_prehashed(signature_algorithm):
if isinstance(signature_algorithm, Prehashed):
raise TypeError(
"Prehashed is only supported in the sign and verify methods. "
"It cannot be used with signer or verifier."
)
return digest
def _warn_sign_verify_deprecated():
warnings.warn(
"signer and verifier have been deprecated. Please use sign "
"and verify instead.",
utils.PersistentlyDeprecated,
stacklevel=3
)
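_calculate_digest_and_algorithm is what lets sign()/verify() accept either raw data or a Prehashed wrapper around an externally computed digest. A short sketch of the Prehashed path, assuming an RSA key generated as above:

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import padding, rsa, utils

private_key = rsa.generate_private_key(
    public_exponent=65537, key_size=2048, backend=default_backend()
)

digest = hashes.Hash(hashes.SHA256(), backend=default_backend())
digest.update(b"data hashed ahead of time")
prehashed = digest.finalize()

# The digest length must match the wrapped algorithm's digest_size.
signature = private_key.sign(
    prehashed, padding.PKCS1v15(), utils.Prehashed(hashes.SHA256())
)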


@ -0,0 +1,79 @@
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from cryptography import utils
from cryptography.hazmat.primitives.asymmetric.x25519 import (
X25519PrivateKey, X25519PublicKey
)
@utils.register_interface(X25519PublicKey)
class _X25519PublicKey(object):
def __init__(self, backend, evp_pkey):
self._backend = backend
self._evp_pkey = evp_pkey
def public_bytes(self):
ucharpp = self._backend._ffi.new("unsigned char **")
res = self._backend._lib.EVP_PKEY_get1_tls_encodedpoint(
self._evp_pkey, ucharpp
)
self._backend.openssl_assert(res == 32)
self._backend.openssl_assert(ucharpp[0] != self._backend._ffi.NULL)
data = self._backend._ffi.gc(
ucharpp[0], self._backend._lib.OPENSSL_free
)
return self._backend._ffi.buffer(data, res)[:]
@utils.register_interface(X25519PrivateKey)
class _X25519PrivateKey(object):
def __init__(self, backend, evp_pkey):
self._backend = backend
self._evp_pkey = evp_pkey
def public_key(self):
bio = self._backend._create_mem_bio_gc()
res = self._backend._lib.i2d_PUBKEY_bio(bio, self._evp_pkey)
self._backend.openssl_assert(res == 1)
evp_pkey = self._backend._lib.d2i_PUBKEY_bio(
bio, self._backend._ffi.NULL
)
self._backend.openssl_assert(evp_pkey != self._backend._ffi.NULL)
evp_pkey = self._backend._ffi.gc(
evp_pkey, self._backend._lib.EVP_PKEY_free
)
return _X25519PublicKey(self._backend, evp_pkey)
def exchange(self, peer_public_key):
if not isinstance(peer_public_key, X25519PublicKey):
raise TypeError("peer_public_key must be X25519PublicKey.")
ctx = self._backend._lib.EVP_PKEY_CTX_new(
self._evp_pkey, self._backend._ffi.NULL
)
self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
ctx = self._backend._ffi.gc(ctx, self._backend._lib.EVP_PKEY_CTX_free)
res = self._backend._lib.EVP_PKEY_derive_init(ctx)
self._backend.openssl_assert(res == 1)
res = self._backend._lib.EVP_PKEY_derive_set_peer(
ctx, peer_public_key._evp_pkey
)
self._backend.openssl_assert(res == 1)
keylen = self._backend._ffi.new("size_t *")
res = self._backend._lib.EVP_PKEY_derive(
ctx, self._backend._ffi.NULL, keylen
)
self._backend.openssl_assert(res == 1)
self._backend.openssl_assert(keylen[0] > 0)
buf = self._backend._ffi.new("unsigned char[]", keylen[0])
res = self._backend._lib.EVP_PKEY_derive(ctx, buf, keylen)
if res != 1:
raise ValueError(
"Null shared key derived from public/private pair."
)
return self._backend._ffi.buffer(buf, keylen[0])[:]
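The wrappers above back the public X25519 interface. A minimal key-agreement sketch with both sides in one process, assuming the linked OpenSSL exposes X25519:

from cryptography.hazmat.primitives.asymmetric.x25519 import X25519PrivateKey

alice = X25519PrivateKey.generate()
bob = X25519PrivateKey.generate()

# Each side derives the same 32-byte shared secret from its own private
# key and the peer's public key.
alice_shared = alice.exchange(bob.public_key())
bob_shared = bob.exchange(alice.public_key())
assert alice_shared == bob_shared and len(alice_shared) == 32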