use pyOpenSSL from system
This commit is contained in:
parent
6881f3471a
commit
88e5a9bf60
199 changed files with 47 additions and 59257 deletions
File diff suppressed because it is too large
Load diff
|
@ -1,12 +0,0 @@
|
||||||
# Copyright (C) AB Strakt
# See LICENSE for details.

"""
pyOpenSSL - A simple wrapper around the OpenSSL library
"""

from OpenSSL import rand, crypto, SSL
from OpenSSL.version import __version__

# NOTE(review): 'tsafe' is listed in __all__ but is never imported in this
# module, so ``from OpenSSL import *`` would fail on it — confirm whether the
# import is deliberately omitted (e.g. to avoid an import cycle).
__all__ = [
    'rand', 'crypto', 'SSL', 'tsafe', '__version__']
|
|
|
@ -1,127 +0,0 @@
|
||||||
from warnings import warn
|
|
||||||
import sys
|
|
||||||
|
|
||||||
from six import PY3, binary_type, text_type
|
|
||||||
|
|
||||||
from cryptography.hazmat.bindings.openssl.binding import Binding
|
|
||||||
binding = Binding()
|
|
||||||
ffi = binding.ffi
|
|
||||||
lib = binding.lib
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def text(charp):
    """
    Convert a CFFI ``char*`` into this platform's native string type.

    :param charp: A C-style string represented using CFFI; may be NULL.

    :return: :class:`str` (the empty string when ``charp`` is NULL)
    """
    if charp:
        return native(ffi.string(charp))
    return ""
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def exception_from_error_queue(exception_type):
    """
    Convert an OpenSSL library failure into a Python exception.

    When a call to the native OpenSSL library fails, this is usually signalled
    by the return value, and an error code is stored in an error queue
    associated with the current thread. The err library provides functions to
    obtain these error codes and textual error messages.

    :param exception_type: The exception class to raise; it receives the list
        of ``(library, function, reason)`` text triples drained from the
        queue.
    """
    errors = []
    error = lib.ERR_get_error()
    while error != 0:
        errors.append((
            text(lib.ERR_lib_error_string(error)),
            text(lib.ERR_func_error_string(error)),
            text(lib.ERR_reason_error_string(error)),
        ))
        error = lib.ERR_get_error()

    raise exception_type(errors)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def native(s):
    """
    Convert :py:class:`bytes` or :py:class:`unicode` to the native
    :py:class:`str` type, using UTF-8 encoding if conversion is necessary.

    :raise UnicodeError: The input string is not UTF-8 decodeable.

    :raise TypeError: The input is neither :py:class:`bytes` nor
        :py:class:`unicode`.
    """
    # Dispatch on the concrete string type; on each interpreter exactly one
    # of the two is the native str and is returned untouched.
    if isinstance(s, binary_type):
        return s.decode("utf-8") if PY3 else s
    if isinstance(s, text_type):
        return s if PY3 else s.encode("utf-8")
    raise TypeError("%r is neither bytes nor unicode" % s)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def path_string(s):
    """
    Convert a Python string to a :py:class:`bytes` string identifying the same
    path and which can be passed into an OpenSSL API accepting a filename.

    :param s: An instance of :py:class:`bytes` or :py:class:`unicode`.

    :return: An instance of :py:class:`bytes`.
    """
    # Text paths are encoded with the filesystem encoding; byte paths pass
    # through unchanged.
    if isinstance(s, text_type):
        return s.encode(sys.getfilesystemencoding())
    if isinstance(s, binary_type):
        return s
    raise TypeError("Path must be represented as bytes or unicode string")
|
|
||||||
|
|
||||||
|
|
||||||
if not PY3:
    def byte_string(s):
        """On Python 2 the native string type is already a byte string."""
        return s
else:
    def byte_string(s):
        """Encode *s* with "charmap" so each code point maps to one byte."""
        return s.encode("charmap")
|
|
||||||
|
|
||||||
|
|
||||||
# A marker object to observe whether some optional arguments are passed any
# value or not.
UNSPECIFIED = object()

# Warning template used by text_to_bytes_and_warn(); ``{0}`` is filled with
# the name of the offending parameter.
_TEXT_WARNING = (
    text_type.__name__ + " for {0} is no longer accepted, use bytes"
)
|
|
||||||
|
|
||||||
def text_to_bytes_and_warn(label, obj):
    """
    If ``obj`` is text, emit a warning that it should be bytes instead and try
    to convert it to bytes automatically.

    :param str label: The name of the parameter from which ``obj`` was taken
        (so a developer can easily find the source of the problem and correct
        it).

    :return: If ``obj`` is the text string type, a ``bytes`` object giving the
        UTF-8 encoding of that text is returned.  Otherwise, ``obj`` itself is
        returned.
    """
    # Fast path: anything that is not text passes through untouched.
    if not isinstance(obj, text_type):
        return obj
    warn(
        _TEXT_WARNING.format(label),
        category=DeprecationWarning,
        stacklevel=3
    )
    return obj.encode('utf-8')
|
|
File diff suppressed because it is too large
Load diff
|
@ -1,180 +0,0 @@
|
||||||
"""
|
|
||||||
PRNG management routines, thin wrappers.
|
|
||||||
|
|
||||||
See the file RATIONALE for a short explanation of why this module was written.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from functools import partial
|
|
||||||
|
|
||||||
from six import integer_types as _integer_types
|
|
||||||
|
|
||||||
from OpenSSL._util import (
|
|
||||||
ffi as _ffi,
|
|
||||||
lib as _lib,
|
|
||||||
exception_from_error_queue as _exception_from_error_queue,
|
|
||||||
path_string as _path_string)
|
|
||||||
|
|
||||||
|
|
||||||
class Error(Exception):
    """
    An error occurred in an `OpenSSL.rand` API.
    """

# Raise an ``Error`` populated from OpenSSL's thread-local error queue.
_raise_current_error = partial(_exception_from_error_queue, Error)

# Sentinel used to detect whether optional arguments were supplied.
_unspecified = object()

# Keep a reference to the builtin before ``bytes`` below shadows it.
_builtin_bytes = bytes
|
|
||||||
|
|
||||||
def bytes(num_bytes):
    """
    Get some random bytes as a string.

    :param num_bytes: The number of bytes to fetch
    :return: A string of random bytes
    """
    if not isinstance(num_bytes, _integer_types):
        raise TypeError("num_bytes must be an integer")
    if num_bytes < 0:
        raise ValueError("num_bytes must not be negative")

    buf = _ffi.new("char[]", num_bytes)
    if _lib.RAND_bytes(buf, num_bytes) == -1:
        # TODO: No tests for this code path. Triggering a RAND_bytes failure
        # might involve supplying a custom ENGINE? That's hard.
        _raise_current_error()

    return _ffi.buffer(buf)[:]
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def add(buffer, entropy):
    """
    Add data with a given entropy to the PRNG

    :param buffer: Buffer with random data
    :param entropy: The entropy (in bytes) measurement of the buffer
    :return: None
    """
    if not isinstance(buffer, _builtin_bytes):
        raise TypeError("buffer must be a byte string")

    # Accept long as well as int on Python 2, consistent with the check in
    # bytes() above (previously only plain int was allowed here).
    if not isinstance(entropy, _integer_types):
        raise TypeError("entropy must be an integer")

    # TODO Nothing tests this call actually being made, or made properly.
    _lib.RAND_add(buffer, len(buffer), entropy)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def seed(buffer):
    """
    Alias for rand_add, with entropy equal to length

    :param buffer: Buffer with random data
    :return: None
    """
    if isinstance(buffer, _builtin_bytes):
        # TODO Nothing tests this call actually being made, or made properly.
        _lib.RAND_seed(buffer, len(buffer))
    else:
        raise TypeError("buffer must be a byte string")
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def status():
    """
    Retrieve the status of the PRNG

    :return: True if the PRNG is seeded enough, false otherwise
    """
    # The raw C return value (an int) is handed back directly; it is
    # truthy/falsy as documented.
    seeded = _lib.RAND_status()
    return seeded
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def egd(path, bytes=_unspecified):
    """
    Query an entropy gathering daemon (EGD) for random data and add it to the
    PRNG. I haven't found any problems when the socket is missing, the function
    just returns 0.

    :param path: The path to the EGD socket
    :param bytes: (optional) The number of bytes to read, default is 255
    :returns: The number of bytes read (NB: a value of 0 isn't necessarily an
        error, check rand.status())
    """
    if not isinstance(path, _builtin_bytes):
        raise TypeError("path must be a byte string")

    if bytes is _unspecified:
        bytes = 255
    elif not isinstance(bytes, _integer_types):
        # Accept long as well as int on Python 2, consistent with the check
        # in bytes() above (previously only plain int was allowed here).
        raise TypeError("bytes must be an integer")

    return _lib.RAND_egd_bytes(path, bytes)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def cleanup():
    """
    Erase the memory used by the PRNG.

    :return: None
    """
    # TODO Nothing tests this call actually being made, or made properly.
    # The underlying call's return value is ignored.
    _lib.RAND_cleanup()
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def load_file(filename, maxbytes=_unspecified):
    """
    Seed the PRNG with data from a file

    :param filename: The file to read data from (``bytes`` or ``unicode``).
    :param maxbytes: (optional) The number of bytes to read, default is to read
        the entire file

    :return: The number of bytes read
    """
    filename = _path_string(filename)

    if maxbytes is _unspecified:
        # -1 asks the underlying call to read the whole file.
        maxbytes = -1
    elif not isinstance(maxbytes, _integer_types):
        # Accept long as well as int on Python 2, consistent with the check
        # in bytes() above (previously only plain int was allowed here).
        raise TypeError("maxbytes must be an integer")

    return _lib.RAND_load_file(filename, maxbytes)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def write_file(filename):
    """
    Save PRNG state to a file

    :param filename: The file to write data to (``bytes`` or ``unicode``).

    :return: The number of bytes written
    """
    return _lib.RAND_write_file(_path_string(filename))
|
|
||||||
|
|
||||||
|
|
||||||
# TODO There are no tests for screen at all
def screen():
    """
    Add the current contents of the screen to the PRNG state. Availability:
    Windows.

    :return: None
    """
    _lib.RAND_screen()

# On builds whose OpenSSL has no RAND_screen symbol, remove the wrapper so
# callers get an AttributeError from this module instead of a failing call.
if getattr(_lib, 'RAND_screen', None) is None:
    del screen


# TODO There are no tests for the RAND strings being loaded, whatever that
# means.
_lib.ERR_load_RAND_strings()
|
|
|
@ -1,6 +0,0 @@
|
||||||
# Copyright (C) Jean-Paul Calderone
|
|
||||||
# See LICENSE for details.
|
|
||||||
|
|
||||||
"""
|
|
||||||
Package containing unit tests for :py:mod:`OpenSSL`.
|
|
||||||
"""
|
|
File diff suppressed because it is too large
Load diff
|
@ -1,223 +0,0 @@
|
||||||
# Copyright (c) Frederick Dean
|
|
||||||
# See LICENSE for details.
|
|
||||||
|
|
||||||
"""
|
|
||||||
Unit tests for :py:obj:`OpenSSL.rand`.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from unittest import main
|
|
||||||
import os
|
|
||||||
import stat
|
|
||||||
import sys
|
|
||||||
|
|
||||||
from OpenSSL.test.util import NON_ASCII, TestCase, b
|
|
||||||
from OpenSSL import rand
|
|
||||||
|
|
||||||
|
|
||||||
class RandTests(TestCase):
    """
    Tests for :py:mod:`OpenSSL.rand`.
    """
    def test_bytes_wrong_args(self):
        """
        :py:obj:`OpenSSL.rand.bytes` raises :py:obj:`TypeError` if called with the wrong
        number of arguments or with a non-:py:obj:`int` argument.
        """
        self.assertRaises(TypeError, rand.bytes)
        self.assertRaises(TypeError, rand.bytes, None)
        self.assertRaises(TypeError, rand.bytes, 3, None)

    def test_insufficientMemory(self):
        """
        :py:obj:`OpenSSL.rand.bytes` raises :py:obj:`MemoryError` if more bytes
        are requested than will fit in memory.
        """
        self.assertRaises(MemoryError, rand.bytes, sys.maxsize)

    def test_bytes(self):
        """
        Verify that we can obtain bytes from rand_bytes() and
        that they are different each time. Test the parameter
        of rand_bytes() for bad values.
        """
        b1 = rand.bytes(50)
        self.assertEqual(len(b1), 50)
        b2 = rand.bytes(num_bytes=50)  # parameter by name
        self.assertNotEqual(b1, b2)  # Hip, Hip, Hooray! FIPS compliance
        b3 = rand.bytes(num_bytes=0)
        self.assertEqual(len(b3), 0)
        exc = self.assertRaises(ValueError, rand.bytes, -1)
        self.assertEqual(str(exc), "num_bytes must not be negative")

    def test_add_wrong_args(self):
        """
        When called with the wrong number of arguments, or with arguments not of
        type :py:obj:`str` and :py:obj:`int`, :py:obj:`OpenSSL.rand.add` raises :py:obj:`TypeError`.
        """
        self.assertRaises(TypeError, rand.add)
        self.assertRaises(TypeError, rand.add, b("foo"), None)
        self.assertRaises(TypeError, rand.add, None, 3)
        self.assertRaises(TypeError, rand.add, b("foo"), 3, None)

    def test_add(self):
        """
        :py:obj:`OpenSSL.rand.add` adds entropy to the PRNG.
        """
        rand.add(b('hamburger'), 3)

    def test_seed_wrong_args(self):
        """
        When called with the wrong number of arguments, or with a non-:py:obj:`str`
        argument, :py:obj:`OpenSSL.rand.seed` raises :py:obj:`TypeError`.
        """
        self.assertRaises(TypeError, rand.seed)
        self.assertRaises(TypeError, rand.seed, None)
        self.assertRaises(TypeError, rand.seed, b("foo"), None)

    def test_seed(self):
        """
        :py:obj:`OpenSSL.rand.seed` adds entropy to the PRNG.
        """
        rand.seed(b('milk shake'))

    def test_status_wrong_args(self):
        """
        :py:obj:`OpenSSL.rand.status` raises :py:obj:`TypeError` when called with any
        arguments.
        """
        self.assertRaises(TypeError, rand.status, None)

    def test_status(self):
        """
        :py:obj:`OpenSSL.rand.status` returns :py:obj:`True` if the PRNG has sufficient
        entropy, :py:obj:`False` otherwise.
        """
        # It's hard to know what it is actually going to return. Different
        # OpenSSL random engines decide differently whether they have enough
        # entropy or not.
        self.assertTrue(rand.status() in (1, 2))

    def test_egd_wrong_args(self):
        """
        :py:obj:`OpenSSL.rand.egd` raises :py:obj:`TypeError` when called with the wrong
        number of arguments or with arguments not of type :py:obj:`str` and :py:obj:`int`.
        """
        self.assertRaises(TypeError, rand.egd)
        self.assertRaises(TypeError, rand.egd, None)
        self.assertRaises(TypeError, rand.egd, "foo", None)
        self.assertRaises(TypeError, rand.egd, None, 3)
        self.assertRaises(TypeError, rand.egd, "foo", 3, None)

    def test_egd_missing(self):
        """
        :py:obj:`OpenSSL.rand.egd` returns :py:obj:`0` or :py:obj:`-1` if the
        EGD socket passed to it does not exist.
        """
        result = rand.egd(self.mktemp())
        expected = (-1, 0)
        self.assertTrue(
            result in expected,
            "%r not in %r" % (result, expected))

    def test_egd_missing_and_bytes(self):
        """
        :py:obj:`OpenSSL.rand.egd` returns :py:obj:`0` or :py:obj:`-1` if the
        EGD socket passed to it does not exist even if a size argument is
        explicitly passed.
        """
        result = rand.egd(self.mktemp(), 1024)
        expected = (-1, 0)
        self.assertTrue(
            result in expected,
            "%r not in %r" % (result, expected))

    def test_cleanup_wrong_args(self):
        """
        :py:obj:`OpenSSL.rand.cleanup` raises :py:obj:`TypeError` when called with any
        arguments.
        """
        self.assertRaises(TypeError, rand.cleanup, None)

    def test_cleanup(self):
        """
        :py:obj:`OpenSSL.rand.cleanup` releases the memory used by the PRNG and returns
        :py:obj:`None`.
        """
        self.assertIdentical(rand.cleanup(), None)

    def test_load_file_wrong_args(self):
        """
        :py:obj:`OpenSSL.rand.load_file` raises :py:obj:`TypeError` when called the wrong
        number of arguments or arguments not of type :py:obj:`str` and :py:obj:`int`.
        """
        self.assertRaises(TypeError, rand.load_file)
        self.assertRaises(TypeError, rand.load_file, "foo", None)
        self.assertRaises(TypeError, rand.load_file, None, 1)
        self.assertRaises(TypeError, rand.load_file, "foo", 1, None)

    def test_write_file_wrong_args(self):
        """
        :py:obj:`OpenSSL.rand.write_file` raises :py:obj:`TypeError` when called with the
        wrong number of arguments or a non-:py:obj:`str` argument.
        """
        self.assertRaises(TypeError, rand.write_file)
        self.assertRaises(TypeError, rand.write_file, None)
        self.assertRaises(TypeError, rand.write_file, "foo", None)

    def _read_write_test(self, path):
        """
        Verify that ``rand.write_file`` and ``rand.load_file`` can be used.
        """
        # Create the file so cleanup is more straightforward
        with open(path, "w"):
            pass

        try:
            # Write random bytes to a file
            rand.write_file(path)

            # Verify length of written file
            size = os.stat(path)[stat.ST_SIZE]
            self.assertEqual(1024, size)

            # Read random bytes from file
            rand.load_file(path)
            rand.load_file(path, 4)  # specify a length
        finally:
            # Cleanup
            os.unlink(path)

    def test_bytes_paths(self):
        """
        Random data can be saved and loaded to files with paths specified as
        bytes.
        """
        path = self.mktemp()
        path += NON_ASCII.encode(sys.getfilesystemencoding())
        self._read_write_test(path)

    def test_unicode_paths(self):
        """
        Random data can be saved and loaded to files with paths specified as
        unicode.
        """
        path = self.mktemp().decode('utf-8') + NON_ASCII
        self._read_write_test(path)
|
|
||||||
|
|
||||||
|
|
||||||
# Allow this test module to be run directly.
if __name__ == '__main__':
    main()
|
|
File diff suppressed because it is too large
Load diff
|
@ -1,24 +0,0 @@
|
||||||
# Copyright (C) Jean-Paul Calderone
|
|
||||||
# See LICENSE for details.
|
|
||||||
|
|
||||||
"""
|
|
||||||
Unit tests for :py:obj:`OpenSSL.tsafe`.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from OpenSSL.SSL import TLSv1_METHOD, Context
|
|
||||||
from OpenSSL.tsafe import Connection
|
|
||||||
from OpenSSL.test.util import TestCase
|
|
||||||
|
|
||||||
|
|
||||||
class ConnectionTest(TestCase):
    """
    Tests for :py:obj:`OpenSSL.tsafe.Connection`.
    """
    def test_instantiation(self):
        """
        :py:obj:`OpenSSL.tsafe.Connection` can be instantiated.
        """
        # The following line should not throw an error. This isn't an ideal
        # test. It would be great to refactor the other Connection tests so
        # they could automatically be applied to this class too.
        Connection(Context(TLSv1_METHOD), None)
|
|
|
@ -1,17 +0,0 @@
|
||||||
from OpenSSL._util import exception_from_error_queue, lib
|
|
||||||
from OpenSSL.test.util import TestCase
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
class ErrorTests(TestCase):
    """
    Tests for handling of certain OpenSSL error cases.
    """
    def test_exception_from_error_queue_nonexistent_reason(self):
        """
        :py:func:`exception_from_error_queue` raises ``ValueError`` when it
        encounters an OpenSSL error code which does not have a reason string.
        """
        # Push an error whose reason code has no reason string onto the queue.
        lib.ERR_put_error(lib.ERR_LIB_EVP, 0, 1112, b"", 10)
        exc = self.assertRaises(ValueError, exception_from_error_queue, ValueError)
        # The reason component of the reported triple is the empty string.
        self.assertEqual(exc.args[0][0][2], "")
|
|
|
@ -1,463 +0,0 @@
|
||||||
# Copyright (C) Jean-Paul Calderone
|
|
||||||
# Copyright (C) Twisted Matrix Laboratories.
|
|
||||||
# See LICENSE for details.
|
|
||||||
|
|
||||||
"""
|
|
||||||
Helpers for the OpenSSL test suite, largely copied from
|
|
||||||
U{Twisted<http://twistedmatrix.com/>}.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import shutil
|
|
||||||
import traceback
|
|
||||||
import os, os.path
|
|
||||||
from tempfile import mktemp
|
|
||||||
from unittest import TestCase
|
|
||||||
import sys
|
|
||||||
|
|
||||||
from six import PY3
|
|
||||||
|
|
||||||
from OpenSSL._util import exception_from_error_queue
|
|
||||||
from OpenSSL.crypto import Error
|
|
||||||
|
|
||||||
try:
    import memdbg
except Exception:
    # memdbg is optional; fall back to a stub whose ``heap`` is None so the
    # leak-checking code in TestCase.run can detect that it is unavailable.
    class _memdbg(object): heap = None
    memdbg = _memdbg()

from OpenSSL._util import ffi, lib, byte_string as b


# This is the UTF-8 encoding of the SNOWMAN unicode code point.
NON_ASCII = b("\xe2\x98\x83").decode("utf-8")
|
|
||||||
|
|
||||||
|
|
||||||
class TestCase(TestCase):
|
|
||||||
"""
|
|
||||||
:py:class:`TestCase` adds useful testing functionality beyond what is available
|
|
||||||
from the standard library :py:class:`unittest.TestCase`.
|
|
||||||
"""
|
|
||||||
def run(self, result):
|
|
||||||
run = super(TestCase, self).run
|
|
||||||
if memdbg.heap is None:
|
|
||||||
return run(result)
|
|
||||||
|
|
||||||
# Run the test as usual
|
|
||||||
before = set(memdbg.heap)
|
|
||||||
run(result)
|
|
||||||
|
|
||||||
# Clean up some long-lived allocations so they won't be reported as
|
|
||||||
# memory leaks.
|
|
||||||
lib.CRYPTO_cleanup_all_ex_data()
|
|
||||||
lib.ERR_remove_thread_state(ffi.NULL)
|
|
||||||
after = set(memdbg.heap)
|
|
||||||
|
|
||||||
if not after - before:
|
|
||||||
# No leaks, fast succeed
|
|
||||||
return
|
|
||||||
|
|
||||||
if result.wasSuccessful():
|
|
||||||
# If it passed, run it again with memory debugging
|
|
||||||
before = set(memdbg.heap)
|
|
||||||
run(result)
|
|
||||||
|
|
||||||
# Clean up some long-lived allocations so they won't be reported as
|
|
||||||
# memory leaks.
|
|
||||||
lib.CRYPTO_cleanup_all_ex_data()
|
|
||||||
lib.ERR_remove_thread_state(ffi.NULL)
|
|
||||||
|
|
||||||
after = set(memdbg.heap)
|
|
||||||
|
|
||||||
self._reportLeaks(after - before, result)
|
|
||||||
|
|
||||||
|
|
||||||
def _reportLeaks(self, leaks, result):
|
|
||||||
def format_leak(p):
|
|
||||||
stacks = memdbg.heap[p]
|
|
||||||
# Eventually look at multiple stacks for the realloc() case. For
|
|
||||||
# now just look at the original allocation location.
|
|
||||||
(size, python_stack, c_stack) = stacks[0]
|
|
||||||
|
|
||||||
stack = traceback.format_list(python_stack)[:-1]
|
|
||||||
|
|
||||||
# c_stack looks something like this (interesting parts indicated
|
|
||||||
# with inserted arrows not part of the data):
|
|
||||||
#
|
|
||||||
# /home/exarkun/Projects/pyOpenSSL/branches/use-opentls/__pycache__/_cffi__x89095113xb9185b9b.so(+0x12cf) [0x7fe2e20582cf]
|
|
||||||
# /home/exarkun/Projects/cpython/2.7/python(PyCFunction_Call+0x8b) [0x56265a]
|
|
||||||
# /home/exarkun/Projects/cpython/2.7/python() [0x4d5f52]
|
|
||||||
# /home/exarkun/Projects/cpython/2.7/python(PyEval_EvalFrameEx+0x753b) [0x4d0e1e]
|
|
||||||
# /home/exarkun/Projects/cpython/2.7/python() [0x4d6419]
|
|
||||||
# /home/exarkun/Projects/cpython/2.7/python() [0x4d6129]
|
|
||||||
# /home/exarkun/Projects/cpython/2.7/python(PyEval_EvalFrameEx+0x753b) [0x4d0e1e]
|
|
||||||
# /home/exarkun/Projects/cpython/2.7/python(PyEval_EvalCodeEx+0x1043) [0x4d3726]
|
|
||||||
# /home/exarkun/Projects/cpython/2.7/python() [0x55fd51]
|
|
||||||
# /home/exarkun/Projects/cpython/2.7/python(PyObject_Call+0x7e) [0x420ee6]
|
|
||||||
# /home/exarkun/Projects/cpython/2.7/python(PyEval_CallObjectWithKeywords+0x158) [0x4d56ec]
|
|
||||||
# /home/exarkun/.local/lib/python2.7/site-packages/cffi-0.5-py2.7-linux-x86_64.egg/_cffi_backend.so(+0xe96e) [0x7fe2e38be96e]
|
|
||||||
# /usr/lib/x86_64-linux-gnu/libffi.so.6(ffi_closure_unix64_inner+0x1b9) [0x7fe2e36ad819]
|
|
||||||
# /usr/lib/x86_64-linux-gnu/libffi.so.6(ffi_closure_unix64+0x46) [0x7fe2e36adb7c]
|
|
||||||
# /lib/x86_64-linux-gnu/libcrypto.so.1.0.0(CRYPTO_malloc+0x64) [0x7fe2e1cef784] <------ end interesting
|
|
||||||
# /lib/x86_64-linux-gnu/libcrypto.so.1.0.0(lh_insert+0x16b) [0x7fe2e1d6a24b] .
|
|
||||||
# /lib/x86_64-linux-gnu/libcrypto.so.1.0.0(+0x61c18) [0x7fe2e1cf0c18] .
|
|
||||||
# /lib/x86_64-linux-gnu/libcrypto.so.1.0.0(+0x625ec) [0x7fe2e1cf15ec] .
|
|
||||||
# /lib/x86_64-linux-gnu/libcrypto.so.1.0.0(DSA_new_method+0xe6) [0x7fe2e1d524d6] .
|
|
||||||
# /lib/x86_64-linux-gnu/libcrypto.so.1.0.0(DSA_generate_parameters+0x3a) [0x7fe2e1d5364a] <------ begin interesting
|
|
||||||
# /home/exarkun/Projects/opentls/trunk/tls/c/__pycache__/_cffi__x305d4698xb539baaa.so(+0x1f397) [0x7fe2df84d397]
|
|
||||||
# /home/exarkun/Projects/cpython/2.7/python(PyCFunction_Call+0x8b) [0x56265a]
|
|
||||||
# /home/exarkun/Projects/cpython/2.7/python() [0x4d5f52]
|
|
||||||
# /home/exarkun/Projects/cpython/2.7/python(PyEval_EvalFrameEx+0x753b) [0x4d0e1e]
|
|
||||||
# /home/exarkun/Projects/cpython/2.7/python() [0x4d6419]
|
|
||||||
# ...
|
|
||||||
#
|
|
||||||
# Notice the stack is upside down compared to a Python traceback.
|
|
||||||
# Identify the start and end of interesting bits and stuff it into the stack we report.
|
|
||||||
|
|
||||||
saved = list(c_stack)
|
|
||||||
|
|
||||||
# Figure the first interesting frame will be after a the cffi-compiled module
|
|
||||||
while c_stack and '/__pycache__/_cffi__' not in c_stack[-1]:
|
|
||||||
c_stack.pop()
|
|
||||||
|
|
||||||
# Figure the last interesting frame will always be CRYPTO_malloc,
|
|
||||||
# since that's where we hooked in to things.
|
|
||||||
while c_stack and 'CRYPTO_malloc' not in c_stack[0] and 'CRYPTO_realloc' not in c_stack[0]:
|
|
||||||
c_stack.pop(0)
|
|
||||||
|
|
||||||
if c_stack:
|
|
||||||
c_stack.reverse()
|
|
||||||
else:
|
|
||||||
c_stack = saved[::-1]
|
|
||||||
stack.extend([frame + "\n" for frame in c_stack])
|
|
||||||
|
|
||||||
stack.insert(0, "Leaked (%s) at:\n")
|
|
||||||
return "".join(stack)
|
|
||||||
|
|
||||||
if leaks:
|
|
||||||
unique_leaks = {}
|
|
||||||
for p in leaks:
|
|
||||||
size = memdbg.heap[p][-1][0]
|
|
||||||
new_leak = format_leak(p)
|
|
||||||
if new_leak not in unique_leaks:
|
|
||||||
unique_leaks[new_leak] = [(size, p)]
|
|
||||||
else:
|
|
||||||
unique_leaks[new_leak].append((size, p))
|
|
||||||
memdbg.free(p)
|
|
||||||
|
|
||||||
for (stack, allocs) in unique_leaks.iteritems():
|
|
||||||
allocs_accum = []
|
|
||||||
for (size, pointer) in allocs:
|
|
||||||
|
|
||||||
addr = int(ffi.cast('uintptr_t', pointer))
|
|
||||||
allocs_accum.append("%d@0x%x" % (size, addr))
|
|
||||||
allocs_report = ", ".join(sorted(allocs_accum))
|
|
||||||
|
|
||||||
result.addError(
|
|
||||||
self,
|
|
||||||
(None, Exception(stack % (allocs_report,)), None))
|
|
||||||
|
|
||||||
|
|
||||||
def tearDown(self):
|
|
||||||
"""
|
|
||||||
Clean up any files or directories created using :py:meth:`TestCase.mktemp`.
|
|
||||||
Subclasses must invoke this method if they override it or the
|
|
||||||
cleanup will not occur.
|
|
||||||
"""
|
|
||||||
if False and self._temporaryFiles is not None:
|
|
||||||
for temp in self._temporaryFiles:
|
|
||||||
if os.path.isdir(temp):
|
|
||||||
shutil.rmtree(temp)
|
|
||||||
elif os.path.exists(temp):
|
|
||||||
os.unlink(temp)
|
|
||||||
try:
|
|
||||||
exception_from_error_queue(Error)
|
|
||||||
except Error:
|
|
||||||
e = sys.exc_info()[1]
|
|
||||||
if e.args != ([],):
|
|
||||||
self.fail("Left over errors in OpenSSL error queue: " + repr(e))
|
|
||||||
|
|
||||||
|
|
||||||
def assertIsInstance(self, instance, classOrTuple, message=None):
|
|
||||||
"""
|
|
||||||
Fail if C{instance} is not an instance of the given class or of
|
|
||||||
one of the given classes.
|
|
||||||
|
|
||||||
@param instance: the object to test the type (first argument of the
|
|
||||||
C{isinstance} call).
|
|
||||||
@type instance: any.
|
|
||||||
@param classOrTuple: the class or classes to test against (second
|
|
||||||
argument of the C{isinstance} call).
|
|
||||||
@type classOrTuple: class, type, or tuple.
|
|
||||||
|
|
||||||
@param message: Custom text to include in the exception text if the
|
|
||||||
assertion fails.
|
|
||||||
"""
|
|
||||||
if not isinstance(instance, classOrTuple):
|
|
||||||
if message is None:
|
|
||||||
suffix = ""
|
|
||||||
else:
|
|
||||||
suffix = ": " + message
|
|
||||||
self.fail("%r is not an instance of %s%s" % (
|
|
||||||
instance, classOrTuple, suffix))
|
|
||||||
|
|
||||||
|
|
||||||
def failUnlessIn(self, containee, container, msg=None):
|
|
||||||
"""
|
|
||||||
Fail the test if :py:data:`containee` is not found in :py:data:`container`.
|
|
||||||
|
|
||||||
:param containee: the value that should be in :py:class:`container`
|
|
||||||
:param container: a sequence type, or in the case of a mapping type,
|
|
||||||
will follow semantics of 'if key in dict.keys()'
|
|
||||||
:param msg: if msg is None, then the failure message will be
|
|
||||||
'%r not in %r' % (first, second)
|
|
||||||
"""
|
|
||||||
if containee not in container:
|
|
||||||
raise self.failureException(msg or "%r not in %r"
|
|
||||||
% (containee, container))
|
|
||||||
return containee
|
|
||||||
assertIn = failUnlessIn
|
|
||||||
|
|
||||||
def assertNotIn(self, containee, container, msg=None):
|
|
||||||
"""
|
|
||||||
Fail the test if C{containee} is found in C{container}.
|
|
||||||
|
|
||||||
@param containee: the value that should not be in C{container}
|
|
||||||
@param container: a sequence type, or in the case of a mapping type,
|
|
||||||
will follow semantics of 'if key in dict.keys()'
|
|
||||||
@param msg: if msg is None, then the failure message will be
|
|
||||||
'%r in %r' % (first, second)
|
|
||||||
"""
|
|
||||||
if containee in container:
|
|
||||||
raise self.failureException(msg or "%r in %r"
|
|
||||||
% (containee, container))
|
|
||||||
return containee
|
|
||||||
failIfIn = assertNotIn
|
|
||||||
|
|
||||||
|
|
||||||
def assertIs(self, first, second, msg=None):
|
|
||||||
"""
|
|
||||||
Fail the test if :py:data:`first` is not :py:data:`second`. This is an
|
|
||||||
obect-identity-equality test, not an object equality
|
|
||||||
(i.e. :py:func:`__eq__`) test.
|
|
||||||
|
|
||||||
:param msg: if msg is None, then the failure message will be
|
|
||||||
'%r is not %r' % (first, second)
|
|
||||||
"""
|
|
||||||
if first is not second:
|
|
||||||
raise self.failureException(msg or '%r is not %r' % (first, second))
|
|
||||||
return first
|
|
||||||
assertIdentical = failUnlessIdentical = assertIs
|
|
||||||
|
|
||||||
|
|
||||||
def assertIsNot(self, first, second, msg=None):
|
|
||||||
"""
|
|
||||||
Fail the test if :py:data:`first` is :py:data:`second`. This is an
|
|
||||||
obect-identity-equality test, not an object equality
|
|
||||||
(i.e. :py:func:`__eq__`) test.
|
|
||||||
|
|
||||||
:param msg: if msg is None, then the failure message will be
|
|
||||||
'%r is %r' % (first, second)
|
|
||||||
"""
|
|
||||||
if first is second:
|
|
||||||
raise self.failureException(msg or '%r is %r' % (first, second))
|
|
||||||
return first
|
|
||||||
assertNotIdentical = failIfIdentical = assertIsNot
|
|
||||||
|
|
||||||
|
|
||||||
def failUnlessRaises(self, exception, f, *args, **kwargs):
|
|
||||||
"""
|
|
||||||
Fail the test unless calling the function :py:data:`f` with the given
|
|
||||||
:py:data:`args` and :py:data:`kwargs` raises :py:data:`exception`. The
|
|
||||||
failure will report the traceback and call stack of the unexpected
|
|
||||||
exception.
|
|
||||||
|
|
||||||
:param exception: exception type that is to be expected
|
|
||||||
:param f: the function to call
|
|
||||||
|
|
||||||
:return: The raised exception instance, if it is of the given type.
|
|
||||||
:raise self.failureException: Raised if the function call does
|
|
||||||
not raise an exception or if it raises an exception of a
|
|
||||||
different type.
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
result = f(*args, **kwargs)
|
|
||||||
except exception:
|
|
||||||
inst = sys.exc_info()[1]
|
|
||||||
return inst
|
|
||||||
except:
|
|
||||||
raise self.failureException('%s raised instead of %s'
|
|
||||||
% (sys.exc_info()[0],
|
|
||||||
exception.__name__,
|
|
||||||
))
|
|
||||||
else:
|
|
||||||
raise self.failureException('%s not raised (%r returned)'
|
|
||||||
% (exception.__name__, result))
|
|
||||||
assertRaises = failUnlessRaises
|
|
||||||
|
|
||||||
|
|
||||||
_temporaryFiles = None
|
|
||||||
def mktemp(self):
|
|
||||||
"""
|
|
||||||
Pathetic substitute for twisted.trial.unittest.TestCase.mktemp.
|
|
||||||
"""
|
|
||||||
if self._temporaryFiles is None:
|
|
||||||
self._temporaryFiles = []
|
|
||||||
temp = b(mktemp(dir="."))
|
|
||||||
self._temporaryFiles.append(temp)
|
|
||||||
return temp
|
|
||||||
|
|
||||||
|
|
||||||
# Other stuff
|
|
||||||
def assertConsistentType(self, theType, name, *constructionArgs):
|
|
||||||
"""
|
|
||||||
Perform various assertions about :py:data:`theType` to ensure that it is a
|
|
||||||
well-defined type. This is useful for extension types, where it's
|
|
||||||
pretty easy to do something wacky. If something about the type is
|
|
||||||
unusual, an exception will be raised.
|
|
||||||
|
|
||||||
:param theType: The type object about which to make assertions.
|
|
||||||
:param name: A string giving the name of the type.
|
|
||||||
:param constructionArgs: Positional arguments to use with :py:data:`theType` to
|
|
||||||
create an instance of it.
|
|
||||||
"""
|
|
||||||
self.assertEqual(theType.__name__, name)
|
|
||||||
self.assertTrue(isinstance(theType, type))
|
|
||||||
instance = theType(*constructionArgs)
|
|
||||||
self.assertIdentical(type(instance), theType)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
class EqualityTestsMixin(object):
|
|
||||||
"""
|
|
||||||
A mixin defining tests for the standard implementation of C{==} and C{!=}.
|
|
||||||
"""
|
|
||||||
def anInstance(self):
|
|
||||||
"""
|
|
||||||
Return an instance of the class under test. Each call to this method
|
|
||||||
must return a different object. All objects returned must be equal to
|
|
||||||
each other.
|
|
||||||
"""
|
|
||||||
raise NotImplementedError()
|
|
||||||
|
|
||||||
|
|
||||||
def anotherInstance(self):
|
|
||||||
"""
|
|
||||||
Return an instance of the class under test. Each call to this method
|
|
||||||
must return a different object. The objects must not be equal to the
|
|
||||||
objects returned by C{anInstance}. They may or may not be equal to
|
|
||||||
each other (they will not be compared against each other).
|
|
||||||
"""
|
|
||||||
raise NotImplementedError()
|
|
||||||
|
|
||||||
|
|
||||||
def test_identicalEq(self):
|
|
||||||
"""
|
|
||||||
An object compares equal to itself using the C{==} operator.
|
|
||||||
"""
|
|
||||||
o = self.anInstance()
|
|
||||||
self.assertTrue(o == o)
|
|
||||||
|
|
||||||
|
|
||||||
def test_identicalNe(self):
|
|
||||||
"""
|
|
||||||
An object doesn't compare not equal to itself using the C{!=} operator.
|
|
||||||
"""
|
|
||||||
o = self.anInstance()
|
|
||||||
self.assertFalse(o != o)
|
|
||||||
|
|
||||||
|
|
||||||
def test_sameEq(self):
|
|
||||||
"""
|
|
||||||
Two objects that are equal to each other compare equal to each other
|
|
||||||
using the C{==} operator.
|
|
||||||
"""
|
|
||||||
a = self.anInstance()
|
|
||||||
b = self.anInstance()
|
|
||||||
self.assertTrue(a == b)
|
|
||||||
|
|
||||||
|
|
||||||
def test_sameNe(self):
|
|
||||||
"""
|
|
||||||
Two objects that are equal to each other do not compare not equal to
|
|
||||||
each other using the C{!=} operator.
|
|
||||||
"""
|
|
||||||
a = self.anInstance()
|
|
||||||
b = self.anInstance()
|
|
||||||
self.assertFalse(a != b)
|
|
||||||
|
|
||||||
|
|
||||||
def test_differentEq(self):
|
|
||||||
"""
|
|
||||||
Two objects that are not equal to each other do not compare equal to
|
|
||||||
each other using the C{==} operator.
|
|
||||||
"""
|
|
||||||
a = self.anInstance()
|
|
||||||
b = self.anotherInstance()
|
|
||||||
self.assertFalse(a == b)
|
|
||||||
|
|
||||||
|
|
||||||
def test_differentNe(self):
|
|
||||||
"""
|
|
||||||
Two objects that are not equal to each other compare not equal to each
|
|
||||||
other using the C{!=} operator.
|
|
||||||
"""
|
|
||||||
a = self.anInstance()
|
|
||||||
b = self.anotherInstance()
|
|
||||||
self.assertTrue(a != b)
|
|
||||||
|
|
||||||
|
|
||||||
def test_anotherTypeEq(self):
|
|
||||||
"""
|
|
||||||
The object does not compare equal to an object of an unrelated type
|
|
||||||
(which does not implement the comparison) using the C{==} operator.
|
|
||||||
"""
|
|
||||||
a = self.anInstance()
|
|
||||||
b = object()
|
|
||||||
self.assertFalse(a == b)
|
|
||||||
|
|
||||||
|
|
||||||
def test_anotherTypeNe(self):
|
|
||||||
"""
|
|
||||||
The object compares not equal to an object of an unrelated type (which
|
|
||||||
does not implement the comparison) using the C{!=} operator.
|
|
||||||
"""
|
|
||||||
a = self.anInstance()
|
|
||||||
b = object()
|
|
||||||
self.assertTrue(a != b)
|
|
||||||
|
|
||||||
|
|
||||||
def test_delegatedEq(self):
|
|
||||||
"""
|
|
||||||
The result of comparison using C{==} is delegated to the right-hand
|
|
||||||
operand if it is of an unrelated type.
|
|
||||||
"""
|
|
||||||
class Delegate(object):
|
|
||||||
def __eq__(self, other):
|
|
||||||
# Do something crazy and obvious.
|
|
||||||
return [self]
|
|
||||||
|
|
||||||
a = self.anInstance()
|
|
||||||
b = Delegate()
|
|
||||||
self.assertEqual(a == b, [b])
|
|
||||||
|
|
||||||
|
|
||||||
def test_delegateNe(self):
|
|
||||||
"""
|
|
||||||
The result of comparison using C{!=} is delegated to the right-hand
|
|
||||||
operand if it is of an unrelated type.
|
|
||||||
"""
|
|
||||||
class Delegate(object):
|
|
||||||
def __ne__(self, other):
|
|
||||||
# Do something crazy and obvious.
|
|
||||||
return [self]
|
|
||||||
|
|
||||||
a = self.anInstance()
|
|
||||||
b = Delegate()
|
|
||||||
self.assertEqual(a != b, [b])
|
|
||||||
|
|
||||||
|
|
||||||
# The type name expected in warnings about using the wrong string type.
|
|
||||||
if PY3:
|
|
||||||
WARNING_TYPE_EXPECTED = "str"
|
|
||||||
else:
|
|
||||||
WARNING_TYPE_EXPECTED = "unicode"
|
|
|
@ -1,28 +0,0 @@
|
||||||
from OpenSSL import SSL
|
|
||||||
_ssl = SSL
|
|
||||||
del SSL
|
|
||||||
|
|
||||||
import threading
|
|
||||||
_RLock = threading.RLock
|
|
||||||
del threading
|
|
||||||
|
|
||||||
class Connection:
|
|
||||||
def __init__(self, *args):
|
|
||||||
self._ssl_conn = _ssl.Connection(*args)
|
|
||||||
self._lock = _RLock()
|
|
||||||
|
|
||||||
for f in ('get_context', 'pending', 'send', 'write', 'recv', 'read',
|
|
||||||
'renegotiate', 'bind', 'listen', 'connect', 'accept',
|
|
||||||
'setblocking', 'fileno', 'shutdown', 'close', 'get_cipher_list',
|
|
||||||
'getpeername', 'getsockname', 'getsockopt', 'setsockopt',
|
|
||||||
'makefile', 'get_app_data', 'set_app_data', 'state_string',
|
|
||||||
'sock_shutdown', 'get_peer_certificate', 'get_peer_cert_chain', 'want_read',
|
|
||||||
'want_write', 'set_connect_state', 'set_accept_state',
|
|
||||||
'connect_ex', 'sendall'):
|
|
||||||
exec("""def %s(self, *args):
|
|
||||||
self._lock.acquire()
|
|
||||||
try:
|
|
||||||
return self._ssl_conn.%s(*args)
|
|
||||||
finally:
|
|
||||||
self._lock.release()\n""" % (f, f))
|
|
||||||
|
|
|
@ -1,9 +0,0 @@
|
||||||
# Copyright (C) AB Strakt
|
|
||||||
# Copyright (C) Jean-Paul Calderone
|
|
||||||
# See LICENSE for details.
|
|
||||||
|
|
||||||
"""
|
|
||||||
pyOpenSSL - A simple wrapper around the OpenSSL library
|
|
||||||
"""
|
|
||||||
|
|
||||||
__version__ = '0.15.1'
|
|
Binary file not shown.
|
@ -1,31 +0,0 @@
|
||||||
Metadata-Version: 1.1
|
|
||||||
Name: cffi
|
|
||||||
Version: 1.3.0
|
|
||||||
Summary: Foreign Function Interface for Python calling C code.
|
|
||||||
Home-page: http://cffi.readthedocs.org
|
|
||||||
Author: Armin Rigo, Maciej Fijalkowski
|
|
||||||
Author-email: python-cffi@googlegroups.com
|
|
||||||
License: MIT
|
|
||||||
Description:
|
|
||||||
CFFI
|
|
||||||
====
|
|
||||||
|
|
||||||
Foreign Function Interface for Python calling C code.
|
|
||||||
Please see the `Documentation <http://cffi.readthedocs.org/>`_.
|
|
||||||
|
|
||||||
Contact
|
|
||||||
-------
|
|
||||||
|
|
||||||
`Mailing list <https://groups.google.com/forum/#!forum/python-cffi>`_
|
|
||||||
|
|
||||||
Platform: UNKNOWN
|
|
||||||
Classifier: Programming Language :: Python
|
|
||||||
Classifier: Programming Language :: Python :: 2
|
|
||||||
Classifier: Programming Language :: Python :: 2.6
|
|
||||||
Classifier: Programming Language :: Python :: 2.7
|
|
||||||
Classifier: Programming Language :: Python :: 3
|
|
||||||
Classifier: Programming Language :: Python :: 3.2
|
|
||||||
Classifier: Programming Language :: Python :: 3.3
|
|
||||||
Classifier: Programming Language :: Python :: 3.4
|
|
||||||
Classifier: Programming Language :: Python :: Implementation :: CPython
|
|
||||||
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
|
|
@ -1,145 +0,0 @@
|
||||||
AUTHORS
|
|
||||||
LICENSE
|
|
||||||
MANIFEST.in
|
|
||||||
setup.cfg
|
|
||||||
setup.py
|
|
||||||
setup_base.py
|
|
||||||
c/_cffi_backend.c
|
|
||||||
c/cdlopen.c
|
|
||||||
c/cffi1_module.c
|
|
||||||
c/cglob.c
|
|
||||||
c/ffi_obj.c
|
|
||||||
c/file_emulator.h
|
|
||||||
c/lib_obj.c
|
|
||||||
c/malloc_closure.h
|
|
||||||
c/minibuffer.h
|
|
||||||
c/misc_thread.h
|
|
||||||
c/misc_win32.h
|
|
||||||
c/parse_c_type.c
|
|
||||||
c/realize_c_type.c
|
|
||||||
c/test_c.py
|
|
||||||
c/wchar_helper.h
|
|
||||||
c/libffi_msvc/ffi.c
|
|
||||||
c/libffi_msvc/ffi.h
|
|
||||||
c/libffi_msvc/ffi_common.h
|
|
||||||
c/libffi_msvc/fficonfig.h
|
|
||||||
c/libffi_msvc/ffitarget.h
|
|
||||||
c/libffi_msvc/prep_cif.c
|
|
||||||
c/libffi_msvc/types.c
|
|
||||||
c/libffi_msvc/win32.c
|
|
||||||
c/libffi_msvc/win64.asm
|
|
||||||
c/libffi_msvc/win64.obj
|
|
||||||
cffi/__init__.py
|
|
||||||
cffi/_cffi_include.h
|
|
||||||
cffi/api.py
|
|
||||||
cffi/backend_ctypes.py
|
|
||||||
cffi/cffi_opcode.py
|
|
||||||
cffi/commontypes.py
|
|
||||||
cffi/cparser.py
|
|
||||||
cffi/ffiplatform.py
|
|
||||||
cffi/gc_weakref.py
|
|
||||||
cffi/lock.py
|
|
||||||
cffi/model.py
|
|
||||||
cffi/parse_c_type.h
|
|
||||||
cffi/recompiler.py
|
|
||||||
cffi/setuptools_ext.py
|
|
||||||
cffi/vengine_cpy.py
|
|
||||||
cffi/vengine_gen.py
|
|
||||||
cffi/verifier.py
|
|
||||||
cffi.egg-info/PKG-INFO
|
|
||||||
cffi.egg-info/SOURCES.txt
|
|
||||||
cffi.egg-info/dependency_links.txt
|
|
||||||
cffi.egg-info/entry_points.txt
|
|
||||||
cffi.egg-info/not-zip-safe
|
|
||||||
cffi.egg-info/requires.txt
|
|
||||||
cffi.egg-info/top_level.txt
|
|
||||||
demo/_curses.py
|
|
||||||
demo/_curses_build.py
|
|
||||||
demo/_curses_setup.py
|
|
||||||
demo/api.py
|
|
||||||
demo/bsdopendirtype.py
|
|
||||||
demo/bsdopendirtype_build.py
|
|
||||||
demo/bsdopendirtype_setup.py
|
|
||||||
demo/btrfs-snap.py
|
|
||||||
demo/cffi-cocoa.py
|
|
||||||
demo/fastcsv.py
|
|
||||||
demo/gmp.py
|
|
||||||
demo/manual.c
|
|
||||||
demo/manual2.py
|
|
||||||
demo/pwuid.py
|
|
||||||
demo/py.cleanup
|
|
||||||
demo/pyobj.py
|
|
||||||
demo/readdir.py
|
|
||||||
demo/readdir2.py
|
|
||||||
demo/readdir2_build.py
|
|
||||||
demo/readdir2_setup.py
|
|
||||||
demo/readdir_build.py
|
|
||||||
demo/readdir_ctypes.py
|
|
||||||
demo/readdir_setup.py
|
|
||||||
demo/recopendirtype.py
|
|
||||||
demo/recopendirtype_build.py
|
|
||||||
demo/setup.py
|
|
||||||
demo/setup_manual.py
|
|
||||||
demo/winclipboard.py
|
|
||||||
demo/xclient.py
|
|
||||||
doc/Makefile
|
|
||||||
doc/make.bat
|
|
||||||
doc/misc/design.rst
|
|
||||||
doc/misc/grant-cffi-1.0.rst
|
|
||||||
doc/misc/parse_c_type.rst
|
|
||||||
doc/source/cdef.rst
|
|
||||||
doc/source/conf.py
|
|
||||||
doc/source/index.rst
|
|
||||||
doc/source/installation.rst
|
|
||||||
doc/source/overview.rst
|
|
||||||
doc/source/using.rst
|
|
||||||
doc/source/whatsnew.rst
|
|
||||||
testing/__init__.py
|
|
||||||
testing/support.py
|
|
||||||
testing/udir.py
|
|
||||||
testing/cffi0/__init__.py
|
|
||||||
testing/cffi0/backend_tests.py
|
|
||||||
testing/cffi0/callback_in_thread.py
|
|
||||||
testing/cffi0/test_cdata.py
|
|
||||||
testing/cffi0/test_ctypes.py
|
|
||||||
testing/cffi0/test_ffi_backend.py
|
|
||||||
testing/cffi0/test_function.py
|
|
||||||
testing/cffi0/test_model.py
|
|
||||||
testing/cffi0/test_ownlib.py
|
|
||||||
testing/cffi0/test_parsing.py
|
|
||||||
testing/cffi0/test_platform.py
|
|
||||||
testing/cffi0/test_unicode_literals.py
|
|
||||||
testing/cffi0/test_verify.py
|
|
||||||
testing/cffi0/test_verify2.py
|
|
||||||
testing/cffi0/test_version.py
|
|
||||||
testing/cffi0/test_vgen.py
|
|
||||||
testing/cffi0/test_vgen2.py
|
|
||||||
testing/cffi0/test_zdistutils.py
|
|
||||||
testing/cffi0/test_zintegration.py
|
|
||||||
testing/cffi0/snippets/distutils_module/setup.py
|
|
||||||
testing/cffi0/snippets/distutils_module/snip_basic_verify.py
|
|
||||||
testing/cffi0/snippets/distutils_package_1/setup.py
|
|
||||||
testing/cffi0/snippets/distutils_package_1/snip_basic_verify1/__init__.py
|
|
||||||
testing/cffi0/snippets/distutils_package_2/setup.py
|
|
||||||
testing/cffi0/snippets/distutils_package_2/snip_basic_verify2/__init__.py
|
|
||||||
testing/cffi0/snippets/infrastructure/setup.py
|
|
||||||
testing/cffi0/snippets/infrastructure/snip_infrastructure/__init__.py
|
|
||||||
testing/cffi0/snippets/setuptools_module/setup.py
|
|
||||||
testing/cffi0/snippets/setuptools_module/snip_setuptools_verify.py
|
|
||||||
testing/cffi0/snippets/setuptools_package_1/setup.py
|
|
||||||
testing/cffi0/snippets/setuptools_package_1/snip_setuptools_verify1/__init__.py
|
|
||||||
testing/cffi0/snippets/setuptools_package_2/setup.py
|
|
||||||
testing/cffi0/snippets/setuptools_package_2/snip_setuptools_verify2/__init__.py
|
|
||||||
testing/cffi1/__init__.py
|
|
||||||
testing/cffi1/test_cffi_binary.py
|
|
||||||
testing/cffi1/test_dlopen.py
|
|
||||||
testing/cffi1/test_dlopen_unicode_literals.py
|
|
||||||
testing/cffi1/test_ffi_obj.py
|
|
||||||
testing/cffi1/test_new_ffi_1.py
|
|
||||||
testing/cffi1/test_parse_c_type.py
|
|
||||||
testing/cffi1/test_re_python.py
|
|
||||||
testing/cffi1/test_realize_c_type.py
|
|
||||||
testing/cffi1/test_recompiler.py
|
|
||||||
testing/cffi1/test_unicode_literals.py
|
|
||||||
testing/cffi1/test_verify1.py
|
|
||||||
testing/cffi1/test_zdist.py
|
|
|
@ -1 +0,0 @@
|
||||||
|
|
|
@ -1,3 +0,0 @@
|
||||||
[distutils.setup_keywords]
|
|
||||||
cffi_modules = cffi.setuptools_ext:cffi_modules
|
|
||||||
|
|
|
@ -1,41 +0,0 @@
|
||||||
../cffi/gc_weakref.py
|
|
||||||
../cffi/recompiler.py
|
|
||||||
../cffi/verifier.py
|
|
||||||
../cffi/backend_ctypes.py
|
|
||||||
../cffi/api.py
|
|
||||||
../cffi/ffiplatform.py
|
|
||||||
../cffi/cparser.py
|
|
||||||
../cffi/vengine_cpy.py
|
|
||||||
../cffi/lock.py
|
|
||||||
../cffi/cffi_opcode.py
|
|
||||||
../cffi/setuptools_ext.py
|
|
||||||
../cffi/vengine_gen.py
|
|
||||||
../cffi/model.py
|
|
||||||
../cffi/__init__.py
|
|
||||||
../cffi/commontypes.py
|
|
||||||
../cffi/_cffi_include.h
|
|
||||||
../cffi/parse_c_type.h
|
|
||||||
../cffi/__pycache__/gc_weakref.cpython-34.pyc
|
|
||||||
../cffi/__pycache__/recompiler.cpython-34.pyc
|
|
||||||
../cffi/__pycache__/verifier.cpython-34.pyc
|
|
||||||
../cffi/__pycache__/backend_ctypes.cpython-34.pyc
|
|
||||||
../cffi/__pycache__/api.cpython-34.pyc
|
|
||||||
../cffi/__pycache__/ffiplatform.cpython-34.pyc
|
|
||||||
../cffi/__pycache__/cparser.cpython-34.pyc
|
|
||||||
../cffi/__pycache__/vengine_cpy.cpython-34.pyc
|
|
||||||
../cffi/__pycache__/lock.cpython-34.pyc
|
|
||||||
../cffi/__pycache__/cffi_opcode.cpython-34.pyc
|
|
||||||
../cffi/__pycache__/setuptools_ext.cpython-34.pyc
|
|
||||||
../cffi/__pycache__/vengine_gen.cpython-34.pyc
|
|
||||||
../cffi/__pycache__/model.cpython-34.pyc
|
|
||||||
../cffi/__pycache__/__init__.cpython-34.pyc
|
|
||||||
../cffi/__pycache__/commontypes.cpython-34.pyc
|
|
||||||
../_cffi_backend.cpython-34m.so
|
|
||||||
./
|
|
||||||
PKG-INFO
|
|
||||||
SOURCES.txt
|
|
||||||
entry_points.txt
|
|
||||||
not-zip-safe
|
|
||||||
dependency_links.txt
|
|
||||||
requires.txt
|
|
||||||
top_level.txt
|
|
|
@ -1 +0,0 @@
|
||||||
pycparser
|
|
|
@ -1,2 +0,0 @@
|
||||||
_cffi_backend
|
|
||||||
cffi
|
|
|
@ -1,13 +0,0 @@
|
||||||
__all__ = ['FFI', 'VerificationError', 'VerificationMissing', 'CDefError',
|
|
||||||
'FFIError']
|
|
||||||
|
|
||||||
from .api import FFI, CDefError, FFIError
|
|
||||||
from .ffiplatform import VerificationError, VerificationMissing
|
|
||||||
|
|
||||||
__version__ = "1.3.0"
|
|
||||||
__version_info__ = (1, 3, 0)
|
|
||||||
|
|
||||||
# The verifier module file names are based on the CRC32 of a string that
|
|
||||||
# contains the following version number. It may be older than __version__
|
|
||||||
# if nothing is clearly incompatible.
|
|
||||||
__version_verifier_modules__ = "0.8.6"
|
|
|
@ -1,229 +0,0 @@
|
||||||
#define _CFFI_
|
|
||||||
#include <Python.h>
|
|
||||||
#ifdef __cplusplus
|
|
||||||
extern "C" {
|
|
||||||
#endif
|
|
||||||
#include <stddef.h>
|
|
||||||
#include "parse_c_type.h"
|
|
||||||
|
|
||||||
/* this block of #ifs should be kept exactly identical between
|
|
||||||
c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py
|
|
||||||
and cffi/_cffi_include.h */
|
|
||||||
#if defined(_MSC_VER)
|
|
||||||
# include <malloc.h> /* for alloca() */
|
|
||||||
# if _MSC_VER < 1600 /* MSVC < 2010 */
|
|
||||||
typedef __int8 int8_t;
|
|
||||||
typedef __int16 int16_t;
|
|
||||||
typedef __int32 int32_t;
|
|
||||||
typedef __int64 int64_t;
|
|
||||||
typedef unsigned __int8 uint8_t;
|
|
||||||
typedef unsigned __int16 uint16_t;
|
|
||||||
typedef unsigned __int32 uint32_t;
|
|
||||||
typedef unsigned __int64 uint64_t;
|
|
||||||
typedef __int8 int_least8_t;
|
|
||||||
typedef __int16 int_least16_t;
|
|
||||||
typedef __int32 int_least32_t;
|
|
||||||
typedef __int64 int_least64_t;
|
|
||||||
typedef unsigned __int8 uint_least8_t;
|
|
||||||
typedef unsigned __int16 uint_least16_t;
|
|
||||||
typedef unsigned __int32 uint_least32_t;
|
|
||||||
typedef unsigned __int64 uint_least64_t;
|
|
||||||
typedef __int8 int_fast8_t;
|
|
||||||
typedef __int16 int_fast16_t;
|
|
||||||
typedef __int32 int_fast32_t;
|
|
||||||
typedef __int64 int_fast64_t;
|
|
||||||
typedef unsigned __int8 uint_fast8_t;
|
|
||||||
typedef unsigned __int16 uint_fast16_t;
|
|
||||||
typedef unsigned __int32 uint_fast32_t;
|
|
||||||
typedef unsigned __int64 uint_fast64_t;
|
|
||||||
typedef __int64 intmax_t;
|
|
||||||
typedef unsigned __int64 uintmax_t;
|
|
||||||
# else
|
|
||||||
# include <stdint.h>
|
|
||||||
# endif
|
|
||||||
# if _MSC_VER < 1800 /* MSVC < 2013 */
|
|
||||||
typedef unsigned char _Bool;
|
|
||||||
# endif
|
|
||||||
#else
|
|
||||||
# include <stdint.h>
|
|
||||||
# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux)
|
|
||||||
# include <alloca.h>
|
|
||||||
# endif
|
|
||||||
#endif
|
|
||||||
|
|
||||||
#ifdef __GNUC__
|
|
||||||
# define _CFFI_UNUSED_FN __attribute__((unused))
|
|
||||||
#else
|
|
||||||
# define _CFFI_UNUSED_FN /* nothing */
|
|
||||||
#endif
|
|
||||||
|
|
||||||
/********** CPython-specific section **********/
|
|
||||||
#ifndef PYPY_VERSION
|
|
||||||
|
|
||||||
|
|
||||||
#if PY_MAJOR_VERSION >= 3
|
|
||||||
# define PyInt_FromLong PyLong_FromLong
|
|
||||||
#endif
|
|
||||||
|
|
||||||
#define _cffi_from_c_double PyFloat_FromDouble
|
|
||||||
#define _cffi_from_c_float PyFloat_FromDouble
|
|
||||||
#define _cffi_from_c_long PyInt_FromLong
|
|
||||||
#define _cffi_from_c_ulong PyLong_FromUnsignedLong
|
|
||||||
#define _cffi_from_c_longlong PyLong_FromLongLong
|
|
||||||
#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong
|
|
||||||
|
|
||||||
#define _cffi_to_c_double PyFloat_AsDouble
|
|
||||||
#define _cffi_to_c_float PyFloat_AsDouble
|
|
||||||
|
|
||||||
#define _cffi_from_c_int(x, type) \
|
|
||||||
(((type)-1) > 0 ? /* unsigned */ \
|
|
||||||
(sizeof(type) < sizeof(long) ? \
|
|
||||||
PyInt_FromLong((long)x) : \
|
|
||||||
sizeof(type) == sizeof(long) ? \
|
|
||||||
PyLong_FromUnsignedLong((unsigned long)x) : \
|
|
||||||
PyLong_FromUnsignedLongLong((unsigned long long)x)) : \
|
|
||||||
(sizeof(type) <= sizeof(long) ? \
|
|
||||||
PyInt_FromLong((long)x) : \
|
|
||||||
PyLong_FromLongLong((long long)x)))
|
|
||||||
|
|
||||||
#define _cffi_to_c_int(o, type) \
|
|
||||||
((type)( \
|
|
||||||
sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \
|
|
||||||
: (type)_cffi_to_c_i8(o)) : \
|
|
||||||
sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \
|
|
||||||
: (type)_cffi_to_c_i16(o)) : \
|
|
||||||
sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \
|
|
||||||
: (type)_cffi_to_c_i32(o)) : \
|
|
||||||
sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o) \
|
|
||||||
: (type)_cffi_to_c_i64(o)) : \
|
|
||||||
(Py_FatalError("unsupported size for type " #type), (type)0)))
|
|
||||||
|
|
||||||
#define _cffi_to_c_i8 \
|
|
||||||
((int(*)(PyObject *))_cffi_exports[1])
|
|
||||||
#define _cffi_to_c_u8 \
|
|
||||||
((int(*)(PyObject *))_cffi_exports[2])
|
|
||||||
#define _cffi_to_c_i16 \
|
|
||||||
((int(*)(PyObject *))_cffi_exports[3])
|
|
||||||
#define _cffi_to_c_u16 \
|
|
||||||
((int(*)(PyObject *))_cffi_exports[4])
|
|
||||||
#define _cffi_to_c_i32 \
|
|
||||||
((int(*)(PyObject *))_cffi_exports[5])
|
|
||||||
#define _cffi_to_c_u32 \
|
|
||||||
((unsigned int(*)(PyObject *))_cffi_exports[6])
|
|
||||||
#define _cffi_to_c_i64 \
|
|
||||||
((long long(*)(PyObject *))_cffi_exports[7])
|
|
||||||
#define _cffi_to_c_u64 \
|
|
||||||
((unsigned long long(*)(PyObject *))_cffi_exports[8])
|
|
||||||
#define _cffi_to_c_char \
|
|
||||||
((int(*)(PyObject *))_cffi_exports[9])
|
|
||||||
#define _cffi_from_c_pointer \
|
|
||||||
((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[10])
|
|
||||||
#define _cffi_to_c_pointer \
|
|
||||||
((char *(*)(PyObject *, CTypeDescrObject *))_cffi_exports[11])
|
|
||||||
#define _cffi_get_struct_layout \
|
|
||||||
not used any more
|
|
||||||
#define _cffi_restore_errno \
|
|
||||||
((void(*)(void))_cffi_exports[13])
|
|
||||||
#define _cffi_save_errno \
|
|
||||||
((void(*)(void))_cffi_exports[14])
|
|
||||||
#define _cffi_from_c_char \
|
|
||||||
((PyObject *(*)(char))_cffi_exports[15])
|
|
||||||
#define _cffi_from_c_deref \
|
|
||||||
((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[16])
|
|
||||||
#define _cffi_to_c \
|
|
||||||
((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[17])
|
|
||||||
#define _cffi_from_c_struct \
|
|
||||||
((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[18])
|
|
||||||
#define _cffi_to_c_wchar_t \
|
|
||||||
((wchar_t(*)(PyObject *))_cffi_exports[19])
|
|
||||||
#define _cffi_from_c_wchar_t \
|
|
||||||
((PyObject *(*)(wchar_t))_cffi_exports[20])
|
|
||||||
#define _cffi_to_c_long_double \
|
|
||||||
((long double(*)(PyObject *))_cffi_exports[21])
|
|
||||||
#define _cffi_to_c__Bool \
|
|
||||||
((_Bool(*)(PyObject *))_cffi_exports[22])
|
|
||||||
#define _cffi_prepare_pointer_call_argument \
|
|
||||||
((Py_ssize_t(*)(CTypeDescrObject *, PyObject *, char **))_cffi_exports[23])
|
|
||||||
#define _cffi_convert_array_from_object \
|
|
||||||
((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[24])
|
|
||||||
#define _CFFI_NUM_EXPORTS 25
|
|
||||||
|
|
||||||
typedef struct _ctypedescr CTypeDescrObject;
|
|
||||||
|
|
||||||
static void *_cffi_exports[_CFFI_NUM_EXPORTS];
|
|
||||||
|
|
||||||
#define _cffi_type(index) ( \
|
|
||||||
assert((((uintptr_t)_cffi_types[index]) & 1) == 0), \
|
|
||||||
(CTypeDescrObject *)_cffi_types[index])
|
|
||||||
|
|
||||||
static PyObject *_cffi_init(const char *module_name, Py_ssize_t version,
|
|
||||||
const struct _cffi_type_context_s *ctx)
|
|
||||||
{
|
|
||||||
PyObject *module, *o_arg, *new_module;
|
|
||||||
void *raw[] = {
|
|
||||||
(void *)module_name,
|
|
||||||
(void *)version,
|
|
||||||
(void *)_cffi_exports,
|
|
||||||
(void *)ctx,
|
|
||||||
};
|
|
||||||
|
|
||||||
module = PyImport_ImportModule("_cffi_backend");
|
|
||||||
if (module == NULL)
|
|
||||||
goto failure;
|
|
||||||
|
|
||||||
o_arg = PyLong_FromVoidPtr((void *)raw);
|
|
||||||
if (o_arg == NULL)
|
|
||||||
goto failure;
|
|
||||||
|
|
||||||
new_module = PyObject_CallMethod(
|
|
||||||
module, (char *)"_init_cffi_1_0_external_module", (char *)"O", o_arg);
|
|
||||||
|
|
||||||
Py_DECREF(o_arg);
|
|
||||||
Py_DECREF(module);
|
|
||||||
return new_module;
|
|
||||||
|
|
||||||
failure:
|
|
||||||
Py_XDECREF(module);
|
|
||||||
return NULL;
|
|
||||||
}
|
|
||||||
|
|
||||||
_CFFI_UNUSED_FN
|
|
||||||
static PyObject **_cffi_unpack_args(PyObject *args_tuple, Py_ssize_t expected,
|
|
||||||
const char *fnname)
|
|
||||||
{
|
|
||||||
if (PyTuple_GET_SIZE(args_tuple) != expected) {
|
|
||||||
PyErr_Format(PyExc_TypeError,
|
|
||||||
"%.150s() takes exactly %zd arguments (%zd given)",
|
|
||||||
fnname, expected, PyTuple_GET_SIZE(args_tuple));
|
|
||||||
return NULL;
|
|
||||||
}
|
|
||||||
return &PyTuple_GET_ITEM(args_tuple, 0); /* pointer to the first item,
|
|
||||||
the others follow */
|
|
||||||
}
|
|
||||||
|
|
||||||
#endif
|
|
||||||
/********** end CPython-specific section **********/
|
|
||||||
|
|
||||||
|
|
||||||
#define _cffi_array_len(array) (sizeof(array) / sizeof((array)[0]))
|
|
||||||
|
|
||||||
#define _cffi_prim_int(size, sign) \
|
|
||||||
((size) == 1 ? ((sign) ? _CFFI_PRIM_INT8 : _CFFI_PRIM_UINT8) : \
|
|
||||||
(size) == 2 ? ((sign) ? _CFFI_PRIM_INT16 : _CFFI_PRIM_UINT16) : \
|
|
||||||
(size) == 4 ? ((sign) ? _CFFI_PRIM_INT32 : _CFFI_PRIM_UINT32) : \
|
|
||||||
(size) == 8 ? ((sign) ? _CFFI_PRIM_INT64 : _CFFI_PRIM_UINT64) : \
|
|
||||||
_CFFI__UNKNOWN_PRIM)
|
|
||||||
|
|
||||||
#define _cffi_prim_float(size) \
|
|
||||||
((size) == sizeof(float) ? _CFFI_PRIM_FLOAT : \
|
|
||||||
(size) == sizeof(double) ? _CFFI_PRIM_DOUBLE : \
|
|
||||||
(size) == sizeof(long double) ? _CFFI__UNKNOWN_LONG_DOUBLE : \
|
|
||||||
_CFFI__UNKNOWN_FLOAT_PRIM)
|
|
||||||
|
|
||||||
#define _cffi_check_int(got, got_nonpos, expected) \
|
|
||||||
((got_nonpos) == (expected <= 0) && \
|
|
||||||
(got) == (unsigned long long)expected)
|
|
||||||
|
|
||||||
#ifdef __cplusplus
|
|
||||||
}
|
|
||||||
#endif
|
|
|
@ -1,724 +0,0 @@
|
||||||
import sys, types
|
|
||||||
from .lock import allocate_lock
|
|
||||||
|
|
||||||
try:
|
|
||||||
callable
|
|
||||||
except NameError:
|
|
||||||
# Python 3.1
|
|
||||||
from collections import Callable
|
|
||||||
callable = lambda x: isinstance(x, Callable)
|
|
||||||
|
|
||||||
try:
|
|
||||||
basestring
|
|
||||||
except NameError:
|
|
||||||
# Python 3.x
|
|
||||||
basestring = str
|
|
||||||
|
|
||||||
|
|
||||||
class FFIError(Exception):
|
|
||||||
pass
|
|
||||||
|
|
||||||
class CDefError(Exception):
|
|
||||||
def __str__(self):
|
|
||||||
try:
|
|
||||||
line = 'line %d: ' % (self.args[1].coord.line,)
|
|
||||||
except (AttributeError, TypeError, IndexError):
|
|
||||||
line = ''
|
|
||||||
return '%s%s' % (line, self.args[0])
|
|
||||||
|
|
||||||
|
|
||||||
class FFI(object):
|
|
||||||
r'''
|
|
||||||
The main top-level class that you instantiate once, or once per module.
|
|
||||||
|
|
||||||
Example usage:
|
|
||||||
|
|
||||||
ffi = FFI()
|
|
||||||
ffi.cdef("""
|
|
||||||
int printf(const char *, ...);
|
|
||||||
""")
|
|
||||||
|
|
||||||
C = ffi.dlopen(None) # standard library
|
|
||||||
-or-
|
|
||||||
C = ffi.verify() # use a C compiler: verify the decl above is right
|
|
||||||
|
|
||||||
C.printf("hello, %s!\n", ffi.new("char[]", "world"))
|
|
||||||
'''
|
|
||||||
|
|
||||||
def __init__(self, backend=None):
|
|
||||||
"""Create an FFI instance. The 'backend' argument is used to
|
|
||||||
select a non-default backend, mostly for tests.
|
|
||||||
"""
|
|
||||||
from . import cparser, model
|
|
||||||
if backend is None:
|
|
||||||
# You need PyPy (>= 2.0 beta), or a CPython (>= 2.6) with
|
|
||||||
# _cffi_backend.so compiled.
|
|
||||||
import _cffi_backend as backend
|
|
||||||
from . import __version__
|
|
||||||
assert backend.__version__ == __version__, \
|
|
||||||
"version mismatch, %s != %s" % (backend.__version__, __version__)
|
|
||||||
# (If you insist you can also try to pass the option
|
|
||||||
# 'backend=backend_ctypes.CTypesBackend()', but don't
|
|
||||||
# rely on it! It's probably not going to work well.)
|
|
||||||
|
|
||||||
self._backend = backend
|
|
||||||
self._lock = allocate_lock()
|
|
||||||
self._parser = cparser.Parser()
|
|
||||||
self._cached_btypes = {}
|
|
||||||
self._parsed_types = types.ModuleType('parsed_types').__dict__
|
|
||||||
self._new_types = types.ModuleType('new_types').__dict__
|
|
||||||
self._function_caches = []
|
|
||||||
self._libraries = []
|
|
||||||
self._cdefsources = []
|
|
||||||
self._included_ffis = []
|
|
||||||
self._windows_unicode = None
|
|
||||||
if hasattr(backend, 'set_ffi'):
|
|
||||||
backend.set_ffi(self)
|
|
||||||
for name in backend.__dict__:
|
|
||||||
if name.startswith('RTLD_'):
|
|
||||||
setattr(self, name, getattr(backend, name))
|
|
||||||
#
|
|
||||||
with self._lock:
|
|
||||||
self.BVoidP = self._get_cached_btype(model.voidp_type)
|
|
||||||
self.BCharA = self._get_cached_btype(model.char_array_type)
|
|
||||||
if isinstance(backend, types.ModuleType):
|
|
||||||
# _cffi_backend: attach these constants to the class
|
|
||||||
if not hasattr(FFI, 'NULL'):
|
|
||||||
FFI.NULL = self.cast(self.BVoidP, 0)
|
|
||||||
FFI.CData, FFI.CType = backend._get_types()
|
|
||||||
else:
|
|
||||||
# ctypes backend: attach these constants to the instance
|
|
||||||
self.NULL = self.cast(self.BVoidP, 0)
|
|
||||||
self.CData, self.CType = backend._get_types()
|
|
||||||
|
|
||||||
def cdef(self, csource, override=False, packed=False):
    """Parse the given C source. This registers all declared functions,
    types, and global variables. The functions and global variables can
    then be accessed via either 'ffi.dlopen()' or 'ffi.verify()'.
    The types can be used in 'ffi.new()' and other functions.
    If 'packed' is specified as True, all structs declared inside this
    cdef are packed, i.e. laid out without any field alignment at all.
    """
    if not isinstance(csource, str):    # unicode, on Python 2
        # NOTE(review): 'basestring' is presumably aliased to 'str' on
        # Python 3 at module level (not visible here) — confirm.
        if not isinstance(csource, basestring):
            raise TypeError("cdef() argument must be a string")
        csource = csource.encode('ascii')
    with self._lock:
        self._parser.parse(csource, override=override, packed=packed)
        # Remember the raw source so include() can replay it elsewhere.
        self._cdefsources.append(csource)
        if override:
            # Redefinitions may invalidate functions already fetched
            # through dlopen(); drop every per-library cache.
            for cache in self._function_caches:
                cache.clear()
        # Complete any backend types whose layout only became known now.
        finishlist = self._parser._recomplete
        if finishlist:
            self._parser._recomplete = []
            for tp in finishlist:
                tp.finish_backend_type(self, finishlist)
|
|
||||||
|
|
||||||
def dlopen(self, name, flags=0):
    """Load and return a dynamic library identified by 'name'.
    The standard C library can be loaded by passing None.
    Note that functions and types declared by 'ffi.cdef()' are not
    linked to a particular library, just like C headers; in the
    library we only look for the actual (untyped) symbols.
    """
    assert isinstance(name, basestring) or name is None
    with self._lock:
        lib, function_cache = _make_ffi_library(self, name, flags)
        # Keep the per-library accessor cache so cdef(override=True)
        # can clear it, and keep the lib itself alive.
        self._function_caches.append(function_cache)
        self._libraries.append(lib)
    return lib
|
|
||||||
|
|
||||||
def _typeof_locked(self, cdecl):
    """Parse the C type string 'cdecl' and cache the result.

    Returns a (btype, really_a_function_type) pair.  Must be called
    with self._lock held.
    """
    # call me with the lock!
    key = cdecl
    if key in self._parsed_types:
        return self._parsed_types[key]
    #
    if not isinstance(cdecl, str):    # unicode, on Python 2
        cdecl = cdecl.encode('ascii')
    #
    type = self._parser.parse_type(cdecl)
    # A raw function type cannot be instantiated; expose it as a
    # pointer-to-function but remember what it really was, so that
    # _typeof() can reject it unless the caller opted in.
    really_a_function_type = type.is_raw_function
    if really_a_function_type:
        type = type.as_function_pointer()
    btype = self._get_cached_btype(type)
    result = btype, really_a_function_type
    self._parsed_types[key] = result
    return result
|
|
||||||
|
|
||||||
def _typeof(self, cdecl, consider_function_as_funcptr=False):
    """Translate the C type string 'cdecl' into a cached ctype object.

    Raises CDefError when 'cdecl' names a plain function type, unless
    'consider_function_as_funcptr' is true (in which case the
    pointer-to-function ctype is returned).
    """
    # Fast path: cached values are always 2-tuples, never None, so a
    # plain .get() sentinel check is equivalent to try/except KeyError.
    cached = self._parsed_types.get(cdecl)
    if cached is None:
        with self._lock:
            cached = self._typeof_locked(cdecl)
    #
    btype, is_raw_function = cached
    if is_raw_function and not consider_function_as_funcptr:
        raise CDefError("the type %r is a function type, not a "
                        "pointer-to-function type" % (cdecl,))
    return btype
|
|
||||||
|
|
||||||
def typeof(self, cdecl):
    """Parse the C type given as a string and return the
    corresponding <ctype> object.
    It can also be used on 'cdata' instance to get its C type.
    """
    if isinstance(cdecl, basestring):
        return self._typeof(cdecl)
    if isinstance(cdecl, self.CData):
        return self._backend.typeof(cdecl)
    if isinstance(cdecl, types.BuiltinFunctionType):
        # Built-in functions produced by vengine_cpy carry their cffi
        # type in their defining module; recover it if possible.
        res = _builtin_function_type(cdecl)
        if res is not None:
            return res
    if (isinstance(cdecl, types.FunctionType)
            and hasattr(cdecl, '_cffi_base_type')):
        # A Python function wrapped by the ctypes backend.
        with self._lock:
            return self._get_cached_btype(cdecl._cffi_base_type)
    raise TypeError(type(cdecl))
|
|
||||||
|
|
||||||
def sizeof(self, cdecl):
|
|
||||||
"""Return the size in bytes of the argument. It can be a
|
|
||||||
string naming a C type, or a 'cdata' instance.
|
|
||||||
"""
|
|
||||||
if isinstance(cdecl, basestring):
|
|
||||||
BType = self._typeof(cdecl)
|
|
||||||
return self._backend.sizeof(BType)
|
|
||||||
else:
|
|
||||||
return self._backend.sizeof(cdecl)
|
|
||||||
|
|
||||||
def alignof(self, cdecl):
|
|
||||||
"""Return the natural alignment size in bytes of the C type
|
|
||||||
given as a string.
|
|
||||||
"""
|
|
||||||
if isinstance(cdecl, basestring):
|
|
||||||
cdecl = self._typeof(cdecl)
|
|
||||||
return self._backend.alignof(cdecl)
|
|
||||||
|
|
||||||
def offsetof(self, cdecl, *fields_or_indexes):
|
|
||||||
"""Return the offset of the named field inside the given
|
|
||||||
structure or array, which must be given as a C type name.
|
|
||||||
You can give several field names in case of nested structures.
|
|
||||||
You can also give numeric values which correspond to array
|
|
||||||
items, in case of an array type.
|
|
||||||
"""
|
|
||||||
if isinstance(cdecl, basestring):
|
|
||||||
cdecl = self._typeof(cdecl)
|
|
||||||
return self._typeoffsetof(cdecl, *fields_or_indexes)[1]
|
|
||||||
|
|
||||||
def new(self, cdecl, init=None):
|
|
||||||
"""Allocate an instance according to the specified C type and
|
|
||||||
return a pointer to it. The specified C type must be either a
|
|
||||||
pointer or an array: ``new('X *')`` allocates an X and returns
|
|
||||||
a pointer to it, whereas ``new('X[n]')`` allocates an array of
|
|
||||||
n X'es and returns an array referencing it (which works
|
|
||||||
mostly like a pointer, like in C). You can also use
|
|
||||||
``new('X[]', n)`` to allocate an array of a non-constant
|
|
||||||
length n.
|
|
||||||
|
|
||||||
The memory is initialized following the rules of declaring a
|
|
||||||
global variable in C: by default it is zero-initialized, but
|
|
||||||
an explicit initializer can be given which can be used to
|
|
||||||
fill all or part of the memory.
|
|
||||||
|
|
||||||
When the returned <cdata> object goes out of scope, the memory
|
|
||||||
is freed. In other words the returned <cdata> object has
|
|
||||||
ownership of the value of type 'cdecl' that it points to. This
|
|
||||||
means that the raw data can be used as long as this object is
|
|
||||||
kept alive, but must not be used for a longer time. Be careful
|
|
||||||
about that when copying the pointer to the memory somewhere
|
|
||||||
else, e.g. into another structure.
|
|
||||||
"""
|
|
||||||
if isinstance(cdecl, basestring):
|
|
||||||
cdecl = self._typeof(cdecl)
|
|
||||||
return self._backend.newp(cdecl, init)
|
|
||||||
|
|
||||||
def new_allocator(self, alloc=None, free=None,
|
|
||||||
should_clear_after_alloc=True):
|
|
||||||
"""Return a new allocator, i.e. a function that behaves like ffi.new()
|
|
||||||
but uses the provided low-level 'alloc' and 'free' functions.
|
|
||||||
|
|
||||||
'alloc' is called with the size as argument. If it returns NULL, a
|
|
||||||
MemoryError is raised. 'free' is called with the result of 'alloc'
|
|
||||||
as argument. Both can be either Python function or directly C
|
|
||||||
functions. If 'free' is None, then no free function is called.
|
|
||||||
If both 'alloc' and 'free' are None, the default is used.
|
|
||||||
|
|
||||||
If 'should_clear_after_alloc' is set to False, then the memory
|
|
||||||
returned by 'alloc' is assumed to be already cleared (or you are
|
|
||||||
fine with garbage); otherwise CFFI will clear it.
|
|
||||||
"""
|
|
||||||
compiled_ffi = self._backend.FFI()
|
|
||||||
allocator = compiled_ffi.new_allocator(alloc, free,
|
|
||||||
should_clear_after_alloc)
|
|
||||||
def allocate(cdecl, init=None):
|
|
||||||
if isinstance(cdecl, basestring):
|
|
||||||
cdecl = self._typeof(cdecl)
|
|
||||||
return allocator(cdecl, init)
|
|
||||||
return allocate
|
|
||||||
|
|
||||||
def cast(self, cdecl, source):
|
|
||||||
"""Similar to a C cast: returns an instance of the named C
|
|
||||||
type initialized with the given 'source'. The source is
|
|
||||||
casted between integers or pointers of any type.
|
|
||||||
"""
|
|
||||||
if isinstance(cdecl, basestring):
|
|
||||||
cdecl = self._typeof(cdecl)
|
|
||||||
return self._backend.cast(cdecl, source)
|
|
||||||
|
|
||||||
def string(self, cdata, maxlen=-1):
|
|
||||||
"""Return a Python string (or unicode string) from the 'cdata'.
|
|
||||||
If 'cdata' is a pointer or array of characters or bytes, returns
|
|
||||||
the null-terminated string. The returned string extends until
|
|
||||||
the first null character, or at most 'maxlen' characters. If
|
|
||||||
'cdata' is an array then 'maxlen' defaults to its length.
|
|
||||||
|
|
||||||
If 'cdata' is a pointer or array of wchar_t, returns a unicode
|
|
||||||
string following the same rules.
|
|
||||||
|
|
||||||
If 'cdata' is a single character or byte or a wchar_t, returns
|
|
||||||
it as a string or unicode string.
|
|
||||||
|
|
||||||
If 'cdata' is an enum, returns the value of the enumerator as a
|
|
||||||
string, or 'NUMBER' if the value is out of range.
|
|
||||||
"""
|
|
||||||
return self._backend.string(cdata, maxlen)
|
|
||||||
|
|
||||||
def buffer(self, cdata, size=-1):
|
|
||||||
"""Return a read-write buffer object that references the raw C data
|
|
||||||
pointed to by the given 'cdata'. The 'cdata' must be a pointer or
|
|
||||||
an array. Can be passed to functions expecting a buffer, or directly
|
|
||||||
manipulated with:
|
|
||||||
|
|
||||||
buf[:] get a copy of it in a regular string, or
|
|
||||||
buf[idx] as a single character
|
|
||||||
buf[:] = ...
|
|
||||||
buf[idx] = ... change the content
|
|
||||||
"""
|
|
||||||
return self._backend.buffer(cdata, size)
|
|
||||||
|
|
||||||
def from_buffer(self, python_buffer):
|
|
||||||
"""Return a <cdata 'char[]'> that points to the data of the
|
|
||||||
given Python object, which must support the buffer interface.
|
|
||||||
Note that this is not meant to be used on the built-in types str,
|
|
||||||
unicode, or bytearray (you can build 'char[]' arrays explicitly)
|
|
||||||
but only on objects containing large quantities of raw data
|
|
||||||
in some other format, like 'array.array' or numpy arrays.
|
|
||||||
"""
|
|
||||||
return self._backend.from_buffer(self.BCharA, python_buffer)
|
|
||||||
|
|
||||||
def memmove(self, dest, src, n):
|
|
||||||
"""ffi.memmove(dest, src, n) copies n bytes of memory from src to dest.
|
|
||||||
|
|
||||||
Like the C function memmove(), the memory areas may overlap;
|
|
||||||
apart from that it behaves like the C function memcpy().
|
|
||||||
|
|
||||||
'src' can be any cdata ptr or array, or any Python buffer object.
|
|
||||||
'dest' can be any cdata ptr or array, or a writable Python buffer
|
|
||||||
object. The size to copy, 'n', is always measured in bytes.
|
|
||||||
|
|
||||||
Unlike other methods, this one supports all Python buffer including
|
|
||||||
byte strings and bytearrays---but it still does not support
|
|
||||||
non-contiguous buffers.
|
|
||||||
"""
|
|
||||||
return self._backend.memmove(dest, src, n)
|
|
||||||
|
|
||||||
def callback(self, cdecl, python_callable=None, error=None, onerror=None):
|
|
||||||
"""Return a callback object or a decorator making such a
|
|
||||||
callback object. 'cdecl' must name a C function pointer type.
|
|
||||||
The callback invokes the specified 'python_callable' (which may
|
|
||||||
be provided either directly or via a decorator). Important: the
|
|
||||||
callback object must be manually kept alive for as long as the
|
|
||||||
callback may be invoked from the C level.
|
|
||||||
"""
|
|
||||||
def callback_decorator_wrap(python_callable):
|
|
||||||
if not callable(python_callable):
|
|
||||||
raise TypeError("the 'python_callable' argument "
|
|
||||||
"is not callable")
|
|
||||||
return self._backend.callback(cdecl, python_callable,
|
|
||||||
error, onerror)
|
|
||||||
if isinstance(cdecl, basestring):
|
|
||||||
cdecl = self._typeof(cdecl, consider_function_as_funcptr=True)
|
|
||||||
if python_callable is None:
|
|
||||||
return callback_decorator_wrap # decorator mode
|
|
||||||
else:
|
|
||||||
return callback_decorator_wrap(python_callable) # direct mode
|
|
||||||
|
|
||||||
def getctype(self, cdecl, replace_with=''):
|
|
||||||
"""Return a string giving the C type 'cdecl', which may be itself
|
|
||||||
a string or a <ctype> object. If 'replace_with' is given, it gives
|
|
||||||
extra text to append (or insert for more complicated C types), like
|
|
||||||
a variable name, or '*' to get actually the C type 'pointer-to-cdecl'.
|
|
||||||
"""
|
|
||||||
if isinstance(cdecl, basestring):
|
|
||||||
cdecl = self._typeof(cdecl)
|
|
||||||
replace_with = replace_with.strip()
|
|
||||||
if (replace_with.startswith('*')
|
|
||||||
and '&[' in self._backend.getcname(cdecl, '&')):
|
|
||||||
replace_with = '(%s)' % replace_with
|
|
||||||
elif replace_with and not replace_with[0] in '[(':
|
|
||||||
replace_with = ' ' + replace_with
|
|
||||||
return self._backend.getcname(cdecl, replace_with)
|
|
||||||
|
|
||||||
def gc(self, cdata, destructor):
    """Return a new cdata object that points to the same
    data.  Later, when this new cdata object is garbage-collected,
    'destructor(old_cdata_object)' will be called.
    """
    # Prefer the native implementation when the backend provides one.
    try:
        gcp = self._backend.gcp
    except AttributeError:
        pass
    else:
        return gcp(cdata, destructor)
    #
    # Pure-Python fallback: lazily create a weakref-based registry
    # that triggers 'destructor' when the proxy cdata dies.
    with self._lock:
        try:
            gc_weakrefs = self.gc_weakrefs
        except AttributeError:
            from .gc_weakref import GcWeakrefs
            gc_weakrefs = self.gc_weakrefs = GcWeakrefs(self)
        return gc_weakrefs.build(cdata, destructor)
|
|
||||||
|
|
||||||
def _get_cached_btype(self, type):
    """Return (building if needed) the backend type for 'type'.

    Must be called with self._lock already held; the assert checks
    that a non-blocking acquire fails, i.e. the lock is taken.
    """
    assert self._lock.acquire(False) is False
    # call me with the lock!
    try:
        BType = self._cached_btypes[type]
    except KeyError:
        finishlist = []
        BType = type.get_cached_btype(self, finishlist)
        # Complete dependent types; the list may grow while iterating.
        for type in finishlist:
            type.finish_backend_type(self, finishlist)
    return BType
|
|
||||||
|
|
||||||
def verify(self, source='', tmpdir=None, **kwargs):
    """Verify that the current ffi signatures compile on this
    machine, and return a dynamic library object.  The dynamic
    library can be used to call functions and access global
    variables declared in this 'ffi'.  The library is compiled
    by the C compiler: it gives you C-level API compatibility
    (including calling macros).  This is unlike 'ffi.dlopen()',
    which requires binary compatibility in the signatures.
    """
    from .verifier import Verifier, _caller_dir_pycache
    #
    # If set_unicode(True) was called, insert the UNICODE and
    # _UNICODE macro declarations
    if self._windows_unicode:
        self._apply_windows_unicode(kwargs)
    #
    # Set the tmpdir here, and not in Verifier.__init__: it picks
    # up the caller's directory, which we want to be the caller of
    # ffi.verify(), as opposed to the caller of Verifier().
    tmpdir = tmpdir or _caller_dir_pycache()
    #
    # Make a Verifier() and use it to load the library.
    self.verifier = Verifier(self, source, tmpdir, **kwargs)
    lib = self.verifier.load_library()
    #
    # Save the loaded library for keep-alive purposes, even
    # if the caller doesn't keep it alive itself (it should).
    self._libraries.append(lib)
    return lib
|
|
||||||
|
|
||||||
def _get_errno(self):
|
|
||||||
return self._backend.get_errno()
|
|
||||||
def _set_errno(self, errno):
|
|
||||||
self._backend.set_errno(errno)
|
|
||||||
errno = property(_get_errno, _set_errno, None,
|
|
||||||
"the value of 'errno' from/to the C calls")
|
|
||||||
|
|
||||||
def getwinerror(self, code=-1):
|
|
||||||
return self._backend.getwinerror(code)
|
|
||||||
|
|
||||||
def _pointer_to(self, ctype):
|
|
||||||
from . import model
|
|
||||||
with self._lock:
|
|
||||||
return model.pointer_cache(self, ctype)
|
|
||||||
|
|
||||||
def addressof(self, cdata, *fields_or_indexes):
|
|
||||||
"""Return the address of a <cdata 'struct-or-union'>.
|
|
||||||
If 'fields_or_indexes' are given, returns the address of that
|
|
||||||
field or array item in the structure or array, recursively in
|
|
||||||
case of nested structures.
|
|
||||||
"""
|
|
||||||
ctype = self._backend.typeof(cdata)
|
|
||||||
if fields_or_indexes:
|
|
||||||
ctype, offset = self._typeoffsetof(ctype, *fields_or_indexes)
|
|
||||||
else:
|
|
||||||
if ctype.kind == "pointer":
|
|
||||||
raise TypeError("addressof(pointer)")
|
|
||||||
offset = 0
|
|
||||||
ctypeptr = self._pointer_to(ctype)
|
|
||||||
return self._backend.rawaddressof(ctypeptr, cdata, offset)
|
|
||||||
|
|
||||||
def _typeoffsetof(self, ctype, field_or_index, *fields_or_indexes):
|
|
||||||
ctype, offset = self._backend.typeoffsetof(ctype, field_or_index)
|
|
||||||
for field1 in fields_or_indexes:
|
|
||||||
ctype, offset1 = self._backend.typeoffsetof(ctype, field1, 1)
|
|
||||||
offset += offset1
|
|
||||||
return ctype, offset
|
|
||||||
|
|
||||||
def include(self, ffi_to_include):
    """Includes the typedefs, structs, unions and enums defined
    in another FFI instance.  Usage is similar to a #include in C,
    where a part of the program might include types defined in
    another part for its own usage.  Note that the include()
    method has no effect on functions, constants and global
    variables, which must anyway be accessed directly from the
    lib object returned by the original FFI instance.
    """
    if not isinstance(ffi_to_include, FFI):
        raise TypeError("ffi.include() expects an argument that is also of"
                        " type cffi.FFI, not %r" % (
                            type(ffi_to_include).__name__,))
    if ffi_to_include is self:
        raise ValueError("self.include(self)")
    # Take both locks, the included ffi first, to keep both parsers
    # consistent while the declarations are copied over.
    with ffi_to_include._lock:
        with self._lock:
            self._parser.include(ffi_to_include._parser)
            # Record the included sources bracketed by markers so that
            # the combined cdef history stays reconstructible.
            self._cdefsources.append('[')
            self._cdefsources.extend(ffi_to_include._cdefsources)
            self._cdefsources.append(']')
            self._included_ffis.append(ffi_to_include)
|
|
||||||
|
|
||||||
def new_handle(self, x):
|
|
||||||
return self._backend.newp_handle(self.BVoidP, x)
|
|
||||||
|
|
||||||
def from_handle(self, x):
|
|
||||||
return self._backend.from_handle(x)
|
|
||||||
|
|
||||||
def set_unicode(self, enabled_flag):
    """Windows: if 'enabled_flag' is True, enable the UNICODE and
    _UNICODE defines in C, and declare the types like TCHAR and LPTCSTR
    to be (pointers to) wchar_t.  If 'enabled_flag' is False,
    declare these types to be (pointers to) plain 8-bit characters.
    This is mostly for backward compatibility; you usually want True.
    """
    if self._windows_unicode is not None:
        raise ValueError("set_unicode() can only be called once")
    enabled_flag = bool(enabled_flag)
    # Register the TCHAR family with the chosen character width; the
    # matching UNICODE/_UNICODE macros are injected later by
    # _apply_windows_unicode() at verify()/compile time.
    if enabled_flag:
        self.cdef("typedef wchar_t TBYTE;"
                  "typedef wchar_t TCHAR;"
                  "typedef const wchar_t *LPCTSTR;"
                  "typedef const wchar_t *PCTSTR;"
                  "typedef wchar_t *LPTSTR;"
                  "typedef wchar_t *PTSTR;"
                  "typedef TBYTE *PTBYTE;"
                  "typedef TCHAR *PTCHAR;")
    else:
        self.cdef("typedef char TBYTE;"
                  "typedef char TCHAR;"
                  "typedef const char *LPCTSTR;"
                  "typedef const char *PCTSTR;"
                  "typedef char *LPTSTR;"
                  "typedef char *PTSTR;"
                  "typedef TBYTE *PTBYTE;"
                  "typedef TCHAR *PTCHAR;")
    self._windows_unicode = enabled_flag
|
|
||||||
|
|
||||||
def _apply_windows_unicode(self, kwds):
|
|
||||||
defmacros = kwds.get('define_macros', ())
|
|
||||||
if not isinstance(defmacros, (list, tuple)):
|
|
||||||
raise TypeError("'define_macros' must be a list or tuple")
|
|
||||||
defmacros = list(defmacros) + [('UNICODE', '1'),
|
|
||||||
('_UNICODE', '1')]
|
|
||||||
kwds['define_macros'] = defmacros
|
|
||||||
|
|
||||||
def set_source(self, module_name, source, source_extension='.c', **kwds):
    """Record the C source (or None for dlopen()-style pure Python
    modules) and the target module name for later compile()/emit_*().
    May only be called once per ffi object.
    """
    if hasattr(self, '_assigned_source'):
        raise ValueError("set_source() cannot be called several times "
                         "per ffi object")
    if not isinstance(module_name, basestring):
        raise TypeError("'module_name' must be a string")
    self._assigned_source = (str(module_name), source,
                             source_extension, kwds)
|
|
||||||
|
|
||||||
def distutils_extension(self, tmpdir='build', verbose=True):
|
|
||||||
from distutils.dir_util import mkpath
|
|
||||||
from .recompiler import recompile
|
|
||||||
#
|
|
||||||
if not hasattr(self, '_assigned_source'):
|
|
||||||
if hasattr(self, 'verifier'): # fallback, 'tmpdir' ignored
|
|
||||||
return self.verifier.get_extension()
|
|
||||||
raise ValueError("set_source() must be called before"
|
|
||||||
" distutils_extension()")
|
|
||||||
module_name, source, source_extension, kwds = self._assigned_source
|
|
||||||
if source is None:
|
|
||||||
raise TypeError("distutils_extension() is only for C extension "
|
|
||||||
"modules, not for dlopen()-style pure Python "
|
|
||||||
"modules")
|
|
||||||
mkpath(tmpdir)
|
|
||||||
ext, updated = recompile(self, module_name,
|
|
||||||
source, tmpdir=tmpdir, extradir=tmpdir,
|
|
||||||
source_extension=source_extension,
|
|
||||||
call_c_compiler=False, **kwds)
|
|
||||||
if verbose:
|
|
||||||
if updated:
|
|
||||||
sys.stderr.write("regenerated: %r\n" % (ext.sources[0],))
|
|
||||||
else:
|
|
||||||
sys.stderr.write("not modified: %r\n" % (ext.sources[0],))
|
|
||||||
return ext
|
|
||||||
|
|
||||||
def emit_c_code(self, filename):
    """Write to 'filename' the C extension module source generated
    from the declarations passed to set_source(); no compiler is run.
    """
    from .recompiler import recompile
    #
    if not hasattr(self, '_assigned_source'):
        raise ValueError("set_source() must be called before emit_c_code()")
    module_name, source, source_extension, kwds = self._assigned_source
    if source is None:
        raise TypeError("emit_c_code() is only for C extension modules, "
                        "not for dlopen()-style pure Python modules")
    recompile(self, module_name, source,
              c_file=filename, call_c_compiler=False, **kwds)
|
|
||||||
|
|
||||||
def emit_python_code(self, filename):
    """Write to 'filename' the out-of-line "pure Python" module
    generated from the declarations passed to set_source(source=None);
    no compiler is run.
    """
    from .recompiler import recompile
    #
    if not hasattr(self, '_assigned_source'):
        # BUGFIX: the message used to say "emit_c_code()" (copy-paste
        # from the sibling method), pointing users at the wrong API.
        raise ValueError("set_source() must be called before"
                         " emit_python_code()")
    module_name, source, source_extension, kwds = self._assigned_source
    if source is not None:
        raise TypeError("emit_python_code() is only for dlopen()-style "
                        "pure Python modules, not for C extension modules")
    recompile(self, module_name, source,
              c_file=filename, call_c_compiler=False, **kwds)
|
|
||||||
|
|
||||||
def compile(self, tmpdir='.'):
    """Compile the module set up by set_source() inside 'tmpdir' and
    return whatever the recompiler produces (e.g. the output filename).
    """
    from .recompiler import recompile
    #
    if not hasattr(self, '_assigned_source'):
        raise ValueError("set_source() must be called before compile()")
    module_name, source, source_extension, kwds = self._assigned_source
    return recompile(self, module_name, source, tmpdir=tmpdir,
                     source_extension=source_extension, **kwds)
|
|
||||||
|
|
||||||
|
|
||||||
def _load_backend_lib(backend, name, flags):
|
|
||||||
if name is None:
|
|
||||||
if sys.platform != "win32":
|
|
||||||
return backend.load_library(None, flags)
|
|
||||||
name = "c" # Windows: load_library(None) fails, but this works
|
|
||||||
# (backward compatibility hack only)
|
|
||||||
try:
|
|
||||||
if '.' not in name and '/' not in name:
|
|
||||||
raise OSError("library not found: %r" % (name,))
|
|
||||||
return backend.load_library(name, flags)
|
|
||||||
except OSError:
|
|
||||||
import ctypes.util
|
|
||||||
path = ctypes.util.find_library(name)
|
|
||||||
if path is None:
|
|
||||||
raise # propagate the original OSError
|
|
||||||
return backend.load_library(path, flags)
|
|
||||||
|
|
||||||
def _make_ffi_library(ffi, libname, flags):
    """Build the lazy library object returned by ffi.dlopen().

    Returns (library, library.__dict__); the dict doubles as the
    per-library accessor cache that cdef(override=True) clears.
    Symbols are resolved on first attribute access, not eagerly.
    """
    import os
    backend = ffi._backend
    backendlib = _load_backend_lib(backend, libname, flags)
    copied_enums = []    # one-shot flag list: non-empty once enums copied
    #
    def make_accessor_locked(name):
        # Resolve 'name' against the cdef'ed declarations, in order:
        # function, variable, enum/int-constant sweep, constant.
        # Called with ffi._lock held.
        key = 'function ' + name
        if key in ffi._parser._declarations:
            tp, _ = ffi._parser._declarations[key]
            BType = ffi._get_cached_btype(tp)
            try:
                value = backendlib.load_function(BType, name)
            except KeyError as e:
                # Surface a missing symbol as AttributeError so that
                # getattr()/hasattr() behave naturally on the library.
                raise AttributeError('%s: %s' % (name, e))
            library.__dict__[name] = value
            return
        #
        key = 'variable ' + name
        if key in ffi._parser._declarations:
            tp, _ = ffi._parser._declarations[key]
            BType = ffi._get_cached_btype(tp)
            read_variable = backendlib.read_variable
            write_variable = backendlib.write_variable
            # Globals become properties on the class so that every
            # access re-reads/re-writes the C-level variable.
            setattr(FFILibrary, name, property(
                lambda self: read_variable(BType, name),
                lambda self, value: write_variable(BType, name, value)))
            return
        #
        if not copied_enums:
            # First miss: copy all enumerator values and integer
            # constants into the instance dict in one sweep.
            from . import model
            error = None
            for key, (tp, _) in ffi._parser._declarations.items():
                if not isinstance(tp, model.EnumType):
                    continue
                try:
                    tp.check_not_partial()
                except Exception as e:
                    # Remember the first failure but keep copying the
                    # other, complete enums.
                    error = e
                    continue
                for enumname, enumval in zip(tp.enumerators, tp.enumvalues):
                    if enumname not in library.__dict__:
                        library.__dict__[enumname] = enumval
            if error is not None:
                if name in library.__dict__:
                    return  # ignore error, about a different enum
                raise error

            for key, val in ffi._parser._int_constants.items():
                if key not in library.__dict__:
                    library.__dict__[key] = val

            copied_enums.append(True)
            if name in library.__dict__:
                return
        #
        key = 'constant ' + name
        if key in ffi._parser._declarations:
            raise NotImplementedError("fetching a non-integer constant "
                                      "after dlopen()")
        #
        raise AttributeError(name)
    #
    def make_accessor(name):
        with ffi._lock:
            if name in library.__dict__ or name in FFILibrary.__dict__:
                return  # added by another thread while waiting for the lock
            make_accessor_locked(name)
    #
    class FFILibrary(object):
        def __getattr__(self, name):
            # Only called on a miss: resolve lazily, then retry the
            # normal lookup (instance dict or new class property).
            make_accessor(name)
            return getattr(self, name)
        def __setattr__(self, name, value):
            try:
                property = getattr(self.__class__, name)
            except AttributeError:
                make_accessor(name)
                setattr(self, name, value)
            else:
                # Writing a C global goes through its property setter.
                property.__set__(self, value)
    #
    if libname is not None:
        try:
            if not isinstance(libname, str):    # unicode, on Python 2
                libname = libname.encode('utf-8')
            FFILibrary.__name__ = 'FFILibrary_%s' % libname
        except UnicodeError:
            pass
    library = FFILibrary()
    return library, library.__dict__
|
|
||||||
|
|
||||||
def _builtin_function_type(func):
|
|
||||||
# a hack to make at least ffi.typeof(builtin_function) work,
|
|
||||||
# if the builtin function was obtained by 'vengine_cpy'.
|
|
||||||
import sys
|
|
||||||
try:
|
|
||||||
module = sys.modules[func.__module__]
|
|
||||||
ffi = module._cffi_original_ffi
|
|
||||||
types_of_builtin_funcs = module._cffi_types_of_builtin_funcs
|
|
||||||
tp = types_of_builtin_funcs[func]
|
|
||||||
except (KeyError, AttributeError, TypeError):
|
|
||||||
return None
|
|
||||||
else:
|
|
||||||
with ffi._lock:
|
|
||||||
return ffi._get_cached_btype(tp)
|
|
File diff suppressed because it is too large
Load diff
|
@ -1,176 +0,0 @@
|
||||||
|
|
||||||
class CffiOp(object):
    """One opcode of the compact type-description tables.

    'op' is one of the OP_* constants (or None for a raw expression);
    'arg' is the opcode's argument (an index, or a string expression).
    """
    def __init__(self, op, arg):
        self.op = op
        self.arg = arg

    def as_c_expr(self):
        # Render the opcode as a C expression for the generated source.
        if self.op is None:
            # A raw expression emitted verbatim, cast to the opcode type.
            assert isinstance(self.arg, str)
            return '(_cffi_opcode_t)(%s)' % (self.arg,)
        classname = CLASS_NAME[self.op]
        return '_CFFI_OP(_CFFI_OP_%s, %s)' % (classname, self.arg)

    def as_python_bytes(self):
        # Render the opcode as 4 escaped bytes for pure-Python output.
        if self.op is None and self.arg.isdigit():
            value = int(self.arg)     # non-negative: '-' not in self.arg
            if value >= 2**31:
                raise OverflowError("cannot emit %r: limited to 2**31-1"
                                    % (self.arg,))
            return format_four_bytes(value)
        if isinstance(self.arg, str):
            # Arbitrary C expressions cannot be represented in the
            # dlopen()-style pure-Python format.
            from .ffiplatform import VerificationError
            raise VerificationError("cannot emit to Python: %r" % (self.arg,))
        # Pack as (arg << 8) | op, big-endian.
        return format_four_bytes((self.arg << 8) | self.op)

    def __str__(self):
        classname = CLASS_NAME.get(self.op, self.op)
        return '(%s %s)' % (classname, self.arg)
|
|
||||||
|
|
||||||
def format_four_bytes(num):
    """Render 'num' as four big-endian bytes written in source-escape
    form, e.g. 0x01020304 -> the 16-char string '\\x01\\x02\\x03\\x04'.
    """
    parts = [(num >> shift) & 0xFF for shift in (24, 16, 8, 0)]
    return ''.join('\\x%02X' % byte for byte in parts)
|
|
||||||
|
|
||||||
OP_PRIMITIVE = 1
|
|
||||||
OP_POINTER = 3
|
|
||||||
OP_ARRAY = 5
|
|
||||||
OP_OPEN_ARRAY = 7
|
|
||||||
OP_STRUCT_UNION = 9
|
|
||||||
OP_ENUM = 11
|
|
||||||
OP_FUNCTION = 13
|
|
||||||
OP_FUNCTION_END = 15
|
|
||||||
OP_NOOP = 17
|
|
||||||
OP_BITFIELD = 19
|
|
||||||
OP_TYPENAME = 21
|
|
||||||
OP_CPYTHON_BLTN_V = 23 # varargs
|
|
||||||
OP_CPYTHON_BLTN_N = 25 # noargs
|
|
||||||
OP_CPYTHON_BLTN_O = 27 # O (i.e. a single arg)
|
|
||||||
OP_CONSTANT = 29
|
|
||||||
OP_CONSTANT_INT = 31
|
|
||||||
OP_GLOBAL_VAR = 33
|
|
||||||
OP_DLOPEN_FUNC = 35
|
|
||||||
OP_DLOPEN_CONST = 37
|
|
||||||
OP_GLOBAL_VAR_F = 39
|
|
||||||
|
|
||||||
PRIM_VOID = 0
|
|
||||||
PRIM_BOOL = 1
|
|
||||||
PRIM_CHAR = 2
|
|
||||||
PRIM_SCHAR = 3
|
|
||||||
PRIM_UCHAR = 4
|
|
||||||
PRIM_SHORT = 5
|
|
||||||
PRIM_USHORT = 6
|
|
||||||
PRIM_INT = 7
|
|
||||||
PRIM_UINT = 8
|
|
||||||
PRIM_LONG = 9
|
|
||||||
PRIM_ULONG = 10
|
|
||||||
PRIM_LONGLONG = 11
|
|
||||||
PRIM_ULONGLONG = 12
|
|
||||||
PRIM_FLOAT = 13
|
|
||||||
PRIM_DOUBLE = 14
|
|
||||||
PRIM_LONGDOUBLE = 15
|
|
||||||
|
|
||||||
PRIM_WCHAR = 16
|
|
||||||
PRIM_INT8 = 17
|
|
||||||
PRIM_UINT8 = 18
|
|
||||||
PRIM_INT16 = 19
|
|
||||||
PRIM_UINT16 = 20
|
|
||||||
PRIM_INT32 = 21
|
|
||||||
PRIM_UINT32 = 22
|
|
||||||
PRIM_INT64 = 23
|
|
||||||
PRIM_UINT64 = 24
|
|
||||||
PRIM_INTPTR = 25
|
|
||||||
PRIM_UINTPTR = 26
|
|
||||||
PRIM_PTRDIFF = 27
|
|
||||||
PRIM_SIZE = 28
|
|
||||||
PRIM_SSIZE = 29
|
|
||||||
PRIM_INT_LEAST8 = 30
|
|
||||||
PRIM_UINT_LEAST8 = 31
|
|
||||||
PRIM_INT_LEAST16 = 32
|
|
||||||
PRIM_UINT_LEAST16 = 33
|
|
||||||
PRIM_INT_LEAST32 = 34
|
|
||||||
PRIM_UINT_LEAST32 = 35
|
|
||||||
PRIM_INT_LEAST64 = 36
|
|
||||||
PRIM_UINT_LEAST64 = 37
|
|
||||||
PRIM_INT_FAST8 = 38
|
|
||||||
PRIM_UINT_FAST8 = 39
|
|
||||||
PRIM_INT_FAST16 = 40
|
|
||||||
PRIM_UINT_FAST16 = 41
|
|
||||||
PRIM_INT_FAST32 = 42
|
|
||||||
PRIM_UINT_FAST32 = 43
|
|
||||||
PRIM_INT_FAST64 = 44
|
|
||||||
PRIM_UINT_FAST64 = 45
|
|
||||||
PRIM_INTMAX = 46
|
|
||||||
PRIM_UINTMAX = 47
|
|
||||||
|
|
||||||
_NUM_PRIM = 48
|
|
||||||
_UNKNOWN_PRIM = -1
|
|
||||||
_UNKNOWN_FLOAT_PRIM = -2
|
|
||||||
_UNKNOWN_LONG_DOUBLE = -3
|
|
||||||
|
|
||||||
PRIMITIVE_TO_INDEX = {
|
|
||||||
'char': PRIM_CHAR,
|
|
||||||
'short': PRIM_SHORT,
|
|
||||||
'int': PRIM_INT,
|
|
||||||
'long': PRIM_LONG,
|
|
||||||
'long long': PRIM_LONGLONG,
|
|
||||||
'signed char': PRIM_SCHAR,
|
|
||||||
'unsigned char': PRIM_UCHAR,
|
|
||||||
'unsigned short': PRIM_USHORT,
|
|
||||||
'unsigned int': PRIM_UINT,
|
|
||||||
'unsigned long': PRIM_ULONG,
|
|
||||||
'unsigned long long': PRIM_ULONGLONG,
|
|
||||||
'float': PRIM_FLOAT,
|
|
||||||
'double': PRIM_DOUBLE,
|
|
||||||
'long double': PRIM_LONGDOUBLE,
|
|
||||||
'_Bool': PRIM_BOOL,
|
|
||||||
'wchar_t': PRIM_WCHAR,
|
|
||||||
'int8_t': PRIM_INT8,
|
|
||||||
'uint8_t': PRIM_UINT8,
|
|
||||||
'int16_t': PRIM_INT16,
|
|
||||||
'uint16_t': PRIM_UINT16,
|
|
||||||
'int32_t': PRIM_INT32,
|
|
||||||
'uint32_t': PRIM_UINT32,
|
|
||||||
'int64_t': PRIM_INT64,
|
|
||||||
'uint64_t': PRIM_UINT64,
|
|
||||||
'intptr_t': PRIM_INTPTR,
|
|
||||||
'uintptr_t': PRIM_UINTPTR,
|
|
||||||
'ptrdiff_t': PRIM_PTRDIFF,
|
|
||||||
'size_t': PRIM_SIZE,
|
|
||||||
'ssize_t': PRIM_SSIZE,
|
|
||||||
'int_least8_t': PRIM_INT_LEAST8,
|
|
||||||
'uint_least8_t': PRIM_UINT_LEAST8,
|
|
||||||
'int_least16_t': PRIM_INT_LEAST16,
|
|
||||||
'uint_least16_t': PRIM_UINT_LEAST16,
|
|
||||||
'int_least32_t': PRIM_INT_LEAST32,
|
|
||||||
'uint_least32_t': PRIM_UINT_LEAST32,
|
|
||||||
'int_least64_t': PRIM_INT_LEAST64,
|
|
||||||
'uint_least64_t': PRIM_UINT_LEAST64,
|
|
||||||
'int_fast8_t': PRIM_INT_FAST8,
|
|
||||||
'uint_fast8_t': PRIM_UINT_FAST8,
|
|
||||||
'int_fast16_t': PRIM_INT_FAST16,
|
|
||||||
'uint_fast16_t': PRIM_UINT_FAST16,
|
|
||||||
'int_fast32_t': PRIM_INT_FAST32,
|
|
||||||
'uint_fast32_t': PRIM_UINT_FAST32,
|
|
||||||
'int_fast64_t': PRIM_INT_FAST64,
|
|
||||||
'uint_fast64_t': PRIM_UINT_FAST64,
|
|
||||||
'intmax_t': PRIM_INTMAX,
|
|
||||||
'uintmax_t': PRIM_UINTMAX,
|
|
||||||
}
|
|
||||||
|
|
||||||
F_UNION = 0x01
|
|
||||||
F_CHECK_FIELDS = 0x02
|
|
||||||
F_PACKED = 0x04
|
|
||||||
F_EXTERNAL = 0x08
|
|
||||||
F_OPAQUE = 0x10
|
|
||||||
|
|
||||||
G_FLAGS = dict([('_CFFI_' + _key, globals()[_key])
|
|
||||||
for _key in ['F_UNION', 'F_CHECK_FIELDS', 'F_PACKED',
|
|
||||||
'F_EXTERNAL', 'F_OPAQUE']])
|
|
||||||
|
|
||||||
CLASS_NAME = {}
|
|
||||||
for _name, _value in list(globals().items()):
|
|
||||||
if _name.startswith('OP_') and isinstance(_value, int):
|
|
||||||
CLASS_NAME[_value] = _name[3:]
|
|
|
@ -1,252 +0,0 @@
|
||||||
import sys
|
|
||||||
from . import api, model
|
|
||||||
|
|
||||||
|
|
||||||
COMMON_TYPES = {
|
|
||||||
'FILE': model.unknown_type('FILE', '_IO_FILE'),
|
|
||||||
'bool': '_Bool',
|
|
||||||
}
|
|
||||||
|
|
||||||
for _type in model.PrimitiveType.ALL_PRIMITIVE_TYPES:
|
|
||||||
if _type.endswith('_t'):
|
|
||||||
COMMON_TYPES[_type] = _type
|
|
||||||
del _type
|
|
||||||
|
|
||||||
_CACHE = {}
|
|
||||||
|
|
||||||
def resolve_common_type(commontype):
|
|
||||||
try:
|
|
||||||
return _CACHE[commontype]
|
|
||||||
except KeyError:
|
|
||||||
result = COMMON_TYPES.get(commontype, commontype)
|
|
||||||
if not isinstance(result, str):
|
|
||||||
pass # result is already a BaseType
|
|
||||||
elif result.endswith(' *'):
|
|
||||||
if result.startswith('const '):
|
|
||||||
result = model.ConstPointerType(
|
|
||||||
resolve_common_type(result[6:-2]))
|
|
||||||
else:
|
|
||||||
result = model.PointerType(resolve_common_type(result[:-2]))
|
|
||||||
elif result in model.PrimitiveType.ALL_PRIMITIVE_TYPES:
|
|
||||||
result = model.PrimitiveType(result)
|
|
||||||
elif result == 'set-unicode-needed':
|
|
||||||
raise api.FFIError("The Windows type %r is only available after "
|
|
||||||
"you call ffi.set_unicode()" % (commontype,))
|
|
||||||
else:
|
|
||||||
if commontype == result:
|
|
||||||
raise api.FFIError("Unsupported type: %r. Please file a bug "
|
|
||||||
"if you think it should be." % (commontype,))
|
|
||||||
result = resolve_common_type(result) # recursively
|
|
||||||
assert isinstance(result, model.BaseTypeByIdentity)
|
|
||||||
_CACHE[commontype] = result
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
# ____________________________________________________________
|
|
||||||
# Windows common types
|
|
||||||
|
|
||||||
|
|
||||||
def win_common_types(maxsize):
|
|
||||||
result = {}
|
|
||||||
if maxsize < (1<<32):
|
|
||||||
result.update({ # Windows 32-bits
|
|
||||||
'HALF_PTR': 'short',
|
|
||||||
'INT_PTR': 'int',
|
|
||||||
'LONG_PTR': 'long',
|
|
||||||
'UHALF_PTR': 'unsigned short',
|
|
||||||
'UINT_PTR': 'unsigned int',
|
|
||||||
'ULONG_PTR': 'unsigned long',
|
|
||||||
})
|
|
||||||
else:
|
|
||||||
result.update({ # Windows 64-bits
|
|
||||||
'HALF_PTR': 'int',
|
|
||||||
'INT_PTR': 'long long',
|
|
||||||
'LONG_PTR': 'long long',
|
|
||||||
'UHALF_PTR': 'unsigned int',
|
|
||||||
'UINT_PTR': 'unsigned long long',
|
|
||||||
'ULONG_PTR': 'unsigned long long',
|
|
||||||
})
|
|
||||||
result.update({
|
|
||||||
"BYTE": "unsigned char",
|
|
||||||
"BOOL": "int",
|
|
||||||
"CCHAR": "char",
|
|
||||||
"CHAR": "char",
|
|
||||||
"DWORD": "unsigned long",
|
|
||||||
"DWORD32": "unsigned int",
|
|
||||||
"DWORD64": "unsigned long long",
|
|
||||||
"FLOAT": "float",
|
|
||||||
"INT": "int",
|
|
||||||
"INT8": "signed char",
|
|
||||||
"INT16": "short",
|
|
||||||
"INT32": "int",
|
|
||||||
"INT64": "long long",
|
|
||||||
"LONG": "long",
|
|
||||||
"LONGLONG": "long long",
|
|
||||||
"LONG32": "int",
|
|
||||||
"LONG64": "long long",
|
|
||||||
"WORD": "unsigned short",
|
|
||||||
"PVOID": model.voidp_type,
|
|
||||||
"ULONGLONG": "unsigned long long",
|
|
||||||
"WCHAR": "wchar_t",
|
|
||||||
"SHORT": "short",
|
|
||||||
"UCHAR": "unsigned char",
|
|
||||||
"UINT": "unsigned int",
|
|
||||||
"UINT8": "unsigned char",
|
|
||||||
"UINT16": "unsigned short",
|
|
||||||
"UINT32": "unsigned int",
|
|
||||||
"UINT64": "unsigned long long",
|
|
||||||
"ULONG": "unsigned long",
|
|
||||||
"ULONG32": "unsigned int",
|
|
||||||
"ULONG64": "unsigned long long",
|
|
||||||
"USHORT": "unsigned short",
|
|
||||||
|
|
||||||
"SIZE_T": "ULONG_PTR",
|
|
||||||
"SSIZE_T": "LONG_PTR",
|
|
||||||
"ATOM": "WORD",
|
|
||||||
"BOOLEAN": "BYTE",
|
|
||||||
"COLORREF": "DWORD",
|
|
||||||
|
|
||||||
"HANDLE": "PVOID",
|
|
||||||
"DWORDLONG": "ULONGLONG",
|
|
||||||
"DWORD_PTR": "ULONG_PTR",
|
|
||||||
"HACCEL": "HANDLE",
|
|
||||||
|
|
||||||
"HBITMAP": "HANDLE",
|
|
||||||
"HBRUSH": "HANDLE",
|
|
||||||
"HCOLORSPACE": "HANDLE",
|
|
||||||
"HCONV": "HANDLE",
|
|
||||||
"HCONVLIST": "HANDLE",
|
|
||||||
"HDC": "HANDLE",
|
|
||||||
"HDDEDATA": "HANDLE",
|
|
||||||
"HDESK": "HANDLE",
|
|
||||||
"HDROP": "HANDLE",
|
|
||||||
"HDWP": "HANDLE",
|
|
||||||
"HENHMETAFILE": "HANDLE",
|
|
||||||
"HFILE": "int",
|
|
||||||
"HFONT": "HANDLE",
|
|
||||||
"HGDIOBJ": "HANDLE",
|
|
||||||
"HGLOBAL": "HANDLE",
|
|
||||||
"HHOOK": "HANDLE",
|
|
||||||
"HICON": "HANDLE",
|
|
||||||
"HCURSOR": "HICON",
|
|
||||||
"HINSTANCE": "HANDLE",
|
|
||||||
"HKEY": "HANDLE",
|
|
||||||
"HKL": "HANDLE",
|
|
||||||
"HLOCAL": "HANDLE",
|
|
||||||
"HMENU": "HANDLE",
|
|
||||||
"HMETAFILE": "HANDLE",
|
|
||||||
"HMODULE": "HINSTANCE",
|
|
||||||
"HMONITOR": "HANDLE",
|
|
||||||
"HPALETTE": "HANDLE",
|
|
||||||
"HPEN": "HANDLE",
|
|
||||||
"HRESULT": "LONG",
|
|
||||||
"HRGN": "HANDLE",
|
|
||||||
"HRSRC": "HANDLE",
|
|
||||||
"HSZ": "HANDLE",
|
|
||||||
"WINSTA": "HANDLE",
|
|
||||||
"HWND": "HANDLE",
|
|
||||||
|
|
||||||
"LANGID": "WORD",
|
|
||||||
"LCID": "DWORD",
|
|
||||||
"LCTYPE": "DWORD",
|
|
||||||
"LGRPID": "DWORD",
|
|
||||||
"LPARAM": "LONG_PTR",
|
|
||||||
"LPBOOL": "BOOL *",
|
|
||||||
"LPBYTE": "BYTE *",
|
|
||||||
"LPCOLORREF": "DWORD *",
|
|
||||||
"LPCSTR": "const char *",
|
|
||||||
|
|
||||||
"LPCVOID": model.const_voidp_type,
|
|
||||||
"LPCWSTR": "const WCHAR *",
|
|
||||||
"LPDWORD": "DWORD *",
|
|
||||||
"LPHANDLE": "HANDLE *",
|
|
||||||
"LPINT": "int *",
|
|
||||||
"LPLONG": "long *",
|
|
||||||
"LPSTR": "CHAR *",
|
|
||||||
"LPWSTR": "WCHAR *",
|
|
||||||
"LPVOID": model.voidp_type,
|
|
||||||
"LPWORD": "WORD *",
|
|
||||||
"LRESULT": "LONG_PTR",
|
|
||||||
"PBOOL": "BOOL *",
|
|
||||||
"PBOOLEAN": "BOOLEAN *",
|
|
||||||
"PBYTE": "BYTE *",
|
|
||||||
"PCHAR": "CHAR *",
|
|
||||||
"PCSTR": "const CHAR *",
|
|
||||||
"PCWSTR": "const WCHAR *",
|
|
||||||
"PDWORD": "DWORD *",
|
|
||||||
"PDWORDLONG": "DWORDLONG *",
|
|
||||||
"PDWORD_PTR": "DWORD_PTR *",
|
|
||||||
"PDWORD32": "DWORD32 *",
|
|
||||||
"PDWORD64": "DWORD64 *",
|
|
||||||
"PFLOAT": "FLOAT *",
|
|
||||||
"PHALF_PTR": "HALF_PTR *",
|
|
||||||
"PHANDLE": "HANDLE *",
|
|
||||||
"PHKEY": "HKEY *",
|
|
||||||
"PINT": "int *",
|
|
||||||
"PINT_PTR": "INT_PTR *",
|
|
||||||
"PINT8": "INT8 *",
|
|
||||||
"PINT16": "INT16 *",
|
|
||||||
"PINT32": "INT32 *",
|
|
||||||
"PINT64": "INT64 *",
|
|
||||||
"PLCID": "PDWORD",
|
|
||||||
"PLONG": "LONG *",
|
|
||||||
"PLONGLONG": "LONGLONG *",
|
|
||||||
"PLONG_PTR": "LONG_PTR *",
|
|
||||||
"PLONG32": "LONG32 *",
|
|
||||||
"PLONG64": "LONG64 *",
|
|
||||||
"PSHORT": "SHORT *",
|
|
||||||
"PSIZE_T": "SIZE_T *",
|
|
||||||
"PSSIZE_T": "SSIZE_T *",
|
|
||||||
"PSTR": "CHAR *",
|
|
||||||
"PUCHAR": "UCHAR *",
|
|
||||||
"PUHALF_PTR": "UHALF_PTR *",
|
|
||||||
"PUINT": "UINT *",
|
|
||||||
"PUINT_PTR": "UINT_PTR *",
|
|
||||||
"PUINT8": "UINT8 *",
|
|
||||||
"PUINT16": "UINT16 *",
|
|
||||||
"PUINT32": "UINT32 *",
|
|
||||||
"PUINT64": "UINT64 *",
|
|
||||||
"PULONG": "ULONG *",
|
|
||||||
"PULONGLONG": "ULONGLONG *",
|
|
||||||
"PULONG_PTR": "ULONG_PTR *",
|
|
||||||
"PULONG32": "ULONG32 *",
|
|
||||||
"PULONG64": "ULONG64 *",
|
|
||||||
"PUSHORT": "USHORT *",
|
|
||||||
"PWCHAR": "WCHAR *",
|
|
||||||
"PWORD": "WORD *",
|
|
||||||
"PWSTR": "WCHAR *",
|
|
||||||
"QWORD": "unsigned long long",
|
|
||||||
"SC_HANDLE": "HANDLE",
|
|
||||||
"SC_LOCK": "LPVOID",
|
|
||||||
"SERVICE_STATUS_HANDLE": "HANDLE",
|
|
||||||
|
|
||||||
"UNICODE_STRING": model.StructType(
|
|
||||||
"_UNICODE_STRING",
|
|
||||||
["Length",
|
|
||||||
"MaximumLength",
|
|
||||||
"Buffer"],
|
|
||||||
[model.PrimitiveType("unsigned short"),
|
|
||||||
model.PrimitiveType("unsigned short"),
|
|
||||||
model.PointerType(model.PrimitiveType("wchar_t"))],
|
|
||||||
[-1, -1, -1]),
|
|
||||||
"PUNICODE_STRING": "UNICODE_STRING *",
|
|
||||||
"PCUNICODE_STRING": "const UNICODE_STRING *",
|
|
||||||
|
|
||||||
"USN": "LONGLONG",
|
|
||||||
"VOID": model.void_type,
|
|
||||||
"WPARAM": "UINT_PTR",
|
|
||||||
|
|
||||||
"TBYTE": "set-unicode-needed",
|
|
||||||
"TCHAR": "set-unicode-needed",
|
|
||||||
"LPCTSTR": "set-unicode-needed",
|
|
||||||
"PCTSTR": "set-unicode-needed",
|
|
||||||
"LPTSTR": "set-unicode-needed",
|
|
||||||
"PTSTR": "set-unicode-needed",
|
|
||||||
"PTBYTE": "set-unicode-needed",
|
|
||||||
"PTCHAR": "set-unicode-needed",
|
|
||||||
})
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
if sys.platform == 'win32':
|
|
||||||
COMMON_TYPES.update(win_common_types(sys.maxsize))
|
|
|
@ -1,712 +0,0 @@
|
||||||
from . import api, model
|
|
||||||
from .commontypes import COMMON_TYPES, resolve_common_type
|
|
||||||
try:
|
|
||||||
from . import _pycparser as pycparser
|
|
||||||
except ImportError:
|
|
||||||
import pycparser
|
|
||||||
import weakref, re, sys
|
|
||||||
|
|
||||||
try:
|
|
||||||
if sys.version_info < (3,):
|
|
||||||
import thread as _thread
|
|
||||||
else:
|
|
||||||
import _thread
|
|
||||||
lock = _thread.allocate_lock()
|
|
||||||
except ImportError:
|
|
||||||
lock = None
|
|
||||||
|
|
||||||
_r_comment = re.compile(r"/\*.*?\*/|//([^\n\\]|\\.)*?$",
|
|
||||||
re.DOTALL | re.MULTILINE)
|
|
||||||
_r_define = re.compile(r"^\s*#\s*define\s+([A-Za-z_][A-Za-z_0-9]*)"
|
|
||||||
r"\b((?:[^\n\\]|\\.)*?)$",
|
|
||||||
re.DOTALL | re.MULTILINE)
|
|
||||||
_r_partial_enum = re.compile(r"=\s*\.\.\.\s*[,}]|\.\.\.\s*\}")
|
|
||||||
_r_enum_dotdotdot = re.compile(r"__dotdotdot\d+__$")
|
|
||||||
_r_partial_array = re.compile(r"\[\s*\.\.\.\s*\]")
|
|
||||||
_r_words = re.compile(r"\w+|\S")
|
|
||||||
_parser_cache = None
|
|
||||||
_r_int_literal = re.compile(r"-?0?x?[0-9a-f]+[lu]*$", re.IGNORECASE)
|
|
||||||
_r_stdcall1 = re.compile(r"\b(__stdcall|WINAPI)\b")
|
|
||||||
_r_stdcall2 = re.compile(r"[(]\s*(__stdcall|WINAPI)\b")
|
|
||||||
_r_cdecl = re.compile(r"\b__cdecl\b")
|
|
||||||
|
|
||||||
def _get_parser():
|
|
||||||
global _parser_cache
|
|
||||||
if _parser_cache is None:
|
|
||||||
_parser_cache = pycparser.CParser()
|
|
||||||
return _parser_cache
|
|
||||||
|
|
||||||
def _preprocess(csource):
|
|
||||||
# Remove comments. NOTE: this only work because the cdef() section
|
|
||||||
# should not contain any string literal!
|
|
||||||
csource = _r_comment.sub(' ', csource)
|
|
||||||
# Remove the "#define FOO x" lines
|
|
||||||
macros = {}
|
|
||||||
for match in _r_define.finditer(csource):
|
|
||||||
macroname, macrovalue = match.groups()
|
|
||||||
macrovalue = macrovalue.replace('\\\n', '').strip()
|
|
||||||
macros[macroname] = macrovalue
|
|
||||||
csource = _r_define.sub('', csource)
|
|
||||||
# BIG HACK: replace WINAPI or __stdcall with "volatile const".
|
|
||||||
# It doesn't make sense for the return type of a function to be
|
|
||||||
# "volatile volatile const", so we abuse it to detect __stdcall...
|
|
||||||
# Hack number 2 is that "int(volatile *fptr)();" is not valid C
|
|
||||||
# syntax, so we place the "volatile" before the opening parenthesis.
|
|
||||||
csource = _r_stdcall2.sub(' volatile volatile const(', csource)
|
|
||||||
csource = _r_stdcall1.sub(' volatile volatile const ', csource)
|
|
||||||
csource = _r_cdecl.sub(' ', csource)
|
|
||||||
# Replace "[...]" with "[__dotdotdotarray__]"
|
|
||||||
csource = _r_partial_array.sub('[__dotdotdotarray__]', csource)
|
|
||||||
# Replace "...}" with "__dotdotdotNUM__}". This construction should
|
|
||||||
# occur only at the end of enums; at the end of structs we have "...;}"
|
|
||||||
# and at the end of vararg functions "...);". Also replace "=...[,}]"
|
|
||||||
# with ",__dotdotdotNUM__[,}]": this occurs in the enums too, when
|
|
||||||
# giving an unknown value.
|
|
||||||
matches = list(_r_partial_enum.finditer(csource))
|
|
||||||
for number, match in enumerate(reversed(matches)):
|
|
||||||
p = match.start()
|
|
||||||
if csource[p] == '=':
|
|
||||||
p2 = csource.find('...', p, match.end())
|
|
||||||
assert p2 > p
|
|
||||||
csource = '%s,__dotdotdot%d__ %s' % (csource[:p], number,
|
|
||||||
csource[p2+3:])
|
|
||||||
else:
|
|
||||||
assert csource[p:p+3] == '...'
|
|
||||||
csource = '%s __dotdotdot%d__ %s' % (csource[:p], number,
|
|
||||||
csource[p+3:])
|
|
||||||
# Replace all remaining "..." with the same name, "__dotdotdot__",
|
|
||||||
# which is declared with a typedef for the purpose of C parsing.
|
|
||||||
return csource.replace('...', ' __dotdotdot__ '), macros
|
|
||||||
|
|
||||||
def _common_type_names(csource):
|
|
||||||
# Look in the source for what looks like usages of types from the
|
|
||||||
# list of common types. A "usage" is approximated here as the
|
|
||||||
# appearance of the word, minus a "definition" of the type, which
|
|
||||||
# is the last word in a "typedef" statement. Approximative only
|
|
||||||
# but should be fine for all the common types.
|
|
||||||
look_for_words = set(COMMON_TYPES)
|
|
||||||
look_for_words.add(';')
|
|
||||||
look_for_words.add(',')
|
|
||||||
look_for_words.add('(')
|
|
||||||
look_for_words.add(')')
|
|
||||||
look_for_words.add('typedef')
|
|
||||||
words_used = set()
|
|
||||||
is_typedef = False
|
|
||||||
paren = 0
|
|
||||||
previous_word = ''
|
|
||||||
for word in _r_words.findall(csource):
|
|
||||||
if word in look_for_words:
|
|
||||||
if word == ';':
|
|
||||||
if is_typedef:
|
|
||||||
words_used.discard(previous_word)
|
|
||||||
look_for_words.discard(previous_word)
|
|
||||||
is_typedef = False
|
|
||||||
elif word == 'typedef':
|
|
||||||
is_typedef = True
|
|
||||||
paren = 0
|
|
||||||
elif word == '(':
|
|
||||||
paren += 1
|
|
||||||
elif word == ')':
|
|
||||||
paren -= 1
|
|
||||||
elif word == ',':
|
|
||||||
if is_typedef and paren == 0:
|
|
||||||
words_used.discard(previous_word)
|
|
||||||
look_for_words.discard(previous_word)
|
|
||||||
else: # word in COMMON_TYPES
|
|
||||||
words_used.add(word)
|
|
||||||
previous_word = word
|
|
||||||
return words_used
|
|
||||||
|
|
||||||
|
|
||||||
class Parser(object):
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
self._declarations = {}
|
|
||||||
self._included_declarations = set()
|
|
||||||
self._anonymous_counter = 0
|
|
||||||
self._structnode2type = weakref.WeakKeyDictionary()
|
|
||||||
self._override = False
|
|
||||||
self._packed = False
|
|
||||||
self._int_constants = {}
|
|
||||||
self._recomplete = []
|
|
||||||
self._uses_new_feature = None
|
|
||||||
|
|
||||||
def _parse(self, csource):
|
|
||||||
csource, macros = _preprocess(csource)
|
|
||||||
# XXX: for more efficiency we would need to poke into the
|
|
||||||
# internals of CParser... the following registers the
|
|
||||||
# typedefs, because their presence or absence influences the
|
|
||||||
# parsing itself (but what they are typedef'ed to plays no role)
|
|
||||||
ctn = _common_type_names(csource)
|
|
||||||
typenames = []
|
|
||||||
for name in sorted(self._declarations):
|
|
||||||
if name.startswith('typedef '):
|
|
||||||
name = name[8:]
|
|
||||||
typenames.append(name)
|
|
||||||
ctn.discard(name)
|
|
||||||
typenames += sorted(ctn)
|
|
||||||
#
|
|
||||||
csourcelines = ['typedef int %s;' % typename for typename in typenames]
|
|
||||||
csourcelines.append('typedef int __dotdotdot__;')
|
|
||||||
csourcelines.append(csource)
|
|
||||||
csource = '\n'.join(csourcelines)
|
|
||||||
if lock is not None:
|
|
||||||
lock.acquire() # pycparser is not thread-safe...
|
|
||||||
try:
|
|
||||||
ast = _get_parser().parse(csource)
|
|
||||||
except pycparser.c_parser.ParseError as e:
|
|
||||||
self.convert_pycparser_error(e, csource)
|
|
||||||
finally:
|
|
||||||
if lock is not None:
|
|
||||||
lock.release()
|
|
||||||
# csource will be used to find buggy source text
|
|
||||||
return ast, macros, csource
|
|
||||||
|
|
||||||
def _convert_pycparser_error(self, e, csource):
|
|
||||||
# xxx look for ":NUM:" at the start of str(e) and try to interpret
|
|
||||||
# it as a line number
|
|
||||||
line = None
|
|
||||||
msg = str(e)
|
|
||||||
if msg.startswith(':') and ':' in msg[1:]:
|
|
||||||
linenum = msg[1:msg.find(':',1)]
|
|
||||||
if linenum.isdigit():
|
|
||||||
linenum = int(linenum, 10)
|
|
||||||
csourcelines = csource.splitlines()
|
|
||||||
if 1 <= linenum <= len(csourcelines):
|
|
||||||
line = csourcelines[linenum-1]
|
|
||||||
return line
|
|
||||||
|
|
||||||
def convert_pycparser_error(self, e, csource):
|
|
||||||
line = self._convert_pycparser_error(e, csource)
|
|
||||||
|
|
||||||
msg = str(e)
|
|
||||||
if line:
|
|
||||||
msg = 'cannot parse "%s"\n%s' % (line.strip(), msg)
|
|
||||||
else:
|
|
||||||
msg = 'parse error\n%s' % (msg,)
|
|
||||||
raise api.CDefError(msg)
|
|
||||||
|
|
||||||
def parse(self, csource, override=False, packed=False):
|
|
||||||
prev_override = self._override
|
|
||||||
prev_packed = self._packed
|
|
||||||
try:
|
|
||||||
self._override = override
|
|
||||||
self._packed = packed
|
|
||||||
self._internal_parse(csource)
|
|
||||||
finally:
|
|
||||||
self._override = prev_override
|
|
||||||
self._packed = prev_packed
|
|
||||||
|
|
||||||
def _internal_parse(self, csource):
|
|
||||||
ast, macros, csource = self._parse(csource)
|
|
||||||
# add the macros
|
|
||||||
self._process_macros(macros)
|
|
||||||
# find the first "__dotdotdot__" and use that as a separator
|
|
||||||
# between the repeated typedefs and the real csource
|
|
||||||
iterator = iter(ast.ext)
|
|
||||||
for decl in iterator:
|
|
||||||
if decl.name == '__dotdotdot__':
|
|
||||||
break
|
|
||||||
#
|
|
||||||
try:
|
|
||||||
for decl in iterator:
|
|
||||||
if isinstance(decl, pycparser.c_ast.Decl):
|
|
||||||
self._parse_decl(decl)
|
|
||||||
elif isinstance(decl, pycparser.c_ast.Typedef):
|
|
||||||
if not decl.name:
|
|
||||||
raise api.CDefError("typedef does not declare any name",
|
|
||||||
decl)
|
|
||||||
quals = 0
|
|
||||||
if (isinstance(decl.type.type, pycparser.c_ast.IdentifierType)
|
|
||||||
and decl.type.type.names[-1] == '__dotdotdot__'):
|
|
||||||
realtype = self._get_unknown_type(decl)
|
|
||||||
elif (isinstance(decl.type, pycparser.c_ast.PtrDecl) and
|
|
||||||
isinstance(decl.type.type, pycparser.c_ast.TypeDecl) and
|
|
||||||
isinstance(decl.type.type.type,
|
|
||||||
pycparser.c_ast.IdentifierType) and
|
|
||||||
decl.type.type.type.names == ['__dotdotdot__']):
|
|
||||||
realtype = model.unknown_ptr_type(decl.name)
|
|
||||||
else:
|
|
||||||
realtype, quals = self._get_type_and_quals(
|
|
||||||
decl.type, name=decl.name)
|
|
||||||
self._declare('typedef ' + decl.name, realtype, quals=quals)
|
|
||||||
else:
|
|
||||||
raise api.CDefError("unrecognized construct", decl)
|
|
||||||
except api.FFIError as e:
|
|
||||||
msg = self._convert_pycparser_error(e, csource)
|
|
||||||
if msg:
|
|
||||||
e.args = (e.args[0] + "\n *** Err: %s" % msg,)
|
|
||||||
raise
|
|
||||||
|
|
||||||
def _add_constants(self, key, val):
|
|
||||||
if key in self._int_constants:
|
|
||||||
if self._int_constants[key] == val:
|
|
||||||
return # ignore identical double declarations
|
|
||||||
raise api.FFIError(
|
|
||||||
"multiple declarations of constant: %s" % (key,))
|
|
||||||
self._int_constants[key] = val
|
|
||||||
|
|
||||||
def _add_integer_constant(self, name, int_str):
|
|
||||||
int_str = int_str.lower().rstrip("ul")
|
|
||||||
neg = int_str.startswith('-')
|
|
||||||
if neg:
|
|
||||||
int_str = int_str[1:]
|
|
||||||
# "010" is not valid oct in py3
|
|
||||||
if (int_str.startswith("0") and int_str != '0'
|
|
||||||
and not int_str.startswith("0x")):
|
|
||||||
int_str = "0o" + int_str[1:]
|
|
||||||
pyvalue = int(int_str, 0)
|
|
||||||
if neg:
|
|
||||||
pyvalue = -pyvalue
|
|
||||||
self._add_constants(name, pyvalue)
|
|
||||||
self._declare('macro ' + name, pyvalue)
|
|
||||||
|
|
||||||
def _process_macros(self, macros):
|
|
||||||
for key, value in macros.items():
|
|
||||||
value = value.strip()
|
|
||||||
if _r_int_literal.match(value):
|
|
||||||
self._add_integer_constant(key, value)
|
|
||||||
elif value == '...':
|
|
||||||
self._declare('macro ' + key, value)
|
|
||||||
else:
|
|
||||||
raise api.CDefError(
|
|
||||||
'only supports one of the following syntax:\n'
|
|
||||||
' #define %s ... (literally dot-dot-dot)\n'
|
|
||||||
' #define %s NUMBER (with NUMBER an integer'
|
|
||||||
' constant, decimal/hex/octal)\n'
|
|
||||||
'got:\n'
|
|
||||||
' #define %s %s'
|
|
||||||
% (key, key, key, value))
|
|
||||||
|
|
||||||
def _parse_decl(self, decl):
|
|
||||||
node = decl.type
|
|
||||||
if isinstance(node, pycparser.c_ast.FuncDecl):
|
|
||||||
tp, quals = self._get_type_and_quals(node, name=decl.name)
|
|
||||||
assert isinstance(tp, model.RawFunctionType)
|
|
||||||
tp = self._get_type_pointer(tp, quals)
|
|
||||||
self._declare('function ' + decl.name, tp)
|
|
||||||
else:
|
|
||||||
if isinstance(node, pycparser.c_ast.Struct):
|
|
||||||
self._get_struct_union_enum_type('struct', node)
|
|
||||||
elif isinstance(node, pycparser.c_ast.Union):
|
|
||||||
self._get_struct_union_enum_type('union', node)
|
|
||||||
elif isinstance(node, pycparser.c_ast.Enum):
|
|
||||||
self._get_struct_union_enum_type('enum', node)
|
|
||||||
elif not decl.name:
|
|
||||||
raise api.CDefError("construct does not declare any variable",
|
|
||||||
decl)
|
|
||||||
#
|
|
||||||
if decl.name:
|
|
||||||
tp, quals = self._get_type_and_quals(node,
|
|
||||||
partial_length_ok=True)
|
|
||||||
if tp.is_raw_function:
|
|
||||||
tp = self._get_type_pointer(tp, quals)
|
|
||||||
self._declare('function ' + decl.name, tp)
|
|
||||||
elif (tp.is_integer_type() and
|
|
||||||
hasattr(decl, 'init') and
|
|
||||||
hasattr(decl.init, 'value') and
|
|
||||||
_r_int_literal.match(decl.init.value)):
|
|
||||||
self._add_integer_constant(decl.name, decl.init.value)
|
|
||||||
elif (tp.is_integer_type() and
|
|
||||||
isinstance(decl.init, pycparser.c_ast.UnaryOp) and
|
|
||||||
decl.init.op == '-' and
|
|
||||||
hasattr(decl.init.expr, 'value') and
|
|
||||||
_r_int_literal.match(decl.init.expr.value)):
|
|
||||||
self._add_integer_constant(decl.name,
|
|
||||||
'-' + decl.init.expr.value)
|
|
||||||
elif (quals & model.Q_CONST) and not tp.is_array_type:
|
|
||||||
self._declare('constant ' + decl.name, tp, quals=quals)
|
|
||||||
else:
|
|
||||||
self._declare('variable ' + decl.name, tp, quals=quals)
|
|
||||||
|
|
||||||
def parse_type(self, cdecl):
|
|
||||||
ast, macros = self._parse('void __dummy(\n%s\n);' % cdecl)[:2]
|
|
||||||
assert not macros
|
|
||||||
exprnode = ast.ext[-1].type.args.params[0]
|
|
||||||
if isinstance(exprnode, pycparser.c_ast.ID):
|
|
||||||
raise api.CDefError("unknown identifier '%s'" % (exprnode.name,))
|
|
||||||
tp, quals = self._get_type_and_quals(exprnode.type)
|
|
||||||
return tp
|
|
||||||
|
|
||||||
def _declare(self, name, obj, included=False, quals=0):
|
|
||||||
if name in self._declarations:
|
|
||||||
prevobj, prevquals = self._declarations[name]
|
|
||||||
if prevobj is obj and prevquals == quals:
|
|
||||||
return
|
|
||||||
if not self._override:
|
|
||||||
raise api.FFIError(
|
|
||||||
"multiple declarations of %s (for interactive usage, "
|
|
||||||
"try cdef(xx, override=True))" % (name,))
|
|
||||||
assert '__dotdotdot__' not in name.split()
|
|
||||||
self._declarations[name] = (obj, quals)
|
|
||||||
if included:
|
|
||||||
self._included_declarations.add(obj)
|
|
||||||
|
|
||||||
def _extract_quals(self, type):
|
|
||||||
quals = 0
|
|
||||||
if isinstance(type, (pycparser.c_ast.TypeDecl,
|
|
||||||
pycparser.c_ast.PtrDecl)):
|
|
||||||
if 'const' in type.quals:
|
|
||||||
quals |= model.Q_CONST
|
|
||||||
if 'restrict' in type.quals:
|
|
||||||
quals |= model.Q_RESTRICT
|
|
||||||
return quals
|
|
||||||
|
|
||||||
def _get_type_pointer(self, type, quals, declname=None):
|
|
||||||
if isinstance(type, model.RawFunctionType):
|
|
||||||
return type.as_function_pointer()
|
|
||||||
if (isinstance(type, model.StructOrUnionOrEnum) and
|
|
||||||
type.name.startswith('$') and type.name[1:].isdigit() and
|
|
||||||
type.forcename is None and declname is not None):
|
|
||||||
return model.NamedPointerType(type, declname, quals)
|
|
||||||
return model.PointerType(type, quals)
|
|
||||||
|
|
||||||
def _get_type_and_quals(self, typenode, name=None, partial_length_ok=False):
|
|
||||||
# first, dereference typedefs, if we have it already parsed, we're good
|
|
||||||
if (isinstance(typenode, pycparser.c_ast.TypeDecl) and
|
|
||||||
isinstance(typenode.type, pycparser.c_ast.IdentifierType) and
|
|
||||||
len(typenode.type.names) == 1 and
|
|
||||||
('typedef ' + typenode.type.names[0]) in self._declarations):
|
|
||||||
tp, quals = self._declarations['typedef ' + typenode.type.names[0]]
|
|
||||||
quals |= self._extract_quals(typenode)
|
|
||||||
return tp, quals
|
|
||||||
#
|
|
||||||
if isinstance(typenode, pycparser.c_ast.ArrayDecl):
|
|
||||||
# array type
|
|
||||||
if typenode.dim is None:
|
|
||||||
length = None
|
|
||||||
else:
|
|
||||||
length = self._parse_constant(
|
|
||||||
typenode.dim, partial_length_ok=partial_length_ok)
|
|
||||||
tp, quals = self._get_type_and_quals(typenode.type,
|
|
||||||
partial_length_ok=partial_length_ok)
|
|
||||||
return model.ArrayType(tp, length), quals
|
|
||||||
#
|
|
||||||
if isinstance(typenode, pycparser.c_ast.PtrDecl):
|
|
||||||
# pointer type
|
|
||||||
itemtype, itemquals = self._get_type_and_quals(typenode.type)
|
|
||||||
tp = self._get_type_pointer(itemtype, itemquals, declname=name)
|
|
||||||
quals = self._extract_quals(typenode)
|
|
||||||
return tp, quals
|
|
||||||
#
|
|
||||||
if isinstance(typenode, pycparser.c_ast.TypeDecl):
|
|
||||||
quals = self._extract_quals(typenode)
|
|
||||||
type = typenode.type
|
|
||||||
if isinstance(type, pycparser.c_ast.IdentifierType):
|
|
||||||
# assume a primitive type. get it from .names, but reduce
|
|
||||||
# synonyms to a single chosen combination
|
|
||||||
names = list(type.names)
|
|
||||||
if names != ['signed', 'char']: # keep this unmodified
|
|
||||||
prefixes = {}
|
|
||||||
while names:
|
|
||||||
name = names[0]
|
|
||||||
if name in ('short', 'long', 'signed', 'unsigned'):
|
|
||||||
prefixes[name] = prefixes.get(name, 0) + 1
|
|
||||||
del names[0]
|
|
||||||
else:
|
|
||||||
break
|
|
||||||
# ignore the 'signed' prefix below, and reorder the others
|
|
||||||
newnames = []
|
|
||||||
for prefix in ('unsigned', 'short', 'long'):
|
|
||||||
for i in range(prefixes.get(prefix, 0)):
|
|
||||||
newnames.append(prefix)
|
|
||||||
if not names:
|
|
||||||
names = ['int'] # implicitly
|
|
||||||
if names == ['int']: # but kill it if 'short' or 'long'
|
|
||||||
if 'short' in prefixes or 'long' in prefixes:
|
|
||||||
names = []
|
|
||||||
names = newnames + names
|
|
||||||
ident = ' '.join(names)
|
|
||||||
if ident == 'void':
|
|
||||||
return model.void_type, quals
|
|
||||||
if ident == '__dotdotdot__':
|
|
||||||
raise api.FFIError(':%d: bad usage of "..."' %
|
|
||||||
typenode.coord.line)
|
|
||||||
return resolve_common_type(ident), quals
|
|
||||||
#
|
|
||||||
if isinstance(type, pycparser.c_ast.Struct):
|
|
||||||
# 'struct foobar'
|
|
||||||
tp = self._get_struct_union_enum_type('struct', type, name)
|
|
||||||
return tp, quals
|
|
||||||
#
|
|
||||||
if isinstance(type, pycparser.c_ast.Union):
|
|
||||||
# 'union foobar'
|
|
||||||
tp = self._get_struct_union_enum_type('union', type, name)
|
|
||||||
return tp, quals
|
|
||||||
#
|
|
||||||
if isinstance(type, pycparser.c_ast.Enum):
|
|
||||||
# 'enum foobar'
|
|
||||||
tp = self._get_struct_union_enum_type('enum', type, name)
|
|
||||||
return tp, quals
|
|
||||||
#
|
|
||||||
if isinstance(typenode, pycparser.c_ast.FuncDecl):
|
|
||||||
# a function type
|
|
||||||
return self._parse_function_type(typenode, name), 0
|
|
||||||
#
|
|
||||||
# nested anonymous structs or unions end up here
|
|
||||||
if isinstance(typenode, pycparser.c_ast.Struct):
|
|
||||||
return self._get_struct_union_enum_type('struct', typenode, name,
|
|
||||||
nested=True), 0
|
|
||||||
if isinstance(typenode, pycparser.c_ast.Union):
|
|
||||||
return self._get_struct_union_enum_type('union', typenode, name,
|
|
||||||
nested=True), 0
|
|
||||||
#
|
|
||||||
raise api.FFIError(":%d: bad or unsupported type declaration" %
|
|
||||||
typenode.coord.line)
|
|
||||||
|
|
||||||
def _parse_function_type(self, typenode, funcname=None):
|
|
||||||
params = list(getattr(typenode.args, 'params', []))
|
|
||||||
ellipsis = (
|
|
||||||
len(params) > 0 and
|
|
||||||
isinstance(params[-1].type, pycparser.c_ast.TypeDecl) and
|
|
||||||
isinstance(params[-1].type.type,
|
|
||||||
pycparser.c_ast.IdentifierType) and
|
|
||||||
params[-1].type.type.names == ['__dotdotdot__'])
|
|
||||||
if ellipsis:
|
|
||||||
params.pop()
|
|
||||||
if not params:
|
|
||||||
raise api.CDefError(
|
|
||||||
"%s: a function with only '(...)' as argument"
|
|
||||||
" is not correct C" % (funcname or 'in expression'))
|
|
||||||
args = [self._as_func_arg(*self._get_type_and_quals(argdeclnode.type))
|
|
||||||
for argdeclnode in params]
|
|
||||||
if not ellipsis and args == [model.void_type]:
|
|
||||||
args = []
|
|
||||||
result, quals = self._get_type_and_quals(typenode.type)
|
|
||||||
# the 'quals' on the result type are ignored. HACK: we absure them
|
|
||||||
# to detect __stdcall functions: we textually replace "__stdcall"
|
|
||||||
# with "volatile volatile const" above.
|
|
||||||
abi = None
|
|
||||||
if hasattr(typenode.type, 'quals'): # else, probable syntax error anyway
|
|
||||||
if typenode.type.quals[-3:] == ['volatile', 'volatile', 'const']:
|
|
||||||
abi = '__stdcall'
|
|
||||||
return model.RawFunctionType(tuple(args), result, ellipsis, abi)
|
|
||||||
|
|
||||||
def _as_func_arg(self, type, quals):
|
|
||||||
if isinstance(type, model.ArrayType):
|
|
||||||
return model.PointerType(type.item, quals)
|
|
||||||
elif isinstance(type, model.RawFunctionType):
|
|
||||||
return type.as_function_pointer()
|
|
||||||
else:
|
|
||||||
return type
|
|
||||||
|
|
||||||
def _get_struct_union_enum_type(self, kind, type, name=None, nested=False):
    """Return (creating and caching if needed) the model type for a
    'struct foo', 'union foo' or 'enum foo' AST node, and fill in its
    fields when this node carries the '{ ... }' declaration.

    :param kind: 'struct', 'union' or 'enum'.
    :param type: the pycparser Struct/Union/Enum node.
    :param name: optional forced name for anonymous types
        (e.g. from "typedef struct { } foo").
    :param nested: True when parsed inside another struct/union.
    """
    # First, a level of caching on the exact 'type' node of the AST.
    # This is obscure, but needed because pycparser "unrolls" declarations
    # such as "typedef struct { } foo_t, *foo_p" and we end up with
    # an AST that is not a tree, but a DAG, with the "type" node of the
    # two branches foo_t and foo_p of the trees being the same node.
    # It's a bit silly but detecting "DAG-ness" in the AST tree seems
    # to be the only way to distinguish this case from two independent
    # structs.  See test_struct_with_two_usages.
    try:
        return self._structnode2type[type]
    except KeyError:
        pass
    #
    # Note that this must handle parsing "struct foo" any number of
    # times and always return the same StructType object.  Additionally,
    # one of these times (not necessarily the first), the fields of
    # the struct can be specified with "struct foo { ...fields... }".
    # If no name is given, then we have to create a new anonymous struct
    # with no caching; in this case, the fields are either specified
    # right now or never.
    #
    force_name = name
    name = type.name
    #
    # get the type or create it if needed
    if name is None:
        # 'force_name' is used to guess a more readable name for
        # anonymous structs, for the common case "typedef struct { } foo".
        if force_name is not None:
            explicit_name = '$%s' % force_name
        else:
            self._anonymous_counter += 1
            explicit_name = '$%d' % self._anonymous_counter
        tp = None
    else:
        explicit_name = name
        key = '%s %s' % (kind, name)
        tp, _ = self._declarations.get(key, (None, None))
    #
    if tp is None:
        if kind == 'struct':
            tp = model.StructType(explicit_name, None, None, None)
        elif kind == 'union':
            tp = model.UnionType(explicit_name, None, None, None)
        elif kind == 'enum':
            if explicit_name == '__dotdotdot__':
                # BUGFIX: was a bare 'CDefError', which is not a name in
                # this module's scope (every other raise here uses
                # 'api.CDefError') and would crash with NameError.
                raise api.CDefError("Enums cannot be declared with ...")
            tp = self._build_enum_type(explicit_name, type.values)
        else:
            raise AssertionError("kind = %r" % (kind,))
        if name is not None:
            self._declare(key, tp)
    else:
        if kind == 'enum' and type.values is not None:
            raise NotImplementedError(
                "enum %s: the '{}' declaration should appear on the first "
                "time the enum is mentioned, not later" % explicit_name)
    if not tp.forcename:
        tp.force_the_name(force_name)
    if tp.forcename and '$' in tp.name:
        self._declare('anonymous %s' % tp.forcename, tp)
    #
    self._structnode2type[type] = tp
    #
    # enums: done here
    if kind == 'enum':
        return tp
    #
    # is there a 'type.decls'?  If yes, then this is the place in the
    # C sources that declare the fields.  If no, then just return the
    # existing type, possibly still incomplete.
    if type.decls is None:
        return tp
    #
    if tp.fldnames is not None:
        raise api.CDefError("duplicate declaration of struct %s" % name)
    fldnames = []
    fldtypes = []
    fldbitsize = []
    fldquals = []
    for decl in type.decls:
        if (isinstance(decl.type, pycparser.c_ast.IdentifierType) and
                ''.join(decl.type.names) == '__dotdotdot__'):
            # XXX pycparser is inconsistent: 'names' should be a list
            # of strings, but is sometimes just one string.  Use
            # str.join() as a way to cope with both.
            self._make_partial(tp, nested)
            continue
        if decl.bitsize is None:
            bitsize = -1
        else:
            bitsize = self._parse_constant(decl.bitsize)
        self._partial_length = False
        type, fqual = self._get_type_and_quals(decl.type,
                                               partial_length_ok=True)
        if self._partial_length:
            self._make_partial(tp, nested)
        if isinstance(type, model.StructType) and type.partial:
            self._make_partial(tp, nested)
        fldnames.append(decl.name or '')
        fldtypes.append(type)
        fldbitsize.append(bitsize)
        fldquals.append(fqual)
    tp.fldnames = tuple(fldnames)
    tp.fldtypes = tuple(fldtypes)
    tp.fldbitsize = tuple(fldbitsize)
    tp.fldquals = tuple(fldquals)
    if fldbitsize != [-1] * len(fldbitsize):
        if isinstance(tp, model.StructType) and tp.partial:
            raise NotImplementedError("%s: using both bitfields and '...;'"
                                      % (tp,))
    tp.packed = self._packed
    if tp.completed:    # must be re-completed: it is not opaque any more
        tp.completed = 0
        self._recomplete.append(tp)
    return tp
|
|
||||||
|
|
||||||
def _make_partial(self, tp, nested):
    """Mark the struct/union *tp* as partial ('...' appeared in its
    declaration), after validating that this is allowed.

    :raises api.CDefError: if *tp* is not a struct or union.
    :raises NotImplementedError: if *tp* is anonymous ('$' name) and not
        nested inside another struct/union.
    """
    if not isinstance(tp, model.StructOrUnion):
        raise api.CDefError("%s cannot be partial" % (tp,))
    if not tp.has_c_name() and not nested:
        raise NotImplementedError("%s is partial but has no C name" %(tp,))
    tp.partial = True
|
|
||||||
|
|
||||||
def _parse_constant(self, exprnode, partial_length_ok=False):
    """Evaluate a compile-time integer constant from the AST.

    Supports decimal/octal/hex literals, character literals, unary
    +/- applied to a constant, previously-defined integer constants,
    and (when *partial_length_ok*) the '...' array-length marker, in
    which case the string '...' is returned and self._partial_length
    is set.

    :raises api.CDefError: on a malformed literal.
    :raises api.FFIError: on any other expression form.
    """
    # for now, limited to expressions that are an immediate number
    # or positive/negative number
    if isinstance(exprnode, pycparser.c_ast.Constant):
        s = exprnode.value
        if s.startswith('0'):
            if s.startswith('0x') or s.startswith('0X'):
                return int(s, 16)
            return int(s, 8)         # C octal convention for '0...'
        elif '1' <= s[0] <= '9':
            return int(s, 10)
        elif s[0] == "'" and s[-1] == "'" and (
                len(s) == 3 or (len(s) == 4 and s[1] == "\\")):
            # character literal, possibly escaped: 'a' or '\n'
            return ord(s[-2])
        else:
            raise api.CDefError("invalid constant %r" % (s,))
    #
    if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and
            exprnode.op == '+'):
        return self._parse_constant(exprnode.expr)
    #
    if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and
            exprnode.op == '-'):
        return -self._parse_constant(exprnode.expr)
    # load previously defined int constant
    if (isinstance(exprnode, pycparser.c_ast.ID) and
            exprnode.name in self._int_constants):
        return self._int_constants[exprnode.name]
    #
    if partial_length_ok:
        if (isinstance(exprnode, pycparser.c_ast.ID) and
                exprnode.name == '__dotdotdotarray__'):
            # '...' used as an array length
            self._partial_length = True
            return '...'
    #
    raise api.FFIError(":%d: unsupported expression: expected a "
                       "simple numeric constant" % exprnode.coord.line)
|
|
||||||
|
|
||||||
def _build_enum_type(self, explicit_name, decls):
    """Create a model.EnumType from an enum body.

    :param explicit_name: the enum's (possibly synthesized) name.
    :param decls: the pycparser enumerator list, or None for an opaque
        'enum foo;' forward declaration.
    """
    if decls is not None:
        partial = False
        enumerators = []
        enumvalues = []
        nextenumvalue = 0      # C rule: values count up from 0 by default
        for enum in decls.enumerators:
            # an enumerator matching the '...' marker makes it partial
            if _r_enum_dotdotdot.match(enum.name):
                partial = True
                continue
            if enum.value is not None:
                nextenumvalue = self._parse_constant(enum.value)
            enumerators.append(enum.name)
            enumvalues.append(nextenumvalue)
            # enumerators are also usable as plain integer constants
            self._add_constants(enum.name, nextenumvalue)
            nextenumvalue += 1
        enumerators = tuple(enumerators)
        enumvalues = tuple(enumvalues)
        tp = model.EnumType(explicit_name, enumerators, enumvalues)
        tp.partial = partial
    else:   # opaque enum
        tp = model.EnumType(explicit_name, (), ())
    return tp
|
|
||||||
|
|
||||||
def include(self, other):
    """Merge the declarations and integer constants of another parser
    (used by ffi.include()) into this one, marking them as included."""
    for name, (tp, quals) in other._declarations.items():
        if name.startswith('anonymous $enum_$'):
            continue   # fix for test_anonymous_enum_include
        kind = name.split(' ', 1)[0]
        if kind in ('struct', 'union', 'enum', 'anonymous', 'typedef'):
            self._declare(name, tp, included=True, quals=quals)
    for k, v in other._int_constants.items():
        self._add_constants(k, v)
|
|
||||||
|
|
||||||
def _get_unknown_type(self, decl):
    """Handle 'typedef ... name;' and 'typedef int... name;' style
    declarations whose exact type is only known at compile time.

    Returns model.unknown_type for a bare '...', an UnknownFloatType
    for 'float...'/'double...', and an UnknownIntegerType for integer
    keyword combinations; raises api.FFIError otherwise.
    """
    typenames = decl.type.type.names
    assert typenames[-1] == '__dotdotdot__'
    if len(typenames) == 1:
        return model.unknown_type(decl.name)

    if (typenames[:-1] == ['float'] or
        typenames[:-1] == ['double']):
        # not for 'long double' so far
        result = model.UnknownFloatType(decl.name)
    else:
        # only integer keywords may precede the '...'
        for t in typenames[:-1]:
            if t not in ['int', 'short', 'long', 'signed',
                         'unsigned', 'char']:
                raise api.FFIError(':%d: bad usage of "..."' %
                                   decl.coord.line)
        result = model.UnknownIntegerType(decl.name)

    # remember (for error reporting) that a recompile-only feature
    # was used, keeping the first occurrence
    if self._uses_new_feature is None:
        self._uses_new_feature = "'typedef %s... %s'" % (
            ' '.join(typenames[:-1]), decl.name)

    return result
|
|
|
@ -1,115 +0,0 @@
|
||||||
import sys, os
|
|
||||||
|
|
||||||
|
|
||||||
class VerificationError(Exception):
    """An error raised when verification fails, e.g. when the generated
    C source cannot be compiled or linked."""
|
|
||||||
|
|
||||||
class VerificationMissing(Exception):
    """An error raised when incomplete (partial, '...') structures are
    passed into cdef() but no verification has been done yet."""
|
|
||||||
|
|
||||||
|
|
||||||
# Extension() keyword arguments whose values are lists of file/dir names.
# NOTE(review): no user of this constant is visible in this module —
# presumably callers use it to post-process paths; confirm before changing.
LIST_OF_FILE_NAMES = ['sources', 'include_dirs', 'library_dirs',
                      'extra_objects', 'depends']
|
|
||||||
|
|
||||||
def get_extension(srcfilename, modname, sources=(), **kwds):
    """Return a distutils Extension named *modname* built from
    *srcfilename* followed by the extra *sources*; all other keyword
    arguments are forwarded to Extension() unchanged."""
    from distutils.core import Extension
    allsources = [srcfilename]
    allsources.extend(sources)
    return Extension(name=modname, sources=allsources, **kwds)
|
|
||||||
|
|
||||||
def compile(tmpdir, ext):
    """Compile a C extension module using distutils.

    :param tmpdir: directory used for both build output and temporaries.
    :param ext: the distutils Extension to build.
    :return: absolute path of the produced shared library.
    :raises VerificationError: if compilation or linking fails (via _build).
    """
    saved_environ = os.environ.copy()
    try:
        outputfilename = _build(tmpdir, ext)
        outputfilename = os.path.abspath(outputfilename)
    finally:
        # workaround for a distutils bugs where some env vars can
        # become longer and longer every time it is used
        for key, value in saved_environ.items():
            if os.environ.get(key) != value:
                os.environ[key] = value
    return outputfilename
|
|
||||||
|
|
||||||
def _build(tmpdir, ext):
    """Run distutils' build_ext on *ext*, forcing a rebuild and placing
    all output under *tmpdir*; return the path of the built .so/.pyd.

    :raises VerificationError: wrapping distutils Compile/Link errors.
    """
    # XXX compact but horrible :-(
    from distutils.core import Distribution
    import distutils.errors
    #
    dist = Distribution({'ext_modules': [ext]})
    dist.parse_config_files()
    options = dist.get_option_dict('build_ext')
    # ('ffiplatform', value) records where each option came from
    options['force'] = ('ffiplatform', True)
    options['build_lib'] = ('ffiplatform', tmpdir)
    options['build_temp'] = ('ffiplatform', tmpdir)
    #
    try:
        dist.run_command('build_ext')
    except (distutils.errors.CompileError,
            distutils.errors.LinkError) as e:
        raise VerificationError('%s: %s' % (e.__class__.__name__, e))
    #
    cmd_obj = dist.get_command_obj('build_ext')
    # exactly one extension was registered, so exactly one output
    [soname] = cmd_obj.get_outputs()
    return soname
|
|
||||||
|
|
||||||
# Prefer the platform-provided os.path.samefile(); on platforms that
# lack it, fall back to a purely textual absolute-path comparison
# (no inode information available there).
try:
    from os.path import samefile
except ImportError:
    def samefile(f1, f2):
        """Best-effort fallback: equal absolute path strings."""
        return os.path.abspath(f1) == os.path.abspath(f2)
|
|
||||||
|
|
||||||
def maybe_relative_path(path):
    """Return *path* rewritten relative to the current directory when it
    lies underneath it; otherwise return *path* unchanged."""
    if not os.path.isabs(path):
        return path                     # already relative
    components = []
    current = path
    while True:
        parent, tail = os.path.split(current)
        if parent == current or not parent:
            return path                 # reached the root: give up
        components.append(tail)
        try:
            if samefile(parent, os.curdir):
                # 'parent' is the cwd: rebuild the path below it
                return os.path.join(*reversed(components))
        except OSError:
            pass                        # e.g. a path component is missing
        current = parent
|
|
||||||
|
|
||||||
# ____________________________________________________________
|
|
||||||
|
|
||||||
# Python 2/3 compatibility: on Python 2, integers may be 'int' or 'long'
# and cStringIO provides StringIO; on Python 3 the 'long' name is gone
# (NameError), there is only 'int', and io.StringIO replaces cStringIO.
try:
    int_or_long = (int, long)
    import cStringIO
except NameError:
    int_or_long = int      # Python 3
    import io as cStringIO
|
|
||||||
|
|
||||||
def _flatten(x, f):
|
|
||||||
if isinstance(x, str):
|
|
||||||
f.write('%ds%s' % (len(x), x))
|
|
||||||
elif isinstance(x, dict):
|
|
||||||
keys = sorted(x.keys())
|
|
||||||
f.write('%dd' % len(keys))
|
|
||||||
for key in keys:
|
|
||||||
_flatten(key, f)
|
|
||||||
_flatten(x[key], f)
|
|
||||||
elif isinstance(x, (list, tuple)):
|
|
||||||
f.write('%dl' % len(x))
|
|
||||||
for value in x:
|
|
||||||
_flatten(value, f)
|
|
||||||
elif isinstance(x, int_or_long):
|
|
||||||
f.write('%di' % (x,))
|
|
||||||
else:
|
|
||||||
raise TypeError(
|
|
||||||
"the keywords to verify() contains unsupported object %r" % (x,))
|
|
||||||
|
|
||||||
def flatten(x):
    """Return the canonical string encoding of *x* (see _flatten);
    used to turn the keyword arguments of verify() into a stable key."""
    f = cStringIO.StringIO()
    _flatten(x, f)
    return f.getvalue()
|
|
|
@ -1,22 +0,0 @@
|
||||||
from weakref import ref
|
|
||||||
|
|
||||||
|
|
||||||
class GcWeakrefs(object):
    """Support for attaching a destructor callback to a cdata object:
    a copy of the cdata is returned, and when that copy is collected,
    the destructor is invoked with the original cdata."""

    def __init__(self, ffi):
        self.ffi = ffi
        # maps a fresh sentinel object -> the weakref whose callback
        # runs the destructor; keeps the weakref itself alive
        self.data = {}

    def build(self, cdata, destructor):
        """Return a new cdata equal to *cdata*; when it dies,
        destructor(cdata) is called."""
        # make a new cdata of the same type as the original one
        new_cdata = self.ffi.cast(self.ffi._backend.typeof(cdata), cdata)
        #
        def remove(key):
            # careful, this function is not protected by any lock;
            # 'index' is captured from the enclosing scope below
            old_key = self.data.pop(index)
            assert old_key is key
            destructor(cdata)
        #
        key = ref(new_cdata, remove)
        index = object()
        self.data[index] = key
        return new_cdata
|
|
|
@ -1,30 +0,0 @@
|
||||||
import sys

# Obtain allocate_lock() from the low-level thread module, whose name
# differs between Python 2 ('thread') and Python 3 ('_thread'); fall
# back to the dummy (no-threads) variant when real threads are absent.
if sys.version_info < (3,):
    try:
        from thread import allocate_lock
    except ImportError:
        from dummy_thread import allocate_lock
else:
    try:
        from _thread import allocate_lock
    except ImportError:
        from _dummy_thread import allocate_lock
|
|
||||||
|
|
||||||
|
|
||||||
##import sys
|
|
||||||
##l1 = allocate_lock
|
|
||||||
|
|
||||||
##class allocate_lock(object):
|
|
||||||
## def __init__(self):
|
|
||||||
## self._real = l1()
|
|
||||||
## def __enter__(self):
|
|
||||||
## for i in range(4, 0, -1):
|
|
||||||
## print sys._getframe(i).f_code
|
|
||||||
## print
|
|
||||||
## return self._real.__enter__()
|
|
||||||
## def __exit__(self, *args):
|
|
||||||
## return self._real.__exit__(*args)
|
|
||||||
## def acquire(self, f):
|
|
||||||
## assert f is False
|
|
||||||
## return self._real.acquire(f)
|
|
|
@ -1,594 +0,0 @@
|
||||||
import types, sys
|
|
||||||
import weakref
|
|
||||||
|
|
||||||
from .lock import allocate_lock
|
|
||||||
|
|
||||||
|
|
||||||
# type qualifiers
|
|
||||||
# type qualifiers, combined as a bitmask
Q_CONST = 0x01
Q_RESTRICT = 0x02

def qualify(quals, replace_with):
    """Prefix *replace_with* with the C qualifier keywords selected by
    the bitmask *quals* (Q_CONST and/or Q_RESTRICT) and return it."""
    result = replace_with
    if quals & Q_CONST:
        result = ' const ' + result.lstrip()
    if quals & Q_RESTRICT:
        # It seems that __restrict is supported by gcc and msvc.
        # If you hit some different compiler, add a #define in
        # _cffi_include.h for it (and in its copies, documented there)
        result = ' __restrict ' + result.lstrip()
    return result
|
|
||||||
|
|
||||||
|
|
||||||
class BaseTypeByIdentity(object):
    """Root of the type-model hierarchy.  Instances compare by identity
    (no __eq__ override here); subclasses expose a 'c_name_with_marker'
    string containing exactly one '&' where a declared name goes."""

    is_array_type = False
    is_raw_function = False

    def get_c_name(self, replace_with='', context='a C file', quals=0):
        """Render this type as C syntax, splicing *replace_with* (a
        variable name, '*', etc.) at the '&' marker, with the qualifiers
        in *quals* applied.

        :raises VerificationError: if the type still contains an
            internal '$'-name that cannot appear in real C.
        """
        result = self.c_name_with_marker
        assert result.count('&') == 1
        # some logic duplication with ffi.getctype()... :-(
        replace_with = replace_with.strip()
        if replace_with:
            if replace_with.startswith('*') and '&[' in result:
                # pointer-to-array needs parentheses: (*x)[n]
                replace_with = '(%s)' % replace_with
            elif not replace_with[0] in '[(':
                replace_with = ' ' + replace_with
        replace_with = qualify(quals, replace_with)
        result = result.replace('&', replace_with)
        if '$' in result:
            from .ffiplatform import VerificationError
            raise VerificationError(
                "cannot generate '%s' in %s: unknown type name"
                % (self._get_c_name(), context))
        return result

    def _get_c_name(self):
        # the C name with the placeholder simply removed
        return self.c_name_with_marker.replace('&', '')

    def has_c_name(self):
        # '$' marks internal (anonymous/unknown) names
        return '$' not in self._get_c_name()

    def is_integer_type(self):
        return False

    def get_cached_btype(self, ffi, finishlist, can_delay=False):
        """Return the backend type for this model type, building and
        caching it in ffi._cached_btypes on first use."""
        try:
            BType = ffi._cached_btypes[self]
        except KeyError:
            BType = self.build_backend_type(ffi, finishlist)
            BType2 = ffi._cached_btypes.setdefault(self, BType)
            assert BType2 is BType
        return BType

    def __repr__(self):
        return '<%s>' % (self._get_c_name(),)

    def _get_items(self):
        # (name, value) pairs of the identity-defining attributes
        return [(name, getattr(self, name)) for name in self._attrs_]
|
|
||||||
|
|
||||||
|
|
||||||
class BaseType(BaseTypeByIdentity):
    """Base class for types that compare structurally: two instances of
    the same class with equal '_attrs_' values are interchangeable."""

    def __eq__(self, other):
        if self.__class__ is not other.__class__:
            return False
        return self._get_items() == other._get_items()

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        return hash((self.__class__, tuple(self._get_items())))
|
|
||||||
|
|
||||||
|
|
||||||
class VoidType(BaseType):
    """The C 'void' type (a singleton instance is exported below)."""
    _attrs_ = ()

    def __init__(self):
        self.c_name_with_marker = 'void&'

    def build_backend_type(self, ffi, finishlist):
        return global_cache(self, ffi, 'new_void_type')

# shared singleton
void_type = VoidType()
|
|
||||||
|
|
||||||
|
|
||||||
class BasePrimitiveType(BaseType):
    """Marker base class for primitive-like types (known primitives and
    the 'typedef int...'-style unknown ones)."""
    pass
|
|
||||||
|
|
||||||
|
|
||||||
class PrimitiveType(BasePrimitiveType):
    """A named C primitive type.  ALL_PRIMITIVE_TYPES maps each accepted
    name to its category: 'c' character, 'i' integer, 'f' float."""
    _attrs_ = ('name',)

    ALL_PRIMITIVE_TYPES = {
        'char':               'c',
        'short':              'i',
        'int':                'i',
        'long':               'i',
        'long long':          'i',
        'signed char':        'i',
        'unsigned char':      'i',
        'unsigned short':     'i',
        'unsigned int':       'i',
        'unsigned long':      'i',
        'unsigned long long': 'i',
        'float':              'f',
        'double':             'f',
        'long double':        'f',
        '_Bool':              'i',
        # the following types are not primitive in the C sense
        'wchar_t':            'c',
        'int8_t':             'i',
        'uint8_t':            'i',
        'int16_t':            'i',
        'uint16_t':           'i',
        'int32_t':            'i',
        'uint32_t':           'i',
        'int64_t':            'i',
        'uint64_t':           'i',
        'int_least8_t':       'i',
        'uint_least8_t':      'i',
        'int_least16_t':      'i',
        'uint_least16_t':     'i',
        'int_least32_t':      'i',
        'uint_least32_t':     'i',
        'int_least64_t':      'i',
        'uint_least64_t':     'i',
        'int_fast8_t':        'i',
        'uint_fast8_t':       'i',
        'int_fast16_t':       'i',
        'uint_fast16_t':      'i',
        'int_fast32_t':       'i',
        'uint_fast32_t':      'i',
        'int_fast64_t':       'i',
        'uint_fast64_t':      'i',
        'intptr_t':           'i',
        'uintptr_t':          'i',
        'intmax_t':           'i',
        'uintmax_t':          'i',
        'ptrdiff_t':          'i',
        'size_t':             'i',
        'ssize_t':            'i',
        }

    def __init__(self, name):
        assert name in self.ALL_PRIMITIVE_TYPES
        self.name = name
        self.c_name_with_marker = name + '&'

    def is_char_type(self):
        return self.ALL_PRIMITIVE_TYPES[self.name] == 'c'
    def is_integer_type(self):
        return self.ALL_PRIMITIVE_TYPES[self.name] == 'i'
    def is_float_type(self):
        return self.ALL_PRIMITIVE_TYPES[self.name] == 'f'

    def build_backend_type(self, ffi, finishlist):
        return global_cache(self, ffi, 'new_primitive_type', self.name)
|
|
||||||
|
|
||||||
|
|
||||||
class UnknownIntegerType(BasePrimitiveType):
    """Integer type declared as 'typedef int... name;': its exact size
    and signedness are only discovered at compile time, so it cannot be
    turned into a backend type here."""
    _attrs_ = ('name',)

    def __init__(self, name):
        self.name = name
        self.c_name_with_marker = name + '&'

    def is_integer_type(self):
        return True

    def build_backend_type(self, ffi, finishlist):
        raise NotImplementedError("integer type '%s' can only be used after "
                                  "compilation" % self.name)
|
|
||||||
|
|
||||||
class UnknownFloatType(BasePrimitiveType):
    """Float type declared as 'typedef float... name;' (or double...):
    resolved only at compile time, so no backend type can be built."""
    _attrs_ = ('name', )

    def __init__(self, name):
        self.name = name
        self.c_name_with_marker = name + '&'

    def build_backend_type(self, ffi, finishlist):
        raise NotImplementedError("float type '%s' can only be used after "
                                  "compilation" % self.name)
|
|
||||||
|
|
||||||
|
|
||||||
class BaseFunctionType(BaseType):
    """Common behaviour of function-ish types; subclasses supply
    '_base_pattern' to render either 'ret(args)' or 'ret(*)(args)'."""
    _attrs_ = ('args', 'result', 'ellipsis', 'abi')

    def __init__(self, args, result, ellipsis, abi=None):
        """
        :param args: tuple of argument model types.
        :param result: the result model type.
        :param ellipsis: True for variadic ('...') functions.
        :param abi: calling-convention string (e.g. '__stdcall') or None.
        """
        self.args = args
        self.result = result
        self.ellipsis = ellipsis
        self.abi = abi
        #
        # build the rendered C name, e.g. 'int(*&)(char, ...)'
        reprargs = [arg._get_c_name() for arg in self.args]
        if self.ellipsis:
            reprargs.append('...')
        reprargs = reprargs or ['void']
        replace_with = self._base_pattern % (', '.join(reprargs),)
        if abi is not None:
            # insert the ABI keyword just after the opening '('
            replace_with = replace_with[:1] + abi + ' ' + replace_with[1:]
        self.c_name_with_marker = (
            self.result.c_name_with_marker.replace('&', replace_with))
|
|
||||||
|
|
||||||
|
|
||||||
class RawFunctionType(BaseFunctionType):
    # Corresponds to a C type like 'int(int)', which is the C type of
    # a function, but not a pointer-to-function.  The backend has no
    # notion of such a type; it's used temporarily by parsing.
    _base_pattern = '(&)(%s)'
    is_raw_function = True

    def build_backend_type(self, ffi, finishlist):
        # a raw function type can never be materialized directly
        from . import api
        raise api.CDefError("cannot render the type %r: it is a function "
                            "type, not a pointer-to-function type" % (self,))

    def as_function_pointer(self):
        """Return the corresponding pointer-to-function type."""
        return FunctionPtrType(self.args, self.result, self.ellipsis, self.abi)
|
|
||||||
|
|
||||||
|
|
||||||
class FunctionPtrType(BaseFunctionType):
    """A pointer-to-function type, e.g. 'int(*)(char)'."""
    _base_pattern = '(*&)(%s)'

    def build_backend_type(self, ffi, finishlist):
        result = self.result.get_cached_btype(ffi, finishlist)
        args = []
        for tp in self.args:
            args.append(tp.get_cached_btype(ffi, finishlist))
        abi_args = ()
        if self.abi == "__stdcall":
            if not self.ellipsis:    # __stdcall ignored for variadic funcs
                try:
                    abi_args = (ffi._backend.FFI_STDCALL,)
                except AttributeError:
                    # backend without __stdcall support: silently ignore
                    pass
        return global_cache(self, ffi, 'new_function_type',
                            tuple(args), result, self.ellipsis, *abi_args)

    def as_raw_function(self):
        """Return the corresponding plain (non-pointer) function type."""
        return RawFunctionType(self.args, self.result, self.ellipsis, self.abi)
|
|
||||||
|
|
||||||
|
|
||||||
class PointerType(BaseType):
    """A pointer to *totype*, with optional qualifier bits in *quals*."""
    _attrs_ = ('totype', 'quals')

    def __init__(self, totype, quals=0):
        self.totype = totype
        self.quals = quals
        extra = qualify(quals, " *&")
        if totype.is_array_type:
            # pointer-to-array needs parentheses: (*x)[n]
            extra = "(%s)" % (extra.lstrip(),)
        self.c_name_with_marker = totype.c_name_with_marker.replace('&', extra)

    def build_backend_type(self, ffi, finishlist):
        # can_delay=True: pointing to a not-yet-complete struct is fine
        BItem = self.totype.get_cached_btype(ffi, finishlist, can_delay=True)
        return global_cache(self, ffi, 'new_pointer_type', BItem)

# shared 'void *' instance
voidp_type = PointerType(void_type)
|
|
||||||
|
|
||||||
def ConstPointerType(totype):
    """Shortcut: a pointer to 'const *totype*'."""
    return PointerType(totype, Q_CONST)

# shared 'const void *' instance
const_voidp_type = ConstPointerType(void_type)
|
|
||||||
|
|
||||||
|
|
||||||
class NamedPointerType(PointerType):
    """A pointer type that is known under a typedef name; rendered with
    that name instead of the '*'-syntax."""
    _attrs_ = ('totype', 'name')

    def __init__(self, totype, name, quals=0):
        PointerType.__init__(self, totype, quals)
        self.name = name
        # override: render with the typedef name
        self.c_name_with_marker = name + '&'
|
|
||||||
|
|
||||||
|
|
||||||
class ArrayType(BaseType):
    """An array of *item*; *length* is an int, None for '[]', or the
    string '...' for a length to be measured at compile time."""
    _attrs_ = ('item', 'length')
    is_array_type = True

    def __init__(self, item, length):
        self.item = item
        self.length = length
        #
        if length is None:
            brackets = '&[]'
        elif length == '...':
            brackets = '&[/*...*/]'
        else:
            brackets = '&[%s]' % length
        self.c_name_with_marker = (
            self.item.c_name_with_marker.replace('&', brackets))

    def resolve_length(self, newlength):
        """Return a copy of this array type with a concrete length."""
        return ArrayType(self.item, newlength)

    def build_backend_type(self, ffi, finishlist):
        if self.length == '...':
            from . import api
            raise api.CDefError("cannot render the type %r: unknown length" %
                                (self,))
        self.item.get_cached_btype(ffi, finishlist)   # force the item BType
        BPtrItem = PointerType(self.item).get_cached_btype(ffi, finishlist)
        return global_cache(self, ffi, 'new_array_type', BPtrItem, self.length)

# shared 'char[]' instance
char_array_type = ArrayType(PrimitiveType('char'), None)
|
|
||||||
|
|
||||||
|
|
||||||
class StructOrUnionOrEnum(BaseTypeByIdentity):
    """Common base of struct/union/enum types.  'forcename', when set,
    replaces the default '<kind> <name>' rendering (used for typedefs
    of anonymous types)."""
    _attrs_ = ('name',)
    forcename = None

    def build_c_name_with_marker(self):
        name = self.forcename or '%s %s' % (self.kind, self.name)
        self.c_name_with_marker = name + '&'

    def force_the_name(self, forcename):
        """Set (or clear) the forced rendering name and re-render."""
        self.forcename = forcename
        self.build_c_name_with_marker()

    def get_official_name(self):
        assert self.c_name_with_marker.endswith('&')
        return self.c_name_with_marker[:-1]
|
|
||||||
|
|
||||||
|
|
||||||
class StructOrUnion(StructOrUnionOrEnum):
    """Model of a C struct or union.

    Field data is held in parallel tuples (fldnames/fldtypes/fldbitsize/
    fldquals), all None while the type is only forward-declared.
    'completed' is a small state machine: 0 = not done, 1 = completion
    in progress (used to detect recursion), 2 = done.
    """
    fixedlayout = None       # (fieldofs, fieldsize, totalsize, align) or None
    completed = 0
    partial = False          # True when declared with '...'
    packed = False

    def __init__(self, name, fldnames, fldtypes, fldbitsize, fldquals=None):
        self.name = name
        self.fldnames = fldnames
        self.fldtypes = fldtypes
        self.fldbitsize = fldbitsize
        self.fldquals = fldquals
        self.build_c_name_with_marker()

    def has_anonymous_struct_fields(self):
        """True if any field is an unnamed nested struct/union."""
        if self.fldtypes is None:
            return False
        for name, type in zip(self.fldnames, self.fldtypes):
            if name == '' and isinstance(type, StructOrUnion):
                return True
        return False

    def enumfields(self):
        """Yield (name, type, bitsize, quals) for each field, recursing
        into anonymous nested structs/unions."""
        fldquals = self.fldquals
        if fldquals is None:
            fldquals = (0,) * len(self.fldnames)
        for name, type, bitsize, quals in zip(self.fldnames, self.fldtypes,
                                              self.fldbitsize, fldquals):
            if name == '' and isinstance(type, StructOrUnion):
                # nested anonymous struct/union
                for result in type.enumfields():
                    yield result
            else:
                yield (name, type, bitsize, quals)

    def force_flatten(self):
        # force the struct or union to have a declaration that lists
        # directly all fields returned by enumfields(), flattening
        # nested anonymous structs/unions.
        names = []
        types = []
        bitsizes = []
        fldquals = []
        for name, type, bitsize, quals in self.enumfields():
            names.append(name)
            types.append(type)
            bitsizes.append(bitsize)
            fldquals.append(quals)
        self.fldnames = tuple(names)
        self.fldtypes = tuple(types)
        self.fldbitsize = tuple(bitsizes)
        self.fldquals = tuple(fldquals)

    def get_cached_btype(self, ffi, finishlist, can_delay=False):
        """As in the base class, but also complete the struct layout
        immediately unless *can_delay* (e.g. behind a pointer)."""
        BType = StructOrUnionOrEnum.get_cached_btype(self, ffi, finishlist,
                                                     can_delay)
        if not can_delay:
            self.finish_backend_type(ffi, finishlist)
        return BType

    def finish_backend_type(self, ffi, finishlist):
        """Fill in the backend struct/union layout (idempotent).

        :raises NotImplementedError: on recursive structure declarations
            (re-entered while completion is already in progress).
        """
        if self.completed:
            if self.completed != 2:
                raise NotImplementedError("recursive structure declaration "
                                          "for '%s'" % (self.name,))
            return
        BType = ffi._cached_btypes[self]
        #
        self.completed = 1     # mark in-progress to catch recursion
        #
        if self.fldtypes is None:
            pass    # not completing it: it's an opaque struct
        #
        elif self.fixedlayout is None:
            # let the backend compute offsets/sizes itself
            fldtypes = [tp.get_cached_btype(ffi, finishlist)
                        for tp in self.fldtypes]
            lst = list(zip(self.fldnames, fldtypes, self.fldbitsize))
            sflags = 0
            if self.packed:
                sflags = 8    # SF_PACKED
            ffi._backend.complete_struct_or_union(BType, lst, self,
                                                  -1, -1, sflags)
            #
        else:
            # a layout measured by the compiler was supplied; check the
            # declared fields against it
            fldtypes = []
            fieldofs, fieldsize, totalsize, totalalignment = self.fixedlayout
            for i in range(len(self.fldnames)):
                fsize = fieldsize[i]
                ftype = self.fldtypes[i]
                #
                if isinstance(ftype, ArrayType) and ftype.length == '...':
                    # fix the length to match the total size
                    BItemType = ftype.item.get_cached_btype(ffi, finishlist)
                    nlen, nrest = divmod(fsize, ffi.sizeof(BItemType))
                    if nrest != 0:
                        self._verification_error(
                            "field '%s.%s' has a bogus size?" % (
                                self.name, self.fldnames[i] or '{}'))
                    ftype = ftype.resolve_length(nlen)
                    self.fldtypes = (self.fldtypes[:i] + (ftype,) +
                                     self.fldtypes[i+1:])
                #
                BFieldType = ftype.get_cached_btype(ffi, finishlist)
                if isinstance(ftype, ArrayType) and ftype.length is None:
                    assert fsize == 0   # open-ended array reports size 0
                else:
                    bitemsize = ffi.sizeof(BFieldType)
                    if bitemsize != fsize:
                        self._verification_error(
                            "field '%s.%s' is declared as %d bytes, but is "
                            "really %d bytes" % (self.name,
                                                 self.fldnames[i] or '{}',
                                                 bitemsize, fsize))
                fldtypes.append(BFieldType)
            #
            lst = list(zip(self.fldnames, fldtypes, self.fldbitsize, fieldofs))
            ffi._backend.complete_struct_or_union(BType, lst, self,
                                                  totalsize, totalalignment)
        self.completed = 2

    def _verification_error(self, msg):
        from .ffiplatform import VerificationError
        raise VerificationError(msg)

    def check_not_partial(self):
        # a partial struct without a measured layout cannot be used yet
        if self.partial and self.fixedlayout is None:
            from . import ffiplatform
            raise ffiplatform.VerificationMissing(self._get_c_name())

    def build_backend_type(self, ffi, finishlist):
        self.check_not_partial()
        finishlist.append(self)   # schedule layout completion
        #
        return global_cache(self, ffi, 'new_%s_type' % self.kind,
                            self.get_official_name(), key=self)
|
|
||||||
|
|
||||||
|
|
||||||
class StructType(StructOrUnion):
|
|
||||||
kind = 'struct'
|
|
||||||
|
|
||||||
|
|
||||||
class UnionType(StructOrUnion):
|
|
||||||
kind = 'union'
|
|
||||||
|
|
||||||
|
|
||||||
class EnumType(StructOrUnionOrEnum):
|
|
||||||
kind = 'enum'
|
|
||||||
partial = False
|
|
||||||
partial_resolved = False
|
|
||||||
|
|
||||||
def __init__(self, name, enumerators, enumvalues, baseinttype=None):
|
|
||||||
self.name = name
|
|
||||||
self.enumerators = enumerators
|
|
||||||
self.enumvalues = enumvalues
|
|
||||||
self.baseinttype = baseinttype
|
|
||||||
self.build_c_name_with_marker()
|
|
||||||
|
|
||||||
def force_the_name(self, forcename):
|
|
||||||
StructOrUnionOrEnum.force_the_name(self, forcename)
|
|
||||||
if self.forcename is None:
|
|
||||||
name = self.get_official_name()
|
|
||||||
self.forcename = '$' + name.replace(' ', '_')
|
|
||||||
|
|
||||||
def check_not_partial(self):
|
|
||||||
if self.partial and not self.partial_resolved:
|
|
||||||
from . import ffiplatform
|
|
||||||
raise ffiplatform.VerificationMissing(self._get_c_name())
|
|
||||||
|
|
||||||
def build_backend_type(self, ffi, finishlist):
|
|
||||||
self.check_not_partial()
|
|
||||||
base_btype = self.build_baseinttype(ffi, finishlist)
|
|
||||||
return global_cache(self, ffi, 'new_enum_type',
|
|
||||||
self.get_official_name(),
|
|
||||||
self.enumerators, self.enumvalues,
|
|
||||||
base_btype, key=self)
|
|
||||||
|
|
||||||
def build_baseinttype(self, ffi, finishlist):
|
|
||||||
if self.baseinttype is not None:
|
|
||||||
return self.baseinttype.get_cached_btype(ffi, finishlist)
|
|
||||||
#
|
|
||||||
if self.enumvalues:
|
|
||||||
smallest_value = min(self.enumvalues)
|
|
||||||
largest_value = max(self.enumvalues)
|
|
||||||
else:
|
|
||||||
smallest_value = 0
|
|
||||||
largest_value = 0
|
|
||||||
if smallest_value < 0: # needs a signed type
|
|
||||||
sign = 1
|
|
||||||
candidate1 = PrimitiveType("int")
|
|
||||||
candidate2 = PrimitiveType("long")
|
|
||||||
else:
|
|
||||||
sign = 0
|
|
||||||
candidate1 = PrimitiveType("unsigned int")
|
|
||||||
candidate2 = PrimitiveType("unsigned long")
|
|
||||||
btype1 = candidate1.get_cached_btype(ffi, finishlist)
|
|
||||||
btype2 = candidate2.get_cached_btype(ffi, finishlist)
|
|
||||||
size1 = ffi.sizeof(btype1)
|
|
||||||
size2 = ffi.sizeof(btype2)
|
|
||||||
if (smallest_value >= ((-1) << (8*size1-1)) and
|
|
||||||
largest_value < (1 << (8*size1-sign))):
|
|
||||||
return btype1
|
|
||||||
if (smallest_value >= ((-1) << (8*size2-1)) and
|
|
||||||
largest_value < (1 << (8*size2-sign))):
|
|
||||||
return btype2
|
|
||||||
raise api.CDefError("%s values don't all fit into either 'long' "
|
|
||||||
"or 'unsigned long'" % self._get_c_name())
|
|
||||||
|
|
||||||
def unknown_type(name, structname=None):
|
|
||||||
if structname is None:
|
|
||||||
structname = '$%s' % name
|
|
||||||
tp = StructType(structname, None, None, None)
|
|
||||||
tp.force_the_name(name)
|
|
||||||
tp.origin = "unknown_type"
|
|
||||||
return tp
|
|
||||||
|
|
||||||
def unknown_ptr_type(name, structname=None):
|
|
||||||
if structname is None:
|
|
||||||
structname = '$$%s' % name
|
|
||||||
tp = StructType(structname, None, None, None)
|
|
||||||
return NamedPointerType(tp, name)
|
|
||||||
|
|
||||||
|
|
||||||
global_lock = allocate_lock()
|
|
||||||
|
|
||||||
def global_cache(srctype, ffi, funcname, *args, **kwds):
|
|
||||||
key = kwds.pop('key', (funcname, args))
|
|
||||||
assert not kwds
|
|
||||||
try:
|
|
||||||
return ffi._backend.__typecache[key]
|
|
||||||
except KeyError:
|
|
||||||
pass
|
|
||||||
except AttributeError:
|
|
||||||
# initialize the __typecache attribute, either at the module level
|
|
||||||
# if ffi._backend is a module, or at the class level if ffi._backend
|
|
||||||
# is some instance.
|
|
||||||
if isinstance(ffi._backend, types.ModuleType):
|
|
||||||
ffi._backend.__typecache = weakref.WeakValueDictionary()
|
|
||||||
else:
|
|
||||||
type(ffi._backend).__typecache = weakref.WeakValueDictionary()
|
|
||||||
try:
|
|
||||||
res = getattr(ffi._backend, funcname)(*args)
|
|
||||||
except NotImplementedError as e:
|
|
||||||
raise NotImplementedError("%s: %r: %s" % (funcname, srctype, e))
|
|
||||||
# note that setdefault() on WeakValueDictionary is not atomic
|
|
||||||
# and contains a rare bug (http://bugs.python.org/issue19542);
|
|
||||||
# we have to use a lock and do it ourselves
|
|
||||||
cache = ffi._backend.__typecache
|
|
||||||
with global_lock:
|
|
||||||
res1 = cache.get(key)
|
|
||||||
if res1 is None:
|
|
||||||
cache[key] = res
|
|
||||||
return res
|
|
||||||
else:
|
|
||||||
return res1
|
|
||||||
|
|
||||||
def pointer_cache(ffi, BType):
|
|
||||||
return global_cache('?', ffi, 'new_pointer_type', BType)
|
|
||||||
|
|
||||||
def attach_exception_info(e, name):
|
|
||||||
if e.args and type(e.args[0]) is str:
|
|
||||||
e.args = ('%s: %s' % (name, e.args[0]),) + e.args[1:]
|
|
|
@ -1,167 +0,0 @@
|
||||||
|
|
||||||
/* See doc/misc/parse_c_type.rst in the source of CFFI for more information */
|
|
||||||
|
|
||||||
typedef void *_cffi_opcode_t;
|
|
||||||
|
|
||||||
#define _CFFI_OP(opcode, arg) (_cffi_opcode_t)(opcode | (((uintptr_t)(arg)) << 8))
|
|
||||||
#define _CFFI_GETOP(cffi_opcode) ((unsigned char)(uintptr_t)cffi_opcode)
|
|
||||||
#define _CFFI_GETARG(cffi_opcode) (((intptr_t)cffi_opcode) >> 8)
|
|
||||||
|
|
||||||
#define _CFFI_OP_PRIMITIVE 1
|
|
||||||
#define _CFFI_OP_POINTER 3
|
|
||||||
#define _CFFI_OP_ARRAY 5
|
|
||||||
#define _CFFI_OP_OPEN_ARRAY 7
|
|
||||||
#define _CFFI_OP_STRUCT_UNION 9
|
|
||||||
#define _CFFI_OP_ENUM 11
|
|
||||||
#define _CFFI_OP_FUNCTION 13
|
|
||||||
#define _CFFI_OP_FUNCTION_END 15
|
|
||||||
#define _CFFI_OP_NOOP 17
|
|
||||||
#define _CFFI_OP_BITFIELD 19
|
|
||||||
#define _CFFI_OP_TYPENAME 21
|
|
||||||
#define _CFFI_OP_CPYTHON_BLTN_V 23 // varargs
|
|
||||||
#define _CFFI_OP_CPYTHON_BLTN_N 25 // noargs
|
|
||||||
#define _CFFI_OP_CPYTHON_BLTN_O 27 // O (i.e. a single arg)
|
|
||||||
#define _CFFI_OP_CONSTANT 29
|
|
||||||
#define _CFFI_OP_CONSTANT_INT 31
|
|
||||||
#define _CFFI_OP_GLOBAL_VAR 33
|
|
||||||
#define _CFFI_OP_DLOPEN_FUNC 35
|
|
||||||
#define _CFFI_OP_DLOPEN_CONST 37
|
|
||||||
#define _CFFI_OP_GLOBAL_VAR_F 39
|
|
||||||
|
|
||||||
#define _CFFI_PRIM_VOID 0
|
|
||||||
#define _CFFI_PRIM_BOOL 1
|
|
||||||
#define _CFFI_PRIM_CHAR 2
|
|
||||||
#define _CFFI_PRIM_SCHAR 3
|
|
||||||
#define _CFFI_PRIM_UCHAR 4
|
|
||||||
#define _CFFI_PRIM_SHORT 5
|
|
||||||
#define _CFFI_PRIM_USHORT 6
|
|
||||||
#define _CFFI_PRIM_INT 7
|
|
||||||
#define _CFFI_PRIM_UINT 8
|
|
||||||
#define _CFFI_PRIM_LONG 9
|
|
||||||
#define _CFFI_PRIM_ULONG 10
|
|
||||||
#define _CFFI_PRIM_LONGLONG 11
|
|
||||||
#define _CFFI_PRIM_ULONGLONG 12
|
|
||||||
#define _CFFI_PRIM_FLOAT 13
|
|
||||||
#define _CFFI_PRIM_DOUBLE 14
|
|
||||||
#define _CFFI_PRIM_LONGDOUBLE 15
|
|
||||||
|
|
||||||
#define _CFFI_PRIM_WCHAR 16
|
|
||||||
#define _CFFI_PRIM_INT8 17
|
|
||||||
#define _CFFI_PRIM_UINT8 18
|
|
||||||
#define _CFFI_PRIM_INT16 19
|
|
||||||
#define _CFFI_PRIM_UINT16 20
|
|
||||||
#define _CFFI_PRIM_INT32 21
|
|
||||||
#define _CFFI_PRIM_UINT32 22
|
|
||||||
#define _CFFI_PRIM_INT64 23
|
|
||||||
#define _CFFI_PRIM_UINT64 24
|
|
||||||
#define _CFFI_PRIM_INTPTR 25
|
|
||||||
#define _CFFI_PRIM_UINTPTR 26
|
|
||||||
#define _CFFI_PRIM_PTRDIFF 27
|
|
||||||
#define _CFFI_PRIM_SIZE 28
|
|
||||||
#define _CFFI_PRIM_SSIZE 29
|
|
||||||
#define _CFFI_PRIM_INT_LEAST8 30
|
|
||||||
#define _CFFI_PRIM_UINT_LEAST8 31
|
|
||||||
#define _CFFI_PRIM_INT_LEAST16 32
|
|
||||||
#define _CFFI_PRIM_UINT_LEAST16 33
|
|
||||||
#define _CFFI_PRIM_INT_LEAST32 34
|
|
||||||
#define _CFFI_PRIM_UINT_LEAST32 35
|
|
||||||
#define _CFFI_PRIM_INT_LEAST64 36
|
|
||||||
#define _CFFI_PRIM_UINT_LEAST64 37
|
|
||||||
#define _CFFI_PRIM_INT_FAST8 38
|
|
||||||
#define _CFFI_PRIM_UINT_FAST8 39
|
|
||||||
#define _CFFI_PRIM_INT_FAST16 40
|
|
||||||
#define _CFFI_PRIM_UINT_FAST16 41
|
|
||||||
#define _CFFI_PRIM_INT_FAST32 42
|
|
||||||
#define _CFFI_PRIM_UINT_FAST32 43
|
|
||||||
#define _CFFI_PRIM_INT_FAST64 44
|
|
||||||
#define _CFFI_PRIM_UINT_FAST64 45
|
|
||||||
#define _CFFI_PRIM_INTMAX 46
|
|
||||||
#define _CFFI_PRIM_UINTMAX 47
|
|
||||||
|
|
||||||
#define _CFFI__NUM_PRIM 48
|
|
||||||
#define _CFFI__UNKNOWN_PRIM (-1)
|
|
||||||
#define _CFFI__UNKNOWN_FLOAT_PRIM (-2)
|
|
||||||
#define _CFFI__UNKNOWN_LONG_DOUBLE (-3)
|
|
||||||
|
|
||||||
|
|
||||||
struct _cffi_global_s {
|
|
||||||
const char *name;
|
|
||||||
void *address;
|
|
||||||
_cffi_opcode_t type_op;
|
|
||||||
void *size_or_direct_fn; // OP_GLOBAL_VAR: size, or 0 if unknown
|
|
||||||
// OP_CPYTHON_BLTN_*: addr of direct function
|
|
||||||
};
|
|
||||||
|
|
||||||
struct _cffi_getconst_s {
|
|
||||||
unsigned long long value;
|
|
||||||
const struct _cffi_type_context_s *ctx;
|
|
||||||
int gindex;
|
|
||||||
};
|
|
||||||
|
|
||||||
struct _cffi_struct_union_s {
|
|
||||||
const char *name;
|
|
||||||
int type_index; // -> _cffi_types, on a OP_STRUCT_UNION
|
|
||||||
int flags; // _CFFI_F_* flags below
|
|
||||||
size_t size;
|
|
||||||
int alignment;
|
|
||||||
int first_field_index; // -> _cffi_fields array
|
|
||||||
int num_fields;
|
|
||||||
};
|
|
||||||
#define _CFFI_F_UNION 0x01 // is a union, not a struct
|
|
||||||
#define _CFFI_F_CHECK_FIELDS 0x02 // complain if fields are not in the
|
|
||||||
// "standard layout" or if some are missing
|
|
||||||
#define _CFFI_F_PACKED 0x04 // for CHECK_FIELDS, assume a packed struct
|
|
||||||
#define _CFFI_F_EXTERNAL 0x08 // in some other ffi.include()
|
|
||||||
#define _CFFI_F_OPAQUE 0x10 // opaque
|
|
||||||
|
|
||||||
struct _cffi_field_s {
|
|
||||||
const char *name;
|
|
||||||
size_t field_offset;
|
|
||||||
size_t field_size;
|
|
||||||
_cffi_opcode_t field_type_op;
|
|
||||||
};
|
|
||||||
|
|
||||||
struct _cffi_enum_s {
|
|
||||||
const char *name;
|
|
||||||
int type_index; // -> _cffi_types, on a OP_ENUM
|
|
||||||
int type_prim; // _CFFI_PRIM_xxx
|
|
||||||
const char *enumerators; // comma-delimited string
|
|
||||||
};
|
|
||||||
|
|
||||||
struct _cffi_typename_s {
|
|
||||||
const char *name;
|
|
||||||
int type_index; /* if opaque, points to a possibly artificial
|
|
||||||
OP_STRUCT which is itself opaque */
|
|
||||||
};
|
|
||||||
|
|
||||||
struct _cffi_type_context_s {
|
|
||||||
_cffi_opcode_t *types;
|
|
||||||
const struct _cffi_global_s *globals;
|
|
||||||
const struct _cffi_field_s *fields;
|
|
||||||
const struct _cffi_struct_union_s *struct_unions;
|
|
||||||
const struct _cffi_enum_s *enums;
|
|
||||||
const struct _cffi_typename_s *typenames;
|
|
||||||
int num_globals;
|
|
||||||
int num_struct_unions;
|
|
||||||
int num_enums;
|
|
||||||
int num_typenames;
|
|
||||||
const char *const *includes;
|
|
||||||
int num_types;
|
|
||||||
int flags; /* future extension */
|
|
||||||
};
|
|
||||||
|
|
||||||
struct _cffi_parse_info_s {
|
|
||||||
const struct _cffi_type_context_s *ctx;
|
|
||||||
_cffi_opcode_t *output;
|
|
||||||
unsigned int output_size;
|
|
||||||
size_t error_location;
|
|
||||||
const char *error_message;
|
|
||||||
};
|
|
||||||
|
|
||||||
#ifdef _CFFI_INTERNAL
|
|
||||||
static int parse_c_type(struct _cffi_parse_info_s *info, const char *input);
|
|
||||||
static int search_in_globals(const struct _cffi_type_context_s *ctx,
|
|
||||||
const char *search, size_t search_len);
|
|
||||||
static int search_in_struct_unions(const struct _cffi_type_context_s *ctx,
|
|
||||||
const char *search, size_t search_len);
|
|
||||||
#endif
|
|
File diff suppressed because it is too large
Load diff
|
@ -1,161 +0,0 @@
|
||||||
import os
|
|
||||||
|
|
||||||
try:
|
|
||||||
basestring
|
|
||||||
except NameError:
|
|
||||||
# Python 3.x
|
|
||||||
basestring = str
|
|
||||||
|
|
||||||
def error(msg):
|
|
||||||
from distutils.errors import DistutilsSetupError
|
|
||||||
raise DistutilsSetupError(msg)
|
|
||||||
|
|
||||||
|
|
||||||
def execfile(filename, glob):
|
|
||||||
# We use execfile() (here rewritten for Python 3) instead of
|
|
||||||
# __import__() to load the build script. The problem with
|
|
||||||
# a normal import is that in some packages, the intermediate
|
|
||||||
# __init__.py files may already try to import the file that
|
|
||||||
# we are generating.
|
|
||||||
with open(filename) as f:
|
|
||||||
src = f.read()
|
|
||||||
src += '\n' # Python 2.6 compatibility
|
|
||||||
code = compile(src, filename, 'exec')
|
|
||||||
exec(code, glob, glob)
|
|
||||||
|
|
||||||
|
|
||||||
def add_cffi_module(dist, mod_spec):
|
|
||||||
from cffi.api import FFI
|
|
||||||
|
|
||||||
if not isinstance(mod_spec, basestring):
|
|
||||||
error("argument to 'cffi_modules=...' must be a str or a list of str,"
|
|
||||||
" not %r" % (type(mod_spec).__name__,))
|
|
||||||
mod_spec = str(mod_spec)
|
|
||||||
try:
|
|
||||||
build_file_name, ffi_var_name = mod_spec.split(':')
|
|
||||||
except ValueError:
|
|
||||||
error("%r must be of the form 'path/build.py:ffi_variable'" %
|
|
||||||
(mod_spec,))
|
|
||||||
if not os.path.exists(build_file_name):
|
|
||||||
ext = ''
|
|
||||||
rewritten = build_file_name.replace('.', '/') + '.py'
|
|
||||||
if os.path.exists(rewritten):
|
|
||||||
ext = ' (rewrite cffi_modules to [%r])' % (
|
|
||||||
rewritten + ':' + ffi_var_name,)
|
|
||||||
error("%r does not name an existing file%s" % (build_file_name, ext))
|
|
||||||
|
|
||||||
mod_vars = {'__name__': '__cffi__', '__file__': build_file_name}
|
|
||||||
execfile(build_file_name, mod_vars)
|
|
||||||
|
|
||||||
try:
|
|
||||||
ffi = mod_vars[ffi_var_name]
|
|
||||||
except KeyError:
|
|
||||||
error("%r: object %r not found in module" % (mod_spec,
|
|
||||||
ffi_var_name))
|
|
||||||
if not isinstance(ffi, FFI):
|
|
||||||
ffi = ffi() # maybe it's a function instead of directly an ffi
|
|
||||||
if not isinstance(ffi, FFI):
|
|
||||||
error("%r is not an FFI instance (got %r)" % (mod_spec,
|
|
||||||
type(ffi).__name__))
|
|
||||||
if not hasattr(ffi, '_assigned_source'):
|
|
||||||
error("%r: the set_source() method was not called" % (mod_spec,))
|
|
||||||
module_name, source, source_extension, kwds = ffi._assigned_source
|
|
||||||
if ffi._windows_unicode:
|
|
||||||
kwds = kwds.copy()
|
|
||||||
ffi._apply_windows_unicode(kwds)
|
|
||||||
|
|
||||||
if source is None:
|
|
||||||
_add_py_module(dist, ffi, module_name)
|
|
||||||
else:
|
|
||||||
_add_c_module(dist, ffi, module_name, source, source_extension, kwds)
|
|
||||||
|
|
||||||
|
|
||||||
def _add_c_module(dist, ffi, module_name, source, source_extension, kwds):
|
|
||||||
from distutils.core import Extension
|
|
||||||
from distutils.command.build_ext import build_ext
|
|
||||||
from distutils.dir_util import mkpath
|
|
||||||
from distutils import log
|
|
||||||
from cffi import recompiler
|
|
||||||
|
|
||||||
allsources = ['$PLACEHOLDER']
|
|
||||||
allsources.extend(kwds.pop('sources', []))
|
|
||||||
ext = Extension(name=module_name, sources=allsources, **kwds)
|
|
||||||
|
|
||||||
def make_mod(tmpdir, pre_run=None):
|
|
||||||
c_file = os.path.join(tmpdir, module_name + source_extension)
|
|
||||||
log.info("generating cffi module %r" % c_file)
|
|
||||||
mkpath(tmpdir)
|
|
||||||
# a setuptools-only, API-only hook: called with the "ext" and "ffi"
|
|
||||||
# arguments just before we turn the ffi into C code. To use it,
|
|
||||||
# subclass the 'distutils.command.build_ext.build_ext' class and
|
|
||||||
# add a method 'def pre_run(self, ext, ffi)'.
|
|
||||||
if pre_run is not None:
|
|
||||||
pre_run(ext, ffi)
|
|
||||||
updated = recompiler.make_c_source(ffi, module_name, source, c_file)
|
|
||||||
if not updated:
|
|
||||||
log.info("already up-to-date")
|
|
||||||
return c_file
|
|
||||||
|
|
||||||
if dist.ext_modules is None:
|
|
||||||
dist.ext_modules = []
|
|
||||||
dist.ext_modules.append(ext)
|
|
||||||
|
|
||||||
base_class = dist.cmdclass.get('build_ext', build_ext)
|
|
||||||
class build_ext_make_mod(base_class):
|
|
||||||
def run(self):
|
|
||||||
if ext.sources[0] == '$PLACEHOLDER':
|
|
||||||
pre_run = getattr(self, 'pre_run', None)
|
|
||||||
ext.sources[0] = make_mod(self.build_temp, pre_run)
|
|
||||||
base_class.run(self)
|
|
||||||
dist.cmdclass['build_ext'] = build_ext_make_mod
|
|
||||||
# NB. multiple runs here will create multiple 'build_ext_make_mod'
|
|
||||||
# classes. Even in this case the 'build_ext' command should be
|
|
||||||
# run once; but just in case, the logic above does nothing if
|
|
||||||
# called again.
|
|
||||||
|
|
||||||
|
|
||||||
def _add_py_module(dist, ffi, module_name):
|
|
||||||
from distutils.dir_util import mkpath
|
|
||||||
from distutils.command.build_py import build_py
|
|
||||||
from distutils.command.build_ext import build_ext
|
|
||||||
from distutils import log
|
|
||||||
from cffi import recompiler
|
|
||||||
|
|
||||||
def generate_mod(py_file):
|
|
||||||
log.info("generating cffi module %r" % py_file)
|
|
||||||
mkpath(os.path.dirname(py_file))
|
|
||||||
updated = recompiler.make_py_source(ffi, module_name, py_file)
|
|
||||||
if not updated:
|
|
||||||
log.info("already up-to-date")
|
|
||||||
|
|
||||||
base_class = dist.cmdclass.get('build_py', build_py)
|
|
||||||
class build_py_make_mod(base_class):
|
|
||||||
def run(self):
|
|
||||||
base_class.run(self)
|
|
||||||
module_path = module_name.split('.')
|
|
||||||
module_path[-1] += '.py'
|
|
||||||
generate_mod(os.path.join(self.build_lib, *module_path))
|
|
||||||
dist.cmdclass['build_py'] = build_py_make_mod
|
|
||||||
|
|
||||||
# the following is only for "build_ext -i"
|
|
||||||
base_class_2 = dist.cmdclass.get('build_ext', build_ext)
|
|
||||||
class build_ext_make_mod(base_class_2):
|
|
||||||
def run(self):
|
|
||||||
base_class_2.run(self)
|
|
||||||
if self.inplace:
|
|
||||||
# from get_ext_fullpath() in distutils/command/build_ext.py
|
|
||||||
module_path = module_name.split('.')
|
|
||||||
package = '.'.join(module_path[:-1])
|
|
||||||
build_py = self.get_finalized_command('build_py')
|
|
||||||
package_dir = build_py.get_package_dir(package)
|
|
||||||
file_name = module_path[-1] + '.py'
|
|
||||||
generate_mod(os.path.join(package_dir, file_name))
|
|
||||||
dist.cmdclass['build_ext'] = build_ext_make_mod
|
|
||||||
|
|
||||||
def cffi_modules(dist, attr, value):
|
|
||||||
assert attr == 'cffi_modules'
|
|
||||||
if isinstance(value, basestring):
|
|
||||||
value = [value]
|
|
||||||
|
|
||||||
for cffi_module in value:
|
|
||||||
add_cffi_module(dist, cffi_module)
|
|
File diff suppressed because it is too large
Load diff
|
@ -1,668 +0,0 @@
|
||||||
import sys, os
|
|
||||||
import types
|
|
||||||
|
|
||||||
from . import model, ffiplatform
|
|
||||||
|
|
||||||
|
|
||||||
class VGenericEngine(object):
|
|
||||||
_class_key = 'g'
|
|
||||||
_gen_python_module = False
|
|
||||||
|
|
||||||
def __init__(self, verifier):
|
|
||||||
self.verifier = verifier
|
|
||||||
self.ffi = verifier.ffi
|
|
||||||
self.export_symbols = []
|
|
||||||
self._struct_pending_verification = {}
|
|
||||||
|
|
||||||
def patch_extension_kwds(self, kwds):
|
|
||||||
# add 'export_symbols' to the dictionary. Note that we add the
|
|
||||||
# list before filling it. When we fill it, it will thus also show
|
|
||||||
# up in kwds['export_symbols'].
|
|
||||||
kwds.setdefault('export_symbols', self.export_symbols)
|
|
||||||
|
|
||||||
def find_module(self, module_name, path, so_suffixes):
|
|
||||||
for so_suffix in so_suffixes:
|
|
||||||
basename = module_name + so_suffix
|
|
||||||
if path is None:
|
|
||||||
path = sys.path
|
|
||||||
for dirname in path:
|
|
||||||
filename = os.path.join(dirname, basename)
|
|
||||||
if os.path.isfile(filename):
|
|
||||||
return filename
|
|
||||||
|
|
||||||
def collect_types(self):
|
|
||||||
pass # not needed in the generic engine
|
|
||||||
|
|
||||||
def _prnt(self, what=''):
|
|
||||||
self._f.write(what + '\n')
|
|
||||||
|
|
||||||
def write_source_to_f(self):
|
|
||||||
prnt = self._prnt
|
|
||||||
# first paste some standard set of lines that are mostly '#include'
|
|
||||||
prnt(cffimod_header)
|
|
||||||
# then paste the C source given by the user, verbatim.
|
|
||||||
prnt(self.verifier.preamble)
|
|
||||||
#
|
|
||||||
# call generate_gen_xxx_decl(), for every xxx found from
|
|
||||||
# ffi._parser._declarations. This generates all the functions.
|
|
||||||
self._generate('decl')
|
|
||||||
#
|
|
||||||
# on Windows, distutils insists on putting init_cffi_xyz in
|
|
||||||
# 'export_symbols', so instead of fighting it, just give up and
|
|
||||||
# give it one
|
|
||||||
if sys.platform == 'win32':
|
|
||||||
if sys.version_info >= (3,):
|
|
||||||
prefix = 'PyInit_'
|
|
||||||
else:
|
|
||||||
prefix = 'init'
|
|
||||||
modname = self.verifier.get_module_name()
|
|
||||||
prnt("void %s%s(void) { }\n" % (prefix, modname))
|
|
||||||
|
|
||||||
def load_library(self, flags=0):
|
|
||||||
# import it with the CFFI backend
|
|
||||||
backend = self.ffi._backend
|
|
||||||
# needs to make a path that contains '/', on Posix
|
|
||||||
filename = os.path.join(os.curdir, self.verifier.modulefilename)
|
|
||||||
module = backend.load_library(filename, flags)
|
|
||||||
#
|
|
||||||
# call loading_gen_struct() to get the struct layout inferred by
|
|
||||||
# the C compiler
|
|
||||||
self._load(module, 'loading')
|
|
||||||
|
|
||||||
# build the FFILibrary class and instance, this is a module subclass
|
|
||||||
# because modules are expected to have usually-constant-attributes and
|
|
||||||
# in PyPy this means the JIT is able to treat attributes as constant,
|
|
||||||
# which we want.
|
|
||||||
class FFILibrary(types.ModuleType):
|
|
||||||
_cffi_generic_module = module
|
|
||||||
_cffi_ffi = self.ffi
|
|
||||||
_cffi_dir = []
|
|
||||||
def __dir__(self):
|
|
||||||
return FFILibrary._cffi_dir
|
|
||||||
library = FFILibrary("")
|
|
||||||
#
|
|
||||||
# finally, call the loaded_gen_xxx() functions. This will set
|
|
||||||
# up the 'library' object.
|
|
||||||
self._load(module, 'loaded', library=library)
|
|
||||||
return library
|
|
||||||
|
|
||||||
def _get_declarations(self):
|
|
||||||
lst = [(key, tp) for (key, (tp, qual)) in
|
|
||||||
self.ffi._parser._declarations.items()]
|
|
||||||
lst.sort()
|
|
||||||
return lst
|
|
||||||
|
|
||||||
def _generate(self, step_name):
|
|
||||||
for name, tp in self._get_declarations():
|
|
||||||
kind, realname = name.split(' ', 1)
|
|
||||||
try:
|
|
||||||
method = getattr(self, '_generate_gen_%s_%s' % (kind,
|
|
||||||
step_name))
|
|
||||||
except AttributeError:
|
|
||||||
raise ffiplatform.VerificationError(
|
|
||||||
"not implemented in verify(): %r" % name)
|
|
||||||
try:
|
|
||||||
method(tp, realname)
|
|
||||||
except Exception as e:
|
|
||||||
model.attach_exception_info(e, name)
|
|
||||||
raise
|
|
||||||
|
|
||||||
def _load(self, module, step_name, **kwds):
|
|
||||||
for name, tp in self._get_declarations():
|
|
||||||
kind, realname = name.split(' ', 1)
|
|
||||||
method = getattr(self, '_%s_gen_%s' % (step_name, kind))
|
|
||||||
try:
|
|
||||||
method(tp, realname, module, **kwds)
|
|
||||||
except Exception as e:
|
|
||||||
model.attach_exception_info(e, name)
|
|
||||||
raise
|
|
||||||
|
|
||||||
def _generate_nothing(self, tp, name):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def _loaded_noop(self, tp, name, module, **kwds):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# ----------
|
|
||||||
# typedefs: generates no code so far
|
|
||||||
|
|
||||||
_generate_gen_typedef_decl = _generate_nothing
|
|
||||||
_loading_gen_typedef = _loaded_noop
|
|
||||||
_loaded_gen_typedef = _loaded_noop
|
|
||||||
|
|
||||||
# ----------
|
|
||||||
# function declarations
|
|
||||||
|
|
||||||
def _generate_gen_function_decl(self, tp, name):
|
|
||||||
assert isinstance(tp, model.FunctionPtrType)
|
|
||||||
if tp.ellipsis:
|
|
||||||
# cannot support vararg functions better than this: check for its
|
|
||||||
# exact type (including the fixed arguments), and build it as a
|
|
||||||
# constant function pointer (no _cffi_f_%s wrapper)
|
|
||||||
self._generate_gen_const(False, name, tp)
|
|
||||||
return
|
|
||||||
prnt = self._prnt
|
|
||||||
numargs = len(tp.args)
|
|
||||||
argnames = []
|
|
||||||
for i, type in enumerate(tp.args):
|
|
||||||
indirection = ''
|
|
||||||
if isinstance(type, model.StructOrUnion):
|
|
||||||
indirection = '*'
|
|
||||||
argnames.append('%sx%d' % (indirection, i))
|
|
||||||
context = 'argument of %s' % name
|
|
||||||
arglist = [type.get_c_name(' %s' % arg, context)
|
|
||||||
for type, arg in zip(tp.args, argnames)]
|
|
||||||
tpresult = tp.result
|
|
||||||
if isinstance(tpresult, model.StructOrUnion):
|
|
||||||
arglist.insert(0, tpresult.get_c_name(' *r', context))
|
|
||||||
tpresult = model.void_type
|
|
||||||
arglist = ', '.join(arglist) or 'void'
|
|
||||||
wrappername = '_cffi_f_%s' % name
|
|
||||||
self.export_symbols.append(wrappername)
|
|
||||||
if tp.abi:
|
|
||||||
abi = tp.abi + ' '
|
|
||||||
else:
|
|
||||||
abi = ''
|
|
||||||
funcdecl = ' %s%s(%s)' % (abi, wrappername, arglist)
|
|
||||||
context = 'result of %s' % name
|
|
||||||
prnt(tpresult.get_c_name(funcdecl, context))
|
|
||||||
prnt('{')
|
|
||||||
#
|
|
||||||
if isinstance(tp.result, model.StructOrUnion):
|
|
||||||
result_code = '*r = '
|
|
||||||
elif not isinstance(tp.result, model.VoidType):
|
|
||||||
result_code = 'return '
|
|
||||||
else:
|
|
||||||
result_code = ''
|
|
||||||
prnt(' %s%s(%s);' % (result_code, name, ', '.join(argnames)))
|
|
||||||
prnt('}')
|
|
||||||
prnt()
|
|
||||||
|
|
||||||
_loading_gen_function = _loaded_noop
|
|
||||||
|
|
||||||
def _loaded_gen_function(self, tp, name, module, library):
|
|
||||||
assert isinstance(tp, model.FunctionPtrType)
|
|
||||||
if tp.ellipsis:
|
|
||||||
newfunction = self._load_constant(False, tp, name, module)
|
|
||||||
else:
|
|
||||||
indirections = []
|
|
||||||
base_tp = tp
|
|
||||||
if (any(isinstance(typ, model.StructOrUnion) for typ in tp.args)
|
|
||||||
or isinstance(tp.result, model.StructOrUnion)):
|
|
||||||
indirect_args = []
|
|
||||||
for i, typ in enumerate(tp.args):
|
|
||||||
if isinstance(typ, model.StructOrUnion):
|
|
||||||
typ = model.PointerType(typ)
|
|
||||||
indirections.append((i, typ))
|
|
||||||
indirect_args.append(typ)
|
|
||||||
indirect_result = tp.result
|
|
||||||
if isinstance(indirect_result, model.StructOrUnion):
|
|
||||||
if indirect_result.fldtypes is None:
|
|
||||||
raise TypeError("'%s' is used as result type, "
|
|
||||||
"but is opaque" % (
|
|
||||||
indirect_result._get_c_name(),))
|
|
||||||
indirect_result = model.PointerType(indirect_result)
|
|
||||||
indirect_args.insert(0, indirect_result)
|
|
||||||
indirections.insert(0, ("result", indirect_result))
|
|
||||||
indirect_result = model.void_type
|
|
||||||
tp = model.FunctionPtrType(tuple(indirect_args),
|
|
||||||
indirect_result, tp.ellipsis)
|
|
||||||
BFunc = self.ffi._get_cached_btype(tp)
|
|
||||||
wrappername = '_cffi_f_%s' % name
|
|
||||||
newfunction = module.load_function(BFunc, wrappername)
|
|
||||||
for i, typ in indirections:
|
|
||||||
newfunction = self._make_struct_wrapper(newfunction, i, typ,
|
|
||||||
base_tp)
|
|
||||||
setattr(library, name, newfunction)
|
|
||||||
type(library)._cffi_dir.append(name)
|
|
||||||
|
|
||||||
def _make_struct_wrapper(self, oldfunc, i, tp, base_tp):
|
|
||||||
backend = self.ffi._backend
|
|
||||||
BType = self.ffi._get_cached_btype(tp)
|
|
||||||
if i == "result":
|
|
||||||
ffi = self.ffi
|
|
||||||
def newfunc(*args):
|
|
||||||
res = ffi.new(BType)
|
|
||||||
oldfunc(res, *args)
|
|
||||||
return res[0]
|
|
||||||
else:
|
|
||||||
def newfunc(*args):
|
|
||||||
args = args[:i] + (backend.newp(BType, args[i]),) + args[i+1:]
|
|
||||||
return oldfunc(*args)
|
|
||||||
newfunc._cffi_base_type = base_tp
|
|
||||||
return newfunc
|
|
||||||
|
|
||||||
# ----------
|
|
||||||
# named structs
|
|
||||||
|
|
||||||
def _generate_gen_struct_decl(self, tp, name):
|
|
||||||
assert name == tp.name
|
|
||||||
self._generate_struct_or_union_decl(tp, 'struct', name)
|
|
||||||
|
|
||||||
def _loading_gen_struct(self, tp, name, module):
|
|
||||||
self._loading_struct_or_union(tp, 'struct', name, module)
|
|
||||||
|
|
||||||
def _loaded_gen_struct(self, tp, name, module, **kwds):
|
|
||||||
self._loaded_struct_or_union(tp)
|
|
||||||
|
|
||||||
def _generate_gen_union_decl(self, tp, name):
|
|
||||||
assert name == tp.name
|
|
||||||
self._generate_struct_or_union_decl(tp, 'union', name)
|
|
||||||
|
|
||||||
def _loading_gen_union(self, tp, name, module):
|
|
||||||
self._loading_struct_or_union(tp, 'union', name, module)
|
|
||||||
|
|
||||||
def _loaded_gen_union(self, tp, name, module, **kwds):
|
|
||||||
self._loaded_struct_or_union(tp)
|
|
||||||
|
|
||||||
def _generate_struct_or_union_decl(self, tp, prefix, name):
|
|
||||||
if tp.fldnames is None:
|
|
||||||
return # nothing to do with opaque structs
|
|
||||||
checkfuncname = '_cffi_check_%s_%s' % (prefix, name)
|
|
||||||
layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
|
|
||||||
cname = ('%s %s' % (prefix, name)).strip()
|
|
||||||
#
|
|
||||||
prnt = self._prnt
|
|
||||||
prnt('static void %s(%s *p)' % (checkfuncname, cname))
|
|
||||||
prnt('{')
|
|
||||||
prnt(' /* only to generate compile-time warnings or errors */')
|
|
||||||
prnt(' (void)p;')
|
|
||||||
for fname, ftype, fbitsize, fqual in tp.enumfields():
|
|
||||||
if (isinstance(ftype, model.PrimitiveType)
|
|
||||||
and ftype.is_integer_type()) or fbitsize >= 0:
|
|
||||||
# accept all integers, but complain on float or double
|
|
||||||
prnt(' (void)((p->%s) << 1);' % fname)
|
|
||||||
else:
|
|
||||||
# only accept exactly the type declared.
|
|
||||||
try:
|
|
||||||
prnt(' { %s = &p->%s; (void)tmp; }' % (
|
|
||||||
ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual),
|
|
||||||
fname))
|
|
||||||
except ffiplatform.VerificationError as e:
|
|
||||||
prnt(' /* %s */' % str(e)) # cannot verify it, ignore
|
|
||||||
prnt('}')
|
|
||||||
self.export_symbols.append(layoutfuncname)
|
|
||||||
prnt('intptr_t %s(intptr_t i)' % (layoutfuncname,))
|
|
||||||
prnt('{')
|
|
||||||
prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname)
|
|
||||||
prnt(' static intptr_t nums[] = {')
|
|
||||||
prnt(' sizeof(%s),' % cname)
|
|
||||||
prnt(' offsetof(struct _cffi_aligncheck, y),')
|
|
||||||
for fname, ftype, fbitsize, fqual in tp.enumfields():
|
|
||||||
if fbitsize >= 0:
|
|
||||||
continue # xxx ignore fbitsize for now
|
|
||||||
prnt(' offsetof(%s, %s),' % (cname, fname))
|
|
||||||
if isinstance(ftype, model.ArrayType) and ftype.length is None:
|
|
||||||
prnt(' 0, /* %s */' % ftype._get_c_name())
|
|
||||||
else:
|
|
||||||
prnt(' sizeof(((%s *)0)->%s),' % (cname, fname))
|
|
||||||
prnt(' -1')
|
|
||||||
prnt(' };')
|
|
||||||
prnt(' return nums[i];')
|
|
||||||
prnt(' /* the next line is not executed, but compiled */')
|
|
||||||
prnt(' %s(0);' % (checkfuncname,))
|
|
||||||
prnt('}')
|
|
||||||
prnt()
|
|
||||||
|
|
||||||
def _loading_struct_or_union(self, tp, prefix, name, module):
|
|
||||||
if tp.fldnames is None:
|
|
||||||
return # nothing to do with opaque structs
|
|
||||||
layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
|
|
||||||
#
|
|
||||||
BFunc = self.ffi._typeof_locked("intptr_t(*)(intptr_t)")[0]
|
|
||||||
function = module.load_function(BFunc, layoutfuncname)
|
|
||||||
layout = []
|
|
||||||
num = 0
|
|
||||||
while True:
|
|
||||||
x = function(num)
|
|
||||||
if x < 0: break
|
|
||||||
layout.append(x)
|
|
||||||
num += 1
|
|
||||||
if isinstance(tp, model.StructOrUnion) and tp.partial:
|
|
||||||
# use the function()'s sizes and offsets to guide the
|
|
||||||
# layout of the struct
|
|
||||||
totalsize = layout[0]
|
|
||||||
totalalignment = layout[1]
|
|
||||||
fieldofs = layout[2::2]
|
|
||||||
fieldsize = layout[3::2]
|
|
||||||
tp.force_flatten()
|
|
||||||
assert len(fieldofs) == len(fieldsize) == len(tp.fldnames)
|
|
||||||
tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment
|
|
||||||
else:
|
|
||||||
cname = ('%s %s' % (prefix, name)).strip()
|
|
||||||
self._struct_pending_verification[tp] = layout, cname
|
|
||||||
|
|
||||||
def _loaded_struct_or_union(self, tp):
|
|
||||||
if tp.fldnames is None:
|
|
||||||
return # nothing to do with opaque structs
|
|
||||||
self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered
|
|
||||||
|
|
||||||
if tp in self._struct_pending_verification:
|
|
||||||
# check that the layout sizes and offsets match the real ones
|
|
||||||
def check(realvalue, expectedvalue, msg):
|
|
||||||
if realvalue != expectedvalue:
|
|
||||||
raise ffiplatform.VerificationError(
|
|
||||||
"%s (we have %d, but C compiler says %d)"
|
|
||||||
% (msg, expectedvalue, realvalue))
|
|
||||||
ffi = self.ffi
|
|
||||||
BStruct = ffi._get_cached_btype(tp)
|
|
||||||
layout, cname = self._struct_pending_verification.pop(tp)
|
|
||||||
check(layout[0], ffi.sizeof(BStruct), "wrong total size")
|
|
||||||
check(layout[1], ffi.alignof(BStruct), "wrong total alignment")
|
|
||||||
i = 2
|
|
||||||
for fname, ftype, fbitsize, fqual in tp.enumfields():
|
|
||||||
if fbitsize >= 0:
|
|
||||||
continue # xxx ignore fbitsize for now
|
|
||||||
check(layout[i], ffi.offsetof(BStruct, fname),
|
|
||||||
"wrong offset for field %r" % (fname,))
|
|
||||||
if layout[i+1] != 0:
|
|
||||||
BField = ffi._get_cached_btype(ftype)
|
|
||||||
check(layout[i+1], ffi.sizeof(BField),
|
|
||||||
"wrong size for field %r" % (fname,))
|
|
||||||
i += 2
|
|
||||||
assert i == len(layout)
|
|
||||||
|
|
||||||
# ----------
|
|
||||||
# 'anonymous' declarations. These are produced for anonymous structs
|
|
||||||
# or unions; the 'name' is obtained by a typedef.
|
|
||||||
|
|
||||||
def _generate_gen_anonymous_decl(self, tp, name):
|
|
||||||
if isinstance(tp, model.EnumType):
|
|
||||||
self._generate_gen_enum_decl(tp, name, '')
|
|
||||||
else:
|
|
||||||
self._generate_struct_or_union_decl(tp, '', name)
|
|
||||||
|
|
||||||
def _loading_gen_anonymous(self, tp, name, module):
|
|
||||||
if isinstance(tp, model.EnumType):
|
|
||||||
self._loading_gen_enum(tp, name, module, '')
|
|
||||||
else:
|
|
||||||
self._loading_struct_or_union(tp, '', name, module)
|
|
||||||
|
|
||||||
def _loaded_gen_anonymous(self, tp, name, module, **kwds):
|
|
||||||
if isinstance(tp, model.EnumType):
|
|
||||||
self._loaded_gen_enum(tp, name, module, **kwds)
|
|
||||||
else:
|
|
||||||
self._loaded_struct_or_union(tp)
|
|
||||||
|
|
||||||
# ----------
|
|
||||||
# constants, likely declared with '#define'
|
|
||||||
|
|
||||||
def _generate_gen_const(self, is_int, name, tp=None, category='const',
|
|
||||||
check_value=None):
|
|
||||||
prnt = self._prnt
|
|
||||||
funcname = '_cffi_%s_%s' % (category, name)
|
|
||||||
self.export_symbols.append(funcname)
|
|
||||||
if check_value is not None:
|
|
||||||
assert is_int
|
|
||||||
assert category == 'const'
|
|
||||||
prnt('int %s(char *out_error)' % funcname)
|
|
||||||
prnt('{')
|
|
||||||
self._check_int_constant_value(name, check_value)
|
|
||||||
prnt(' return 0;')
|
|
||||||
prnt('}')
|
|
||||||
elif is_int:
|
|
||||||
assert category == 'const'
|
|
||||||
prnt('int %s(long long *out_value)' % funcname)
|
|
||||||
prnt('{')
|
|
||||||
prnt(' *out_value = (long long)(%s);' % (name,))
|
|
||||||
prnt(' return (%s) <= 0;' % (name,))
|
|
||||||
prnt('}')
|
|
||||||
else:
|
|
||||||
assert tp is not None
|
|
||||||
assert check_value is None
|
|
||||||
if category == 'var':
|
|
||||||
ampersand = '&'
|
|
||||||
else:
|
|
||||||
ampersand = ''
|
|
||||||
extra = ''
|
|
||||||
if category == 'const' and isinstance(tp, model.StructOrUnion):
|
|
||||||
extra = 'const *'
|
|
||||||
ampersand = '&'
|
|
||||||
prnt(tp.get_c_name(' %s%s(void)' % (extra, funcname), name))
|
|
||||||
prnt('{')
|
|
||||||
prnt(' return (%s%s);' % (ampersand, name))
|
|
||||||
prnt('}')
|
|
||||||
prnt()
|
|
||||||
|
|
||||||
def _generate_gen_constant_decl(self, tp, name):
|
|
||||||
is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type()
|
|
||||||
self._generate_gen_const(is_int, name, tp)
|
|
||||||
|
|
||||||
_loading_gen_constant = _loaded_noop
|
|
||||||
|
|
||||||
def _load_constant(self, is_int, tp, name, module, check_value=None):
|
|
||||||
funcname = '_cffi_const_%s' % name
|
|
||||||
if check_value is not None:
|
|
||||||
assert is_int
|
|
||||||
self._load_known_int_constant(module, funcname)
|
|
||||||
value = check_value
|
|
||||||
elif is_int:
|
|
||||||
BType = self.ffi._typeof_locked("long long*")[0]
|
|
||||||
BFunc = self.ffi._typeof_locked("int(*)(long long*)")[0]
|
|
||||||
function = module.load_function(BFunc, funcname)
|
|
||||||
p = self.ffi.new(BType)
|
|
||||||
negative = function(p)
|
|
||||||
value = int(p[0])
|
|
||||||
if value < 0 and not negative:
|
|
||||||
BLongLong = self.ffi._typeof_locked("long long")[0]
|
|
||||||
value += (1 << (8*self.ffi.sizeof(BLongLong)))
|
|
||||||
else:
|
|
||||||
assert check_value is None
|
|
||||||
fntypeextra = '(*)(void)'
|
|
||||||
if isinstance(tp, model.StructOrUnion):
|
|
||||||
fntypeextra = '*' + fntypeextra
|
|
||||||
BFunc = self.ffi._typeof_locked(tp.get_c_name(fntypeextra, name))[0]
|
|
||||||
function = module.load_function(BFunc, funcname)
|
|
||||||
value = function()
|
|
||||||
if isinstance(tp, model.StructOrUnion):
|
|
||||||
value = value[0]
|
|
||||||
return value
|
|
||||||
|
|
||||||
def _loaded_gen_constant(self, tp, name, module, library):
|
|
||||||
is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type()
|
|
||||||
value = self._load_constant(is_int, tp, name, module)
|
|
||||||
setattr(library, name, value)
|
|
||||||
type(library)._cffi_dir.append(name)
|
|
||||||
|
|
||||||
# ----------
|
|
||||||
# enums
|
|
||||||
|
|
||||||
def _check_int_constant_value(self, name, value):
|
|
||||||
prnt = self._prnt
|
|
||||||
if value <= 0:
|
|
||||||
prnt(' if ((%s) > 0 || (long)(%s) != %dL) {' % (
|
|
||||||
name, name, value))
|
|
||||||
else:
|
|
||||||
prnt(' if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % (
|
|
||||||
name, name, value))
|
|
||||||
prnt(' char buf[64];')
|
|
||||||
prnt(' if ((%s) <= 0)' % name)
|
|
||||||
prnt(' sprintf(buf, "%%ld", (long)(%s));' % name)
|
|
||||||
prnt(' else')
|
|
||||||
prnt(' sprintf(buf, "%%lu", (unsigned long)(%s));' %
|
|
||||||
name)
|
|
||||||
prnt(' sprintf(out_error, "%s has the real value %s, not %s",')
|
|
||||||
prnt(' "%s", buf, "%d");' % (name[:100], value))
|
|
||||||
prnt(' return -1;')
|
|
||||||
prnt(' }')
|
|
||||||
|
|
||||||
def _load_known_int_constant(self, module, funcname):
|
|
||||||
BType = self.ffi._typeof_locked("char[]")[0]
|
|
||||||
BFunc = self.ffi._typeof_locked("int(*)(char*)")[0]
|
|
||||||
function = module.load_function(BFunc, funcname)
|
|
||||||
p = self.ffi.new(BType, 256)
|
|
||||||
if function(p) < 0:
|
|
||||||
error = self.ffi.string(p)
|
|
||||||
if sys.version_info >= (3,):
|
|
||||||
error = str(error, 'utf-8')
|
|
||||||
raise ffiplatform.VerificationError(error)
|
|
||||||
|
|
||||||
def _enum_funcname(self, prefix, name):
|
|
||||||
# "$enum_$1" => "___D_enum____D_1"
|
|
||||||
name = name.replace('$', '___D_')
|
|
||||||
return '_cffi_e_%s_%s' % (prefix, name)
|
|
||||||
|
|
||||||
def _generate_gen_enum_decl(self, tp, name, prefix='enum'):
|
|
||||||
if tp.partial:
|
|
||||||
for enumerator in tp.enumerators:
|
|
||||||
self._generate_gen_const(True, enumerator)
|
|
||||||
return
|
|
||||||
#
|
|
||||||
funcname = self._enum_funcname(prefix, name)
|
|
||||||
self.export_symbols.append(funcname)
|
|
||||||
prnt = self._prnt
|
|
||||||
prnt('int %s(char *out_error)' % funcname)
|
|
||||||
prnt('{')
|
|
||||||
for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
|
|
||||||
self._check_int_constant_value(enumerator, enumvalue)
|
|
||||||
prnt(' return 0;')
|
|
||||||
prnt('}')
|
|
||||||
prnt()
|
|
||||||
|
|
||||||
def _loading_gen_enum(self, tp, name, module, prefix='enum'):
|
|
||||||
if tp.partial:
|
|
||||||
enumvalues = [self._load_constant(True, tp, enumerator, module)
|
|
||||||
for enumerator in tp.enumerators]
|
|
||||||
tp.enumvalues = tuple(enumvalues)
|
|
||||||
tp.partial_resolved = True
|
|
||||||
else:
|
|
||||||
funcname = self._enum_funcname(prefix, name)
|
|
||||||
self._load_known_int_constant(module, funcname)
|
|
||||||
|
|
||||||
def _loaded_gen_enum(self, tp, name, module, library):
|
|
||||||
for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
|
|
||||||
setattr(library, enumerator, enumvalue)
|
|
||||||
type(library)._cffi_dir.append(enumerator)
|
|
||||||
|
|
||||||
# ----------
|
|
||||||
# macros: for now only for integers
|
|
||||||
|
|
||||||
def _generate_gen_macro_decl(self, tp, name):
|
|
||||||
if tp == '...':
|
|
||||||
check_value = None
|
|
||||||
else:
|
|
||||||
check_value = tp # an integer
|
|
||||||
self._generate_gen_const(True, name, check_value=check_value)
|
|
||||||
|
|
||||||
_loading_gen_macro = _loaded_noop
|
|
||||||
|
|
||||||
def _loaded_gen_macro(self, tp, name, module, library):
|
|
||||||
if tp == '...':
|
|
||||||
check_value = None
|
|
||||||
else:
|
|
||||||
check_value = tp # an integer
|
|
||||||
value = self._load_constant(True, tp, name, module,
|
|
||||||
check_value=check_value)
|
|
||||||
setattr(library, name, value)
|
|
||||||
type(library)._cffi_dir.append(name)
|
|
||||||
|
|
||||||
# ----------
|
|
||||||
# global variables
|
|
||||||
|
|
||||||
def _generate_gen_variable_decl(self, tp, name):
|
|
||||||
if isinstance(tp, model.ArrayType):
|
|
||||||
if tp.length == '...':
|
|
||||||
prnt = self._prnt
|
|
||||||
funcname = '_cffi_sizeof_%s' % (name,)
|
|
||||||
self.export_symbols.append(funcname)
|
|
||||||
prnt("size_t %s(void)" % funcname)
|
|
||||||
prnt("{")
|
|
||||||
prnt(" return sizeof(%s);" % (name,))
|
|
||||||
prnt("}")
|
|
||||||
tp_ptr = model.PointerType(tp.item)
|
|
||||||
self._generate_gen_const(False, name, tp_ptr)
|
|
||||||
else:
|
|
||||||
tp_ptr = model.PointerType(tp)
|
|
||||||
self._generate_gen_const(False, name, tp_ptr, category='var')
|
|
||||||
|
|
||||||
_loading_gen_variable = _loaded_noop
|
|
||||||
|
|
||||||
def _loaded_gen_variable(self, tp, name, module, library):
|
|
||||||
if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the
|
|
||||||
# sense that "a=..." is forbidden
|
|
||||||
if tp.length == '...':
|
|
||||||
funcname = '_cffi_sizeof_%s' % (name,)
|
|
||||||
BFunc = self.ffi._typeof_locked('size_t(*)(void)')[0]
|
|
||||||
function = module.load_function(BFunc, funcname)
|
|
||||||
size = function()
|
|
||||||
BItemType = self.ffi._get_cached_btype(tp.item)
|
|
||||||
length, rest = divmod(size, self.ffi.sizeof(BItemType))
|
|
||||||
if rest != 0:
|
|
||||||
raise ffiplatform.VerificationError(
|
|
||||||
"bad size: %r does not seem to be an array of %s" %
|
|
||||||
(name, tp.item))
|
|
||||||
tp = tp.resolve_length(length)
|
|
||||||
tp_ptr = model.PointerType(tp.item)
|
|
||||||
value = self._load_constant(False, tp_ptr, name, module)
|
|
||||||
# 'value' is a <cdata 'type *'> which we have to replace with
|
|
||||||
# a <cdata 'type[N]'> if the N is actually known
|
|
||||||
if tp.length is not None:
|
|
||||||
BArray = self.ffi._get_cached_btype(tp)
|
|
||||||
value = self.ffi.cast(BArray, value)
|
|
||||||
setattr(library, name, value)
|
|
||||||
type(library)._cffi_dir.append(name)
|
|
||||||
return
|
|
||||||
# remove ptr=<cdata 'int *'> from the library instance, and replace
|
|
||||||
# it by a property on the class, which reads/writes into ptr[0].
|
|
||||||
funcname = '_cffi_var_%s' % name
|
|
||||||
BFunc = self.ffi._typeof_locked(tp.get_c_name('*(*)(void)', name))[0]
|
|
||||||
function = module.load_function(BFunc, funcname)
|
|
||||||
ptr = function()
|
|
||||||
def getter(library):
|
|
||||||
return ptr[0]
|
|
||||||
def setter(library, value):
|
|
||||||
ptr[0] = value
|
|
||||||
setattr(type(library), name, property(getter, setter))
|
|
||||||
type(library)._cffi_dir.append(name)
|
|
||||||
|
|
||||||
cffimod_header = r'''
|
|
||||||
#include <stdio.h>
|
|
||||||
#include <stddef.h>
|
|
||||||
#include <stdarg.h>
|
|
||||||
#include <errno.h>
|
|
||||||
#include <sys/types.h> /* XXX for ssize_t on some platforms */
|
|
||||||
|
|
||||||
/* this block of #ifs should be kept exactly identical between
|
|
||||||
c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py */
|
|
||||||
#if defined(_MSC_VER)
|
|
||||||
# include <malloc.h> /* for alloca() */
|
|
||||||
# if _MSC_VER < 1600 /* MSVC < 2010 */
|
|
||||||
typedef __int8 int8_t;
|
|
||||||
typedef __int16 int16_t;
|
|
||||||
typedef __int32 int32_t;
|
|
||||||
typedef __int64 int64_t;
|
|
||||||
typedef unsigned __int8 uint8_t;
|
|
||||||
typedef unsigned __int16 uint16_t;
|
|
||||||
typedef unsigned __int32 uint32_t;
|
|
||||||
typedef unsigned __int64 uint64_t;
|
|
||||||
typedef __int8 int_least8_t;
|
|
||||||
typedef __int16 int_least16_t;
|
|
||||||
typedef __int32 int_least32_t;
|
|
||||||
typedef __int64 int_least64_t;
|
|
||||||
typedef unsigned __int8 uint_least8_t;
|
|
||||||
typedef unsigned __int16 uint_least16_t;
|
|
||||||
typedef unsigned __int32 uint_least32_t;
|
|
||||||
typedef unsigned __int64 uint_least64_t;
|
|
||||||
typedef __int8 int_fast8_t;
|
|
||||||
typedef __int16 int_fast16_t;
|
|
||||||
typedef __int32 int_fast32_t;
|
|
||||||
typedef __int64 int_fast64_t;
|
|
||||||
typedef unsigned __int8 uint_fast8_t;
|
|
||||||
typedef unsigned __int16 uint_fast16_t;
|
|
||||||
typedef unsigned __int32 uint_fast32_t;
|
|
||||||
typedef unsigned __int64 uint_fast64_t;
|
|
||||||
typedef __int64 intmax_t;
|
|
||||||
typedef unsigned __int64 uintmax_t;
|
|
||||||
# else
|
|
||||||
# include <stdint.h>
|
|
||||||
# endif
|
|
||||||
# if _MSC_VER < 1800 /* MSVC < 2013 */
|
|
||||||
typedef unsigned char _Bool;
|
|
||||||
# endif
|
|
||||||
#else
|
|
||||||
# include <stdint.h>
|
|
||||||
# if (defined (__SVR4) && defined (__sun)) || defined(_AIX)
|
|
||||||
# include <alloca.h>
|
|
||||||
# endif
|
|
||||||
#endif
|
|
||||||
'''
|
|
|
@ -1,313 +0,0 @@
|
||||||
import sys, os, binascii, shutil, io
|
|
||||||
from . import __version_verifier_modules__
|
|
||||||
from . import ffiplatform
|
|
||||||
|
|
||||||
if sys.version_info >= (3, 3):
|
|
||||||
import importlib.machinery
|
|
||||||
def _extension_suffixes():
|
|
||||||
return importlib.machinery.EXTENSION_SUFFIXES[:]
|
|
||||||
else:
|
|
||||||
import imp
|
|
||||||
def _extension_suffixes():
|
|
||||||
return [suffix for suffix, _, type in imp.get_suffixes()
|
|
||||||
if type == imp.C_EXTENSION]
|
|
||||||
|
|
||||||
|
|
||||||
if sys.version_info >= (3,):
|
|
||||||
NativeIO = io.StringIO
|
|
||||||
else:
|
|
||||||
class NativeIO(io.BytesIO):
|
|
||||||
def write(self, s):
|
|
||||||
if isinstance(s, unicode):
|
|
||||||
s = s.encode('ascii')
|
|
||||||
super(NativeIO, self).write(s)
|
|
||||||
|
|
||||||
def _hack_at_distutils():
|
|
||||||
# Windows-only workaround for some configurations: see
|
|
||||||
# https://bugs.python.org/issue23246 (Python 2.7 with
|
|
||||||
# a specific MS compiler suite download)
|
|
||||||
if sys.platform == "win32":
|
|
||||||
try:
|
|
||||||
import setuptools # for side-effects, patches distutils
|
|
||||||
except ImportError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class Verifier(object):
|
|
||||||
|
|
||||||
def __init__(self, ffi, preamble, tmpdir=None, modulename=None,
|
|
||||||
ext_package=None, tag='', force_generic_engine=False,
|
|
||||||
source_extension='.c', flags=None, relative_to=None, **kwds):
|
|
||||||
if ffi._parser._uses_new_feature:
|
|
||||||
raise ffiplatform.VerificationError(
|
|
||||||
"feature not supported with ffi.verify(), but only "
|
|
||||||
"with ffi.set_source(): %s" % (ffi._parser._uses_new_feature,))
|
|
||||||
self.ffi = ffi
|
|
||||||
self.preamble = preamble
|
|
||||||
if not modulename:
|
|
||||||
flattened_kwds = ffiplatform.flatten(kwds)
|
|
||||||
vengine_class = _locate_engine_class(ffi, force_generic_engine)
|
|
||||||
self._vengine = vengine_class(self)
|
|
||||||
self._vengine.patch_extension_kwds(kwds)
|
|
||||||
self.flags = flags
|
|
||||||
self.kwds = self.make_relative_to(kwds, relative_to)
|
|
||||||
#
|
|
||||||
if modulename:
|
|
||||||
if tag:
|
|
||||||
raise TypeError("can't specify both 'modulename' and 'tag'")
|
|
||||||
else:
|
|
||||||
key = '\x00'.join([sys.version[:3], __version_verifier_modules__,
|
|
||||||
preamble, flattened_kwds] +
|
|
||||||
ffi._cdefsources)
|
|
||||||
if sys.version_info >= (3,):
|
|
||||||
key = key.encode('utf-8')
|
|
||||||
k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff)
|
|
||||||
k1 = k1.lstrip('0x').rstrip('L')
|
|
||||||
k2 = hex(binascii.crc32(key[1::2]) & 0xffffffff)
|
|
||||||
k2 = k2.lstrip('0').rstrip('L')
|
|
||||||
modulename = '_cffi_%s_%s%s%s' % (tag, self._vengine._class_key,
|
|
||||||
k1, k2)
|
|
||||||
suffix = _get_so_suffixes()[0]
|
|
||||||
self.tmpdir = tmpdir or _caller_dir_pycache()
|
|
||||||
self.sourcefilename = os.path.join(self.tmpdir, modulename + source_extension)
|
|
||||||
self.modulefilename = os.path.join(self.tmpdir, modulename + suffix)
|
|
||||||
self.ext_package = ext_package
|
|
||||||
self._has_source = False
|
|
||||||
self._has_module = False
|
|
||||||
|
|
||||||
def write_source(self, file=None):
|
|
||||||
"""Write the C source code. It is produced in 'self.sourcefilename',
|
|
||||||
which can be tweaked beforehand."""
|
|
||||||
with self.ffi._lock:
|
|
||||||
if self._has_source and file is None:
|
|
||||||
raise ffiplatform.VerificationError(
|
|
||||||
"source code already written")
|
|
||||||
self._write_source(file)
|
|
||||||
|
|
||||||
def compile_module(self):
|
|
||||||
"""Write the C source code (if not done already) and compile it.
|
|
||||||
This produces a dynamic link library in 'self.modulefilename'."""
|
|
||||||
with self.ffi._lock:
|
|
||||||
if self._has_module:
|
|
||||||
raise ffiplatform.VerificationError("module already compiled")
|
|
||||||
if not self._has_source:
|
|
||||||
self._write_source()
|
|
||||||
self._compile_module()
|
|
||||||
|
|
||||||
def load_library(self):
|
|
||||||
"""Get a C module from this Verifier instance.
|
|
||||||
Returns an instance of a FFILibrary class that behaves like the
|
|
||||||
objects returned by ffi.dlopen(), but that delegates all
|
|
||||||
operations to the C module. If necessary, the C code is written
|
|
||||||
and compiled first.
|
|
||||||
"""
|
|
||||||
with self.ffi._lock:
|
|
||||||
if not self._has_module:
|
|
||||||
self._locate_module()
|
|
||||||
if not self._has_module:
|
|
||||||
if not self._has_source:
|
|
||||||
self._write_source()
|
|
||||||
self._compile_module()
|
|
||||||
return self._load_library()
|
|
||||||
|
|
||||||
def get_module_name(self):
|
|
||||||
basename = os.path.basename(self.modulefilename)
|
|
||||||
# kill both the .so extension and the other .'s, as introduced
|
|
||||||
# by Python 3: 'basename.cpython-33m.so'
|
|
||||||
basename = basename.split('.', 1)[0]
|
|
||||||
# and the _d added in Python 2 debug builds --- but try to be
|
|
||||||
# conservative and not kill a legitimate _d
|
|
||||||
if basename.endswith('_d') and hasattr(sys, 'gettotalrefcount'):
|
|
||||||
basename = basename[:-2]
|
|
||||||
return basename
|
|
||||||
|
|
||||||
def get_extension(self):
|
|
||||||
_hack_at_distutils() # backward compatibility hack
|
|
||||||
if not self._has_source:
|
|
||||||
with self.ffi._lock:
|
|
||||||
if not self._has_source:
|
|
||||||
self._write_source()
|
|
||||||
sourcename = ffiplatform.maybe_relative_path(self.sourcefilename)
|
|
||||||
modname = self.get_module_name()
|
|
||||||
return ffiplatform.get_extension(sourcename, modname, **self.kwds)
|
|
||||||
|
|
||||||
def generates_python_module(self):
|
|
||||||
return self._vengine._gen_python_module
|
|
||||||
|
|
||||||
def make_relative_to(self, kwds, relative_to):
|
|
||||||
if relative_to and os.path.dirname(relative_to):
|
|
||||||
dirname = os.path.dirname(relative_to)
|
|
||||||
kwds = kwds.copy()
|
|
||||||
for key in ffiplatform.LIST_OF_FILE_NAMES:
|
|
||||||
if key in kwds:
|
|
||||||
lst = kwds[key]
|
|
||||||
if not isinstance(lst, (list, tuple)):
|
|
||||||
raise TypeError("keyword '%s' should be a list or tuple"
|
|
||||||
% (key,))
|
|
||||||
lst = [os.path.join(dirname, fn) for fn in lst]
|
|
||||||
kwds[key] = lst
|
|
||||||
return kwds
|
|
||||||
|
|
||||||
# ----------
|
|
||||||
|
|
||||||
def _locate_module(self):
|
|
||||||
if not os.path.isfile(self.modulefilename):
|
|
||||||
if self.ext_package:
|
|
||||||
try:
|
|
||||||
pkg = __import__(self.ext_package, None, None, ['__doc__'])
|
|
||||||
except ImportError:
|
|
||||||
return # cannot import the package itself, give up
|
|
||||||
# (e.g. it might be called differently before installation)
|
|
||||||
path = pkg.__path__
|
|
||||||
else:
|
|
||||||
path = None
|
|
||||||
filename = self._vengine.find_module(self.get_module_name(), path,
|
|
||||||
_get_so_suffixes())
|
|
||||||
if filename is None:
|
|
||||||
return
|
|
||||||
self.modulefilename = filename
|
|
||||||
self._vengine.collect_types()
|
|
||||||
self._has_module = True
|
|
||||||
|
|
||||||
def _write_source_to(self, file):
|
|
||||||
self._vengine._f = file
|
|
||||||
try:
|
|
||||||
self._vengine.write_source_to_f()
|
|
||||||
finally:
|
|
||||||
del self._vengine._f
|
|
||||||
|
|
||||||
def _write_source(self, file=None):
|
|
||||||
if file is not None:
|
|
||||||
self._write_source_to(file)
|
|
||||||
else:
|
|
||||||
# Write our source file to an in memory file.
|
|
||||||
f = NativeIO()
|
|
||||||
self._write_source_to(f)
|
|
||||||
source_data = f.getvalue()
|
|
||||||
|
|
||||||
# Determine if this matches the current file
|
|
||||||
if os.path.exists(self.sourcefilename):
|
|
||||||
with open(self.sourcefilename, "r") as fp:
|
|
||||||
needs_written = not (fp.read() == source_data)
|
|
||||||
else:
|
|
||||||
needs_written = True
|
|
||||||
|
|
||||||
# Actually write the file out if it doesn't match
|
|
||||||
if needs_written:
|
|
||||||
_ensure_dir(self.sourcefilename)
|
|
||||||
with open(self.sourcefilename, "w") as fp:
|
|
||||||
fp.write(source_data)
|
|
||||||
|
|
||||||
# Set this flag
|
|
||||||
self._has_source = True
|
|
||||||
|
|
||||||
def _compile_module(self):
|
|
||||||
# compile this C source
|
|
||||||
tmpdir = os.path.dirname(self.sourcefilename)
|
|
||||||
outputfilename = ffiplatform.compile(tmpdir, self.get_extension())
|
|
||||||
try:
|
|
||||||
same = ffiplatform.samefile(outputfilename, self.modulefilename)
|
|
||||||
except OSError:
|
|
||||||
same = False
|
|
||||||
if not same:
|
|
||||||
_ensure_dir(self.modulefilename)
|
|
||||||
shutil.move(outputfilename, self.modulefilename)
|
|
||||||
self._has_module = True
|
|
||||||
|
|
||||||
def _load_library(self):
|
|
||||||
assert self._has_module
|
|
||||||
if self.flags is not None:
|
|
||||||
return self._vengine.load_library(self.flags)
|
|
||||||
else:
|
|
||||||
return self._vengine.load_library()
|
|
||||||
|
|
||||||
# ____________________________________________________________
|
|
||||||
|
|
||||||
_FORCE_GENERIC_ENGINE = False # for tests
|
|
||||||
|
|
||||||
def _locate_engine_class(ffi, force_generic_engine):
|
|
||||||
if _FORCE_GENERIC_ENGINE:
|
|
||||||
force_generic_engine = True
|
|
||||||
if not force_generic_engine:
|
|
||||||
if '__pypy__' in sys.builtin_module_names:
|
|
||||||
force_generic_engine = True
|
|
||||||
else:
|
|
||||||
try:
|
|
||||||
import _cffi_backend
|
|
||||||
except ImportError:
|
|
||||||
_cffi_backend = '?'
|
|
||||||
if ffi._backend is not _cffi_backend:
|
|
||||||
force_generic_engine = True
|
|
||||||
if force_generic_engine:
|
|
||||||
from . import vengine_gen
|
|
||||||
return vengine_gen.VGenericEngine
|
|
||||||
else:
|
|
||||||
from . import vengine_cpy
|
|
||||||
return vengine_cpy.VCPythonEngine
|
|
||||||
|
|
||||||
# ____________________________________________________________
|
|
||||||
|
|
||||||
_TMPDIR = None
|
|
||||||
|
|
||||||
def _caller_dir_pycache():
|
|
||||||
if _TMPDIR:
|
|
||||||
return _TMPDIR
|
|
||||||
result = os.environ.get('CFFI_TMPDIR')
|
|
||||||
if result:
|
|
||||||
return result
|
|
||||||
filename = sys._getframe(2).f_code.co_filename
|
|
||||||
return os.path.abspath(os.path.join(os.path.dirname(filename),
|
|
||||||
'__pycache__'))
|
|
||||||
|
|
||||||
def set_tmpdir(dirname):
|
|
||||||
"""Set the temporary directory to use instead of __pycache__."""
|
|
||||||
global _TMPDIR
|
|
||||||
_TMPDIR = dirname
|
|
||||||
|
|
||||||
def cleanup_tmpdir(tmpdir=None, keep_so=False):
|
|
||||||
"""Clean up the temporary directory by removing all files in it
|
|
||||||
called `_cffi_*.{c,so}` as well as the `build` subdirectory."""
|
|
||||||
tmpdir = tmpdir or _caller_dir_pycache()
|
|
||||||
try:
|
|
||||||
filelist = os.listdir(tmpdir)
|
|
||||||
except OSError:
|
|
||||||
return
|
|
||||||
if keep_so:
|
|
||||||
suffix = '.c' # only remove .c files
|
|
||||||
else:
|
|
||||||
suffix = _get_so_suffixes()[0].lower()
|
|
||||||
for fn in filelist:
|
|
||||||
if fn.lower().startswith('_cffi_') and (
|
|
||||||
fn.lower().endswith(suffix) or fn.lower().endswith('.c')):
|
|
||||||
try:
|
|
||||||
os.unlink(os.path.join(tmpdir, fn))
|
|
||||||
except OSError:
|
|
||||||
pass
|
|
||||||
clean_dir = [os.path.join(tmpdir, 'build')]
|
|
||||||
for dir in clean_dir:
|
|
||||||
try:
|
|
||||||
for fn in os.listdir(dir):
|
|
||||||
fn = os.path.join(dir, fn)
|
|
||||||
if os.path.isdir(fn):
|
|
||||||
clean_dir.append(fn)
|
|
||||||
else:
|
|
||||||
os.unlink(fn)
|
|
||||||
except OSError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
def _get_so_suffixes():
|
|
||||||
suffixes = _extension_suffixes()
|
|
||||||
if not suffixes:
|
|
||||||
# bah, no C_EXTENSION available. Occurs on pypy without cpyext
|
|
||||||
if sys.platform == 'win32':
|
|
||||||
suffixes = [".pyd"]
|
|
||||||
else:
|
|
||||||
suffixes = [".so"]
|
|
||||||
|
|
||||||
return suffixes
|
|
||||||
|
|
||||||
def _ensure_dir(filename):
|
|
||||||
try:
|
|
||||||
os.makedirs(os.path.dirname(filename))
|
|
||||||
except OSError:
|
|
||||||
pass
|
|
|
@ -1,85 +0,0 @@
|
||||||
Metadata-Version: 1.1
|
|
||||||
Name: cryptography
|
|
||||||
Version: 1.1
|
|
||||||
Summary: cryptography is a package which provides cryptographic recipes and primitives to Python developers.
|
|
||||||
Home-page: https://github.com/pyca/cryptography
|
|
||||||
Author: The cryptography developers
|
|
||||||
Author-email: cryptography-dev@python.org
|
|
||||||
License: BSD or Apache License, Version 2.0
|
|
||||||
Description: Cryptography
|
|
||||||
============
|
|
||||||
|
|
||||||
.. image:: https://img.shields.io/pypi/v/cryptography.svg
|
|
||||||
:target: https://pypi.python.org/pypi/cryptography/
|
|
||||||
:alt: Latest Version
|
|
||||||
|
|
||||||
.. image:: https://readthedocs.org/projects/cryptography/badge/?version=latest
|
|
||||||
:target: https://cryptography.io
|
|
||||||
:alt: Latest Docs
|
|
||||||
|
|
||||||
.. image:: https://travis-ci.org/pyca/cryptography.svg?branch=master
|
|
||||||
:target: https://travis-ci.org/pyca/cryptography
|
|
||||||
|
|
||||||
.. image:: https://codecov.io/github/pyca/cryptography/coverage.svg?branch=master
|
|
||||||
:target: https://codecov.io/github/pyca/cryptography?branch=master
|
|
||||||
|
|
||||||
|
|
||||||
``cryptography`` is a package which provides cryptographic recipes and
|
|
||||||
primitives to Python developers. Our goal is for it to be your "cryptographic
|
|
||||||
standard library". It supports Python 2.6-2.7, Python 3.3+, and PyPy 2.6+.
|
|
||||||
|
|
||||||
``cryptography`` includes both high level recipes, and low level interfaces to
|
|
||||||
common cryptographic algorithms such as symmetric ciphers, message digests and
|
|
||||||
key derivation functions. For example, to encrypt something with
|
|
||||||
``cryptography``'s high level symmetric encryption recipe:
|
|
||||||
|
|
||||||
.. code-block:: pycon
|
|
||||||
|
|
||||||
>>> from cryptography.fernet import Fernet
|
|
||||||
>>> # Put this somewhere safe!
|
|
||||||
>>> key = Fernet.generate_key()
|
|
||||||
>>> f = Fernet(key)
|
|
||||||
>>> token = f.encrypt(b"A really secret message. Not for prying eyes.")
|
|
||||||
>>> token
|
|
||||||
'...'
|
|
||||||
>>> f.decrypt(token)
|
|
||||||
'A really secret message. Not for prying eyes.'
|
|
||||||
|
|
||||||
You can find more information in the `documentation`_.
|
|
||||||
|
|
||||||
Discussion
|
|
||||||
~~~~~~~~~~
|
|
||||||
|
|
||||||
If you run into bugs, you can file them in our `issue tracker`_.
|
|
||||||
|
|
||||||
We maintain a `cryptography-dev`_ mailing list for development discussion.
|
|
||||||
|
|
||||||
You can also join ``#cryptography-dev`` on Freenode to ask questions or get
|
|
||||||
involved.
|
|
||||||
|
|
||||||
|
|
||||||
.. _`documentation`: https://cryptography.io/
|
|
||||||
.. _`issue tracker`: https://github.com/pyca/cryptography/issues
|
|
||||||
.. _`cryptography-dev`: https://mail.python.org/mailman/listinfo/cryptography-dev
|
|
||||||
|
|
||||||
Platform: UNKNOWN
|
|
||||||
Classifier: Intended Audience :: Developers
|
|
||||||
Classifier: License :: OSI Approved :: Apache Software License
|
|
||||||
Classifier: License :: OSI Approved :: BSD License
|
|
||||||
Classifier: Natural Language :: English
|
|
||||||
Classifier: Operating System :: MacOS :: MacOS X
|
|
||||||
Classifier: Operating System :: POSIX
|
|
||||||
Classifier: Operating System :: POSIX :: BSD
|
|
||||||
Classifier: Operating System :: POSIX :: Linux
|
|
||||||
Classifier: Operating System :: Microsoft :: Windows
|
|
||||||
Classifier: Programming Language :: Python
|
|
||||||
Classifier: Programming Language :: Python :: 2
|
|
||||||
Classifier: Programming Language :: Python :: 2.6
|
|
||||||
Classifier: Programming Language :: Python :: 2.7
|
|
||||||
Classifier: Programming Language :: Python :: 3
|
|
||||||
Classifier: Programming Language :: Python :: 3.3
|
|
||||||
Classifier: Programming Language :: Python :: 3.4
|
|
||||||
Classifier: Programming Language :: Python :: 3.5
|
|
||||||
Classifier: Programming Language :: Python :: Implementation :: CPython
|
|
||||||
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
|
||||||
Classifier: Topic :: Security :: Cryptography
|
|
|
@ -1,260 +0,0 @@
|
||||||
AUTHORS.rst
|
|
||||||
CHANGELOG.rst
|
|
||||||
CONTRIBUTING.rst
|
|
||||||
LICENSE
|
|
||||||
LICENSE.APACHE
|
|
||||||
LICENSE.BSD
|
|
||||||
MANIFEST.in
|
|
||||||
README.rst
|
|
||||||
setup.cfg
|
|
||||||
setup.py
|
|
||||||
docs/Makefile
|
|
||||||
docs/api-stability.rst
|
|
||||||
docs/changelog.rst
|
|
||||||
docs/community.rst
|
|
||||||
docs/conf.py
|
|
||||||
docs/cryptography-docs.py
|
|
||||||
docs/doing-a-release.rst
|
|
||||||
docs/exceptions.rst
|
|
||||||
docs/faq.rst
|
|
||||||
docs/fernet.rst
|
|
||||||
docs/glossary.rst
|
|
||||||
docs/index.rst
|
|
||||||
docs/installation.rst
|
|
||||||
docs/limitations.rst
|
|
||||||
docs/make.bat
|
|
||||||
docs/random-numbers.rst
|
|
||||||
docs/security.rst
|
|
||||||
docs/spelling_wordlist.txt
|
|
||||||
docs/_static/.keep
|
|
||||||
docs/development/c-bindings.rst
|
|
||||||
docs/development/getting-started.rst
|
|
||||||
docs/development/index.rst
|
|
||||||
docs/development/reviewing-patches.rst
|
|
||||||
docs/development/submitting-patches.rst
|
|
||||||
docs/development/test-vectors.rst
|
|
||||||
docs/development/custom-vectors/cast5.rst
|
|
||||||
docs/development/custom-vectors/idea.rst
|
|
||||||
docs/development/custom-vectors/secp256k1.rst
|
|
||||||
docs/development/custom-vectors/seed.rst
|
|
||||||
docs/development/custom-vectors/cast5/generate_cast5.py
|
|
||||||
docs/development/custom-vectors/cast5/verify_cast5.go
|
|
||||||
docs/development/custom-vectors/idea/generate_idea.py
|
|
||||||
docs/development/custom-vectors/idea/verify_idea.py
|
|
||||||
docs/development/custom-vectors/secp256k1/generate_secp256k1.py
|
|
||||||
docs/development/custom-vectors/secp256k1/verify_secp256k1.py
|
|
||||||
docs/development/custom-vectors/seed/generate_seed.py
|
|
||||||
docs/development/custom-vectors/seed/verify_seed.py
|
|
||||||
docs/hazmat/backends/commoncrypto.rst
|
|
||||||
docs/hazmat/backends/index.rst
|
|
||||||
docs/hazmat/backends/interfaces.rst
|
|
||||||
docs/hazmat/backends/multibackend.rst
|
|
||||||
docs/hazmat/backends/openssl.rst
|
|
||||||
docs/hazmat/bindings/commoncrypto.rst
|
|
||||||
docs/hazmat/bindings/index.rst
|
|
||||||
docs/hazmat/bindings/openssl.rst
|
|
||||||
docs/hazmat/primitives/constant-time.rst
|
|
||||||
docs/hazmat/primitives/cryptographic-hashes.rst
|
|
||||||
docs/hazmat/primitives/index.rst
|
|
||||||
docs/hazmat/primitives/interfaces.rst
|
|
||||||
docs/hazmat/primitives/key-derivation-functions.rst
|
|
||||||
docs/hazmat/primitives/keywrap.rst
|
|
||||||
docs/hazmat/primitives/padding.rst
|
|
||||||
docs/hazmat/primitives/symmetric-encryption.rst
|
|
||||||
docs/hazmat/primitives/twofactor.rst
|
|
||||||
docs/hazmat/primitives/asymmetric/dh.rst
|
|
||||||
docs/hazmat/primitives/asymmetric/dsa.rst
|
|
||||||
docs/hazmat/primitives/asymmetric/ec.rst
|
|
||||||
docs/hazmat/primitives/asymmetric/index.rst
|
|
||||||
docs/hazmat/primitives/asymmetric/interfaces.rst
|
|
||||||
docs/hazmat/primitives/asymmetric/rsa.rst
|
|
||||||
docs/hazmat/primitives/asymmetric/serialization.rst
|
|
||||||
docs/hazmat/primitives/asymmetric/utils.rst
|
|
||||||
docs/hazmat/primitives/mac/cmac.rst
|
|
||||||
docs/hazmat/primitives/mac/hmac.rst
|
|
||||||
docs/hazmat/primitives/mac/index.rst
|
|
||||||
docs/x509/index.rst
|
|
||||||
docs/x509/reference.rst
|
|
||||||
docs/x509/tutorial.rst
|
|
||||||
src/_cffi_src/__init__.py
|
|
||||||
src/_cffi_src/build_commoncrypto.py
|
|
||||||
src/_cffi_src/build_constant_time.py
|
|
||||||
src/_cffi_src/build_openssl.py
|
|
||||||
src/_cffi_src/build_padding.py
|
|
||||||
src/_cffi_src/utils.py
|
|
||||||
src/_cffi_src/commoncrypto/__init__.py
|
|
||||||
src/_cffi_src/commoncrypto/cf.py
|
|
||||||
src/_cffi_src/commoncrypto/common_cryptor.py
|
|
||||||
src/_cffi_src/commoncrypto/common_digest.py
|
|
||||||
src/_cffi_src/commoncrypto/common_hmac.py
|
|
||||||
src/_cffi_src/commoncrypto/common_key_derivation.py
|
|
||||||
src/_cffi_src/commoncrypto/common_symmetric_key_wrap.py
|
|
||||||
src/_cffi_src/commoncrypto/secimport.py
|
|
||||||
src/_cffi_src/commoncrypto/secitem.py
|
|
||||||
src/_cffi_src/commoncrypto/seckey.py
|
|
||||||
src/_cffi_src/commoncrypto/seckeychain.py
|
|
||||||
src/_cffi_src/commoncrypto/sectransform.py
|
|
||||||
src/_cffi_src/hazmat_src/constant_time.c
|
|
||||||
src/_cffi_src/hazmat_src/constant_time.h
|
|
||||||
src/_cffi_src/hazmat_src/padding.c
|
|
||||||
src/_cffi_src/hazmat_src/padding.h
|
|
||||||
src/_cffi_src/openssl/__init__.py
|
|
||||||
src/_cffi_src/openssl/aes.py
|
|
||||||
src/_cffi_src/openssl/asn1.py
|
|
||||||
src/_cffi_src/openssl/bignum.py
|
|
||||||
src/_cffi_src/openssl/bio.py
|
|
||||||
src/_cffi_src/openssl/cmac.py
|
|
||||||
src/_cffi_src/openssl/cms.py
|
|
||||||
src/_cffi_src/openssl/conf.py
|
|
||||||
src/_cffi_src/openssl/crypto.py
|
|
||||||
src/_cffi_src/openssl/dh.py
|
|
||||||
src/_cffi_src/openssl/dsa.py
|
|
||||||
src/_cffi_src/openssl/ec.py
|
|
||||||
src/_cffi_src/openssl/ecdh.py
|
|
||||||
src/_cffi_src/openssl/ecdsa.py
|
|
||||||
src/_cffi_src/openssl/engine.py
|
|
||||||
src/_cffi_src/openssl/err.py
|
|
||||||
src/_cffi_src/openssl/evp.py
|
|
||||||
src/_cffi_src/openssl/hmac.py
|
|
||||||
src/_cffi_src/openssl/nid.py
|
|
||||||
src/_cffi_src/openssl/objects.py
|
|
||||||
src/_cffi_src/openssl/opensslv.py
|
|
||||||
src/_cffi_src/openssl/pem.py
|
|
||||||
src/_cffi_src/openssl/pkcs12.py
|
|
||||||
src/_cffi_src/openssl/pkcs7.py
|
|
||||||
src/_cffi_src/openssl/rand.py
|
|
||||||
src/_cffi_src/openssl/rsa.py
|
|
||||||
src/_cffi_src/openssl/ssl.py
|
|
||||||
src/_cffi_src/openssl/x509.py
|
|
||||||
src/_cffi_src/openssl/x509_vfy.py
|
|
||||||
src/_cffi_src/openssl/x509name.py
|
|
||||||
src/_cffi_src/openssl/x509v3.py
|
|
||||||
src/cryptography/__about__.py
|
|
||||||
src/cryptography/__init__.py
|
|
||||||
src/cryptography/exceptions.py
|
|
||||||
src/cryptography/fernet.py
|
|
||||||
src/cryptography/utils.py
|
|
||||||
src/cryptography.egg-info/PKG-INFO
|
|
||||||
src/cryptography.egg-info/SOURCES.txt
|
|
||||||
src/cryptography.egg-info/dependency_links.txt
|
|
||||||
src/cryptography.egg-info/entry_points.txt
|
|
||||||
src/cryptography.egg-info/not-zip-safe
|
|
||||||
src/cryptography.egg-info/requires.txt
|
|
||||||
src/cryptography.egg-info/top_level.txt
|
|
||||||
src/cryptography/hazmat/__init__.py
|
|
||||||
src/cryptography/hazmat/backends/__init__.py
|
|
||||||
src/cryptography/hazmat/backends/interfaces.py
|
|
||||||
src/cryptography/hazmat/backends/multibackend.py
|
|
||||||
src/cryptography/hazmat/backends/commoncrypto/__init__.py
|
|
||||||
src/cryptography/hazmat/backends/commoncrypto/backend.py
|
|
||||||
src/cryptography/hazmat/backends/commoncrypto/ciphers.py
|
|
||||||
src/cryptography/hazmat/backends/commoncrypto/hashes.py
|
|
||||||
src/cryptography/hazmat/backends/commoncrypto/hmac.py
|
|
||||||
src/cryptography/hazmat/backends/openssl/__init__.py
|
|
||||||
src/cryptography/hazmat/backends/openssl/backend.py
|
|
||||||
src/cryptography/hazmat/backends/openssl/ciphers.py
|
|
||||||
src/cryptography/hazmat/backends/openssl/cmac.py
|
|
||||||
src/cryptography/hazmat/backends/openssl/dsa.py
|
|
||||||
src/cryptography/hazmat/backends/openssl/ec.py
|
|
||||||
src/cryptography/hazmat/backends/openssl/hashes.py
|
|
||||||
src/cryptography/hazmat/backends/openssl/hmac.py
|
|
||||||
src/cryptography/hazmat/backends/openssl/rsa.py
|
|
||||||
src/cryptography/hazmat/backends/openssl/utils.py
|
|
||||||
src/cryptography/hazmat/backends/openssl/x509.py
|
|
||||||
src/cryptography/hazmat/bindings/__init__.py
|
|
||||||
src/cryptography/hazmat/bindings/commoncrypto/__init__.py
|
|
||||||
src/cryptography/hazmat/bindings/commoncrypto/binding.py
|
|
||||||
src/cryptography/hazmat/bindings/openssl/__init__.py
|
|
||||||
src/cryptography/hazmat/bindings/openssl/_conditional.py
|
|
||||||
src/cryptography/hazmat/bindings/openssl/binding.py
|
|
||||||
src/cryptography/hazmat/primitives/__init__.py
|
|
||||||
src/cryptography/hazmat/primitives/cmac.py
|
|
||||||
src/cryptography/hazmat/primitives/constant_time.py
|
|
||||||
src/cryptography/hazmat/primitives/hashes.py
|
|
||||||
src/cryptography/hazmat/primitives/hmac.py
|
|
||||||
src/cryptography/hazmat/primitives/keywrap.py
|
|
||||||
src/cryptography/hazmat/primitives/padding.py
|
|
||||||
src/cryptography/hazmat/primitives/serialization.py
|
|
||||||
src/cryptography/hazmat/primitives/asymmetric/__init__.py
|
|
||||||
src/cryptography/hazmat/primitives/asymmetric/dh.py
|
|
||||||
src/cryptography/hazmat/primitives/asymmetric/dsa.py
|
|
||||||
src/cryptography/hazmat/primitives/asymmetric/ec.py
|
|
||||||
src/cryptography/hazmat/primitives/asymmetric/padding.py
|
|
||||||
src/cryptography/hazmat/primitives/asymmetric/rsa.py
|
|
||||||
src/cryptography/hazmat/primitives/asymmetric/utils.py
|
|
||||||
src/cryptography/hazmat/primitives/ciphers/__init__.py
|
|
||||||
src/cryptography/hazmat/primitives/ciphers/algorithms.py
|
|
||||||
src/cryptography/hazmat/primitives/ciphers/base.py
|
|
||||||
src/cryptography/hazmat/primitives/ciphers/modes.py
|
|
||||||
src/cryptography/hazmat/primitives/interfaces/__init__.py
|
|
||||||
src/cryptography/hazmat/primitives/kdf/__init__.py
|
|
||||||
src/cryptography/hazmat/primitives/kdf/concatkdf.py
|
|
||||||
src/cryptography/hazmat/primitives/kdf/hkdf.py
|
|
||||||
src/cryptography/hazmat/primitives/kdf/pbkdf2.py
|
|
||||||
src/cryptography/hazmat/primitives/kdf/x963kdf.py
|
|
||||||
src/cryptography/hazmat/primitives/twofactor/__init__.py
|
|
||||||
src/cryptography/hazmat/primitives/twofactor/hotp.py
|
|
||||||
src/cryptography/hazmat/primitives/twofactor/totp.py
|
|
||||||
src/cryptography/hazmat/primitives/twofactor/utils.py
|
|
||||||
src/cryptography/x509/__init__.py
|
|
||||||
src/cryptography/x509/base.py
|
|
||||||
src/cryptography/x509/extensions.py
|
|
||||||
src/cryptography/x509/general_name.py
|
|
||||||
src/cryptography/x509/name.py
|
|
||||||
src/cryptography/x509/oid.py
|
|
||||||
tests/__init__.py
|
|
||||||
tests/conftest.py
|
|
||||||
tests/test_fernet.py
|
|
||||||
tests/test_interfaces.py
|
|
||||||
tests/test_utils.py
|
|
||||||
tests/test_warnings.py
|
|
||||||
tests/test_x509.py
|
|
||||||
tests/test_x509_ext.py
|
|
||||||
tests/utils.py
|
|
||||||
tests/hazmat/__init__.py
|
|
||||||
tests/hazmat/backends/__init__.py
|
|
||||||
tests/hazmat/backends/test_commoncrypto.py
|
|
||||||
tests/hazmat/backends/test_multibackend.py
|
|
||||||
tests/hazmat/backends/test_openssl.py
|
|
||||||
tests/hazmat/bindings/test_commoncrypto.py
|
|
||||||
tests/hazmat/bindings/test_openssl.py
|
|
||||||
tests/hazmat/primitives/__init__.py
|
|
||||||
tests/hazmat/primitives/fixtures_dsa.py
|
|
||||||
tests/hazmat/primitives/fixtures_rsa.py
|
|
||||||
tests/hazmat/primitives/test_3des.py
|
|
||||||
tests/hazmat/primitives/test_aes.py
|
|
||||||
tests/hazmat/primitives/test_arc4.py
|
|
||||||
tests/hazmat/primitives/test_asym_utils.py
|
|
||||||
tests/hazmat/primitives/test_block.py
|
|
||||||
tests/hazmat/primitives/test_blowfish.py
|
|
||||||
tests/hazmat/primitives/test_camellia.py
|
|
||||||
tests/hazmat/primitives/test_cast5.py
|
|
||||||
tests/hazmat/primitives/test_ciphers.py
|
|
||||||
tests/hazmat/primitives/test_cmac.py
|
|
||||||
tests/hazmat/primitives/test_concatkdf.py
|
|
||||||
tests/hazmat/primitives/test_constant_time.py
|
|
||||||
tests/hazmat/primitives/test_dh.py
|
|
||||||
tests/hazmat/primitives/test_dsa.py
|
|
||||||
tests/hazmat/primitives/test_ec.py
|
|
||||||
tests/hazmat/primitives/test_hash_vectors.py
|
|
||||||
tests/hazmat/primitives/test_hashes.py
|
|
||||||
tests/hazmat/primitives/test_hkdf.py
|
|
||||||
tests/hazmat/primitives/test_hkdf_vectors.py
|
|
||||||
tests/hazmat/primitives/test_hmac.py
|
|
||||||
tests/hazmat/primitives/test_hmac_vectors.py
|
|
||||||
tests/hazmat/primitives/test_idea.py
|
|
||||||
tests/hazmat/primitives/test_keywrap.py
|
|
||||||
tests/hazmat/primitives/test_padding.py
|
|
||||||
tests/hazmat/primitives/test_pbkdf2hmac.py
|
|
||||||
tests/hazmat/primitives/test_pbkdf2hmac_vectors.py
|
|
||||||
tests/hazmat/primitives/test_rsa.py
|
|
||||||
tests/hazmat/primitives/test_seed.py
|
|
||||||
tests/hazmat/primitives/test_serialization.py
|
|
||||||
tests/hazmat/primitives/test_x963_vectors.py
|
|
||||||
tests/hazmat/primitives/test_x963kdf.py
|
|
||||||
tests/hazmat/primitives/utils.py
|
|
||||||
tests/hazmat/primitives/twofactor/__init__.py
|
|
||||||
tests/hazmat/primitives/twofactor/test_hotp.py
|
|
||||||
tests/hazmat/primitives/twofactor/test_totp.py
|
|
||||||
tests/hypothesis/__init__.py
|
|
||||||
tests/hypothesis/test_fernet.py
|
|
|
@ -1 +0,0 @@
|
||||||
|
|
|
@ -1,3 +0,0 @@
|
||||||
[cryptography.backends]
|
|
||||||
openssl = cryptography.hazmat.backends.openssl:backend
|
|
||||||
|
|
|
@ -1,143 +0,0 @@
|
||||||
../cryptography/__about__.py
|
|
||||||
../cryptography/__init__.py
|
|
||||||
../cryptography/exceptions.py
|
|
||||||
../cryptography/fernet.py
|
|
||||||
../cryptography/utils.py
|
|
||||||
../cryptography/hazmat/__init__.py
|
|
||||||
../cryptography/x509/__init__.py
|
|
||||||
../cryptography/x509/base.py
|
|
||||||
../cryptography/x509/extensions.py
|
|
||||||
../cryptography/x509/general_name.py
|
|
||||||
../cryptography/x509/name.py
|
|
||||||
../cryptography/x509/oid.py
|
|
||||||
../cryptography/hazmat/backends/__init__.py
|
|
||||||
../cryptography/hazmat/backends/interfaces.py
|
|
||||||
../cryptography/hazmat/backends/multibackend.py
|
|
||||||
../cryptography/hazmat/bindings/__init__.py
|
|
||||||
../cryptography/hazmat/primitives/__init__.py
|
|
||||||
../cryptography/hazmat/primitives/cmac.py
|
|
||||||
../cryptography/hazmat/primitives/constant_time.py
|
|
||||||
../cryptography/hazmat/primitives/hashes.py
|
|
||||||
../cryptography/hazmat/primitives/hmac.py
|
|
||||||
../cryptography/hazmat/primitives/keywrap.py
|
|
||||||
../cryptography/hazmat/primitives/padding.py
|
|
||||||
../cryptography/hazmat/primitives/serialization.py
|
|
||||||
../cryptography/hazmat/backends/commoncrypto/__init__.py
|
|
||||||
../cryptography/hazmat/backends/commoncrypto/backend.py
|
|
||||||
../cryptography/hazmat/backends/commoncrypto/ciphers.py
|
|
||||||
../cryptography/hazmat/backends/commoncrypto/hashes.py
|
|
||||||
../cryptography/hazmat/backends/commoncrypto/hmac.py
|
|
||||||
../cryptography/hazmat/backends/openssl/__init__.py
|
|
||||||
../cryptography/hazmat/backends/openssl/backend.py
|
|
||||||
../cryptography/hazmat/backends/openssl/ciphers.py
|
|
||||||
../cryptography/hazmat/backends/openssl/cmac.py
|
|
||||||
../cryptography/hazmat/backends/openssl/dsa.py
|
|
||||||
../cryptography/hazmat/backends/openssl/ec.py
|
|
||||||
../cryptography/hazmat/backends/openssl/hashes.py
|
|
||||||
../cryptography/hazmat/backends/openssl/hmac.py
|
|
||||||
../cryptography/hazmat/backends/openssl/rsa.py
|
|
||||||
../cryptography/hazmat/backends/openssl/utils.py
|
|
||||||
../cryptography/hazmat/backends/openssl/x509.py
|
|
||||||
../cryptography/hazmat/bindings/commoncrypto/__init__.py
|
|
||||||
../cryptography/hazmat/bindings/commoncrypto/binding.py
|
|
||||||
../cryptography/hazmat/bindings/openssl/__init__.py
|
|
||||||
../cryptography/hazmat/bindings/openssl/_conditional.py
|
|
||||||
../cryptography/hazmat/bindings/openssl/binding.py
|
|
||||||
../cryptography/hazmat/primitives/asymmetric/__init__.py
|
|
||||||
../cryptography/hazmat/primitives/asymmetric/dh.py
|
|
||||||
../cryptography/hazmat/primitives/asymmetric/dsa.py
|
|
||||||
../cryptography/hazmat/primitives/asymmetric/ec.py
|
|
||||||
../cryptography/hazmat/primitives/asymmetric/padding.py
|
|
||||||
../cryptography/hazmat/primitives/asymmetric/rsa.py
|
|
||||||
../cryptography/hazmat/primitives/asymmetric/utils.py
|
|
||||||
../cryptography/hazmat/primitives/ciphers/__init__.py
|
|
||||||
../cryptography/hazmat/primitives/ciphers/algorithms.py
|
|
||||||
../cryptography/hazmat/primitives/ciphers/base.py
|
|
||||||
../cryptography/hazmat/primitives/ciphers/modes.py
|
|
||||||
../cryptography/hazmat/primitives/interfaces/__init__.py
|
|
||||||
../cryptography/hazmat/primitives/kdf/__init__.py
|
|
||||||
../cryptography/hazmat/primitives/kdf/concatkdf.py
|
|
||||||
../cryptography/hazmat/primitives/kdf/hkdf.py
|
|
||||||
../cryptography/hazmat/primitives/kdf/pbkdf2.py
|
|
||||||
../cryptography/hazmat/primitives/kdf/x963kdf.py
|
|
||||||
../cryptography/hazmat/primitives/twofactor/__init__.py
|
|
||||||
../cryptography/hazmat/primitives/twofactor/hotp.py
|
|
||||||
../cryptography/hazmat/primitives/twofactor/totp.py
|
|
||||||
../cryptography/hazmat/primitives/twofactor/utils.py
|
|
||||||
../cryptography/__pycache__/__about__.cpython-34.pyc
|
|
||||||
../cryptography/__pycache__/__init__.cpython-34.pyc
|
|
||||||
../cryptography/__pycache__/exceptions.cpython-34.pyc
|
|
||||||
../cryptography/__pycache__/fernet.cpython-34.pyc
|
|
||||||
../cryptography/__pycache__/utils.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/__pycache__/__init__.cpython-34.pyc
|
|
||||||
../cryptography/x509/__pycache__/__init__.cpython-34.pyc
|
|
||||||
../cryptography/x509/__pycache__/base.cpython-34.pyc
|
|
||||||
../cryptography/x509/__pycache__/extensions.cpython-34.pyc
|
|
||||||
../cryptography/x509/__pycache__/general_name.cpython-34.pyc
|
|
||||||
../cryptography/x509/__pycache__/name.cpython-34.pyc
|
|
||||||
../cryptography/x509/__pycache__/oid.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/backends/__pycache__/__init__.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/backends/__pycache__/interfaces.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/backends/__pycache__/multibackend.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/bindings/__pycache__/__init__.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/primitives/__pycache__/__init__.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/primitives/__pycache__/cmac.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/primitives/__pycache__/constant_time.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/primitives/__pycache__/hashes.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/primitives/__pycache__/hmac.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/primitives/__pycache__/keywrap.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/primitives/__pycache__/padding.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/primitives/__pycache__/serialization.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/backends/commoncrypto/__pycache__/__init__.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/backends/commoncrypto/__pycache__/backend.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/backends/commoncrypto/__pycache__/ciphers.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/backends/commoncrypto/__pycache__/hashes.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/backends/commoncrypto/__pycache__/hmac.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/backends/openssl/__pycache__/ciphers.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/backends/openssl/__pycache__/cmac.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/backends/openssl/__pycache__/dsa.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/backends/openssl/__pycache__/ec.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/backends/openssl/__pycache__/hashes.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/backends/openssl/__pycache__/hmac.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/backends/openssl/__pycache__/rsa.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/backends/openssl/__pycache__/utils.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/backends/openssl/__pycache__/x509.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/bindings/commoncrypto/__pycache__/__init__.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/bindings/commoncrypto/__pycache__/binding.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/primitives/interfaces/__pycache__/__init__.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/primitives/twofactor/__pycache__/utils.cpython-34.pyc
|
|
||||||
../cryptography/hazmat/bindings/_openssl.cpython-34m.so
|
|
||||||
../cryptography/hazmat/bindings/_constant_time.cpython-34m.so
|
|
||||||
../cryptography/hazmat/bindings/_padding.cpython-34m.so
|
|
||||||
./
|
|
||||||
dependency_links.txt
|
|
||||||
entry_points.txt
|
|
||||||
not-zip-safe
|
|
||||||
PKG-INFO
|
|
||||||
requires.txt
|
|
||||||
SOURCES.txt
|
|
||||||
top_level.txt
|
|
|
@ -1,5 +0,0 @@
|
||||||
idna>=2.0
|
|
||||||
pyasn1>=0.1.8
|
|
||||||
six>=1.4.1
|
|
||||||
setuptools
|
|
||||||
cffi>=1.1.0
|
|
|
@ -1,4 +0,0 @@
|
||||||
_constant_time
|
|
||||||
_openssl
|
|
||||||
_padding
|
|
||||||
cryptography
|
|
|
@ -1,23 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
__all__ = [
|
|
||||||
"__title__", "__summary__", "__uri__", "__version__", "__author__",
|
|
||||||
"__email__", "__license__", "__copyright__",
|
|
||||||
]
|
|
||||||
|
|
||||||
__title__ = "cryptography"
|
|
||||||
__summary__ = ("cryptography is a package which provides cryptographic recipes"
|
|
||||||
" and primitives to Python developers.")
|
|
||||||
__uri__ = "https://github.com/pyca/cryptography"
|
|
||||||
|
|
||||||
__version__ = "1.1"
|
|
||||||
|
|
||||||
__author__ = "The cryptography developers"
|
|
||||||
__email__ = "cryptography-dev@python.org"
|
|
||||||
|
|
||||||
__license__ = "BSD or Apache License, Version 2.0"
|
|
||||||
__copyright__ = "Copyright 2013-2015 {0}".format(__author__)
|
|
|
@ -1,26 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
import sys
|
|
||||||
import warnings
|
|
||||||
|
|
||||||
from cryptography.__about__ import (
|
|
||||||
__author__, __copyright__, __email__, __license__, __summary__, __title__,
|
|
||||||
__uri__, __version__
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
__all__ = [
|
|
||||||
"__title__", "__summary__", "__uri__", "__version__", "__author__",
|
|
||||||
"__email__", "__license__", "__copyright__",
|
|
||||||
]
|
|
||||||
|
|
||||||
if sys.version_info[:2] == (2, 6):
|
|
||||||
warnings.warn(
|
|
||||||
"Python 2.6 is no longer supported by the Python core team, please "
|
|
||||||
"upgrade your Python.",
|
|
||||||
DeprecationWarning
|
|
||||||
)
|
|
|
@ -1,70 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
from enum import Enum
|
|
||||||
|
|
||||||
from cryptography import utils
|
|
||||||
from cryptography.hazmat.primitives import twofactor
|
|
||||||
|
|
||||||
|
|
||||||
class _Reasons(Enum):
|
|
||||||
BACKEND_MISSING_INTERFACE = 0
|
|
||||||
UNSUPPORTED_HASH = 1
|
|
||||||
UNSUPPORTED_CIPHER = 2
|
|
||||||
UNSUPPORTED_PADDING = 3
|
|
||||||
UNSUPPORTED_MGF = 4
|
|
||||||
UNSUPPORTED_PUBLIC_KEY_ALGORITHM = 5
|
|
||||||
UNSUPPORTED_ELLIPTIC_CURVE = 6
|
|
||||||
UNSUPPORTED_SERIALIZATION = 7
|
|
||||||
UNSUPPORTED_X509 = 8
|
|
||||||
UNSUPPORTED_EXCHANGE_ALGORITHM = 9
|
|
||||||
|
|
||||||
|
|
||||||
class UnsupportedAlgorithm(Exception):
|
|
||||||
def __init__(self, message, reason=None):
|
|
||||||
super(UnsupportedAlgorithm, self).__init__(message)
|
|
||||||
self._reason = reason
|
|
||||||
|
|
||||||
|
|
||||||
class AlreadyFinalized(Exception):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class AlreadyUpdated(Exception):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class NotYetFinalized(Exception):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class InvalidTag(Exception):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class InvalidSignature(Exception):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class InternalError(Exception):
|
|
||||||
def __init__(self, msg, err_code):
|
|
||||||
super(InternalError, self).__init__(msg)
|
|
||||||
self.err_code = err_code
|
|
||||||
|
|
||||||
|
|
||||||
class InvalidKey(Exception):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
InvalidToken = utils.deprecated(
|
|
||||||
twofactor.InvalidToken,
|
|
||||||
__name__,
|
|
||||||
(
|
|
||||||
"The InvalidToken exception has moved to the "
|
|
||||||
"cryptography.hazmat.primitives.twofactor module"
|
|
||||||
),
|
|
||||||
utils.DeprecatedIn09
|
|
||||||
)
|
|
|
@ -1,141 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
import base64
|
|
||||||
import binascii
|
|
||||||
import os
|
|
||||||
import struct
|
|
||||||
import time
|
|
||||||
|
|
||||||
import six
|
|
||||||
|
|
||||||
from cryptography.exceptions import InvalidSignature
|
|
||||||
from cryptography.hazmat.backends import default_backend
|
|
||||||
from cryptography.hazmat.primitives import hashes, padding
|
|
||||||
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
|
|
||||||
from cryptography.hazmat.primitives.hmac import HMAC
|
|
||||||
|
|
||||||
|
|
||||||
class InvalidToken(Exception):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
_MAX_CLOCK_SKEW = 60
|
|
||||||
|
|
||||||
|
|
||||||
class Fernet(object):
|
|
||||||
def __init__(self, key, backend=None):
|
|
||||||
if backend is None:
|
|
||||||
backend = default_backend()
|
|
||||||
|
|
||||||
key = base64.urlsafe_b64decode(key)
|
|
||||||
if len(key) != 32:
|
|
||||||
raise ValueError(
|
|
||||||
"Fernet key must be 32 url-safe base64-encoded bytes."
|
|
||||||
)
|
|
||||||
|
|
||||||
self._signing_key = key[:16]
|
|
||||||
self._encryption_key = key[16:]
|
|
||||||
self._backend = backend
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def generate_key(cls):
|
|
||||||
return base64.urlsafe_b64encode(os.urandom(32))
|
|
||||||
|
|
||||||
def encrypt(self, data):
|
|
||||||
current_time = int(time.time())
|
|
||||||
iv = os.urandom(16)
|
|
||||||
return self._encrypt_from_parts(data, current_time, iv)
|
|
||||||
|
|
||||||
def _encrypt_from_parts(self, data, current_time, iv):
|
|
||||||
if not isinstance(data, bytes):
|
|
||||||
raise TypeError("data must be bytes.")
|
|
||||||
|
|
||||||
padder = padding.PKCS7(algorithms.AES.block_size).padder()
|
|
||||||
padded_data = padder.update(data) + padder.finalize()
|
|
||||||
encryptor = Cipher(
|
|
||||||
algorithms.AES(self._encryption_key), modes.CBC(iv), self._backend
|
|
||||||
).encryptor()
|
|
||||||
ciphertext = encryptor.update(padded_data) + encryptor.finalize()
|
|
||||||
|
|
||||||
basic_parts = (
|
|
||||||
b"\x80" + struct.pack(">Q", current_time) + iv + ciphertext
|
|
||||||
)
|
|
||||||
|
|
||||||
h = HMAC(self._signing_key, hashes.SHA256(), backend=self._backend)
|
|
||||||
h.update(basic_parts)
|
|
||||||
hmac = h.finalize()
|
|
||||||
return base64.urlsafe_b64encode(basic_parts + hmac)
|
|
||||||
|
|
||||||
def decrypt(self, token, ttl=None):
|
|
||||||
if not isinstance(token, bytes):
|
|
||||||
raise TypeError("token must be bytes.")
|
|
||||||
|
|
||||||
current_time = int(time.time())
|
|
||||||
|
|
||||||
try:
|
|
||||||
data = base64.urlsafe_b64decode(token)
|
|
||||||
except (TypeError, binascii.Error):
|
|
||||||
raise InvalidToken
|
|
||||||
|
|
||||||
if not data or six.indexbytes(data, 0) != 0x80:
|
|
||||||
raise InvalidToken
|
|
||||||
|
|
||||||
try:
|
|
||||||
timestamp, = struct.unpack(">Q", data[1:9])
|
|
||||||
except struct.error:
|
|
||||||
raise InvalidToken
|
|
||||||
if ttl is not None:
|
|
||||||
if timestamp + ttl < current_time:
|
|
||||||
raise InvalidToken
|
|
||||||
if current_time + _MAX_CLOCK_SKEW < timestamp:
|
|
||||||
raise InvalidToken
|
|
||||||
h = HMAC(self._signing_key, hashes.SHA256(), backend=self._backend)
|
|
||||||
h.update(data[:-32])
|
|
||||||
try:
|
|
||||||
h.verify(data[-32:])
|
|
||||||
except InvalidSignature:
|
|
||||||
raise InvalidToken
|
|
||||||
|
|
||||||
iv = data[9:25]
|
|
||||||
ciphertext = data[25:-32]
|
|
||||||
decryptor = Cipher(
|
|
||||||
algorithms.AES(self._encryption_key), modes.CBC(iv), self._backend
|
|
||||||
).decryptor()
|
|
||||||
plaintext_padded = decryptor.update(ciphertext)
|
|
||||||
try:
|
|
||||||
plaintext_padded += decryptor.finalize()
|
|
||||||
except ValueError:
|
|
||||||
raise InvalidToken
|
|
||||||
unpadder = padding.PKCS7(algorithms.AES.block_size).unpadder()
|
|
||||||
|
|
||||||
unpadded = unpadder.update(plaintext_padded)
|
|
||||||
try:
|
|
||||||
unpadded += unpadder.finalize()
|
|
||||||
except ValueError:
|
|
||||||
raise InvalidToken
|
|
||||||
return unpadded
|
|
||||||
|
|
||||||
|
|
||||||
class MultiFernet(object):
|
|
||||||
def __init__(self, fernets):
|
|
||||||
fernets = list(fernets)
|
|
||||||
if not fernets:
|
|
||||||
raise ValueError(
|
|
||||||
"MultiFernet requires at least one Fernet instance"
|
|
||||||
)
|
|
||||||
self._fernets = fernets
|
|
||||||
|
|
||||||
def encrypt(self, msg):
|
|
||||||
return self._fernets[0].encrypt(msg)
|
|
||||||
|
|
||||||
def decrypt(self, msg, ttl=None):
|
|
||||||
for f in self._fernets:
|
|
||||||
try:
|
|
||||||
return f.decrypt(msg, ttl)
|
|
||||||
except InvalidToken:
|
|
||||||
pass
|
|
||||||
raise InvalidToken
|
|
|
@ -1,5 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
|
@ -1,42 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
import pkg_resources
|
|
||||||
|
|
||||||
from cryptography.hazmat.backends.multibackend import MultiBackend
|
|
||||||
|
|
||||||
|
|
||||||
_available_backends_list = None
|
|
||||||
|
|
||||||
|
|
||||||
def _available_backends():
|
|
||||||
global _available_backends_list
|
|
||||||
|
|
||||||
if _available_backends_list is None:
|
|
||||||
_available_backends_list = [
|
|
||||||
# setuptools 11.3 deprecated support for the require parameter to
|
|
||||||
# load(), and introduced the new resolve() method instead.
|
|
||||||
# This can be removed if/when we can assume setuptools>=11.3. At
|
|
||||||
# some point we may wish to add a warning, to push people along,
|
|
||||||
# but at present this would result in too many warnings.
|
|
||||||
ep.resolve() if hasattr(ep, "resolve") else ep.load(require=False)
|
|
||||||
for ep in pkg_resources.iter_entry_points(
|
|
||||||
"cryptography.backends"
|
|
||||||
)
|
|
||||||
]
|
|
||||||
|
|
||||||
return _available_backends_list
|
|
||||||
|
|
||||||
_default_backend = None
|
|
||||||
|
|
||||||
|
|
||||||
def default_backend():
|
|
||||||
global _default_backend
|
|
||||||
|
|
||||||
if _default_backend is None:
|
|
||||||
_default_backend = MultiBackend(_available_backends())
|
|
||||||
|
|
||||||
return _default_backend
|
|
|
@ -1,10 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
from cryptography.hazmat.backends.commoncrypto.backend import backend
|
|
||||||
|
|
||||||
|
|
||||||
__all__ = ["backend"]
|
|
|
@ -1,245 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
from collections import namedtuple
|
|
||||||
|
|
||||||
from cryptography import utils
|
|
||||||
from cryptography.exceptions import InternalError
|
|
||||||
from cryptography.hazmat.backends.commoncrypto.ciphers import (
|
|
||||||
_CipherContext, _GCMCipherContext
|
|
||||||
)
|
|
||||||
from cryptography.hazmat.backends.commoncrypto.hashes import _HashContext
|
|
||||||
from cryptography.hazmat.backends.commoncrypto.hmac import _HMACContext
|
|
||||||
from cryptography.hazmat.backends.interfaces import (
|
|
||||||
CipherBackend, HMACBackend, HashBackend, PBKDF2HMACBackend
|
|
||||||
)
|
|
||||||
from cryptography.hazmat.bindings.commoncrypto.binding import Binding
|
|
||||||
from cryptography.hazmat.primitives.ciphers.algorithms import (
|
|
||||||
AES, ARC4, Blowfish, CAST5, TripleDES
|
|
||||||
)
|
|
||||||
from cryptography.hazmat.primitives.ciphers.modes import (
|
|
||||||
CBC, CFB, CFB8, CTR, ECB, GCM, OFB
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
HashMethods = namedtuple(
|
|
||||||
"HashMethods", ["ctx", "hash_init", "hash_update", "hash_final"]
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(CipherBackend)
|
|
||||||
@utils.register_interface(HashBackend)
|
|
||||||
@utils.register_interface(HMACBackend)
|
|
||||||
@utils.register_interface(PBKDF2HMACBackend)
|
|
||||||
class Backend(object):
|
|
||||||
"""
|
|
||||||
CommonCrypto API wrapper.
|
|
||||||
"""
|
|
||||||
name = "commoncrypto"
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
self._binding = Binding()
|
|
||||||
self._ffi = self._binding.ffi
|
|
||||||
self._lib = self._binding.lib
|
|
||||||
|
|
||||||
self._cipher_registry = {}
|
|
||||||
self._register_default_ciphers()
|
|
||||||
self._hash_mapping = {
|
|
||||||
"md5": HashMethods(
|
|
||||||
"CC_MD5_CTX *", self._lib.CC_MD5_Init,
|
|
||||||
self._lib.CC_MD5_Update, self._lib.CC_MD5_Final
|
|
||||||
),
|
|
||||||
"sha1": HashMethods(
|
|
||||||
"CC_SHA1_CTX *", self._lib.CC_SHA1_Init,
|
|
||||||
self._lib.CC_SHA1_Update, self._lib.CC_SHA1_Final
|
|
||||||
),
|
|
||||||
"sha224": HashMethods(
|
|
||||||
"CC_SHA256_CTX *", self._lib.CC_SHA224_Init,
|
|
||||||
self._lib.CC_SHA224_Update, self._lib.CC_SHA224_Final
|
|
||||||
),
|
|
||||||
"sha256": HashMethods(
|
|
||||||
"CC_SHA256_CTX *", self._lib.CC_SHA256_Init,
|
|
||||||
self._lib.CC_SHA256_Update, self._lib.CC_SHA256_Final
|
|
||||||
),
|
|
||||||
"sha384": HashMethods(
|
|
||||||
"CC_SHA512_CTX *", self._lib.CC_SHA384_Init,
|
|
||||||
self._lib.CC_SHA384_Update, self._lib.CC_SHA384_Final
|
|
||||||
),
|
|
||||||
"sha512": HashMethods(
|
|
||||||
"CC_SHA512_CTX *", self._lib.CC_SHA512_Init,
|
|
||||||
self._lib.CC_SHA512_Update, self._lib.CC_SHA512_Final
|
|
||||||
),
|
|
||||||
}
|
|
||||||
|
|
||||||
self._supported_hmac_algorithms = {
|
|
||||||
"md5": self._lib.kCCHmacAlgMD5,
|
|
||||||
"sha1": self._lib.kCCHmacAlgSHA1,
|
|
||||||
"sha224": self._lib.kCCHmacAlgSHA224,
|
|
||||||
"sha256": self._lib.kCCHmacAlgSHA256,
|
|
||||||
"sha384": self._lib.kCCHmacAlgSHA384,
|
|
||||||
"sha512": self._lib.kCCHmacAlgSHA512,
|
|
||||||
}
|
|
||||||
|
|
||||||
self._supported_pbkdf2_hmac_algorithms = {
|
|
||||||
"sha1": self._lib.kCCPRFHmacAlgSHA1,
|
|
||||||
"sha224": self._lib.kCCPRFHmacAlgSHA224,
|
|
||||||
"sha256": self._lib.kCCPRFHmacAlgSHA256,
|
|
||||||
"sha384": self._lib.kCCPRFHmacAlgSHA384,
|
|
||||||
"sha512": self._lib.kCCPRFHmacAlgSHA512,
|
|
||||||
}
|
|
||||||
|
|
||||||
def hash_supported(self, algorithm):
|
|
||||||
return algorithm.name in self._hash_mapping
|
|
||||||
|
|
||||||
def hmac_supported(self, algorithm):
|
|
||||||
return algorithm.name in self._supported_hmac_algorithms
|
|
||||||
|
|
||||||
def create_hash_ctx(self, algorithm):
|
|
||||||
return _HashContext(self, algorithm)
|
|
||||||
|
|
||||||
def create_hmac_ctx(self, key, algorithm):
|
|
||||||
return _HMACContext(self, key, algorithm)
|
|
||||||
|
|
||||||
def cipher_supported(self, cipher, mode):
|
|
||||||
return (type(cipher), type(mode)) in self._cipher_registry
|
|
||||||
|
|
||||||
def create_symmetric_encryption_ctx(self, cipher, mode):
|
|
||||||
if isinstance(mode, GCM):
|
|
||||||
return _GCMCipherContext(
|
|
||||||
self, cipher, mode, self._lib.kCCEncrypt
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
return _CipherContext(self, cipher, mode, self._lib.kCCEncrypt)
|
|
||||||
|
|
||||||
def create_symmetric_decryption_ctx(self, cipher, mode):
|
|
||||||
if isinstance(mode, GCM):
|
|
||||||
return _GCMCipherContext(
|
|
||||||
self, cipher, mode, self._lib.kCCDecrypt
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
return _CipherContext(self, cipher, mode, self._lib.kCCDecrypt)
|
|
||||||
|
|
||||||
def pbkdf2_hmac_supported(self, algorithm):
|
|
||||||
return algorithm.name in self._supported_pbkdf2_hmac_algorithms
|
|
||||||
|
|
||||||
def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations,
|
|
||||||
key_material):
|
|
||||||
alg_enum = self._supported_pbkdf2_hmac_algorithms[algorithm.name]
|
|
||||||
buf = self._ffi.new("char[]", length)
|
|
||||||
res = self._lib.CCKeyDerivationPBKDF(
|
|
||||||
self._lib.kCCPBKDF2,
|
|
||||||
key_material,
|
|
||||||
len(key_material),
|
|
||||||
salt,
|
|
||||||
len(salt),
|
|
||||||
alg_enum,
|
|
||||||
iterations,
|
|
||||||
buf,
|
|
||||||
length
|
|
||||||
)
|
|
||||||
self._check_cipher_response(res)
|
|
||||||
|
|
||||||
return self._ffi.buffer(buf)[:]
|
|
||||||
|
|
||||||
def _register_cipher_adapter(self, cipher_cls, cipher_const, mode_cls,
|
|
||||||
mode_const):
|
|
||||||
if (cipher_cls, mode_cls) in self._cipher_registry:
|
|
||||||
raise ValueError("Duplicate registration for: {0} {1}.".format(
|
|
||||||
cipher_cls, mode_cls)
|
|
||||||
)
|
|
||||||
self._cipher_registry[cipher_cls, mode_cls] = (cipher_const,
|
|
||||||
mode_const)
|
|
||||||
|
|
||||||
def _register_default_ciphers(self):
|
|
||||||
for mode_cls, mode_const in [
|
|
||||||
(CBC, self._lib.kCCModeCBC),
|
|
||||||
(ECB, self._lib.kCCModeECB),
|
|
||||||
(CFB, self._lib.kCCModeCFB),
|
|
||||||
(CFB8, self._lib.kCCModeCFB8),
|
|
||||||
(OFB, self._lib.kCCModeOFB),
|
|
||||||
(CTR, self._lib.kCCModeCTR),
|
|
||||||
(GCM, self._lib.kCCModeGCM),
|
|
||||||
]:
|
|
||||||
self._register_cipher_adapter(
|
|
||||||
AES,
|
|
||||||
self._lib.kCCAlgorithmAES128,
|
|
||||||
mode_cls,
|
|
||||||
mode_const
|
|
||||||
)
|
|
||||||
for mode_cls, mode_const in [
|
|
||||||
(CBC, self._lib.kCCModeCBC),
|
|
||||||
(ECB, self._lib.kCCModeECB),
|
|
||||||
(CFB, self._lib.kCCModeCFB),
|
|
||||||
(CFB8, self._lib.kCCModeCFB8),
|
|
||||||
(OFB, self._lib.kCCModeOFB),
|
|
||||||
]:
|
|
||||||
self._register_cipher_adapter(
|
|
||||||
TripleDES,
|
|
||||||
self._lib.kCCAlgorithm3DES,
|
|
||||||
mode_cls,
|
|
||||||
mode_const
|
|
||||||
)
|
|
||||||
for mode_cls, mode_const in [
|
|
||||||
(CBC, self._lib.kCCModeCBC),
|
|
||||||
(ECB, self._lib.kCCModeECB),
|
|
||||||
(CFB, self._lib.kCCModeCFB),
|
|
||||||
(OFB, self._lib.kCCModeOFB)
|
|
||||||
]:
|
|
||||||
self._register_cipher_adapter(
|
|
||||||
Blowfish,
|
|
||||||
self._lib.kCCAlgorithmBlowfish,
|
|
||||||
mode_cls,
|
|
||||||
mode_const
|
|
||||||
)
|
|
||||||
for mode_cls, mode_const in [
|
|
||||||
(CBC, self._lib.kCCModeCBC),
|
|
||||||
(ECB, self._lib.kCCModeECB),
|
|
||||||
(CFB, self._lib.kCCModeCFB),
|
|
||||||
(OFB, self._lib.kCCModeOFB),
|
|
||||||
(CTR, self._lib.kCCModeCTR)
|
|
||||||
]:
|
|
||||||
self._register_cipher_adapter(
|
|
||||||
CAST5,
|
|
||||||
self._lib.kCCAlgorithmCAST,
|
|
||||||
mode_cls,
|
|
||||||
mode_const
|
|
||||||
)
|
|
||||||
self._register_cipher_adapter(
|
|
||||||
ARC4,
|
|
||||||
self._lib.kCCAlgorithmRC4,
|
|
||||||
type(None),
|
|
||||||
self._lib.kCCModeRC4
|
|
||||||
)
|
|
||||||
|
|
||||||
def _check_cipher_response(self, response):
|
|
||||||
if response == self._lib.kCCSuccess:
|
|
||||||
return
|
|
||||||
elif response == self._lib.kCCAlignmentError:
|
|
||||||
# This error is not currently triggered due to a bug filed as
|
|
||||||
# rdar://15589470
|
|
||||||
raise ValueError(
|
|
||||||
"The length of the provided data is not a multiple of "
|
|
||||||
"the block length."
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
raise InternalError(
|
|
||||||
"The backend returned an unknown error, consider filing a bug."
|
|
||||||
" Code: {0}.".format(response),
|
|
||||||
response
|
|
||||||
)
|
|
||||||
|
|
||||||
def _release_cipher_ctx(self, ctx):
|
|
||||||
"""
|
|
||||||
Called by the garbage collector and used to safely dereference and
|
|
||||||
release the context.
|
|
||||||
"""
|
|
||||||
if ctx[0] != self._ffi.NULL:
|
|
||||||
res = self._lib.CCCryptorRelease(ctx[0])
|
|
||||||
self._check_cipher_response(res)
|
|
||||||
ctx[0] = self._ffi.NULL
|
|
||||||
|
|
||||||
|
|
||||||
backend = Backend()
|
|
|
@ -1,193 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
from cryptography import utils
|
|
||||||
from cryptography.exceptions import (
|
|
||||||
InvalidTag, UnsupportedAlgorithm, _Reasons
|
|
||||||
)
|
|
||||||
from cryptography.hazmat.primitives import ciphers, constant_time
|
|
||||||
from cryptography.hazmat.primitives.ciphers import modes
|
|
||||||
from cryptography.hazmat.primitives.ciphers.modes import (
|
|
||||||
CFB, CFB8, CTR, OFB
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(ciphers.CipherContext)
|
|
||||||
class _CipherContext(object):
|
|
||||||
def __init__(self, backend, cipher, mode, operation):
|
|
||||||
self._backend = backend
|
|
||||||
self._cipher = cipher
|
|
||||||
self._mode = mode
|
|
||||||
self._operation = operation
|
|
||||||
# There is a bug in CommonCrypto where block ciphers do not raise
|
|
||||||
# kCCAlignmentError when finalizing if you supply non-block aligned
|
|
||||||
# data. To work around this we need to keep track of the block
|
|
||||||
# alignment ourselves, but only for alg+mode combos that require
|
|
||||||
# block alignment. OFB, CFB, and CTR make a block cipher algorithm
|
|
||||||
# into a stream cipher so we don't need to track them (and thus their
|
|
||||||
# block size is effectively 1 byte just like OpenSSL/CommonCrypto
|
|
||||||
# treat RC4 and other stream cipher block sizes).
|
|
||||||
# This bug has been filed as rdar://15589470
|
|
||||||
self._bytes_processed = 0
|
|
||||||
if (isinstance(cipher, ciphers.BlockCipherAlgorithm) and not
|
|
||||||
isinstance(mode, (OFB, CFB, CFB8, CTR))):
|
|
||||||
self._byte_block_size = cipher.block_size // 8
|
|
||||||
else:
|
|
||||||
self._byte_block_size = 1
|
|
||||||
|
|
||||||
registry = self._backend._cipher_registry
|
|
||||||
try:
|
|
||||||
cipher_enum, mode_enum = registry[type(cipher), type(mode)]
|
|
||||||
except KeyError:
|
|
||||||
raise UnsupportedAlgorithm(
|
|
||||||
"cipher {0} in {1} mode is not supported "
|
|
||||||
"by this backend.".format(
|
|
||||||
cipher.name, mode.name if mode else mode),
|
|
||||||
_Reasons.UNSUPPORTED_CIPHER
|
|
||||||
)
|
|
||||||
|
|
||||||
ctx = self._backend._ffi.new("CCCryptorRef *")
|
|
||||||
ctx = self._backend._ffi.gc(ctx, self._backend._release_cipher_ctx)
|
|
||||||
|
|
||||||
if isinstance(mode, modes.ModeWithInitializationVector):
|
|
||||||
iv_nonce = mode.initialization_vector
|
|
||||||
elif isinstance(mode, modes.ModeWithNonce):
|
|
||||||
iv_nonce = mode.nonce
|
|
||||||
else:
|
|
||||||
iv_nonce = self._backend._ffi.NULL
|
|
||||||
|
|
||||||
if isinstance(mode, CTR):
|
|
||||||
mode_option = self._backend._lib.kCCModeOptionCTR_BE
|
|
||||||
else:
|
|
||||||
mode_option = 0
|
|
||||||
|
|
||||||
res = self._backend._lib.CCCryptorCreateWithMode(
|
|
||||||
operation,
|
|
||||||
mode_enum, cipher_enum,
|
|
||||||
self._backend._lib.ccNoPadding, iv_nonce,
|
|
||||||
cipher.key, len(cipher.key),
|
|
||||||
self._backend._ffi.NULL, 0, 0, mode_option, ctx)
|
|
||||||
self._backend._check_cipher_response(res)
|
|
||||||
|
|
||||||
self._ctx = ctx
|
|
||||||
|
|
||||||
def update(self, data):
|
|
||||||
# Count bytes processed to handle block alignment.
|
|
||||||
self._bytes_processed += len(data)
|
|
||||||
buf = self._backend._ffi.new(
|
|
||||||
"unsigned char[]", len(data) + self._byte_block_size - 1)
|
|
||||||
outlen = self._backend._ffi.new("size_t *")
|
|
||||||
res = self._backend._lib.CCCryptorUpdate(
|
|
||||||
self._ctx[0], data, len(data), buf,
|
|
||||||
len(data) + self._byte_block_size - 1, outlen)
|
|
||||||
self._backend._check_cipher_response(res)
|
|
||||||
return self._backend._ffi.buffer(buf)[:outlen[0]]
|
|
||||||
|
|
||||||
def finalize(self):
|
|
||||||
# Raise error if block alignment is wrong.
|
|
||||||
if self._bytes_processed % self._byte_block_size:
|
|
||||||
raise ValueError(
|
|
||||||
"The length of the provided data is not a multiple of "
|
|
||||||
"the block length."
|
|
||||||
)
|
|
||||||
buf = self._backend._ffi.new("unsigned char[]", self._byte_block_size)
|
|
||||||
outlen = self._backend._ffi.new("size_t *")
|
|
||||||
res = self._backend._lib.CCCryptorFinal(
|
|
||||||
self._ctx[0], buf, len(buf), outlen)
|
|
||||||
self._backend._check_cipher_response(res)
|
|
||||||
self._backend._release_cipher_ctx(self._ctx)
|
|
||||||
return self._backend._ffi.buffer(buf)[:outlen[0]]
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(ciphers.AEADCipherContext)
|
|
||||||
@utils.register_interface(ciphers.AEADEncryptionContext)
|
|
||||||
class _GCMCipherContext(object):
|
|
||||||
def __init__(self, backend, cipher, mode, operation):
|
|
||||||
self._backend = backend
|
|
||||||
self._cipher = cipher
|
|
||||||
self._mode = mode
|
|
||||||
self._operation = operation
|
|
||||||
self._tag = None
|
|
||||||
|
|
||||||
registry = self._backend._cipher_registry
|
|
||||||
try:
|
|
||||||
cipher_enum, mode_enum = registry[type(cipher), type(mode)]
|
|
||||||
except KeyError:
|
|
||||||
raise UnsupportedAlgorithm(
|
|
||||||
"cipher {0} in {1} mode is not supported "
|
|
||||||
"by this backend.".format(
|
|
||||||
cipher.name, mode.name if mode else mode),
|
|
||||||
_Reasons.UNSUPPORTED_CIPHER
|
|
||||||
)
|
|
||||||
|
|
||||||
ctx = self._backend._ffi.new("CCCryptorRef *")
|
|
||||||
ctx = self._backend._ffi.gc(ctx, self._backend._release_cipher_ctx)
|
|
||||||
|
|
||||||
self._ctx = ctx
|
|
||||||
|
|
||||||
res = self._backend._lib.CCCryptorCreateWithMode(
|
|
||||||
operation,
|
|
||||||
mode_enum, cipher_enum,
|
|
||||||
self._backend._lib.ccNoPadding,
|
|
||||||
self._backend._ffi.NULL,
|
|
||||||
cipher.key, len(cipher.key),
|
|
||||||
self._backend._ffi.NULL, 0, 0, 0, self._ctx)
|
|
||||||
self._backend._check_cipher_response(res)
|
|
||||||
|
|
||||||
res = self._backend._lib.CCCryptorGCMAddIV(
|
|
||||||
self._ctx[0],
|
|
||||||
mode.initialization_vector,
|
|
||||||
len(mode.initialization_vector)
|
|
||||||
)
|
|
||||||
self._backend._check_cipher_response(res)
|
|
||||||
# CommonCrypto has a bug where calling update without at least one
|
|
||||||
# call to authenticate_additional_data will result in null byte output
|
|
||||||
# for ciphertext. The following empty byte string call prevents the
|
|
||||||
# issue, which is present in at least 10.8 and 10.9.
|
|
||||||
# Filed as rdar://18314544
|
|
||||||
self.authenticate_additional_data(b"")
|
|
||||||
|
|
||||||
def update(self, data):
|
|
||||||
buf = self._backend._ffi.new("unsigned char[]", len(data))
|
|
||||||
args = (self._ctx[0], data, len(data), buf)
|
|
||||||
if self._operation == self._backend._lib.kCCEncrypt:
|
|
||||||
res = self._backend._lib.CCCryptorGCMEncrypt(*args)
|
|
||||||
else:
|
|
||||||
res = self._backend._lib.CCCryptorGCMDecrypt(*args)
|
|
||||||
|
|
||||||
self._backend._check_cipher_response(res)
|
|
||||||
return self._backend._ffi.buffer(buf)[:]
|
|
||||||
|
|
||||||
def finalize(self):
|
|
||||||
# CommonCrypto has a yet another bug where you must make at least one
|
|
||||||
# call to update. If you pass just AAD and call finalize without a call
|
|
||||||
# to update you'll get null bytes for tag. The following update call
|
|
||||||
# prevents this issue, which is present in at least 10.8 and 10.9.
|
|
||||||
# Filed as rdar://18314580
|
|
||||||
self.update(b"")
|
|
||||||
tag_size = self._cipher.block_size // 8
|
|
||||||
tag_buf = self._backend._ffi.new("unsigned char[]", tag_size)
|
|
||||||
tag_len = self._backend._ffi.new("size_t *", tag_size)
|
|
||||||
res = self._backend._lib.CCCryptorGCMFinal(
|
|
||||||
self._ctx[0], tag_buf, tag_len
|
|
||||||
)
|
|
||||||
self._backend._check_cipher_response(res)
|
|
||||||
self._backend._release_cipher_ctx(self._ctx)
|
|
||||||
self._tag = self._backend._ffi.buffer(tag_buf)[:]
|
|
||||||
if (self._operation == self._backend._lib.kCCDecrypt and
|
|
||||||
not constant_time.bytes_eq(
|
|
||||||
self._tag[:len(self._mode.tag)], self._mode.tag
|
|
||||||
)):
|
|
||||||
raise InvalidTag
|
|
||||||
return b""
|
|
||||||
|
|
||||||
def authenticate_additional_data(self, data):
|
|
||||||
res = self._backend._lib.CCCryptorGCMAddAAD(
|
|
||||||
self._ctx[0], data, len(data)
|
|
||||||
)
|
|
||||||
self._backend._check_cipher_response(res)
|
|
||||||
|
|
||||||
tag = utils.read_only_property("_tag")
|
|
|
@ -1,55 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
from cryptography import utils
|
|
||||||
from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
|
|
||||||
from cryptography.hazmat.primitives import hashes
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(hashes.HashContext)
|
|
||||||
class _HashContext(object):
|
|
||||||
def __init__(self, backend, algorithm, ctx=None):
|
|
||||||
self._algorithm = algorithm
|
|
||||||
self._backend = backend
|
|
||||||
|
|
||||||
if ctx is None:
|
|
||||||
try:
|
|
||||||
methods = self._backend._hash_mapping[self.algorithm.name]
|
|
||||||
except KeyError:
|
|
||||||
raise UnsupportedAlgorithm(
|
|
||||||
"{0} is not a supported hash on this backend.".format(
|
|
||||||
algorithm.name),
|
|
||||||
_Reasons.UNSUPPORTED_HASH
|
|
||||||
)
|
|
||||||
ctx = self._backend._ffi.new(methods.ctx)
|
|
||||||
res = methods.hash_init(ctx)
|
|
||||||
assert res == 1
|
|
||||||
|
|
||||||
self._ctx = ctx
|
|
||||||
|
|
||||||
algorithm = utils.read_only_property("_algorithm")
|
|
||||||
|
|
||||||
def copy(self):
|
|
||||||
methods = self._backend._hash_mapping[self.algorithm.name]
|
|
||||||
new_ctx = self._backend._ffi.new(methods.ctx)
|
|
||||||
# CommonCrypto has no APIs for copying hashes, so we have to copy the
|
|
||||||
# underlying struct.
|
|
||||||
new_ctx[0] = self._ctx[0]
|
|
||||||
|
|
||||||
return _HashContext(self._backend, self.algorithm, ctx=new_ctx)
|
|
||||||
|
|
||||||
def update(self, data):
|
|
||||||
methods = self._backend._hash_mapping[self.algorithm.name]
|
|
||||||
res = methods.hash_update(self._ctx, data, len(data))
|
|
||||||
assert res == 1
|
|
||||||
|
|
||||||
def finalize(self):
|
|
||||||
methods = self._backend._hash_mapping[self.algorithm.name]
|
|
||||||
buf = self._backend._ffi.new("unsigned char[]",
|
|
||||||
self.algorithm.digest_size)
|
|
||||||
res = methods.hash_final(buf, self._ctx)
|
|
||||||
assert res == 1
|
|
||||||
return self._backend._ffi.buffer(buf)[:]
|
|
|
@ -1,59 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
from cryptography import utils
|
|
||||||
from cryptography.exceptions import (
|
|
||||||
InvalidSignature, UnsupportedAlgorithm, _Reasons
|
|
||||||
)
|
|
||||||
from cryptography.hazmat.primitives import constant_time, hashes, interfaces
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(interfaces.MACContext)
|
|
||||||
@utils.register_interface(hashes.HashContext)
|
|
||||||
class _HMACContext(object):
|
|
||||||
def __init__(self, backend, key, algorithm, ctx=None):
|
|
||||||
self._algorithm = algorithm
|
|
||||||
self._backend = backend
|
|
||||||
if ctx is None:
|
|
||||||
ctx = self._backend._ffi.new("CCHmacContext *")
|
|
||||||
try:
|
|
||||||
alg = self._backend._supported_hmac_algorithms[algorithm.name]
|
|
||||||
except KeyError:
|
|
||||||
raise UnsupportedAlgorithm(
|
|
||||||
"{0} is not a supported HMAC hash on this backend.".format(
|
|
||||||
algorithm.name),
|
|
||||||
_Reasons.UNSUPPORTED_HASH
|
|
||||||
)
|
|
||||||
|
|
||||||
self._backend._lib.CCHmacInit(ctx, alg, key, len(key))
|
|
||||||
|
|
||||||
self._ctx = ctx
|
|
||||||
self._key = key
|
|
||||||
|
|
||||||
algorithm = utils.read_only_property("_algorithm")
|
|
||||||
|
|
||||||
def copy(self):
|
|
||||||
copied_ctx = self._backend._ffi.new("CCHmacContext *")
|
|
||||||
# CommonCrypto has no APIs for copying HMACs, so we have to copy the
|
|
||||||
# underlying struct.
|
|
||||||
copied_ctx[0] = self._ctx[0]
|
|
||||||
return _HMACContext(
|
|
||||||
self._backend, self._key, self.algorithm, ctx=copied_ctx
|
|
||||||
)
|
|
||||||
|
|
||||||
def update(self, data):
|
|
||||||
self._backend._lib.CCHmacUpdate(self._ctx, data, len(data))
|
|
||||||
|
|
||||||
def finalize(self):
|
|
||||||
buf = self._backend._ffi.new("unsigned char[]",
|
|
||||||
self.algorithm.digest_size)
|
|
||||||
self._backend._lib.CCHmacFinal(self._ctx, buf)
|
|
||||||
return self._backend._ffi.buffer(buf)[:]
|
|
||||||
|
|
||||||
def verify(self, signature):
|
|
||||||
digest = self.finalize()
|
|
||||||
if not constant_time.bytes_eq(digest, signature):
|
|
||||||
raise InvalidSignature("Signature did not match digest.")
|
|
|
@ -1,345 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
import abc
|
|
||||||
|
|
||||||
import six
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
|
|
||||||
class CipherBackend(object):
|
|
||||||
@abc.abstractmethod
|
|
||||||
def cipher_supported(self, cipher, mode):
|
|
||||||
"""
|
|
||||||
Return True if the given cipher and mode are supported.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def create_symmetric_encryption_ctx(self, cipher, mode):
|
|
||||||
"""
|
|
||||||
Get a CipherContext that can be used for encryption.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def create_symmetric_decryption_ctx(self, cipher, mode):
|
|
||||||
"""
|
|
||||||
Get a CipherContext that can be used for decryption.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
|
|
||||||
class HashBackend(object):
|
|
||||||
@abc.abstractmethod
|
|
||||||
def hash_supported(self, algorithm):
|
|
||||||
"""
|
|
||||||
Return True if the hash algorithm is supported by this backend.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def create_hash_ctx(self, algorithm):
|
|
||||||
"""
|
|
||||||
Create a HashContext for calculating a message digest.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
|
|
||||||
class HMACBackend(object):
|
|
||||||
@abc.abstractmethod
|
|
||||||
def hmac_supported(self, algorithm):
|
|
||||||
"""
|
|
||||||
Return True if the hash algorithm is supported for HMAC by this
|
|
||||||
backend.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def create_hmac_ctx(self, key, algorithm):
|
|
||||||
"""
|
|
||||||
Create a MACContext for calculating a message authentication code.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
|
|
||||||
class CMACBackend(object):
|
|
||||||
@abc.abstractmethod
|
|
||||||
def cmac_algorithm_supported(self, algorithm):
|
|
||||||
"""
|
|
||||||
Returns True if the block cipher is supported for CMAC by this backend
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def create_cmac_ctx(self, algorithm):
|
|
||||||
"""
|
|
||||||
Create a MACContext for calculating a message authentication code.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
|
|
||||||
class PBKDF2HMACBackend(object):
|
|
||||||
@abc.abstractmethod
|
|
||||||
def pbkdf2_hmac_supported(self, algorithm):
|
|
||||||
"""
|
|
||||||
Return True if the hash algorithm is supported for PBKDF2 by this
|
|
||||||
backend.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations,
|
|
||||||
key_material):
|
|
||||||
"""
|
|
||||||
Return length bytes derived from provided PBKDF2 parameters.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
|
|
||||||
class RSABackend(object):
|
|
||||||
@abc.abstractmethod
|
|
||||||
def generate_rsa_private_key(self, public_exponent, key_size):
|
|
||||||
"""
|
|
||||||
Generate an RSAPrivateKey instance with public_exponent and a modulus
|
|
||||||
of key_size bits.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def rsa_padding_supported(self, padding):
|
|
||||||
"""
|
|
||||||
Returns True if the backend supports the given padding options.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def generate_rsa_parameters_supported(self, public_exponent, key_size):
|
|
||||||
"""
|
|
||||||
Returns True if the backend supports the given parameters for key
|
|
||||||
generation.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def load_rsa_private_numbers(self, numbers):
|
|
||||||
"""
|
|
||||||
Returns an RSAPrivateKey provider.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def load_rsa_public_numbers(self, numbers):
|
|
||||||
"""
|
|
||||||
Returns an RSAPublicKey provider.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
|
|
||||||
class DSABackend(object):
|
|
||||||
@abc.abstractmethod
|
|
||||||
def generate_dsa_parameters(self, key_size):
|
|
||||||
"""
|
|
||||||
Generate a DSAParameters instance with a modulus of key_size bits.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def generate_dsa_private_key(self, parameters):
|
|
||||||
"""
|
|
||||||
Generate a DSAPrivateKey instance with parameters as a DSAParameters
|
|
||||||
object.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def generate_dsa_private_key_and_parameters(self, key_size):
|
|
||||||
"""
|
|
||||||
Generate a DSAPrivateKey instance using key size only.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def dsa_hash_supported(self, algorithm):
|
|
||||||
"""
|
|
||||||
Return True if the hash algorithm is supported by the backend for DSA.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def dsa_parameters_supported(self, p, q, g):
|
|
||||||
"""
|
|
||||||
Return True if the parameters are supported by the backend for DSA.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def load_dsa_private_numbers(self, numbers):
|
|
||||||
"""
|
|
||||||
Returns a DSAPrivateKey provider.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def load_dsa_public_numbers(self, numbers):
|
|
||||||
"""
|
|
||||||
Returns a DSAPublicKey provider.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def load_dsa_parameter_numbers(self, numbers):
|
|
||||||
"""
|
|
||||||
Returns a DSAParameters provider.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
|
|
||||||
class EllipticCurveBackend(object):
|
|
||||||
@abc.abstractmethod
|
|
||||||
def elliptic_curve_signature_algorithm_supported(
|
|
||||||
self, signature_algorithm, curve
|
|
||||||
):
|
|
||||||
"""
|
|
||||||
Returns True if the backend supports the named elliptic curve with the
|
|
||||||
specified signature algorithm.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def elliptic_curve_supported(self, curve):
|
|
||||||
"""
|
|
||||||
Returns True if the backend supports the named elliptic curve.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def generate_elliptic_curve_private_key(self, curve):
|
|
||||||
"""
|
|
||||||
Return an object conforming to the EllipticCurvePrivateKey interface.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def load_elliptic_curve_public_numbers(self, numbers):
|
|
||||||
"""
|
|
||||||
Return an EllipticCurvePublicKey provider using the given numbers.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def load_elliptic_curve_private_numbers(self, numbers):
|
|
||||||
"""
|
|
||||||
Return an EllipticCurvePrivateKey provider using the given numbers.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def elliptic_curve_exchange_algorithm_supported(self, algorithm, curve):
|
|
||||||
"""
|
|
||||||
Returns whether the exchange algorithm is supported by this backend.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
|
|
||||||
class PEMSerializationBackend(object):
|
|
||||||
@abc.abstractmethod
|
|
||||||
def load_pem_private_key(self, data, password):
|
|
||||||
"""
|
|
||||||
Loads a private key from PEM encoded data, using the provided password
|
|
||||||
if the data is encrypted.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def load_pem_public_key(self, data):
|
|
||||||
"""
|
|
||||||
Loads a public key from PEM encoded data.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
|
|
||||||
class DERSerializationBackend(object):
|
|
||||||
@abc.abstractmethod
|
|
||||||
def load_der_private_key(self, data, password):
|
|
||||||
"""
|
|
||||||
Loads a private key from DER encoded data. Uses the provided password
|
|
||||||
if the data is encrypted.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def load_der_public_key(self, data):
|
|
||||||
"""
|
|
||||||
Loads a public key from DER encoded data.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
|
|
||||||
class X509Backend(object):
|
|
||||||
@abc.abstractmethod
|
|
||||||
def load_pem_x509_certificate(self, data):
|
|
||||||
"""
|
|
||||||
Load an X.509 certificate from PEM encoded data.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def load_der_x509_certificate(self, data):
|
|
||||||
"""
|
|
||||||
Load an X.509 certificate from DER encoded data.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def load_der_x509_csr(self, data):
|
|
||||||
"""
|
|
||||||
Load an X.509 CSR from DER encoded data.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def load_pem_x509_csr(self, data):
|
|
||||||
"""
|
|
||||||
Load an X.509 CSR from PEM encoded data.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def create_x509_csr(self, builder, private_key, algorithm):
|
|
||||||
"""
|
|
||||||
Create and sign an X.509 CSR from a CSR builder object.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def create_x509_certificate(self, builder, private_key, algorithm):
|
|
||||||
"""
|
|
||||||
Create and sign an X.509 certificate from a CertificateBuilder object.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
|
|
||||||
class DHBackend(object):
|
|
||||||
@abc.abstractmethod
|
|
||||||
def generate_dh_parameters(self, key_size):
|
|
||||||
"""
|
|
||||||
Generate a DHParameters instance with a modulus of key_size bits.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def generate_dh_private_key(self, parameters):
|
|
||||||
"""
|
|
||||||
Generate a DHPrivateKey instance with parameters as a DHParameters
|
|
||||||
object.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def generate_dh_private_key_and_parameters(self, key_size):
|
|
||||||
"""
|
|
||||||
Generate a DHPrivateKey instance using key size only.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def load_dh_private_numbers(self, numbers):
|
|
||||||
"""
|
|
||||||
Returns a DHPrivateKey provider.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def load_dh_public_numbers(self, numbers):
|
|
||||||
"""
|
|
||||||
Returns a DHPublicKey provider.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def load_dh_parameter_numbers(self, numbers):
|
|
||||||
"""
|
|
||||||
Returns a DHParameters provider.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def dh_exchange_algorithm_supported(self, exchange_algorithm):
|
|
||||||
"""
|
|
||||||
Returns whether the exchange algorithm is supported by this backend.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def dh_parameters_supported(self, p, g):
|
|
||||||
"""
|
|
||||||
Returns whether the backend supports DH with these parameter values.
|
|
||||||
"""
|
|
|
@ -1,386 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
from cryptography import utils
|
|
||||||
from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
|
|
||||||
from cryptography.hazmat.backends.interfaces import (
|
|
||||||
CMACBackend, CipherBackend, DERSerializationBackend, DSABackend,
|
|
||||||
EllipticCurveBackend, HMACBackend, HashBackend, PBKDF2HMACBackend,
|
|
||||||
PEMSerializationBackend, RSABackend, X509Backend
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(CMACBackend)
|
|
||||||
@utils.register_interface(CipherBackend)
|
|
||||||
@utils.register_interface(DERSerializationBackend)
|
|
||||||
@utils.register_interface(HashBackend)
|
|
||||||
@utils.register_interface(HMACBackend)
|
|
||||||
@utils.register_interface(PBKDF2HMACBackend)
|
|
||||||
@utils.register_interface(RSABackend)
|
|
||||||
@utils.register_interface(DSABackend)
|
|
||||||
@utils.register_interface(EllipticCurveBackend)
|
|
||||||
@utils.register_interface(PEMSerializationBackend)
|
|
||||||
@utils.register_interface(X509Backend)
|
|
||||||
class MultiBackend(object):
|
|
||||||
name = "multibackend"
|
|
||||||
|
|
||||||
def __init__(self, backends):
|
|
||||||
self._backends = backends
|
|
||||||
|
|
||||||
def _filtered_backends(self, interface):
|
|
||||||
for b in self._backends:
|
|
||||||
if isinstance(b, interface):
|
|
||||||
yield b
|
|
||||||
|
|
||||||
def cipher_supported(self, cipher, mode):
|
|
||||||
return any(
|
|
||||||
b.cipher_supported(cipher, mode)
|
|
||||||
for b in self._filtered_backends(CipherBackend)
|
|
||||||
)
|
|
||||||
|
|
||||||
def create_symmetric_encryption_ctx(self, cipher, mode):
|
|
||||||
for b in self._filtered_backends(CipherBackend):
|
|
||||||
try:
|
|
||||||
return b.create_symmetric_encryption_ctx(cipher, mode)
|
|
||||||
except UnsupportedAlgorithm:
|
|
||||||
pass
|
|
||||||
raise UnsupportedAlgorithm(
|
|
||||||
"cipher {0} in {1} mode is not supported by this backend.".format(
|
|
||||||
cipher.name, mode.name if mode else mode),
|
|
||||||
_Reasons.UNSUPPORTED_CIPHER
|
|
||||||
)
|
|
||||||
|
|
||||||
def create_symmetric_decryption_ctx(self, cipher, mode):
|
|
||||||
for b in self._filtered_backends(CipherBackend):
|
|
||||||
try:
|
|
||||||
return b.create_symmetric_decryption_ctx(cipher, mode)
|
|
||||||
except UnsupportedAlgorithm:
|
|
||||||
pass
|
|
||||||
raise UnsupportedAlgorithm(
|
|
||||||
"cipher {0} in {1} mode is not supported by this backend.".format(
|
|
||||||
cipher.name, mode.name if mode else mode),
|
|
||||||
_Reasons.UNSUPPORTED_CIPHER
|
|
||||||
)
|
|
||||||
|
|
||||||
def hash_supported(self, algorithm):
|
|
||||||
return any(
|
|
||||||
b.hash_supported(algorithm)
|
|
||||||
for b in self._filtered_backends(HashBackend)
|
|
||||||
)
|
|
||||||
|
|
||||||
def create_hash_ctx(self, algorithm):
|
|
||||||
for b in self._filtered_backends(HashBackend):
|
|
||||||
try:
|
|
||||||
return b.create_hash_ctx(algorithm)
|
|
||||||
except UnsupportedAlgorithm:
|
|
||||||
pass
|
|
||||||
raise UnsupportedAlgorithm(
|
|
||||||
"{0} is not a supported hash on this backend.".format(
|
|
||||||
algorithm.name),
|
|
||||||
_Reasons.UNSUPPORTED_HASH
|
|
||||||
)
|
|
||||||
|
|
||||||
def hmac_supported(self, algorithm):
|
|
||||||
return any(
|
|
||||||
b.hmac_supported(algorithm)
|
|
||||||
for b in self._filtered_backends(HMACBackend)
|
|
||||||
)
|
|
||||||
|
|
||||||
def create_hmac_ctx(self, key, algorithm):
|
|
||||||
for b in self._filtered_backends(HMACBackend):
|
|
||||||
try:
|
|
||||||
return b.create_hmac_ctx(key, algorithm)
|
|
||||||
except UnsupportedAlgorithm:
|
|
||||||
pass
|
|
||||||
raise UnsupportedAlgorithm(
|
|
||||||
"{0} is not a supported hash on this backend.".format(
|
|
||||||
algorithm.name),
|
|
||||||
_Reasons.UNSUPPORTED_HASH
|
|
||||||
)
|
|
||||||
|
|
||||||
def pbkdf2_hmac_supported(self, algorithm):
|
|
||||||
return any(
|
|
||||||
b.pbkdf2_hmac_supported(algorithm)
|
|
||||||
for b in self._filtered_backends(PBKDF2HMACBackend)
|
|
||||||
)
|
|
||||||
|
|
||||||
def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations,
|
|
||||||
key_material):
|
|
||||||
for b in self._filtered_backends(PBKDF2HMACBackend):
|
|
||||||
try:
|
|
||||||
return b.derive_pbkdf2_hmac(
|
|
||||||
algorithm, length, salt, iterations, key_material
|
|
||||||
)
|
|
||||||
except UnsupportedAlgorithm:
|
|
||||||
pass
|
|
||||||
raise UnsupportedAlgorithm(
|
|
||||||
"{0} is not a supported hash on this backend.".format(
|
|
||||||
algorithm.name),
|
|
||||||
_Reasons.UNSUPPORTED_HASH
|
|
||||||
)
|
|
||||||
|
|
||||||
def generate_rsa_private_key(self, public_exponent, key_size):
|
|
||||||
for b in self._filtered_backends(RSABackend):
|
|
||||||
return b.generate_rsa_private_key(public_exponent, key_size)
|
|
||||||
raise UnsupportedAlgorithm("RSA is not supported by the backend.",
|
|
||||||
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
|
||||||
|
|
||||||
def generate_rsa_parameters_supported(self, public_exponent, key_size):
|
|
||||||
for b in self._filtered_backends(RSABackend):
|
|
||||||
return b.generate_rsa_parameters_supported(
|
|
||||||
public_exponent, key_size
|
|
||||||
)
|
|
||||||
raise UnsupportedAlgorithm("RSA is not supported by the backend.",
|
|
||||||
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
|
||||||
|
|
||||||
def rsa_padding_supported(self, padding):
|
|
||||||
for b in self._filtered_backends(RSABackend):
|
|
||||||
return b.rsa_padding_supported(padding)
|
|
||||||
raise UnsupportedAlgorithm("RSA is not supported by the backend.",
|
|
||||||
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
|
||||||
|
|
||||||
def load_rsa_private_numbers(self, numbers):
|
|
||||||
for b in self._filtered_backends(RSABackend):
|
|
||||||
return b.load_rsa_private_numbers(numbers)
|
|
||||||
|
|
||||||
raise UnsupportedAlgorithm("RSA is not supported by the backend",
|
|
||||||
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
|
||||||
|
|
||||||
def load_rsa_public_numbers(self, numbers):
|
|
||||||
for b in self._filtered_backends(RSABackend):
|
|
||||||
return b.load_rsa_public_numbers(numbers)
|
|
||||||
|
|
||||||
raise UnsupportedAlgorithm("RSA is not supported by the backend",
|
|
||||||
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
|
||||||
|
|
||||||
def generate_dsa_parameters(self, key_size):
|
|
||||||
for b in self._filtered_backends(DSABackend):
|
|
||||||
return b.generate_dsa_parameters(key_size)
|
|
||||||
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
|
|
||||||
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
|
||||||
|
|
||||||
def generate_dsa_private_key(self, parameters):
|
|
||||||
for b in self._filtered_backends(DSABackend):
|
|
||||||
return b.generate_dsa_private_key(parameters)
|
|
||||||
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
|
|
||||||
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
|
||||||
|
|
||||||
def generate_dsa_private_key_and_parameters(self, key_size):
|
|
||||||
for b in self._filtered_backends(DSABackend):
|
|
||||||
return b.generate_dsa_private_key_and_parameters(key_size)
|
|
||||||
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
|
|
||||||
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
|
||||||
|
|
||||||
def dsa_hash_supported(self, algorithm):
|
|
||||||
for b in self._filtered_backends(DSABackend):
|
|
||||||
return b.dsa_hash_supported(algorithm)
|
|
||||||
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
|
|
||||||
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
|
||||||
|
|
||||||
def dsa_parameters_supported(self, p, q, g):
|
|
||||||
for b in self._filtered_backends(DSABackend):
|
|
||||||
return b.dsa_parameters_supported(p, q, g)
|
|
||||||
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
|
|
||||||
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
|
||||||
|
|
||||||
def load_dsa_public_numbers(self, numbers):
|
|
||||||
for b in self._filtered_backends(DSABackend):
|
|
||||||
return b.load_dsa_public_numbers(numbers)
|
|
||||||
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
|
|
||||||
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
|
||||||
|
|
||||||
def load_dsa_private_numbers(self, numbers):
|
|
||||||
for b in self._filtered_backends(DSABackend):
|
|
||||||
return b.load_dsa_private_numbers(numbers)
|
|
||||||
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
|
|
||||||
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
|
||||||
|
|
||||||
def load_dsa_parameter_numbers(self, numbers):
|
|
||||||
for b in self._filtered_backends(DSABackend):
|
|
||||||
return b.load_dsa_parameter_numbers(numbers)
|
|
||||||
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
|
|
||||||
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
|
||||||
|
|
||||||
def cmac_algorithm_supported(self, algorithm):
|
|
||||||
return any(
|
|
||||||
b.cmac_algorithm_supported(algorithm)
|
|
||||||
for b in self._filtered_backends(CMACBackend)
|
|
||||||
)
|
|
||||||
|
|
||||||
def create_cmac_ctx(self, algorithm):
|
|
||||||
for b in self._filtered_backends(CMACBackend):
|
|
||||||
try:
|
|
||||||
return b.create_cmac_ctx(algorithm)
|
|
||||||
except UnsupportedAlgorithm:
|
|
||||||
pass
|
|
||||||
raise UnsupportedAlgorithm("This backend does not support CMAC.",
|
|
||||||
_Reasons.UNSUPPORTED_CIPHER)
|
|
||||||
|
|
||||||
def elliptic_curve_supported(self, curve):
|
|
||||||
return any(
|
|
||||||
b.elliptic_curve_supported(curve)
|
|
||||||
for b in self._filtered_backends(EllipticCurveBackend)
|
|
||||||
)
|
|
||||||
|
|
||||||
def elliptic_curve_signature_algorithm_supported(
|
|
||||||
self, signature_algorithm, curve
|
|
||||||
):
|
|
||||||
return any(
|
|
||||||
b.elliptic_curve_signature_algorithm_supported(
|
|
||||||
signature_algorithm, curve
|
|
||||||
)
|
|
||||||
for b in self._filtered_backends(EllipticCurveBackend)
|
|
||||||
)
|
|
||||||
|
|
||||||
def generate_elliptic_curve_private_key(self, curve):
|
|
||||||
for b in self._filtered_backends(EllipticCurveBackend):
|
|
||||||
try:
|
|
||||||
return b.generate_elliptic_curve_private_key(curve)
|
|
||||||
except UnsupportedAlgorithm:
|
|
||||||
continue
|
|
||||||
|
|
||||||
raise UnsupportedAlgorithm(
|
|
||||||
"This backend does not support this elliptic curve.",
|
|
||||||
_Reasons.UNSUPPORTED_ELLIPTIC_CURVE
|
|
||||||
)
|
|
||||||
|
|
||||||
def load_elliptic_curve_private_numbers(self, numbers):
|
|
||||||
for b in self._filtered_backends(EllipticCurveBackend):
|
|
||||||
try:
|
|
||||||
return b.load_elliptic_curve_private_numbers(numbers)
|
|
||||||
except UnsupportedAlgorithm:
|
|
||||||
continue
|
|
||||||
|
|
||||||
raise UnsupportedAlgorithm(
|
|
||||||
"This backend does not support this elliptic curve.",
|
|
||||||
_Reasons.UNSUPPORTED_ELLIPTIC_CURVE
|
|
||||||
)
|
|
||||||
|
|
||||||
def load_elliptic_curve_public_numbers(self, numbers):
|
|
||||||
for b in self._filtered_backends(EllipticCurveBackend):
|
|
||||||
try:
|
|
||||||
return b.load_elliptic_curve_public_numbers(numbers)
|
|
||||||
except UnsupportedAlgorithm:
|
|
||||||
continue
|
|
||||||
|
|
||||||
raise UnsupportedAlgorithm(
|
|
||||||
"This backend does not support this elliptic curve.",
|
|
||||||
_Reasons.UNSUPPORTED_ELLIPTIC_CURVE
|
|
||||||
)
|
|
||||||
|
|
||||||
def elliptic_curve_exchange_algorithm_supported(self, algorithm, curve):
|
|
||||||
return any(
|
|
||||||
b.elliptic_curve_exchange_algorithm_supported(algorithm, curve)
|
|
||||||
for b in self._filtered_backends(EllipticCurveBackend)
|
|
||||||
)
|
|
||||||
|
|
||||||
def load_pem_private_key(self, data, password):
|
|
||||||
for b in self._filtered_backends(PEMSerializationBackend):
|
|
||||||
return b.load_pem_private_key(data, password)
|
|
||||||
|
|
||||||
raise UnsupportedAlgorithm(
|
|
||||||
"This backend does not support this key serialization.",
|
|
||||||
_Reasons.UNSUPPORTED_SERIALIZATION
|
|
||||||
)
|
|
||||||
|
|
||||||
def load_pem_public_key(self, data):
|
|
||||||
for b in self._filtered_backends(PEMSerializationBackend):
|
|
||||||
return b.load_pem_public_key(data)
|
|
||||||
|
|
||||||
raise UnsupportedAlgorithm(
|
|
||||||
"This backend does not support this key serialization.",
|
|
||||||
_Reasons.UNSUPPORTED_SERIALIZATION
|
|
||||||
)
|
|
||||||
|
|
||||||
def load_der_private_key(self, data, password):
|
|
||||||
for b in self._filtered_backends(DERSerializationBackend):
|
|
||||||
return b.load_der_private_key(data, password)
|
|
||||||
|
|
||||||
raise UnsupportedAlgorithm(
|
|
||||||
"This backend does not support this key serialization.",
|
|
||||||
_Reasons.UNSUPPORTED_SERIALIZATION
|
|
||||||
)
|
|
||||||
|
|
||||||
def load_der_public_key(self, data):
|
|
||||||
for b in self._filtered_backends(DERSerializationBackend):
|
|
||||||
return b.load_der_public_key(data)
|
|
||||||
|
|
||||||
raise UnsupportedAlgorithm(
|
|
||||||
"This backend does not support this key serialization.",
|
|
||||||
_Reasons.UNSUPPORTED_SERIALIZATION
|
|
||||||
)
|
|
||||||
|
|
||||||
def load_pem_x509_certificate(self, data):
|
|
||||||
for b in self._filtered_backends(X509Backend):
|
|
||||||
return b.load_pem_x509_certificate(data)
|
|
||||||
|
|
||||||
raise UnsupportedAlgorithm(
|
|
||||||
"This backend does not support X.509.",
|
|
||||||
_Reasons.UNSUPPORTED_X509
|
|
||||||
)
|
|
||||||
|
|
||||||
def load_der_x509_certificate(self, data):
|
|
||||||
for b in self._filtered_backends(X509Backend):
|
|
||||||
return b.load_der_x509_certificate(data)
|
|
||||||
|
|
||||||
raise UnsupportedAlgorithm(
|
|
||||||
"This backend does not support X.509.",
|
|
||||||
_Reasons.UNSUPPORTED_X509
|
|
||||||
)
|
|
||||||
|
|
||||||
def load_pem_x509_crl(self, data):
|
|
||||||
for b in self._filtered_backends(X509Backend):
|
|
||||||
return b.load_pem_x509_crl(data)
|
|
||||||
|
|
||||||
raise UnsupportedAlgorithm(
|
|
||||||
"This backend does not support X.509.",
|
|
||||||
_Reasons.UNSUPPORTED_X509
|
|
||||||
)
|
|
||||||
|
|
||||||
def load_der_x509_crl(self, data):
|
|
||||||
for b in self._filtered_backends(X509Backend):
|
|
||||||
return b.load_der_x509_crl(data)
|
|
||||||
|
|
||||||
raise UnsupportedAlgorithm(
|
|
||||||
"This backend does not support X.509.",
|
|
||||||
_Reasons.UNSUPPORTED_X509
|
|
||||||
)
|
|
||||||
|
|
||||||
def load_der_x509_csr(self, data):
|
|
||||||
for b in self._filtered_backends(X509Backend):
|
|
||||||
return b.load_der_x509_csr(data)
|
|
||||||
|
|
||||||
raise UnsupportedAlgorithm(
|
|
||||||
"This backend does not support X.509.",
|
|
||||||
_Reasons.UNSUPPORTED_X509
|
|
||||||
)
|
|
||||||
|
|
||||||
def load_pem_x509_csr(self, data):
|
|
||||||
for b in self._filtered_backends(X509Backend):
|
|
||||||
return b.load_pem_x509_csr(data)
|
|
||||||
|
|
||||||
raise UnsupportedAlgorithm(
|
|
||||||
"This backend does not support X.509.",
|
|
||||||
_Reasons.UNSUPPORTED_X509
|
|
||||||
)
|
|
||||||
|
|
||||||
def create_x509_csr(self, builder, private_key, algorithm):
|
|
||||||
for b in self._filtered_backends(X509Backend):
|
|
||||||
return b.create_x509_csr(builder, private_key, algorithm)
|
|
||||||
|
|
||||||
raise UnsupportedAlgorithm(
|
|
||||||
"This backend does not support X.509.",
|
|
||||||
_Reasons.UNSUPPORTED_X509
|
|
||||||
)
|
|
||||||
|
|
||||||
def create_x509_certificate(self, builder, private_key, algorithm):
|
|
||||||
for b in self._filtered_backends(X509Backend):
|
|
||||||
return b.create_x509_certificate(builder, private_key, algorithm)
|
|
||||||
|
|
||||||
raise UnsupportedAlgorithm(
|
|
||||||
"This backend does not support X.509.",
|
|
||||||
_Reasons.UNSUPPORTED_X509
|
|
||||||
)
|
|
|
@ -1,10 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
from cryptography.hazmat.backends.openssl.backend import backend
|
|
||||||
|
|
||||||
|
|
||||||
__all__ = ["backend"]
|
|
File diff suppressed because it is too large
Load diff
|
@ -1,213 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
from cryptography import utils
|
|
||||||
from cryptography.exceptions import InvalidTag, UnsupportedAlgorithm, _Reasons
|
|
||||||
from cryptography.hazmat.primitives import ciphers
|
|
||||||
from cryptography.hazmat.primitives.ciphers import modes
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(ciphers.CipherContext)
|
|
||||||
@utils.register_interface(ciphers.AEADCipherContext)
|
|
||||||
@utils.register_interface(ciphers.AEADEncryptionContext)
|
|
||||||
class _CipherContext(object):
|
|
||||||
_ENCRYPT = 1
|
|
||||||
_DECRYPT = 0
|
|
||||||
|
|
||||||
def __init__(self, backend, cipher, mode, operation):
|
|
||||||
self._backend = backend
|
|
||||||
self._cipher = cipher
|
|
||||||
self._mode = mode
|
|
||||||
self._operation = operation
|
|
||||||
self._tag = None
|
|
||||||
|
|
||||||
if isinstance(self._cipher, ciphers.BlockCipherAlgorithm):
|
|
||||||
self._block_size = self._cipher.block_size
|
|
||||||
else:
|
|
||||||
self._block_size = 1
|
|
||||||
|
|
||||||
ctx = self._backend._lib.EVP_CIPHER_CTX_new()
|
|
||||||
ctx = self._backend._ffi.gc(
|
|
||||||
ctx, self._backend._lib.EVP_CIPHER_CTX_free
|
|
||||||
)
|
|
||||||
|
|
||||||
registry = self._backend._cipher_registry
|
|
||||||
try:
|
|
||||||
adapter = registry[type(cipher), type(mode)]
|
|
||||||
except KeyError:
|
|
||||||
raise UnsupportedAlgorithm(
|
|
||||||
"cipher {0} in {1} mode is not supported "
|
|
||||||
"by this backend.".format(
|
|
||||||
cipher.name, mode.name if mode else mode),
|
|
||||||
_Reasons.UNSUPPORTED_CIPHER
|
|
||||||
)
|
|
||||||
|
|
||||||
evp_cipher = adapter(self._backend, cipher, mode)
|
|
||||||
if evp_cipher == self._backend._ffi.NULL:
|
|
||||||
raise UnsupportedAlgorithm(
|
|
||||||
"cipher {0} in {1} mode is not supported "
|
|
||||||
"by this backend.".format(
|
|
||||||
cipher.name, mode.name if mode else mode),
|
|
||||||
_Reasons.UNSUPPORTED_CIPHER
|
|
||||||
)
|
|
||||||
|
|
||||||
if isinstance(mode, modes.ModeWithInitializationVector):
|
|
||||||
iv_nonce = mode.initialization_vector
|
|
||||||
elif isinstance(mode, modes.ModeWithNonce):
|
|
||||||
iv_nonce = mode.nonce
|
|
||||||
else:
|
|
||||||
iv_nonce = self._backend._ffi.NULL
|
|
||||||
# begin init with cipher and operation type
|
|
||||||
res = self._backend._lib.EVP_CipherInit_ex(ctx, evp_cipher,
|
|
||||||
self._backend._ffi.NULL,
|
|
||||||
self._backend._ffi.NULL,
|
|
||||||
self._backend._ffi.NULL,
|
|
||||||
operation)
|
|
||||||
self._backend.openssl_assert(res != 0)
|
|
||||||
# set the key length to handle variable key ciphers
|
|
||||||
res = self._backend._lib.EVP_CIPHER_CTX_set_key_length(
|
|
||||||
ctx, len(cipher.key)
|
|
||||||
)
|
|
||||||
self._backend.openssl_assert(res != 0)
|
|
||||||
if isinstance(mode, modes.GCM):
|
|
||||||
res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
|
|
||||||
ctx, self._backend._lib.EVP_CTRL_GCM_SET_IVLEN,
|
|
||||||
len(iv_nonce), self._backend._ffi.NULL
|
|
||||||
)
|
|
||||||
self._backend.openssl_assert(res != 0)
|
|
||||||
if operation == self._DECRYPT:
|
|
||||||
res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
|
|
||||||
ctx, self._backend._lib.EVP_CTRL_GCM_SET_TAG,
|
|
||||||
len(mode.tag), mode.tag
|
|
||||||
)
|
|
||||||
self._backend.openssl_assert(res != 0)
|
|
||||||
|
|
||||||
# pass key/iv
|
|
||||||
res = self._backend._lib.EVP_CipherInit_ex(
|
|
||||||
ctx,
|
|
||||||
self._backend._ffi.NULL,
|
|
||||||
self._backend._ffi.NULL,
|
|
||||||
cipher.key,
|
|
||||||
iv_nonce,
|
|
||||||
operation
|
|
||||||
)
|
|
||||||
self._backend.openssl_assert(res != 0)
|
|
||||||
# We purposely disable padding here as it's handled higher up in the
|
|
||||||
# API.
|
|
||||||
self._backend._lib.EVP_CIPHER_CTX_set_padding(ctx, 0)
|
|
||||||
self._ctx = ctx
|
|
||||||
|
|
||||||
def update(self, data):
|
|
||||||
# OpenSSL 0.9.8e has an assertion in its EVP code that causes it
|
|
||||||
# to SIGABRT if you call update with an empty byte string. This can be
|
|
||||||
# removed when we drop support for 0.9.8e (CentOS/RHEL 5). This branch
|
|
||||||
# should be taken only when length is zero and mode is not GCM because
|
|
||||||
# AES GCM can return improper tag values if you don't call update
|
|
||||||
# with empty plaintext when authenticating AAD for ...reasons.
|
|
||||||
if len(data) == 0 and not isinstance(self._mode, modes.GCM):
|
|
||||||
return b""
|
|
||||||
|
|
||||||
buf = self._backend._ffi.new("unsigned char[]",
|
|
||||||
len(data) + self._block_size - 1)
|
|
||||||
outlen = self._backend._ffi.new("int *")
|
|
||||||
res = self._backend._lib.EVP_CipherUpdate(self._ctx, buf, outlen, data,
|
|
||||||
len(data))
|
|
||||||
self._backend.openssl_assert(res != 0)
|
|
||||||
return self._backend._ffi.buffer(buf)[:outlen[0]]
|
|
||||||
|
|
||||||
def finalize(self):
|
|
||||||
# OpenSSL 1.0.1 on Ubuntu 12.04 (and possibly other distributions)
|
|
||||||
# appears to have a bug where you must make at least one call to update
|
|
||||||
# even if you are only using authenticate_additional_data or the
|
|
||||||
# GCM tag will be wrong. An (empty) call to update resolves this
|
|
||||||
# and is harmless for all other versions of OpenSSL.
|
|
||||||
if isinstance(self._mode, modes.GCM):
|
|
||||||
self.update(b"")
|
|
||||||
|
|
||||||
buf = self._backend._ffi.new("unsigned char[]", self._block_size)
|
|
||||||
outlen = self._backend._ffi.new("int *")
|
|
||||||
res = self._backend._lib.EVP_CipherFinal_ex(self._ctx, buf, outlen)
|
|
||||||
if res == 0:
|
|
||||||
errors = self._backend._consume_errors()
|
|
||||||
|
|
||||||
if not errors and isinstance(self._mode, modes.GCM):
|
|
||||||
raise InvalidTag
|
|
||||||
|
|
||||||
self._backend.openssl_assert(
|
|
||||||
errors[0][1:] == (
|
|
||||||
self._backend._lib.ERR_LIB_EVP,
|
|
||||||
self._backend._lib.EVP_F_EVP_ENCRYPTFINAL_EX,
|
|
||||||
self._backend._lib.EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH
|
|
||||||
) or errors[0][1:] == (
|
|
||||||
self._backend._lib.ERR_LIB_EVP,
|
|
||||||
self._backend._lib.EVP_F_EVP_DECRYPTFINAL_EX,
|
|
||||||
self._backend._lib.EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH
|
|
||||||
)
|
|
||||||
)
|
|
||||||
raise ValueError(
|
|
||||||
"The length of the provided data is not a multiple of "
|
|
||||||
"the block length."
|
|
||||||
)
|
|
||||||
|
|
||||||
if (isinstance(self._mode, modes.GCM) and
|
|
||||||
self._operation == self._ENCRYPT):
|
|
||||||
block_byte_size = self._block_size // 8
|
|
||||||
tag_buf = self._backend._ffi.new(
|
|
||||||
"unsigned char[]", block_byte_size
|
|
||||||
)
|
|
||||||
res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
|
|
||||||
self._ctx, self._backend._lib.EVP_CTRL_GCM_GET_TAG,
|
|
||||||
block_byte_size, tag_buf
|
|
||||||
)
|
|
||||||
self._backend.openssl_assert(res != 0)
|
|
||||||
self._tag = self._backend._ffi.buffer(tag_buf)[:]
|
|
||||||
|
|
||||||
res = self._backend._lib.EVP_CIPHER_CTX_cleanup(self._ctx)
|
|
||||||
self._backend.openssl_assert(res == 1)
|
|
||||||
return self._backend._ffi.buffer(buf)[:outlen[0]]
|
|
||||||
|
|
||||||
def authenticate_additional_data(self, data):
|
|
||||||
outlen = self._backend._ffi.new("int *")
|
|
||||||
res = self._backend._lib.EVP_CipherUpdate(
|
|
||||||
self._ctx, self._backend._ffi.NULL, outlen, data, len(data)
|
|
||||||
)
|
|
||||||
self._backend.openssl_assert(res != 0)
|
|
||||||
|
|
||||||
tag = utils.read_only_property("_tag")
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(ciphers.CipherContext)
|
|
||||||
class _AESCTRCipherContext(object):
|
|
||||||
"""
|
|
||||||
This is needed to provide support for AES CTR mode in OpenSSL 0.9.8. It can
|
|
||||||
be removed when we drop 0.9.8 support (RHEL5 extended life ends 2020).
|
|
||||||
"""
|
|
||||||
def __init__(self, backend, cipher, mode):
|
|
||||||
self._backend = backend
|
|
||||||
|
|
||||||
self._key = self._backend._ffi.new("AES_KEY *")
|
|
||||||
res = self._backend._lib.AES_set_encrypt_key(
|
|
||||||
cipher.key, len(cipher.key) * 8, self._key
|
|
||||||
)
|
|
||||||
self._backend.openssl_assert(res == 0)
|
|
||||||
self._ecount = self._backend._ffi.new("char[]", 16)
|
|
||||||
self._nonce = self._backend._ffi.new("char[16]", mode.nonce)
|
|
||||||
self._num = self._backend._ffi.new("unsigned int *", 0)
|
|
||||||
|
|
||||||
def update(self, data):
|
|
||||||
buf = self._backend._ffi.new("unsigned char[]", len(data))
|
|
||||||
self._backend._lib.AES_ctr128_encrypt(
|
|
||||||
data, buf, len(data), self._key, self._nonce,
|
|
||||||
self._ecount, self._num
|
|
||||||
)
|
|
||||||
return self._backend._ffi.buffer(buf)[:]
|
|
||||||
|
|
||||||
def finalize(self):
|
|
||||||
self._key = None
|
|
||||||
self._ecount = None
|
|
||||||
self._nonce = None
|
|
||||||
self._num = None
|
|
||||||
return b""
|
|
|
@ -1,80 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
|
|
||||||
from cryptography import utils
|
|
||||||
from cryptography.exceptions import (
|
|
||||||
InvalidSignature, UnsupportedAlgorithm, _Reasons
|
|
||||||
)
|
|
||||||
from cryptography.hazmat.primitives import constant_time, interfaces
|
|
||||||
from cryptography.hazmat.primitives.ciphers.modes import CBC
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(interfaces.MACContext)
|
|
||||||
class _CMACContext(object):
|
|
||||||
def __init__(self, backend, algorithm, ctx=None):
|
|
||||||
if not backend.cmac_algorithm_supported(algorithm):
|
|
||||||
raise UnsupportedAlgorithm("This backend does not support CMAC.",
|
|
||||||
_Reasons.UNSUPPORTED_CIPHER)
|
|
||||||
|
|
||||||
self._backend = backend
|
|
||||||
self._key = algorithm.key
|
|
||||||
self._algorithm = algorithm
|
|
||||||
self._output_length = algorithm.block_size // 8
|
|
||||||
|
|
||||||
if ctx is None:
|
|
||||||
registry = self._backend._cipher_registry
|
|
||||||
adapter = registry[type(algorithm), CBC]
|
|
||||||
|
|
||||||
evp_cipher = adapter(self._backend, algorithm, CBC)
|
|
||||||
|
|
||||||
ctx = self._backend._lib.CMAC_CTX_new()
|
|
||||||
|
|
||||||
self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
|
|
||||||
ctx = self._backend._ffi.gc(ctx, self._backend._lib.CMAC_CTX_free)
|
|
||||||
|
|
||||||
self._backend._lib.CMAC_Init(
|
|
||||||
ctx, self._key, len(self._key),
|
|
||||||
evp_cipher, self._backend._ffi.NULL
|
|
||||||
)
|
|
||||||
|
|
||||||
self._ctx = ctx
|
|
||||||
|
|
||||||
algorithm = utils.read_only_property("_algorithm")
|
|
||||||
|
|
||||||
def update(self, data):
|
|
||||||
res = self._backend._lib.CMAC_Update(self._ctx, data, len(data))
|
|
||||||
self._backend.openssl_assert(res == 1)
|
|
||||||
|
|
||||||
def finalize(self):
|
|
||||||
buf = self._backend._ffi.new("unsigned char[]", self._output_length)
|
|
||||||
length = self._backend._ffi.new("size_t *", self._output_length)
|
|
||||||
res = self._backend._lib.CMAC_Final(
|
|
||||||
self._ctx, buf, length
|
|
||||||
)
|
|
||||||
self._backend.openssl_assert(res == 1)
|
|
||||||
|
|
||||||
self._ctx = None
|
|
||||||
|
|
||||||
return self._backend._ffi.buffer(buf)[:]
|
|
||||||
|
|
||||||
def copy(self):
|
|
||||||
copied_ctx = self._backend._lib.CMAC_CTX_new()
|
|
||||||
copied_ctx = self._backend._ffi.gc(
|
|
||||||
copied_ctx, self._backend._lib.CMAC_CTX_free
|
|
||||||
)
|
|
||||||
res = self._backend._lib.CMAC_CTX_copy(
|
|
||||||
copied_ctx, self._ctx
|
|
||||||
)
|
|
||||||
self._backend.openssl_assert(res == 1)
|
|
||||||
return _CMACContext(
|
|
||||||
self._backend, self._algorithm, ctx=copied_ctx
|
|
||||||
)
|
|
||||||
|
|
||||||
def verify(self, signature):
|
|
||||||
digest = self.finalize()
|
|
||||||
if not constant_time.bytes_eq(digest, signature):
|
|
||||||
raise InvalidSignature("Signature did not match digest.")
|
|
|
@ -1,218 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
from cryptography import utils
|
|
||||||
from cryptography.exceptions import InvalidSignature
|
|
||||||
from cryptography.hazmat.backends.openssl.utils import _truncate_digest
|
|
||||||
from cryptography.hazmat.primitives import hashes, serialization
|
|
||||||
from cryptography.hazmat.primitives.asymmetric import (
|
|
||||||
AsymmetricSignatureContext, AsymmetricVerificationContext, dsa
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def _truncate_digest_for_dsa(dsa_cdata, digest, backend):
|
|
||||||
"""
|
|
||||||
This function truncates digests that are longer than a given DS
|
|
||||||
key's length so they can be signed. OpenSSL does this for us in
|
|
||||||
1.0.0c+ and it isn't needed in 0.9.8, but that leaves us with three
|
|
||||||
releases (1.0.0, 1.0.0a, and 1.0.0b) where this is a problem. This
|
|
||||||
truncation is not required in 0.9.8 because DSA is limited to SHA-1.
|
|
||||||
"""
|
|
||||||
|
|
||||||
order_bits = backend._lib.BN_num_bits(dsa_cdata.q)
|
|
||||||
return _truncate_digest(digest, order_bits)
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(AsymmetricVerificationContext)
|
|
||||||
class _DSAVerificationContext(object):
|
|
||||||
def __init__(self, backend, public_key, signature, algorithm):
|
|
||||||
self._backend = backend
|
|
||||||
self._public_key = public_key
|
|
||||||
self._signature = signature
|
|
||||||
self._algorithm = algorithm
|
|
||||||
|
|
||||||
self._hash_ctx = hashes.Hash(self._algorithm, self._backend)
|
|
||||||
|
|
||||||
def update(self, data):
|
|
||||||
self._hash_ctx.update(data)
|
|
||||||
|
|
||||||
def verify(self):
|
|
||||||
data_to_verify = self._hash_ctx.finalize()
|
|
||||||
|
|
||||||
data_to_verify = _truncate_digest_for_dsa(
|
|
||||||
self._public_key._dsa_cdata, data_to_verify, self._backend
|
|
||||||
)
|
|
||||||
|
|
||||||
# The first parameter passed to DSA_verify is unused by OpenSSL but
|
|
||||||
# must be an integer.
|
|
||||||
res = self._backend._lib.DSA_verify(
|
|
||||||
0, data_to_verify, len(data_to_verify), self._signature,
|
|
||||||
len(self._signature), self._public_key._dsa_cdata)
|
|
||||||
|
|
||||||
if res != 1:
|
|
||||||
self._backend._consume_errors()
|
|
||||||
raise InvalidSignature
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(AsymmetricSignatureContext)
|
|
||||||
class _DSASignatureContext(object):
|
|
||||||
def __init__(self, backend, private_key, algorithm):
|
|
||||||
self._backend = backend
|
|
||||||
self._private_key = private_key
|
|
||||||
self._algorithm = algorithm
|
|
||||||
self._hash_ctx = hashes.Hash(self._algorithm, self._backend)
|
|
||||||
|
|
||||||
def update(self, data):
|
|
||||||
self._hash_ctx.update(data)
|
|
||||||
|
|
||||||
def finalize(self):
|
|
||||||
data_to_sign = self._hash_ctx.finalize()
|
|
||||||
data_to_sign = _truncate_digest_for_dsa(
|
|
||||||
self._private_key._dsa_cdata, data_to_sign, self._backend
|
|
||||||
)
|
|
||||||
sig_buf_len = self._backend._lib.DSA_size(self._private_key._dsa_cdata)
|
|
||||||
sig_buf = self._backend._ffi.new("unsigned char[]", sig_buf_len)
|
|
||||||
buflen = self._backend._ffi.new("unsigned int *")
|
|
||||||
|
|
||||||
# The first parameter passed to DSA_sign is unused by OpenSSL but
|
|
||||||
# must be an integer.
|
|
||||||
res = self._backend._lib.DSA_sign(
|
|
||||||
0, data_to_sign, len(data_to_sign), sig_buf,
|
|
||||||
buflen, self._private_key._dsa_cdata)
|
|
||||||
self._backend.openssl_assert(res == 1)
|
|
||||||
self._backend.openssl_assert(buflen[0])
|
|
||||||
|
|
||||||
return self._backend._ffi.buffer(sig_buf)[:buflen[0]]
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(dsa.DSAParametersWithNumbers)
|
|
||||||
class _DSAParameters(object):
|
|
||||||
def __init__(self, backend, dsa_cdata):
|
|
||||||
self._backend = backend
|
|
||||||
self._dsa_cdata = dsa_cdata
|
|
||||||
|
|
||||||
def parameter_numbers(self):
|
|
||||||
return dsa.DSAParameterNumbers(
|
|
||||||
p=self._backend._bn_to_int(self._dsa_cdata.p),
|
|
||||||
q=self._backend._bn_to_int(self._dsa_cdata.q),
|
|
||||||
g=self._backend._bn_to_int(self._dsa_cdata.g)
|
|
||||||
)
|
|
||||||
|
|
||||||
def generate_private_key(self):
|
|
||||||
return self._backend.generate_dsa_private_key(self)
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(dsa.DSAPrivateKeyWithSerialization)
|
|
||||||
class _DSAPrivateKey(object):
|
|
||||||
def __init__(self, backend, dsa_cdata, evp_pkey):
|
|
||||||
self._backend = backend
|
|
||||||
self._dsa_cdata = dsa_cdata
|
|
||||||
self._evp_pkey = evp_pkey
|
|
||||||
self._key_size = self._backend._lib.BN_num_bits(self._dsa_cdata.p)
|
|
||||||
|
|
||||||
key_size = utils.read_only_property("_key_size")
|
|
||||||
|
|
||||||
def signer(self, signature_algorithm):
|
|
||||||
return _DSASignatureContext(self._backend, self, signature_algorithm)
|
|
||||||
|
|
||||||
def private_numbers(self):
|
|
||||||
return dsa.DSAPrivateNumbers(
|
|
||||||
public_numbers=dsa.DSAPublicNumbers(
|
|
||||||
parameter_numbers=dsa.DSAParameterNumbers(
|
|
||||||
p=self._backend._bn_to_int(self._dsa_cdata.p),
|
|
||||||
q=self._backend._bn_to_int(self._dsa_cdata.q),
|
|
||||||
g=self._backend._bn_to_int(self._dsa_cdata.g)
|
|
||||||
),
|
|
||||||
y=self._backend._bn_to_int(self._dsa_cdata.pub_key)
|
|
||||||
),
|
|
||||||
x=self._backend._bn_to_int(self._dsa_cdata.priv_key)
|
|
||||||
)
|
|
||||||
|
|
||||||
def public_key(self):
|
|
||||||
dsa_cdata = self._backend._lib.DSA_new()
|
|
||||||
self._backend.openssl_assert(dsa_cdata != self._backend._ffi.NULL)
|
|
||||||
dsa_cdata = self._backend._ffi.gc(
|
|
||||||
dsa_cdata, self._backend._lib.DSA_free
|
|
||||||
)
|
|
||||||
dsa_cdata.p = self._backend._lib.BN_dup(self._dsa_cdata.p)
|
|
||||||
dsa_cdata.q = self._backend._lib.BN_dup(self._dsa_cdata.q)
|
|
||||||
dsa_cdata.g = self._backend._lib.BN_dup(self._dsa_cdata.g)
|
|
||||||
dsa_cdata.pub_key = self._backend._lib.BN_dup(self._dsa_cdata.pub_key)
|
|
||||||
evp_pkey = self._backend._dsa_cdata_to_evp_pkey(dsa_cdata)
|
|
||||||
return _DSAPublicKey(self._backend, dsa_cdata, evp_pkey)
|
|
||||||
|
|
||||||
def parameters(self):
|
|
||||||
dsa_cdata = self._backend._lib.DSA_new()
|
|
||||||
self._backend.openssl_assert(dsa_cdata != self._backend._ffi.NULL)
|
|
||||||
dsa_cdata = self._backend._ffi.gc(
|
|
||||||
dsa_cdata, self._backend._lib.DSA_free
|
|
||||||
)
|
|
||||||
dsa_cdata.p = self._backend._lib.BN_dup(self._dsa_cdata.p)
|
|
||||||
dsa_cdata.q = self._backend._lib.BN_dup(self._dsa_cdata.q)
|
|
||||||
dsa_cdata.g = self._backend._lib.BN_dup(self._dsa_cdata.g)
|
|
||||||
return _DSAParameters(self._backend, dsa_cdata)
|
|
||||||
|
|
||||||
def private_bytes(self, encoding, format, encryption_algorithm):
|
|
||||||
return self._backend._private_key_bytes(
|
|
||||||
encoding,
|
|
||||||
format,
|
|
||||||
encryption_algorithm,
|
|
||||||
self._evp_pkey,
|
|
||||||
self._dsa_cdata
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(dsa.DSAPublicKeyWithSerialization)
|
|
||||||
class _DSAPublicKey(object):
|
|
||||||
def __init__(self, backend, dsa_cdata, evp_pkey):
|
|
||||||
self._backend = backend
|
|
||||||
self._dsa_cdata = dsa_cdata
|
|
||||||
self._evp_pkey = evp_pkey
|
|
||||||
self._key_size = self._backend._lib.BN_num_bits(self._dsa_cdata.p)
|
|
||||||
|
|
||||||
key_size = utils.read_only_property("_key_size")
|
|
||||||
|
|
||||||
def verifier(self, signature, signature_algorithm):
|
|
||||||
if not isinstance(signature, bytes):
|
|
||||||
raise TypeError("signature must be bytes.")
|
|
||||||
|
|
||||||
return _DSAVerificationContext(
|
|
||||||
self._backend, self, signature, signature_algorithm
|
|
||||||
)
|
|
||||||
|
|
||||||
def public_numbers(self):
|
|
||||||
return dsa.DSAPublicNumbers(
|
|
||||||
parameter_numbers=dsa.DSAParameterNumbers(
|
|
||||||
p=self._backend._bn_to_int(self._dsa_cdata.p),
|
|
||||||
q=self._backend._bn_to_int(self._dsa_cdata.q),
|
|
||||||
g=self._backend._bn_to_int(self._dsa_cdata.g)
|
|
||||||
),
|
|
||||||
y=self._backend._bn_to_int(self._dsa_cdata.pub_key)
|
|
||||||
)
|
|
||||||
|
|
||||||
def parameters(self):
|
|
||||||
dsa_cdata = self._backend._lib.DSA_new()
|
|
||||||
self._backend.openssl_assert(dsa_cdata != self._backend._ffi.NULL)
|
|
||||||
dsa_cdata = self._backend._ffi.gc(
|
|
||||||
dsa_cdata, self._backend._lib.DSA_free
|
|
||||||
)
|
|
||||||
dsa_cdata.p = self._backend._lib.BN_dup(self._dsa_cdata.p)
|
|
||||||
dsa_cdata.q = self._backend._lib.BN_dup(self._dsa_cdata.q)
|
|
||||||
dsa_cdata.g = self._backend._lib.BN_dup(self._dsa_cdata.g)
|
|
||||||
return _DSAParameters(self._backend, dsa_cdata)
|
|
||||||
|
|
||||||
def public_bytes(self, encoding, format):
|
|
||||||
if format is serialization.PublicFormat.PKCS1:
|
|
||||||
raise ValueError(
|
|
||||||
"DSA public keys do not support PKCS1 serialization"
|
|
||||||
)
|
|
||||||
|
|
||||||
return self._backend._public_key_bytes(
|
|
||||||
encoding,
|
|
||||||
format,
|
|
||||||
self._evp_pkey,
|
|
||||||
None
|
|
||||||
)
|
|
|
@ -1,299 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
from cryptography import utils
|
|
||||||
from cryptography.exceptions import (
|
|
||||||
InvalidSignature, UnsupportedAlgorithm, _Reasons
|
|
||||||
)
|
|
||||||
from cryptography.hazmat.backends.openssl.utils import _truncate_digest
|
|
||||||
from cryptography.hazmat.primitives import hashes, serialization
|
|
||||||
from cryptography.hazmat.primitives.asymmetric import (
|
|
||||||
AsymmetricSignatureContext, AsymmetricVerificationContext, ec
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def _truncate_digest_for_ecdsa(ec_key_cdata, digest, backend):
|
|
||||||
"""
|
|
||||||
This function truncates digests that are longer than a given elliptic
|
|
||||||
curve key's length so they can be signed. Since elliptic curve keys are
|
|
||||||
much shorter than RSA keys many digests (e.g. SHA-512) may require
|
|
||||||
truncation.
|
|
||||||
"""
|
|
||||||
|
|
||||||
_lib = backend._lib
|
|
||||||
_ffi = backend._ffi
|
|
||||||
|
|
||||||
group = _lib.EC_KEY_get0_group(ec_key_cdata)
|
|
||||||
|
|
||||||
with backend._tmp_bn_ctx() as bn_ctx:
|
|
||||||
order = _lib.BN_CTX_get(bn_ctx)
|
|
||||||
backend.openssl_assert(order != _ffi.NULL)
|
|
||||||
|
|
||||||
res = _lib.EC_GROUP_get_order(group, order, bn_ctx)
|
|
||||||
backend.openssl_assert(res == 1)
|
|
||||||
|
|
||||||
order_bits = _lib.BN_num_bits(order)
|
|
||||||
|
|
||||||
return _truncate_digest(digest, order_bits)
|
|
||||||
|
|
||||||
|
|
||||||
def _ec_key_curve_sn(backend, ec_key):
|
|
||||||
group = backend._lib.EC_KEY_get0_group(ec_key)
|
|
||||||
backend.openssl_assert(group != backend._ffi.NULL)
|
|
||||||
|
|
||||||
nid = backend._lib.EC_GROUP_get_curve_name(group)
|
|
||||||
# The following check is to find EC keys with unnamed curves and raise
|
|
||||||
# an error for now.
|
|
||||||
if nid == backend._lib.NID_undef:
|
|
||||||
raise NotImplementedError(
|
|
||||||
"ECDSA certificates with unnamed curves are unsupported "
|
|
||||||
"at this time"
|
|
||||||
)
|
|
||||||
|
|
||||||
curve_name = backend._lib.OBJ_nid2sn(nid)
|
|
||||||
backend.openssl_assert(curve_name != backend._ffi.NULL)
|
|
||||||
|
|
||||||
sn = backend._ffi.string(curve_name).decode('ascii')
|
|
||||||
return sn
|
|
||||||
|
|
||||||
|
|
||||||
def _mark_asn1_named_ec_curve(backend, ec_cdata):
|
|
||||||
"""
|
|
||||||
Set the named curve flag on the EC_KEY. This causes OpenSSL to
|
|
||||||
serialize EC keys along with their curve OID which makes
|
|
||||||
deserialization easier.
|
|
||||||
"""
|
|
||||||
|
|
||||||
backend._lib.EC_KEY_set_asn1_flag(
|
|
||||||
ec_cdata, backend._lib.OPENSSL_EC_NAMED_CURVE
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def _sn_to_elliptic_curve(backend, sn):
|
|
||||||
try:
|
|
||||||
return ec._CURVE_TYPES[sn]()
|
|
||||||
except KeyError:
|
|
||||||
raise UnsupportedAlgorithm(
|
|
||||||
"{0} is not a supported elliptic curve".format(sn),
|
|
||||||
_Reasons.UNSUPPORTED_ELLIPTIC_CURVE
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(AsymmetricSignatureContext)
|
|
||||||
class _ECDSASignatureContext(object):
|
|
||||||
def __init__(self, backend, private_key, algorithm):
|
|
||||||
self._backend = backend
|
|
||||||
self._private_key = private_key
|
|
||||||
self._digest = hashes.Hash(algorithm, backend)
|
|
||||||
|
|
||||||
def update(self, data):
|
|
||||||
self._digest.update(data)
|
|
||||||
|
|
||||||
def finalize(self):
|
|
||||||
ec_key = self._private_key._ec_key
|
|
||||||
|
|
||||||
digest = self._digest.finalize()
|
|
||||||
|
|
||||||
digest = _truncate_digest_for_ecdsa(ec_key, digest, self._backend)
|
|
||||||
|
|
||||||
max_size = self._backend._lib.ECDSA_size(ec_key)
|
|
||||||
self._backend.openssl_assert(max_size > 0)
|
|
||||||
|
|
||||||
sigbuf = self._backend._ffi.new("char[]", max_size)
|
|
||||||
siglen_ptr = self._backend._ffi.new("unsigned int[]", 1)
|
|
||||||
res = self._backend._lib.ECDSA_sign(
|
|
||||||
0,
|
|
||||||
digest,
|
|
||||||
len(digest),
|
|
||||||
sigbuf,
|
|
||||||
siglen_ptr,
|
|
||||||
ec_key
|
|
||||||
)
|
|
||||||
self._backend.openssl_assert(res == 1)
|
|
||||||
return self._backend._ffi.buffer(sigbuf)[:siglen_ptr[0]]
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(AsymmetricVerificationContext)
|
|
||||||
class _ECDSAVerificationContext(object):
|
|
||||||
def __init__(self, backend, public_key, signature, algorithm):
|
|
||||||
self._backend = backend
|
|
||||||
self._public_key = public_key
|
|
||||||
self._signature = signature
|
|
||||||
self._digest = hashes.Hash(algorithm, backend)
|
|
||||||
|
|
||||||
def update(self, data):
|
|
||||||
self._digest.update(data)
|
|
||||||
|
|
||||||
def verify(self):
|
|
||||||
ec_key = self._public_key._ec_key
|
|
||||||
|
|
||||||
digest = self._digest.finalize()
|
|
||||||
|
|
||||||
digest = _truncate_digest_for_ecdsa(ec_key, digest, self._backend)
|
|
||||||
|
|
||||||
res = self._backend._lib.ECDSA_verify(
|
|
||||||
0,
|
|
||||||
digest,
|
|
||||||
len(digest),
|
|
||||||
self._signature,
|
|
||||||
len(self._signature),
|
|
||||||
ec_key
|
|
||||||
)
|
|
||||||
if res != 1:
|
|
||||||
self._backend._consume_errors()
|
|
||||||
raise InvalidSignature
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(ec.EllipticCurvePrivateKeyWithSerialization)
|
|
||||||
class _EllipticCurvePrivateKey(object):
|
|
||||||
def __init__(self, backend, ec_key_cdata, evp_pkey):
|
|
||||||
self._backend = backend
|
|
||||||
_mark_asn1_named_ec_curve(backend, ec_key_cdata)
|
|
||||||
self._ec_key = ec_key_cdata
|
|
||||||
self._evp_pkey = evp_pkey
|
|
||||||
|
|
||||||
sn = _ec_key_curve_sn(backend, ec_key_cdata)
|
|
||||||
self._curve = _sn_to_elliptic_curve(backend, sn)
|
|
||||||
|
|
||||||
curve = utils.read_only_property("_curve")
|
|
||||||
|
|
||||||
def signer(self, signature_algorithm):
|
|
||||||
if isinstance(signature_algorithm, ec.ECDSA):
|
|
||||||
return _ECDSASignatureContext(
|
|
||||||
self._backend, self, signature_algorithm.algorithm
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
raise UnsupportedAlgorithm(
|
|
||||||
"Unsupported elliptic curve signature algorithm.",
|
|
||||||
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
|
||||||
|
|
||||||
def exchange(self, algorithm, peer_public_key):
|
|
||||||
if not (
|
|
||||||
self._backend.elliptic_curve_exchange_algorithm_supported(
|
|
||||||
algorithm, self.curve
|
|
||||||
)
|
|
||||||
):
|
|
||||||
raise UnsupportedAlgorithm(
|
|
||||||
"This backend does not support the ECDH algorithm.",
|
|
||||||
_Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM
|
|
||||||
)
|
|
||||||
|
|
||||||
group = self._backend._lib.EC_KEY_get0_group(self._ec_key)
|
|
||||||
z_len = (self._backend._lib.EC_GROUP_get_degree(group) + 7) // 8
|
|
||||||
self._backend.openssl_assert(z_len > 0)
|
|
||||||
z_buf = self._backend._ffi.new("uint8_t[]", z_len)
|
|
||||||
peer_key = self._backend._lib.EC_KEY_get0_public_key(
|
|
||||||
peer_public_key._ec_key
|
|
||||||
)
|
|
||||||
|
|
||||||
r = self._backend._lib.ECDH_compute_key(
|
|
||||||
z_buf, z_len, peer_key, self._ec_key, self._backend._ffi.NULL
|
|
||||||
)
|
|
||||||
self._backend.openssl_assert(r > 0)
|
|
||||||
return self._backend._ffi.buffer(z_buf)[:z_len]
|
|
||||||
|
|
||||||
def public_key(self):
|
|
||||||
group = self._backend._lib.EC_KEY_get0_group(self._ec_key)
|
|
||||||
self._backend.openssl_assert(group != self._backend._ffi.NULL)
|
|
||||||
|
|
||||||
curve_nid = self._backend._lib.EC_GROUP_get_curve_name(group)
|
|
||||||
|
|
||||||
public_ec_key = self._backend._lib.EC_KEY_new_by_curve_name(curve_nid)
|
|
||||||
self._backend.openssl_assert(public_ec_key != self._backend._ffi.NULL)
|
|
||||||
public_ec_key = self._backend._ffi.gc(
|
|
||||||
public_ec_key, self._backend._lib.EC_KEY_free
|
|
||||||
)
|
|
||||||
|
|
||||||
point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key)
|
|
||||||
self._backend.openssl_assert(point != self._backend._ffi.NULL)
|
|
||||||
|
|
||||||
res = self._backend._lib.EC_KEY_set_public_key(public_ec_key, point)
|
|
||||||
self._backend.openssl_assert(res == 1)
|
|
||||||
|
|
||||||
evp_pkey = self._backend._ec_cdata_to_evp_pkey(public_ec_key)
|
|
||||||
|
|
||||||
return _EllipticCurvePublicKey(self._backend, public_ec_key, evp_pkey)
|
|
||||||
|
|
||||||
def private_numbers(self):
|
|
||||||
bn = self._backend._lib.EC_KEY_get0_private_key(self._ec_key)
|
|
||||||
private_value = self._backend._bn_to_int(bn)
|
|
||||||
return ec.EllipticCurvePrivateNumbers(
|
|
||||||
private_value=private_value,
|
|
||||||
public_numbers=self.public_key().public_numbers()
|
|
||||||
)
|
|
||||||
|
|
||||||
def private_bytes(self, encoding, format, encryption_algorithm):
|
|
||||||
return self._backend._private_key_bytes(
|
|
||||||
encoding,
|
|
||||||
format,
|
|
||||||
encryption_algorithm,
|
|
||||||
self._evp_pkey,
|
|
||||||
self._ec_key
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(ec.EllipticCurvePublicKeyWithSerialization)
|
|
||||||
class _EllipticCurvePublicKey(object):
|
|
||||||
def __init__(self, backend, ec_key_cdata, evp_pkey):
|
|
||||||
self._backend = backend
|
|
||||||
_mark_asn1_named_ec_curve(backend, ec_key_cdata)
|
|
||||||
self._ec_key = ec_key_cdata
|
|
||||||
self._evp_pkey = evp_pkey
|
|
||||||
|
|
||||||
sn = _ec_key_curve_sn(backend, ec_key_cdata)
|
|
||||||
self._curve = _sn_to_elliptic_curve(backend, sn)
|
|
||||||
|
|
||||||
curve = utils.read_only_property("_curve")
|
|
||||||
|
|
||||||
def verifier(self, signature, signature_algorithm):
|
|
||||||
if not isinstance(signature, bytes):
|
|
||||||
raise TypeError("signature must be bytes.")
|
|
||||||
|
|
||||||
if isinstance(signature_algorithm, ec.ECDSA):
|
|
||||||
return _ECDSAVerificationContext(
|
|
||||||
self._backend, self, signature, signature_algorithm.algorithm
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
raise UnsupportedAlgorithm(
|
|
||||||
"Unsupported elliptic curve signature algorithm.",
|
|
||||||
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
|
||||||
|
|
||||||
def public_numbers(self):
|
|
||||||
set_func, get_func, group = (
|
|
||||||
self._backend._ec_key_determine_group_get_set_funcs(self._ec_key)
|
|
||||||
)
|
|
||||||
point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key)
|
|
||||||
self._backend.openssl_assert(point != self._backend._ffi.NULL)
|
|
||||||
|
|
||||||
with self._backend._tmp_bn_ctx() as bn_ctx:
|
|
||||||
bn_x = self._backend._lib.BN_CTX_get(bn_ctx)
|
|
||||||
bn_y = self._backend._lib.BN_CTX_get(bn_ctx)
|
|
||||||
|
|
||||||
res = get_func(group, point, bn_x, bn_y, bn_ctx)
|
|
||||||
self._backend.openssl_assert(res == 1)
|
|
||||||
|
|
||||||
x = self._backend._bn_to_int(bn_x)
|
|
||||||
y = self._backend._bn_to_int(bn_y)
|
|
||||||
|
|
||||||
return ec.EllipticCurvePublicNumbers(
|
|
||||||
x=x,
|
|
||||||
y=y,
|
|
||||||
curve=self._curve
|
|
||||||
)
|
|
||||||
|
|
||||||
def public_bytes(self, encoding, format):
|
|
||||||
if format is serialization.PublicFormat.PKCS1:
|
|
||||||
raise ValueError(
|
|
||||||
"EC public keys do not support PKCS1 serialization"
|
|
||||||
)
|
|
||||||
|
|
||||||
return self._backend._public_key_bytes(
|
|
||||||
encoding,
|
|
||||||
format,
|
|
||||||
self._evp_pkey,
|
|
||||||
None
|
|
||||||
)
|
|
|
@ -1,62 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
|
|
||||||
from cryptography import utils
|
|
||||||
from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
|
|
||||||
from cryptography.hazmat.primitives import hashes
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(hashes.HashContext)
|
|
||||||
class _HashContext(object):
|
|
||||||
def __init__(self, backend, algorithm, ctx=None):
|
|
||||||
self._algorithm = algorithm
|
|
||||||
|
|
||||||
self._backend = backend
|
|
||||||
|
|
||||||
if ctx is None:
|
|
||||||
ctx = self._backend._lib.EVP_MD_CTX_create()
|
|
||||||
ctx = self._backend._ffi.gc(ctx,
|
|
||||||
self._backend._lib.EVP_MD_CTX_destroy)
|
|
||||||
evp_md = self._backend._lib.EVP_get_digestbyname(
|
|
||||||
algorithm.name.encode("ascii"))
|
|
||||||
if evp_md == self._backend._ffi.NULL:
|
|
||||||
raise UnsupportedAlgorithm(
|
|
||||||
"{0} is not a supported hash on this backend.".format(
|
|
||||||
algorithm.name),
|
|
||||||
_Reasons.UNSUPPORTED_HASH
|
|
||||||
)
|
|
||||||
res = self._backend._lib.EVP_DigestInit_ex(ctx, evp_md,
|
|
||||||
self._backend._ffi.NULL)
|
|
||||||
self._backend.openssl_assert(res != 0)
|
|
||||||
|
|
||||||
self._ctx = ctx
|
|
||||||
|
|
||||||
algorithm = utils.read_only_property("_algorithm")
|
|
||||||
|
|
||||||
def copy(self):
|
|
||||||
copied_ctx = self._backend._lib.EVP_MD_CTX_create()
|
|
||||||
copied_ctx = self._backend._ffi.gc(
|
|
||||||
copied_ctx, self._backend._lib.EVP_MD_CTX_destroy
|
|
||||||
)
|
|
||||||
res = self._backend._lib.EVP_MD_CTX_copy_ex(copied_ctx, self._ctx)
|
|
||||||
self._backend.openssl_assert(res != 0)
|
|
||||||
return _HashContext(self._backend, self.algorithm, ctx=copied_ctx)
|
|
||||||
|
|
||||||
def update(self, data):
|
|
||||||
res = self._backend._lib.EVP_DigestUpdate(self._ctx, data, len(data))
|
|
||||||
self._backend.openssl_assert(res != 0)
|
|
||||||
|
|
||||||
def finalize(self):
|
|
||||||
buf = self._backend._ffi.new("unsigned char[]",
|
|
||||||
self._backend._lib.EVP_MAX_MD_SIZE)
|
|
||||||
outlen = self._backend._ffi.new("unsigned int *")
|
|
||||||
res = self._backend._lib.EVP_DigestFinal_ex(self._ctx, buf, outlen)
|
|
||||||
self._backend.openssl_assert(res != 0)
|
|
||||||
self._backend.openssl_assert(outlen[0] == self.algorithm.digest_size)
|
|
||||||
res = self._backend._lib.EVP_MD_CTX_cleanup(self._ctx)
|
|
||||||
self._backend.openssl_assert(res == 1)
|
|
||||||
return self._backend._ffi.buffer(buf)[:outlen[0]]
|
|
|
@ -1,81 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
|
|
||||||
from cryptography import utils
|
|
||||||
from cryptography.exceptions import (
|
|
||||||
InvalidSignature, UnsupportedAlgorithm, _Reasons
|
|
||||||
)
|
|
||||||
from cryptography.hazmat.primitives import constant_time, hashes, interfaces
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(interfaces.MACContext)
|
|
||||||
@utils.register_interface(hashes.HashContext)
|
|
||||||
class _HMACContext(object):
|
|
||||||
def __init__(self, backend, key, algorithm, ctx=None):
|
|
||||||
self._algorithm = algorithm
|
|
||||||
self._backend = backend
|
|
||||||
|
|
||||||
if ctx is None:
|
|
||||||
ctx = self._backend._ffi.new("HMAC_CTX *")
|
|
||||||
self._backend._lib.HMAC_CTX_init(ctx)
|
|
||||||
ctx = self._backend._ffi.gc(
|
|
||||||
ctx, self._backend._lib.HMAC_CTX_cleanup
|
|
||||||
)
|
|
||||||
evp_md = self._backend._lib.EVP_get_digestbyname(
|
|
||||||
algorithm.name.encode('ascii'))
|
|
||||||
if evp_md == self._backend._ffi.NULL:
|
|
||||||
raise UnsupportedAlgorithm(
|
|
||||||
"{0} is not a supported hash on this backend.".format(
|
|
||||||
algorithm.name),
|
|
||||||
_Reasons.UNSUPPORTED_HASH
|
|
||||||
)
|
|
||||||
res = self._backend._lib.Cryptography_HMAC_Init_ex(
|
|
||||||
ctx, key, len(key), evp_md, self._backend._ffi.NULL
|
|
||||||
)
|
|
||||||
self._backend.openssl_assert(res != 0)
|
|
||||||
|
|
||||||
self._ctx = ctx
|
|
||||||
self._key = key
|
|
||||||
|
|
||||||
algorithm = utils.read_only_property("_algorithm")
|
|
||||||
|
|
||||||
def copy(self):
|
|
||||||
copied_ctx = self._backend._ffi.new("HMAC_CTX *")
|
|
||||||
self._backend._lib.HMAC_CTX_init(copied_ctx)
|
|
||||||
copied_ctx = self._backend._ffi.gc(
|
|
||||||
copied_ctx, self._backend._lib.HMAC_CTX_cleanup
|
|
||||||
)
|
|
||||||
res = self._backend._lib.Cryptography_HMAC_CTX_copy(
|
|
||||||
copied_ctx, self._ctx
|
|
||||||
)
|
|
||||||
self._backend.openssl_assert(res != 0)
|
|
||||||
return _HMACContext(
|
|
||||||
self._backend, self._key, self.algorithm, ctx=copied_ctx
|
|
||||||
)
|
|
||||||
|
|
||||||
def update(self, data):
|
|
||||||
res = self._backend._lib.Cryptography_HMAC_Update(
|
|
||||||
self._ctx, data, len(data)
|
|
||||||
)
|
|
||||||
self._backend.openssl_assert(res != 0)
|
|
||||||
|
|
||||||
def finalize(self):
|
|
||||||
buf = self._backend._ffi.new("unsigned char[]",
|
|
||||||
self._backend._lib.EVP_MAX_MD_SIZE)
|
|
||||||
outlen = self._backend._ffi.new("unsigned int *")
|
|
||||||
res = self._backend._lib.Cryptography_HMAC_Final(
|
|
||||||
self._ctx, buf, outlen
|
|
||||||
)
|
|
||||||
self._backend.openssl_assert(res != 0)
|
|
||||||
self._backend.openssl_assert(outlen[0] == self.algorithm.digest_size)
|
|
||||||
self._backend._lib.HMAC_CTX_cleanup(self._ctx)
|
|
||||||
return self._backend._ffi.buffer(buf)[:outlen[0]]
|
|
||||||
|
|
||||||
def verify(self, signature):
|
|
||||||
digest = self.finalize()
|
|
||||||
if not constant_time.bytes_eq(digest, signature):
|
|
||||||
raise InvalidSignature("Signature did not match digest.")
|
|
|
@ -1,604 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
import math
|
|
||||||
|
|
||||||
from cryptography import utils
|
|
||||||
from cryptography.exceptions import (
|
|
||||||
AlreadyFinalized, InvalidSignature, UnsupportedAlgorithm, _Reasons
|
|
||||||
)
|
|
||||||
from cryptography.hazmat.primitives import hashes
|
|
||||||
from cryptography.hazmat.primitives.asymmetric import (
|
|
||||||
AsymmetricSignatureContext, AsymmetricVerificationContext, rsa
|
|
||||||
)
|
|
||||||
from cryptography.hazmat.primitives.asymmetric.padding import (
|
|
||||||
AsymmetricPadding, MGF1, OAEP, PKCS1v15, PSS
|
|
||||||
)
|
|
||||||
from cryptography.hazmat.primitives.asymmetric.rsa import (
|
|
||||||
RSAPrivateKeyWithSerialization, RSAPublicKeyWithSerialization
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def _get_rsa_pss_salt_length(pss, key_size, digest_size):
|
|
||||||
salt = pss._salt_length
|
|
||||||
|
|
||||||
if salt is MGF1.MAX_LENGTH or salt is PSS.MAX_LENGTH:
|
|
||||||
# bit length - 1 per RFC 3447
|
|
||||||
emlen = int(math.ceil((key_size - 1) / 8.0))
|
|
||||||
salt_length = emlen - digest_size - 2
|
|
||||||
assert salt_length >= 0
|
|
||||||
return salt_length
|
|
||||||
else:
|
|
||||||
return salt
|
|
||||||
|
|
||||||
|
|
||||||
def _enc_dec_rsa(backend, key, data, padding):
|
|
||||||
if not isinstance(padding, AsymmetricPadding):
|
|
||||||
raise TypeError("Padding must be an instance of AsymmetricPadding.")
|
|
||||||
|
|
||||||
if isinstance(padding, PKCS1v15):
|
|
||||||
padding_enum = backend._lib.RSA_PKCS1_PADDING
|
|
||||||
elif isinstance(padding, OAEP):
|
|
||||||
padding_enum = backend._lib.RSA_PKCS1_OAEP_PADDING
|
|
||||||
if not isinstance(padding._mgf, MGF1):
|
|
||||||
raise UnsupportedAlgorithm(
|
|
||||||
"Only MGF1 is supported by this backend.",
|
|
||||||
_Reasons.UNSUPPORTED_MGF
|
|
||||||
)
|
|
||||||
|
|
||||||
if not isinstance(padding._mgf._algorithm, hashes.SHA1):
|
|
||||||
raise UnsupportedAlgorithm(
|
|
||||||
"This backend supports only SHA1 inside MGF1 when "
|
|
||||||
"using OAEP.",
|
|
||||||
_Reasons.UNSUPPORTED_HASH
|
|
||||||
)
|
|
||||||
|
|
||||||
if padding._label is not None and padding._label != b"":
|
|
||||||
raise ValueError("This backend does not support OAEP labels.")
|
|
||||||
|
|
||||||
if not isinstance(padding._algorithm, hashes.SHA1):
|
|
||||||
raise UnsupportedAlgorithm(
|
|
||||||
"This backend only supports SHA1 when using OAEP.",
|
|
||||||
_Reasons.UNSUPPORTED_HASH
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
raise UnsupportedAlgorithm(
|
|
||||||
"{0} is not supported by this backend.".format(
|
|
||||||
padding.name
|
|
||||||
),
|
|
||||||
_Reasons.UNSUPPORTED_PADDING
|
|
||||||
)
|
|
||||||
|
|
||||||
if backend._lib.Cryptography_HAS_PKEY_CTX:
|
|
||||||
return _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum)
|
|
||||||
else:
|
|
||||||
return _enc_dec_rsa_098(backend, key, data, padding_enum)
|
|
||||||
|
|
||||||
|
|
||||||
def _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum):
    """
    Perform RSA encrypt/decrypt via the EVP_PKEY_CTX API (OpenSSL >= 1.0.0).

    Whether this encrypts or decrypts is determined by the key type:
    public key -> encrypt, otherwise -> decrypt.
    """
    if isinstance(key, _RSAPublicKey):
        init = backend._lib.EVP_PKEY_encrypt_init
        crypt = backend._lib.Cryptography_EVP_PKEY_encrypt
    else:
        init = backend._lib.EVP_PKEY_decrypt_init
        crypt = backend._lib.Cryptography_EVP_PKEY_decrypt

    pkey_ctx = backend._lib.EVP_PKEY_CTX_new(
        key._evp_pkey, backend._ffi.NULL
    )
    backend.openssl_assert(pkey_ctx != backend._ffi.NULL)
    # Tie the ctx lifetime to the cffi object so it is freed automatically.
    pkey_ctx = backend._ffi.gc(pkey_ctx, backend._lib.EVP_PKEY_CTX_free)
    res = init(pkey_ctx)
    backend.openssl_assert(res == 1)
    res = backend._lib.EVP_PKEY_CTX_set_rsa_padding(
        pkey_ctx, padding_enum)
    backend.openssl_assert(res > 0)
    # EVP_PKEY_size gives an upper bound on the output length.
    buf_size = backend._lib.EVP_PKEY_size(key._evp_pkey)
    backend.openssl_assert(buf_size > 0)
    outlen = backend._ffi.new("size_t *", buf_size)
    buf = backend._ffi.new("char[]", buf_size)
    res = crypt(pkey_ctx, buf, outlen, data, len(data))
    if res <= 0:
        # Raises ValueError with a message derived from the error queue.
        _handle_rsa_enc_dec_error(backend, key)

    return backend._ffi.buffer(buf)[:outlen[0]]
|
|
||||||
|
|
||||||
|
|
||||||
def _enc_dec_rsa_098(backend, key, data, padding_enum):
    """
    Perform RSA encrypt/decrypt via the legacy RSA_* API (OpenSSL 0.9.8).

    Public key -> RSA_public_encrypt, otherwise -> RSA_private_decrypt.
    """
    if isinstance(key, _RSAPublicKey):
        crypt = backend._lib.RSA_public_encrypt
    else:
        crypt = backend._lib.RSA_private_decrypt

    # RSA_size is the modulus length in bytes: the maximum output size.
    key_size = backend._lib.RSA_size(key._rsa_cdata)
    backend.openssl_assert(key_size > 0)
    buf = backend._ffi.new("unsigned char[]", key_size)
    res = crypt(len(data), data, buf, key._rsa_cdata, padding_enum)
    if res < 0:
        # Raises ValueError with a message derived from the error queue.
        _handle_rsa_enc_dec_error(backend, key)

    return backend._ffi.buffer(buf)[:res]
|
|
||||||
|
|
||||||
|
|
||||||
def _handle_rsa_enc_dec_error(backend, key):
    """
    Translate a failed RSA encrypt/decrypt into a Python ``ValueError``.

    Consumes the thread's OpenSSL error queue and asserts that the failure
    is one of the expected RSA error reasons before raising. Must be called
    immediately after the failing call, while the queue is still intact.
    """
    errors = backend._consume_errors()
    assert errors
    assert errors[0].lib == backend._lib.ERR_LIB_RSA
    if isinstance(key, _RSAPublicKey):
        # Encryption failure: plaintext exceeded what the key can hold.
        assert (errors[0].reason ==
                backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE)
        raise ValueError(
            "Data too long for key size. Encrypt less data or use a "
            "larger key size."
        )
    else:
        # Decryption failure: bad padding block. The exact reason code
        # varies by OpenSSL version.
        decoding_errors = [
            backend._lib.RSA_R_BLOCK_TYPE_IS_NOT_01,
            backend._lib.RSA_R_BLOCK_TYPE_IS_NOT_02,
        ]
        if backend._lib.Cryptography_HAS_RSA_R_PKCS_DECODING_ERROR:
            decoding_errors.append(backend._lib.RSA_R_PKCS_DECODING_ERROR)

        assert errors[0].reason in decoding_errors
        raise ValueError("Decryption failed.")
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(AsymmetricSignatureContext)
class _RSASignatureContext(object):
    """
    Incremental RSA signing context (PKCS1 v1.5 or PSS).

    Chooses a finalize strategy at construction time based on the padding
    and on whether the EVP_PKEY_CTX API is available in this OpenSSL build.
    """

    def __init__(self, backend, private_key, padding, algorithm):
        self._backend = backend
        self._private_key = private_key

        if not isinstance(padding, AsymmetricPadding):
            raise TypeError("Expected provider of AsymmetricPadding.")

        self._pkey_size = self._backend._lib.EVP_PKEY_size(
            self._private_key._evp_pkey
        )
        self._backend.openssl_assert(self._pkey_size > 0)

        if isinstance(padding, PKCS1v15):
            if self._backend._lib.Cryptography_HAS_PKEY_CTX:
                self._finalize_method = self._finalize_pkey_ctx
                self._padding_enum = self._backend._lib.RSA_PKCS1_PADDING
            else:
                self._finalize_method = self._finalize_pkcs1
        elif isinstance(padding, PSS):
            if not isinstance(padding._mgf, MGF1):
                raise UnsupportedAlgorithm(
                    "Only MGF1 is supported by this backend.",
                    _Reasons.UNSUPPORTED_MGF
                )

            # Size of key in bytes - 2 is the maximum
            # PSS signature length (salt length is checked later)
            if self._pkey_size - algorithm.digest_size - 2 < 0:
                raise ValueError("Digest too large for key size. Use a larger "
                                 "key.")

            if not self._backend._mgf1_hash_supported(padding._mgf._algorithm):
                raise UnsupportedAlgorithm(
                    "When OpenSSL is older than 1.0.1 then only SHA1 is "
                    "supported with MGF1.",
                    _Reasons.UNSUPPORTED_HASH
                )

            if self._backend._lib.Cryptography_HAS_PKEY_CTX:
                self._finalize_method = self._finalize_pkey_ctx
                self._padding_enum = self._backend._lib.RSA_PKCS1_PSS_PADDING
            else:
                self._finalize_method = self._finalize_pss
        else:
            raise UnsupportedAlgorithm(
                "{0} is not supported by this backend.".format(padding.name),
                _Reasons.UNSUPPORTED_PADDING
            )

        self._padding = padding
        self._algorithm = algorithm
        self._hash_ctx = hashes.Hash(self._algorithm, self._backend)

    def update(self, data):
        """Feed more message bytes into the running hash."""
        self._hash_ctx.update(data)

    def finalize(self):
        """Finish hashing and return the signature bytes."""
        evp_md = self._backend._lib.EVP_get_digestbyname(
            self._algorithm.name.encode("ascii"))
        self._backend.openssl_assert(evp_md != self._backend._ffi.NULL)

        return self._finalize_method(evp_md)

    def _finalize_pkey_ctx(self, evp_md):
        # Sign via EVP_PKEY_sign (OpenSSL >= 1.0.0); handles both PKCS1
        # v1.5 and PSS depending on self._padding_enum.
        pkey_ctx = self._backend._lib.EVP_PKEY_CTX_new(
            self._private_key._evp_pkey, self._backend._ffi.NULL
        )
        self._backend.openssl_assert(pkey_ctx != self._backend._ffi.NULL)
        pkey_ctx = self._backend._ffi.gc(pkey_ctx,
                                         self._backend._lib.EVP_PKEY_CTX_free)
        res = self._backend._lib.EVP_PKEY_sign_init(pkey_ctx)
        self._backend.openssl_assert(res == 1)
        res = self._backend._lib.EVP_PKEY_CTX_set_signature_md(
            pkey_ctx, evp_md)
        self._backend.openssl_assert(res > 0)

        res = self._backend._lib.EVP_PKEY_CTX_set_rsa_padding(
            pkey_ctx, self._padding_enum)
        self._backend.openssl_assert(res > 0)
        if isinstance(self._padding, PSS):
            res = self._backend._lib.EVP_PKEY_CTX_set_rsa_pss_saltlen(
                pkey_ctx,
                _get_rsa_pss_salt_length(
                    self._padding,
                    self._private_key.key_size,
                    self._hash_ctx.algorithm.digest_size
                )
            )
            self._backend.openssl_assert(res > 0)

            if self._backend._lib.Cryptography_HAS_MGF1_MD:
                # MGF1 MD is configurable in OpenSSL 1.0.1+
                mgf1_md = self._backend._lib.EVP_get_digestbyname(
                    self._padding._mgf._algorithm.name.encode("ascii"))
                self._backend.openssl_assert(
                    mgf1_md != self._backend._ffi.NULL
                )
                res = self._backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md(
                    pkey_ctx, mgf1_md
                )
                self._backend.openssl_assert(res > 0)
        data_to_sign = self._hash_ctx.finalize()
        # First EVP_PKEY_sign call with a NULL buffer just queries the
        # required output length; the second call produces the signature.
        buflen = self._backend._ffi.new("size_t *")
        res = self._backend._lib.EVP_PKEY_sign(
            pkey_ctx,
            self._backend._ffi.NULL,
            buflen,
            data_to_sign,
            len(data_to_sign)
        )
        self._backend.openssl_assert(res == 1)
        buf = self._backend._ffi.new("unsigned char[]", buflen[0])
        res = self._backend._lib.EVP_PKEY_sign(
            pkey_ctx, buf, buflen, data_to_sign, len(data_to_sign))
        if res != 1:
            errors = self._backend._consume_errors()
            assert errors[0].lib == self._backend._lib.ERR_LIB_RSA
            reason = None
            if (errors[0].reason ==
                    self._backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE):
                reason = ("Salt length too long for key size. Try using "
                          "MAX_LENGTH instead.")
            else:
                assert (errors[0].reason ==
                        self._backend._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY)
                reason = "Digest too large for key size. Use a larger key."
            assert reason is not None
            raise ValueError(reason)

        return self._backend._ffi.buffer(buf)[:]

    def _finalize_pkcs1(self, evp_md):
        # Legacy (0.9.8) PKCS1 v1.5 signing via EVP_SignFinal, which reads
        # directly from the running hash context.
        if self._hash_ctx._ctx is None:
            raise AlreadyFinalized("Context has already been finalized.")

        sig_buf = self._backend._ffi.new("char[]", self._pkey_size)
        sig_len = self._backend._ffi.new("unsigned int *")
        res = self._backend._lib.EVP_SignFinal(
            self._hash_ctx._ctx._ctx,
            sig_buf,
            sig_len,
            self._private_key._evp_pkey
        )
        # Finalize our wrapper regardless of result so the context is spent.
        self._hash_ctx.finalize()
        if res == 0:
            errors = self._backend._consume_errors()
            assert errors[0].lib == self._backend._lib.ERR_LIB_RSA
            assert (errors[0].reason ==
                    self._backend._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY)
            raise ValueError("Digest too large for key size. Use a larger "
                             "key.")

        return self._backend._ffi.buffer(sig_buf)[:sig_len[0]]

    def _finalize_pss(self, evp_md):
        # Legacy (0.9.8) PSS: pad manually with RSA_padding_add_PKCS1_PSS,
        # then apply the raw private-key operation with no further padding.
        data_to_sign = self._hash_ctx.finalize()
        padded = self._backend._ffi.new("unsigned char[]", self._pkey_size)
        res = self._backend._lib.RSA_padding_add_PKCS1_PSS(
            self._private_key._rsa_cdata,
            padded,
            data_to_sign,
            evp_md,
            _get_rsa_pss_salt_length(
                self._padding,
                self._private_key.key_size,
                len(data_to_sign)
            )
        )
        if res != 1:
            errors = self._backend._consume_errors()
            assert errors[0].lib == self._backend._lib.ERR_LIB_RSA
            assert (errors[0].reason ==
                    self._backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE)
            raise ValueError("Salt length too long for key size. Try using "
                             "MAX_LENGTH instead.")

        sig_buf = self._backend._ffi.new("char[]", self._pkey_size)
        sig_len = self._backend._lib.RSA_private_encrypt(
            self._pkey_size,
            padded,
            sig_buf,
            self._private_key._rsa_cdata,
            self._backend._lib.RSA_NO_PADDING
        )
        self._backend.openssl_assert(sig_len != -1)
        return self._backend._ffi.buffer(sig_buf)[:sig_len]
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(AsymmetricVerificationContext)
class _RSAVerificationContext(object):
    """
    Incremental RSA signature verification context (PKCS1 v1.5 or PSS).

    Mirrors ``_RSASignatureContext``: the verify strategy is chosen at
    construction time based on the padding and on whether EVP_PKEY_CTX is
    available in this OpenSSL build.
    """

    def __init__(self, backend, public_key, signature, padding, algorithm):
        self._backend = backend
        self._public_key = public_key
        self._signature = signature

        if not isinstance(padding, AsymmetricPadding):
            raise TypeError("Expected provider of AsymmetricPadding.")

        self._pkey_size = self._backend._lib.EVP_PKEY_size(
            self._public_key._evp_pkey
        )
        self._backend.openssl_assert(self._pkey_size > 0)

        if isinstance(padding, PKCS1v15):
            if self._backend._lib.Cryptography_HAS_PKEY_CTX:
                self._verify_method = self._verify_pkey_ctx
                self._padding_enum = self._backend._lib.RSA_PKCS1_PADDING
            else:
                self._verify_method = self._verify_pkcs1
        elif isinstance(padding, PSS):
            if not isinstance(padding._mgf, MGF1):
                raise UnsupportedAlgorithm(
                    "Only MGF1 is supported by this backend.",
                    _Reasons.UNSUPPORTED_MGF
                )

            # Size of key in bytes - 2 is the maximum
            # PSS signature length (salt length is checked later)
            if self._pkey_size - algorithm.digest_size - 2 < 0:
                raise ValueError(
                    "Digest too large for key size. Check that you have the "
                    "correct key and digest algorithm."
                )

            if not self._backend._mgf1_hash_supported(padding._mgf._algorithm):
                raise UnsupportedAlgorithm(
                    "When OpenSSL is older than 1.0.1 then only SHA1 is "
                    "supported with MGF1.",
                    _Reasons.UNSUPPORTED_HASH
                )

            if self._backend._lib.Cryptography_HAS_PKEY_CTX:
                self._verify_method = self._verify_pkey_ctx
                self._padding_enum = self._backend._lib.RSA_PKCS1_PSS_PADDING
            else:
                self._verify_method = self._verify_pss
        else:
            raise UnsupportedAlgorithm(
                "{0} is not supported by this backend.".format(padding.name),
                _Reasons.UNSUPPORTED_PADDING
            )

        self._padding = padding
        self._algorithm = algorithm
        self._hash_ctx = hashes.Hash(self._algorithm, self._backend)

    def update(self, data):
        """Feed more message bytes into the running hash."""
        self._hash_ctx.update(data)

    def verify(self):
        """Finish hashing and verify; raises InvalidSignature on failure."""
        evp_md = self._backend._lib.EVP_get_digestbyname(
            self._algorithm.name.encode("ascii"))
        self._backend.openssl_assert(evp_md != self._backend._ffi.NULL)

        self._verify_method(evp_md)

    def _verify_pkey_ctx(self, evp_md):
        # Verify via EVP_PKEY_verify (OpenSSL >= 1.0.0); handles both
        # PKCS1 v1.5 and PSS depending on self._padding_enum.
        pkey_ctx = self._backend._lib.EVP_PKEY_CTX_new(
            self._public_key._evp_pkey, self._backend._ffi.NULL
        )
        self._backend.openssl_assert(pkey_ctx != self._backend._ffi.NULL)
        pkey_ctx = self._backend._ffi.gc(pkey_ctx,
                                         self._backend._lib.EVP_PKEY_CTX_free)
        res = self._backend._lib.EVP_PKEY_verify_init(pkey_ctx)
        self._backend.openssl_assert(res == 1)
        res = self._backend._lib.EVP_PKEY_CTX_set_signature_md(
            pkey_ctx, evp_md)
        self._backend.openssl_assert(res > 0)

        res = self._backend._lib.EVP_PKEY_CTX_set_rsa_padding(
            pkey_ctx, self._padding_enum)
        self._backend.openssl_assert(res > 0)
        if isinstance(self._padding, PSS):
            res = self._backend._lib.EVP_PKEY_CTX_set_rsa_pss_saltlen(
                pkey_ctx,
                _get_rsa_pss_salt_length(
                    self._padding,
                    self._public_key.key_size,
                    self._hash_ctx.algorithm.digest_size
                )
            )
            self._backend.openssl_assert(res > 0)
            if self._backend._lib.Cryptography_HAS_MGF1_MD:
                # MGF1 MD is configurable in OpenSSL 1.0.1+
                mgf1_md = self._backend._lib.EVP_get_digestbyname(
                    self._padding._mgf._algorithm.name.encode("ascii"))
                self._backend.openssl_assert(
                    mgf1_md != self._backend._ffi.NULL
                )
                res = self._backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md(
                    pkey_ctx, mgf1_md
                )
                self._backend.openssl_assert(res > 0)

        data_to_verify = self._hash_ctx.finalize()
        res = self._backend._lib.EVP_PKEY_verify(
            pkey_ctx,
            self._signature,
            len(self._signature),
            data_to_verify,
            len(data_to_verify)
        )
        # The previous call can return negative numbers in the event of an
        # error. This is not a signature failure but we need to fail if it
        # occurs.
        self._backend.openssl_assert(res >= 0)
        if res == 0:
            errors = self._backend._consume_errors()
            assert errors
            raise InvalidSignature

    def _verify_pkcs1(self, evp_md):
        # Legacy (0.9.8) PKCS1 v1.5 verification via EVP_VerifyFinal, which
        # reads directly from the running hash context.
        if self._hash_ctx._ctx is None:
            raise AlreadyFinalized("Context has already been finalized.")

        res = self._backend._lib.EVP_VerifyFinal(
            self._hash_ctx._ctx._ctx,
            self._signature,
            len(self._signature),
            self._public_key._evp_pkey
        )
        self._hash_ctx.finalize()
        # The previous call can return negative numbers in the event of an
        # error. This is not a signature failure but we need to fail if it
        # occurs.
        self._backend.openssl_assert(res >= 0)
        if res == 0:
            errors = self._backend._consume_errors()
            assert errors
            raise InvalidSignature

    def _verify_pss(self, evp_md):
        # Legacy (0.9.8) PSS: recover the padded block with the raw public
        # key operation, then check it with RSA_verify_PKCS1_PSS.
        buf = self._backend._ffi.new("unsigned char[]", self._pkey_size)
        res = self._backend._lib.RSA_public_decrypt(
            len(self._signature),
            self._signature,
            buf,
            self._public_key._rsa_cdata,
            self._backend._lib.RSA_NO_PADDING
        )
        if res != self._pkey_size:
            errors = self._backend._consume_errors()
            assert errors
            raise InvalidSignature

        data_to_verify = self._hash_ctx.finalize()
        res = self._backend._lib.RSA_verify_PKCS1_PSS(
            self._public_key._rsa_cdata,
            data_to_verify,
            evp_md,
            buf,
            _get_rsa_pss_salt_length(
                self._padding,
                self._public_key.key_size,
                len(data_to_verify)
            )
        )
        if res != 1:
            errors = self._backend._consume_errors()
            assert errors
            raise InvalidSignature
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(RSAPrivateKeyWithSerialization)
class _RSAPrivateKey(object):
    """
    RSA private key backed by OpenSSL RSA* and EVP_PKEY* cdata objects.
    """

    def __init__(self, backend, rsa_cdata, evp_pkey):
        self._backend = backend
        self._rsa_cdata = rsa_cdata
        self._evp_pkey = evp_pkey

        # Key size in bits, taken from the modulus n.
        self._key_size = self._backend._lib.BN_num_bits(self._rsa_cdata.n)

    key_size = utils.read_only_property("_key_size")

    def signer(self, padding, algorithm):
        """Return an incremental signing context for this key."""
        return _RSASignatureContext(self._backend, self, padding, algorithm)

    def decrypt(self, ciphertext, padding):
        """Decrypt *ciphertext*; its length must equal the key size in bytes."""
        key_size_bytes = int(math.ceil(self.key_size / 8.0))
        if key_size_bytes != len(ciphertext):
            raise ValueError("Ciphertext length must be equal to key size.")

        return _enc_dec_rsa(self._backend, self, ciphertext, padding)

    def public_key(self):
        """Build and return the corresponding ``_RSAPublicKey``."""
        ctx = self._backend._lib.RSA_new()
        self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
        ctx = self._backend._ffi.gc(ctx, self._backend._lib.RSA_free)
        # Copy only the public components (e, n) into the new RSA struct.
        ctx.e = self._backend._lib.BN_dup(self._rsa_cdata.e)
        ctx.n = self._backend._lib.BN_dup(self._rsa_cdata.n)
        res = self._backend._lib.RSA_blinding_on(ctx, self._backend._ffi.NULL)
        self._backend.openssl_assert(res == 1)
        evp_pkey = self._backend._rsa_cdata_to_evp_pkey(ctx)
        return _RSAPublicKey(self._backend, ctx, evp_pkey)

    def private_numbers(self):
        """Return the CRT components and public numbers as RSAPrivateNumbers."""
        return rsa.RSAPrivateNumbers(
            p=self._backend._bn_to_int(self._rsa_cdata.p),
            q=self._backend._bn_to_int(self._rsa_cdata.q),
            d=self._backend._bn_to_int(self._rsa_cdata.d),
            dmp1=self._backend._bn_to_int(self._rsa_cdata.dmp1),
            dmq1=self._backend._bn_to_int(self._rsa_cdata.dmq1),
            iqmp=self._backend._bn_to_int(self._rsa_cdata.iqmp),
            public_numbers=rsa.RSAPublicNumbers(
                e=self._backend._bn_to_int(self._rsa_cdata.e),
                n=self._backend._bn_to_int(self._rsa_cdata.n),
            )
        )

    def private_bytes(self, encoding, format, encryption_algorithm):
        """Serialize the key; delegates to the backend's generic serializer."""
        return self._backend._private_key_bytes(
            encoding,
            format,
            encryption_algorithm,
            self._evp_pkey,
            self._rsa_cdata
        )
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(RSAPublicKeyWithSerialization)
class _RSAPublicKey(object):
    """
    RSA public key backed by OpenSSL RSA* and EVP_PKEY* cdata objects.
    """

    def __init__(self, backend, rsa_cdata, evp_pkey):
        self._backend = backend
        self._rsa_cdata = rsa_cdata
        self._evp_pkey = evp_pkey

        # Key size in bits, taken from the modulus n.
        self._key_size = self._backend._lib.BN_num_bits(self._rsa_cdata.n)

    key_size = utils.read_only_property("_key_size")

    def verifier(self, signature, padding, algorithm):
        """Return an incremental verification context for *signature*."""
        if not isinstance(signature, bytes):
            raise TypeError("signature must be bytes.")

        return _RSAVerificationContext(
            self._backend, self, signature, padding, algorithm
        )

    def encrypt(self, plaintext, padding):
        """Encrypt *plaintext* under this public key with *padding*."""
        return _enc_dec_rsa(self._backend, self, plaintext, padding)

    def public_numbers(self):
        """Return the public exponent and modulus as RSAPublicNumbers."""
        return rsa.RSAPublicNumbers(
            e=self._backend._bn_to_int(self._rsa_cdata.e),
            n=self._backend._bn_to_int(self._rsa_cdata.n),
        )

    def public_bytes(self, encoding, format):
        """Serialize the key; delegates to the backend's generic serializer."""
        return self._backend._public_key_bytes(
            encoding,
            format,
            self._evp_pkey,
            self._rsa_cdata
        )
|
|
|
@ -1,26 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
import six
|
|
||||||
|
|
||||||
|
|
||||||
def _truncate_digest(digest, order_bits):
|
|
||||||
digest_len = len(digest)
|
|
||||||
|
|
||||||
if 8 * digest_len > order_bits:
|
|
||||||
digest_len = (order_bits + 7) // 8
|
|
||||||
digest = digest[:digest_len]
|
|
||||||
|
|
||||||
if 8 * digest_len > order_bits:
|
|
||||||
rshift = 8 - (order_bits & 0x7)
|
|
||||||
assert 0 < rshift < 8
|
|
||||||
|
|
||||||
mask = 0xFF >> rshift << rshift
|
|
||||||
|
|
||||||
# Set the bottom rshift bits to 0
|
|
||||||
digest = digest[:-1] + six.int2byte(six.indexbytes(digest, -1) & mask)
|
|
||||||
|
|
||||||
return digest
|
|
|
@ -1,940 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
import datetime
|
|
||||||
import ipaddress
|
|
||||||
|
|
||||||
from email.utils import parseaddr
|
|
||||||
|
|
||||||
import idna
|
|
||||||
|
|
||||||
import six
|
|
||||||
|
|
||||||
from six.moves import urllib_parse
|
|
||||||
|
|
||||||
from cryptography import utils, x509
|
|
||||||
from cryptography.exceptions import UnsupportedAlgorithm
|
|
||||||
from cryptography.hazmat.primitives import hashes, serialization
|
|
||||||
from cryptography.x509.oid import (
|
|
||||||
CRLExtensionOID, CertificatePoliciesOID, ExtensionOID
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def _obj2txt(backend, obj):
    """Return the dotted-decimal text form of an OpenSSL ASN1_OBJECT."""
    # A capacity of 80 bytes follows the recommendation of
    # https://www.openssl.org/docs/crypto/OBJ_nid2ln.html#return_values
    capacity = 80
    out = backend._ffi.new("char[]", capacity)
    written = backend._lib.OBJ_obj2txt(out, capacity, obj, 1)
    backend.openssl_assert(written > 0)
    return backend._ffi.buffer(out, written)[:].decode()
|
|
||||||
|
|
||||||
|
|
||||||
def _decode_x509_name_entry(backend, x509_name_entry):
    """Convert one X509_NAME_ENTRY into an ``x509.NameAttribute``."""
    asn1_obj = backend._lib.X509_NAME_ENTRY_get_object(x509_name_entry)
    backend.openssl_assert(asn1_obj != backend._ffi.NULL)
    asn1_data = backend._lib.X509_NAME_ENTRY_get_data(x509_name_entry)
    backend.openssl_assert(asn1_data != backend._ffi.NULL)

    # The attribute is the entry's OID (dotted text) plus its UTF-8 value.
    dotted_oid = _obj2txt(backend, asn1_obj)
    text_value = backend._asn1_string_to_utf8(asn1_data)
    return x509.NameAttribute(x509.ObjectIdentifier(dotted_oid), text_value)
|
|
||||||
|
|
||||||
|
|
||||||
def _decode_x509_name(backend, x509_name):
    """Convert an OpenSSL X509_NAME into an ``x509.Name``."""
    entry_count = backend._lib.X509_NAME_entry_count(x509_name)
    return x509.Name([
        _decode_x509_name_entry(
            backend, backend._lib.X509_NAME_get_entry(x509_name, index)
        )
        for index in range(entry_count)
    ])
|
|
||||||
|
|
||||||
|
|
||||||
def _decode_general_names(backend, gns):
    """Decode a stack of GENERAL_NAMEs into a list of x509 general names."""
    decoded = []
    total = backend._lib.sk_GENERAL_NAME_num(gns)
    for index in range(total):
        general_name = backend._lib.sk_GENERAL_NAME_value(gns, index)
        backend.openssl_assert(general_name != backend._ffi.NULL)
        decoded.append(_decode_general_name(backend, general_name))
    return decoded
|
|
||||||
|
|
||||||
|
|
||||||
def _decode_general_name(backend, gn):
    """
    Convert a single OpenSSL GENERAL_NAME into the matching x509 type.

    Supports DNS, URI, registered ID, IP address/network, directory name,
    RFC822 (email) and otherName entries; raises
    ``x509.UnsupportedGeneralNameType`` for x400Address/ediPartyName.
    """
    if gn.type == backend._lib.GEN_DNS:
        data = backend._asn1_string_to_bytes(gn.d.dNSName)
        if not data:
            decoded = u""
        elif data.startswith(b"*."):
            # This is a wildcard name. We need to remove the leading wildcard,
            # IDNA decode, then re-add the wildcard. Wildcard characters should
            # always be left-most (RFC 2595 section 2.4).
            decoded = u"*." + idna.decode(data[2:])
        else:
            # Not a wildcard, decode away. If the string has a * in it anywhere
            # invalid this will raise an InvalidCodePoint
            decoded = idna.decode(data)
            if data.startswith(b"."):
                # idna strips leading periods. Name constraints can have that
                # so we need to re-add it. Sigh.
                decoded = u"." + decoded

        return x509.DNSName(decoded)
    elif gn.type == backend._lib.GEN_URI:
        data = backend._asn1_string_to_ascii(gn.d.uniformResourceIdentifier)
        parsed = urllib_parse.urlparse(data)
        # IDNA-decode only the hostname component, then reassemble the URI.
        if parsed.hostname:
            hostname = idna.decode(parsed.hostname)
        else:
            hostname = ""
        if parsed.port:
            netloc = hostname + u":" + six.text_type(parsed.port)
        else:
            netloc = hostname

        # Note that building a URL in this fashion means it should be
        # semantically indistinguishable from the original but is not
        # guaranteed to be exactly the same.
        uri = urllib_parse.urlunparse((
            parsed.scheme,
            netloc,
            parsed.path,
            parsed.params,
            parsed.query,
            parsed.fragment
        ))
        return x509.UniformResourceIdentifier(uri)
    elif gn.type == backend._lib.GEN_RID:
        oid = _obj2txt(backend, gn.d.registeredID)
        return x509.RegisteredID(x509.ObjectIdentifier(oid))
    elif gn.type == backend._lib.GEN_IPADD:
        data = backend._asn1_string_to_bytes(gn.d.iPAddress)
        data_len = len(data)
        if data_len == 8 or data_len == 32:
            # This is an IPv4 or IPv6 Network and not a single IP. This
            # type of data appears in Name Constraints. Unfortunately,
            # ipaddress doesn't support packed bytes + netmask. Additionally,
            # IPv6Network can only handle CIDR rather than the full 16 byte
            # netmask. To handle this we convert the netmask to integer, then
            # find the first 0 bit, which will be the prefix. If another 1
            # bit is present after that the netmask is invalid.
            base = ipaddress.ip_address(data[:data_len // 2])
            netmask = ipaddress.ip_address(data[data_len // 2:])
            bits = bin(int(netmask))[2:]
            prefix = bits.find('0')
            # If no 0 bits are found it is a /32 or /128
            if prefix == -1:
                prefix = len(bits)

            if "1" in bits[prefix:]:
                raise ValueError("Invalid netmask")

            ip = ipaddress.ip_network(base.exploded + u"/{0}".format(prefix))
        else:
            ip = ipaddress.ip_address(data)

        return x509.IPAddress(ip)
    elif gn.type == backend._lib.GEN_DIRNAME:
        return x509.DirectoryName(
            _decode_x509_name(backend, gn.d.directoryName)
        )
    elif gn.type == backend._lib.GEN_EMAIL:
        data = backend._asn1_string_to_ascii(gn.d.rfc822Name)
        name, address = parseaddr(data)
        parts = address.split(u"@")
        if name or not address:
            # parseaddr has found a name (e.g. Name <email>) or the entire
            # value is an empty string.
            raise ValueError("Invalid rfc822name value")
        elif len(parts) == 1:
            # Single label email name. This is valid for local delivery. No
            # IDNA decoding can be done since there is no domain component.
            return x509.RFC822Name(address)
        else:
            # A normal email of the form user@domain.com. Let's attempt to
            # decode the domain component and return the entire address.
            return x509.RFC822Name(
                parts[0] + u"@" + idna.decode(parts[1])
            )
    elif gn.type == backend._lib.GEN_OTHERNAME:
        type_id = _obj2txt(backend, gn.d.otherName.type_id)
        value = backend._asn1_to_der(gn.d.otherName.value)
        return x509.OtherName(x509.ObjectIdentifier(type_id), value)
    else:
        # x400Address or ediPartyName
        raise x509.UnsupportedGeneralNameType(
            "{0} is not a supported type".format(
                x509._GENERAL_NAMES.get(gn.type, gn.type)
            ),
            gn.type
        )
|
|
||||||
|
|
||||||
|
|
||||||
def _decode_ocsp_no_check(backend, ext):
    """Decode the OCSPNoCheck extension (it carries no data of its own)."""
    return x509.OCSPNoCheck()
|
|
||||||
|
|
||||||
|
|
||||||
class _X509ExtensionParser(object):
    """
    Generic parser for the extensions attached to an X.509 object.

    Parameterized with callables so the same logic serves certificates,
    CRLs, and CSRs:

    * ``ext_count(backend, obj)`` -- number of extensions on the object.
    * ``get_ext(backend, obj, i)`` -- fetch the i-th X509_EXTENSION.
    * ``handlers`` -- mapping of ObjectIdentifier -> decode callable.
    * ``unsupported_exts`` -- OIDs OpenSSL cannot d2i itself; their raw
      extension object is passed to the handler instead.
    """

    def __init__(self, ext_count, get_ext, handlers, unsupported_exts=None):
        self.ext_count = ext_count
        self.get_ext = get_ext
        self.handlers = handlers
        self.unsupported_exts = unsupported_exts

    def parse(self, backend, x509_obj):
        """Decode every extension on *x509_obj* into an ``x509.Extensions``."""
        extensions = []
        seen_oids = set()
        for i in range(self.ext_count(backend, x509_obj)):
            ext = self.get_ext(backend, x509_obj, i)
            backend.openssl_assert(ext != backend._ffi.NULL)
            crit = backend._lib.X509_EXTENSION_get_critical(ext)
            critical = crit == 1
            oid = x509.ObjectIdentifier(_obj2txt(backend, ext.object))
            # RFC 5280 forbids repeating an extension.
            if oid in seen_oids:
                raise x509.DuplicateExtension(
                    "Duplicate {0} extension found".format(oid), oid
                )
            try:
                handler = self.handlers[oid]
            except KeyError:
                # Unknown extension: fatal only when marked critical.
                if critical:
                    raise x509.UnsupportedExtension(
                        "Critical extension {0} is not currently supported"
                        .format(oid), oid
                    )
            else:
                # For extensions which are not supported by OpenSSL we pass the
                # extension object directly to the parsing routine so it can
                # be decoded manually.
                if self.unsupported_exts and oid in self.unsupported_exts:
                    ext_data = ext
                else:
                    ext_data = backend._lib.X509V3_EXT_d2i(ext)
                    if ext_data == backend._ffi.NULL:
                        backend._consume_errors()
                        raise ValueError(
                            "The {0} extension is invalid and can't be "
                            "parsed".format(oid)
                        )

                value = handler(backend, ext_data)
                extensions.append(x509.Extension(oid, critical, value))

            seen_oids.add(oid)

        return x509.Extensions(extensions)
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(x509.Certificate)
class _Certificate(object):
    """An ``x509.Certificate`` implementation backed by an OpenSSL ``X509 *``."""

    def __init__(self, backend, x509):
        self._backend = backend
        self._x509 = x509

    def __repr__(self):
        return "<Certificate(subject={0}, ...)>".format(self.subject)

    def __eq__(self, other):
        if not isinstance(other, x509.Certificate):
            return NotImplemented
        # X509_cmp returns 0 when both certificates are identical.
        return self._backend._lib.X509_cmp(self._x509, other._x509) == 0

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        return hash(self.public_bytes(serialization.Encoding.DER))

    def fingerprint(self, algorithm):
        """Return the digest of the DER encoding under ``algorithm``."""
        digest = hashes.Hash(algorithm, self._backend)
        digest.update(self.public_bytes(serialization.Encoding.DER))
        return digest.finalize()

    @property
    def version(self):
        # The ASN.1 version field is zero-indexed: 0 -> v1, 2 -> v3.
        raw = self._backend._lib.X509_get_version(self._x509)
        if raw == 0:
            return x509.Version.v1
        if raw == 2:
            return x509.Version.v3
        raise x509.InvalidVersion(
            "{0} is not a valid X509 version".format(raw), raw
        )

    @property
    def serial(self):
        number = self._backend._lib.X509_get_serialNumber(self._x509)
        self._backend.openssl_assert(number != self._backend._ffi.NULL)
        return self._backend._asn1_integer_to_int(number)

    def public_key(self):
        """Return the certificate's public key as a backend key object."""
        evp_pkey = self._backend._lib.X509_get_pubkey(self._x509)
        if evp_pkey == self._backend._ffi.NULL:
            # Remove errors from the stack.
            self._backend._consume_errors()
            raise ValueError("Certificate public key is of an unknown type")

        evp_pkey = self._backend._ffi.gc(
            evp_pkey, self._backend._lib.EVP_PKEY_free
        )
        return self._backend._evp_pkey_to_public_key(evp_pkey)

    @property
    def not_valid_before(self):
        return self._backend._parse_asn1_time(
            self._backend._lib.X509_get_notBefore(self._x509)
        )

    @property
    def not_valid_after(self):
        return self._backend._parse_asn1_time(
            self._backend._lib.X509_get_notAfter(self._x509)
        )

    @property
    def issuer(self):
        name = self._backend._lib.X509_get_issuer_name(self._x509)
        self._backend.openssl_assert(name != self._backend._ffi.NULL)
        return _decode_x509_name(self._backend, name)

    @property
    def subject(self):
        name = self._backend._lib.X509_get_subject_name(self._x509)
        self._backend.openssl_assert(name != self._backend._ffi.NULL)
        return _decode_x509_name(self._backend, name)

    @property
    def signature_hash_algorithm(self):
        oid = _obj2txt(self._backend, self._x509.sig_alg.algorithm)
        try:
            return x509._SIG_OIDS_TO_HASH[oid]
        except KeyError:
            raise UnsupportedAlgorithm(
                "Signature algorithm OID:{0} not recognized".format(oid)
            )

    @property
    def extensions(self):
        return _CERTIFICATE_EXTENSION_PARSER.parse(self._backend, self._x509)

    def public_bytes(self, encoding):
        """Serialize the certificate as PEM or DER bytes."""
        bio = self._backend._create_mem_bio()
        if encoding is serialization.Encoding.PEM:
            res = self._backend._lib.PEM_write_bio_X509(bio, self._x509)
        elif encoding is serialization.Encoding.DER:
            res = self._backend._lib.i2d_X509_bio(bio, self._x509)
        else:
            raise TypeError("encoding must be an item from the Encoding enum")

        self._backend.openssl_assert(res == 1)
        return self._backend._read_mem_bio(bio)
|
|
||||||
|
|
||||||
|
|
||||||
def _decode_certificate_policies(backend, cp):
    """Decode a certificatePolicies extension into x509.CertificatePolicies."""
    stack = backend._ffi.cast("Cryptography_STACK_OF_POLICYINFO *", cp)
    stack = backend._ffi.gc(stack, backend._lib.sk_POLICYINFO_free)

    policies = []
    for idx in range(backend._lib.sk_POLICYINFO_num(stack)):
        info = backend._lib.sk_POLICYINFO_value(stack, idx)
        policy_oid = x509.ObjectIdentifier(_obj2txt(backend, info.policyid))
        quals = None
        if info.qualifiers != backend._ffi.NULL:
            quals = []
            qual_count = backend._lib.sk_POLICYQUALINFO_num(info.qualifiers)
            for qidx in range(qual_count):
                pqi = backend._lib.sk_POLICYQUALINFO_value(
                    info.qualifiers, qidx
                )
                qual_oid = x509.ObjectIdentifier(
                    _obj2txt(backend, pqi.pqualid)
                )
                if qual_oid == CertificatePoliciesOID.CPS_QUALIFIER:
                    # A CPS qualifier is an IA5String URI.
                    quals.append(
                        backend._ffi.buffer(
                            pqi.d.cpsuri.data, pqi.d.cpsuri.length
                        )[:].decode('ascii')
                    )
                else:
                    # Only two qualifier forms exist in RFC 5280.
                    assert qual_oid == CertificatePoliciesOID.CPS_USER_NOTICE
                    quals.append(
                        _decode_user_notice(backend, pqi.d.usernotice)
                    )
        policies.append(x509.PolicyInformation(policy_oid, quals))

    return x509.CertificatePolicies(policies)
|
|
||||||
|
|
||||||
|
|
||||||
def _decode_user_notice(backend, un):
    """Decode a UserNotice policy qualifier into x509.UserNotice."""
    explicit_text = None
    notice_reference = None

    if un.exptext != backend._ffi.NULL:
        explicit_text = backend._asn1_string_to_utf8(un.exptext)

    if un.noticeref != backend._ffi.NULL:
        organization = backend._asn1_string_to_utf8(
            un.noticeref.organization
        )
        numbers = []
        count = backend._lib.sk_ASN1_INTEGER_num(un.noticeref.noticenos)
        for idx in range(count):
            asn1_int = backend._lib.sk_ASN1_INTEGER_value(
                un.noticeref.noticenos, idx
            )
            numbers.append(backend._asn1_integer_to_int(asn1_int))
        notice_reference = x509.NoticeReference(organization, numbers)

    return x509.UserNotice(notice_reference, explicit_text)
|
|
||||||
|
|
||||||
|
|
||||||
def _decode_basic_constraints(backend, bc_st):
    """Decode a basicConstraints extension into x509.BasicConstraints."""
    bc = backend._ffi.cast("BASIC_CONSTRAINTS *", bc_st)
    bc = backend._ffi.gc(bc, backend._lib.BASIC_CONSTRAINTS_free)
    # The byte representation of an ASN.1 boolean true is \xff. OpenSSL
    # maps this to its ordinal value, so true is 255 and false is 0.
    is_ca = bc.ca == 255
    path_length = (
        None if bc.pathlen == backend._ffi.NULL
        else backend._asn1_integer_to_int(bc.pathlen)
    )
    return x509.BasicConstraints(is_ca, path_length)
|
|
||||||
|
|
||||||
|
|
||||||
def _decode_subject_key_identifier(backend, asn1_string):
    """Decode a subjectKeyIdentifier octet string."""
    octets = backend._ffi.cast("ASN1_OCTET_STRING *", asn1_string)
    octets = backend._ffi.gc(octets, backend._lib.ASN1_OCTET_STRING_free)
    identifier = backend._ffi.buffer(octets.data, octets.length)[:]
    return x509.SubjectKeyIdentifier(identifier)
|
|
||||||
|
|
||||||
|
|
||||||
def _decode_authority_key_identifier(backend, akid):
    """Decode an authorityKeyIdentifier extension.

    All three components are OPTIONAL in RFC 5280, hence the None defaults.
    """
    akid = backend._ffi.cast("AUTHORITY_KEYID *", akid)
    akid = backend._ffi.gc(akid, backend._lib.AUTHORITY_KEYID_free)

    key_identifier = None
    if akid.keyid != backend._ffi.NULL:
        key_identifier = backend._ffi.buffer(
            akid.keyid.data, akid.keyid.length
        )[:]

    cert_issuer = None
    if akid.issuer != backend._ffi.NULL:
        cert_issuer = _decode_general_names(backend, akid.issuer)

    cert_serial = None
    if akid.serial != backend._ffi.NULL:
        cert_serial = backend._asn1_integer_to_int(akid.serial)

    return x509.AuthorityKeyIdentifier(
        key_identifier, cert_issuer, cert_serial
    )
|
|
||||||
|
|
||||||
|
|
||||||
def _decode_authority_information_access(backend, aia):
    """Decode an authorityInfoAccess extension."""
    stack = backend._ffi.cast(
        "Cryptography_STACK_OF_ACCESS_DESCRIPTION *", aia
    )
    stack = backend._ffi.gc(stack, backend._lib.sk_ACCESS_DESCRIPTION_free)

    descriptions = []
    for idx in range(backend._lib.sk_ACCESS_DESCRIPTION_num(stack)):
        ad = backend._lib.sk_ACCESS_DESCRIPTION_value(stack, idx)
        backend.openssl_assert(ad.method != backend._ffi.NULL)
        method_oid = x509.ObjectIdentifier(_obj2txt(backend, ad.method))
        backend.openssl_assert(ad.location != backend._ffi.NULL)
        location = _decode_general_name(backend, ad.location)
        descriptions.append(x509.AccessDescription(method_oid, location))

    return x509.AuthorityInformationAccess(descriptions)
|
|
||||||
|
|
||||||
|
|
||||||
def _decode_key_usage(backend, bit_string):
    """Decode a keyUsage bit string into x509.KeyUsage.

    Bits 0-8 of the ASN.1 BIT STRING map positionally onto the nine
    KeyUsage constructor arguments (digital_signature through
    decipher_only, in RFC 5280 order).
    """
    bits = backend._ffi.cast("ASN1_BIT_STRING *", bit_string)
    bits = backend._ffi.gc(bits, backend._lib.ASN1_BIT_STRING_free)
    get_bit = backend._lib.ASN1_BIT_STRING_get_bit
    flags = [get_bit(bits, position) == 1 for position in range(9)]
    return x509.KeyUsage(*flags)
|
|
||||||
|
|
||||||
|
|
||||||
def _decode_general_names_extension(backend, gns):
    """Cast, take ownership of, and decode a GENERAL_NAMES payload."""
    names = backend._ffi.cast("GENERAL_NAMES *", gns)
    names = backend._ffi.gc(names, backend._lib.GENERAL_NAMES_free)
    return _decode_general_names(backend, names)
|
|
||||||
|
|
||||||
|
|
||||||
def _decode_subject_alt_name(backend, ext):
    """Decode a subjectAltName extension."""
    names = _decode_general_names_extension(backend, ext)
    return x509.SubjectAlternativeName(names)
|
|
||||||
|
|
||||||
|
|
||||||
def _decode_issuer_alt_name(backend, ext):
    """Decode an issuerAltName extension."""
    names = _decode_general_names_extension(backend, ext)
    return x509.IssuerAlternativeName(names)
|
|
||||||
|
|
||||||
|
|
||||||
def _decode_name_constraints(backend, nc):
    """Decode a nameConstraints extension."""
    constraints = backend._ffi.cast("NAME_CONSTRAINTS *", nc)
    constraints = backend._ffi.gc(
        constraints, backend._lib.NAME_CONSTRAINTS_free
    )
    return x509.NameConstraints(
        permitted_subtrees=_decode_general_subtrees(
            backend, constraints.permittedSubtrees
        ),
        excluded_subtrees=_decode_general_subtrees(
            backend, constraints.excludedSubtrees
        ),
    )
|
|
||||||
|
|
||||||
|
|
||||||
def _decode_general_subtrees(backend, stack_subtrees):
    """Decode a GENERAL_SUBTREE stack to a list of names, or None if absent."""
    if stack_subtrees == backend._ffi.NULL:
        return None

    names = []
    for idx in range(backend._lib.sk_GENERAL_SUBTREE_num(stack_subtrees)):
        subtree = backend._lib.sk_GENERAL_SUBTREE_value(stack_subtrees, idx)
        backend.openssl_assert(subtree != backend._ffi.NULL)
        names.append(_decode_general_name(backend, subtree.base))

    return names
|
|
||||||
|
|
||||||
|
|
||||||
def _decode_extended_key_usage(backend, sk):
    """Decode an extendedKeyUsage extension (a stack of OIDs)."""
    stack = backend._ffi.cast("Cryptography_STACK_OF_ASN1_OBJECT *", sk)
    stack = backend._ffi.gc(stack, backend._lib.sk_ASN1_OBJECT_free)

    usages = []
    for idx in range(backend._lib.sk_ASN1_OBJECT_num(stack)):
        obj = backend._lib.sk_ASN1_OBJECT_value(stack, idx)
        backend.openssl_assert(obj != backend._ffi.NULL)
        usages.append(x509.ObjectIdentifier(_obj2txt(backend, obj)))

    return x509.ExtendedKeyUsage(usages)
|
|
||||||
|
|
||||||
|
|
||||||
# DIST_POINT_NAME type discriminants. OpenSSL has no #define for these:
# type 0 selects the ``fullname`` union member; anything else is treated
# as ``relativename`` (see _decode_crl_distribution_points).
_DISTPOINT_TYPE_FULLNAME = 0
_DISTPOINT_TYPE_RELATIVENAME = 1
|
|
||||||
|
|
||||||
|
|
||||||
def _decode_crl_distribution_points(backend, cdps):
    """Decode a cRLDistributionPoints extension."""
    stack = backend._ffi.cast("Cryptography_STACK_OF_DIST_POINT *", cdps)
    stack = backend._ffi.gc(stack, backend._lib.sk_DIST_POINT_free)

    # RFC 5280 ReasonFlags bit positions; bit 0 ("unused") is never mapped.
    #   ReasonFlags ::= BIT STRING {
    #        unused                  (0),
    #        keyCompromise           (1),
    #        cACompromise            (2),
    #        affiliationChanged      (3),
    #        superseded              (4),
    #        cessationOfOperation    (5),
    #        certificateHold         (6),
    #        privilegeWithdrawn      (7),
    #        aACompromise            (8) }
    bit_to_reason = {
        1: x509.ReasonFlags.key_compromise,
        2: x509.ReasonFlags.ca_compromise,
        3: x509.ReasonFlags.affiliation_changed,
        4: x509.ReasonFlags.superseded,
        5: x509.ReasonFlags.cessation_of_operation,
        6: x509.ReasonFlags.certificate_hold,
        7: x509.ReasonFlags.privilege_withdrawn,
        8: x509.ReasonFlags.aa_compromise,
    }

    dist_points = []
    for idx in range(backend._lib.sk_DIST_POINT_num(stack)):
        cdp = backend._lib.sk_DIST_POINT_value(stack, idx)

        reasons = None
        if cdp.reasons != backend._ffi.NULL:
            get_bit = backend._lib.ASN1_BIT_STRING_get_bit
            reasons = frozenset(
                reason for bit, reason in bit_to_reason.items()
                if get_bit(cdp.reasons, bit)
            )

        crl_issuer = None
        if cdp.CRLissuer != backend._ffi.NULL:
            crl_issuer = _decode_general_names(backend, cdp.CRLissuer)

        full_name = None
        relative_name = None
        # Certificates may have a crl_issuer/reasons and no distribution
        # point, so make sure it's not null.
        if cdp.distpoint != backend._ffi.NULL:
            if cdp.distpoint.type == _DISTPOINT_TYPE_FULLNAME:
                full_name = _decode_general_names(
                    backend, cdp.distpoint.name.fullname
                )
            else:
                # OpenSSL doesn't test for a specific relativename type;
                # everything that isn't fullname is relativename.
                entries = cdp.distpoint.name.relativename
                entry_count = backend._lib.sk_X509_NAME_ENTRY_num(entries)
                attributes = []
                for entry_idx in range(entry_count):
                    entry = backend._lib.sk_X509_NAME_ENTRY_value(
                        entries, entry_idx
                    )
                    backend.openssl_assert(entry != backend._ffi.NULL)
                    attributes.append(
                        _decode_x509_name_entry(backend, entry)
                    )
                relative_name = x509.Name(attributes)

        dist_points.append(
            x509.DistributionPoint(
                full_name, relative_name, reasons, crl_issuer
            )
        )

    return x509.CRLDistributionPoints(dist_points)
|
|
||||||
|
|
||||||
|
|
||||||
def _decode_inhibit_any_policy(backend, asn1_int):
    """Decode an inhibitAnyPolicy extension (a skip-certs count)."""
    value = backend._ffi.cast("ASN1_INTEGER *", asn1_int)
    value = backend._ffi.gc(value, backend._lib.ASN1_INTEGER_free)
    return x509.InhibitAnyPolicy(backend._asn1_integer_to_int(value))
|
|
||||||
|
|
||||||
|
|
||||||
# Maps RFC 5280 CRLReason enumeration values to ReasonFlags members.
# Code 7 is intentionally absent: it is unassigned in the CRLReason
# ASN.1 enumeration.
_CRL_REASON_CODE_TO_ENUM = {
    0: x509.ReasonFlags.unspecified,
    1: x509.ReasonFlags.key_compromise,
    2: x509.ReasonFlags.ca_compromise,
    3: x509.ReasonFlags.affiliation_changed,
    4: x509.ReasonFlags.superseded,
    5: x509.ReasonFlags.cessation_of_operation,
    6: x509.ReasonFlags.certificate_hold,
    8: x509.ReasonFlags.remove_from_crl,
    9: x509.ReasonFlags.privilege_withdrawn,
    10: x509.ReasonFlags.aa_compromise,
}
|
|
||||||
|
|
||||||
|
|
||||||
def _decode_crl_reason(backend, enum):
    """Decode a cRLReason entry extension into a ReasonFlags member.

    :raises ValueError: for codes outside _CRL_REASON_CODE_TO_ENUM.
    """
    reason = backend._ffi.cast("ASN1_ENUMERATED *", enum)
    reason = backend._ffi.gc(reason, backend._lib.ASN1_ENUMERATED_free)
    code = backend._lib.ASN1_ENUMERATED_get(reason)
    try:
        return _CRL_REASON_CODE_TO_ENUM[code]
    except KeyError:
        raise ValueError("Unsupported reason code: {0}".format(code))
|
|
||||||
|
|
||||||
|
|
||||||
def _decode_invalidity_date(backend, inv_date):
    """Decode an invalidityDate entry extension into a naive datetime."""
    gen_time = backend._ffi.cast("ASN1_GENERALIZEDTIME *", inv_date)
    gen_time = backend._ffi.gc(
        gen_time, backend._lib.ASN1_GENERALIZEDTIME_free
    )
    raw = backend._lib.ASN1_STRING_data(
        backend._ffi.cast("ASN1_STRING *", gen_time)
    )
    text = backend._ffi.string(raw).decode("ascii")
    return datetime.datetime.strptime(text, "%Y%m%d%H%M%SZ")
|
|
||||||
|
|
||||||
|
|
||||||
def _decode_cert_issuer(backend, ext):
    """
    Decode the CertificateIssuer entry extension directly from the raw
    X509_EXTENSION object. This is necessary because this entry extension
    is not directly supported by OpenSSL 0.9.8.
    """
    data_ptr_ptr = backend._ffi.new("const unsigned char **")
    data_ptr_ptr[0] = ext.value.data
    gns = backend._lib.d2i_GENERAL_NAMES(
        backend._ffi.NULL, data_ptr_ptr, ext.value.length
    )

    # _X509ExtensionParser normally validates d2i results, but since we
    # decode this entry extension ourselves we must do that check here.
    if gns == backend._ffi.NULL:
        backend._consume_errors()
        raise ValueError(
            "The {0} extension is corrupted and can't be parsed".format(
                CRLExtensionOID.CERTIFICATE_ISSUER))

    gns = backend._ffi.gc(gns, backend._lib.GENERAL_NAMES_free)
    return x509.GeneralNames(_decode_general_names(backend, gns))
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(x509.RevokedCertificate)
class _RevokedCertificate(object):
    """An ``x509.RevokedCertificate`` backed by an OpenSSL ``X509_REVOKED *``."""

    def __init__(self, backend, x509_revoked):
        self._backend = backend
        self._x509_revoked = x509_revoked

    @property
    def serial_number(self):
        number = self._x509_revoked.serialNumber
        self._backend.openssl_assert(number != self._backend._ffi.NULL)
        return self._backend._asn1_integer_to_int(number)

    @property
    def revocation_date(self):
        return self._backend._parse_asn1_time(
            self._x509_revoked.revocationDate)

    @property
    def extensions(self):
        return _REVOKED_CERTIFICATE_EXTENSION_PARSER.parse(
            self._backend, self._x509_revoked
        )
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(x509.CertificateRevocationList)
class _CertificateRevocationList(object):
    """An ``x509.CertificateRevocationList`` backed by an OpenSSL
    ``X509_CRL *``."""

    def __init__(self, backend, x509_crl):
        self._backend = backend
        self._x509_crl = x509_crl

    def __eq__(self, other):
        if not isinstance(other, x509.CertificateRevocationList):
            return NotImplemented
        # X509_CRL_cmp returns 0 when both CRLs are identical.
        return self._backend._lib.X509_CRL_cmp(
            self._x509_crl, other._x509_crl
        ) == 0

    def __ne__(self, other):
        return not self == other

    def fingerprint(self, algorithm):
        """Return the digest of the DER encoding under ``algorithm``."""
        digest = hashes.Hash(algorithm, self._backend)
        bio = self._backend._create_mem_bio()
        res = self._backend._lib.i2d_X509_CRL_bio(bio, self._x509_crl)
        self._backend.openssl_assert(res == 1)
        digest.update(self._backend._read_mem_bio(bio))
        return digest.finalize()

    @property
    def signature_hash_algorithm(self):
        oid = _obj2txt(self._backend, self._x509_crl.sig_alg.algorithm)
        try:
            return x509._SIG_OIDS_TO_HASH[oid]
        except KeyError:
            raise UnsupportedAlgorithm(
                "Signature algorithm OID:{0} not recognized".format(oid)
            )

    @property
    def issuer(self):
        name = self._backend._lib.X509_CRL_get_issuer(self._x509_crl)
        self._backend.openssl_assert(name != self._backend._ffi.NULL)
        return _decode_x509_name(self._backend, name)

    @property
    def next_update(self):
        when = self._backend._lib.X509_CRL_get_nextUpdate(self._x509_crl)
        self._backend.openssl_assert(when != self._backend._ffi.NULL)
        return self._backend._parse_asn1_time(when)

    @property
    def last_update(self):
        when = self._backend._lib.X509_CRL_get_lastUpdate(self._x509_crl)
        self._backend.openssl_assert(when != self._backend._ffi.NULL)
        return self._backend._parse_asn1_time(when)

    def _revoked_certificates(self):
        # Materialize the stack of X509_REVOKED entries as wrapper objects.
        stack = self._backend._lib.X509_CRL_get_REVOKED(self._x509_crl)
        self._backend.openssl_assert(stack != self._backend._ffi.NULL)

        entries = []
        for idx in range(self._backend._lib.sk_X509_REVOKED_num(stack)):
            revoked = self._backend._lib.sk_X509_REVOKED_value(stack, idx)
            self._backend.openssl_assert(revoked != self._backend._ffi.NULL)
            entries.append(_RevokedCertificate(self._backend, revoked))

        return entries

    def __iter__(self):
        return iter(self._revoked_certificates())

    def __getitem__(self, idx):
        return self._revoked_certificates()[idx]

    def __len__(self):
        return len(self._revoked_certificates())

    @property
    def extensions(self):
        # CRL-level extensions are not implemented in this backend.
        raise NotImplementedError()
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(x509.CertificateSigningRequest)
class _CertificateSigningRequest(object):
    """An ``x509.CertificateSigningRequest`` backed by an OpenSSL
    ``X509_REQ *``."""

    def __init__(self, backend, x509_req):
        self._backend = backend
        self._x509_req = x509_req

    def __eq__(self, other):
        if not isinstance(other, _CertificateSigningRequest):
            return NotImplemented
        # Two CSRs are equal when their DER serializations match.
        self_bytes = self.public_bytes(serialization.Encoding.DER)
        other_bytes = other.public_bytes(serialization.Encoding.DER)
        return self_bytes == other_bytes

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        return hash(self.public_bytes(serialization.Encoding.DER))

    def public_key(self):
        """Return the public key embedded in the request."""
        evp_pkey = self._backend._lib.X509_REQ_get_pubkey(self._x509_req)
        self._backend.openssl_assert(evp_pkey != self._backend._ffi.NULL)
        evp_pkey = self._backend._ffi.gc(
            evp_pkey, self._backend._lib.EVP_PKEY_free
        )
        return self._backend._evp_pkey_to_public_key(evp_pkey)

    @property
    def subject(self):
        name = self._backend._lib.X509_REQ_get_subject_name(self._x509_req)
        self._backend.openssl_assert(name != self._backend._ffi.NULL)
        return _decode_x509_name(self._backend, name)

    @property
    def signature_hash_algorithm(self):
        oid = _obj2txt(self._backend, self._x509_req.sig_alg.algorithm)
        try:
            return x509._SIG_OIDS_TO_HASH[oid]
        except KeyError:
            raise UnsupportedAlgorithm(
                "Signature algorithm OID:{0} not recognized".format(oid)
            )

    @property
    def extensions(self):
        x509_exts = self._backend._lib.X509_REQ_get_extensions(
            self._x509_req
        )
        return _CSR_EXTENSION_PARSER.parse(self._backend, x509_exts)

    def public_bytes(self, encoding):
        """Serialize the CSR as PEM or DER bytes."""
        bio = self._backend._create_mem_bio()
        if encoding is serialization.Encoding.PEM:
            res = self._backend._lib.PEM_write_bio_X509_REQ(
                bio, self._x509_req
            )
        elif encoding is serialization.Encoding.DER:
            res = self._backend._lib.i2d_X509_REQ_bio(bio, self._x509_req)
        else:
            raise TypeError("encoding must be an item from the Encoding enum")

        self._backend.openssl_assert(res == 1)
        return self._backend._read_mem_bio(bio)
|
|
||||||
|
|
||||||
|
|
||||||
# Maps certificate/CSR extension OIDs to their decoder callables for
# _X509ExtensionParser. Extensions whose OID is absent are skipped when
# non-critical; a critical unknown OID raises x509.UnsupportedExtension.
_EXTENSION_HANDLERS = {
    ExtensionOID.BASIC_CONSTRAINTS: _decode_basic_constraints,
    ExtensionOID.SUBJECT_KEY_IDENTIFIER: _decode_subject_key_identifier,
    ExtensionOID.KEY_USAGE: _decode_key_usage,
    ExtensionOID.SUBJECT_ALTERNATIVE_NAME: _decode_subject_alt_name,
    ExtensionOID.EXTENDED_KEY_USAGE: _decode_extended_key_usage,
    ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _decode_authority_key_identifier,
    ExtensionOID.AUTHORITY_INFORMATION_ACCESS: (
        _decode_authority_information_access
    ),
    ExtensionOID.CERTIFICATE_POLICIES: _decode_certificate_policies,
    ExtensionOID.CRL_DISTRIBUTION_POINTS: _decode_crl_distribution_points,
    ExtensionOID.OCSP_NO_CHECK: _decode_ocsp_no_check,
    ExtensionOID.INHIBIT_ANY_POLICY: _decode_inhibit_any_policy,
    ExtensionOID.ISSUER_ALTERNATIVE_NAME: _decode_issuer_alt_name,
    ExtensionOID.NAME_CONSTRAINTS: _decode_name_constraints,
}
|
|
||||||
|
|
||||||
# Maps CRL-entry extension OIDs to their decoder callables for
# _REVOKED_CERTIFICATE_EXTENSION_PARSER.
_REVOKED_EXTENSION_HANDLERS = {
    CRLExtensionOID.CRL_REASON: _decode_crl_reason,
    CRLExtensionOID.INVALIDITY_DATE: _decode_invalidity_date,
    CRLExtensionOID.CERTIFICATE_ISSUER: _decode_cert_issuer,
}
|
|
||||||
|
|
||||||
# OIDs whose raw X509_EXTENSION object is handed to the handler because
# OpenSSL cannot decode them itself (see _X509ExtensionParser's
# ``unsupported_exts`` parameter). Uses a set literal instead of
# ``set([...])`` (idiomatic; avoids building a throwaway list).
_REVOKED_UNSUPPORTED_EXTENSIONS = {
    CRLExtensionOID.CERTIFICATE_ISSUER,
}
|
|
||||||
|
|
||||||
# Parser for extensions attached directly to an X509 certificate object.
_CERTIFICATE_EXTENSION_PARSER = _X509ExtensionParser(
    ext_count=lambda backend, x: backend._lib.X509_get_ext_count(x),
    get_ext=lambda backend, x, i: backend._lib.X509_get_ext(x, i),
    handlers=_EXTENSION_HANDLERS
)
|
|
||||||
|
|
||||||
# Parser for CSR extensions; X509_REQ_get_extensions returns a plain
# STACK_OF(X509_EXTENSION), hence the sk_* accessors.
_CSR_EXTENSION_PARSER = _X509ExtensionParser(
    ext_count=lambda backend, x: backend._lib.sk_X509_EXTENSION_num(x),
    get_ext=lambda backend, x, i: backend._lib.sk_X509_EXTENSION_value(x, i),
    handlers=_EXTENSION_HANDLERS
)
|
|
||||||
|
|
||||||
# Parser for CRL-entry (X509_REVOKED) extensions. CERTIFICATE_ISSUER is
# listed as unsupported so its raw X509_EXTENSION reaches
# _decode_cert_issuer, which decodes it manually.
_REVOKED_CERTIFICATE_EXTENSION_PARSER = _X509ExtensionParser(
    ext_count=lambda backend, x: backend._lib.X509_REVOKED_get_ext_count(x),
    get_ext=lambda backend, x, i: backend._lib.X509_REVOKED_get_ext(x, i),
    handlers=_REVOKED_EXTENSION_HANDLERS,
    unsupported_exts=_REVOKED_UNSUPPORTED_EXTENSIONS
)
|
|
|
@ -1,5 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
@ -1,5 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
|
@ -1,15 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
from cryptography.hazmat.bindings._commoncrypto import ffi, lib
|
|
||||||
|
|
||||||
|
|
||||||
class Binding(object):
    """
    CommonCrypto API wrapper.

    Re-exports the cffi ``ffi`` and ``lib`` objects imported from
    ``cryptography.hazmat.bindings._commoncrypto`` as class attributes.
    """
    # cffi library handle and FFI instance, shared by all consumers.
    lib = lib
    ffi = ffi
|
|
|
@ -1,5 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
|
@ -1,418 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
# This is a temporary copy of all the CONDITIONAL_NAMES from _cffi_src so
|
|
||||||
# we can loop over them and delete them at runtime. It will be removed when
|
|
||||||
# cffi supports #if in cdef
|
|
||||||
|
|
||||||
CONDITIONAL_NAMES = {
|
|
||||||
"Cryptography_HAS_AES_WRAP": [
|
|
||||||
"AES_wrap_key",
|
|
||||||
"AES_unwrap_key",
|
|
||||||
],
|
|
||||||
"Cryptography_HAS_CMAC": [
|
|
||||||
"CMAC_CTX_new",
|
|
||||||
"CMAC_Init",
|
|
||||||
"CMAC_Update",
|
|
||||||
"CMAC_Final",
|
|
||||||
"CMAC_CTX_copy",
|
|
||||||
"CMAC_CTX_free",
|
|
||||||
],
|
|
||||||
"Cryptography_HAS_CMS": [
|
|
||||||
"BIO_new_CMS",
|
|
||||||
"i2d_CMS_bio_stream",
|
|
||||||
"PEM_write_bio_CMS_stream",
|
|
||||||
"CMS_final",
|
|
||||||
"CMS_sign",
|
|
||||||
"CMS_verify",
|
|
||||||
"CMS_encrypt",
|
|
||||||
"CMS_decrypt",
|
|
||||||
"CMS_add1_signer",
|
|
||||||
"CMS_TEXT",
|
|
||||||
"CMS_NOCERTS",
|
|
||||||
"CMS_NO_CONTENT_VERIFY",
|
|
||||||
"CMS_NO_ATTR_VERIFY",
|
|
||||||
"CMS_NOSIGS",
|
|
||||||
"CMS_NOINTERN",
|
|
||||||
"CMS_NO_SIGNER_CERT_VERIFY",
|
|
||||||
"CMS_NOVERIFY",
|
|
||||||
"CMS_DETACHED",
|
|
||||||
"CMS_BINARY",
|
|
||||||
"CMS_NOATTR",
|
|
||||||
"CMS_NOSMIMECAP",
|
|
||||||
"CMS_NOOLDMIMETYPE",
|
|
||||||
"CMS_CRLFEOL",
|
|
||||||
"CMS_STREAM",
|
|
||||||
"CMS_NOCRL",
|
|
||||||
"CMS_PARTIAL",
|
|
||||||
"CMS_REUSE_DIGEST",
|
|
||||||
"CMS_USE_KEYID",
|
|
||||||
"CMS_DEBUG_DECRYPT",
|
|
||||||
],
|
|
||||||
"Cryptography_HAS_EC": [
|
|
||||||
"OPENSSL_EC_NAMED_CURVE",
|
|
||||||
"EC_GROUP_new",
|
|
||||||
"EC_GROUP_free",
|
|
||||||
"EC_GROUP_clear_free",
|
|
||||||
"EC_GROUP_new_curve_GFp",
|
|
||||||
"EC_GROUP_new_by_curve_name",
|
|
||||||
"EC_GROUP_set_curve_GFp",
|
|
||||||
"EC_GROUP_get_curve_GFp",
|
|
||||||
"EC_GROUP_method_of",
|
|
||||||
"EC_GROUP_get0_generator",
|
|
||||||
"EC_GROUP_get_curve_name",
|
|
||||||
"EC_GROUP_get_degree",
|
|
||||||
"EC_GROUP_set_asn1_flag",
|
|
||||||
"EC_GROUP_set_point_conversion_form",
|
|
||||||
"EC_KEY_new",
|
|
||||||
"EC_KEY_free",
|
|
||||||
"EC_get_builtin_curves",
|
|
||||||
"EC_KEY_new_by_curve_name",
|
|
||||||
"EC_KEY_copy",
|
|
||||||
"EC_KEY_dup",
|
|
||||||
"EC_KEY_up_ref",
|
|
||||||
"EC_KEY_set_group",
|
|
||||||
"EC_KEY_get0_private_key",
|
|
||||||
"EC_KEY_set_private_key",
|
|
||||||
"EC_KEY_set_public_key",
|
|
||||||
"EC_KEY_get_enc_flags",
|
|
||||||
"EC_KEY_set_enc_flags",
|
|
||||||
"EC_KEY_set_conv_form",
|
|
||||||
"EC_KEY_get_key_method_data",
|
|
||||||
"EC_KEY_insert_key_method_data",
|
|
||||||
"EC_KEY_set_asn1_flag",
|
|
||||||
"EC_KEY_precompute_mult",
|
|
||||||
"EC_KEY_generate_key",
|
|
||||||
"EC_KEY_check_key",
|
|
||||||
"EC_POINT_new",
|
|
||||||
"EC_POINT_free",
|
|
||||||
"EC_POINT_clear_free",
|
|
||||||
"EC_POINT_copy",
|
|
||||||
"EC_POINT_dup",
|
|
||||||
"EC_POINT_method_of",
|
|
||||||
"EC_POINT_set_to_infinity",
|
|
||||||
"EC_POINT_set_Jprojective_coordinates_GFp",
|
|
||||||
"EC_POINT_get_Jprojective_coordinates_GFp",
|
|
||||||
"EC_POINT_set_affine_coordinates_GFp",
|
|
||||||
"EC_POINT_get_affine_coordinates_GFp",
|
|
||||||
"EC_POINT_set_compressed_coordinates_GFp",
|
|
||||||
"EC_POINT_point2oct",
|
|
||||||
"EC_POINT_oct2point",
|
|
||||||
"EC_POINT_point2bn",
|
|
||||||
"EC_POINT_bn2point",
|
|
||||||
"EC_POINT_point2hex",
|
|
||||||
"EC_POINT_hex2point",
|
|
||||||
"EC_POINT_add",
|
|
||||||
"EC_POINT_dbl",
|
|
||||||
"EC_POINT_invert",
|
|
||||||
"EC_POINT_is_at_infinity",
|
|
||||||
"EC_POINT_is_on_curve",
|
|
||||||
"EC_POINT_cmp",
|
|
||||||
"EC_POINT_make_affine",
|
|
||||||
"EC_POINTs_make_affine",
|
|
||||||
"EC_POINTs_mul",
|
|
||||||
"EC_POINT_mul",
|
|
||||||
"EC_GROUP_precompute_mult",
|
|
||||||
"EC_GROUP_have_precompute_mult",
|
|
||||||
"EC_GFp_simple_method",
|
|
||||||
"EC_GFp_mont_method",
|
|
||||||
"EC_GFp_nist_method",
|
|
||||||
"EC_METHOD_get_field_type",
|
|
||||||
"EVP_PKEY_assign_EC_KEY",
|
|
||||||
"EVP_PKEY_get1_EC_KEY",
|
|
||||||
"EVP_PKEY_set1_EC_KEY",
|
|
||||||
"PEM_write_bio_ECPrivateKey",
|
|
||||||
"i2d_EC_PUBKEY",
|
|
||||||
"d2i_EC_PUBKEY",
|
|
||||||
"d2i_EC_PUBKEY_bio",
|
|
||||||
"i2d_EC_PUBKEY_bio",
|
|
||||||
"d2i_ECPrivateKey",
|
|
||||||
"d2i_ECPrivateKey_bio",
|
|
||||||
"i2d_ECPrivateKey",
|
|
||||||
"i2d_ECPrivateKey_bio",
|
|
||||||
"i2o_ECPublicKey",
|
|
||||||
"o2i_ECPublicKey",
|
|
||||||
"SSL_CTX_set_tmp_ecdh",
|
|
||||||
],
|
|
||||||
|
|
||||||
"Cryptography_HAS_EC_1_0_1": [
|
|
||||||
"EC_KEY_get_flags",
|
|
||||||
"EC_KEY_set_flags",
|
|
||||||
"EC_KEY_clear_flags",
|
|
||||||
"EC_KEY_set_public_key_affine_coordinates",
|
|
||||||
],
|
|
||||||
|
|
||||||
"Cryptography_HAS_EC_NISTP_64_GCC_128": [
|
|
||||||
"EC_GFp_nistp224_method",
|
|
||||||
"EC_GFp_nistp256_method",
|
|
||||||
"EC_GFp_nistp521_method",
|
|
||||||
],
|
|
||||||
|
|
||||||
"Cryptography_HAS_EC2M": [
|
|
||||||
"EC_GF2m_simple_method",
|
|
||||||
"EC_POINT_set_affine_coordinates_GF2m",
|
|
||||||
"EC_POINT_get_affine_coordinates_GF2m",
|
|
||||||
"EC_POINT_set_compressed_coordinates_GF2m",
|
|
||||||
"EC_GROUP_set_curve_GF2m",
|
|
||||||
"EC_GROUP_get_curve_GF2m",
|
|
||||||
"EC_GROUP_new_curve_GF2m",
|
|
||||||
],
|
|
||||||
|
|
||||||
"Cryptography_HAS_EC_1_0_2": [
|
|
||||||
"EC_curve_nid2nist",
|
|
||||||
],
|
|
||||||
"Cryptography_HAS_ECDH": [
|
|
||||||
"ECDH_compute_key",
|
|
||||||
"ECDH_get_ex_new_index",
|
|
||||||
"ECDH_set_ex_data",
|
|
||||||
"ECDH_get_ex_data",
|
|
||||||
],
|
|
||||||
"Cryptography_HAS_ECDSA": [
|
|
||||||
"ECDSA_SIG_new",
|
|
||||||
"ECDSA_SIG_free",
|
|
||||||
"i2d_ECDSA_SIG",
|
|
||||||
"d2i_ECDSA_SIG",
|
|
||||||
"ECDSA_do_sign",
|
|
||||||
"ECDSA_do_sign_ex",
|
|
||||||
"ECDSA_do_verify",
|
|
||||||
"ECDSA_sign_setup",
|
|
||||||
"ECDSA_sign",
|
|
||||||
"ECDSA_sign_ex",
|
|
||||||
"ECDSA_verify",
|
|
||||||
"ECDSA_size",
|
|
||||||
"ECDSA_OpenSSL",
|
|
||||||
"ECDSA_set_default_method",
|
|
||||||
"ECDSA_get_default_method",
|
|
||||||
"ECDSA_set_method",
|
|
||||||
"ECDSA_get_ex_new_index",
|
|
||||||
"ECDSA_set_ex_data",
|
|
||||||
"ECDSA_get_ex_data",
|
|
||||||
],
|
|
||||||
"Cryptography_HAS_ENGINE_CRYPTODEV": [
|
|
||||||
"ENGINE_load_cryptodev"
|
|
||||||
],
|
|
||||||
"Cryptography_HAS_REMOVE_THREAD_STATE": [
|
|
||||||
"ERR_remove_thread_state"
|
|
||||||
],
|
|
||||||
"Cryptography_HAS_098H_ERROR_CODES": [
|
|
||||||
"ASN1_F_B64_READ_ASN1",
|
|
||||||
"ASN1_F_B64_WRITE_ASN1",
|
|
||||||
"ASN1_F_SMIME_READ_ASN1",
|
|
||||||
"ASN1_F_SMIME_TEXT",
|
|
||||||
"ASN1_R_NO_CONTENT_TYPE",
|
|
||||||
"ASN1_R_NO_MULTIPART_BODY_FAILURE",
|
|
||||||
"ASN1_R_NO_MULTIPART_BOUNDARY",
|
|
||||||
],
|
|
||||||
"Cryptography_HAS_098C_CAMELLIA_CODES": [
|
|
||||||
"EVP_F_CAMELLIA_INIT_KEY",
|
|
||||||
"EVP_R_CAMELLIA_KEY_SETUP_FAILED"
|
|
||||||
],
|
|
||||||
"Cryptography_HAS_EC_CODES": [
|
|
||||||
"EC_R_UNKNOWN_GROUP",
|
|
||||||
"EC_F_EC_GROUP_NEW_BY_CURVE_NAME"
|
|
||||||
],
|
|
||||||
"Cryptography_HAS_RSA_R_PKCS_DECODING_ERROR": [
|
|
||||||
"RSA_R_PKCS_DECODING_ERROR"
|
|
||||||
],
|
|
||||||
"Cryptography_HAS_GCM": [
|
|
||||||
"EVP_CTRL_GCM_GET_TAG",
|
|
||||||
"EVP_CTRL_GCM_SET_TAG",
|
|
||||||
"EVP_CTRL_GCM_SET_IVLEN",
|
|
||||||
],
|
|
||||||
"Cryptography_HAS_PBKDF2_HMAC": [
|
|
||||||
"PKCS5_PBKDF2_HMAC"
|
|
||||||
],
|
|
||||||
"Cryptography_HAS_PKEY_CTX": [
|
|
||||||
"EVP_PKEY_CTX_new",
|
|
||||||
"EVP_PKEY_CTX_new_id",
|
|
||||||
"EVP_PKEY_CTX_dup",
|
|
||||||
"EVP_PKEY_CTX_free",
|
|
||||||
"EVP_PKEY_sign",
|
|
||||||
"EVP_PKEY_sign_init",
|
|
||||||
"EVP_PKEY_verify",
|
|
||||||
"EVP_PKEY_verify_init",
|
|
||||||
"Cryptography_EVP_PKEY_encrypt",
|
|
||||||
"EVP_PKEY_encrypt_init",
|
|
||||||
"Cryptography_EVP_PKEY_decrypt",
|
|
||||||
"EVP_PKEY_decrypt_init",
|
|
||||||
"EVP_PKEY_CTX_set_signature_md",
|
|
||||||
"EVP_PKEY_id",
|
|
||||||
"EVP_PKEY_CTX_set_rsa_padding",
|
|
||||||
"EVP_PKEY_CTX_set_rsa_pss_saltlen",
|
|
||||||
],
|
|
||||||
"Cryptography_HAS_ECDSA_SHA2_NIDS": [
|
|
||||||
"NID_ecdsa_with_SHA224",
|
|
||||||
"NID_ecdsa_with_SHA256",
|
|
||||||
"NID_ecdsa_with_SHA384",
|
|
||||||
"NID_ecdsa_with_SHA512",
|
|
||||||
],
|
|
||||||
"Cryptography_HAS_EGD": [
|
|
||||||
"RAND_egd",
|
|
||||||
"RAND_egd_bytes",
|
|
||||||
"RAND_query_egd_bytes",
|
|
||||||
],
|
|
||||||
"Cryptography_HAS_PSS_PADDING": [
|
|
||||||
"RSA_PKCS1_PSS_PADDING",
|
|
||||||
],
|
|
||||||
"Cryptography_HAS_MGF1_MD": [
|
|
||||||
"EVP_PKEY_CTX_set_rsa_mgf1_md",
|
|
||||||
],
|
|
||||||
"Cryptography_HAS_TLSv1_1": [
|
|
||||||
"SSL_OP_NO_TLSv1_1",
|
|
||||||
"TLSv1_1_method",
|
|
||||||
"TLSv1_1_server_method",
|
|
||||||
"TLSv1_1_client_method",
|
|
||||||
],
|
|
||||||
|
|
||||||
"Cryptography_HAS_TLSv1_2": [
|
|
||||||
"SSL_OP_NO_TLSv1_2",
|
|
||||||
"TLSv1_2_method",
|
|
||||||
"TLSv1_2_server_method",
|
|
||||||
"TLSv1_2_client_method",
|
|
||||||
],
|
|
||||||
|
|
||||||
"Cryptography_HAS_SSL2": [
|
|
||||||
"SSLv2_method",
|
|
||||||
"SSLv2_client_method",
|
|
||||||
"SSLv2_server_method",
|
|
||||||
],
|
|
||||||
|
|
||||||
"Cryptography_HAS_SSL3_METHOD": [
|
|
||||||
"SSLv3_method",
|
|
||||||
"SSLv3_client_method",
|
|
||||||
"SSLv3_server_method",
|
|
||||||
],
|
|
||||||
|
|
||||||
"Cryptography_HAS_TLSEXT_HOSTNAME": [
|
|
||||||
"SSL_set_tlsext_host_name",
|
|
||||||
"SSL_get_servername",
|
|
||||||
"SSL_CTX_set_tlsext_servername_callback",
|
|
||||||
],
|
|
||||||
|
|
||||||
"Cryptography_HAS_TLSEXT_STATUS_REQ_CB": [
|
|
||||||
"SSL_CTX_set_tlsext_status_cb",
|
|
||||||
"SSL_CTX_set_tlsext_status_arg"
|
|
||||||
],
|
|
||||||
|
|
||||||
"Cryptography_HAS_STATUS_REQ_OCSP_RESP": [
|
|
||||||
"SSL_set_tlsext_status_ocsp_resp",
|
|
||||||
"SSL_get_tlsext_status_ocsp_resp",
|
|
||||||
],
|
|
||||||
|
|
||||||
"Cryptography_HAS_TLSEXT_STATUS_REQ_TYPE": [
|
|
||||||
"SSL_set_tlsext_status_type",
|
|
||||||
],
|
|
||||||
|
|
||||||
"Cryptography_HAS_RELEASE_BUFFERS": [
|
|
||||||
"SSL_MODE_RELEASE_BUFFERS",
|
|
||||||
],
|
|
||||||
|
|
||||||
"Cryptography_HAS_OP_NO_COMPRESSION": [
|
|
||||||
"SSL_OP_NO_COMPRESSION",
|
|
||||||
],
|
|
||||||
|
|
||||||
"Cryptography_HAS_SSL_OP_MSIE_SSLV2_RSA_PADDING": [
|
|
||||||
"SSL_OP_MSIE_SSLV2_RSA_PADDING",
|
|
||||||
],
|
|
||||||
|
|
||||||
"Cryptography_HAS_SSL_OP_NO_TICKET": [
|
|
||||||
"SSL_OP_NO_TICKET",
|
|
||||||
],
|
|
||||||
|
|
||||||
"Cryptography_HAS_SSL_SET_SSL_CTX": [
|
|
||||||
"SSL_set_SSL_CTX",
|
|
||||||
"TLSEXT_NAMETYPE_host_name",
|
|
||||||
],
|
|
||||||
|
|
||||||
"Cryptography_HAS_NETBSD_D1_METH": [
|
|
||||||
"DTLSv1_method",
|
|
||||||
],
|
|
||||||
|
|
||||||
"Cryptography_HAS_NEXTPROTONEG": [
|
|
||||||
"SSL_CTX_set_next_protos_advertised_cb",
|
|
||||||
"SSL_CTX_set_next_proto_select_cb",
|
|
||||||
"SSL_select_next_proto",
|
|
||||||
"SSL_get0_next_proto_negotiated",
|
|
||||||
],
|
|
||||||
|
|
||||||
"Cryptography_HAS_SECURE_RENEGOTIATION": [
|
|
||||||
"SSL_OP_ALLOW_UNSAFE_LEGACY_RENEGOTIATION",
|
|
||||||
"SSL_OP_LEGACY_SERVER_CONNECT",
|
|
||||||
"SSL_get_secure_renegotiation_support",
|
|
||||||
],
|
|
||||||
|
|
||||||
"Cryptography_HAS_ALPN": [
|
|
||||||
"SSL_CTX_set_alpn_protos",
|
|
||||||
"SSL_set_alpn_protos",
|
|
||||||
"SSL_CTX_set_alpn_select_cb",
|
|
||||||
"SSL_get0_alpn_selected",
|
|
||||||
],
|
|
||||||
|
|
||||||
"Cryptography_HAS_COMPRESSION": [
|
|
||||||
"SSL_get_current_compression",
|
|
||||||
"SSL_get_current_expansion",
|
|
||||||
"SSL_COMP_get_name",
|
|
||||||
],
|
|
||||||
|
|
||||||
"Cryptography_HAS_GET_SERVER_TMP_KEY": [
|
|
||||||
"SSL_get_server_tmp_key",
|
|
||||||
],
|
|
||||||
|
|
||||||
"Cryptography_HAS_SSL_CTX_SET_CLIENT_CERT_ENGINE": [
|
|
||||||
"SSL_CTX_set_client_cert_engine",
|
|
||||||
],
|
|
||||||
"Cryptography_HAS_102_VERIFICATION_ERROR_CODES": [
|
|
||||||
'X509_V_ERR_SUITE_B_INVALID_VERSION',
|
|
||||||
'X509_V_ERR_SUITE_B_INVALID_ALGORITHM',
|
|
||||||
'X509_V_ERR_SUITE_B_INVALID_CURVE',
|
|
||||||
'X509_V_ERR_SUITE_B_INVALID_SIGNATURE_ALGORITHM',
|
|
||||||
'X509_V_ERR_SUITE_B_LOS_NOT_ALLOWED',
|
|
||||||
'X509_V_ERR_SUITE_B_CANNOT_SIGN_P_384_WITH_P_256',
|
|
||||||
'X509_V_ERR_HOSTNAME_MISMATCH',
|
|
||||||
'X509_V_ERR_EMAIL_MISMATCH',
|
|
||||||
'X509_V_ERR_IP_ADDRESS_MISMATCH'
|
|
||||||
],
|
|
||||||
"Cryptography_HAS_102_VERIFICATION_PARAMS": [
|
|
||||||
"X509_V_FLAG_SUITEB_128_LOS_ONLY",
|
|
||||||
"X509_V_FLAG_SUITEB_192_LOS",
|
|
||||||
"X509_V_FLAG_SUITEB_128_LOS",
|
|
||||||
"X509_VERIFY_PARAM_set1_host",
|
|
||||||
"X509_VERIFY_PARAM_set1_email",
|
|
||||||
"X509_VERIFY_PARAM_set1_ip",
|
|
||||||
"X509_VERIFY_PARAM_set1_ip_asc",
|
|
||||||
"X509_VERIFY_PARAM_set_hostflags",
|
|
||||||
],
|
|
||||||
"Cryptography_HAS_X509_V_FLAG_TRUSTED_FIRST": [
|
|
||||||
"X509_V_FLAG_TRUSTED_FIRST",
|
|
||||||
],
|
|
||||||
"Cryptography_HAS_X509_V_FLAG_PARTIAL_CHAIN": [
|
|
||||||
"X509_V_FLAG_PARTIAL_CHAIN",
|
|
||||||
],
|
|
||||||
"Cryptography_HAS_100_VERIFICATION_ERROR_CODES": [
|
|
||||||
'X509_V_ERR_DIFFERENT_CRL_SCOPE',
|
|
||||||
'X509_V_ERR_UNSUPPORTED_EXTENSION_FEATURE',
|
|
||||||
'X509_V_ERR_UNNESTED_RESOURCE',
|
|
||||||
'X509_V_ERR_PERMITTED_VIOLATION',
|
|
||||||
'X509_V_ERR_EXCLUDED_VIOLATION',
|
|
||||||
'X509_V_ERR_SUBTREE_MINMAX',
|
|
||||||
'X509_V_ERR_UNSUPPORTED_CONSTRAINT_TYPE',
|
|
||||||
'X509_V_ERR_UNSUPPORTED_CONSTRAINT_SYNTAX',
|
|
||||||
'X509_V_ERR_UNSUPPORTED_NAME_SYNTAX',
|
|
||||||
'X509_V_ERR_CRL_PATH_VALIDATION_ERROR',
|
|
||||||
],
|
|
||||||
"Cryptography_HAS_100_VERIFICATION_PARAMS": [
|
|
||||||
"Cryptography_HAS_100_VERIFICATION_PARAMS",
|
|
||||||
"X509_V_FLAG_EXTENDED_CRL_SUPPORT",
|
|
||||||
"X509_V_FLAG_USE_DELTAS",
|
|
||||||
],
|
|
||||||
"Cryptography_HAS_X509_V_FLAG_CHECK_SS_SIGNATURE": [
|
|
||||||
"X509_V_FLAG_CHECK_SS_SIGNATURE",
|
|
||||||
],
|
|
||||||
"Cryptography_HAS_SET_CERT_CB": [
|
|
||||||
"SSL_CTX_set_cert_cb",
|
|
||||||
"SSL_set_cert_cb",
|
|
||||||
],
|
|
||||||
}
|
|
|
@ -1,182 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
import collections
|
|
||||||
import os
|
|
||||||
import threading
|
|
||||||
import types
|
|
||||||
|
|
||||||
from cryptography.exceptions import InternalError
|
|
||||||
from cryptography.hazmat.bindings._openssl import ffi, lib
|
|
||||||
from cryptography.hazmat.bindings.openssl._conditional import CONDITIONAL_NAMES
|
|
||||||
|
|
||||||
|
|
||||||
_OpenSSLError = collections.namedtuple("_OpenSSLError",
|
|
||||||
["code", "lib", "func", "reason"])
|
|
||||||
|
|
||||||
|
|
||||||
def _consume_errors(lib):
|
|
||||||
errors = []
|
|
||||||
while True:
|
|
||||||
code = lib.ERR_get_error()
|
|
||||||
if code == 0:
|
|
||||||
break
|
|
||||||
|
|
||||||
err_lib = lib.ERR_GET_LIB(code)
|
|
||||||
err_func = lib.ERR_GET_FUNC(code)
|
|
||||||
err_reason = lib.ERR_GET_REASON(code)
|
|
||||||
|
|
||||||
errors.append(_OpenSSLError(code, err_lib, err_func, err_reason))
|
|
||||||
return errors
|
|
||||||
|
|
||||||
|
|
||||||
def _openssl_assert(lib, ok):
|
|
||||||
if not ok:
|
|
||||||
errors = _consume_errors(lib)
|
|
||||||
raise InternalError(
|
|
||||||
"Unknown OpenSSL error. Please file an issue at https://github.com"
|
|
||||||
"/pyca/cryptography/issues with information on how to reproduce "
|
|
||||||
"this.",
|
|
||||||
errors
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@ffi.callback("int (*)(unsigned char *, int)", error=-1)
|
|
||||||
def _osrandom_rand_bytes(buf, size):
|
|
||||||
signed = ffi.cast("char *", buf)
|
|
||||||
result = os.urandom(size)
|
|
||||||
signed[0:size] = result
|
|
||||||
return 1
|
|
||||||
|
|
||||||
|
|
||||||
@ffi.callback("int (*)(void)")
|
|
||||||
def _osrandom_rand_status():
|
|
||||||
return 1
|
|
||||||
|
|
||||||
|
|
||||||
def build_conditional_library(lib, conditional_names):
|
|
||||||
conditional_lib = types.ModuleType("lib")
|
|
||||||
excluded_names = set()
|
|
||||||
for condition, names in conditional_names.items():
|
|
||||||
if not getattr(lib, condition):
|
|
||||||
excluded_names |= set(names)
|
|
||||||
|
|
||||||
for attr in dir(lib):
|
|
||||||
if attr not in excluded_names:
|
|
||||||
setattr(conditional_lib, attr, getattr(lib, attr))
|
|
||||||
|
|
||||||
return conditional_lib
|
|
||||||
|
|
||||||
|
|
||||||
class Binding(object):
|
|
||||||
"""
|
|
||||||
OpenSSL API wrapper.
|
|
||||||
"""
|
|
||||||
lib = None
|
|
||||||
ffi = ffi
|
|
||||||
_lib_loaded = False
|
|
||||||
_locks = None
|
|
||||||
_lock_cb_handle = None
|
|
||||||
_init_lock = threading.Lock()
|
|
||||||
_lock_init_lock = threading.Lock()
|
|
||||||
|
|
||||||
_osrandom_engine_id = ffi.new("const char[]", b"osrandom")
|
|
||||||
_osrandom_engine_name = ffi.new("const char[]", b"osrandom_engine")
|
|
||||||
_osrandom_method = ffi.new(
|
|
||||||
"RAND_METHOD *",
|
|
||||||
dict(bytes=_osrandom_rand_bytes, pseudorand=_osrandom_rand_bytes,
|
|
||||||
status=_osrandom_rand_status)
|
|
||||||
)
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
self._ensure_ffi_initialized()
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def _register_osrandom_engine(cls):
|
|
||||||
_openssl_assert(cls.lib, cls.lib.ERR_peek_error() == 0)
|
|
||||||
|
|
||||||
engine = cls.lib.ENGINE_new()
|
|
||||||
_openssl_assert(cls.lib, engine != cls.ffi.NULL)
|
|
||||||
try:
|
|
||||||
result = cls.lib.ENGINE_set_id(engine, cls._osrandom_engine_id)
|
|
||||||
_openssl_assert(cls.lib, result == 1)
|
|
||||||
result = cls.lib.ENGINE_set_name(engine, cls._osrandom_engine_name)
|
|
||||||
_openssl_assert(cls.lib, result == 1)
|
|
||||||
result = cls.lib.ENGINE_set_RAND(engine, cls._osrandom_method)
|
|
||||||
_openssl_assert(cls.lib, result == 1)
|
|
||||||
result = cls.lib.ENGINE_add(engine)
|
|
||||||
if result != 1:
|
|
||||||
errors = _consume_errors(cls.lib)
|
|
||||||
_openssl_assert(
|
|
||||||
cls.lib,
|
|
||||||
errors[0].reason == cls.lib.ENGINE_R_CONFLICTING_ENGINE_ID
|
|
||||||
)
|
|
||||||
|
|
||||||
finally:
|
|
||||||
result = cls.lib.ENGINE_free(engine)
|
|
||||||
_openssl_assert(cls.lib, result == 1)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def _ensure_ffi_initialized(cls):
|
|
||||||
with cls._init_lock:
|
|
||||||
if not cls._lib_loaded:
|
|
||||||
cls.lib = build_conditional_library(lib, CONDITIONAL_NAMES)
|
|
||||||
cls._lib_loaded = True
|
|
||||||
# initialize the SSL library
|
|
||||||
cls.lib.SSL_library_init()
|
|
||||||
# adds all ciphers/digests for EVP
|
|
||||||
cls.lib.OpenSSL_add_all_algorithms()
|
|
||||||
# loads error strings for libcrypto and libssl functions
|
|
||||||
cls.lib.SSL_load_error_strings()
|
|
||||||
cls._register_osrandom_engine()
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def init_static_locks(cls):
|
|
||||||
with cls._lock_init_lock:
|
|
||||||
cls._ensure_ffi_initialized()
|
|
||||||
|
|
||||||
if not cls._lock_cb_handle:
|
|
||||||
cls._lock_cb_handle = cls.ffi.callback(
|
|
||||||
"void(int, int, const char *, int)",
|
|
||||||
cls._lock_cb
|
|
||||||
)
|
|
||||||
|
|
||||||
# Use Python's implementation if available, importing _ssl triggers
|
|
||||||
# the setup for this.
|
|
||||||
__import__("_ssl")
|
|
||||||
|
|
||||||
if cls.lib.CRYPTO_get_locking_callback() != cls.ffi.NULL:
|
|
||||||
return
|
|
||||||
|
|
||||||
# If nothing else has setup a locking callback already, we set up
|
|
||||||
# our own
|
|
||||||
num_locks = cls.lib.CRYPTO_num_locks()
|
|
||||||
cls._locks = [threading.Lock() for n in range(num_locks)]
|
|
||||||
|
|
||||||
cls.lib.CRYPTO_set_locking_callback(cls._lock_cb_handle)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def _lock_cb(cls, mode, n, file, line):
|
|
||||||
lock = cls._locks[n]
|
|
||||||
|
|
||||||
if mode & cls.lib.CRYPTO_LOCK:
|
|
||||||
lock.acquire()
|
|
||||||
elif mode & cls.lib.CRYPTO_UNLOCK:
|
|
||||||
lock.release()
|
|
||||||
else:
|
|
||||||
raise RuntimeError(
|
|
||||||
"Unknown lock mode {0}: lock={1}, file={2}, line={3}.".format(
|
|
||||||
mode, n, file, line
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
# OpenSSL is not thread safe until the locks are initialized. We call this
|
|
||||||
# method in module scope so that it executes with the import lock. On
|
|
||||||
# Pythons < 3.4 this import lock is a global lock, which can prevent a race
|
|
||||||
# condition registering the OpenSSL locks. On Python 3.4+ the import lock
|
|
||||||
# is per module so this approach will not work.
|
|
||||||
Binding.init_static_locks()
|
|
|
@ -1,5 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
|
@ -1,40 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
import abc
|
|
||||||
|
|
||||||
import six
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
|
|
||||||
class AsymmetricSignatureContext(object):
|
|
||||||
@abc.abstractmethod
|
|
||||||
def update(self, data):
|
|
||||||
"""
|
|
||||||
Processes the provided bytes and returns nothing.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def finalize(self):
|
|
||||||
"""
|
|
||||||
Returns the signature as bytes.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
|
|
||||||
class AsymmetricVerificationContext(object):
|
|
||||||
@abc.abstractmethod
|
|
||||||
def update(self, data):
|
|
||||||
"""
|
|
||||||
Processes the provided bytes and returns nothing.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def verify(self):
|
|
||||||
"""
|
|
||||||
Raises an exception if the bytes provided to update do not match the
|
|
||||||
signature or the signature does not match the public key.
|
|
||||||
"""
|
|
|
@ -1,166 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
import abc
|
|
||||||
|
|
||||||
import six
|
|
||||||
|
|
||||||
from cryptography import utils
|
|
||||||
|
|
||||||
|
|
||||||
class DHPrivateNumbers(object):
|
|
||||||
def __init__(self, x, public_numbers):
|
|
||||||
if not isinstance(x, six.integer_types):
|
|
||||||
raise TypeError("x must be an integer.")
|
|
||||||
|
|
||||||
if not isinstance(public_numbers, DHPublicNumbers):
|
|
||||||
raise TypeError("public_numbers must be an instance of "
|
|
||||||
"DHPublicNumbers.")
|
|
||||||
|
|
||||||
self._x = x
|
|
||||||
self._public_numbers = public_numbers
|
|
||||||
|
|
||||||
def __eq__(self, other):
|
|
||||||
if not isinstance(other, DHPrivateNumbers):
|
|
||||||
return NotImplemented
|
|
||||||
|
|
||||||
return (
|
|
||||||
self._x == other._x and
|
|
||||||
self._public_numbers == other._public_numbers
|
|
||||||
)
|
|
||||||
|
|
||||||
def __ne__(self, other):
|
|
||||||
return not self == other
|
|
||||||
|
|
||||||
public_numbers = utils.read_only_property("_public_numbers")
|
|
||||||
x = utils.read_only_property("_x")
|
|
||||||
|
|
||||||
|
|
||||||
class DHPublicNumbers(object):
|
|
||||||
def __init__(self, y, parameter_numbers):
|
|
||||||
if not isinstance(y, six.integer_types):
|
|
||||||
raise TypeError("y must be an integer.")
|
|
||||||
|
|
||||||
if not isinstance(parameter_numbers, DHParameterNumbers):
|
|
||||||
raise TypeError(
|
|
||||||
"parameters must be an instance of DHParameterNumbers.")
|
|
||||||
|
|
||||||
self._y = y
|
|
||||||
self._parameter_numbers = parameter_numbers
|
|
||||||
|
|
||||||
def __eq__(self, other):
|
|
||||||
if not isinstance(other, DHPublicNumbers):
|
|
||||||
return NotImplemented
|
|
||||||
|
|
||||||
return (
|
|
||||||
self._y == other._y and
|
|
||||||
self._parameter_numbers == other._parameter_numbers
|
|
||||||
)
|
|
||||||
|
|
||||||
def __ne__(self, other):
|
|
||||||
return not self == other
|
|
||||||
|
|
||||||
y = utils.read_only_property("_y")
|
|
||||||
parameter_numbers = utils.read_only_property("_parameter_numbers")
|
|
||||||
|
|
||||||
|
|
||||||
class DHParameterNumbers(object):
|
|
||||||
def __init__(self, p, g):
|
|
||||||
if (
|
|
||||||
not isinstance(p, six.integer_types) or
|
|
||||||
not isinstance(g, six.integer_types)
|
|
||||||
):
|
|
||||||
raise TypeError("p and g must be integers")
|
|
||||||
|
|
||||||
self._p = p
|
|
||||||
self._g = g
|
|
||||||
|
|
||||||
def __eq__(self, other):
|
|
||||||
if not isinstance(other, DHParameterNumbers):
|
|
||||||
return NotImplemented
|
|
||||||
|
|
||||||
return (
|
|
||||||
self._p == other._p and
|
|
||||||
self._g == other._g
|
|
||||||
)
|
|
||||||
|
|
||||||
def __ne__(self, other):
|
|
||||||
return not self == other
|
|
||||||
|
|
||||||
p = utils.read_only_property("_p")
|
|
||||||
g = utils.read_only_property("_g")
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
|
|
||||||
class DHParameters(object):
|
|
||||||
@abc.abstractmethod
|
|
||||||
def generate_private_key(self):
|
|
||||||
"""
|
|
||||||
Generates and returns a DHPrivateKey.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
|
|
||||||
class DHParametersWithSerialization(DHParameters):
|
|
||||||
@abc.abstractmethod
|
|
||||||
def parameter_numbers(self):
|
|
||||||
"""
|
|
||||||
Returns a DHParameterNumbers.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
|
|
||||||
class DHPrivateKey(object):
|
|
||||||
@abc.abstractproperty
|
|
||||||
def key_size(self):
|
|
||||||
"""
|
|
||||||
The bit length of the prime modulus.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def public_key(self):
|
|
||||||
"""
|
|
||||||
The DHPublicKey associated with this private key.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def parameters(self):
|
|
||||||
"""
|
|
||||||
The DHParameters object associated with this private key.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
|
|
||||||
class DHPrivateKeyWithSerialization(DHPrivateKey):
|
|
||||||
@abc.abstractmethod
|
|
||||||
def private_numbers(self):
|
|
||||||
"""
|
|
||||||
Returns a DHPrivateNumbers.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
|
|
||||||
class DHPublicKey(object):
|
|
||||||
@abc.abstractproperty
|
|
||||||
def key_size(self):
|
|
||||||
"""
|
|
||||||
The bit length of the prime modulus.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def parameters(self):
|
|
||||||
"""
|
|
||||||
The DHParameters object associated with this public key.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
|
|
||||||
class DHPublicKeyWithSerialization(DHPublicKey):
|
|
||||||
@abc.abstractmethod
|
|
||||||
def public_numbers(self):
|
|
||||||
"""
|
|
||||||
Returns a DHPublicNumbers.
|
|
||||||
"""
|
|
|
@ -1,229 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
import abc
|
|
||||||
|
|
||||||
import six
|
|
||||||
|
|
||||||
from cryptography import utils
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
|
|
||||||
class DSAParameters(object):
|
|
||||||
@abc.abstractmethod
|
|
||||||
def generate_private_key(self):
|
|
||||||
"""
|
|
||||||
Generates and returns a DSAPrivateKey.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
|
|
||||||
class DSAParametersWithNumbers(DSAParameters):
|
|
||||||
@abc.abstractmethod
|
|
||||||
def parameter_numbers(self):
|
|
||||||
"""
|
|
||||||
Returns a DSAParameterNumbers.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
|
|
||||||
class DSAPrivateKey(object):
|
|
||||||
@abc.abstractproperty
|
|
||||||
def key_size(self):
|
|
||||||
"""
|
|
||||||
The bit length of the prime modulus.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def public_key(self):
|
|
||||||
"""
|
|
||||||
The DSAPublicKey associated with this private key.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def parameters(self):
|
|
||||||
"""
|
|
||||||
The DSAParameters object associated with this private key.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def signer(self, signature_algorithm):
|
|
||||||
"""
|
|
||||||
Returns an AsymmetricSignatureContext used for signing data.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
|
|
||||||
class DSAPrivateKeyWithSerialization(DSAPrivateKey):
|
|
||||||
@abc.abstractmethod
|
|
||||||
def private_numbers(self):
|
|
||||||
"""
|
|
||||||
Returns a DSAPrivateNumbers.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def private_bytes(self, encoding, format, encryption_algorithm):
|
|
||||||
"""
|
|
||||||
Returns the key serialized as bytes.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
|
|
||||||
class DSAPublicKey(object):
|
|
||||||
@abc.abstractproperty
|
|
||||||
def key_size(self):
|
|
||||||
"""
|
|
||||||
The bit length of the prime modulus.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def parameters(self):
|
|
||||||
"""
|
|
||||||
The DSAParameters object associated with this public key.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def verifier(self, signature, signature_algorithm):
|
|
||||||
"""
|
|
||||||
Returns an AsymmetricVerificationContext used for signing data.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def public_numbers(self):
|
|
||||||
"""
|
|
||||||
Returns a DSAPublicNumbers.
|
|
||||||
"""
|
|
||||||
|
|
||||||
@abc.abstractmethod
|
|
||||||
def public_bytes(self, encoding, format):
|
|
||||||
"""
|
|
||||||
Returns the key serialized as bytes.
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
DSAPublicKeyWithSerialization = DSAPublicKey
|
|
||||||
|
|
||||||
|
|
||||||
def generate_parameters(key_size, backend):
|
|
||||||
return backend.generate_dsa_parameters(key_size)
|
|
||||||
|
|
||||||
|
|
||||||
def generate_private_key(key_size, backend):
|
|
||||||
return backend.generate_dsa_private_key_and_parameters(key_size)
|
|
||||||
|
|
||||||
|
|
||||||
def _check_dsa_parameters(parameters):
|
|
||||||
if utils.bit_length(parameters.p) not in [1024, 2048, 3072]:
|
|
||||||
raise ValueError("p must be exactly 1024, 2048, or 3072 bits long")
|
|
||||||
if utils.bit_length(parameters.q) not in [160, 256]:
|
|
||||||
raise ValueError("q must be exactly 160 or 256 bits long")
|
|
||||||
|
|
||||||
if not (1 < parameters.g < parameters.p):
|
|
||||||
raise ValueError("g, p don't satisfy 1 < g < p.")
|
|
||||||
|
|
||||||
|
|
||||||
def _check_dsa_private_numbers(numbers):
|
|
||||||
parameters = numbers.public_numbers.parameter_numbers
|
|
||||||
_check_dsa_parameters(parameters)
|
|
||||||
if numbers.x <= 0 or numbers.x >= parameters.q:
|
|
||||||
raise ValueError("x must be > 0 and < q.")
|
|
||||||
|
|
||||||
if numbers.public_numbers.y != pow(parameters.g, numbers.x, parameters.p):
|
|
||||||
raise ValueError("y must be equal to (g ** x % p).")
|
|
||||||
|
|
||||||
|
|
||||||
class DSAParameterNumbers(object):
    """
    Container for the DSA domain parameters (p, q, g).
    """

    def __init__(self, p, q, g):
        # All three values must be plain integers.
        for value in (p, q, g):
            if not isinstance(value, six.integer_types):
                raise TypeError(
                    "DSAParameterNumbers p, q, and g arguments must be integers."
                )

        self._p = p
        self._q = q
        self._g = g

    p = utils.read_only_property("_p")
    q = utils.read_only_property("_q")
    g = utils.read_only_property("_g")

    def parameters(self, backend):
        """Build a backend DSAParameters object from these numbers."""
        return backend.load_dsa_parameter_numbers(self)

    def __eq__(self, other):
        if not isinstance(other, DSAParameterNumbers):
            return NotImplemented
        return (self.p, self.q, self.g) == (other.p, other.q, other.g)

    def __ne__(self, other):
        return not self == other
|
|
||||||
|
|
||||||
|
|
||||||
class DSAPublicNumbers(object):
    """
    The public value y together with its DSA domain parameters.
    """

    def __init__(self, y, parameter_numbers):
        if not isinstance(y, six.integer_types):
            raise TypeError("DSAPublicNumbers y argument must be an integer.")

        if not isinstance(parameter_numbers, DSAParameterNumbers):
            raise TypeError(
                "parameter_numbers must be a DSAParameterNumbers instance."
            )

        self._y = y
        self._parameter_numbers = parameter_numbers

    y = utils.read_only_property("_y")
    parameter_numbers = utils.read_only_property("_parameter_numbers")

    def public_key(self, backend):
        """Build a backend DSAPublicKey object from these numbers."""
        return backend.load_dsa_public_numbers(self)

    def __eq__(self, other):
        if not isinstance(other, DSAPublicNumbers):
            return NotImplemented
        return (self.y == other.y and
                self.parameter_numbers == other.parameter_numbers)

    def __ne__(self, other):
        return not self == other
|
|
||||||
|
|
||||||
|
|
||||||
class DSAPrivateNumbers(object):
    """
    The private value x together with the corresponding public numbers.
    """

    def __init__(self, x, public_numbers):
        if not isinstance(x, six.integer_types):
            raise TypeError("DSAPrivateNumbers x argument must be an integer.")

        if not isinstance(public_numbers, DSAPublicNumbers):
            raise TypeError(
                "public_numbers must be a DSAPublicNumbers instance."
            )
        self._public_numbers = public_numbers
        self._x = x

    x = utils.read_only_property("_x")
    public_numbers = utils.read_only_property("_public_numbers")

    def private_key(self, backend):
        """Build a backend DSAPrivateKey object from these numbers."""
        return backend.load_dsa_private_numbers(self)

    def __eq__(self, other):
        if not isinstance(other, DSAPrivateNumbers):
            return NotImplemented
        return (self.x == other.x and
                self.public_numbers == other.public_numbers)

    def __ne__(self, other):
        return not self == other
|
|
|
@ -1,346 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
import abc
|
|
||||||
|
|
||||||
import six
|
|
||||||
|
|
||||||
from cryptography import utils
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
class EllipticCurve(object):
    """
    Abstract interface implemented by every named-curve marker class.
    """

    @abc.abstractproperty
    def name(self):
        """
        The name of the curve. e.g. secp256r1.
        """

    @abc.abstractproperty
    def key_size(self):
        """
        The bit length of the base point of the curve.
        """
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
class EllipticCurveSignatureAlgorithm(object):
    """
    Abstract interface for EC signature algorithms (e.g. ECDSA).
    """

    @abc.abstractproperty
    def algorithm(self):
        """
        The digest algorithm used with this signature.
        """
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
class EllipticCurvePrivateKey(object):
    """
    Abstract interface for an elliptic-curve private key.
    """

    @abc.abstractmethod
    def signer(self, signature_algorithm):
        """
        Returns an AsymmetricSignatureContext used for signing data.
        """

    @abc.abstractmethod
    def exchange(self, algorithm, peer_public_key):
        """
        Performs a key exchange operation using the provided algorithm with
        the provided peer's public key.
        """

    @abc.abstractmethod
    def public_key(self):
        """
        The EllipticCurvePublicKey for this private key.
        """

    @abc.abstractproperty
    def curve(self):
        """
        The EllipticCurve that this key is on.
        """
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
class EllipticCurvePrivateKeyWithSerialization(EllipticCurvePrivateKey):
    """
    An EC private key that can expose its numbers and be serialized.
    """

    @abc.abstractmethod
    def private_numbers(self):
        """
        Returns an EllipticCurvePrivateNumbers.
        """

    @abc.abstractmethod
    def private_bytes(self, encoding, format, encryption_algorithm):
        """
        Returns the key serialized as bytes.
        """
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
class EllipticCurvePublicKey(object):
    """
    Abstract interface for an elliptic-curve public key.
    """

    @abc.abstractmethod
    def verifier(self, signature, signature_algorithm):
        """
        Returns an AsymmetricVerificationContext used for verifying
        signatures.
        """

    @abc.abstractproperty
    def curve(self):
        """
        The EllipticCurve that this key is on.
        """

    @abc.abstractmethod
    def public_numbers(self):
        """
        Returns an EllipticCurvePublicNumbers.
        """

    @abc.abstractmethod
    def public_bytes(self, encoding, format):
        """
        Returns the key serialized as bytes.
        """
|
|
||||||
|
|
||||||
|
|
||||||
# Backwards-compatible alias: serialization lives on EllipticCurvePublicKey.
EllipticCurvePublicKeyWithSerialization = EllipticCurvePublicKey
|
|
||||||
|
|
||||||
|
|
||||||
# --- Named curve definitions -------------------------------------------------
# Each class below is a plain marker object satisfying the EllipticCurve
# interface: a canonical SEC curve name plus the bit length of the base
# point order. They carry no key material themselves.

@utils.register_interface(EllipticCurve)
class SECT571R1(object):
    name = "sect571r1"
    key_size = 571


@utils.register_interface(EllipticCurve)
class SECT409R1(object):
    name = "sect409r1"
    key_size = 409


@utils.register_interface(EllipticCurve)
class SECT283R1(object):
    name = "sect283r1"
    key_size = 283


@utils.register_interface(EllipticCurve)
class SECT233R1(object):
    name = "sect233r1"
    key_size = 233


@utils.register_interface(EllipticCurve)
class SECT163R2(object):
    name = "sect163r2"
    key_size = 163


@utils.register_interface(EllipticCurve)
class SECT571K1(object):
    name = "sect571k1"
    key_size = 571


@utils.register_interface(EllipticCurve)
class SECT409K1(object):
    name = "sect409k1"
    key_size = 409


@utils.register_interface(EllipticCurve)
class SECT283K1(object):
    name = "sect283k1"
    key_size = 283


@utils.register_interface(EllipticCurve)
class SECT233K1(object):
    name = "sect233k1"
    key_size = 233


@utils.register_interface(EllipticCurve)
class SECT163K1(object):
    name = "sect163k1"
    key_size = 163


@utils.register_interface(EllipticCurve)
class SECP521R1(object):
    name = "secp521r1"
    key_size = 521


@utils.register_interface(EllipticCurve)
class SECP384R1(object):
    name = "secp384r1"
    key_size = 384


@utils.register_interface(EllipticCurve)
class SECP256R1(object):
    name = "secp256r1"
    key_size = 256


@utils.register_interface(EllipticCurve)
class SECP256K1(object):
    name = "secp256k1"
    key_size = 256


@utils.register_interface(EllipticCurve)
class SECP224R1(object):
    name = "secp224r1"
    key_size = 224


@utils.register_interface(EllipticCurve)
class SECP192R1(object):
    name = "secp192r1"
    key_size = 192


# Lookup table mapping curve names (as reported by OpenSSL) to marker
# classes. The "prime*v1" entries are the X9.62 aliases for the
# corresponding SEC "secp*r1" curves.
_CURVE_TYPES = {
    "prime192v1": SECP192R1,
    "prime256v1": SECP256R1,

    "secp192r1": SECP192R1,
    "secp224r1": SECP224R1,
    "secp256r1": SECP256R1,
    "secp384r1": SECP384R1,
    "secp521r1": SECP521R1,
    "secp256k1": SECP256K1,

    "sect163k1": SECT163K1,
    "sect233k1": SECT233K1,
    "sect283k1": SECT283K1,
    "sect409k1": SECT409K1,
    "sect571k1": SECT571K1,

    "sect163r2": SECT163R2,
    "sect233r1": SECT233R1,
    "sect283r1": SECT283R1,
    "sect409r1": SECT409R1,
    "sect571r1": SECT571R1,
}
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(EllipticCurveSignatureAlgorithm)
class ECDSA(object):
    """
    The ECDSA signature algorithm parameterized by a hash *algorithm*.
    """

    def __init__(self, algorithm):
        # NOTE(review): algorithm is not validated here (unlike OAEP/MGF1);
        # presumably the backend rejects unsupported hashes — confirm.
        self._algorithm = algorithm

    algorithm = utils.read_only_property("_algorithm")
|
|
||||||
|
|
||||||
|
|
||||||
def generate_private_key(curve, backend):
    """
    Generate a new private key on *curve* using *backend*.
    """
    return backend.generate_elliptic_curve_private_key(curve)
|
|
||||||
|
|
||||||
|
|
||||||
class EllipticCurvePublicNumbers(object):
    """
    The affine coordinates (x, y) of a point on *curve* — an EC public key
    in numeric form.
    """

    def __init__(self, x, y, curve):
        if (
            not isinstance(x, six.integer_types) or
            not isinstance(y, six.integer_types)
        ):
            raise TypeError("x and y must be integers.")

        if not isinstance(curve, EllipticCurve):
            raise TypeError("curve must provide the EllipticCurve interface.")

        self._y = y
        self._x = x
        self._curve = curve

    def public_key(self, backend):
        """Build a backend EllipticCurvePublicKey from these numbers."""
        return backend.load_elliptic_curve_public_numbers(self)

    def encode_point(self):
        """
        Serialize the point in X9.62 uncompressed form
        (``0x04 || X || Y`` with fixed-width big-endian coordinates).
        """
        # key_size is in bits. Convert to bytes and round up.
        byte_length = (self.curve.key_size + 7) // 8
        return (
            b'\x04' + utils.int_to_bytes(self.x, byte_length) +
            utils.int_to_bytes(self.y, byte_length)
        )

    @classmethod
    def from_encoded_point(cls, curve, data):
        """
        Parse an X9.62 uncompressed point (``0x04 || X || Y``) on *curve*.

        :raises TypeError: if *curve* is not an EllipticCurve.
        :raises ValueError: for compressed/unknown point types or a length
            that does not match the curve's field size.
        """
        if not isinstance(curve, EllipticCurve):
            raise TypeError("curve must be an EllipticCurve instance")

        # Only the uncompressed (0x04) form is supported.
        if not data.startswith(b'\x04'):
            raise ValueError('Unsupported elliptic curve point type')

        # key_size is in bits. Convert to bytes and round up.
        byte_length = (curve.key_size + 7) // 8
        if len(data) != 2 * byte_length + 1:
            raise ValueError('Invalid elliptic curve point data length')

        x = utils.int_from_bytes(data[1:byte_length + 1], 'big')
        y = utils.int_from_bytes(data[byte_length + 1:], 'big')
        return cls(x, y, curve)

    curve = utils.read_only_property("_curve")
    x = utils.read_only_property("_x")
    y = utils.read_only_property("_y")

    def __eq__(self, other):
        if not isinstance(other, EllipticCurvePublicNumbers):
            return NotImplemented

        return (
            self.x == other.x and
            self.y == other.y and
            self.curve.name == other.curve.name and
            self.curve.key_size == other.curve.key_size
        )

    def __ne__(self, other):
        return not self == other

    def __repr__(self):
        # BUG FIX: the closing parenthesis was missing ("y={0.y}>").
        return (
            "<EllipticCurvePublicNumbers(curve={0.curve.name}, x={0.x}, "
            "y={0.y})>".format(self)
        )
|
|
||||||
|
|
||||||
|
|
||||||
class EllipticCurvePrivateNumbers(object):
    """
    The private scalar together with the corresponding public numbers.
    """

    def __init__(self, private_value, public_numbers):
        if not isinstance(private_value, six.integer_types):
            raise TypeError("private_value must be an integer.")

        if not isinstance(public_numbers, EllipticCurvePublicNumbers):
            raise TypeError(
                "public_numbers must be an EllipticCurvePublicNumbers "
                "instance."
            )

        self._private_value = private_value
        self._public_numbers = public_numbers

    def private_key(self, backend):
        """Build a backend EllipticCurvePrivateKey from these numbers."""
        return backend.load_elliptic_curve_private_numbers(self)

    private_value = utils.read_only_property("_private_value")
    public_numbers = utils.read_only_property("_public_numbers")

    def __eq__(self, other):
        if not isinstance(other, EllipticCurvePrivateNumbers):
            return NotImplemented
        return (self.private_value == other.private_value and
                self.public_numbers == other.public_numbers)

    def __ne__(self, other):
        return not self == other
|
|
||||||
|
|
||||||
|
|
||||||
class ECDH(object):
    """
    Marker algorithm object selecting ECDH key exchange; passed to
    EllipticCurvePrivateKey.exchange(). Carries no state.
    """
|
|
|
@ -1,67 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
import abc
|
|
||||||
|
|
||||||
import six
|
|
||||||
|
|
||||||
from cryptography import utils
|
|
||||||
from cryptography.hazmat.primitives import hashes
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
class AsymmetricPadding(object):
    """
    Abstract interface implemented by every asymmetric padding scheme.
    """

    @abc.abstractproperty
    def name(self):
        """
        A string naming this padding (e.g. "PSS", "PKCS1").
        """
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(AsymmetricPadding)
class PKCS1v15(object):
    """PKCS #1 v1.5 signature padding (EMSA-PKCS1-v1_5)."""

    name = "EMSA-PKCS1-v1_5"
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(AsymmetricPadding)
class PSS(object):
    """
    Probabilistic signature scheme padding (EMSA-PSS) with mask generation
    function *mgf* and a salt of *salt_length* bytes (or the MAX_LENGTH
    sentinel for the largest salt that fits).
    """

    # Sentinel meaning "use the maximum salt length possible".
    MAX_LENGTH = object()
    name = "EMSA-PSS"

    def __init__(self, mgf, salt_length):
        self._mgf = mgf

        if salt_length is not self.MAX_LENGTH:
            if not isinstance(salt_length, six.integer_types):
                raise TypeError("salt_length must be an integer.")
            if salt_length < 0:
                raise ValueError("salt_length must be zero or greater.")

        self._salt_length = salt_length
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(AsymmetricPadding)
class OAEP(object):
    """
    OAEP encryption padding (EME-OAEP) with mask generation function *mgf*,
    hash *algorithm*, and optional *label*.
    """

    name = "EME-OAEP"

    def __init__(self, mgf, algorithm, label):
        if not isinstance(algorithm, hashes.HashAlgorithm):
            raise TypeError("Expected instance of hashes.HashAlgorithm.")

        self._mgf = mgf
        self._algorithm = algorithm
        self._label = label
|
|
||||||
|
|
||||||
|
|
||||||
class MGF1(object):
    """
    The MGF1 mask generation function built on hash *algorithm*.
    """

    # NOTE(review): MAX_LENGTH appears unused by MGF1 itself; callers
    # normally use PSS.MAX_LENGTH — confirm before removing.
    MAX_LENGTH = object()

    def __init__(self, algorithm):
        if not isinstance(algorithm, hashes.HashAlgorithm):
            raise TypeError("Expected instance of hashes.HashAlgorithm.")

        self._algorithm = algorithm
|
|
|
@ -1,352 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
import abc
|
|
||||||
from fractions import gcd
|
|
||||||
|
|
||||||
import six
|
|
||||||
|
|
||||||
from cryptography import utils
|
|
||||||
from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
|
|
||||||
from cryptography.hazmat.backends.interfaces import RSABackend
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
class RSAPrivateKey(object):
    """
    Abstract interface for an RSA private key.
    """

    @abc.abstractmethod
    def signer(self, padding, algorithm):
        """
        Returns an AsymmetricSignatureContext used for signing data.
        """

    @abc.abstractmethod
    def decrypt(self, ciphertext, padding):
        """
        Decrypts the provided ciphertext.
        """

    @abc.abstractproperty
    def key_size(self):
        """
        The bit length of the public modulus.
        """

    @abc.abstractmethod
    def public_key(self):
        """
        The RSAPublicKey associated with this private key.
        """
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
class RSAPrivateKeyWithSerialization(RSAPrivateKey):
    """
    An RSA private key that can expose its numbers and be serialized.
    """

    @abc.abstractmethod
    def private_numbers(self):
        """
        Returns an RSAPrivateNumbers.
        """

    @abc.abstractmethod
    def private_bytes(self, encoding, format, encryption_algorithm):
        """
        Returns the key serialized as bytes.
        """
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
class RSAPublicKey(object):
    """
    Abstract interface for an RSA public key.
    """

    @abc.abstractmethod
    def verifier(self, signature, padding, algorithm):
        """
        Returns an AsymmetricVerificationContext used for verifying
        signatures.
        """

    @abc.abstractmethod
    def encrypt(self, plaintext, padding):
        """
        Encrypts the given plaintext.
        """

    @abc.abstractproperty
    def key_size(self):
        """
        The bit length of the public modulus.
        """

    @abc.abstractmethod
    def public_numbers(self):
        """
        Returns an RSAPublicNumbers
        """

    @abc.abstractmethod
    def public_bytes(self, encoding, format):
        """
        Returns the key serialized as bytes.
        """
|
|
||||||
|
|
||||||
|
|
||||||
# Backwards-compatible alias: serialization lives on RSAPublicKey itself.
RSAPublicKeyWithSerialization = RSAPublicKey
|
|
||||||
|
|
||||||
|
|
||||||
def generate_private_key(public_exponent, key_size, backend):
    """
    Generate a new RSA private key with the given *public_exponent* and a
    modulus of *key_size* bits using *backend*.

    :raises UnsupportedAlgorithm: if the backend lacks RSA support.
    :raises ValueError: if the parameters fail the basic sanity checks.
    """
    if not isinstance(backend, RSABackend):
        raise UnsupportedAlgorithm(
            "Backend object does not implement RSABackend.",
            _Reasons.BACKEND_MISSING_INTERFACE
        )

    _verify_rsa_parameters(public_exponent, key_size)
    return backend.generate_rsa_private_key(public_exponent, key_size)
|
|
||||||
|
|
||||||
|
|
||||||
def _verify_rsa_parameters(public_exponent, key_size):
|
|
||||||
if public_exponent < 3:
|
|
||||||
raise ValueError("public_exponent must be >= 3.")
|
|
||||||
|
|
||||||
if public_exponent & 1 == 0:
|
|
||||||
raise ValueError("public_exponent must be odd.")
|
|
||||||
|
|
||||||
if key_size < 512:
|
|
||||||
raise ValueError("key_size must be at least 512-bits.")
|
|
||||||
|
|
||||||
|
|
||||||
def _check_private_key_components(p, q, private_exponent, dmp1, dmq1, iqmp,
|
|
||||||
public_exponent, modulus):
|
|
||||||
if modulus < 3:
|
|
||||||
raise ValueError("modulus must be >= 3.")
|
|
||||||
|
|
||||||
if p >= modulus:
|
|
||||||
raise ValueError("p must be < modulus.")
|
|
||||||
|
|
||||||
if q >= modulus:
|
|
||||||
raise ValueError("q must be < modulus.")
|
|
||||||
|
|
||||||
if dmp1 >= modulus:
|
|
||||||
raise ValueError("dmp1 must be < modulus.")
|
|
||||||
|
|
||||||
if dmq1 >= modulus:
|
|
||||||
raise ValueError("dmq1 must be < modulus.")
|
|
||||||
|
|
||||||
if iqmp >= modulus:
|
|
||||||
raise ValueError("iqmp must be < modulus.")
|
|
||||||
|
|
||||||
if private_exponent >= modulus:
|
|
||||||
raise ValueError("private_exponent must be < modulus.")
|
|
||||||
|
|
||||||
if public_exponent < 3 or public_exponent >= modulus:
|
|
||||||
raise ValueError("public_exponent must be >= 3 and < modulus.")
|
|
||||||
|
|
||||||
if public_exponent & 1 == 0:
|
|
||||||
raise ValueError("public_exponent must be odd.")
|
|
||||||
|
|
||||||
if dmp1 & 1 == 0:
|
|
||||||
raise ValueError("dmp1 must be odd.")
|
|
||||||
|
|
||||||
if dmq1 & 1 == 0:
|
|
||||||
raise ValueError("dmq1 must be odd.")
|
|
||||||
|
|
||||||
if p * q != modulus:
|
|
||||||
raise ValueError("p*q must equal modulus.")
|
|
||||||
|
|
||||||
|
|
||||||
def _check_public_key_components(e, n):
|
|
||||||
if n < 3:
|
|
||||||
raise ValueError("n must be >= 3.")
|
|
||||||
|
|
||||||
if e < 3 or e >= n:
|
|
||||||
raise ValueError("e must be >= 3 and < n.")
|
|
||||||
|
|
||||||
if e & 1 == 0:
|
|
||||||
raise ValueError("e must be odd.")
|
|
||||||
|
|
||||||
|
|
||||||
def _modinv(e, m):
|
|
||||||
"""
|
|
||||||
Modular Multiplicative Inverse. Returns x such that: (x*e) mod m == 1
|
|
||||||
"""
|
|
||||||
x1, y1, x2, y2 = 1, 0, 0, 1
|
|
||||||
a, b = e, m
|
|
||||||
while b > 0:
|
|
||||||
q, r = divmod(a, b)
|
|
||||||
xn, yn = x1 - q * x2, y1 - q * y2
|
|
||||||
a, b, x1, y1, x2, y2 = b, r, x2, y2, xn, yn
|
|
||||||
return x1 % m
|
|
||||||
|
|
||||||
|
|
||||||
def rsa_crt_iqmp(p, q):
    """
    Compute the CRT (q ** -1) % p value from RSA primes p and q.
    """
    return _modinv(q, p)
|
|
||||||
|
|
||||||
|
|
||||||
def rsa_crt_dmp1(private_exponent, p):
    """
    Compute the CRT private_exponent % (p - 1) value from the RSA
    private_exponent and p.
    """
    return private_exponent % (p - 1)
|
|
||||||
|
|
||||||
|
|
||||||
def rsa_crt_dmq1(private_exponent, q):
    """
    Compute the CRT private_exponent % (q - 1) value from the RSA
    private_exponent and q.
    """
    return private_exponent % (q - 1)
|
|
||||||
|
|
||||||
|
|
||||||
# Controls the number of candidate bases rsa_recover_prime_factors will try
# while searching for the prime factors. Candidates advance by 2 per
# iteration, so the actual maximum number of attempts is half this number.
_MAX_RECOVERY_ATTEMPTS = 1000


def rsa_recover_prime_factors(n, e, d):
    """
    Compute factors p and q from the private exponent d. We assume that n
    has no more than two factors. This function is adapted from code in
    PyCrypto; see section 8.2.2(i) of the Handbook of Applied Cryptography.

    :raises ValueError: if no factor is found within
        ``_MAX_RECOVERY_ATTEMPTS`` candidate bases.
    """
    # BUG FIX: the module previously relied on ``fractions.gcd``, which was
    # removed in Python 3.9; ``math.gcd`` is the supported replacement.
    from math import gcd

    # d*e - 1 is a multiple of phi(n) and even; strip the factors of two to
    # write it as t * 2**s.
    ktot = d * e - 1
    t = ktot
    while t % 2 == 0:
        t //= 2

    # Cycle through candidate bases a. The algorithm is non-deterministic,
    # but each candidate has a ~50% chance of exposing a non-trivial square
    # root of 1 (mod n), which reveals a factor. See "Digitalized
    # Signatures and Public Key Functions as Intractable as Factorization",
    # M. Rabin, 1979.
    p = None
    a = 2
    while p is None and a < _MAX_RECOVERY_ATTEMPTS:
        k = t
        # Walk the chain a**(t * 2**i) mod n.
        while k < ktot:
            cand = pow(a, k, n)
            # A non-trivial root of unity gives (cand-1)(cand+1) == 0
            # (mod n); either term shares a factor with n.
            if cand != 1 and cand != (n - 1) and pow(cand, 2, n) == 1:
                p = gcd(cand + 1, n)
                break
            k *= 2
        # This candidate was no good... try the next odd step.
        a += 2

    if p is None:
        raise ValueError("Unable to compute factors p and q from exponent d.")

    # Found! The cofactor must divide exactly for a two-prime modulus.
    # (Was a bare ``assert``, which disappears under ``python -O``.)
    q, r = divmod(n, p)
    if r != 0:
        raise ValueError("n is not divisible by the recovered factor.")

    return (p, q)
|
|
||||||
|
|
||||||
|
|
||||||
class RSAPrivateNumbers(object):
    """
    The complete set of RSA private key components, including the CRT
    values, together with the corresponding public numbers.
    """

    def __init__(self, p, q, d, dmp1, dmq1, iqmp,
                 public_numbers):
        components = (p, q, d, dmp1, dmq1, iqmp)
        if not all(isinstance(c, six.integer_types) for c in components):
            raise TypeError(
                "RSAPrivateNumbers p, q, d, dmp1, dmq1, iqmp arguments must"
                " all be an integers."
            )

        if not isinstance(public_numbers, RSAPublicNumbers):
            raise TypeError(
                "RSAPrivateNumbers public_numbers must be an RSAPublicNumbers"
                " instance."
            )

        self._p = p
        self._q = q
        self._d = d
        self._dmp1 = dmp1
        self._dmq1 = dmq1
        self._iqmp = iqmp
        self._public_numbers = public_numbers

    p = utils.read_only_property("_p")
    q = utils.read_only_property("_q")
    d = utils.read_only_property("_d")
    dmp1 = utils.read_only_property("_dmp1")
    dmq1 = utils.read_only_property("_dmq1")
    iqmp = utils.read_only_property("_iqmp")
    public_numbers = utils.read_only_property("_public_numbers")

    def private_key(self, backend):
        """Build a backend RSAPrivateKey object from these numbers."""
        return backend.load_rsa_private_numbers(self)

    def _fields(self):
        # Single source of truth for __eq__ and __hash__.
        return (self.p, self.q, self.d, self.dmp1, self.dmq1, self.iqmp,
                self.public_numbers)

    def __eq__(self, other):
        if not isinstance(other, RSAPrivateNumbers):
            return NotImplemented
        return self._fields() == other._fields()

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        return hash(self._fields())
|
|
||||||
|
|
||||||
|
|
||||||
class RSAPublicNumbers(object):
    """
    The RSA public key pair: exponent *e* and modulus *n*.
    """

    def __init__(self, e, n):
        for value in (e, n):
            if not isinstance(value, six.integer_types):
                raise TypeError("RSAPublicNumbers arguments must be integers.")

        self._e = e
        self._n = n

    e = utils.read_only_property("_e")
    n = utils.read_only_property("_n")

    def public_key(self, backend):
        """Build a backend RSAPublicKey object from these numbers."""
        return backend.load_rsa_public_numbers(self)

    def __repr__(self):
        return "<RSAPublicNumbers(e={0.e}, n={0.n})>".format(self)

    def __eq__(self, other):
        if not isinstance(other, RSAPublicNumbers):
            return NotImplemented
        return (self.e, self.n) == (other.e, other.n)

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        return hash((self.e, self.n))
|
|
|
@ -1,73 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
import warnings
|
|
||||||
|
|
||||||
from pyasn1.codec.der import decoder, encoder
|
|
||||||
from pyasn1.error import PyAsn1Error
|
|
||||||
from pyasn1.type import namedtype, univ
|
|
||||||
|
|
||||||
import six
|
|
||||||
|
|
||||||
from cryptography import utils
|
|
||||||
|
|
||||||
|
|
||||||
class _DSSSigValue(univ.Sequence):
    """
    ASN.1 SEQUENCE of the two DSS signature integers: Dss-Sig-Value ::=
    SEQUENCE { r INTEGER, s INTEGER } (RFC 3279).
    """
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('r', univ.Integer()),
        namedtype.NamedType('s', univ.Integer())
    )
|
|
||||||
|
|
||||||
|
|
||||||
def decode_rfc6979_signature(signature):
    """
    Deprecated alias for decode_dss_signature; emits a DeprecatedIn10
    warning pointing callers at the new name.
    """
    warnings.warn(
        # BUG FIX: the message previously read "... use decode_dss_signature
        # instead instead." (duplicated word).
        "decode_rfc6979_signature is deprecated and will "
        "be removed in a future version, use decode_dss_signature "
        "instead.",
        utils.DeprecatedIn10,
        stacklevel=2
    )
    return decode_dss_signature(signature)
|
|
||||||
|
|
||||||
|
|
||||||
def decode_dss_signature(signature):
    """
    Decode a DER-encoded DSS (DSA/ECDSA) signature into its (r, s) pair.

    :raises ValueError: if the input is not a well-formed ASN.1 sequence or
        carries trailing bytes.
    """
    try:
        data, remaining = decoder.decode(signature, asn1Spec=_DSSSigValue())
    except PyAsn1Error:
        raise ValueError("Invalid signature data. Unable to decode ASN.1")

    if remaining:
        raise ValueError(
            "The signature contains bytes after the end of the ASN.1 sequence."
        )

    return (int(data.getComponentByName('r')),
            int(data.getComponentByName('s')))
|
|
||||||
|
|
||||||
|
|
||||||
def encode_rfc6979_signature(r, s):
    """
    Deprecated alias for encode_dss_signature; emits a DeprecatedIn10
    warning pointing callers at the new name.
    """
    warnings.warn(
        # BUG FIX: the message previously read "... use encode_dss_signature
        # instead instead." (duplicated word).
        "encode_rfc6979_signature is deprecated and will "
        "be removed in a future version, use encode_dss_signature "
        "instead.",
        utils.DeprecatedIn10,
        stacklevel=2
    )
    return encode_dss_signature(r, s)
|
|
||||||
|
|
||||||
|
|
||||||
def encode_dss_signature(r, s):
    """
    DER-encode the (r, s) pair of a DSS (DSA/ECDSA) signature.

    :raises ValueError: if r or s is not an integer. (ValueError, not
        TypeError, is part of the public contract here.)
    """
    if (
        not isinstance(r, six.integer_types) or
        not isinstance(s, six.integer_types)
    ):
        raise ValueError("Both r and s must be integers")

    sig = _DSSSigValue()
    sig.setComponentByName('r', r)
    sig.setComponentByName('s', s)
    return encoder.encode(sig)
|
|
|
@ -1,20 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
from cryptography.hazmat.primitives.ciphers.base import (
|
|
||||||
AEADCipherContext, AEADEncryptionContext, BlockCipherAlgorithm, Cipher,
|
|
||||||
CipherAlgorithm, CipherContext
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
# Names re-exported as the public API of this package.
__all__ = [
    "Cipher",
    "CipherAlgorithm",
    "BlockCipherAlgorithm",
    "CipherContext",
    "AEADCipherContext",
    "AEADEncryptionContext",
]
|
|
|
@ -1,140 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
from cryptography import utils
|
|
||||||
from cryptography.hazmat.primitives.ciphers import (
|
|
||||||
BlockCipherAlgorithm, CipherAlgorithm
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def _verify_key_size(algorithm, key):
    """Return *key* unchanged after checking its length.

    Raises ValueError when the key's size in bits is not one of the sizes
    the algorithm advertises via its ``key_sizes`` set.
    """
    key_bits = len(key) * 8
    if key_bits not in algorithm.key_sizes:
        raise ValueError("Invalid key size ({0}) for {1}.".format(
            key_bits, algorithm.name
        ))
    return key
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(BlockCipherAlgorithm)
@utils.register_interface(CipherAlgorithm)
class AES(object):
    """The AES block cipher: 128-bit blocks; 128/192/256-bit keys."""
    name = "AES"
    block_size = 128  # bits
    key_sizes = frozenset([128, 192, 256])  # bits

    def __init__(self, key):
        # Raises ValueError if len(key) * 8 is not in key_sizes.
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        """Key length in bits."""
        return len(self.key) * 8
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(BlockCipherAlgorithm)
@utils.register_interface(CipherAlgorithm)
class Camellia(object):
    """The Camellia block cipher: 128-bit blocks; 128/192/256-bit keys."""
    name = "camellia"
    block_size = 128  # bits
    key_sizes = frozenset([128, 192, 256])  # bits

    def __init__(self, key):
        # Raises ValueError if len(key) * 8 is not in key_sizes.
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        """Key length in bits."""
        return len(self.key) * 8
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(BlockCipherAlgorithm)
@utils.register_interface(CipherAlgorithm)
class TripleDES(object):
    """The Triple DES block cipher: 64-bit blocks."""
    name = "3DES"
    block_size = 64  # bits
    key_sizes = frozenset([64, 128, 192])  # bits (before expansion below)

    def __init__(self, key):
        # Expand short keying material to the full 24 bytes:
        # an 8-byte key K becomes K|K|K, a 16-byte key K1|K2 becomes
        # K1|K2|K1; the expanded key is then length-checked.
        if len(key) == 8:
            key += key + key
        elif len(key) == 16:
            key += key[:8]
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        """Key length in bits (after expansion, always 192 here)."""
        return len(self.key) * 8
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(BlockCipherAlgorithm)
@utils.register_interface(CipherAlgorithm)
class Blowfish(object):
    """The Blowfish block cipher: 64-bit blocks; 32-448-bit keys."""
    name = "Blowfish"
    block_size = 64  # bits
    key_sizes = frozenset(range(32, 449, 8))  # bits, any multiple of 8

    def __init__(self, key):
        # Raises ValueError if len(key) * 8 is not in key_sizes.
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        """Key length in bits."""
        return len(self.key) * 8
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(BlockCipherAlgorithm)
@utils.register_interface(CipherAlgorithm)
class CAST5(object):
    """The CAST5 (CAST-128) block cipher: 64-bit blocks; 40-128-bit keys."""
    name = "CAST5"
    block_size = 64  # bits
    key_sizes = frozenset(range(40, 129, 8))  # bits, any multiple of 8

    def __init__(self, key):
        # Raises ValueError if len(key) * 8 is not in key_sizes.
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        """Key length in bits."""
        return len(self.key) * 8
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(CipherAlgorithm)
class ARC4(object):
    """The ARC4 (RC4) cipher; has no block size (not registered as a
    block cipher algorithm)."""
    name = "RC4"
    key_sizes = frozenset([40, 56, 64, 80, 128, 192, 256])  # bits

    def __init__(self, key):
        # Raises ValueError if len(key) * 8 is not in key_sizes.
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        """Key length in bits."""
        return len(self.key) * 8
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(CipherAlgorithm)
class IDEA(object):
    """The IDEA block cipher: 64-bit blocks; 128-bit keys."""
    # NOTE(review): defines block_size but is not registered as a
    # BlockCipherAlgorithm, unlike the other 64-bit block ciphers here —
    # confirm this asymmetry is intentional.
    name = "IDEA"
    block_size = 64  # bits
    key_sizes = frozenset([128])  # bits

    def __init__(self, key):
        # Raises ValueError if len(key) * 8 is not in key_sizes.
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        """Key length in bits."""
        return len(self.key) * 8
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(BlockCipherAlgorithm)
@utils.register_interface(CipherAlgorithm)
class SEED(object):
    """The SEED block cipher: 128-bit blocks; 128-bit keys."""
    name = "SEED"
    block_size = 128  # bits
    key_sizes = frozenset([128])  # bits

    def __init__(self, key):
        # Raises ValueError if len(key) * 8 is not in key_sizes.
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        """Key length in bits."""
        return len(self.key) * 8
|
|
|
@ -1,203 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
import abc
|
|
||||||
|
|
||||||
import six
|
|
||||||
|
|
||||||
from cryptography import utils
|
|
||||||
from cryptography.exceptions import (
|
|
||||||
AlreadyFinalized, AlreadyUpdated, NotYetFinalized, UnsupportedAlgorithm,
|
|
||||||
_Reasons
|
|
||||||
)
|
|
||||||
from cryptography.hazmat.backends.interfaces import CipherBackend
|
|
||||||
from cryptography.hazmat.primitives.ciphers import modes
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
class CipherAlgorithm(object):
    """Abstract interface implemented by every cipher algorithm."""

    @abc.abstractproperty
    def name(self):
        """
        A string naming this algorithm (e.g. "AES", "Camellia").
        """

    @abc.abstractproperty
    def key_size(self):
        """
        The size of the key being used as an integer in bits (e.g. 128, 256).
        """
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
class BlockCipherAlgorithm(object):
    """Extra interface for cipher algorithms that work on fixed-size blocks."""

    @abc.abstractproperty
    def block_size(self):
        """
        The size of a block as an integer in bits (e.g. 64, 128).
        """
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
class CipherContext(object):
    """Streaming interface for incremental encryption or decryption."""

    @abc.abstractmethod
    def update(self, data):
        """
        Processes the provided bytes through the cipher and returns the results
        as bytes.
        """

    @abc.abstractmethod
    def finalize(self):
        """
        Returns the results of processing the final block as bytes.
        """
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
class AEADCipherContext(object):
    """Cipher-context extension for AEAD modes: adds AAD support."""

    @abc.abstractmethod
    def authenticate_additional_data(self, data):
        """
        Authenticates the provided bytes.
        """
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
class AEADEncryptionContext(object):
    """Encryption-side AEAD context interface: exposes the computed tag."""

    @abc.abstractproperty
    def tag(self):
        """
        Returns tag bytes. This is only available after encryption is
        finalized.
        """
|
|
||||||
|
|
||||||
|
|
||||||
class Cipher(object):
    """Pairs a cipher algorithm with a mode and a backend, and builds
    encryption/decryption contexts from that combination."""

    def __init__(self, algorithm, mode, backend):
        if not isinstance(backend, CipherBackend):
            raise UnsupportedAlgorithm(
                "Backend object does not implement CipherBackend.",
                _Reasons.BACKEND_MISSING_INTERFACE
            )

        if not isinstance(algorithm, CipherAlgorithm):
            raise TypeError("Expected interface of CipherAlgorithm.")

        if mode is not None:
            # Let the mode reject incompatible algorithms (e.g. wrong IV size).
            mode.validate_for_algorithm(algorithm)

        self.algorithm = algorithm
        self.mode = mode
        self._backend = backend

    def encryptor(self):
        """Return a fresh encryption context for this algorithm/mode pair."""
        if isinstance(self.mode, modes.ModeWithAuthenticationTag):
            # A caller-supplied tag is only meaningful for verification
            # during decryption; reject it here.
            if self.mode.tag is not None:
                raise ValueError(
                    "Authentication tag must be None when encrypting."
                )
        ctx = self._backend.create_symmetric_encryption_ctx(
            self.algorithm, self.mode
        )
        return self._wrap_ctx(ctx, encrypt=True)

    def decryptor(self):
        """Return a fresh decryption context for this algorithm/mode pair."""
        if isinstance(self.mode, modes.ModeWithAuthenticationTag):
            if self.mode.tag is None:
                raise ValueError(
                    "Authentication tag must be provided when decrypting."
                )
        ctx = self._backend.create_symmetric_decryption_ctx(
            self.algorithm, self.mode
        )
        return self._wrap_ctx(ctx, encrypt=False)

    def _wrap_ctx(self, ctx, encrypt):
        # AEAD modes get wrappers that track tags and byte limits; plain
        # modes get the minimal finalize-guard wrapper.
        if isinstance(self.mode, modes.ModeWithAuthenticationTag):
            if encrypt:
                return _AEADEncryptionContext(ctx)
            else:
                return _AEADCipherContext(ctx)
        else:
            return _CipherContext(ctx)
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(CipherContext)
class _CipherContext(object):
    """Thin wrapper around a backend cipher context that enforces the
    no-use-after-finalize contract."""

    def __init__(self, ctx):
        self._ctx = ctx

    def update(self, data):
        """Feed *data* through the backend context and return its output."""
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        return self._ctx.update(data)

    def finalize(self):
        """Flush the backend context and permanently retire this wrapper."""
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        remainder = self._ctx.finalize()
        self._ctx = None
        return remainder
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(AEADCipherContext)
@utils.register_interface(CipherContext)
class _AEADCipherContext(object):
    """Wrapper around a backend AEAD cipher context.

    Enforces the AEAD usage contract: AAD must be supplied before any
    data is processed, nothing may be supplied after finalization, and the
    per-mode byte limits are respected.
    """

    def __init__(self, ctx):
        self._ctx = ctx
        self._bytes_processed = 0       # running total passed to update()
        self._aad_bytes_processed = 0   # running total of authenticated AAD
        self._tag = None                # captured from the backend at finalize
        self._updated = False           # True once update() has been called

    def update(self, data):
        """Process *data*, enforcing the mode's encrypted-byte limit."""
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        self._updated = True
        self._bytes_processed += len(data)
        if self._bytes_processed > self._ctx._mode._MAX_ENCRYPTED_BYTES:
            raise ValueError(
                "{0} has a maximum encrypted byte limit of {1}".format(
                    self._ctx._mode.name, self._ctx._mode._MAX_ENCRYPTED_BYTES
                )
            )

        return self._ctx.update(data)

    def finalize(self):
        """Finalize, capture the authentication tag, and retire the context."""
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        data = self._ctx.finalize()
        self._tag = self._ctx.tag
        self._ctx = None
        return data

    def authenticate_additional_data(self, data):
        """Authenticate *data* (AAD); must be called before any update()."""
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        if self._updated:
            raise AlreadyUpdated("Update has been called on this context.")

        self._aad_bytes_processed += len(data)
        if self._aad_bytes_processed > self._ctx._mode._MAX_AAD_BYTES:
            raise ValueError(
                # BUG FIX: the second placeholder was "{0}", which printed
                # the mode name in place of the AAD byte limit.
                "{0} has a maximum AAD byte limit of {1}".format(
                    self._ctx._mode.name, self._ctx._mode._MAX_AAD_BYTES
                )
            )

        self._ctx.authenticate_additional_data(data)
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(AEADEncryptionContext)
class _AEADEncryptionContext(_AEADCipherContext):
    """Encryption-side AEAD wrapper that also exposes the tag."""

    @property
    def tag(self):
        """The authentication tag; readable only after finalize()."""
        if self._ctx is not None:
            raise NotYetFinalized(
                "You must finalize encryption before getting the tag."
            )
        return self._tag
|
|
|
@ -1,164 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
import abc
|
|
||||||
|
|
||||||
import six
|
|
||||||
|
|
||||||
from cryptography import utils
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
class Mode(object):
    """Abstract interface implemented by every cipher mode of operation."""

    @abc.abstractproperty
    def name(self):
        """
        A string naming this mode (e.g. "ECB", "CBC").
        """

    @abc.abstractmethod
    def validate_for_algorithm(self, algorithm):
        """
        Checks that all the necessary invariants of this (mode, algorithm)
        combination are met.
        """
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
class ModeWithInitializationVector(object):
    """Extra interface for modes that take an initialization vector."""

    @abc.abstractproperty
    def initialization_vector(self):
        """
        The value of the initialization vector for this mode as bytes.
        """
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
class ModeWithNonce(object):
    """Extra interface for modes that take a nonce."""

    @abc.abstractproperty
    def nonce(self):
        """
        The value of the nonce for this mode as bytes.
        """
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
class ModeWithAuthenticationTag(object):
    """Extra interface for AEAD modes that carry an authentication tag."""

    @abc.abstractproperty
    def tag(self):
        """
        The value of the tag supplied to the constructor of this mode.
        """
|
|
||||||
|
|
||||||
|
|
||||||
def _check_iv_length(self, algorithm):
    """Shared ``validate_for_algorithm`` implementation for IV-based modes.

    The IV must be exactly one cipher block long; raises ValueError
    otherwise.
    """
    iv_bits = len(self.initialization_vector) * 8
    if iv_bits != algorithm.block_size:
        raise ValueError("Invalid IV size ({0}) for {1}.".format(
            len(self.initialization_vector), self.name
        ))
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(Mode)
@utils.register_interface(ModeWithInitializationVector)
class CBC(object):
    """Cipher Block Chaining mode; requires a block-sized IV."""
    name = "CBC"

    def __init__(self, initialization_vector):
        self._initialization_vector = initialization_vector

    initialization_vector = utils.read_only_property("_initialization_vector")
    # Validation only checks that the IV is exactly one block long.
    validate_for_algorithm = _check_iv_length
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(Mode)
class ECB(object):
    """Electronic Codebook mode; takes no IV, so there is nothing to
    validate."""
    name = "ECB"

    def validate_for_algorithm(self, algorithm):
        pass
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(Mode)
@utils.register_interface(ModeWithInitializationVector)
class OFB(object):
    """Output Feedback mode; requires a block-sized IV."""
    name = "OFB"

    def __init__(self, initialization_vector):
        self._initialization_vector = initialization_vector

    initialization_vector = utils.read_only_property("_initialization_vector")
    # Validation only checks that the IV is exactly one block long.
    validate_for_algorithm = _check_iv_length
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(Mode)
@utils.register_interface(ModeWithInitializationVector)
class CFB(object):
    """Cipher Feedback mode; requires a block-sized IV."""
    name = "CFB"

    def __init__(self, initialization_vector):
        self._initialization_vector = initialization_vector

    initialization_vector = utils.read_only_property("_initialization_vector")
    # Validation only checks that the IV is exactly one block long.
    validate_for_algorithm = _check_iv_length
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(Mode)
@utils.register_interface(ModeWithInitializationVector)
class CFB8(object):
    """Cipher Feedback mode with 8-bit feedback; requires a block-sized IV."""
    name = "CFB8"

    def __init__(self, initialization_vector):
        self._initialization_vector = initialization_vector

    initialization_vector = utils.read_only_property("_initialization_vector")
    # Validation only checks that the IV is exactly one block long.
    validate_for_algorithm = _check_iv_length
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(Mode)
@utils.register_interface(ModeWithNonce)
class CTR(object):
    """Counter mode; requires a nonce exactly one cipher block long."""
    name = "CTR"

    def __init__(self, nonce):
        self._nonce = nonce

    nonce = utils.read_only_property("_nonce")

    def validate_for_algorithm(self, algorithm):
        # The nonce (initial counter block) must match the block size.
        if len(self.nonce) * 8 != algorithm.block_size:
            raise ValueError("Invalid nonce size ({0}) for {1}.".format(
                len(self.nonce), self.name
            ))
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(Mode)
@utils.register_interface(ModeWithInitializationVector)
@utils.register_interface(ModeWithAuthenticationTag)
class GCM(object):
    """Galois/Counter Mode: authenticated encryption with AAD support."""
    name = "GCM"
    # Per-message limits, expressed in bytes: (2 ** 39 - 256) bits of
    # plaintext and 2 ** 64 bits of additional authenticated data.
    _MAX_ENCRYPTED_BYTES = (2 ** 39 - 256) // 8
    _MAX_AAD_BYTES = (2 ** 64) // 8

    def __init__(self, initialization_vector, tag=None, min_tag_length=16):
        # len(initialization_vector) must be in [1, 2 ** 64), but it's
        # impossible to actually construct a bytes object that large, so we
        # don't check for it.
        if min_tag_length < 4:
            raise ValueError("min_tag_length must be >= 4")
        # tag is only given when decrypting; reject tags shorter than the
        # caller's declared minimum.
        if tag is not None and len(tag) < min_tag_length:
            raise ValueError(
                "Authentication tag must be {0} bytes or longer.".format(
                    min_tag_length)
            )

        self._initialization_vector = initialization_vector
        self._tag = tag

    tag = utils.read_only_property("_tag")
    initialization_vector = utils.read_only_property("_initialization_vector")

    def validate_for_algorithm(self, algorithm):
        # No additional (mode, algorithm) constraints are enforced here.
        pass
|
|
|
@ -1,66 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
from cryptography import utils
|
|
||||||
from cryptography.exceptions import (
|
|
||||||
AlreadyFinalized, UnsupportedAlgorithm, _Reasons
|
|
||||||
)
|
|
||||||
from cryptography.hazmat.backends.interfaces import CMACBackend
|
|
||||||
from cryptography.hazmat.primitives import ciphers, interfaces
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(interfaces.MACContext)
class CMAC(object):
    """Cipher-based message authentication code (CMAC) context.

    Wraps a backend-provided CMAC implementation and enforces the
    use-before-finalize contract shared by all MAC contexts.
    """

    def __init__(self, algorithm, backend, ctx=None):
        if not isinstance(backend, CMACBackend):
            raise UnsupportedAlgorithm(
                "Backend object does not implement CMACBackend.",
                _Reasons.BACKEND_MISSING_INTERFACE
            )

        if not isinstance(algorithm, ciphers.BlockCipherAlgorithm):
            raise TypeError(
                "Expected instance of BlockCipherAlgorithm."
            )
        self._algorithm = algorithm
        self._backend = backend
        self._ctx = (
            self._backend.create_cmac_ctx(self._algorithm)
            if ctx is None else ctx
        )

    def update(self, data):
        """Feed *data* (bytes) into the MAC."""
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        if not isinstance(data, bytes):
            raise TypeError("data must be bytes.")
        self._ctx.update(data)

    def finalize(self):
        """Return the MAC value and retire this context."""
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        mac = self._ctx.finalize()
        self._ctx = None
        return mac

    def verify(self, signature):
        """Finalize and let the backend context check *signature*."""
        if not isinstance(signature, bytes):
            raise TypeError("signature must be bytes.")
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")

        ctx, self._ctx = self._ctx, None
        ctx.verify(signature)

    def copy(self):
        """Return an independent CMAC sharing this context's state so far."""
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        return CMAC(
            self._algorithm,
            backend=self._backend,
            ctx=self._ctx.copy()
        )
|
|
|
@ -1,26 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
import hmac
|
|
||||||
|
|
||||||
from cryptography.hazmat.bindings._constant_time import lib
|
|
||||||
|
|
||||||
|
|
||||||
if hasattr(hmac, "compare_digest"):
    # Python 2.7.7+/3.3+: defer to the stdlib constant-time comparison.
    def bytes_eq(a, b):
        """Compare two byte strings for equality without leaking timing."""
        if not (isinstance(a, bytes) and isinstance(b, bytes)):
            raise TypeError("a and b must be bytes.")

        return hmac.compare_digest(a, b)

else:
    # Older interpreters: fall back to the bundled C implementation.
    def bytes_eq(a, b):
        """Compare two byte strings for equality without leaking timing."""
        if not (isinstance(a, bytes) and isinstance(b, bytes)):
            raise TypeError("a and b must be bytes.")

        return lib.Cryptography_constant_time_bytes_eq(
            a, len(a), b, len(b)
        ) == 1
|
|
|
@ -1,163 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
import abc
|
|
||||||
|
|
||||||
import six
|
|
||||||
|
|
||||||
from cryptography import utils
|
|
||||||
from cryptography.exceptions import (
|
|
||||||
AlreadyFinalized, UnsupportedAlgorithm, _Reasons
|
|
||||||
)
|
|
||||||
from cryptography.hazmat.backends.interfaces import HashBackend
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
class HashAlgorithm(object):
    """Abstract interface implemented by every hash algorithm."""

    @abc.abstractproperty
    def name(self):
        """
        A string naming this algorithm (e.g. "sha256", "md5").
        """

    @abc.abstractproperty
    def digest_size(self):
        """
        The size of the resulting digest in bytes.
        """

    @abc.abstractproperty
    def block_size(self):
        """
        The internal block size of the hash algorithm in bytes.
        """
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
class HashContext(object):
    """Streaming interface for incrementally computing a hash digest."""

    @abc.abstractproperty
    def algorithm(self):
        """
        A HashAlgorithm that will be used by this context.
        """

    @abc.abstractmethod
    def update(self, data):
        """
        Processes the provided bytes through the hash.
        """

    @abc.abstractmethod
    def finalize(self):
        """
        Finalizes the hash context and returns the hash digest as bytes.
        """

    @abc.abstractmethod
    def copy(self):
        """
        Return a HashContext that is a copy of the current context.
        """
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(HashContext)
class Hash(object):
    """A hash context bound to a backend implementation.

    Enforces the use-before-finalize contract around the backend's
    native hash context.
    """

    def __init__(self, algorithm, backend, ctx=None):
        if not isinstance(backend, HashBackend):
            raise UnsupportedAlgorithm(
                "Backend object does not implement HashBackend.",
                _Reasons.BACKEND_MISSING_INTERFACE
            )

        if not isinstance(algorithm, HashAlgorithm):
            raise TypeError("Expected instance of hashes.HashAlgorithm.")
        self._algorithm = algorithm
        self._backend = backend
        self._ctx = (
            self._backend.create_hash_ctx(self.algorithm)
            if ctx is None else ctx
        )

    algorithm = utils.read_only_property("_algorithm")

    def update(self, data):
        """Feed *data* (bytes) into the hash."""
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        if not isinstance(data, bytes):
            raise TypeError("data must be bytes.")
        self._ctx.update(data)

    def copy(self):
        """Return an independent Hash sharing this context's state so far."""
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        return Hash(
            self.algorithm, backend=self._backend, ctx=self._ctx.copy()
        )

    def finalize(self):
        """Return the digest and retire this context."""
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        digest = self._ctx.finalize()
        self._ctx = None
        return digest
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(HashAlgorithm)
class SHA1(object):
    """The SHA-1 hash algorithm: 20-byte digest, 64-byte block."""
    name = "sha1"
    digest_size = 20  # bytes
    block_size = 64   # bytes
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(HashAlgorithm)
class SHA224(object):
    """The SHA-224 hash algorithm: 28-byte digest, 64-byte block."""
    name = "sha224"
    digest_size = 28  # bytes
    block_size = 64   # bytes
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(HashAlgorithm)
class SHA256(object):
    """The SHA-256 hash algorithm: 32-byte digest, 64-byte block."""
    name = "sha256"
    digest_size = 32  # bytes
    block_size = 64   # bytes
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(HashAlgorithm)
class SHA384(object):
    """The SHA-384 hash algorithm: 48-byte digest, 128-byte block."""
    name = "sha384"
    digest_size = 48   # bytes
    block_size = 128   # bytes
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(HashAlgorithm)
class SHA512(object):
    """The SHA-512 hash algorithm: 64-byte digest, 128-byte block."""
    name = "sha512"
    digest_size = 64   # bytes
    block_size = 128   # bytes
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(HashAlgorithm)
class RIPEMD160(object):
    """The RIPEMD-160 hash algorithm: 20-byte digest, 64-byte block."""
    name = "ripemd160"
    digest_size = 20  # bytes
    block_size = 64   # bytes
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(HashAlgorithm)
class Whirlpool(object):
    """The Whirlpool hash algorithm: 64-byte digest, 64-byte block."""
    name = "whirlpool"
    digest_size = 64  # bytes
    block_size = 64   # bytes
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(HashAlgorithm)
class MD5(object):
    """The MD5 hash algorithm: 16-byte digest, 64-byte block."""
    name = "md5"
    digest_size = 16  # bytes
    block_size = 64   # bytes
|
|
|
@ -1,69 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
from cryptography import utils
|
|
||||||
from cryptography.exceptions import (
|
|
||||||
AlreadyFinalized, UnsupportedAlgorithm, _Reasons
|
|
||||||
)
|
|
||||||
from cryptography.hazmat.backends.interfaces import HMACBackend
|
|
||||||
from cryptography.hazmat.primitives import hashes, interfaces
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(interfaces.MACContext)
@utils.register_interface(hashes.HashContext)
class HMAC(object):
    """Hash-based message authentication code (HMAC) context.

    Wraps a backend-provided HMAC implementation and enforces the
    use-before-finalize contract shared by all MAC contexts.
    """

    def __init__(self, key, algorithm, backend, ctx=None):
        if not isinstance(backend, HMACBackend):
            raise UnsupportedAlgorithm(
                "Backend object does not implement HMACBackend.",
                _Reasons.BACKEND_MISSING_INTERFACE
            )

        if not isinstance(algorithm, hashes.HashAlgorithm):
            raise TypeError("Expected instance of hashes.HashAlgorithm.")
        self._algorithm = algorithm
        self._backend = backend
        self._key = key
        self._ctx = (
            self._backend.create_hmac_ctx(key, self.algorithm)
            if ctx is None else ctx
        )

    algorithm = utils.read_only_property("_algorithm")

    def update(self, data):
        """Feed *data* (bytes) into the MAC."""
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        if not isinstance(data, bytes):
            raise TypeError("data must be bytes.")
        self._ctx.update(data)

    def copy(self):
        """Return an independent HMAC sharing this context's state so far."""
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        return HMAC(
            self._key,
            self.algorithm,
            backend=self._backend,
            ctx=self._ctx.copy()
        )

    def finalize(self):
        """Return the MAC value and retire this context."""
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        mac = self._ctx.finalize()
        self._ctx = None
        return mac

    def verify(self, signature):
        """Finalize and let the backend context check *signature*."""
        if not isinstance(signature, bytes):
            raise TypeError("signature must be bytes.")
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")

        ctx, self._ctx = self._ctx, None
        ctx.verify(signature)
|
|
|
@ -1,37 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
import abc
|
|
||||||
|
|
||||||
import six
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
class MACContext(object):
    """Streaming interface for computing a message authentication code."""

    @abc.abstractmethod
    def update(self, data):
        """
        Processes the provided bytes.
        """

    @abc.abstractmethod
    def finalize(self):
        """
        Returns the message authentication code as bytes.
        """

    @abc.abstractmethod
    def copy(self):
        """
        Return a MACContext that is a copy of the current context.
        """

    @abc.abstractmethod
    def verify(self, signature):
        """
        Checks if the generated message authentication code matches the
        signature.
        """
|
|
|
@ -1,26 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
import abc
|
|
||||||
|
|
||||||
import six
|
|
||||||
|
|
||||||
|
|
||||||
@six.add_metaclass(abc.ABCMeta)
class KeyDerivationFunction(object):
    """Abstract interface implemented by every key-derivation function."""

    @abc.abstractmethod
    def derive(self, key_material):
        """
        Deterministically generates and returns a new key based on the existing
        key material.
        """

    @abc.abstractmethod
    def verify(self, key_material, expected_key):
        """
        Checks whether the key generated by the key material matches the
        expected derived key. Raises an exception if they do not match.
        """
|
|
|
@ -1,125 +0,0 @@
|
||||||
# This file is dual licensed under the terms of the Apache License, Version
|
|
||||||
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
|
||||||
# for complete details.
|
|
||||||
|
|
||||||
from __future__ import absolute_import, division, print_function
|
|
||||||
|
|
||||||
import struct
|
|
||||||
|
|
||||||
from cryptography import utils
|
|
||||||
from cryptography.exceptions import (
|
|
||||||
AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons
|
|
||||||
)
|
|
||||||
from cryptography.hazmat.backends.interfaces import HMACBackend
|
|
||||||
from cryptography.hazmat.backends.interfaces import HashBackend
|
|
||||||
from cryptography.hazmat.primitives import constant_time, hashes, hmac
|
|
||||||
from cryptography.hazmat.primitives.kdf import KeyDerivationFunction
|
|
||||||
|
|
||||||
|
|
||||||
def _int_to_u32be(n):
|
|
||||||
return struct.pack('>I', n)
|
|
||||||
|
|
||||||
|
|
||||||
def _common_args_checks(algorithm, length, otherinfo):
|
|
||||||
max_length = algorithm.digest_size * (2 ** 32 - 1)
|
|
||||||
if length > max_length:
|
|
||||||
raise ValueError(
|
|
||||||
"Can not derive keys larger than {0} bits.".format(
|
|
||||||
max_length
|
|
||||||
))
|
|
||||||
if not (otherinfo is None or isinstance(otherinfo, bytes)):
|
|
||||||
raise TypeError("otherinfo must be bytes.")
|
|
||||||
|
|
||||||
|
|
||||||
def _concatkdf_derive(key_material, length, auxfn, otherinfo):
|
|
||||||
if not isinstance(key_material, bytes):
|
|
||||||
raise TypeError("key_material must be bytes.")
|
|
||||||
|
|
||||||
output = [b""]
|
|
||||||
outlen = 0
|
|
||||||
counter = 1
|
|
||||||
|
|
||||||
while (length > outlen):
|
|
||||||
h = auxfn()
|
|
||||||
h.update(_int_to_u32be(counter))
|
|
||||||
h.update(key_material)
|
|
||||||
h.update(otherinfo)
|
|
||||||
output.append(h.finalize())
|
|
||||||
outlen += len(output[-1])
|
|
||||||
counter += 1
|
|
||||||
|
|
||||||
return b"".join(output)[:length]
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(KeyDerivationFunction)
class ConcatKDFHash(object):
    """
    Concatenation KDF using an unkeyed hash as the auxiliary function.

    Single-use: ``derive``/``verify`` may only be called once per instance.
    """

    def __init__(self, algorithm, length, otherinfo, backend):
        _common_args_checks(algorithm, length, otherinfo)
        if not isinstance(backend, HashBackend):
            raise UnsupportedAlgorithm(
                "Backend object does not implement HashBackend.",
                _Reasons.BACKEND_MISSING_INTERFACE
            )
        self._algorithm = algorithm
        self._length = length
        # Normalize None to b"" so the derive loop can feed it to the
        # hash unconditionally.
        self._otherinfo = b"" if otherinfo is None else otherinfo
        self._backend = backend
        self._used = False

    def _hash(self):
        # Fresh hash context for every counter iteration of the KDF loop.
        return hashes.Hash(self._algorithm, self._backend)

    def derive(self, key_material):
        """Derive ``length`` bytes from *key_material* (one-shot)."""
        if self._used:
            raise AlreadyFinalized
        self._used = True
        return _concatkdf_derive(
            key_material, self._length, self._hash, self._otherinfo)

    def verify(self, key_material, expected_key):
        """Raise ``InvalidKey`` unless derivation matches *expected_key*."""
        derived = self.derive(key_material)
        # Constant-time compare to avoid leaking a prefix match.
        if not constant_time.bytes_eq(derived, expected_key):
            raise InvalidKey
|
|
||||||
|
|
||||||
|
|
||||||
@utils.register_interface(KeyDerivationFunction)
class ConcatKDFHMAC(object):
    """
    Concatenation KDF using HMAC (keyed with *salt*) as the auxiliary
    function.

    Single-use: ``derive``/``verify`` may only be called once per instance.
    """

    def __init__(self, algorithm, length, salt, otherinfo, backend):
        _common_args_checks(algorithm, length, otherinfo)
        self._algorithm = algorithm
        self._length = length
        # Normalize None to b"" so the derive loop can feed it to the
        # HMAC unconditionally.
        self._otherinfo = b"" if otherinfo is None else otherinfo

        if not (salt is None or isinstance(salt, bytes)):
            raise TypeError("salt must be bytes.")
        # Default salt: an all-zero key one hash block long.
        self._salt = (b"\x00" * algorithm.block_size) if salt is None else salt

        if not isinstance(backend, HMACBackend):
            raise UnsupportedAlgorithm(
                "Backend object does not implement HMACBackend.",
                _Reasons.BACKEND_MISSING_INTERFACE
            )
        self._backend = backend
        self._used = False

    def _hmac(self):
        # Fresh HMAC context, keyed with the salt, per counter iteration.
        return hmac.HMAC(self._salt, self._algorithm, self._backend)

    def derive(self, key_material):
        """Derive ``length`` bytes from *key_material* (one-shot)."""
        if self._used:
            raise AlreadyFinalized
        self._used = True
        return _concatkdf_derive(
            key_material, self._length, self._hmac, self._otherinfo)

    def verify(self, key_material, expected_key):
        """Raise ``InvalidKey`` unless derivation matches *expected_key*."""
        derived = self.derive(key_material)
        # Constant-time compare to avoid leaking a prefix match.
        if not constant_time.bytes_eq(derived, expected_key):
            raise InvalidKey
|
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Add table
Reference in a new issue