platform for raspberry pi
This commit is contained in:
commit
73d4832b38
523 changed files with 190349 additions and 0 deletions
8
.gitignore
vendored
Normal file
8
.gitignore
vendored
Normal file
|
@ -0,0 +1,8 @@
|
||||||
|
*.swp
|
||||||
|
*.pyc
|
||||||
|
*.pyo
|
||||||
|
*.pyd
|
||||||
|
__pycache__
|
||||||
|
pip_cache
|
||||||
|
bin
|
||||||
|
.DS_Store
|
3
README.md
Normal file
3
README.md
Normal file
|
@ -0,0 +1,3 @@
|
||||||
|
to update you need:
|
||||||
|
|
||||||
|
apt install python3-pip python3.4 python3.4-dev virtualenv libffi-dev libssl-dev
|
1944
lib/python3.4/site-packages/OpenSSL/SSL.py
Normal file
1944
lib/python3.4/site-packages/OpenSSL/SSL.py
Normal file
File diff suppressed because it is too large
Load diff
20
lib/python3.4/site-packages/OpenSSL/__init__.py
Normal file
20
lib/python3.4/site-packages/OpenSSL/__init__.py
Normal file
|
@ -0,0 +1,20 @@
|
||||||
|
# Copyright (C) AB Strakt
|
||||||
|
# See LICENSE for details.
|
||||||
|
|
||||||
|
"""
|
||||||
|
pyOpenSSL - A simple wrapper around the OpenSSL library
|
||||||
|
"""
|
||||||
|
|
||||||
|
from OpenSSL import rand, crypto, SSL
|
||||||
|
from OpenSSL.version import (
|
||||||
|
__author__, __copyright__, __email__, __license__, __summary__, __title__,
|
||||||
|
__uri__, __version__,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"SSL", "crypto", "rand",
|
||||||
|
|
||||||
|
"__author__", "__copyright__", "__email__", "__license__", "__summary__",
|
||||||
|
"__title__", "__uri__", "__version__",
|
||||||
|
]
|
141
lib/python3.4/site-packages/OpenSSL/_util.py
Normal file
141
lib/python3.4/site-packages/OpenSSL/_util.py
Normal file
|
@ -0,0 +1,141 @@
|
||||||
|
import sys
|
||||||
|
import warnings
|
||||||
|
|
||||||
|
from six import PY3, binary_type, text_type
|
||||||
|
|
||||||
|
from cryptography.hazmat.bindings.openssl.binding import Binding
|
||||||
|
|
||||||
|
|
||||||
|
binding = Binding()
|
||||||
|
binding.init_static_locks()
|
||||||
|
ffi = binding.ffi
|
||||||
|
lib = binding.lib
|
||||||
|
|
||||||
|
|
||||||
|
def text(charp):
|
||||||
|
"""
|
||||||
|
Get a native string type representing of the given CFFI ``char*`` object.
|
||||||
|
|
||||||
|
:param charp: A C-style string represented using CFFI.
|
||||||
|
|
||||||
|
:return: :class:`str`
|
||||||
|
"""
|
||||||
|
if not charp:
|
||||||
|
return ""
|
||||||
|
return native(ffi.string(charp))
|
||||||
|
|
||||||
|
|
||||||
|
def exception_from_error_queue(exception_type):
|
||||||
|
"""
|
||||||
|
Convert an OpenSSL library failure into a Python exception.
|
||||||
|
|
||||||
|
When a call to the native OpenSSL library fails, this is usually signalled
|
||||||
|
by the return value, and an error code is stored in an error queue
|
||||||
|
associated with the current thread. The err library provides functions to
|
||||||
|
obtain these error codes and textual error messages.
|
||||||
|
"""
|
||||||
|
errors = []
|
||||||
|
|
||||||
|
while True:
|
||||||
|
error = lib.ERR_get_error()
|
||||||
|
if error == 0:
|
||||||
|
break
|
||||||
|
errors.append((
|
||||||
|
text(lib.ERR_lib_error_string(error)),
|
||||||
|
text(lib.ERR_func_error_string(error)),
|
||||||
|
text(lib.ERR_reason_error_string(error))))
|
||||||
|
|
||||||
|
raise exception_type(errors)
|
||||||
|
|
||||||
|
|
||||||
|
def make_assert(error):
|
||||||
|
"""
|
||||||
|
Create an assert function that uses :func:`exception_from_error_queue` to
|
||||||
|
raise an exception wrapped by *error*.
|
||||||
|
"""
|
||||||
|
def openssl_assert(ok):
|
||||||
|
"""
|
||||||
|
If *ok* is not True, retrieve the error from OpenSSL and raise it.
|
||||||
|
"""
|
||||||
|
if ok is not True:
|
||||||
|
exception_from_error_queue(error)
|
||||||
|
|
||||||
|
return openssl_assert
|
||||||
|
|
||||||
|
|
||||||
|
def native(s):
|
||||||
|
"""
|
||||||
|
Convert :py:class:`bytes` or :py:class:`unicode` to the native
|
||||||
|
:py:class:`str` type, using UTF-8 encoding if conversion is necessary.
|
||||||
|
|
||||||
|
:raise UnicodeError: The input string is not UTF-8 decodeable.
|
||||||
|
|
||||||
|
:raise TypeError: The input is neither :py:class:`bytes` nor
|
||||||
|
:py:class:`unicode`.
|
||||||
|
"""
|
||||||
|
if not isinstance(s, (binary_type, text_type)):
|
||||||
|
raise TypeError("%r is neither bytes nor unicode" % s)
|
||||||
|
if PY3:
|
||||||
|
if isinstance(s, binary_type):
|
||||||
|
return s.decode("utf-8")
|
||||||
|
else:
|
||||||
|
if isinstance(s, text_type):
|
||||||
|
return s.encode("utf-8")
|
||||||
|
return s
|
||||||
|
|
||||||
|
|
||||||
|
def path_string(s):
|
||||||
|
"""
|
||||||
|
Convert a Python string to a :py:class:`bytes` string identifying the same
|
||||||
|
path and which can be passed into an OpenSSL API accepting a filename.
|
||||||
|
|
||||||
|
:param s: An instance of :py:class:`bytes` or :py:class:`unicode`.
|
||||||
|
|
||||||
|
:return: An instance of :py:class:`bytes`.
|
||||||
|
"""
|
||||||
|
if isinstance(s, binary_type):
|
||||||
|
return s
|
||||||
|
elif isinstance(s, text_type):
|
||||||
|
return s.encode(sys.getfilesystemencoding())
|
||||||
|
else:
|
||||||
|
raise TypeError("Path must be represented as bytes or unicode string")
|
||||||
|
|
||||||
|
|
||||||
|
if PY3:
|
||||||
|
def byte_string(s):
|
||||||
|
return s.encode("charmap")
|
||||||
|
else:
|
||||||
|
def byte_string(s):
|
||||||
|
return s
|
||||||
|
|
||||||
|
|
||||||
|
# A marker object to observe whether some optional arguments are passed any
|
||||||
|
# value or not.
|
||||||
|
UNSPECIFIED = object()
|
||||||
|
|
||||||
|
_TEXT_WARNING = (
|
||||||
|
text_type.__name__ + " for {0} is no longer accepted, use bytes"
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def text_to_bytes_and_warn(label, obj):
|
||||||
|
"""
|
||||||
|
If ``obj`` is text, emit a warning that it should be bytes instead and try
|
||||||
|
to convert it to bytes automatically.
|
||||||
|
|
||||||
|
:param str label: The name of the parameter from which ``obj`` was taken
|
||||||
|
(so a developer can easily find the source of the problem and correct
|
||||||
|
it).
|
||||||
|
|
||||||
|
:return: If ``obj`` is the text string type, a ``bytes`` object giving the
|
||||||
|
UTF-8 encoding of that text is returned. Otherwise, ``obj`` itself is
|
||||||
|
returned.
|
||||||
|
"""
|
||||||
|
if isinstance(obj, text_type):
|
||||||
|
warnings.warn(
|
||||||
|
_TEXT_WARNING.format(label),
|
||||||
|
category=DeprecationWarning,
|
||||||
|
stacklevel=3
|
||||||
|
)
|
||||||
|
return obj.encode('utf-8')
|
||||||
|
return obj
|
2807
lib/python3.4/site-packages/OpenSSL/crypto.py
Normal file
2807
lib/python3.4/site-packages/OpenSSL/crypto.py
Normal file
File diff suppressed because it is too large
Load diff
211
lib/python3.4/site-packages/OpenSSL/rand.py
Normal file
211
lib/python3.4/site-packages/OpenSSL/rand.py
Normal file
|
@ -0,0 +1,211 @@
|
||||||
|
"""
|
||||||
|
PRNG management routines, thin wrappers.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import warnings
|
||||||
|
|
||||||
|
from functools import partial
|
||||||
|
|
||||||
|
from six import integer_types as _integer_types
|
||||||
|
|
||||||
|
from OpenSSL._util import (
|
||||||
|
ffi as _ffi,
|
||||||
|
lib as _lib,
|
||||||
|
exception_from_error_queue as _exception_from_error_queue,
|
||||||
|
path_string as _path_string)
|
||||||
|
|
||||||
|
|
||||||
|
class Error(Exception):
|
||||||
|
"""
|
||||||
|
An error occurred in an :mod:`OpenSSL.rand` API.
|
||||||
|
|
||||||
|
If the current RAND method supports any errors, this is raised when needed.
|
||||||
|
The default method does not raise this when the entropy pool is depleted.
|
||||||
|
|
||||||
|
Whenever this exception is raised directly, it has a list of error messages
|
||||||
|
from the OpenSSL error queue, where each item is a tuple *(lib, function,
|
||||||
|
reason)*. Here *lib*, *function* and *reason* are all strings, describing
|
||||||
|
where and what the problem is.
|
||||||
|
|
||||||
|
See :manpage:`err(3)` for more information.
|
||||||
|
"""
|
||||||
|
|
||||||
|
_raise_current_error = partial(_exception_from_error_queue, Error)
|
||||||
|
|
||||||
|
_unspecified = object()
|
||||||
|
|
||||||
|
_builtin_bytes = bytes
|
||||||
|
|
||||||
|
|
||||||
|
def bytes(num_bytes):
|
||||||
|
"""
|
||||||
|
Get some random bytes from the PRNG as a string.
|
||||||
|
|
||||||
|
This is a wrapper for the C function ``RAND_bytes``.
|
||||||
|
|
||||||
|
:param num_bytes: The number of bytes to fetch.
|
||||||
|
|
||||||
|
:return: A string of random bytes.
|
||||||
|
"""
|
||||||
|
if not isinstance(num_bytes, _integer_types):
|
||||||
|
raise TypeError("num_bytes must be an integer")
|
||||||
|
|
||||||
|
if num_bytes < 0:
|
||||||
|
raise ValueError("num_bytes must not be negative")
|
||||||
|
|
||||||
|
result_buffer = _ffi.new("char[]", num_bytes)
|
||||||
|
result_code = _lib.RAND_bytes(result_buffer, num_bytes)
|
||||||
|
if result_code == -1:
|
||||||
|
# TODO: No tests for this code path. Triggering a RAND_bytes failure
|
||||||
|
# might involve supplying a custom ENGINE? That's hard.
|
||||||
|
_raise_current_error()
|
||||||
|
|
||||||
|
return _ffi.buffer(result_buffer)[:]
|
||||||
|
|
||||||
|
|
||||||
|
def add(buffer, entropy):
|
||||||
|
"""
|
||||||
|
Mix bytes from *string* into the PRNG state.
|
||||||
|
|
||||||
|
The *entropy* argument is (the lower bound of) an estimate of how much
|
||||||
|
randomness is contained in *string*, measured in bytes.
|
||||||
|
|
||||||
|
For more information, see e.g. :rfc:`1750`.
|
||||||
|
|
||||||
|
:param buffer: Buffer with random data.
|
||||||
|
:param entropy: The entropy (in bytes) measurement of the buffer.
|
||||||
|
|
||||||
|
:return: :obj:`None`
|
||||||
|
"""
|
||||||
|
if not isinstance(buffer, _builtin_bytes):
|
||||||
|
raise TypeError("buffer must be a byte string")
|
||||||
|
|
||||||
|
if not isinstance(entropy, int):
|
||||||
|
raise TypeError("entropy must be an integer")
|
||||||
|
|
||||||
|
# TODO Nothing tests this call actually being made, or made properly.
|
||||||
|
_lib.RAND_add(buffer, len(buffer), entropy)
|
||||||
|
|
||||||
|
|
||||||
|
def seed(buffer):
|
||||||
|
"""
|
||||||
|
Equivalent to calling :func:`add` with *entropy* as the length of *buffer*.
|
||||||
|
|
||||||
|
:param buffer: Buffer with random data
|
||||||
|
|
||||||
|
:return: :obj:`None`
|
||||||
|
"""
|
||||||
|
if not isinstance(buffer, _builtin_bytes):
|
||||||
|
raise TypeError("buffer must be a byte string")
|
||||||
|
|
||||||
|
# TODO Nothing tests this call actually being made, or made properly.
|
||||||
|
_lib.RAND_seed(buffer, len(buffer))
|
||||||
|
|
||||||
|
|
||||||
|
def status():
|
||||||
|
"""
|
||||||
|
Check whether the PRNG has been seeded with enough data.
|
||||||
|
|
||||||
|
:return: :obj:`True` if the PRNG is seeded enough, :obj:`False` otherwise.
|
||||||
|
"""
|
||||||
|
return _lib.RAND_status()
|
||||||
|
|
||||||
|
|
||||||
|
def egd(path, bytes=_unspecified):
|
||||||
|
"""
|
||||||
|
Query the system random source and seed the PRNG.
|
||||||
|
|
||||||
|
Does *not* actually query the EGD.
|
||||||
|
|
||||||
|
.. deprecated:: 16.0.0
|
||||||
|
EGD was only necessary for some commercial UNIX systems that all
|
||||||
|
reached their ends of life more than a decade ago. See
|
||||||
|
`pyca/cryptography#1636
|
||||||
|
<https://github.com/pyca/cryptography/pull/1636>`_.
|
||||||
|
|
||||||
|
:param path: Ignored.
|
||||||
|
:param bytes: (optional) The number of bytes to read, default is 255.
|
||||||
|
|
||||||
|
:returns: ``len(bytes)`` or 255 if not specified.
|
||||||
|
"""
|
||||||
|
warnings.warn("OpenSSL.rand.egd() is deprecated as of 16.0.0.",
|
||||||
|
DeprecationWarning)
|
||||||
|
|
||||||
|
if not isinstance(path, _builtin_bytes):
|
||||||
|
raise TypeError("path must be a byte string")
|
||||||
|
|
||||||
|
if bytes is _unspecified:
|
||||||
|
bytes = 255
|
||||||
|
elif not isinstance(bytes, int):
|
||||||
|
raise TypeError("bytes must be an integer")
|
||||||
|
|
||||||
|
seed(os.urandom(bytes))
|
||||||
|
return bytes
|
||||||
|
|
||||||
|
|
||||||
|
def cleanup():
|
||||||
|
"""
|
||||||
|
Erase the memory used by the PRNG.
|
||||||
|
|
||||||
|
This is a wrapper for the C function ``RAND_cleanup``.
|
||||||
|
|
||||||
|
:return: :obj:`None`
|
||||||
|
"""
|
||||||
|
# TODO Nothing tests this call actually being made, or made properly.
|
||||||
|
_lib.RAND_cleanup()
|
||||||
|
|
||||||
|
|
||||||
|
def load_file(filename, maxbytes=_unspecified):
|
||||||
|
"""
|
||||||
|
Read *maxbytes* of data from *filename* and seed the PRNG with it.
|
||||||
|
|
||||||
|
Read the whole file if *maxbytes* is not specified or negative.
|
||||||
|
|
||||||
|
:param filename: The file to read data from (``bytes`` or ``unicode``).
|
||||||
|
:param maxbytes: (optional) The number of bytes to read. Default is to
|
||||||
|
read the entire file.
|
||||||
|
|
||||||
|
:return: The number of bytes read
|
||||||
|
"""
|
||||||
|
filename = _path_string(filename)
|
||||||
|
|
||||||
|
if maxbytes is _unspecified:
|
||||||
|
maxbytes = -1
|
||||||
|
elif not isinstance(maxbytes, int):
|
||||||
|
raise TypeError("maxbytes must be an integer")
|
||||||
|
|
||||||
|
return _lib.RAND_load_file(filename, maxbytes)
|
||||||
|
|
||||||
|
|
||||||
|
def write_file(filename):
|
||||||
|
"""
|
||||||
|
Write a number of random bytes (currently 1024) to the file *path*. This
|
||||||
|
file can then be used with :func:`load_file` to seed the PRNG again.
|
||||||
|
|
||||||
|
:param filename: The file to write data to (``bytes`` or ``unicode``).
|
||||||
|
|
||||||
|
:return: The number of bytes written.
|
||||||
|
"""
|
||||||
|
filename = _path_string(filename)
|
||||||
|
return _lib.RAND_write_file(filename)
|
||||||
|
|
||||||
|
|
||||||
|
# TODO There are no tests for screen at all
|
||||||
|
def screen():
|
||||||
|
"""
|
||||||
|
Add the current contents of the screen to the PRNG state.
|
||||||
|
|
||||||
|
Availability: Windows.
|
||||||
|
|
||||||
|
:return: None
|
||||||
|
"""
|
||||||
|
_lib.RAND_screen()
|
||||||
|
|
||||||
|
if getattr(_lib, 'RAND_screen', None) is None:
|
||||||
|
del screen
|
||||||
|
|
||||||
|
|
||||||
|
# TODO There are no tests for the RAND strings being loaded, whatever that
|
||||||
|
# means.
|
||||||
|
_lib.ERR_load_RAND_strings()
|
24
lib/python3.4/site-packages/OpenSSL/tsafe.py
Normal file
24
lib/python3.4/site-packages/OpenSSL/tsafe.py
Normal file
|
@ -0,0 +1,24 @@
|
||||||
|
from threading import RLock as _RLock
|
||||||
|
|
||||||
|
from OpenSSL import SSL as _ssl
|
||||||
|
|
||||||
|
|
||||||
|
class Connection:
|
||||||
|
def __init__(self, *args):
|
||||||
|
self._ssl_conn = _ssl.Connection(*args)
|
||||||
|
self._lock = _RLock()
|
||||||
|
|
||||||
|
for f in ('get_context', 'pending', 'send', 'write', 'recv', 'read',
|
||||||
|
'renegotiate', 'bind', 'listen', 'connect', 'accept',
|
||||||
|
'setblocking', 'fileno', 'shutdown', 'close', 'get_cipher_list',
|
||||||
|
'getpeername', 'getsockname', 'getsockopt', 'setsockopt',
|
||||||
|
'makefile', 'get_app_data', 'set_app_data', 'state_string',
|
||||||
|
'sock_shutdown', 'get_peer_certificate', 'get_peer_cert_chain',
|
||||||
|
'want_read', 'want_write', 'set_connect_state',
|
||||||
|
'set_accept_state', 'connect_ex', 'sendall'):
|
||||||
|
exec("""def %s(self, *args):
|
||||||
|
self._lock.acquire()
|
||||||
|
try:
|
||||||
|
return self._ssl_conn.%s(*args)
|
||||||
|
finally:
|
||||||
|
self._lock.release()\n""" % (f, f))
|
22
lib/python3.4/site-packages/OpenSSL/version.py
Normal file
22
lib/python3.4/site-packages/OpenSSL/version.py
Normal file
|
@ -0,0 +1,22 @@
|
||||||
|
# Copyright (C) AB Strakt
|
||||||
|
# Copyright (C) Jean-Paul Calderone
|
||||||
|
# See LICENSE for details.
|
||||||
|
|
||||||
|
"""
|
||||||
|
pyOpenSSL - A simple wrapper around the OpenSSL library
|
||||||
|
"""
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"__author__", "__copyright__", "__email__", "__license__", "__summary__",
|
||||||
|
"__title__", "__uri__", "__version__",
|
||||||
|
]
|
||||||
|
|
||||||
|
__version__ = "16.0.0"
|
||||||
|
|
||||||
|
__title__ = "pyOpenSSL"
|
||||||
|
__uri__ = "https://pyopenssl.readthedocs.org/"
|
||||||
|
__summary__ = "Python wrapper module around the OpenSSL library"
|
||||||
|
__author__ = "The pyOpenSSL developers"
|
||||||
|
__email__ = "cryptography-dev@python.org"
|
||||||
|
__license__ = "Apache License, Version 2.0"
|
||||||
|
__copyright__ = "Copyright 2001-2016 {0}".format(__author__)
|
|
@ -0,0 +1,155 @@
|
||||||
|
Metadata-Version: 1.1
|
||||||
|
Name: SQLAlchemy
|
||||||
|
Version: 1.0.12
|
||||||
|
Summary: Database Abstraction Library
|
||||||
|
Home-page: http://www.sqlalchemy.org
|
||||||
|
Author: Mike Bayer
|
||||||
|
Author-email: mike_mp@zzzcomputing.com
|
||||||
|
License: MIT License
|
||||||
|
Description: SQLAlchemy
|
||||||
|
==========
|
||||||
|
|
||||||
|
The Python SQL Toolkit and Object Relational Mapper
|
||||||
|
|
||||||
|
Introduction
|
||||||
|
-------------
|
||||||
|
|
||||||
|
SQLAlchemy is the Python SQL toolkit and Object Relational Mapper
|
||||||
|
that gives application developers the full power and
|
||||||
|
flexibility of SQL. SQLAlchemy provides a full suite
|
||||||
|
of well known enterprise-level persistence patterns,
|
||||||
|
designed for efficient and high-performing database
|
||||||
|
access, adapted into a simple and Pythonic domain
|
||||||
|
language.
|
||||||
|
|
||||||
|
Major SQLAlchemy features include:
|
||||||
|
|
||||||
|
* An industrial strength ORM, built
|
||||||
|
from the core on the identity map, unit of work,
|
||||||
|
and data mapper patterns. These patterns
|
||||||
|
allow transparent persistence of objects
|
||||||
|
using a declarative configuration system.
|
||||||
|
Domain models
|
||||||
|
can be constructed and manipulated naturally,
|
||||||
|
and changes are synchronized with the
|
||||||
|
current transaction automatically.
|
||||||
|
* A relationally-oriented query system, exposing
|
||||||
|
the full range of SQL's capabilities
|
||||||
|
explicitly, including joins, subqueries,
|
||||||
|
correlation, and most everything else,
|
||||||
|
in terms of the object model.
|
||||||
|
Writing queries with the ORM uses the same
|
||||||
|
techniques of relational composition you use
|
||||||
|
when writing SQL. While you can drop into
|
||||||
|
literal SQL at any time, it's virtually never
|
||||||
|
needed.
|
||||||
|
* A comprehensive and flexible system
|
||||||
|
of eager loading for related collections and objects.
|
||||||
|
Collections are cached within a session,
|
||||||
|
and can be loaded on individual access, all
|
||||||
|
at once using joins, or by query per collection
|
||||||
|
across the full result set.
|
||||||
|
* A Core SQL construction system and DBAPI
|
||||||
|
interaction layer. The SQLAlchemy Core is
|
||||||
|
separate from the ORM and is a full database
|
||||||
|
abstraction layer in its own right, and includes
|
||||||
|
an extensible Python-based SQL expression
|
||||||
|
language, schema metadata, connection pooling,
|
||||||
|
type coercion, and custom types.
|
||||||
|
* All primary and foreign key constraints are
|
||||||
|
assumed to be composite and natural. Surrogate
|
||||||
|
integer primary keys are of course still the
|
||||||
|
norm, but SQLAlchemy never assumes or hardcodes
|
||||||
|
to this model.
|
||||||
|
* Database introspection and generation. Database
|
||||||
|
schemas can be "reflected" in one step into
|
||||||
|
Python structures representing database metadata;
|
||||||
|
those same structures can then generate
|
||||||
|
CREATE statements right back out - all within
|
||||||
|
the Core, independent of the ORM.
|
||||||
|
|
||||||
|
SQLAlchemy's philosophy:
|
||||||
|
|
||||||
|
* SQL databases behave less and less like object
|
||||||
|
collections the more size and performance start to
|
||||||
|
matter; object collections behave less and less like
|
||||||
|
tables and rows the more abstraction starts to matter.
|
||||||
|
SQLAlchemy aims to accommodate both of these
|
||||||
|
principles.
|
||||||
|
* An ORM doesn't need to hide the "R". A relational
|
||||||
|
database provides rich, set-based functionality
|
||||||
|
that should be fully exposed. SQLAlchemy's
|
||||||
|
ORM provides an open-ended set of patterns
|
||||||
|
that allow a developer to construct a custom
|
||||||
|
mediation layer between a domain model and
|
||||||
|
a relational schema, turning the so-called
|
||||||
|
"object relational impedance" issue into
|
||||||
|
a distant memory.
|
||||||
|
* The developer, in all cases, makes all decisions
|
||||||
|
regarding the design, structure, and naming conventions
|
||||||
|
of both the object model as well as the relational
|
||||||
|
schema. SQLAlchemy only provides the means
|
||||||
|
to automate the execution of these decisions.
|
||||||
|
* With SQLAlchemy, there's no such thing as
|
||||||
|
"the ORM generated a bad query" - you
|
||||||
|
retain full control over the structure of
|
||||||
|
queries, including how joins are organized,
|
||||||
|
how subqueries and correlation is used, what
|
||||||
|
columns are requested. Everything SQLAlchemy
|
||||||
|
does is ultimately the result of a developer-
|
||||||
|
initiated decision.
|
||||||
|
* Don't use an ORM if the problem doesn't need one.
|
||||||
|
SQLAlchemy consists of a Core and separate ORM
|
||||||
|
component. The Core offers a full SQL expression
|
||||||
|
language that allows Pythonic construction
|
||||||
|
of SQL constructs that render directly to SQL
|
||||||
|
strings for a target database, returning
|
||||||
|
result sets that are essentially enhanced DBAPI
|
||||||
|
cursors.
|
||||||
|
* Transactions should be the norm. With SQLAlchemy's
|
||||||
|
ORM, nothing goes to permanent storage until
|
||||||
|
commit() is called. SQLAlchemy encourages applications
|
||||||
|
to create a consistent means of delineating
|
||||||
|
the start and end of a series of operations.
|
||||||
|
* Never render a literal value in a SQL statement.
|
||||||
|
Bound parameters are used to the greatest degree
|
||||||
|
possible, allowing query optimizers to cache
|
||||||
|
query plans effectively and making SQL injection
|
||||||
|
attacks a non-issue.
|
||||||
|
|
||||||
|
Documentation
|
||||||
|
-------------
|
||||||
|
|
||||||
|
Latest documentation is at:
|
||||||
|
|
||||||
|
http://www.sqlalchemy.org/docs/
|
||||||
|
|
||||||
|
Installation / Requirements
|
||||||
|
---------------------------
|
||||||
|
|
||||||
|
Full documentation for installation is at
|
||||||
|
`Installation <http://www.sqlalchemy.org/docs/intro.html#installation>`_.
|
||||||
|
|
||||||
|
Getting Help / Development / Bug reporting
|
||||||
|
------------------------------------------
|
||||||
|
|
||||||
|
Please refer to the `SQLAlchemy Community Guide <http://www.sqlalchemy.org/support.html>`_.
|
||||||
|
|
||||||
|
License
|
||||||
|
-------
|
||||||
|
|
||||||
|
SQLAlchemy is distributed under the `MIT license
|
||||||
|
<http://www.opensource.org/licenses/mit-license.php>`_.
|
||||||
|
|
||||||
|
|
||||||
|
Platform: UNKNOWN
|
||||||
|
Classifier: Development Status :: 5 - Production/Stable
|
||||||
|
Classifier: Intended Audience :: Developers
|
||||||
|
Classifier: License :: OSI Approved :: MIT License
|
||||||
|
Classifier: Programming Language :: Python
|
||||||
|
Classifier: Programming Language :: Python :: 3
|
||||||
|
Classifier: Programming Language :: Python :: Implementation :: CPython
|
||||||
|
Classifier: Programming Language :: Python :: Implementation :: Jython
|
||||||
|
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
||||||
|
Classifier: Topic :: Database :: Front-Ends
|
||||||
|
Classifier: Operating System :: OS Independent
|
|
@ -0,0 +1,786 @@
|
||||||
|
AUTHORS
|
||||||
|
CHANGES
|
||||||
|
LICENSE
|
||||||
|
MANIFEST.in
|
||||||
|
README.dialects.rst
|
||||||
|
README.rst
|
||||||
|
README.unittests.rst
|
||||||
|
setup.cfg
|
||||||
|
setup.py
|
||||||
|
sqla_nose.py
|
||||||
|
tox.ini
|
||||||
|
doc/contents.html
|
||||||
|
doc/copyright.html
|
||||||
|
doc/genindex.html
|
||||||
|
doc/glossary.html
|
||||||
|
doc/index.html
|
||||||
|
doc/intro.html
|
||||||
|
doc/search.html
|
||||||
|
doc/searchindex.js
|
||||||
|
doc/_images/sqla_arch_small.png
|
||||||
|
doc/_images/sqla_engine_arch.png
|
||||||
|
doc/_modules/index.html
|
||||||
|
doc/_modules/examples/adjacency_list/adjacency_list.html
|
||||||
|
doc/_modules/examples/association/basic_association.html
|
||||||
|
doc/_modules/examples/association/dict_of_sets_with_default.html
|
||||||
|
doc/_modules/examples/association/proxied_association.html
|
||||||
|
doc/_modules/examples/custom_attributes/custom_management.html
|
||||||
|
doc/_modules/examples/custom_attributes/listen_for_events.html
|
||||||
|
doc/_modules/examples/dogpile_caching/advanced.html
|
||||||
|
doc/_modules/examples/dogpile_caching/caching_query.html
|
||||||
|
doc/_modules/examples/dogpile_caching/environment.html
|
||||||
|
doc/_modules/examples/dogpile_caching/fixture_data.html
|
||||||
|
doc/_modules/examples/dogpile_caching/helloworld.html
|
||||||
|
doc/_modules/examples/dogpile_caching/local_session_caching.html
|
||||||
|
doc/_modules/examples/dogpile_caching/model.html
|
||||||
|
doc/_modules/examples/dogpile_caching/relationship_caching.html
|
||||||
|
doc/_modules/examples/dynamic_dict/dynamic_dict.html
|
||||||
|
doc/_modules/examples/elementtree/adjacency_list.html
|
||||||
|
doc/_modules/examples/elementtree/optimized_al.html
|
||||||
|
doc/_modules/examples/elementtree/pickle.html
|
||||||
|
doc/_modules/examples/generic_associations/discriminator_on_association.html
|
||||||
|
doc/_modules/examples/generic_associations/generic_fk.html
|
||||||
|
doc/_modules/examples/generic_associations/table_per_association.html
|
||||||
|
doc/_modules/examples/generic_associations/table_per_related.html
|
||||||
|
doc/_modules/examples/graphs/directed_graph.html
|
||||||
|
doc/_modules/examples/inheritance/concrete.html
|
||||||
|
doc/_modules/examples/inheritance/joined.html
|
||||||
|
doc/_modules/examples/inheritance/single.html
|
||||||
|
doc/_modules/examples/join_conditions/cast.html
|
||||||
|
doc/_modules/examples/join_conditions/threeway.html
|
||||||
|
doc/_modules/examples/large_collection/large_collection.html
|
||||||
|
doc/_modules/examples/materialized_paths/materialized_paths.html
|
||||||
|
doc/_modules/examples/nested_sets/nested_sets.html
|
||||||
|
doc/_modules/examples/performance/__main__.html
|
||||||
|
doc/_modules/examples/performance/bulk_inserts.html
|
||||||
|
doc/_modules/examples/performance/bulk_updates.html
|
||||||
|
doc/_modules/examples/performance/large_resultsets.html
|
||||||
|
doc/_modules/examples/performance/short_selects.html
|
||||||
|
doc/_modules/examples/performance/single_inserts.html
|
||||||
|
doc/_modules/examples/postgis/postgis.html
|
||||||
|
doc/_modules/examples/sharding/attribute_shard.html
|
||||||
|
doc/_modules/examples/versioned_history/history_meta.html
|
||||||
|
doc/_modules/examples/versioned_history/test_versioning.html
|
||||||
|
doc/_modules/examples/versioned_rows/versioned_map.html
|
||||||
|
doc/_modules/examples/versioned_rows/versioned_rows.html
|
||||||
|
doc/_modules/examples/vertical/dictlike-polymorphic.html
|
||||||
|
doc/_modules/examples/vertical/dictlike.html
|
||||||
|
doc/_static/basic.css
|
||||||
|
doc/_static/changelog.css
|
||||||
|
doc/_static/comment-bright.png
|
||||||
|
doc/_static/comment-close.png
|
||||||
|
doc/_static/comment.png
|
||||||
|
doc/_static/detectmobile.js
|
||||||
|
doc/_static/docs.css
|
||||||
|
doc/_static/doctools.js
|
||||||
|
doc/_static/down-pressed.png
|
||||||
|
doc/_static/down.png
|
||||||
|
doc/_static/file.png
|
||||||
|
doc/_static/init.js
|
||||||
|
doc/_static/jquery-1.11.1.js
|
||||||
|
doc/_static/jquery.js
|
||||||
|
doc/_static/minus.png
|
||||||
|
doc/_static/plus.png
|
||||||
|
doc/_static/pygments.css
|
||||||
|
doc/_static/searchtools.js
|
||||||
|
doc/_static/sphinx_paramlinks.css
|
||||||
|
doc/_static/underscore-1.3.1.js
|
||||||
|
doc/_static/underscore.js
|
||||||
|
doc/_static/up-pressed.png
|
||||||
|
doc/_static/up.png
|
||||||
|
doc/_static/websupport.js
|
||||||
|
doc/build/Makefile
|
||||||
|
doc/build/conf.py
|
||||||
|
doc/build/contents.rst
|
||||||
|
doc/build/copyright.rst
|
||||||
|
doc/build/corrections.py
|
||||||
|
doc/build/glossary.rst
|
||||||
|
doc/build/index.rst
|
||||||
|
doc/build/intro.rst
|
||||||
|
doc/build/requirements.txt
|
||||||
|
doc/build/sqla_arch_small.png
|
||||||
|
doc/build/changelog/changelog_01.rst
|
||||||
|
doc/build/changelog/changelog_02.rst
|
||||||
|
doc/build/changelog/changelog_03.rst
|
||||||
|
doc/build/changelog/changelog_04.rst
|
||||||
|
doc/build/changelog/changelog_05.rst
|
||||||
|
doc/build/changelog/changelog_06.rst
|
||||||
|
doc/build/changelog/changelog_07.rst
|
||||||
|
doc/build/changelog/changelog_08.rst
|
||||||
|
doc/build/changelog/changelog_09.rst
|
||||||
|
doc/build/changelog/changelog_10.rst
|
||||||
|
doc/build/changelog/index.rst
|
||||||
|
doc/build/changelog/migration_04.rst
|
||||||
|
doc/build/changelog/migration_05.rst
|
||||||
|
doc/build/changelog/migration_06.rst
|
||||||
|
doc/build/changelog/migration_07.rst
|
||||||
|
doc/build/changelog/migration_08.rst
|
||||||
|
doc/build/changelog/migration_09.rst
|
||||||
|
doc/build/changelog/migration_10.rst
|
||||||
|
doc/build/core/api_basics.rst
|
||||||
|
doc/build/core/compiler.rst
|
||||||
|
doc/build/core/connections.rst
|
||||||
|
doc/build/core/constraints.rst
|
||||||
|
doc/build/core/custom_types.rst
|
||||||
|
doc/build/core/ddl.rst
|
||||||
|
doc/build/core/defaults.rst
|
||||||
|
doc/build/core/dml.rst
|
||||||
|
doc/build/core/engines.rst
|
||||||
|
doc/build/core/engines_connections.rst
|
||||||
|
doc/build/core/event.rst
|
||||||
|
doc/build/core/events.rst
|
||||||
|
doc/build/core/exceptions.rst
|
||||||
|
doc/build/core/expression_api.rst
|
||||||
|
doc/build/core/functions.rst
|
||||||
|
doc/build/core/index.rst
|
||||||
|
doc/build/core/inspection.rst
|
||||||
|
doc/build/core/interfaces.rst
|
||||||
|
doc/build/core/internals.rst
|
||||||
|
doc/build/core/metadata.rst
|
||||||
|
doc/build/core/pooling.rst
|
||||||
|
doc/build/core/reflection.rst
|
||||||
|
doc/build/core/schema.rst
|
||||||
|
doc/build/core/selectable.rst
|
||||||
|
doc/build/core/serializer.rst
|
||||||
|
doc/build/core/sqla_engine_arch.png
|
||||||
|
doc/build/core/sqlelement.rst
|
||||||
|
doc/build/core/tutorial.rst
|
||||||
|
doc/build/core/type_api.rst
|
||||||
|
doc/build/core/type_basics.rst
|
||||||
|
doc/build/core/types.rst
|
||||||
|
doc/build/dialects/firebird.rst
|
||||||
|
doc/build/dialects/index.rst
|
||||||
|
doc/build/dialects/mssql.rst
|
||||||
|
doc/build/dialects/mysql.rst
|
||||||
|
doc/build/dialects/oracle.rst
|
||||||
|
doc/build/dialects/postgresql.rst
|
||||||
|
doc/build/dialects/sqlite.rst
|
||||||
|
doc/build/dialects/sybase.rst
|
||||||
|
doc/build/faq/connections.rst
|
||||||
|
doc/build/faq/index.rst
|
||||||
|
doc/build/faq/metadata_schema.rst
|
||||||
|
doc/build/faq/ormconfiguration.rst
|
||||||
|
doc/build/faq/performance.rst
|
||||||
|
doc/build/faq/sessions.rst
|
||||||
|
doc/build/faq/sqlexpressions.rst
|
||||||
|
doc/build/orm/backref.rst
|
||||||
|
doc/build/orm/basic_relationships.rst
|
||||||
|
doc/build/orm/cascades.rst
|
||||||
|
doc/build/orm/classical.rst
|
||||||
|
doc/build/orm/collections.rst
|
||||||
|
doc/build/orm/composites.rst
|
||||||
|
doc/build/orm/constructors.rst
|
||||||
|
doc/build/orm/contextual.rst
|
||||||
|
doc/build/orm/deprecated.rst
|
||||||
|
doc/build/orm/events.rst
|
||||||
|
doc/build/orm/examples.rst
|
||||||
|
doc/build/orm/exceptions.rst
|
||||||
|
doc/build/orm/extending.rst
|
||||||
|
doc/build/orm/index.rst
|
||||||
|
doc/build/orm/inheritance.rst
|
||||||
|
doc/build/orm/internals.rst
|
||||||
|
doc/build/orm/join_conditions.rst
|
||||||
|
doc/build/orm/loading.rst
|
||||||
|
doc/build/orm/loading_columns.rst
|
||||||
|
doc/build/orm/loading_objects.rst
|
||||||
|
doc/build/orm/loading_relationships.rst
|
||||||
|
doc/build/orm/mapped_attributes.rst
|
||||||
|
doc/build/orm/mapped_sql_expr.rst
|
||||||
|
doc/build/orm/mapper_config.rst
|
||||||
|
doc/build/orm/mapping_api.rst
|
||||||
|
doc/build/orm/mapping_columns.rst
|
||||||
|
doc/build/orm/mapping_styles.rst
|
||||||
|
doc/build/orm/nonstandard_mappings.rst
|
||||||
|
doc/build/orm/persistence_techniques.rst
|
||||||
|
doc/build/orm/query.rst
|
||||||
|
doc/build/orm/relationship_api.rst
|
||||||
|
doc/build/orm/relationship_persistence.rst
|
||||||
|
doc/build/orm/relationships.rst
|
||||||
|
doc/build/orm/scalar_mapping.rst
|
||||||
|
doc/build/orm/self_referential.rst
|
||||||
|
doc/build/orm/session.rst
|
||||||
|
doc/build/orm/session_api.rst
|
||||||
|
doc/build/orm/session_basics.rst
|
||||||
|
doc/build/orm/session_events.rst
|
||||||
|
doc/build/orm/session_state_management.rst
|
||||||
|
doc/build/orm/session_transaction.rst
|
||||||
|
doc/build/orm/tutorial.rst
|
||||||
|
doc/build/orm/versioning.rst
|
||||||
|
doc/build/orm/extensions/associationproxy.rst
|
||||||
|
doc/build/orm/extensions/automap.rst
|
||||||
|
doc/build/orm/extensions/baked.rst
|
||||||
|
doc/build/orm/extensions/horizontal_shard.rst
|
||||||
|
doc/build/orm/extensions/hybrid.rst
|
||||||
|
doc/build/orm/extensions/index.rst
|
||||||
|
doc/build/orm/extensions/instrumentation.rst
|
||||||
|
doc/build/orm/extensions/mutable.rst
|
||||||
|
doc/build/orm/extensions/orderinglist.rst
|
||||||
|
doc/build/orm/extensions/declarative/api.rst
|
||||||
|
doc/build/orm/extensions/declarative/basic_use.rst
|
||||||
|
doc/build/orm/extensions/declarative/index.rst
|
||||||
|
doc/build/orm/extensions/declarative/inheritance.rst
|
||||||
|
doc/build/orm/extensions/declarative/mixins.rst
|
||||||
|
doc/build/orm/extensions/declarative/relationships.rst
|
||||||
|
doc/build/orm/extensions/declarative/table_config.rst
|
||||||
|
doc/build/texinputs/Makefile
|
||||||
|
doc/build/texinputs/sphinx.sty
|
||||||
|
doc/changelog/changelog_01.html
|
||||||
|
doc/changelog/changelog_02.html
|
||||||
|
doc/changelog/changelog_03.html
|
||||||
|
doc/changelog/changelog_04.html
|
||||||
|
doc/changelog/changelog_05.html
|
||||||
|
doc/changelog/changelog_06.html
|
||||||
|
doc/changelog/changelog_07.html
|
||||||
|
doc/changelog/changelog_08.html
|
||||||
|
doc/changelog/changelog_09.html
|
||||||
|
doc/changelog/changelog_10.html
|
||||||
|
doc/changelog/index.html
|
||||||
|
doc/changelog/migration_04.html
|
||||||
|
doc/changelog/migration_05.html
|
||||||
|
doc/changelog/migration_06.html
|
||||||
|
doc/changelog/migration_07.html
|
||||||
|
doc/changelog/migration_08.html
|
||||||
|
doc/changelog/migration_09.html
|
||||||
|
doc/changelog/migration_10.html
|
||||||
|
doc/core/api_basics.html
|
||||||
|
doc/core/compiler.html
|
||||||
|
doc/core/connections.html
|
||||||
|
doc/core/constraints.html
|
||||||
|
doc/core/custom_types.html
|
||||||
|
doc/core/ddl.html
|
||||||
|
doc/core/defaults.html
|
||||||
|
doc/core/dml.html
|
||||||
|
doc/core/engines.html
|
||||||
|
doc/core/engines_connections.html
|
||||||
|
doc/core/event.html
|
||||||
|
doc/core/events.html
|
||||||
|
doc/core/exceptions.html
|
||||||
|
doc/core/expression_api.html
|
||||||
|
doc/core/functions.html
|
||||||
|
doc/core/index.html
|
||||||
|
doc/core/inspection.html
|
||||||
|
doc/core/interfaces.html
|
||||||
|
doc/core/internals.html
|
||||||
|
doc/core/metadata.html
|
||||||
|
doc/core/pooling.html
|
||||||
|
doc/core/reflection.html
|
||||||
|
doc/core/schema.html
|
||||||
|
doc/core/selectable.html
|
||||||
|
doc/core/serializer.html
|
||||||
|
doc/core/sqlelement.html
|
||||||
|
doc/core/tutorial.html
|
||||||
|
doc/core/type_api.html
|
||||||
|
doc/core/type_basics.html
|
||||||
|
doc/core/types.html
|
||||||
|
doc/dialects/firebird.html
|
||||||
|
doc/dialects/index.html
|
||||||
|
doc/dialects/mssql.html
|
||||||
|
doc/dialects/mysql.html
|
||||||
|
doc/dialects/oracle.html
|
||||||
|
doc/dialects/postgresql.html
|
||||||
|
doc/dialects/sqlite.html
|
||||||
|
doc/dialects/sybase.html
|
||||||
|
doc/faq/connections.html
|
||||||
|
doc/faq/index.html
|
||||||
|
doc/faq/metadata_schema.html
|
||||||
|
doc/faq/ormconfiguration.html
|
||||||
|
doc/faq/performance.html
|
||||||
|
doc/faq/sessions.html
|
||||||
|
doc/faq/sqlexpressions.html
|
||||||
|
doc/orm/backref.html
|
||||||
|
doc/orm/basic_relationships.html
|
||||||
|
doc/orm/cascades.html
|
||||||
|
doc/orm/classical.html
|
||||||
|
doc/orm/collections.html
|
||||||
|
doc/orm/composites.html
|
||||||
|
doc/orm/constructors.html
|
||||||
|
doc/orm/contextual.html
|
||||||
|
doc/orm/deprecated.html
|
||||||
|
doc/orm/events.html
|
||||||
|
doc/orm/examples.html
|
||||||
|
doc/orm/exceptions.html
|
||||||
|
doc/orm/extending.html
|
||||||
|
doc/orm/index.html
|
||||||
|
doc/orm/inheritance.html
|
||||||
|
doc/orm/internals.html
|
||||||
|
doc/orm/join_conditions.html
|
||||||
|
doc/orm/loading.html
|
||||||
|
doc/orm/loading_columns.html
|
||||||
|
doc/orm/loading_objects.html
|
||||||
|
doc/orm/loading_relationships.html
|
||||||
|
doc/orm/mapped_attributes.html
|
||||||
|
doc/orm/mapped_sql_expr.html
|
||||||
|
doc/orm/mapper_config.html
|
||||||
|
doc/orm/mapping_api.html
|
||||||
|
doc/orm/mapping_columns.html
|
||||||
|
doc/orm/mapping_styles.html
|
||||||
|
doc/orm/nonstandard_mappings.html
|
||||||
|
doc/orm/persistence_techniques.html
|
||||||
|
doc/orm/query.html
|
||||||
|
doc/orm/relationship_api.html
|
||||||
|
doc/orm/relationship_persistence.html
|
||||||
|
doc/orm/relationships.html
|
||||||
|
doc/orm/scalar_mapping.html
|
||||||
|
doc/orm/self_referential.html
|
||||||
|
doc/orm/session.html
|
||||||
|
doc/orm/session_api.html
|
||||||
|
doc/orm/session_basics.html
|
||||||
|
doc/orm/session_events.html
|
||||||
|
doc/orm/session_state_management.html
|
||||||
|
doc/orm/session_transaction.html
|
||||||
|
doc/orm/tutorial.html
|
||||||
|
doc/orm/versioning.html
|
||||||
|
doc/orm/extensions/associationproxy.html
|
||||||
|
doc/orm/extensions/automap.html
|
||||||
|
doc/orm/extensions/baked.html
|
||||||
|
doc/orm/extensions/horizontal_shard.html
|
||||||
|
doc/orm/extensions/hybrid.html
|
||||||
|
doc/orm/extensions/index.html
|
||||||
|
doc/orm/extensions/instrumentation.html
|
||||||
|
doc/orm/extensions/mutable.html
|
||||||
|
doc/orm/extensions/orderinglist.html
|
||||||
|
doc/orm/extensions/declarative/api.html
|
||||||
|
doc/orm/extensions/declarative/basic_use.html
|
||||||
|
doc/orm/extensions/declarative/index.html
|
||||||
|
doc/orm/extensions/declarative/inheritance.html
|
||||||
|
doc/orm/extensions/declarative/mixins.html
|
||||||
|
doc/orm/extensions/declarative/relationships.html
|
||||||
|
doc/orm/extensions/declarative/table_config.html
|
||||||
|
examples/__init__.py
|
||||||
|
examples/adjacency_list/__init__.py
|
||||||
|
examples/adjacency_list/adjacency_list.py
|
||||||
|
examples/association/__init__.py
|
||||||
|
examples/association/basic_association.py
|
||||||
|
examples/association/dict_of_sets_with_default.py
|
||||||
|
examples/association/proxied_association.py
|
||||||
|
examples/custom_attributes/__init__.py
|
||||||
|
examples/custom_attributes/custom_management.py
|
||||||
|
examples/custom_attributes/listen_for_events.py
|
||||||
|
examples/dogpile_caching/__init__.py
|
||||||
|
examples/dogpile_caching/advanced.py
|
||||||
|
examples/dogpile_caching/caching_query.py
|
||||||
|
examples/dogpile_caching/environment.py
|
||||||
|
examples/dogpile_caching/fixture_data.py
|
||||||
|
examples/dogpile_caching/helloworld.py
|
||||||
|
examples/dogpile_caching/local_session_caching.py
|
||||||
|
examples/dogpile_caching/model.py
|
||||||
|
examples/dogpile_caching/relationship_caching.py
|
||||||
|
examples/dynamic_dict/__init__.py
|
||||||
|
examples/dynamic_dict/dynamic_dict.py
|
||||||
|
examples/elementtree/__init__.py
|
||||||
|
examples/elementtree/adjacency_list.py
|
||||||
|
examples/elementtree/optimized_al.py
|
||||||
|
examples/elementtree/pickle.py
|
||||||
|
examples/elementtree/test.xml
|
||||||
|
examples/elementtree/test2.xml
|
||||||
|
examples/elementtree/test3.xml
|
||||||
|
examples/generic_associations/__init__.py
|
||||||
|
examples/generic_associations/discriminator_on_association.py
|
||||||
|
examples/generic_associations/generic_fk.py
|
||||||
|
examples/generic_associations/table_per_association.py
|
||||||
|
examples/generic_associations/table_per_related.py
|
||||||
|
examples/graphs/__init__.py
|
||||||
|
examples/graphs/directed_graph.py
|
||||||
|
examples/inheritance/__init__.py
|
||||||
|
examples/inheritance/concrete.py
|
||||||
|
examples/inheritance/joined.py
|
||||||
|
examples/inheritance/single.py
|
||||||
|
examples/join_conditions/__init__.py
|
||||||
|
examples/join_conditions/cast.py
|
||||||
|
examples/join_conditions/threeway.py
|
||||||
|
examples/large_collection/__init__.py
|
||||||
|
examples/large_collection/large_collection.py
|
||||||
|
examples/materialized_paths/__init__.py
|
||||||
|
examples/materialized_paths/materialized_paths.py
|
||||||
|
examples/nested_sets/__init__.py
|
||||||
|
examples/nested_sets/nested_sets.py
|
||||||
|
examples/performance/__init__.py
|
||||||
|
examples/performance/__main__.py
|
||||||
|
examples/performance/bulk_inserts.py
|
||||||
|
examples/performance/bulk_updates.py
|
||||||
|
examples/performance/large_resultsets.py
|
||||||
|
examples/performance/short_selects.py
|
||||||
|
examples/performance/single_inserts.py
|
||||||
|
examples/postgis/__init__.py
|
||||||
|
examples/postgis/postgis.py
|
||||||
|
examples/sharding/__init__.py
|
||||||
|
examples/sharding/attribute_shard.py
|
||||||
|
examples/versioned_history/__init__.py
|
||||||
|
examples/versioned_history/history_meta.py
|
||||||
|
examples/versioned_history/test_versioning.py
|
||||||
|
examples/versioned_rows/__init__.py
|
||||||
|
examples/versioned_rows/versioned_map.py
|
||||||
|
examples/versioned_rows/versioned_rows.py
|
||||||
|
examples/vertical/__init__.py
|
||||||
|
examples/vertical/dictlike-polymorphic.py
|
||||||
|
examples/vertical/dictlike.py
|
||||||
|
lib/SQLAlchemy.egg-info/PKG-INFO
|
||||||
|
lib/SQLAlchemy.egg-info/SOURCES.txt
|
||||||
|
lib/SQLAlchemy.egg-info/dependency_links.txt
|
||||||
|
lib/SQLAlchemy.egg-info/top_level.txt
|
||||||
|
lib/sqlalchemy/__init__.py
|
||||||
|
lib/sqlalchemy/events.py
|
||||||
|
lib/sqlalchemy/exc.py
|
||||||
|
lib/sqlalchemy/inspection.py
|
||||||
|
lib/sqlalchemy/interfaces.py
|
||||||
|
lib/sqlalchemy/log.py
|
||||||
|
lib/sqlalchemy/pool.py
|
||||||
|
lib/sqlalchemy/processors.py
|
||||||
|
lib/sqlalchemy/schema.py
|
||||||
|
lib/sqlalchemy/types.py
|
||||||
|
lib/sqlalchemy/cextension/processors.c
|
||||||
|
lib/sqlalchemy/cextension/resultproxy.c
|
||||||
|
lib/sqlalchemy/cextension/utils.c
|
||||||
|
lib/sqlalchemy/connectors/__init__.py
|
||||||
|
lib/sqlalchemy/connectors/mxodbc.py
|
||||||
|
lib/sqlalchemy/connectors/pyodbc.py
|
||||||
|
lib/sqlalchemy/connectors/zxJDBC.py
|
||||||
|
lib/sqlalchemy/databases/__init__.py
|
||||||
|
lib/sqlalchemy/dialects/__init__.py
|
||||||
|
lib/sqlalchemy/dialects/postgres.py
|
||||||
|
lib/sqlalchemy/dialects/type_migration_guidelines.txt
|
||||||
|
lib/sqlalchemy/dialects/firebird/__init__.py
|
||||||
|
lib/sqlalchemy/dialects/firebird/base.py
|
||||||
|
lib/sqlalchemy/dialects/firebird/fdb.py
|
||||||
|
lib/sqlalchemy/dialects/firebird/kinterbasdb.py
|
||||||
|
lib/sqlalchemy/dialects/mssql/__init__.py
|
||||||
|
lib/sqlalchemy/dialects/mssql/adodbapi.py
|
||||||
|
lib/sqlalchemy/dialects/mssql/base.py
|
||||||
|
lib/sqlalchemy/dialects/mssql/information_schema.py
|
||||||
|
lib/sqlalchemy/dialects/mssql/mxodbc.py
|
||||||
|
lib/sqlalchemy/dialects/mssql/pymssql.py
|
||||||
|
lib/sqlalchemy/dialects/mssql/pyodbc.py
|
||||||
|
lib/sqlalchemy/dialects/mssql/zxjdbc.py
|
||||||
|
lib/sqlalchemy/dialects/mysql/__init__.py
|
||||||
|
lib/sqlalchemy/dialects/mysql/base.py
|
||||||
|
lib/sqlalchemy/dialects/mysql/cymysql.py
|
||||||
|
lib/sqlalchemy/dialects/mysql/gaerdbms.py
|
||||||
|
lib/sqlalchemy/dialects/mysql/mysqlconnector.py
|
||||||
|
lib/sqlalchemy/dialects/mysql/mysqldb.py
|
||||||
|
lib/sqlalchemy/dialects/mysql/oursql.py
|
||||||
|
lib/sqlalchemy/dialects/mysql/pymysql.py
|
||||||
|
lib/sqlalchemy/dialects/mysql/pyodbc.py
|
||||||
|
lib/sqlalchemy/dialects/mysql/zxjdbc.py
|
||||||
|
lib/sqlalchemy/dialects/oracle/__init__.py
|
||||||
|
lib/sqlalchemy/dialects/oracle/base.py
|
||||||
|
lib/sqlalchemy/dialects/oracle/cx_oracle.py
|
||||||
|
lib/sqlalchemy/dialects/oracle/zxjdbc.py
|
||||||
|
lib/sqlalchemy/dialects/postgresql/__init__.py
|
||||||
|
lib/sqlalchemy/dialects/postgresql/base.py
|
||||||
|
lib/sqlalchemy/dialects/postgresql/constraints.py
|
||||||
|
lib/sqlalchemy/dialects/postgresql/hstore.py
|
||||||
|
lib/sqlalchemy/dialects/postgresql/json.py
|
||||||
|
lib/sqlalchemy/dialects/postgresql/pg8000.py
|
||||||
|
lib/sqlalchemy/dialects/postgresql/psycopg2.py
|
||||||
|
lib/sqlalchemy/dialects/postgresql/psycopg2cffi.py
|
||||||
|
lib/sqlalchemy/dialects/postgresql/pypostgresql.py
|
||||||
|
lib/sqlalchemy/dialects/postgresql/ranges.py
|
||||||
|
lib/sqlalchemy/dialects/postgresql/zxjdbc.py
|
||||||
|
lib/sqlalchemy/dialects/sqlite/__init__.py
|
||||||
|
lib/sqlalchemy/dialects/sqlite/base.py
|
||||||
|
lib/sqlalchemy/dialects/sqlite/pysqlcipher.py
|
||||||
|
lib/sqlalchemy/dialects/sqlite/pysqlite.py
|
||||||
|
lib/sqlalchemy/dialects/sybase/__init__.py
|
||||||
|
lib/sqlalchemy/dialects/sybase/base.py
|
||||||
|
lib/sqlalchemy/dialects/sybase/mxodbc.py
|
||||||
|
lib/sqlalchemy/dialects/sybase/pyodbc.py
|
||||||
|
lib/sqlalchemy/dialects/sybase/pysybase.py
|
||||||
|
lib/sqlalchemy/engine/__init__.py
|
||||||
|
lib/sqlalchemy/engine/base.py
|
||||||
|
lib/sqlalchemy/engine/default.py
|
||||||
|
lib/sqlalchemy/engine/interfaces.py
|
||||||
|
lib/sqlalchemy/engine/reflection.py
|
||||||
|
lib/sqlalchemy/engine/result.py
|
||||||
|
lib/sqlalchemy/engine/strategies.py
|
||||||
|
lib/sqlalchemy/engine/threadlocal.py
|
||||||
|
lib/sqlalchemy/engine/url.py
|
||||||
|
lib/sqlalchemy/engine/util.py
|
||||||
|
lib/sqlalchemy/event/__init__.py
|
||||||
|
lib/sqlalchemy/event/api.py
|
||||||
|
lib/sqlalchemy/event/attr.py
|
||||||
|
lib/sqlalchemy/event/base.py
|
||||||
|
lib/sqlalchemy/event/legacy.py
|
||||||
|
lib/sqlalchemy/event/registry.py
|
||||||
|
lib/sqlalchemy/ext/__init__.py
|
||||||
|
lib/sqlalchemy/ext/associationproxy.py
|
||||||
|
lib/sqlalchemy/ext/automap.py
|
||||||
|
lib/sqlalchemy/ext/baked.py
|
||||||
|
lib/sqlalchemy/ext/compiler.py
|
||||||
|
lib/sqlalchemy/ext/horizontal_shard.py
|
||||||
|
lib/sqlalchemy/ext/hybrid.py
|
||||||
|
lib/sqlalchemy/ext/instrumentation.py
|
||||||
|
lib/sqlalchemy/ext/mutable.py
|
||||||
|
lib/sqlalchemy/ext/orderinglist.py
|
||||||
|
lib/sqlalchemy/ext/serializer.py
|
||||||
|
lib/sqlalchemy/ext/declarative/__init__.py
|
||||||
|
lib/sqlalchemy/ext/declarative/api.py
|
||||||
|
lib/sqlalchemy/ext/declarative/base.py
|
||||||
|
lib/sqlalchemy/ext/declarative/clsregistry.py
|
||||||
|
lib/sqlalchemy/orm/__init__.py
|
||||||
|
lib/sqlalchemy/orm/attributes.py
|
||||||
|
lib/sqlalchemy/orm/base.py
|
||||||
|
lib/sqlalchemy/orm/collections.py
|
||||||
|
lib/sqlalchemy/orm/dependency.py
|
||||||
|
lib/sqlalchemy/orm/deprecated_interfaces.py
|
||||||
|
lib/sqlalchemy/orm/descriptor_props.py
|
||||||
|
lib/sqlalchemy/orm/dynamic.py
|
||||||
|
lib/sqlalchemy/orm/evaluator.py
|
||||||
|
lib/sqlalchemy/orm/events.py
|
||||||
|
lib/sqlalchemy/orm/exc.py
|
||||||
|
lib/sqlalchemy/orm/identity.py
|
||||||
|
lib/sqlalchemy/orm/instrumentation.py
|
||||||
|
lib/sqlalchemy/orm/interfaces.py
|
||||||
|
lib/sqlalchemy/orm/loading.py
|
||||||
|
lib/sqlalchemy/orm/mapper.py
|
||||||
|
lib/sqlalchemy/orm/path_registry.py
|
||||||
|
lib/sqlalchemy/orm/persistence.py
|
||||||
|
lib/sqlalchemy/orm/properties.py
|
||||||
|
lib/sqlalchemy/orm/query.py
|
||||||
|
lib/sqlalchemy/orm/relationships.py
|
||||||
|
lib/sqlalchemy/orm/scoping.py
|
||||||
|
lib/sqlalchemy/orm/session.py
|
||||||
|
lib/sqlalchemy/orm/state.py
|
||||||
|
lib/sqlalchemy/orm/strategies.py
|
||||||
|
lib/sqlalchemy/orm/strategy_options.py
|
||||||
|
lib/sqlalchemy/orm/sync.py
|
||||||
|
lib/sqlalchemy/orm/unitofwork.py
|
||||||
|
lib/sqlalchemy/orm/util.py
|
||||||
|
lib/sqlalchemy/sql/__init__.py
|
||||||
|
lib/sqlalchemy/sql/annotation.py
|
||||||
|
lib/sqlalchemy/sql/base.py
|
||||||
|
lib/sqlalchemy/sql/compiler.py
|
||||||
|
lib/sqlalchemy/sql/crud.py
|
||||||
|
lib/sqlalchemy/sql/ddl.py
|
||||||
|
lib/sqlalchemy/sql/default_comparator.py
|
||||||
|
lib/sqlalchemy/sql/dml.py
|
||||||
|
lib/sqlalchemy/sql/elements.py
|
||||||
|
lib/sqlalchemy/sql/expression.py
|
||||||
|
lib/sqlalchemy/sql/functions.py
|
||||||
|
lib/sqlalchemy/sql/naming.py
|
||||||
|
lib/sqlalchemy/sql/operators.py
|
||||||
|
lib/sqlalchemy/sql/schema.py
|
||||||
|
lib/sqlalchemy/sql/selectable.py
|
||||||
|
lib/sqlalchemy/sql/sqltypes.py
|
||||||
|
lib/sqlalchemy/sql/type_api.py
|
||||||
|
lib/sqlalchemy/sql/util.py
|
||||||
|
lib/sqlalchemy/sql/visitors.py
|
||||||
|
lib/sqlalchemy/testing/__init__.py
|
||||||
|
lib/sqlalchemy/testing/assertions.py
|
||||||
|
lib/sqlalchemy/testing/assertsql.py
|
||||||
|
lib/sqlalchemy/testing/config.py
|
||||||
|
lib/sqlalchemy/testing/distutils_run.py
|
||||||
|
lib/sqlalchemy/testing/engines.py
|
||||||
|
lib/sqlalchemy/testing/entities.py
|
||||||
|
lib/sqlalchemy/testing/exclusions.py
|
||||||
|
lib/sqlalchemy/testing/fixtures.py
|
||||||
|
lib/sqlalchemy/testing/mock.py
|
||||||
|
lib/sqlalchemy/testing/pickleable.py
|
||||||
|
lib/sqlalchemy/testing/profiling.py
|
||||||
|
lib/sqlalchemy/testing/provision.py
|
||||||
|
lib/sqlalchemy/testing/replay_fixture.py
|
||||||
|
lib/sqlalchemy/testing/requirements.py
|
||||||
|
lib/sqlalchemy/testing/runner.py
|
||||||
|
lib/sqlalchemy/testing/schema.py
|
||||||
|
lib/sqlalchemy/testing/util.py
|
||||||
|
lib/sqlalchemy/testing/warnings.py
|
||||||
|
lib/sqlalchemy/testing/plugin/__init__.py
|
||||||
|
lib/sqlalchemy/testing/plugin/bootstrap.py
|
||||||
|
lib/sqlalchemy/testing/plugin/noseplugin.py
|
||||||
|
lib/sqlalchemy/testing/plugin/plugin_base.py
|
||||||
|
lib/sqlalchemy/testing/plugin/pytestplugin.py
|
||||||
|
lib/sqlalchemy/testing/suite/__init__.py
|
||||||
|
lib/sqlalchemy/testing/suite/test_ddl.py
|
||||||
|
lib/sqlalchemy/testing/suite/test_dialect.py
|
||||||
|
lib/sqlalchemy/testing/suite/test_insert.py
|
||||||
|
lib/sqlalchemy/testing/suite/test_reflection.py
|
||||||
|
lib/sqlalchemy/testing/suite/test_results.py
|
||||||
|
lib/sqlalchemy/testing/suite/test_select.py
|
||||||
|
lib/sqlalchemy/testing/suite/test_sequence.py
|
||||||
|
lib/sqlalchemy/testing/suite/test_types.py
|
||||||
|
lib/sqlalchemy/testing/suite/test_update_delete.py
|
||||||
|
lib/sqlalchemy/util/__init__.py
|
||||||
|
lib/sqlalchemy/util/_collections.py
|
||||||
|
lib/sqlalchemy/util/compat.py
|
||||||
|
lib/sqlalchemy/util/deprecations.py
|
||||||
|
lib/sqlalchemy/util/langhelpers.py
|
||||||
|
lib/sqlalchemy/util/queue.py
|
||||||
|
lib/sqlalchemy/util/topological.py
|
||||||
|
test/__init__.py
|
||||||
|
test/binary_data_one.dat
|
||||||
|
test/binary_data_two.dat
|
||||||
|
test/conftest.py
|
||||||
|
test/requirements.py
|
||||||
|
test/aaa_profiling/__init__.py
|
||||||
|
test/aaa_profiling/test_compiler.py
|
||||||
|
test/aaa_profiling/test_memusage.py
|
||||||
|
test/aaa_profiling/test_orm.py
|
||||||
|
test/aaa_profiling/test_pool.py
|
||||||
|
test/aaa_profiling/test_resultset.py
|
||||||
|
test/aaa_profiling/test_zoomark.py
|
||||||
|
test/aaa_profiling/test_zoomark_orm.py
|
||||||
|
test/base/__init__.py
|
||||||
|
test/base/test_dependency.py
|
||||||
|
test/base/test_events.py
|
||||||
|
test/base/test_except.py
|
||||||
|
test/base/test_inspect.py
|
||||||
|
test/base/test_tutorials.py
|
||||||
|
test/base/test_utils.py
|
||||||
|
test/dialect/__init__.py
|
||||||
|
test/dialect/test_firebird.py
|
||||||
|
test/dialect/test_mxodbc.py
|
||||||
|
test/dialect/test_oracle.py
|
||||||
|
test/dialect/test_pyodbc.py
|
||||||
|
test/dialect/test_sqlite.py
|
||||||
|
test/dialect/test_suite.py
|
||||||
|
test/dialect/test_sybase.py
|
||||||
|
test/dialect/mssql/__init__.py
|
||||||
|
test/dialect/mssql/test_compiler.py
|
||||||
|
test/dialect/mssql/test_engine.py
|
||||||
|
test/dialect/mssql/test_query.py
|
||||||
|
test/dialect/mssql/test_reflection.py
|
||||||
|
test/dialect/mssql/test_types.py
|
||||||
|
test/dialect/mysql/__init__.py
|
||||||
|
test/dialect/mysql/test_compiler.py
|
||||||
|
test/dialect/mysql/test_dialect.py
|
||||||
|
test/dialect/mysql/test_query.py
|
||||||
|
test/dialect/mysql/test_reflection.py
|
||||||
|
test/dialect/mysql/test_types.py
|
||||||
|
test/dialect/postgresql/__init__.py
|
||||||
|
test/dialect/postgresql/test_compiler.py
|
||||||
|
test/dialect/postgresql/test_dialect.py
|
||||||
|
test/dialect/postgresql/test_query.py
|
||||||
|
test/dialect/postgresql/test_reflection.py
|
||||||
|
test/dialect/postgresql/test_types.py
|
||||||
|
test/engine/__init__.py
|
||||||
|
test/engine/test_bind.py
|
||||||
|
test/engine/test_ddlevents.py
|
||||||
|
test/engine/test_execute.py
|
||||||
|
test/engine/test_logging.py
|
||||||
|
test/engine/test_parseconnect.py
|
||||||
|
test/engine/test_pool.py
|
||||||
|
test/engine/test_processors.py
|
||||||
|
test/engine/test_reconnect.py
|
||||||
|
test/engine/test_reflection.py
|
||||||
|
test/engine/test_transaction.py
|
||||||
|
test/ext/__init__.py
|
||||||
|
test/ext/test_associationproxy.py
|
||||||
|
test/ext/test_automap.py
|
||||||
|
test/ext/test_baked.py
|
||||||
|
test/ext/test_compiler.py
|
||||||
|
test/ext/test_extendedattr.py
|
||||||
|
test/ext/test_horizontal_shard.py
|
||||||
|
test/ext/test_hybrid.py
|
||||||
|
test/ext/test_mutable.py
|
||||||
|
test/ext/test_orderinglist.py
|
||||||
|
test/ext/test_serializer.py
|
||||||
|
test/ext/declarative/__init__.py
|
||||||
|
test/ext/declarative/test_basic.py
|
||||||
|
test/ext/declarative/test_clsregistry.py
|
||||||
|
test/ext/declarative/test_inheritance.py
|
||||||
|
test/ext/declarative/test_mixin.py
|
||||||
|
test/ext/declarative/test_reflection.py
|
||||||
|
test/orm/__init__.py
|
||||||
|
test/orm/_fixtures.py
|
||||||
|
test/orm/test_association.py
|
||||||
|
test/orm/test_assorted_eager.py
|
||||||
|
test/orm/test_attributes.py
|
||||||
|
test/orm/test_backref_mutations.py
|
||||||
|
test/orm/test_bind.py
|
||||||
|
test/orm/test_bulk.py
|
||||||
|
test/orm/test_bundle.py
|
||||||
|
test/orm/test_cascade.py
|
||||||
|
test/orm/test_collection.py
|
||||||
|
test/orm/test_compile.py
|
||||||
|
test/orm/test_composites.py
|
||||||
|
test/orm/test_cycles.py
|
||||||
|
test/orm/test_default_strategies.py
|
||||||
|
test/orm/test_defaults.py
|
||||||
|
test/orm/test_deferred.py
|
||||||
|
test/orm/test_deprecations.py
|
||||||
|
test/orm/test_descriptor.py
|
||||||
|
test/orm/test_dynamic.py
|
||||||
|
test/orm/test_eager_relations.py
|
||||||
|
test/orm/test_evaluator.py
|
||||||
|
test/orm/test_events.py
|
||||||
|
test/orm/test_expire.py
|
||||||
|
test/orm/test_froms.py
|
||||||
|
test/orm/test_generative.py
|
||||||
|
test/orm/test_hasparent.py
|
||||||
|
test/orm/test_immediate_load.py
|
||||||
|
test/orm/test_inspect.py
|
||||||
|
test/orm/test_instrumentation.py
|
||||||
|
test/orm/test_joins.py
|
||||||
|
test/orm/test_lazy_relations.py
|
||||||
|
test/orm/test_load_on_fks.py
|
||||||
|
test/orm/test_loading.py
|
||||||
|
test/orm/test_lockmode.py
|
||||||
|
test/orm/test_manytomany.py
|
||||||
|
test/orm/test_mapper.py
|
||||||
|
test/orm/test_merge.py
|
||||||
|
test/orm/test_naturalpks.py
|
||||||
|
test/orm/test_of_type.py
|
||||||
|
test/orm/test_onetoone.py
|
||||||
|
test/orm/test_options.py
|
||||||
|
test/orm/test_pickled.py
|
||||||
|
test/orm/test_query.py
|
||||||
|
test/orm/test_rel_fn.py
|
||||||
|
test/orm/test_relationships.py
|
||||||
|
test/orm/test_scoping.py
|
||||||
|
test/orm/test_selectable.py
|
||||||
|
test/orm/test_session.py
|
||||||
|
test/orm/test_subquery_relations.py
|
||||||
|
test/orm/test_sync.py
|
||||||
|
test/orm/test_transaction.py
|
||||||
|
test/orm/test_unitofwork.py
|
||||||
|
test/orm/test_unitofworkv2.py
|
||||||
|
test/orm/test_update_delete.py
|
||||||
|
test/orm/test_utils.py
|
||||||
|
test/orm/test_validators.py
|
||||||
|
test/orm/test_versioning.py
|
||||||
|
test/orm/inheritance/__init__.py
|
||||||
|
test/orm/inheritance/_poly_fixtures.py
|
||||||
|
test/orm/inheritance/test_abc_inheritance.py
|
||||||
|
test/orm/inheritance/test_abc_polymorphic.py
|
||||||
|
test/orm/inheritance/test_assorted_poly.py
|
||||||
|
test/orm/inheritance/test_basic.py
|
||||||
|
test/orm/inheritance/test_concrete.py
|
||||||
|
test/orm/inheritance/test_magazine.py
|
||||||
|
test/orm/inheritance/test_manytomany.py
|
||||||
|
test/orm/inheritance/test_poly_linked_list.py
|
||||||
|
test/orm/inheritance/test_poly_persistence.py
|
||||||
|
test/orm/inheritance/test_polymorphic_rel.py
|
||||||
|
test/orm/inheritance/test_productspec.py
|
||||||
|
test/orm/inheritance/test_relationship.py
|
||||||
|
test/orm/inheritance/test_selects.py
|
||||||
|
test/orm/inheritance/test_single.py
|
||||||
|
test/orm/inheritance/test_with_poly.py
|
||||||
|
test/perf/invalidate_stresstest.py
|
||||||
|
test/perf/orm2010.py
|
||||||
|
test/sql/__init__.py
|
||||||
|
test/sql/test_case_statement.py
|
||||||
|
test/sql/test_compiler.py
|
||||||
|
test/sql/test_constraints.py
|
||||||
|
test/sql/test_cte.py
|
||||||
|
test/sql/test_ddlemit.py
|
||||||
|
test/sql/test_defaults.py
|
||||||
|
test/sql/test_delete.py
|
||||||
|
test/sql/test_functions.py
|
||||||
|
test/sql/test_generative.py
|
||||||
|
test/sql/test_insert.py
|
||||||
|
test/sql/test_insert_exec.py
|
||||||
|
test/sql/test_inspect.py
|
||||||
|
test/sql/test_join_rewriting.py
|
||||||
|
test/sql/test_labels.py
|
||||||
|
test/sql/test_metadata.py
|
||||||
|
test/sql/test_operators.py
|
||||||
|
test/sql/test_query.py
|
||||||
|
test/sql/test_quote.py
|
||||||
|
test/sql/test_resultset.py
|
||||||
|
test/sql/test_returning.py
|
||||||
|
test/sql/test_rowcount.py
|
||||||
|
test/sql/test_selectable.py
|
||||||
|
test/sql/test_text.py
|
||||||
|
test/sql/test_type_expressions.py
|
||||||
|
test/sql/test_types.py
|
||||||
|
test/sql/test_unicode.py
|
||||||
|
test/sql/test_update.py
|
|
@ -0,0 +1 @@
|
||||||
|
|
|
@ -0,0 +1,374 @@
|
||||||
|
../sqlalchemy/inspection.py
|
||||||
|
../sqlalchemy/events.py
|
||||||
|
../sqlalchemy/interfaces.py
|
||||||
|
../sqlalchemy/schema.py
|
||||||
|
../sqlalchemy/processors.py
|
||||||
|
../sqlalchemy/__init__.py
|
||||||
|
../sqlalchemy/types.py
|
||||||
|
../sqlalchemy/exc.py
|
||||||
|
../sqlalchemy/pool.py
|
||||||
|
../sqlalchemy/log.py
|
||||||
|
../sqlalchemy/sql/compiler.py
|
||||||
|
../sqlalchemy/sql/sqltypes.py
|
||||||
|
../sqlalchemy/sql/type_api.py
|
||||||
|
../sqlalchemy/sql/default_comparator.py
|
||||||
|
../sqlalchemy/sql/crud.py
|
||||||
|
../sqlalchemy/sql/schema.py
|
||||||
|
../sqlalchemy/sql/expression.py
|
||||||
|
../sqlalchemy/sql/__init__.py
|
||||||
|
../sqlalchemy/sql/dml.py
|
||||||
|
../sqlalchemy/sql/operators.py
|
||||||
|
../sqlalchemy/sql/base.py
|
||||||
|
../sqlalchemy/sql/selectable.py
|
||||||
|
../sqlalchemy/sql/util.py
|
||||||
|
../sqlalchemy/sql/visitors.py
|
||||||
|
../sqlalchemy/sql/elements.py
|
||||||
|
../sqlalchemy/sql/functions.py
|
||||||
|
../sqlalchemy/sql/naming.py
|
||||||
|
../sqlalchemy/sql/ddl.py
|
||||||
|
../sqlalchemy/sql/annotation.py
|
||||||
|
../sqlalchemy/engine/threadlocal.py
|
||||||
|
../sqlalchemy/engine/interfaces.py
|
||||||
|
../sqlalchemy/engine/reflection.py
|
||||||
|
../sqlalchemy/engine/__init__.py
|
||||||
|
../sqlalchemy/engine/url.py
|
||||||
|
../sqlalchemy/engine/strategies.py
|
||||||
|
../sqlalchemy/engine/base.py
|
||||||
|
../sqlalchemy/engine/util.py
|
||||||
|
../sqlalchemy/engine/result.py
|
||||||
|
../sqlalchemy/engine/default.py
|
||||||
|
../sqlalchemy/event/__init__.py
|
||||||
|
../sqlalchemy/event/registry.py
|
||||||
|
../sqlalchemy/event/base.py
|
||||||
|
../sqlalchemy/event/attr.py
|
||||||
|
../sqlalchemy/event/api.py
|
||||||
|
../sqlalchemy/event/legacy.py
|
||||||
|
../sqlalchemy/connectors/zxJDBC.py
|
||||||
|
../sqlalchemy/connectors/pyodbc.py
|
||||||
|
../sqlalchemy/connectors/__init__.py
|
||||||
|
../sqlalchemy/connectors/mxodbc.py
|
||||||
|
../sqlalchemy/orm/events.py
|
||||||
|
../sqlalchemy/orm/loading.py
|
||||||
|
../sqlalchemy/orm/scoping.py
|
||||||
|
../sqlalchemy/orm/unitofwork.py
|
||||||
|
../sqlalchemy/orm/instrumentation.py
|
||||||
|
../sqlalchemy/orm/strategy_options.py
|
||||||
|
../sqlalchemy/orm/interfaces.py
|
||||||
|
../sqlalchemy/orm/dynamic.py
|
||||||
|
../sqlalchemy/orm/properties.py
|
||||||
|
../sqlalchemy/orm/__init__.py
|
||||||
|
../sqlalchemy/orm/mapper.py
|
||||||
|
../sqlalchemy/orm/relationships.py
|
||||||
|
../sqlalchemy/orm/strategies.py
|
||||||
|
../sqlalchemy/orm/sync.py
|
||||||
|
../sqlalchemy/orm/state.py
|
||||||
|
../sqlalchemy/orm/base.py
|
||||||
|
../sqlalchemy/orm/util.py
|
||||||
|
../sqlalchemy/orm/exc.py
|
||||||
|
../sqlalchemy/orm/identity.py
|
||||||
|
../sqlalchemy/orm/evaluator.py
|
||||||
|
../sqlalchemy/orm/collections.py
|
||||||
|
../sqlalchemy/orm/descriptor_props.py
|
||||||
|
../sqlalchemy/orm/persistence.py
|
||||||
|
../sqlalchemy/orm/deprecated_interfaces.py
|
||||||
|
../sqlalchemy/orm/attributes.py
|
||||||
|
../sqlalchemy/orm/query.py
|
||||||
|
../sqlalchemy/orm/dependency.py
|
||||||
|
../sqlalchemy/orm/path_registry.py
|
||||||
|
../sqlalchemy/orm/session.py
|
||||||
|
../sqlalchemy/ext/compiler.py
|
||||||
|
../sqlalchemy/ext/automap.py
|
||||||
|
../sqlalchemy/ext/instrumentation.py
|
||||||
|
../sqlalchemy/ext/serializer.py
|
||||||
|
../sqlalchemy/ext/orderinglist.py
|
||||||
|
../sqlalchemy/ext/horizontal_shard.py
|
||||||
|
../sqlalchemy/ext/__init__.py
|
||||||
|
../sqlalchemy/ext/hybrid.py
|
||||||
|
../sqlalchemy/ext/associationproxy.py
|
||||||
|
../sqlalchemy/ext/mutable.py
|
||||||
|
../sqlalchemy/ext/baked.py
|
||||||
|
../sqlalchemy/ext/declarative/__init__.py
|
||||||
|
../sqlalchemy/ext/declarative/base.py
|
||||||
|
../sqlalchemy/ext/declarative/api.py
|
||||||
|
../sqlalchemy/ext/declarative/clsregistry.py
|
||||||
|
../sqlalchemy/databases/__init__.py
|
||||||
|
../sqlalchemy/util/topological.py
|
||||||
|
../sqlalchemy/util/queue.py
|
||||||
|
../sqlalchemy/util/__init__.py
|
||||||
|
../sqlalchemy/util/compat.py
|
||||||
|
../sqlalchemy/util/_collections.py
|
||||||
|
../sqlalchemy/util/deprecations.py
|
||||||
|
../sqlalchemy/util/langhelpers.py
|
||||||
|
../sqlalchemy/testing/engines.py
|
||||||
|
../sqlalchemy/testing/warnings.py
|
||||||
|
../sqlalchemy/testing/profiling.py
|
||||||
|
../sqlalchemy/testing/schema.py
|
||||||
|
../sqlalchemy/testing/replay_fixture.py
|
||||||
|
../sqlalchemy/testing/assertions.py
|
||||||
|
../sqlalchemy/testing/__init__.py
|
||||||
|
../sqlalchemy/testing/pickleable.py
|
||||||
|
../sqlalchemy/testing/exclusions.py
|
||||||
|
../sqlalchemy/testing/distutils_run.py
|
||||||
|
../sqlalchemy/testing/assertsql.py
|
||||||
|
../sqlalchemy/testing/util.py
|
||||||
|
../sqlalchemy/testing/mock.py
|
||||||
|
../sqlalchemy/testing/runner.py
|
||||||
|
../sqlalchemy/testing/fixtures.py
|
||||||
|
../sqlalchemy/testing/provision.py
|
||||||
|
../sqlalchemy/testing/requirements.py
|
||||||
|
../sqlalchemy/testing/entities.py
|
||||||
|
../sqlalchemy/testing/config.py
|
||||||
|
../sqlalchemy/testing/plugin/bootstrap.py
|
||||||
|
../sqlalchemy/testing/plugin/__init__.py
|
||||||
|
../sqlalchemy/testing/plugin/plugin_base.py
|
||||||
|
../sqlalchemy/testing/plugin/pytestplugin.py
|
||||||
|
../sqlalchemy/testing/plugin/noseplugin.py
|
||||||
|
../sqlalchemy/testing/suite/test_types.py
|
||||||
|
../sqlalchemy/testing/suite/test_ddl.py
|
||||||
|
../sqlalchemy/testing/suite/test_sequence.py
|
||||||
|
../sqlalchemy/testing/suite/test_update_delete.py
|
||||||
|
../sqlalchemy/testing/suite/__init__.py
|
||||||
|
../sqlalchemy/testing/suite/test_reflection.py
|
||||||
|
../sqlalchemy/testing/suite/test_dialect.py
|
||||||
|
../sqlalchemy/testing/suite/test_select.py
|
||||||
|
../sqlalchemy/testing/suite/test_results.py
|
||||||
|
../sqlalchemy/testing/suite/test_insert.py
|
||||||
|
../sqlalchemy/dialects/postgres.py
|
||||||
|
../sqlalchemy/dialects/__init__.py
|
||||||
|
../sqlalchemy/dialects/oracle/zxjdbc.py
|
||||||
|
../sqlalchemy/dialects/oracle/cx_oracle.py
|
||||||
|
../sqlalchemy/dialects/oracle/__init__.py
|
||||||
|
../sqlalchemy/dialects/oracle/base.py
|
||||||
|
../sqlalchemy/dialects/mssql/zxjdbc.py
|
||||||
|
../sqlalchemy/dialects/mssql/pyodbc.py
|
||||||
|
../sqlalchemy/dialects/mssql/__init__.py
|
||||||
|
../sqlalchemy/dialects/mssql/mxodbc.py
|
||||||
|
../sqlalchemy/dialects/mssql/base.py
|
||||||
|
../sqlalchemy/dialects/mssql/adodbapi.py
|
||||||
|
../sqlalchemy/dialects/mssql/information_schema.py
|
||||||
|
../sqlalchemy/dialects/mssql/pymssql.py
|
||||||
|
../sqlalchemy/dialects/sybase/pyodbc.py
|
||||||
|
../sqlalchemy/dialects/sybase/__init__.py
|
||||||
|
../sqlalchemy/dialects/sybase/mxodbc.py
|
||||||
|
../sqlalchemy/dialects/sybase/base.py
|
||||||
|
../sqlalchemy/dialects/sybase/pysybase.py
|
||||||
|
../sqlalchemy/dialects/sqlite/pysqlcipher.py
|
||||||
|
../sqlalchemy/dialects/sqlite/__init__.py
|
||||||
|
../sqlalchemy/dialects/sqlite/base.py
|
||||||
|
../sqlalchemy/dialects/sqlite/pysqlite.py
|
||||||
|
../sqlalchemy/dialects/postgresql/ranges.py
|
||||||
|
../sqlalchemy/dialects/postgresql/constraints.py
|
||||||
|
../sqlalchemy/dialects/postgresql/hstore.py
|
||||||
|
../sqlalchemy/dialects/postgresql/psycopg2.py
|
||||||
|
../sqlalchemy/dialects/postgresql/zxjdbc.py
|
||||||
|
../sqlalchemy/dialects/postgresql/__init__.py
|
||||||
|
../sqlalchemy/dialects/postgresql/psycopg2cffi.py
|
||||||
|
../sqlalchemy/dialects/postgresql/base.py
|
||||||
|
../sqlalchemy/dialects/postgresql/pypostgresql.py
|
||||||
|
../sqlalchemy/dialects/postgresql/pg8000.py
|
||||||
|
../sqlalchemy/dialects/postgresql/json.py
|
||||||
|
../sqlalchemy/dialects/mysql/zxjdbc.py
|
||||||
|
../sqlalchemy/dialects/mysql/pyodbc.py
|
||||||
|
../sqlalchemy/dialects/mysql/__init__.py
|
||||||
|
../sqlalchemy/dialects/mysql/pymysql.py
|
||||||
|
../sqlalchemy/dialects/mysql/mysqlconnector.py
|
||||||
|
../sqlalchemy/dialects/mysql/mysqldb.py
|
||||||
|
../sqlalchemy/dialects/mysql/oursql.py
|
||||||
|
../sqlalchemy/dialects/mysql/base.py
|
||||||
|
../sqlalchemy/dialects/mysql/cymysql.py
|
||||||
|
../sqlalchemy/dialects/mysql/gaerdbms.py
|
||||||
|
../sqlalchemy/dialects/firebird/kinterbasdb.py
|
||||||
|
../sqlalchemy/dialects/firebird/__init__.py
|
||||||
|
../sqlalchemy/dialects/firebird/base.py
|
||||||
|
../sqlalchemy/dialects/firebird/fdb.py
|
||||||
|
../sqlalchemy/__pycache__/inspection.cpython-34.pyc
|
||||||
|
../sqlalchemy/__pycache__/events.cpython-34.pyc
|
||||||
|
../sqlalchemy/__pycache__/interfaces.cpython-34.pyc
|
||||||
|
../sqlalchemy/__pycache__/schema.cpython-34.pyc
|
||||||
|
../sqlalchemy/__pycache__/processors.cpython-34.pyc
|
||||||
|
../sqlalchemy/__pycache__/__init__.cpython-34.pyc
|
||||||
|
../sqlalchemy/__pycache__/types.cpython-34.pyc
|
||||||
|
../sqlalchemy/__pycache__/exc.cpython-34.pyc
|
||||||
|
../sqlalchemy/__pycache__/pool.cpython-34.pyc
|
||||||
|
../sqlalchemy/__pycache__/log.cpython-34.pyc
|
||||||
|
../sqlalchemy/sql/__pycache__/compiler.cpython-34.pyc
|
||||||
|
../sqlalchemy/sql/__pycache__/sqltypes.cpython-34.pyc
|
||||||
|
../sqlalchemy/sql/__pycache__/type_api.cpython-34.pyc
|
||||||
|
../sqlalchemy/sql/__pycache__/default_comparator.cpython-34.pyc
|
||||||
|
../sqlalchemy/sql/__pycache__/crud.cpython-34.pyc
|
||||||
|
../sqlalchemy/sql/__pycache__/schema.cpython-34.pyc
|
||||||
|
../sqlalchemy/sql/__pycache__/expression.cpython-34.pyc
|
||||||
|
../sqlalchemy/sql/__pycache__/__init__.cpython-34.pyc
|
||||||
|
../sqlalchemy/sql/__pycache__/dml.cpython-34.pyc
|
||||||
|
../sqlalchemy/sql/__pycache__/operators.cpython-34.pyc
|
||||||
|
../sqlalchemy/sql/__pycache__/base.cpython-34.pyc
|
||||||
|
../sqlalchemy/sql/__pycache__/selectable.cpython-34.pyc
|
||||||
|
../sqlalchemy/sql/__pycache__/util.cpython-34.pyc
|
||||||
|
../sqlalchemy/sql/__pycache__/visitors.cpython-34.pyc
|
||||||
|
../sqlalchemy/sql/__pycache__/elements.cpython-34.pyc
|
||||||
|
../sqlalchemy/sql/__pycache__/functions.cpython-34.pyc
|
||||||
|
../sqlalchemy/sql/__pycache__/naming.cpython-34.pyc
|
||||||
|
../sqlalchemy/sql/__pycache__/ddl.cpython-34.pyc
|
||||||
|
../sqlalchemy/sql/__pycache__/annotation.cpython-34.pyc
|
||||||
|
../sqlalchemy/engine/__pycache__/threadlocal.cpython-34.pyc
|
||||||
|
../sqlalchemy/engine/__pycache__/interfaces.cpython-34.pyc
|
||||||
|
../sqlalchemy/engine/__pycache__/reflection.cpython-34.pyc
|
||||||
|
../sqlalchemy/engine/__pycache__/__init__.cpython-34.pyc
|
||||||
|
../sqlalchemy/engine/__pycache__/url.cpython-34.pyc
|
||||||
|
../sqlalchemy/engine/__pycache__/strategies.cpython-34.pyc
|
||||||
|
../sqlalchemy/engine/__pycache__/base.cpython-34.pyc
|
||||||
|
../sqlalchemy/engine/__pycache__/util.cpython-34.pyc
|
||||||
|
../sqlalchemy/engine/__pycache__/result.cpython-34.pyc
|
||||||
|
../sqlalchemy/engine/__pycache__/default.cpython-34.pyc
|
||||||
|
../sqlalchemy/event/__pycache__/__init__.cpython-34.pyc
|
||||||
|
../sqlalchemy/event/__pycache__/registry.cpython-34.pyc
|
||||||
|
../sqlalchemy/event/__pycache__/base.cpython-34.pyc
|
||||||
|
../sqlalchemy/event/__pycache__/attr.cpython-34.pyc
|
||||||
|
../sqlalchemy/event/__pycache__/api.cpython-34.pyc
|
||||||
|
../sqlalchemy/event/__pycache__/legacy.cpython-34.pyc
|
||||||
|
../sqlalchemy/connectors/__pycache__/zxJDBC.cpython-34.pyc
|
||||||
|
../sqlalchemy/connectors/__pycache__/pyodbc.cpython-34.pyc
|
||||||
|
../sqlalchemy/connectors/__pycache__/__init__.cpython-34.pyc
|
||||||
|
../sqlalchemy/connectors/__pycache__/mxodbc.cpython-34.pyc
|
||||||
|
../sqlalchemy/orm/__pycache__/events.cpython-34.pyc
|
||||||
|
../sqlalchemy/orm/__pycache__/loading.cpython-34.pyc
|
||||||
|
../sqlalchemy/orm/__pycache__/scoping.cpython-34.pyc
|
||||||
|
../sqlalchemy/orm/__pycache__/unitofwork.cpython-34.pyc
|
||||||
|
../sqlalchemy/orm/__pycache__/instrumentation.cpython-34.pyc
|
||||||
|
../sqlalchemy/orm/__pycache__/strategy_options.cpython-34.pyc
|
||||||
|
../sqlalchemy/orm/__pycache__/interfaces.cpython-34.pyc
|
||||||
|
../sqlalchemy/orm/__pycache__/dynamic.cpython-34.pyc
|
||||||
|
../sqlalchemy/orm/__pycache__/properties.cpython-34.pyc
|
||||||
|
../sqlalchemy/orm/__pycache__/__init__.cpython-34.pyc
|
||||||
|
../sqlalchemy/orm/__pycache__/mapper.cpython-34.pyc
|
||||||
|
../sqlalchemy/orm/__pycache__/relationships.cpython-34.pyc
|
||||||
|
../sqlalchemy/orm/__pycache__/strategies.cpython-34.pyc
|
||||||
|
../sqlalchemy/orm/__pycache__/sync.cpython-34.pyc
|
||||||
|
../sqlalchemy/orm/__pycache__/state.cpython-34.pyc
|
||||||
|
../sqlalchemy/orm/__pycache__/base.cpython-34.pyc
|
||||||
|
../sqlalchemy/orm/__pycache__/util.cpython-34.pyc
|
||||||
|
../sqlalchemy/orm/__pycache__/exc.cpython-34.pyc
|
||||||
|
../sqlalchemy/orm/__pycache__/identity.cpython-34.pyc
|
||||||
|
../sqlalchemy/orm/__pycache__/evaluator.cpython-34.pyc
|
||||||
|
../sqlalchemy/orm/__pycache__/collections.cpython-34.pyc
|
||||||
|
../sqlalchemy/orm/__pycache__/descriptor_props.cpython-34.pyc
|
||||||
|
../sqlalchemy/orm/__pycache__/persistence.cpython-34.pyc
|
||||||
|
../sqlalchemy/orm/__pycache__/deprecated_interfaces.cpython-34.pyc
|
||||||
|
../sqlalchemy/orm/__pycache__/attributes.cpython-34.pyc
|
||||||
|
../sqlalchemy/orm/__pycache__/query.cpython-34.pyc
|
||||||
|
../sqlalchemy/orm/__pycache__/dependency.cpython-34.pyc
|
||||||
|
../sqlalchemy/orm/__pycache__/path_registry.cpython-34.pyc
|
||||||
|
../sqlalchemy/orm/__pycache__/session.cpython-34.pyc
|
||||||
|
../sqlalchemy/ext/__pycache__/compiler.cpython-34.pyc
|
||||||
|
../sqlalchemy/ext/__pycache__/automap.cpython-34.pyc
|
||||||
|
../sqlalchemy/ext/__pycache__/instrumentation.cpython-34.pyc
|
||||||
|
../sqlalchemy/ext/__pycache__/serializer.cpython-34.pyc
|
||||||
|
../sqlalchemy/ext/__pycache__/orderinglist.cpython-34.pyc
|
||||||
|
../sqlalchemy/ext/__pycache__/horizontal_shard.cpython-34.pyc
|
||||||
|
../sqlalchemy/ext/__pycache__/__init__.cpython-34.pyc
|
||||||
|
../sqlalchemy/ext/__pycache__/hybrid.cpython-34.pyc
|
||||||
|
../sqlalchemy/ext/__pycache__/associationproxy.cpython-34.pyc
|
||||||
|
../sqlalchemy/ext/__pycache__/mutable.cpython-34.pyc
|
||||||
|
../sqlalchemy/ext/__pycache__/baked.cpython-34.pyc
|
||||||
|
../sqlalchemy/ext/declarative/__pycache__/__init__.cpython-34.pyc
|
||||||
|
../sqlalchemy/ext/declarative/__pycache__/base.cpython-34.pyc
|
||||||
|
../sqlalchemy/ext/declarative/__pycache__/api.cpython-34.pyc
|
||||||
|
../sqlalchemy/ext/declarative/__pycache__/clsregistry.cpython-34.pyc
|
||||||
|
../sqlalchemy/databases/__pycache__/__init__.cpython-34.pyc
|
||||||
|
../sqlalchemy/util/__pycache__/topological.cpython-34.pyc
|
||||||
|
../sqlalchemy/util/__pycache__/queue.cpython-34.pyc
|
||||||
|
../sqlalchemy/util/__pycache__/__init__.cpython-34.pyc
|
||||||
|
../sqlalchemy/util/__pycache__/compat.cpython-34.pyc
|
||||||
|
../sqlalchemy/util/__pycache__/_collections.cpython-34.pyc
|
||||||
|
../sqlalchemy/util/__pycache__/deprecations.cpython-34.pyc
|
||||||
|
../sqlalchemy/util/__pycache__/langhelpers.cpython-34.pyc
|
||||||
|
../sqlalchemy/testing/__pycache__/engines.cpython-34.pyc
|
||||||
|
../sqlalchemy/testing/__pycache__/warnings.cpython-34.pyc
|
||||||
|
../sqlalchemy/testing/__pycache__/profiling.cpython-34.pyc
|
||||||
|
../sqlalchemy/testing/__pycache__/schema.cpython-34.pyc
|
||||||
|
../sqlalchemy/testing/__pycache__/replay_fixture.cpython-34.pyc
|
||||||
|
../sqlalchemy/testing/__pycache__/assertions.cpython-34.pyc
|
||||||
|
../sqlalchemy/testing/__pycache__/__init__.cpython-34.pyc
|
||||||
|
../sqlalchemy/testing/__pycache__/pickleable.cpython-34.pyc
|
||||||
|
../sqlalchemy/testing/__pycache__/exclusions.cpython-34.pyc
|
||||||
|
../sqlalchemy/testing/__pycache__/distutils_run.cpython-34.pyc
|
||||||
|
../sqlalchemy/testing/__pycache__/assertsql.cpython-34.pyc
|
||||||
|
../sqlalchemy/testing/__pycache__/util.cpython-34.pyc
|
||||||
|
../sqlalchemy/testing/__pycache__/mock.cpython-34.pyc
|
||||||
|
../sqlalchemy/testing/__pycache__/runner.cpython-34.pyc
|
||||||
|
../sqlalchemy/testing/__pycache__/fixtures.cpython-34.pyc
|
||||||
|
../sqlalchemy/testing/__pycache__/provision.cpython-34.pyc
|
||||||
|
../sqlalchemy/testing/__pycache__/requirements.cpython-34.pyc
|
||||||
|
../sqlalchemy/testing/__pycache__/entities.cpython-34.pyc
|
||||||
|
../sqlalchemy/testing/__pycache__/config.cpython-34.pyc
|
||||||
|
../sqlalchemy/testing/plugin/__pycache__/bootstrap.cpython-34.pyc
|
||||||
|
../sqlalchemy/testing/plugin/__pycache__/__init__.cpython-34.pyc
|
||||||
|
../sqlalchemy/testing/plugin/__pycache__/plugin_base.cpython-34.pyc
|
||||||
|
../sqlalchemy/testing/plugin/__pycache__/pytestplugin.cpython-34.pyc
|
||||||
|
../sqlalchemy/testing/plugin/__pycache__/noseplugin.cpython-34.pyc
|
||||||
|
../sqlalchemy/testing/suite/__pycache__/test_types.cpython-34.pyc
|
||||||
|
../sqlalchemy/testing/suite/__pycache__/test_ddl.cpython-34.pyc
|
||||||
|
../sqlalchemy/testing/suite/__pycache__/test_sequence.cpython-34.pyc
|
||||||
|
../sqlalchemy/testing/suite/__pycache__/test_update_delete.cpython-34.pyc
|
||||||
|
../sqlalchemy/testing/suite/__pycache__/__init__.cpython-34.pyc
|
||||||
|
../sqlalchemy/testing/suite/__pycache__/test_reflection.cpython-34.pyc
|
||||||
|
../sqlalchemy/testing/suite/__pycache__/test_dialect.cpython-34.pyc
|
||||||
|
../sqlalchemy/testing/suite/__pycache__/test_select.cpython-34.pyc
|
||||||
|
../sqlalchemy/testing/suite/__pycache__/test_results.cpython-34.pyc
|
||||||
|
../sqlalchemy/testing/suite/__pycache__/test_insert.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/__pycache__/postgres.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/__pycache__/__init__.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/oracle/__pycache__/zxjdbc.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/oracle/__pycache__/cx_oracle.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/oracle/__pycache__/__init__.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/oracle/__pycache__/base.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/mssql/__pycache__/zxjdbc.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/mssql/__pycache__/pyodbc.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/mssql/__pycache__/__init__.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/mssql/__pycache__/mxodbc.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/mssql/__pycache__/base.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/mssql/__pycache__/adodbapi.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/mssql/__pycache__/information_schema.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/mssql/__pycache__/pymssql.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/sybase/__pycache__/pyodbc.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/sybase/__pycache__/__init__.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/sybase/__pycache__/mxodbc.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/sybase/__pycache__/base.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/sybase/__pycache__/pysybase.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/sqlite/__pycache__/pysqlcipher.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/sqlite/__pycache__/__init__.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/sqlite/__pycache__/base.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/sqlite/__pycache__/pysqlite.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/postgresql/__pycache__/ranges.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/postgresql/__pycache__/constraints.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/postgresql/__pycache__/hstore.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/postgresql/__pycache__/psycopg2.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/postgresql/__pycache__/zxjdbc.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/postgresql/__pycache__/__init__.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/postgresql/__pycache__/psycopg2cffi.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/postgresql/__pycache__/base.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/postgresql/__pycache__/pypostgresql.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/postgresql/__pycache__/pg8000.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/postgresql/__pycache__/json.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/mysql/__pycache__/zxjdbc.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/mysql/__pycache__/pyodbc.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/mysql/__pycache__/__init__.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/mysql/__pycache__/pymysql.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/mysql/__pycache__/mysqlconnector.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/mysql/__pycache__/mysqldb.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/mysql/__pycache__/oursql.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/mysql/__pycache__/base.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/mysql/__pycache__/cymysql.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/mysql/__pycache__/gaerdbms.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/firebird/__pycache__/kinterbasdb.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/firebird/__pycache__/__init__.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/firebird/__pycache__/base.cpython-34.pyc
|
||||||
|
../sqlalchemy/dialects/firebird/__pycache__/fdb.cpython-34.pyc
|
||||||
|
../sqlalchemy/cprocessors.cpython-34m.so
|
||||||
|
../sqlalchemy/cresultproxy.cpython-34m.so
|
||||||
|
../sqlalchemy/cutils.cpython-34m.so
|
||||||
|
./
|
||||||
|
PKG-INFO
|
||||||
|
dependency_links.txt
|
||||||
|
SOURCES.txt
|
||||||
|
top_level.txt
|
|
@ -0,0 +1 @@
|
||||||
|
sqlalchemy
|
BIN
lib/python3.4/site-packages/_cffi_backend.cpython-34m.so
Executable file
BIN
lib/python3.4/site-packages/_cffi_backend.cpython-34m.so
Executable file
Binary file not shown.
16
lib/python3.4/site-packages/_markerlib/__init__.py
Normal file
16
lib/python3.4/site-packages/_markerlib/__init__.py
Normal file
|
@ -0,0 +1,16 @@
|
||||||
|
try:
|
||||||
|
import ast
|
||||||
|
from _markerlib.markers import default_environment, compile, interpret
|
||||||
|
except ImportError:
|
||||||
|
if 'ast' in globals():
|
||||||
|
raise
|
||||||
|
def default_environment():
|
||||||
|
return {}
|
||||||
|
def compile(marker):
|
||||||
|
def marker_fn(environment=None, override=None):
|
||||||
|
# 'empty markers are True' heuristic won't install extra deps.
|
||||||
|
return not marker.strip()
|
||||||
|
marker_fn.__doc__ = marker
|
||||||
|
return marker_fn
|
||||||
|
def interpret(marker, environment=None, override=None):
|
||||||
|
return compile(marker)()
|
119
lib/python3.4/site-packages/_markerlib/markers.py
Normal file
119
lib/python3.4/site-packages/_markerlib/markers.py
Normal file
|
@ -0,0 +1,119 @@
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
"""Interpret PEP 345 environment markers.
|
||||||
|
|
||||||
|
EXPR [in|==|!=|not in] EXPR [or|and] ...
|
||||||
|
|
||||||
|
where EXPR belongs to any of those:
|
||||||
|
|
||||||
|
python_version = '%s.%s' % (sys.version_info[0], sys.version_info[1])
|
||||||
|
python_full_version = sys.version.split()[0]
|
||||||
|
os.name = os.name
|
||||||
|
sys.platform = sys.platform
|
||||||
|
platform.version = platform.version()
|
||||||
|
platform.machine = platform.machine()
|
||||||
|
platform.python_implementation = platform.python_implementation()
|
||||||
|
a free string, like '2.6', or 'win32'
|
||||||
|
"""
|
||||||
|
|
||||||
|
__all__ = ['default_environment', 'compile', 'interpret']
|
||||||
|
|
||||||
|
import ast
|
||||||
|
import os
|
||||||
|
import platform
|
||||||
|
import sys
|
||||||
|
import weakref
|
||||||
|
|
||||||
|
_builtin_compile = compile
|
||||||
|
|
||||||
|
try:
|
||||||
|
from platform import python_implementation
|
||||||
|
except ImportError:
|
||||||
|
if os.name == "java":
|
||||||
|
# Jython 2.5 has ast module, but not platform.python_implementation() function.
|
||||||
|
def python_implementation():
|
||||||
|
return "Jython"
|
||||||
|
else:
|
||||||
|
raise
|
||||||
|
|
||||||
|
|
||||||
|
# restricted set of variables
|
||||||
|
_VARS = {'sys.platform': sys.platform,
|
||||||
|
'python_version': '%s.%s' % sys.version_info[:2],
|
||||||
|
# FIXME parsing sys.platform is not reliable, but there is no other
|
||||||
|
# way to get e.g. 2.7.2+, and the PEP is defined with sys.version
|
||||||
|
'python_full_version': sys.version.split(' ', 1)[0],
|
||||||
|
'os.name': os.name,
|
||||||
|
'platform.version': platform.version(),
|
||||||
|
'platform.machine': platform.machine(),
|
||||||
|
'platform.python_implementation': python_implementation(),
|
||||||
|
'extra': None # wheel extension
|
||||||
|
}
|
||||||
|
|
||||||
|
for var in list(_VARS.keys()):
|
||||||
|
if '.' in var:
|
||||||
|
_VARS[var.replace('.', '_')] = _VARS[var]
|
||||||
|
|
||||||
|
def default_environment():
|
||||||
|
"""Return copy of default PEP 385 globals dictionary."""
|
||||||
|
return dict(_VARS)
|
||||||
|
|
||||||
|
class ASTWhitelist(ast.NodeTransformer):
|
||||||
|
def __init__(self, statement):
|
||||||
|
self.statement = statement # for error messages
|
||||||
|
|
||||||
|
ALLOWED = (ast.Compare, ast.BoolOp, ast.Attribute, ast.Name, ast.Load, ast.Str)
|
||||||
|
# Bool operations
|
||||||
|
ALLOWED += (ast.And, ast.Or)
|
||||||
|
# Comparison operations
|
||||||
|
ALLOWED += (ast.Eq, ast.Gt, ast.GtE, ast.In, ast.Is, ast.IsNot, ast.Lt, ast.LtE, ast.NotEq, ast.NotIn)
|
||||||
|
|
||||||
|
def visit(self, node):
|
||||||
|
"""Ensure statement only contains allowed nodes."""
|
||||||
|
if not isinstance(node, self.ALLOWED):
|
||||||
|
raise SyntaxError('Not allowed in environment markers.\n%s\n%s' %
|
||||||
|
(self.statement,
|
||||||
|
(' ' * node.col_offset) + '^'))
|
||||||
|
return ast.NodeTransformer.visit(self, node)
|
||||||
|
|
||||||
|
def visit_Attribute(self, node):
|
||||||
|
"""Flatten one level of attribute access."""
|
||||||
|
new_node = ast.Name("%s.%s" % (node.value.id, node.attr), node.ctx)
|
||||||
|
return ast.copy_location(new_node, node)
|
||||||
|
|
||||||
|
def parse_marker(marker):
|
||||||
|
tree = ast.parse(marker, mode='eval')
|
||||||
|
new_tree = ASTWhitelist(marker).generic_visit(tree)
|
||||||
|
return new_tree
|
||||||
|
|
||||||
|
def compile_marker(parsed_marker):
|
||||||
|
return _builtin_compile(parsed_marker, '<environment marker>', 'eval',
|
||||||
|
dont_inherit=True)
|
||||||
|
|
||||||
|
_cache = weakref.WeakValueDictionary()
|
||||||
|
|
||||||
|
def compile(marker):
|
||||||
|
"""Return compiled marker as a function accepting an environment dict."""
|
||||||
|
try:
|
||||||
|
return _cache[marker]
|
||||||
|
except KeyError:
|
||||||
|
pass
|
||||||
|
if not marker.strip():
|
||||||
|
def marker_fn(environment=None, override=None):
|
||||||
|
""""""
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
compiled_marker = compile_marker(parse_marker(marker))
|
||||||
|
def marker_fn(environment=None, override=None):
|
||||||
|
"""override updates environment"""
|
||||||
|
if override is None:
|
||||||
|
override = {}
|
||||||
|
if environment is None:
|
||||||
|
environment = default_environment()
|
||||||
|
environment.update(override)
|
||||||
|
return eval(compiled_marker, environment)
|
||||||
|
marker_fn.__doc__ = marker
|
||||||
|
_cache[marker] = marker_fn
|
||||||
|
return _cache[marker]
|
||||||
|
|
||||||
|
def interpret(marker, environment=None):
|
||||||
|
return compile(marker)(environment)
|
|
@ -0,0 +1,31 @@
|
||||||
|
Metadata-Version: 1.1
|
||||||
|
Name: cffi
|
||||||
|
Version: 1.7.0
|
||||||
|
Summary: Foreign Function Interface for Python calling C code.
|
||||||
|
Home-page: http://cffi.readthedocs.org
|
||||||
|
Author: Armin Rigo, Maciej Fijalkowski
|
||||||
|
Author-email: python-cffi@googlegroups.com
|
||||||
|
License: MIT
|
||||||
|
Description:
|
||||||
|
CFFI
|
||||||
|
====
|
||||||
|
|
||||||
|
Foreign Function Interface for Python calling C code.
|
||||||
|
Please see the `Documentation <http://cffi.readthedocs.org/>`_.
|
||||||
|
|
||||||
|
Contact
|
||||||
|
-------
|
||||||
|
|
||||||
|
`Mailing list <https://groups.google.com/forum/#!forum/python-cffi>`_
|
||||||
|
|
||||||
|
Platform: UNKNOWN
|
||||||
|
Classifier: Programming Language :: Python
|
||||||
|
Classifier: Programming Language :: Python :: 2
|
||||||
|
Classifier: Programming Language :: Python :: 2.6
|
||||||
|
Classifier: Programming Language :: Python :: 2.7
|
||||||
|
Classifier: Programming Language :: Python :: 3
|
||||||
|
Classifier: Programming Language :: Python :: 3.2
|
||||||
|
Classifier: Programming Language :: Python :: 3.3
|
||||||
|
Classifier: Programming Language :: Python :: 3.4
|
||||||
|
Classifier: Programming Language :: Python :: Implementation :: CPython
|
||||||
|
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
|
@ -0,0 +1,180 @@
|
||||||
|
AUTHORS
|
||||||
|
LICENSE
|
||||||
|
MANIFEST.in
|
||||||
|
setup.cfg
|
||||||
|
setup.py
|
||||||
|
setup_base.py
|
||||||
|
c/_cffi_backend.c
|
||||||
|
c/call_python.c
|
||||||
|
c/cdlopen.c
|
||||||
|
c/cffi1_module.c
|
||||||
|
c/cglob.c
|
||||||
|
c/commontypes.c
|
||||||
|
c/ffi_obj.c
|
||||||
|
c/file_emulator.h
|
||||||
|
c/lib_obj.c
|
||||||
|
c/malloc_closure.h
|
||||||
|
c/minibuffer.h
|
||||||
|
c/misc_thread_common.h
|
||||||
|
c/misc_thread_posix.h
|
||||||
|
c/misc_win32.h
|
||||||
|
c/parse_c_type.c
|
||||||
|
c/realize_c_type.c
|
||||||
|
c/test_c.py
|
||||||
|
c/wchar_helper.h
|
||||||
|
c/libffi_msvc/ffi.c
|
||||||
|
c/libffi_msvc/ffi.h
|
||||||
|
c/libffi_msvc/ffi_common.h
|
||||||
|
c/libffi_msvc/fficonfig.h
|
||||||
|
c/libffi_msvc/ffitarget.h
|
||||||
|
c/libffi_msvc/prep_cif.c
|
||||||
|
c/libffi_msvc/types.c
|
||||||
|
c/libffi_msvc/win32.c
|
||||||
|
c/libffi_msvc/win64.asm
|
||||||
|
c/libffi_msvc/win64.obj
|
||||||
|
cffi/__init__.py
|
||||||
|
cffi/_cffi_include.h
|
||||||
|
cffi/_embedding.h
|
||||||
|
cffi/api.py
|
||||||
|
cffi/backend_ctypes.py
|
||||||
|
cffi/cffi_opcode.py
|
||||||
|
cffi/commontypes.py
|
||||||
|
cffi/cparser.py
|
||||||
|
cffi/ffiplatform.py
|
||||||
|
cffi/lock.py
|
||||||
|
cffi/model.py
|
||||||
|
cffi/parse_c_type.h
|
||||||
|
cffi/recompiler.py
|
||||||
|
cffi/setuptools_ext.py
|
||||||
|
cffi/vengine_cpy.py
|
||||||
|
cffi/vengine_gen.py
|
||||||
|
cffi/verifier.py
|
||||||
|
cffi.egg-info/PKG-INFO
|
||||||
|
cffi.egg-info/SOURCES.txt
|
||||||
|
cffi.egg-info/dependency_links.txt
|
||||||
|
cffi.egg-info/entry_points.txt
|
||||||
|
cffi.egg-info/not-zip-safe
|
||||||
|
cffi.egg-info/requires.txt
|
||||||
|
cffi.egg-info/top_level.txt
|
||||||
|
demo/_curses.py
|
||||||
|
demo/_curses_build.py
|
||||||
|
demo/_curses_setup.py
|
||||||
|
demo/api.py
|
||||||
|
demo/bsdopendirtype.py
|
||||||
|
demo/bsdopendirtype_build.py
|
||||||
|
demo/bsdopendirtype_setup.py
|
||||||
|
demo/btrfs-snap.py
|
||||||
|
demo/cffi-cocoa.py
|
||||||
|
demo/embedding.py
|
||||||
|
demo/embedding_test.c
|
||||||
|
demo/extern_python.py
|
||||||
|
demo/extern_python_varargs.py
|
||||||
|
demo/fastcsv.py
|
||||||
|
demo/gmp.py
|
||||||
|
demo/gmp_build.py
|
||||||
|
demo/manual.c
|
||||||
|
demo/manual2.py
|
||||||
|
demo/pwuid.py
|
||||||
|
demo/pwuid_build.py
|
||||||
|
demo/py.cleanup
|
||||||
|
demo/pyobj.py
|
||||||
|
demo/readdir.py
|
||||||
|
demo/readdir2.py
|
||||||
|
demo/readdir2_build.py
|
||||||
|
demo/readdir2_setup.py
|
||||||
|
demo/readdir_build.py
|
||||||
|
demo/readdir_ctypes.py
|
||||||
|
demo/readdir_setup.py
|
||||||
|
demo/recopendirtype.py
|
||||||
|
demo/recopendirtype_build.py
|
||||||
|
demo/setup_manual.py
|
||||||
|
demo/winclipboard.py
|
||||||
|
demo/winclipboard_build.py
|
||||||
|
demo/xclient.py
|
||||||
|
demo/xclient_build.py
|
||||||
|
doc/Makefile
|
||||||
|
doc/make.bat
|
||||||
|
doc/misc/design.rst
|
||||||
|
doc/misc/grant-cffi-1.0.rst
|
||||||
|
doc/misc/parse_c_type.rst
|
||||||
|
doc/source/cdef.rst
|
||||||
|
doc/source/conf.py
|
||||||
|
doc/source/embedding.rst
|
||||||
|
doc/source/index.rst
|
||||||
|
doc/source/installation.rst
|
||||||
|
doc/source/overview.rst
|
||||||
|
doc/source/ref.rst
|
||||||
|
doc/source/using.rst
|
||||||
|
doc/source/whatsnew.rst
|
||||||
|
testing/__init__.py
|
||||||
|
testing/support.py
|
||||||
|
testing/udir.py
|
||||||
|
testing/cffi0/__init__.py
|
||||||
|
testing/cffi0/backend_tests.py
|
||||||
|
testing/cffi0/callback_in_thread.py
|
||||||
|
testing/cffi0/test_cdata.py
|
||||||
|
testing/cffi0/test_ctypes.py
|
||||||
|
testing/cffi0/test_ffi_backend.py
|
||||||
|
testing/cffi0/test_function.py
|
||||||
|
testing/cffi0/test_model.py
|
||||||
|
testing/cffi0/test_ownlib.py
|
||||||
|
testing/cffi0/test_parsing.py
|
||||||
|
testing/cffi0/test_platform.py
|
||||||
|
testing/cffi0/test_unicode_literals.py
|
||||||
|
testing/cffi0/test_verify.py
|
||||||
|
testing/cffi0/test_verify2.py
|
||||||
|
testing/cffi0/test_version.py
|
||||||
|
testing/cffi0/test_vgen.py
|
||||||
|
testing/cffi0/test_vgen2.py
|
||||||
|
testing/cffi0/test_zdistutils.py
|
||||||
|
testing/cffi0/test_zintegration.py
|
||||||
|
testing/cffi0/snippets/distutils_module/setup.py
|
||||||
|
testing/cffi0/snippets/distutils_module/snip_basic_verify.py
|
||||||
|
testing/cffi0/snippets/distutils_package_1/setup.py
|
||||||
|
testing/cffi0/snippets/distutils_package_1/snip_basic_verify1/__init__.py
|
||||||
|
testing/cffi0/snippets/distutils_package_2/setup.py
|
||||||
|
testing/cffi0/snippets/distutils_package_2/snip_basic_verify2/__init__.py
|
||||||
|
testing/cffi0/snippets/infrastructure/setup.py
|
||||||
|
testing/cffi0/snippets/infrastructure/snip_infrastructure/__init__.py
|
||||||
|
testing/cffi0/snippets/setuptools_module/setup.py
|
||||||
|
testing/cffi0/snippets/setuptools_module/snip_setuptools_verify.py
|
||||||
|
testing/cffi0/snippets/setuptools_package_1/setup.py
|
||||||
|
testing/cffi0/snippets/setuptools_package_1/snip_setuptools_verify1/__init__.py
|
||||||
|
testing/cffi0/snippets/setuptools_package_2/setup.py
|
||||||
|
testing/cffi0/snippets/setuptools_package_2/snip_setuptools_verify2/__init__.py
|
||||||
|
testing/cffi1/__init__.py
|
||||||
|
testing/cffi1/test_cffi_binary.py
|
||||||
|
testing/cffi1/test_commontypes.py
|
||||||
|
testing/cffi1/test_dlopen.py
|
||||||
|
testing/cffi1/test_dlopen_unicode_literals.py
|
||||||
|
testing/cffi1/test_ffi_obj.py
|
||||||
|
testing/cffi1/test_new_ffi_1.py
|
||||||
|
testing/cffi1/test_parse_c_type.py
|
||||||
|
testing/cffi1/test_re_python.py
|
||||||
|
testing/cffi1/test_realize_c_type.py
|
||||||
|
testing/cffi1/test_recompiler.py
|
||||||
|
testing/cffi1/test_unicode_literals.py
|
||||||
|
testing/cffi1/test_verify1.py
|
||||||
|
testing/cffi1/test_zdist.py
|
||||||
|
testing/embedding/__init__.py
|
||||||
|
testing/embedding/add1-test.c
|
||||||
|
testing/embedding/add1.py
|
||||||
|
testing/embedding/add2-test.c
|
||||||
|
testing/embedding/add2.py
|
||||||
|
testing/embedding/add3.py
|
||||||
|
testing/embedding/add_recursive-test.c
|
||||||
|
testing/embedding/add_recursive.py
|
||||||
|
testing/embedding/empty.py
|
||||||
|
testing/embedding/perf-test.c
|
||||||
|
testing/embedding/perf.py
|
||||||
|
testing/embedding/test_basic.py
|
||||||
|
testing/embedding/test_performance.py
|
||||||
|
testing/embedding/test_recursive.py
|
||||||
|
testing/embedding/test_thread.py
|
||||||
|
testing/embedding/test_tlocal.py
|
||||||
|
testing/embedding/thread-test.h
|
||||||
|
testing/embedding/thread1-test.c
|
||||||
|
testing/embedding/thread2-test.c
|
||||||
|
testing/embedding/thread3-test.c
|
||||||
|
testing/embedding/tlocal-test.c
|
||||||
|
testing/embedding/tlocal.py
|
|
@ -0,0 +1 @@
|
||||||
|
|
|
@ -0,0 +1,3 @@
|
||||||
|
[distutils.setup_keywords]
|
||||||
|
cffi_modules = cffi.setuptools_ext:cffi_modules
|
||||||
|
|
|
@ -0,0 +1,40 @@
|
||||||
|
../cffi/vengine_cpy.py
|
||||||
|
../cffi/backend_ctypes.py
|
||||||
|
../cffi/lock.py
|
||||||
|
../cffi/model.py
|
||||||
|
../cffi/cparser.py
|
||||||
|
../cffi/commontypes.py
|
||||||
|
../cffi/__init__.py
|
||||||
|
../cffi/recompiler.py
|
||||||
|
../cffi/ffiplatform.py
|
||||||
|
../cffi/verifier.py
|
||||||
|
../cffi/vengine_gen.py
|
||||||
|
../cffi/cffi_opcode.py
|
||||||
|
../cffi/setuptools_ext.py
|
||||||
|
../cffi/api.py
|
||||||
|
../cffi/_cffi_include.h
|
||||||
|
../cffi/parse_c_type.h
|
||||||
|
../cffi/_embedding.h
|
||||||
|
../cffi/__pycache__/vengine_cpy.cpython-34.pyc
|
||||||
|
../cffi/__pycache__/backend_ctypes.cpython-34.pyc
|
||||||
|
../cffi/__pycache__/lock.cpython-34.pyc
|
||||||
|
../cffi/__pycache__/model.cpython-34.pyc
|
||||||
|
../cffi/__pycache__/cparser.cpython-34.pyc
|
||||||
|
../cffi/__pycache__/commontypes.cpython-34.pyc
|
||||||
|
../cffi/__pycache__/__init__.cpython-34.pyc
|
||||||
|
../cffi/__pycache__/recompiler.cpython-34.pyc
|
||||||
|
../cffi/__pycache__/ffiplatform.cpython-34.pyc
|
||||||
|
../cffi/__pycache__/verifier.cpython-34.pyc
|
||||||
|
../cffi/__pycache__/vengine_gen.cpython-34.pyc
|
||||||
|
../cffi/__pycache__/cffi_opcode.cpython-34.pyc
|
||||||
|
../cffi/__pycache__/setuptools_ext.cpython-34.pyc
|
||||||
|
../cffi/__pycache__/api.cpython-34.pyc
|
||||||
|
../_cffi_backend.cpython-34m.so
|
||||||
|
./
|
||||||
|
PKG-INFO
|
||||||
|
requires.txt
|
||||||
|
not-zip-safe
|
||||||
|
dependency_links.txt
|
||||||
|
SOURCES.txt
|
||||||
|
top_level.txt
|
||||||
|
entry_points.txt
|
|
@ -0,0 +1 @@
|
||||||
|
|
|
@ -0,0 +1 @@
|
||||||
|
pycparser
|
|
@ -0,0 +1,2 @@
|
||||||
|
_cffi_backend
|
||||||
|
cffi
|
13
lib/python3.4/site-packages/cffi/__init__.py
Normal file
13
lib/python3.4/site-packages/cffi/__init__.py
Normal file
|
@ -0,0 +1,13 @@
|
||||||
|
__all__ = ['FFI', 'VerificationError', 'VerificationMissing', 'CDefError',
|
||||||
|
'FFIError']
|
||||||
|
|
||||||
|
from .api import FFI, CDefError, FFIError
|
||||||
|
from .ffiplatform import VerificationError, VerificationMissing
|
||||||
|
|
||||||
|
__version__ = "1.7.0"
|
||||||
|
__version_info__ = (1, 7, 0)
|
||||||
|
|
||||||
|
# The verifier module file names are based on the CRC32 of a string that
|
||||||
|
# contains the following version number. It may be older than __version__
|
||||||
|
# if nothing is clearly incompatible.
|
||||||
|
__version_verifier_modules__ = "0.8.6"
|
248
lib/python3.4/site-packages/cffi/_cffi_include.h
Normal file
248
lib/python3.4/site-packages/cffi/_cffi_include.h
Normal file
|
@ -0,0 +1,248 @@
|
||||||
|
#define _CFFI_
|
||||||
|
#include <Python.h>
|
||||||
|
#ifdef __cplusplus
|
||||||
|
extern "C" {
|
||||||
|
#endif
|
||||||
|
#include <stddef.h>
|
||||||
|
#include "parse_c_type.h"
|
||||||
|
|
||||||
|
/* this block of #ifs should be kept exactly identical between
|
||||||
|
c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py
|
||||||
|
and cffi/_cffi_include.h */
|
||||||
|
#if defined(_MSC_VER)
|
||||||
|
# include <malloc.h> /* for alloca() */
|
||||||
|
# if _MSC_VER < 1600 /* MSVC < 2010 */
|
||||||
|
typedef __int8 int8_t;
|
||||||
|
typedef __int16 int16_t;
|
||||||
|
typedef __int32 int32_t;
|
||||||
|
typedef __int64 int64_t;
|
||||||
|
typedef unsigned __int8 uint8_t;
|
||||||
|
typedef unsigned __int16 uint16_t;
|
||||||
|
typedef unsigned __int32 uint32_t;
|
||||||
|
typedef unsigned __int64 uint64_t;
|
||||||
|
typedef __int8 int_least8_t;
|
||||||
|
typedef __int16 int_least16_t;
|
||||||
|
typedef __int32 int_least32_t;
|
||||||
|
typedef __int64 int_least64_t;
|
||||||
|
typedef unsigned __int8 uint_least8_t;
|
||||||
|
typedef unsigned __int16 uint_least16_t;
|
||||||
|
typedef unsigned __int32 uint_least32_t;
|
||||||
|
typedef unsigned __int64 uint_least64_t;
|
||||||
|
typedef __int8 int_fast8_t;
|
||||||
|
typedef __int16 int_fast16_t;
|
||||||
|
typedef __int32 int_fast32_t;
|
||||||
|
typedef __int64 int_fast64_t;
|
||||||
|
typedef unsigned __int8 uint_fast8_t;
|
||||||
|
typedef unsigned __int16 uint_fast16_t;
|
||||||
|
typedef unsigned __int32 uint_fast32_t;
|
||||||
|
typedef unsigned __int64 uint_fast64_t;
|
||||||
|
typedef __int64 intmax_t;
|
||||||
|
typedef unsigned __int64 uintmax_t;
|
||||||
|
# else
|
||||||
|
# include <stdint.h>
|
||||||
|
# endif
|
||||||
|
# if _MSC_VER < 1800 /* MSVC < 2013 */
|
||||||
|
typedef unsigned char _Bool;
|
||||||
|
# endif
|
||||||
|
#else
|
||||||
|
# include <stdint.h>
|
||||||
|
# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) || defined(__hpux)
|
||||||
|
# include <alloca.h>
|
||||||
|
# endif
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef __GNUC__
|
||||||
|
# define _CFFI_UNUSED_FN __attribute__((unused))
|
||||||
|
#else
|
||||||
|
# define _CFFI_UNUSED_FN /* nothing */
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef __cplusplus
|
||||||
|
# ifndef _Bool
|
||||||
|
# define _Bool bool /* semi-hackish: C++ has no _Bool; bool is builtin */
|
||||||
|
# endif
|
||||||
|
#endif
|
||||||
|
|
||||||
|
/********** CPython-specific section **********/
|
||||||
|
#ifndef PYPY_VERSION
|
||||||
|
|
||||||
|
|
||||||
|
#if PY_MAJOR_VERSION >= 3
|
||||||
|
# define PyInt_FromLong PyLong_FromLong
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#define _cffi_from_c_double PyFloat_FromDouble
|
||||||
|
#define _cffi_from_c_float PyFloat_FromDouble
|
||||||
|
#define _cffi_from_c_long PyInt_FromLong
|
||||||
|
#define _cffi_from_c_ulong PyLong_FromUnsignedLong
|
||||||
|
#define _cffi_from_c_longlong PyLong_FromLongLong
|
||||||
|
#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong
|
||||||
|
|
||||||
|
#define _cffi_to_c_double PyFloat_AsDouble
|
||||||
|
#define _cffi_to_c_float PyFloat_AsDouble
|
||||||
|
|
||||||
|
#define _cffi_from_c_int(x, type) \
|
||||||
|
(((type)-1) > 0 ? /* unsigned */ \
|
||||||
|
(sizeof(type) < sizeof(long) ? \
|
||||||
|
PyInt_FromLong((long)x) : \
|
||||||
|
sizeof(type) == sizeof(long) ? \
|
||||||
|
PyLong_FromUnsignedLong((unsigned long)x) : \
|
||||||
|
PyLong_FromUnsignedLongLong((unsigned long long)x)) : \
|
||||||
|
(sizeof(type) <= sizeof(long) ? \
|
||||||
|
PyInt_FromLong((long)x) : \
|
||||||
|
PyLong_FromLongLong((long long)x)))
|
||||||
|
|
||||||
|
#define _cffi_to_c_int(o, type) \
|
||||||
|
((type)( \
|
||||||
|
sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \
|
||||||
|
: (type)_cffi_to_c_i8(o)) : \
|
||||||
|
sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \
|
||||||
|
: (type)_cffi_to_c_i16(o)) : \
|
||||||
|
sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \
|
||||||
|
: (type)_cffi_to_c_i32(o)) : \
|
||||||
|
sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o) \
|
||||||
|
: (type)_cffi_to_c_i64(o)) : \
|
||||||
|
(Py_FatalError("unsupported size for type " #type), (type)0)))
|
||||||
|
|
||||||
|
#define _cffi_to_c_i8 \
|
||||||
|
((int(*)(PyObject *))_cffi_exports[1])
|
||||||
|
#define _cffi_to_c_u8 \
|
||||||
|
((int(*)(PyObject *))_cffi_exports[2])
|
||||||
|
#define _cffi_to_c_i16 \
|
||||||
|
((int(*)(PyObject *))_cffi_exports[3])
|
||||||
|
#define _cffi_to_c_u16 \
|
||||||
|
((int(*)(PyObject *))_cffi_exports[4])
|
||||||
|
#define _cffi_to_c_i32 \
|
||||||
|
((int(*)(PyObject *))_cffi_exports[5])
|
||||||
|
#define _cffi_to_c_u32 \
|
||||||
|
((unsigned int(*)(PyObject *))_cffi_exports[6])
|
||||||
|
#define _cffi_to_c_i64 \
|
||||||
|
((long long(*)(PyObject *))_cffi_exports[7])
|
||||||
|
#define _cffi_to_c_u64 \
|
||||||
|
((unsigned long long(*)(PyObject *))_cffi_exports[8])
|
||||||
|
#define _cffi_to_c_char \
|
||||||
|
((int(*)(PyObject *))_cffi_exports[9])
|
||||||
|
#define _cffi_from_c_pointer \
|
||||||
|
((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[10])
|
||||||
|
#define _cffi_to_c_pointer \
|
||||||
|
((char *(*)(PyObject *, CTypeDescrObject *))_cffi_exports[11])
|
||||||
|
#define _cffi_get_struct_layout \
|
||||||
|
not used any more
|
||||||
|
#define _cffi_restore_errno \
|
||||||
|
((void(*)(void))_cffi_exports[13])
|
||||||
|
#define _cffi_save_errno \
|
||||||
|
((void(*)(void))_cffi_exports[14])
|
||||||
|
#define _cffi_from_c_char \
|
||||||
|
((PyObject *(*)(char))_cffi_exports[15])
|
||||||
|
#define _cffi_from_c_deref \
|
||||||
|
((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[16])
|
||||||
|
#define _cffi_to_c \
|
||||||
|
((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[17])
|
||||||
|
#define _cffi_from_c_struct \
|
||||||
|
((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[18])
|
||||||
|
#define _cffi_to_c_wchar_t \
|
||||||
|
((wchar_t(*)(PyObject *))_cffi_exports[19])
|
||||||
|
#define _cffi_from_c_wchar_t \
|
||||||
|
((PyObject *(*)(wchar_t))_cffi_exports[20])
|
||||||
|
#define _cffi_to_c_long_double \
|
||||||
|
((long double(*)(PyObject *))_cffi_exports[21])
|
||||||
|
#define _cffi_to_c__Bool \
|
||||||
|
((_Bool(*)(PyObject *))_cffi_exports[22])
|
||||||
|
#define _cffi_prepare_pointer_call_argument \
|
||||||
|
((Py_ssize_t(*)(CTypeDescrObject *, PyObject *, char **))_cffi_exports[23])
|
||||||
|
#define _cffi_convert_array_from_object \
|
||||||
|
((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[24])
|
||||||
|
#define _CFFI_CPIDX 25
|
||||||
|
#define _cffi_call_python \
|
||||||
|
((void(*)(struct _cffi_externpy_s *, char *))_cffi_exports[_CFFI_CPIDX])
|
||||||
|
#define _CFFI_NUM_EXPORTS 26
|
||||||
|
|
||||||
|
typedef struct _ctypedescr CTypeDescrObject;
|
||||||
|
|
||||||
|
static void *_cffi_exports[_CFFI_NUM_EXPORTS];
|
||||||
|
|
||||||
|
#define _cffi_type(index) ( \
|
||||||
|
assert((((uintptr_t)_cffi_types[index]) & 1) == 0), \
|
||||||
|
(CTypeDescrObject *)_cffi_types[index])
|
||||||
|
|
||||||
|
static PyObject *_cffi_init(const char *module_name, Py_ssize_t version,
|
||||||
|
const struct _cffi_type_context_s *ctx)
|
||||||
|
{
|
||||||
|
PyObject *module, *o_arg, *new_module;
|
||||||
|
void *raw[] = {
|
||||||
|
(void *)module_name,
|
||||||
|
(void *)version,
|
||||||
|
(void *)_cffi_exports,
|
||||||
|
(void *)ctx,
|
||||||
|
};
|
||||||
|
|
||||||
|
module = PyImport_ImportModule("_cffi_backend");
|
||||||
|
if (module == NULL)
|
||||||
|
goto failure;
|
||||||
|
|
||||||
|
o_arg = PyLong_FromVoidPtr((void *)raw);
|
||||||
|
if (o_arg == NULL)
|
||||||
|
goto failure;
|
||||||
|
|
||||||
|
new_module = PyObject_CallMethod(
|
||||||
|
module, (char *)"_init_cffi_1_0_external_module", (char *)"O", o_arg);
|
||||||
|
|
||||||
|
Py_DECREF(o_arg);
|
||||||
|
Py_DECREF(module);
|
||||||
|
return new_module;
|
||||||
|
|
||||||
|
failure:
|
||||||
|
Py_XDECREF(module);
|
||||||
|
return NULL;
|
||||||
|
}
|
||||||
|
|
||||||
|
_CFFI_UNUSED_FN
|
||||||
|
static PyObject **_cffi_unpack_args(PyObject *args_tuple, Py_ssize_t expected,
|
||||||
|
const char *fnname)
|
||||||
|
{
|
||||||
|
if (PyTuple_GET_SIZE(args_tuple) != expected) {
|
||||||
|
PyErr_Format(PyExc_TypeError,
|
||||||
|
"%.150s() takes exactly %zd arguments (%zd given)",
|
||||||
|
fnname, expected, PyTuple_GET_SIZE(args_tuple));
|
||||||
|
return NULL;
|
||||||
|
}
|
||||||
|
return &PyTuple_GET_ITEM(args_tuple, 0); /* pointer to the first item,
|
||||||
|
the others follow */
|
||||||
|
}
|
||||||
|
|
||||||
|
/********** end CPython-specific section **********/
|
||||||
|
#else
|
||||||
|
_CFFI_UNUSED_FN
|
||||||
|
static void (*_cffi_call_python_org)(struct _cffi_externpy_s *, char *);
|
||||||
|
# define _cffi_call_python _cffi_call_python_org
|
||||||
|
#endif
|
||||||
|
|
||||||
|
|
||||||
|
#define _cffi_array_len(array) (sizeof(array) / sizeof((array)[0]))
|
||||||
|
|
||||||
|
#define _cffi_prim_int(size, sign) \
|
||||||
|
((size) == 1 ? ((sign) ? _CFFI_PRIM_INT8 : _CFFI_PRIM_UINT8) : \
|
||||||
|
(size) == 2 ? ((sign) ? _CFFI_PRIM_INT16 : _CFFI_PRIM_UINT16) : \
|
||||||
|
(size) == 4 ? ((sign) ? _CFFI_PRIM_INT32 : _CFFI_PRIM_UINT32) : \
|
||||||
|
(size) == 8 ? ((sign) ? _CFFI_PRIM_INT64 : _CFFI_PRIM_UINT64) : \
|
||||||
|
_CFFI__UNKNOWN_PRIM)
|
||||||
|
|
||||||
|
#define _cffi_prim_float(size) \
|
||||||
|
((size) == sizeof(float) ? _CFFI_PRIM_FLOAT : \
|
||||||
|
(size) == sizeof(double) ? _CFFI_PRIM_DOUBLE : \
|
||||||
|
(size) == sizeof(long double) ? _CFFI__UNKNOWN_LONG_DOUBLE : \
|
||||||
|
_CFFI__UNKNOWN_FLOAT_PRIM)
|
||||||
|
|
||||||
|
#define _cffi_check_int(got, got_nonpos, expected) \
|
||||||
|
((got_nonpos) == (expected <= 0) && \
|
||||||
|
(got) == (unsigned long long)expected)
|
||||||
|
|
||||||
|
#ifdef MS_WIN32
|
||||||
|
# define _cffi_stdcall __stdcall
|
||||||
|
#else
|
||||||
|
# define _cffi_stdcall /* nothing */
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef __cplusplus
|
||||||
|
}
|
||||||
|
#endif
|
517
lib/python3.4/site-packages/cffi/_embedding.h
Normal file
517
lib/python3.4/site-packages/cffi/_embedding.h
Normal file
|
@ -0,0 +1,517 @@
|
||||||
|
|
||||||
|
/***** Support code for embedding *****/
|
||||||
|
|
||||||
|
#if defined(_MSC_VER)
|
||||||
|
# define CFFI_DLLEXPORT __declspec(dllexport)
|
||||||
|
#elif defined(__GNUC__)
|
||||||
|
# define CFFI_DLLEXPORT __attribute__((visibility("default")))
|
||||||
|
#else
|
||||||
|
# define CFFI_DLLEXPORT /* nothing */
|
||||||
|
#endif
|
||||||
|
|
||||||
|
|
||||||
|
/* There are two global variables of type _cffi_call_python_fnptr:
|
||||||
|
|
||||||
|
* _cffi_call_python, which we declare just below, is the one called
|
||||||
|
by ``extern "Python"`` implementations.
|
||||||
|
|
||||||
|
* _cffi_call_python_org, which on CPython is actually part of the
|
||||||
|
_cffi_exports[] array, is the function pointer copied from
|
||||||
|
_cffi_backend.
|
||||||
|
|
||||||
|
After initialization is complete, both are equal. However, the
|
||||||
|
first one remains equal to &_cffi_start_and_call_python until the
|
||||||
|
very end of initialization, when we are (or should be) sure that
|
||||||
|
concurrent threads also see a completely initialized world, and
|
||||||
|
only then is it changed.
|
||||||
|
*/
|
||||||
|
#undef _cffi_call_python
|
||||||
|
typedef void (*_cffi_call_python_fnptr)(struct _cffi_externpy_s *, char *);
|
||||||
|
static void _cffi_start_and_call_python(struct _cffi_externpy_s *, char *);
|
||||||
|
static _cffi_call_python_fnptr _cffi_call_python = &_cffi_start_and_call_python;
|
||||||
|
|
||||||
|
|
||||||
|
#ifndef _MSC_VER
|
||||||
|
/* --- Assuming a GCC not infinitely old --- */
|
||||||
|
# define cffi_compare_and_swap(l,o,n) __sync_bool_compare_and_swap(l,o,n)
|
||||||
|
# define cffi_write_barrier() __sync_synchronize()
|
||||||
|
# if !defined(__amd64__) && !defined(__x86_64__) && \
|
||||||
|
!defined(__i386__) && !defined(__i386)
|
||||||
|
# define cffi_read_barrier() __sync_synchronize()
|
||||||
|
# else
|
||||||
|
# define cffi_read_barrier() (void)0
|
||||||
|
# endif
|
||||||
|
#else
|
||||||
|
/* --- Windows threads version --- */
|
||||||
|
# include <Windows.h>
|
||||||
|
# define cffi_compare_and_swap(l,o,n) \
|
||||||
|
(InterlockedCompareExchangePointer(l,n,o) == (o))
|
||||||
|
# define cffi_write_barrier() InterlockedCompareExchange(&_cffi_dummy,0,0)
|
||||||
|
# define cffi_read_barrier() (void)0
|
||||||
|
static volatile LONG _cffi_dummy;
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef WITH_THREAD
|
||||||
|
# ifndef _MSC_VER
|
||||||
|
# include <pthread.h>
|
||||||
|
static pthread_mutex_t _cffi_embed_startup_lock;
|
||||||
|
# else
|
||||||
|
static CRITICAL_SECTION _cffi_embed_startup_lock;
|
||||||
|
# endif
|
||||||
|
static char _cffi_embed_startup_lock_ready = 0;
|
||||||
|
#endif
|
||||||
|
|
||||||
|
static void _cffi_acquire_reentrant_mutex(void)
|
||||||
|
{
|
||||||
|
static void *volatile lock = NULL;
|
||||||
|
|
||||||
|
while (!cffi_compare_and_swap(&lock, NULL, (void *)1)) {
|
||||||
|
/* should ideally do a spin loop instruction here, but
|
||||||
|
hard to do it portably and doesn't really matter I
|
||||||
|
think: pthread_mutex_init() should be very fast, and
|
||||||
|
this is only run at start-up anyway. */
|
||||||
|
}
|
||||||
|
|
||||||
|
#ifdef WITH_THREAD
|
||||||
|
if (!_cffi_embed_startup_lock_ready) {
|
||||||
|
# ifndef _MSC_VER
|
||||||
|
pthread_mutexattr_t attr;
|
||||||
|
pthread_mutexattr_init(&attr);
|
||||||
|
pthread_mutexattr_settype(&attr, PTHREAD_MUTEX_RECURSIVE);
|
||||||
|
pthread_mutex_init(&_cffi_embed_startup_lock, &attr);
|
||||||
|
# else
|
||||||
|
InitializeCriticalSection(&_cffi_embed_startup_lock);
|
||||||
|
# endif
|
||||||
|
_cffi_embed_startup_lock_ready = 1;
|
||||||
|
}
|
||||||
|
#endif
|
||||||
|
|
||||||
|
while (!cffi_compare_and_swap(&lock, (void *)1, NULL))
|
||||||
|
;
|
||||||
|
|
||||||
|
#ifndef _MSC_VER
|
||||||
|
pthread_mutex_lock(&_cffi_embed_startup_lock);
|
||||||
|
#else
|
||||||
|
EnterCriticalSection(&_cffi_embed_startup_lock);
|
||||||
|
#endif
|
||||||
|
}
|
||||||
|
|
||||||
|
static void _cffi_release_reentrant_mutex(void)
|
||||||
|
{
|
||||||
|
#ifndef _MSC_VER
|
||||||
|
pthread_mutex_unlock(&_cffi_embed_startup_lock);
|
||||||
|
#else
|
||||||
|
LeaveCriticalSection(&_cffi_embed_startup_lock);
|
||||||
|
#endif
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/********** CPython-specific section **********/
|
||||||
|
#ifndef PYPY_VERSION
|
||||||
|
|
||||||
|
|
||||||
|
#define _cffi_call_python_org _cffi_exports[_CFFI_CPIDX]
|
||||||
|
|
||||||
|
PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(void); /* forward */
|
||||||
|
|
||||||
|
static void _cffi_py_initialize(void)
|
||||||
|
{
|
||||||
|
/* XXX use initsigs=0, which "skips initialization registration of
|
||||||
|
signal handlers, which might be useful when Python is
|
||||||
|
embedded" according to the Python docs. But review and think
|
||||||
|
if it should be a user-controllable setting.
|
||||||
|
|
||||||
|
XXX we should also give a way to write errors to a buffer
|
||||||
|
instead of to stderr.
|
||||||
|
|
||||||
|
XXX if importing 'site' fails, CPython (any version) calls
|
||||||
|
exit(). Should we try to work around this behavior here?
|
||||||
|
*/
|
||||||
|
Py_InitializeEx(0);
|
||||||
|
}
|
||||||
|
|
||||||
|
static int _cffi_initialize_python(void)
|
||||||
|
{
|
||||||
|
/* This initializes Python, imports _cffi_backend, and then the
|
||||||
|
present .dll/.so is set up as a CPython C extension module.
|
||||||
|
*/
|
||||||
|
int result;
|
||||||
|
PyGILState_STATE state;
|
||||||
|
PyObject *pycode=NULL, *global_dict=NULL, *x;
|
||||||
|
|
||||||
|
#if PY_MAJOR_VERSION >= 3
|
||||||
|
/* see comments in _cffi_carefully_make_gil() about the
|
||||||
|
Python2/Python3 difference
|
||||||
|
*/
|
||||||
|
#else
|
||||||
|
/* Acquire the GIL. We have no threadstate here. If Python is
|
||||||
|
already initialized, it is possible that there is already one
|
||||||
|
existing for this thread, but it is not made current now.
|
||||||
|
*/
|
||||||
|
PyEval_AcquireLock();
|
||||||
|
|
||||||
|
_cffi_py_initialize();
|
||||||
|
|
||||||
|
/* The Py_InitializeEx() sometimes made a threadstate for us, but
|
||||||
|
not always. Indeed Py_InitializeEx() could be called and do
|
||||||
|
nothing. So do we have a threadstate, or not? We don't know,
|
||||||
|
but we can replace it with NULL in all cases.
|
||||||
|
*/
|
||||||
|
(void)PyThreadState_Swap(NULL);
|
||||||
|
|
||||||
|
/* Now we can release the GIL and re-acquire immediately using the
|
||||||
|
logic of PyGILState(), which handles making or installing the
|
||||||
|
correct threadstate.
|
||||||
|
*/
|
||||||
|
PyEval_ReleaseLock();
|
||||||
|
#endif
|
||||||
|
state = PyGILState_Ensure();
|
||||||
|
|
||||||
|
/* Call the initxxx() function from the present module. It will
|
||||||
|
create and initialize us as a CPython extension module, instead
|
||||||
|
of letting the startup Python code do it---it might reimport
|
||||||
|
the same .dll/.so and get maybe confused on some platforms.
|
||||||
|
It might also have troubles locating the .dll/.so again for all
|
||||||
|
I know.
|
||||||
|
*/
|
||||||
|
(void)_CFFI_PYTHON_STARTUP_FUNC();
|
||||||
|
if (PyErr_Occurred())
|
||||||
|
goto error;
|
||||||
|
|
||||||
|
/* Now run the Python code provided to ffi.embedding_init_code().
|
||||||
|
*/
|
||||||
|
pycode = Py_CompileString(_CFFI_PYTHON_STARTUP_CODE,
|
||||||
|
"<init code for '" _CFFI_MODULE_NAME "'>",
|
||||||
|
Py_file_input);
|
||||||
|
if (pycode == NULL)
|
||||||
|
goto error;
|
||||||
|
global_dict = PyDict_New();
|
||||||
|
if (global_dict == NULL)
|
||||||
|
goto error;
|
||||||
|
if (PyDict_SetItemString(global_dict, "__builtins__",
|
||||||
|
PyThreadState_GET()->interp->builtins) < 0)
|
||||||
|
goto error;
|
||||||
|
x = PyEval_EvalCode(
|
||||||
|
#if PY_MAJOR_VERSION < 3
|
||||||
|
(PyCodeObject *)
|
||||||
|
#endif
|
||||||
|
pycode, global_dict, global_dict);
|
||||||
|
if (x == NULL)
|
||||||
|
goto error;
|
||||||
|
Py_DECREF(x);
|
||||||
|
|
||||||
|
/* Done! Now if we've been called from
|
||||||
|
_cffi_start_and_call_python() in an ``extern "Python"``, we can
|
||||||
|
only hope that the Python code did correctly set up the
|
||||||
|
corresponding @ffi.def_extern() function. Otherwise, the
|
||||||
|
general logic of ``extern "Python"`` functions (inside the
|
||||||
|
_cffi_backend module) will find that the reference is still
|
||||||
|
missing and print an error.
|
||||||
|
*/
|
||||||
|
result = 0;
|
||||||
|
done:
|
||||||
|
Py_XDECREF(pycode);
|
||||||
|
Py_XDECREF(global_dict);
|
||||||
|
PyGILState_Release(state);
|
||||||
|
return result;
|
||||||
|
|
||||||
|
error:;
|
||||||
|
{
|
||||||
|
/* Print as much information as potentially useful.
|
||||||
|
Debugging load-time failures with embedding is not fun
|
||||||
|
*/
|
||||||
|
PyObject *exception, *v, *tb, *f, *modules, *mod;
|
||||||
|
PyErr_Fetch(&exception, &v, &tb);
|
||||||
|
if (exception != NULL) {
|
||||||
|
PyErr_NormalizeException(&exception, &v, &tb);
|
||||||
|
PyErr_Display(exception, v, tb);
|
||||||
|
}
|
||||||
|
Py_XDECREF(exception);
|
||||||
|
Py_XDECREF(v);
|
||||||
|
Py_XDECREF(tb);
|
||||||
|
|
||||||
|
f = PySys_GetObject((char *)"stderr");
|
||||||
|
if (f != NULL && f != Py_None) {
|
||||||
|
PyFile_WriteString("\nFrom: " _CFFI_MODULE_NAME
|
||||||
|
"\ncompiled with cffi version: 1.7.0"
|
||||||
|
"\n_cffi_backend module: ", f);
|
||||||
|
modules = PyImport_GetModuleDict();
|
||||||
|
mod = PyDict_GetItemString(modules, "_cffi_backend");
|
||||||
|
if (mod == NULL) {
|
||||||
|
PyFile_WriteString("not loaded", f);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
v = PyObject_GetAttrString(mod, "__file__");
|
||||||
|
PyFile_WriteObject(v, f, 0);
|
||||||
|
Py_XDECREF(v);
|
||||||
|
}
|
||||||
|
PyFile_WriteString("\nsys.path: ", f);
|
||||||
|
PyFile_WriteObject(PySys_GetObject((char *)"path"), f, 0);
|
||||||
|
PyFile_WriteString("\n\n", f);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
result = -1;
|
||||||
|
goto done;
|
||||||
|
}
|
||||||
|
|
||||||
|
PyAPI_DATA(char *) _PyParser_TokenNames[]; /* from CPython */
|
||||||
|
|
||||||
|
static int _cffi_carefully_make_gil(void)
|
||||||
|
{
|
||||||
|
/* This does the basic initialization of Python. It can be called
|
||||||
|
completely concurrently from unrelated threads. It assumes
|
||||||
|
that we don't hold the GIL before (if it exists), and we don't
|
||||||
|
hold it afterwards.
|
||||||
|
|
||||||
|
What it really does is completely different in Python 2 and
|
||||||
|
Python 3.
|
||||||
|
|
||||||
|
Python 2
|
||||||
|
========
|
||||||
|
|
||||||
|
Initialize the GIL, without initializing the rest of Python,
|
||||||
|
by calling PyEval_InitThreads().
|
||||||
|
|
||||||
|
PyEval_InitThreads() must not be called concurrently at all.
|
||||||
|
So we use a global variable as a simple spin lock. This global
|
||||||
|
variable must be from 'libpythonX.Y.so', not from this
|
||||||
|
cffi-based extension module, because it must be shared from
|
||||||
|
different cffi-based extension modules. We choose
|
||||||
|
_PyParser_TokenNames[0] as a completely arbitrary pointer value
|
||||||
|
that is never written to. The default is to point to the
|
||||||
|
string "ENDMARKER". We change it temporarily to point to the
|
||||||
|
next character in that string. (Yes, I know it's REALLY
|
||||||
|
obscure.)
|
||||||
|
|
||||||
|
Python 3
|
||||||
|
========
|
||||||
|
|
||||||
|
In Python 3, PyEval_InitThreads() cannot be called before
|
||||||
|
Py_InitializeEx() any more. So this function calls
|
||||||
|
Py_InitializeEx() first. It uses the same obscure logic to
|
||||||
|
make sure we never call it concurrently.
|
||||||
|
|
||||||
|
Arguably, this is less good on the spinlock, because
|
||||||
|
Py_InitializeEx() takes much longer to run than
|
||||||
|
PyEval_InitThreads(). But I didn't find a way around it.
|
||||||
|
*/
|
||||||
|
|
||||||
|
#ifdef WITH_THREAD
|
||||||
|
char *volatile *lock = (char *volatile *)_PyParser_TokenNames;
|
||||||
|
char *old_value;
|
||||||
|
|
||||||
|
while (1) { /* spin loop */
|
||||||
|
old_value = *lock;
|
||||||
|
if (old_value[0] == 'E') {
|
||||||
|
assert(old_value[1] == 'N');
|
||||||
|
if (cffi_compare_and_swap(lock, old_value, old_value + 1))
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
assert(old_value[0] == 'N');
|
||||||
|
/* should ideally do a spin loop instruction here, but
|
||||||
|
hard to do it portably and doesn't really matter I
|
||||||
|
think: PyEval_InitThreads() should be very fast, and
|
||||||
|
this is only run at start-up anyway. */
|
||||||
|
}
|
||||||
|
}
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#if PY_MAJOR_VERSION >= 3
|
||||||
|
/* Python 3: call Py_InitializeEx() */
|
||||||
|
{
|
||||||
|
PyGILState_STATE state = PyGILState_UNLOCKED;
|
||||||
|
if (!Py_IsInitialized())
|
||||||
|
_cffi_py_initialize();
|
||||||
|
else
|
||||||
|
state = PyGILState_Ensure();
|
||||||
|
|
||||||
|
PyEval_InitThreads();
|
||||||
|
PyGILState_Release(state);
|
||||||
|
}
|
||||||
|
#else
|
||||||
|
/* Python 2: call PyEval_InitThreads() */
|
||||||
|
# ifdef WITH_THREAD
|
||||||
|
if (!PyEval_ThreadsInitialized()) {
|
||||||
|
PyEval_InitThreads(); /* makes the GIL */
|
||||||
|
PyEval_ReleaseLock(); /* then release it */
|
||||||
|
}
|
||||||
|
/* else: there is already a GIL, but we still needed to do the
|
||||||
|
spinlock dance to make sure that we see it as fully ready */
|
||||||
|
# endif
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#ifdef WITH_THREAD
|
||||||
|
/* release the lock */
|
||||||
|
while (!cffi_compare_and_swap(lock, old_value + 1, old_value))
|
||||||
|
;
|
||||||
|
#endif
|
||||||
|
|
||||||
|
return 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
/********** end CPython-specific section **********/
|
||||||
|
|
||||||
|
|
||||||
|
#else
|
||||||
|
|
||||||
|
|
||||||
|
/********** PyPy-specific section **********/
|
||||||
|
|
||||||
|
PyMODINIT_FUNC _CFFI_PYTHON_STARTUP_FUNC(const void *[]); /* forward */
|
||||||
|
|
||||||
|
static struct _cffi_pypy_init_s {
|
||||||
|
const char *name;
|
||||||
|
void (*func)(const void *[]);
|
||||||
|
const char *code;
|
||||||
|
} _cffi_pypy_init = {
|
||||||
|
_CFFI_MODULE_NAME,
|
||||||
|
_CFFI_PYTHON_STARTUP_FUNC,
|
||||||
|
_CFFI_PYTHON_STARTUP_CODE,
|
||||||
|
};
|
||||||
|
|
||||||
|
extern int pypy_carefully_make_gil(const char *);
|
||||||
|
extern int pypy_init_embedded_cffi_module(int, struct _cffi_pypy_init_s *);
|
||||||
|
|
||||||
|
static int _cffi_carefully_make_gil(void)
|
||||||
|
{
|
||||||
|
return pypy_carefully_make_gil(_CFFI_MODULE_NAME);
|
||||||
|
}
|
||||||
|
|
||||||
|
static int _cffi_initialize_python(void)
|
||||||
|
{
|
||||||
|
return pypy_init_embedded_cffi_module(0xB011, &_cffi_pypy_init);
|
||||||
|
}
|
||||||
|
|
||||||
|
/********** end PyPy-specific section **********/
|
||||||
|
|
||||||
|
|
||||||
|
#endif
|
||||||
|
|
||||||
|
|
||||||
|
#ifdef __GNUC__
|
||||||
|
__attribute__((noinline))
|
||||||
|
#endif
|
||||||
|
static _cffi_call_python_fnptr _cffi_start_python(void)
|
||||||
|
{
|
||||||
|
/* Delicate logic to initialize Python. This function can be
|
||||||
|
called multiple times concurrently, e.g. when the process calls
|
||||||
|
its first ``extern "Python"`` functions in multiple threads at
|
||||||
|
once. It can also be called recursively, in which case we must
|
||||||
|
ignore it. We also have to consider what occurs if several
|
||||||
|
different cffi-based extensions reach this code in parallel
|
||||||
|
threads---it is a different copy of the code, then, and we
|
||||||
|
can't have any shared global variable unless it comes from
|
||||||
|
'libpythonX.Y.so'.
|
||||||
|
|
||||||
|
Idea:
|
||||||
|
|
||||||
|
* _cffi_carefully_make_gil(): "carefully" call
|
||||||
|
PyEval_InitThreads() (possibly with Py_InitializeEx() first).
|
||||||
|
|
||||||
|
* then we use a (local) custom lock to make sure that a call to this
|
||||||
|
cffi-based extension will wait if another call to the *same*
|
||||||
|
extension is running the initialization in another thread.
|
||||||
|
It is reentrant, so that a recursive call will not block, but
|
||||||
|
only one from a different thread.
|
||||||
|
|
||||||
|
* then we grab the GIL and (Python 2) we call Py_InitializeEx().
|
||||||
|
At this point, concurrent calls to Py_InitializeEx() are not
|
||||||
|
possible: we have the GIL.
|
||||||
|
|
||||||
|
* do the rest of the specific initialization, which may
|
||||||
|
temporarily release the GIL but not the custom lock.
|
||||||
|
Only release the custom lock when we are done.
|
||||||
|
*/
|
||||||
|
static char called = 0;
|
||||||
|
|
||||||
|
if (_cffi_carefully_make_gil() != 0)
|
||||||
|
return NULL;
|
||||||
|
|
||||||
|
_cffi_acquire_reentrant_mutex();
|
||||||
|
|
||||||
|
/* Here the GIL exists, but we don't have it. We're only protected
|
||||||
|
from concurrency by the reentrant mutex. */
|
||||||
|
|
||||||
|
/* This file only initializes the embedded module once, the first
|
||||||
|
time this is called, even if there are subinterpreters. */
|
||||||
|
if (!called) {
|
||||||
|
called = 1; /* invoke _cffi_initialize_python() only once,
|
||||||
|
but don't set '_cffi_call_python' right now,
|
||||||
|
otherwise concurrent threads won't call
|
||||||
|
this function at all (we need them to wait) */
|
||||||
|
if (_cffi_initialize_python() == 0) {
|
||||||
|
/* now initialization is finished. Switch to the fast-path. */
|
||||||
|
|
||||||
|
/* We would like nobody to see the new value of
|
||||||
|
'_cffi_call_python' without also seeing the rest of the
|
||||||
|
data initialized. However, this is not possible. But
|
||||||
|
the new value of '_cffi_call_python' is the function
|
||||||
|
'cffi_call_python()' from _cffi_backend. So: */
|
||||||
|
cffi_write_barrier();
|
||||||
|
/* ^^^ we put a write barrier here, and a corresponding
|
||||||
|
read barrier at the start of cffi_call_python(). This
|
||||||
|
ensures that after that read barrier, we see everything
|
||||||
|
done here before the write barrier.
|
||||||
|
*/
|
||||||
|
|
||||||
|
assert(_cffi_call_python_org != NULL);
|
||||||
|
_cffi_call_python = (_cffi_call_python_fnptr)_cffi_call_python_org;
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
/* initialization failed. Reset this to NULL, even if it was
|
||||||
|
already set to some other value. Future calls to
|
||||||
|
_cffi_start_python() are still forced to occur, and will
|
||||||
|
always return NULL from now on. */
|
||||||
|
_cffi_call_python_org = NULL;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
_cffi_release_reentrant_mutex();
|
||||||
|
|
||||||
|
return (_cffi_call_python_fnptr)_cffi_call_python_org;
|
||||||
|
}
|
||||||
|
|
||||||
|
static
void _cffi_start_and_call_python(struct _cffi_externpy_s *externpy, char *args)
{
    /* Bootstrap trampoline: this is the initial value of
       '_cffi_call_python'.  On first use it lazily initializes the
       embedded Python interpreter, then forwards the call.  'args'
       holds the packed arguments on entry and receives the packed
       result. */
    _cffi_call_python_fnptr fnptr;
    int current_err = errno;    /* initialization must not clobber errno */
#ifdef _MSC_VER
    int current_lasterr = GetLastError();   /* ...nor the Win32 last-error */
#endif
    fnptr = _cffi_start_python();
    if (fnptr == NULL) {
        /* Initialization failed: report it and zero the result buffer so
           the C caller still gets a well-defined (dummy) return value. */
        fprintf(stderr, "function %s() called, but initialization code "
                        "failed. Returning 0.\n", externpy->name);
        memset(args, 0, externpy->size_of_result);
    }
#ifdef _MSC_VER
    SetLastError(current_lasterr);
#endif
    errno = current_err;

    /* The real call happens outside the errno/lasterr save-restore:
       the Python callback is allowed to set them for the caller. */
    if (fnptr != NULL)
        fnptr(externpy, args);
}
|
||||||
|
|
||||||
|
|
||||||
|
/* The cffi_start_python() function makes sure Python is initialized
   and our cffi module is set up.  It can be called manually from the
   user C code.  The same effect is obtained automatically from any
   dll-exported ``extern "Python"`` function.  This function returns
   -1 if initialization failed, 0 if all is OK.  */
_CFFI_UNUSED_FN
static int cffi_start_python(void)
{
    if (_cffi_call_python == &_cffi_start_and_call_python) {
        /* Still pointing at the bootstrap trampoline, so initialization
           has not completed yet: run it now. */
        if (_cffi_start_python() == NULL)
            return -1;
    }
    /* Pairs with the write barrier issued after initialization: after
       this read barrier we are guaranteed to see all the initialized
       data, not just the swapped-in function pointer. */
    cffi_read_barrier();
    return 0;
}
|
||||||
|
|
||||||
|
#undef cffi_compare_and_swap
|
||||||
|
#undef cffi_write_barrier
|
||||||
|
#undef cffi_read_barrier
|
875
lib/python3.4/site-packages/cffi/api.py
Normal file
875
lib/python3.4/site-packages/cffi/api.py
Normal file
|
@ -0,0 +1,875 @@
|
||||||
|
import sys, types
|
||||||
|
from .lock import allocate_lock
|
||||||
|
|
||||||
|
# Compatibility shims: Python 3.1 dropped the callable() builtin, and
# Python 3.x has no basestring.  Define equivalents only when missing.
try:
    callable
except NameError:
    # Python 3.1: rebuild callable() on top of collections.Callable.
    from collections import Callable

    def callable(x, _Callable=Callable):
        return isinstance(x, _Callable)

try:
    basestring
except NameError:
    # Python 3.x: plain str is the only textual base type.
    basestring = str
|
||||||
|
|
||||||
|
|
||||||
|
class FFIError(Exception):
    """Generic error raised by the FFI layer."""
|
||||||
|
|
||||||
|
class CDefError(Exception):
    """Error in a cdef() declaration.

    When the parser supplied an AST node as the second argument, the
    string form is prefixed with that node's source line number.
    """

    def __str__(self):
        # args[1], when present, is expected to carry a .coord.line
        # attribute; fall back to no prefix if it does not.
        try:
            node = self.args[1]
            prefix = 'line %d: ' % (node.coord.line,)
        except (AttributeError, TypeError, IndexError):
            prefix = ''
        return '%s%s' % (prefix, self.args[0])
|
||||||
|
|
||||||
|
|
||||||
|
class FFI(object):
|
||||||
|
r'''
|
||||||
|
The main top-level class that you instantiate once, or once per module.
|
||||||
|
|
||||||
|
Example usage:
|
||||||
|
|
||||||
|
ffi = FFI()
|
||||||
|
ffi.cdef("""
|
||||||
|
int printf(const char *, ...);
|
||||||
|
""")
|
||||||
|
|
||||||
|
C = ffi.dlopen(None) # standard library
|
||||||
|
-or-
|
||||||
|
C = ffi.verify() # use a C compiler: verify the decl above is right
|
||||||
|
|
||||||
|
C.printf("hello, %s!\n", ffi.new("char[]", "world"))
|
||||||
|
'''
|
||||||
|
|
||||||
|
def __init__(self, backend=None):
|
||||||
|
"""Create an FFI instance. The 'backend' argument is used to
|
||||||
|
select a non-default backend, mostly for tests.
|
||||||
|
"""
|
||||||
|
from . import cparser, model
|
||||||
|
if backend is None:
|
||||||
|
# You need PyPy (>= 2.0 beta), or a CPython (>= 2.6) with
|
||||||
|
# _cffi_backend.so compiled.
|
||||||
|
import _cffi_backend as backend
|
||||||
|
from . import __version__
|
||||||
|
assert backend.__version__ == __version__, \
|
||||||
|
"version mismatch, %s != %s" % (backend.__version__, __version__)
|
||||||
|
# (If you insist you can also try to pass the option
|
||||||
|
# 'backend=backend_ctypes.CTypesBackend()', but don't
|
||||||
|
# rely on it! It's probably not going to work well.)
|
||||||
|
|
||||||
|
self._backend = backend
|
||||||
|
self._lock = allocate_lock()
|
||||||
|
self._parser = cparser.Parser()
|
||||||
|
self._cached_btypes = {}
|
||||||
|
self._parsed_types = types.ModuleType('parsed_types').__dict__
|
||||||
|
self._new_types = types.ModuleType('new_types').__dict__
|
||||||
|
self._function_caches = []
|
||||||
|
self._libraries = []
|
||||||
|
self._cdefsources = []
|
||||||
|
self._included_ffis = []
|
||||||
|
self._windows_unicode = None
|
||||||
|
self._init_once_cache = {}
|
||||||
|
self._cdef_version = None
|
||||||
|
self._embedding = None
|
||||||
|
if hasattr(backend, 'set_ffi'):
|
||||||
|
backend.set_ffi(self)
|
||||||
|
for name in backend.__dict__:
|
||||||
|
if name.startswith('RTLD_'):
|
||||||
|
setattr(self, name, getattr(backend, name))
|
||||||
|
#
|
||||||
|
with self._lock:
|
||||||
|
self.BVoidP = self._get_cached_btype(model.voidp_type)
|
||||||
|
self.BCharA = self._get_cached_btype(model.char_array_type)
|
||||||
|
if isinstance(backend, types.ModuleType):
|
||||||
|
# _cffi_backend: attach these constants to the class
|
||||||
|
if not hasattr(FFI, 'NULL'):
|
||||||
|
FFI.NULL = self.cast(self.BVoidP, 0)
|
||||||
|
FFI.CData, FFI.CType = backend._get_types()
|
||||||
|
else:
|
||||||
|
# ctypes backend: attach these constants to the instance
|
||||||
|
self.NULL = self.cast(self.BVoidP, 0)
|
||||||
|
self.CData, self.CType = backend._get_types()
|
||||||
|
|
||||||
|
def cdef(self, csource, override=False, packed=False):
|
||||||
|
"""Parse the given C source. This registers all declared functions,
|
||||||
|
types, and global variables. The functions and global variables can
|
||||||
|
then be accessed via either 'ffi.dlopen()' or 'ffi.verify()'.
|
||||||
|
The types can be used in 'ffi.new()' and other functions.
|
||||||
|
If 'packed' is specified as True, all structs declared inside this
|
||||||
|
cdef are packed, i.e. laid out without any field alignment at all.
|
||||||
|
"""
|
||||||
|
self._cdef(csource, override=override, packed=packed)
|
||||||
|
|
||||||
|
def embedding_api(self, csource, packed=False):
|
||||||
|
self._cdef(csource, packed=packed, dllexport=True)
|
||||||
|
if self._embedding is None:
|
||||||
|
self._embedding = ''
|
||||||
|
|
||||||
|
def _cdef(self, csource, override=False, **options):
|
||||||
|
if not isinstance(csource, str): # unicode, on Python 2
|
||||||
|
if not isinstance(csource, basestring):
|
||||||
|
raise TypeError("cdef() argument must be a string")
|
||||||
|
csource = csource.encode('ascii')
|
||||||
|
with self._lock:
|
||||||
|
self._cdef_version = object()
|
||||||
|
self._parser.parse(csource, override=override, **options)
|
||||||
|
self._cdefsources.append(csource)
|
||||||
|
if override:
|
||||||
|
for cache in self._function_caches:
|
||||||
|
cache.clear()
|
||||||
|
finishlist = self._parser._recomplete
|
||||||
|
if finishlist:
|
||||||
|
self._parser._recomplete = []
|
||||||
|
for tp in finishlist:
|
||||||
|
tp.finish_backend_type(self, finishlist)
|
||||||
|
|
||||||
|
def dlopen(self, name, flags=0):
|
||||||
|
"""Load and return a dynamic library identified by 'name'.
|
||||||
|
The standard C library can be loaded by passing None.
|
||||||
|
Note that functions and types declared by 'ffi.cdef()' are not
|
||||||
|
linked to a particular library, just like C headers; in the
|
||||||
|
library we only look for the actual (untyped) symbols.
|
||||||
|
"""
|
||||||
|
assert isinstance(name, basestring) or name is None
|
||||||
|
with self._lock:
|
||||||
|
lib, function_cache = _make_ffi_library(self, name, flags)
|
||||||
|
self._function_caches.append(function_cache)
|
||||||
|
self._libraries.append(lib)
|
||||||
|
return lib
|
||||||
|
|
||||||
|
def _typeof_locked(self, cdecl):
|
||||||
|
# call me with the lock!
|
||||||
|
key = cdecl
|
||||||
|
if key in self._parsed_types:
|
||||||
|
return self._parsed_types[key]
|
||||||
|
#
|
||||||
|
if not isinstance(cdecl, str): # unicode, on Python 2
|
||||||
|
cdecl = cdecl.encode('ascii')
|
||||||
|
#
|
||||||
|
type = self._parser.parse_type(cdecl)
|
||||||
|
really_a_function_type = type.is_raw_function
|
||||||
|
if really_a_function_type:
|
||||||
|
type = type.as_function_pointer()
|
||||||
|
btype = self._get_cached_btype(type)
|
||||||
|
result = btype, really_a_function_type
|
||||||
|
self._parsed_types[key] = result
|
||||||
|
return result
|
||||||
|
|
||||||
|
def _typeof(self, cdecl, consider_function_as_funcptr=False):
|
||||||
|
# string -> ctype object
|
||||||
|
try:
|
||||||
|
result = self._parsed_types[cdecl]
|
||||||
|
except KeyError:
|
||||||
|
with self._lock:
|
||||||
|
result = self._typeof_locked(cdecl)
|
||||||
|
#
|
||||||
|
btype, really_a_function_type = result
|
||||||
|
if really_a_function_type and not consider_function_as_funcptr:
|
||||||
|
raise CDefError("the type %r is a function type, not a "
|
||||||
|
"pointer-to-function type" % (cdecl,))
|
||||||
|
return btype
|
||||||
|
|
||||||
|
def typeof(self, cdecl):
|
||||||
|
"""Parse the C type given as a string and return the
|
||||||
|
corresponding <ctype> object.
|
||||||
|
It can also be used on 'cdata' instance to get its C type.
|
||||||
|
"""
|
||||||
|
if isinstance(cdecl, basestring):
|
||||||
|
return self._typeof(cdecl)
|
||||||
|
if isinstance(cdecl, self.CData):
|
||||||
|
return self._backend.typeof(cdecl)
|
||||||
|
if isinstance(cdecl, types.BuiltinFunctionType):
|
||||||
|
res = _builtin_function_type(cdecl)
|
||||||
|
if res is not None:
|
||||||
|
return res
|
||||||
|
if (isinstance(cdecl, types.FunctionType)
|
||||||
|
and hasattr(cdecl, '_cffi_base_type')):
|
||||||
|
with self._lock:
|
||||||
|
return self._get_cached_btype(cdecl._cffi_base_type)
|
||||||
|
raise TypeError(type(cdecl))
|
||||||
|
|
||||||
|
def sizeof(self, cdecl):
|
||||||
|
"""Return the size in bytes of the argument. It can be a
|
||||||
|
string naming a C type, or a 'cdata' instance.
|
||||||
|
"""
|
||||||
|
if isinstance(cdecl, basestring):
|
||||||
|
BType = self._typeof(cdecl)
|
||||||
|
return self._backend.sizeof(BType)
|
||||||
|
else:
|
||||||
|
return self._backend.sizeof(cdecl)
|
||||||
|
|
||||||
|
def alignof(self, cdecl):
|
||||||
|
"""Return the natural alignment size in bytes of the C type
|
||||||
|
given as a string.
|
||||||
|
"""
|
||||||
|
if isinstance(cdecl, basestring):
|
||||||
|
cdecl = self._typeof(cdecl)
|
||||||
|
return self._backend.alignof(cdecl)
|
||||||
|
|
||||||
|
def offsetof(self, cdecl, *fields_or_indexes):
|
||||||
|
"""Return the offset of the named field inside the given
|
||||||
|
structure or array, which must be given as a C type name.
|
||||||
|
You can give several field names in case of nested structures.
|
||||||
|
You can also give numeric values which correspond to array
|
||||||
|
items, in case of an array type.
|
||||||
|
"""
|
||||||
|
if isinstance(cdecl, basestring):
|
||||||
|
cdecl = self._typeof(cdecl)
|
||||||
|
return self._typeoffsetof(cdecl, *fields_or_indexes)[1]
|
||||||
|
|
||||||
|
def new(self, cdecl, init=None):
|
||||||
|
"""Allocate an instance according to the specified C type and
|
||||||
|
return a pointer to it. The specified C type must be either a
|
||||||
|
pointer or an array: ``new('X *')`` allocates an X and returns
|
||||||
|
a pointer to it, whereas ``new('X[n]')`` allocates an array of
|
||||||
|
n X'es and returns an array referencing it (which works
|
||||||
|
mostly like a pointer, like in C). You can also use
|
||||||
|
``new('X[]', n)`` to allocate an array of a non-constant
|
||||||
|
length n.
|
||||||
|
|
||||||
|
The memory is initialized following the rules of declaring a
|
||||||
|
global variable in C: by default it is zero-initialized, but
|
||||||
|
an explicit initializer can be given which can be used to
|
||||||
|
fill all or part of the memory.
|
||||||
|
|
||||||
|
When the returned <cdata> object goes out of scope, the memory
|
||||||
|
is freed. In other words the returned <cdata> object has
|
||||||
|
ownership of the value of type 'cdecl' that it points to. This
|
||||||
|
means that the raw data can be used as long as this object is
|
||||||
|
kept alive, but must not be used for a longer time. Be careful
|
||||||
|
about that when copying the pointer to the memory somewhere
|
||||||
|
else, e.g. into another structure.
|
||||||
|
"""
|
||||||
|
if isinstance(cdecl, basestring):
|
||||||
|
cdecl = self._typeof(cdecl)
|
||||||
|
return self._backend.newp(cdecl, init)
|
||||||
|
|
||||||
|
def new_allocator(self, alloc=None, free=None,
|
||||||
|
should_clear_after_alloc=True):
|
||||||
|
"""Return a new allocator, i.e. a function that behaves like ffi.new()
|
||||||
|
but uses the provided low-level 'alloc' and 'free' functions.
|
||||||
|
|
||||||
|
'alloc' is called with the size as argument. If it returns NULL, a
|
||||||
|
MemoryError is raised. 'free' is called with the result of 'alloc'
|
||||||
|
as argument. Both can be either Python function or directly C
|
||||||
|
functions. If 'free' is None, then no free function is called.
|
||||||
|
If both 'alloc' and 'free' are None, the default is used.
|
||||||
|
|
||||||
|
If 'should_clear_after_alloc' is set to False, then the memory
|
||||||
|
returned by 'alloc' is assumed to be already cleared (or you are
|
||||||
|
fine with garbage); otherwise CFFI will clear it.
|
||||||
|
"""
|
||||||
|
compiled_ffi = self._backend.FFI()
|
||||||
|
allocator = compiled_ffi.new_allocator(alloc, free,
|
||||||
|
should_clear_after_alloc)
|
||||||
|
def allocate(cdecl, init=None):
|
||||||
|
if isinstance(cdecl, basestring):
|
||||||
|
cdecl = self._typeof(cdecl)
|
||||||
|
return allocator(cdecl, init)
|
||||||
|
return allocate
|
||||||
|
|
||||||
|
def cast(self, cdecl, source):
|
||||||
|
"""Similar to a C cast: returns an instance of the named C
|
||||||
|
type initialized with the given 'source'. The source is
|
||||||
|
casted between integers or pointers of any type.
|
||||||
|
"""
|
||||||
|
if isinstance(cdecl, basestring):
|
||||||
|
cdecl = self._typeof(cdecl)
|
||||||
|
return self._backend.cast(cdecl, source)
|
||||||
|
|
||||||
|
def string(self, cdata, maxlen=-1):
|
||||||
|
"""Return a Python string (or unicode string) from the 'cdata'.
|
||||||
|
If 'cdata' is a pointer or array of characters or bytes, returns
|
||||||
|
the null-terminated string. The returned string extends until
|
||||||
|
the first null character, or at most 'maxlen' characters. If
|
||||||
|
'cdata' is an array then 'maxlen' defaults to its length.
|
||||||
|
|
||||||
|
If 'cdata' is a pointer or array of wchar_t, returns a unicode
|
||||||
|
string following the same rules.
|
||||||
|
|
||||||
|
If 'cdata' is a single character or byte or a wchar_t, returns
|
||||||
|
it as a string or unicode string.
|
||||||
|
|
||||||
|
If 'cdata' is an enum, returns the value of the enumerator as a
|
||||||
|
string, or 'NUMBER' if the value is out of range.
|
||||||
|
"""
|
||||||
|
return self._backend.string(cdata, maxlen)
|
||||||
|
|
||||||
|
def unpack(self, cdata, length):
|
||||||
|
"""Unpack an array of C data of the given length,
|
||||||
|
returning a Python string/unicode/list.
|
||||||
|
|
||||||
|
If 'cdata' is a pointer to 'char', returns a byte string.
|
||||||
|
It does not stop at the first null. This is equivalent to:
|
||||||
|
ffi.buffer(cdata, length)[:]
|
||||||
|
|
||||||
|
If 'cdata' is a pointer to 'wchar_t', returns a unicode string.
|
||||||
|
'length' is measured in wchar_t's; it is not the size in bytes.
|
||||||
|
|
||||||
|
If 'cdata' is a pointer to anything else, returns a list of
|
||||||
|
'length' items. This is a faster equivalent to:
|
||||||
|
[cdata[i] for i in range(length)]
|
||||||
|
"""
|
||||||
|
return self._backend.unpack(cdata, length)
|
||||||
|
|
||||||
|
def buffer(self, cdata, size=-1):
|
||||||
|
"""Return a read-write buffer object that references the raw C data
|
||||||
|
pointed to by the given 'cdata'. The 'cdata' must be a pointer or
|
||||||
|
an array. Can be passed to functions expecting a buffer, or directly
|
||||||
|
manipulated with:
|
||||||
|
|
||||||
|
buf[:] get a copy of it in a regular string, or
|
||||||
|
buf[idx] as a single character
|
||||||
|
buf[:] = ...
|
||||||
|
buf[idx] = ... change the content
|
||||||
|
"""
|
||||||
|
return self._backend.buffer(cdata, size)
|
||||||
|
|
||||||
|
def from_buffer(self, python_buffer):
|
||||||
|
"""Return a <cdata 'char[]'> that points to the data of the
|
||||||
|
given Python object, which must support the buffer interface.
|
||||||
|
Note that this is not meant to be used on the built-in types
|
||||||
|
str or unicode (you can build 'char[]' arrays explicitly)
|
||||||
|
but only on objects containing large quantities of raw data
|
||||||
|
in some other format, like 'array.array' or numpy arrays.
|
||||||
|
"""
|
||||||
|
return self._backend.from_buffer(self.BCharA, python_buffer)
|
||||||
|
|
||||||
|
def memmove(self, dest, src, n):
|
||||||
|
"""ffi.memmove(dest, src, n) copies n bytes of memory from src to dest.
|
||||||
|
|
||||||
|
Like the C function memmove(), the memory areas may overlap;
|
||||||
|
apart from that it behaves like the C function memcpy().
|
||||||
|
|
||||||
|
'src' can be any cdata ptr or array, or any Python buffer object.
|
||||||
|
'dest' can be any cdata ptr or array, or a writable Python buffer
|
||||||
|
object. The size to copy, 'n', is always measured in bytes.
|
||||||
|
|
||||||
|
Unlike other methods, this one supports all Python buffer including
|
||||||
|
byte strings and bytearrays---but it still does not support
|
||||||
|
non-contiguous buffers.
|
||||||
|
"""
|
||||||
|
return self._backend.memmove(dest, src, n)
|
||||||
|
|
||||||
|
def callback(self, cdecl, python_callable=None, error=None, onerror=None):
|
||||||
|
"""Return a callback object or a decorator making such a
|
||||||
|
callback object. 'cdecl' must name a C function pointer type.
|
||||||
|
The callback invokes the specified 'python_callable' (which may
|
||||||
|
be provided either directly or via a decorator). Important: the
|
||||||
|
callback object must be manually kept alive for as long as the
|
||||||
|
callback may be invoked from the C level.
|
||||||
|
"""
|
||||||
|
def callback_decorator_wrap(python_callable):
|
||||||
|
if not callable(python_callable):
|
||||||
|
raise TypeError("the 'python_callable' argument "
|
||||||
|
"is not callable")
|
||||||
|
return self._backend.callback(cdecl, python_callable,
|
||||||
|
error, onerror)
|
||||||
|
if isinstance(cdecl, basestring):
|
||||||
|
cdecl = self._typeof(cdecl, consider_function_as_funcptr=True)
|
||||||
|
if python_callable is None:
|
||||||
|
return callback_decorator_wrap # decorator mode
|
||||||
|
else:
|
||||||
|
return callback_decorator_wrap(python_callable) # direct mode
|
||||||
|
|
||||||
|
def getctype(self, cdecl, replace_with=''):
|
||||||
|
"""Return a string giving the C type 'cdecl', which may be itself
|
||||||
|
a string or a <ctype> object. If 'replace_with' is given, it gives
|
||||||
|
extra text to append (or insert for more complicated C types), like
|
||||||
|
a variable name, or '*' to get actually the C type 'pointer-to-cdecl'.
|
||||||
|
"""
|
||||||
|
if isinstance(cdecl, basestring):
|
||||||
|
cdecl = self._typeof(cdecl)
|
||||||
|
replace_with = replace_with.strip()
|
||||||
|
if (replace_with.startswith('*')
|
||||||
|
and '&[' in self._backend.getcname(cdecl, '&')):
|
||||||
|
replace_with = '(%s)' % replace_with
|
||||||
|
elif replace_with and not replace_with[0] in '[(':
|
||||||
|
replace_with = ' ' + replace_with
|
||||||
|
return self._backend.getcname(cdecl, replace_with)
|
||||||
|
|
||||||
|
def gc(self, cdata, destructor):
|
||||||
|
"""Return a new cdata object that points to the same
|
||||||
|
data. Later, when this new cdata object is garbage-collected,
|
||||||
|
'destructor(old_cdata_object)' will be called.
|
||||||
|
"""
|
||||||
|
return self._backend.gcp(cdata, destructor)
|
||||||
|
|
||||||
|
def _get_cached_btype(self, type):
    """Return (building if necessary) the backend type for 'type'.

    Must be called with self._lock already held by the caller: the
    assert below checks that a non-blocking acquire fails, i.e. that
    the lock is taken.
    """
    assert self._lock.acquire(False) is False
    # call me with the lock!
    try:
        BType = self._cached_btypes[type]
    except KeyError:
        finishlist = []
        BType = type.get_cached_btype(self, finishlist)
        # Worklist loop: finish_backend_type() may append more entries
        # to 'finishlist' while we iterate over it; iterating the live
        # list is intentional so those new entries get processed too.
        for type in finishlist:
            type.finish_backend_type(self, finishlist)
    return BType
|
||||||
|
|
||||||
|
def verify(self, source='', tmpdir=None, **kwargs):
|
||||||
|
"""Verify that the current ffi signatures compile on this
|
||||||
|
machine, and return a dynamic library object. The dynamic
|
||||||
|
library can be used to call functions and access global
|
||||||
|
variables declared in this 'ffi'. The library is compiled
|
||||||
|
by the C compiler: it gives you C-level API compatibility
|
||||||
|
(including calling macros). This is unlike 'ffi.dlopen()',
|
||||||
|
which requires binary compatibility in the signatures.
|
||||||
|
"""
|
||||||
|
from .verifier import Verifier, _caller_dir_pycache
|
||||||
|
#
|
||||||
|
# If set_unicode(True) was called, insert the UNICODE and
|
||||||
|
# _UNICODE macro declarations
|
||||||
|
if self._windows_unicode:
|
||||||
|
self._apply_windows_unicode(kwargs)
|
||||||
|
#
|
||||||
|
# Set the tmpdir here, and not in Verifier.__init__: it picks
|
||||||
|
# up the caller's directory, which we want to be the caller of
|
||||||
|
# ffi.verify(), as opposed to the caller of Veritier().
|
||||||
|
tmpdir = tmpdir or _caller_dir_pycache()
|
||||||
|
#
|
||||||
|
# Make a Verifier() and use it to load the library.
|
||||||
|
self.verifier = Verifier(self, source, tmpdir, **kwargs)
|
||||||
|
lib = self.verifier.load_library()
|
||||||
|
#
|
||||||
|
# Save the loaded library for keep-alive purposes, even
|
||||||
|
# if the caller doesn't keep it alive itself (it should).
|
||||||
|
self._libraries.append(lib)
|
||||||
|
return lib
|
||||||
|
|
||||||
|
def _get_errno(self):
|
||||||
|
return self._backend.get_errno()
|
||||||
|
def _set_errno(self, errno):
|
||||||
|
self._backend.set_errno(errno)
|
||||||
|
errno = property(_get_errno, _set_errno, None,
|
||||||
|
"the value of 'errno' from/to the C calls")
|
||||||
|
|
||||||
|
def getwinerror(self, code=-1):
|
||||||
|
return self._backend.getwinerror(code)
|
||||||
|
|
||||||
|
def _pointer_to(self, ctype):
|
||||||
|
from . import model
|
||||||
|
with self._lock:
|
||||||
|
return model.pointer_cache(self, ctype)
|
||||||
|
|
||||||
|
def addressof(self, cdata, *fields_or_indexes):
|
||||||
|
"""Return the address of a <cdata 'struct-or-union'>.
|
||||||
|
If 'fields_or_indexes' are given, returns the address of that
|
||||||
|
field or array item in the structure or array, recursively in
|
||||||
|
case of nested structures.
|
||||||
|
"""
|
||||||
|
ctype = self._backend.typeof(cdata)
|
||||||
|
if fields_or_indexes:
|
||||||
|
ctype, offset = self._typeoffsetof(ctype, *fields_or_indexes)
|
||||||
|
else:
|
||||||
|
if ctype.kind == "pointer":
|
||||||
|
raise TypeError("addressof(pointer)")
|
||||||
|
offset = 0
|
||||||
|
ctypeptr = self._pointer_to(ctype)
|
||||||
|
return self._backend.rawaddressof(ctypeptr, cdata, offset)
|
||||||
|
|
||||||
|
def _typeoffsetof(self, ctype, field_or_index, *fields_or_indexes):
|
||||||
|
ctype, offset = self._backend.typeoffsetof(ctype, field_or_index)
|
||||||
|
for field1 in fields_or_indexes:
|
||||||
|
ctype, offset1 = self._backend.typeoffsetof(ctype, field1, 1)
|
||||||
|
offset += offset1
|
||||||
|
return ctype, offset
|
||||||
|
|
||||||
|
def include(self, ffi_to_include):
|
||||||
|
"""Includes the typedefs, structs, unions and enums defined
|
||||||
|
in another FFI instance. Usage is similar to a #include in C,
|
||||||
|
where a part of the program might include types defined in
|
||||||
|
another part for its own usage. Note that the include()
|
||||||
|
method has no effect on functions, constants and global
|
||||||
|
variables, which must anyway be accessed directly from the
|
||||||
|
lib object returned by the original FFI instance.
|
||||||
|
"""
|
||||||
|
if not isinstance(ffi_to_include, FFI):
|
||||||
|
raise TypeError("ffi.include() expects an argument that is also of"
|
||||||
|
" type cffi.FFI, not %r" % (
|
||||||
|
type(ffi_to_include).__name__,))
|
||||||
|
if ffi_to_include is self:
|
||||||
|
raise ValueError("self.include(self)")
|
||||||
|
with ffi_to_include._lock:
|
||||||
|
with self._lock:
|
||||||
|
self._parser.include(ffi_to_include._parser)
|
||||||
|
self._cdefsources.append('[')
|
||||||
|
self._cdefsources.extend(ffi_to_include._cdefsources)
|
||||||
|
self._cdefsources.append(']')
|
||||||
|
self._included_ffis.append(ffi_to_include)
|
||||||
|
|
||||||
|
def new_handle(self, x):
|
||||||
|
return self._backend.newp_handle(self.BVoidP, x)
|
||||||
|
|
||||||
|
def from_handle(self, x):
|
||||||
|
return self._backend.from_handle(x)
|
||||||
|
|
||||||
|
def set_unicode(self, enabled_flag):
|
||||||
|
"""Windows: if 'enabled_flag' is True, enable the UNICODE and
|
||||||
|
_UNICODE defines in C, and declare the types like TCHAR and LPTCSTR
|
||||||
|
to be (pointers to) wchar_t. If 'enabled_flag' is False,
|
||||||
|
declare these types to be (pointers to) plain 8-bit characters.
|
||||||
|
This is mostly for backward compatibility; you usually want True.
|
||||||
|
"""
|
||||||
|
if self._windows_unicode is not None:
|
||||||
|
raise ValueError("set_unicode() can only be called once")
|
||||||
|
enabled_flag = bool(enabled_flag)
|
||||||
|
if enabled_flag:
|
||||||
|
self.cdef("typedef wchar_t TBYTE;"
|
||||||
|
"typedef wchar_t TCHAR;"
|
||||||
|
"typedef const wchar_t *LPCTSTR;"
|
||||||
|
"typedef const wchar_t *PCTSTR;"
|
||||||
|
"typedef wchar_t *LPTSTR;"
|
||||||
|
"typedef wchar_t *PTSTR;"
|
||||||
|
"typedef TBYTE *PTBYTE;"
|
||||||
|
"typedef TCHAR *PTCHAR;")
|
||||||
|
else:
|
||||||
|
self.cdef("typedef char TBYTE;"
|
||||||
|
"typedef char TCHAR;"
|
||||||
|
"typedef const char *LPCTSTR;"
|
||||||
|
"typedef const char *PCTSTR;"
|
||||||
|
"typedef char *LPTSTR;"
|
||||||
|
"typedef char *PTSTR;"
|
||||||
|
"typedef TBYTE *PTBYTE;"
|
||||||
|
"typedef TCHAR *PTCHAR;")
|
||||||
|
self._windows_unicode = enabled_flag
|
||||||
|
|
||||||
|
def _apply_windows_unicode(self, kwds):
|
||||||
|
defmacros = kwds.get('define_macros', ())
|
||||||
|
if not isinstance(defmacros, (list, tuple)):
|
||||||
|
raise TypeError("'define_macros' must be a list or tuple")
|
||||||
|
defmacros = list(defmacros) + [('UNICODE', '1'),
|
||||||
|
('_UNICODE', '1')]
|
||||||
|
kwds['define_macros'] = defmacros
|
||||||
|
|
||||||
|
def _apply_embedding_fix(self, kwds):
|
||||||
|
# must include an argument like "-lpython2.7" for the compiler
|
||||||
|
def ensure(key, value):
|
||||||
|
lst = kwds.setdefault(key, [])
|
||||||
|
if value not in lst:
|
||||||
|
lst.append(value)
|
||||||
|
#
|
||||||
|
if '__pypy__' in sys.builtin_module_names:
|
||||||
|
import os
|
||||||
|
if sys.platform == "win32":
|
||||||
|
# we need 'libpypy-c.lib'. Current distributions of
|
||||||
|
# pypy (>= 4.1) contain it as 'libs/python27.lib'.
|
||||||
|
pythonlib = "python27"
|
||||||
|
if hasattr(sys, 'prefix'):
|
||||||
|
ensure('library_dirs', os.path.join(sys.prefix, 'libs'))
|
||||||
|
else:
|
||||||
|
# we need 'libpypy-c.{so,dylib}', which should be by
|
||||||
|
# default located in 'sys.prefix/bin' for installed
|
||||||
|
# systems.
|
||||||
|
pythonlib = "pypy-c"
|
||||||
|
if hasattr(sys, 'prefix'):
|
||||||
|
ensure('library_dirs', os.path.join(sys.prefix, 'bin'))
|
||||||
|
# On uninstalled pypy's, the libpypy-c is typically found in
|
||||||
|
# .../pypy/goal/.
|
||||||
|
if hasattr(sys, 'prefix'):
|
||||||
|
ensure('library_dirs', os.path.join(sys.prefix, 'pypy', 'goal'))
|
||||||
|
else:
|
||||||
|
if sys.platform == "win32":
|
||||||
|
template = "python%d%d"
|
||||||
|
if hasattr(sys, 'gettotalrefcount'):
|
||||||
|
template += '_d'
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
import sysconfig
|
||||||
|
except ImportError: # 2.6
|
||||||
|
from distutils import sysconfig
|
||||||
|
template = "python%d.%d"
|
||||||
|
if sysconfig.get_config_var('DEBUG_EXT'):
|
||||||
|
template += sysconfig.get_config_var('DEBUG_EXT')
|
||||||
|
pythonlib = (template %
|
||||||
|
(sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
|
||||||
|
if hasattr(sys, 'abiflags'):
|
||||||
|
pythonlib += sys.abiflags
|
||||||
|
ensure('libraries', pythonlib)
|
||||||
|
if sys.platform == "win32":
|
||||||
|
ensure('extra_link_args', '/MANIFEST')
|
||||||
|
|
||||||
|
def set_source(self, module_name, source, source_extension='.c', **kwds):
|
||||||
|
if hasattr(self, '_assigned_source'):
|
||||||
|
raise ValueError("set_source() cannot be called several times "
|
||||||
|
"per ffi object")
|
||||||
|
if not isinstance(module_name, basestring):
|
||||||
|
raise TypeError("'module_name' must be a string")
|
||||||
|
self._assigned_source = (str(module_name), source,
|
||||||
|
source_extension, kwds)
|
||||||
|
|
||||||
|
def distutils_extension(self, tmpdir='build', verbose=True):
|
||||||
|
from distutils.dir_util import mkpath
|
||||||
|
from .recompiler import recompile
|
||||||
|
#
|
||||||
|
if not hasattr(self, '_assigned_source'):
|
||||||
|
if hasattr(self, 'verifier'): # fallback, 'tmpdir' ignored
|
||||||
|
return self.verifier.get_extension()
|
||||||
|
raise ValueError("set_source() must be called before"
|
||||||
|
" distutils_extension()")
|
||||||
|
module_name, source, source_extension, kwds = self._assigned_source
|
||||||
|
if source is None:
|
||||||
|
raise TypeError("distutils_extension() is only for C extension "
|
||||||
|
"modules, not for dlopen()-style pure Python "
|
||||||
|
"modules")
|
||||||
|
mkpath(tmpdir)
|
||||||
|
ext, updated = recompile(self, module_name,
|
||||||
|
source, tmpdir=tmpdir, extradir=tmpdir,
|
||||||
|
source_extension=source_extension,
|
||||||
|
call_c_compiler=False, **kwds)
|
||||||
|
if verbose:
|
||||||
|
if updated:
|
||||||
|
sys.stderr.write("regenerated: %r\n" % (ext.sources[0],))
|
||||||
|
else:
|
||||||
|
sys.stderr.write("not modified: %r\n" % (ext.sources[0],))
|
||||||
|
return ext
|
||||||
|
|
||||||
|
def emit_c_code(self, filename):
    """Write the generated C extension source to 'filename'.

    Requires a prior set_source() call with a non-None C source
    (C-extension mode); raises ValueError if set_source() was never
    called, TypeError if this FFI is in dlopen()-style pure-Python
    mode.  The C compiler is not invoked.
    """
    from .recompiler import recompile
    #
    if not hasattr(self, '_assigned_source'):
        raise ValueError("set_source() must be called before emit_c_code()")
    module_name, source, source_extension, kwds = self._assigned_source
    if source is None:
        raise TypeError("emit_c_code() is only for C extension modules, "
                        "not for dlopen()-style pure Python modules")
    recompile(self, module_name, source,
              c_file=filename, call_c_compiler=False, **kwds)
|
||||||
|
|
||||||
|
def emit_python_code(self, filename):
    """Write the generated dlopen()-style pure-Python module to 'filename'.

    Requires a prior set_source() call with source=None (pure-Python
    mode); raises ValueError if set_source() was never called,
    TypeError if this FFI is in C-extension mode.  The C compiler is
    not invoked.
    """
    from .recompiler import recompile
    #
    if not hasattr(self, '_assigned_source'):
        # Bug fix: the message used to say "emit_c_code()" here,
        # copy-pasted from the sibling method above.
        raise ValueError("set_source() must be called before"
                         " emit_python_code()")
    module_name, source, source_extension, kwds = self._assigned_source
    if source is not None:
        raise TypeError("emit_python_code() is only for dlopen()-style "
                        "pure Python modules, not for C extension modules")
    recompile(self, module_name, source,
              c_file=filename, call_c_compiler=False, **kwds)
|
||||||
|
|
||||||
|
def compile(self, tmpdir='.', verbose=0, target=None):
|
||||||
|
"""The 'target' argument gives the final file name of the
|
||||||
|
compiled DLL. Use '*' to force distutils' choice, suitable for
|
||||||
|
regular CPython C API modules. Use a file name ending in '.*'
|
||||||
|
to ask for the system's default extension for dynamic libraries
|
||||||
|
(.so/.dll/.dylib).
|
||||||
|
|
||||||
|
The default is '*' when building a non-embedded C API extension,
|
||||||
|
and (module_name + '.*') when building an embedded library.
|
||||||
|
"""
|
||||||
|
from .recompiler import recompile
|
||||||
|
#
|
||||||
|
if not hasattr(self, '_assigned_source'):
|
||||||
|
raise ValueError("set_source() must be called before compile()")
|
||||||
|
module_name, source, source_extension, kwds = self._assigned_source
|
||||||
|
return recompile(self, module_name, source, tmpdir=tmpdir,
|
||||||
|
target=target, source_extension=source_extension,
|
||||||
|
compiler_verbose=verbose, **kwds)
|
||||||
|
|
||||||
|
def init_once(self, func, tag):
|
||||||
|
# Read _init_once_cache[tag], which is either (False, lock) if
|
||||||
|
# we're calling the function now in some thread, or (True, result).
|
||||||
|
# Don't call setdefault() in most cases, to avoid allocating and
|
||||||
|
# immediately freeing a lock; but still use setdefaut() to avoid
|
||||||
|
# races.
|
||||||
|
try:
|
||||||
|
x = self._init_once_cache[tag]
|
||||||
|
except KeyError:
|
||||||
|
x = self._init_once_cache.setdefault(tag, (False, allocate_lock()))
|
||||||
|
# Common case: we got (True, result), so we return the result.
|
||||||
|
if x[0]:
|
||||||
|
return x[1]
|
||||||
|
# Else, it's a lock. Acquire it to serialize the following tests.
|
||||||
|
with x[1]:
|
||||||
|
# Read again from _init_once_cache the current status.
|
||||||
|
x = self._init_once_cache[tag]
|
||||||
|
if x[0]:
|
||||||
|
return x[1]
|
||||||
|
# Call the function and store the result back.
|
||||||
|
result = func()
|
||||||
|
self._init_once_cache[tag] = (True, result)
|
||||||
|
return result
|
||||||
|
|
||||||
|
    def embedding_init_code(self, pysource):
        """Set the Python initialization code of an embedding module.

        'pysource' is normalized before being dumped into the generated C
        file, then syntax-checked.  May only be called once.
        """
        if self._embedding:
            raise ValueError("embedding_init_code() can only be called once")
        # fix 'pysource' before it gets dumped into the C file:
        # - remove empty lines at the beginning, so it starts at "line 1"
        # - dedent, if all non-empty lines are indented
        # - check for SyntaxErrors
        import re
        match = re.match(r'\s*\n', pysource)
        if match:
            # drop leading blank lines
            pysource = pysource[match.end():]
        lines = pysource.splitlines() or ['']
        # start from the first line's leading whitespace and shrink it
        # until it is a common prefix of every non-blank line
        prefix = re.match(r'\s*', lines[0]).group()
        for i in range(1, len(lines)):
            line = lines[i]
            if line.rstrip():
                while not line.startswith(prefix):
                    prefix = prefix[:-1]
        i = len(prefix)
        lines = [line[i:]+'\n' for line in lines]
        pysource = ''.join(lines)
        #
        # raises SyntaxError now rather than at embedding startup
        compile(pysource, "cffi_init", "exec")
        #
        self._embedding = pysource
|
||||||
|
|
||||||
|
def def_extern(self, *args, **kwds):
|
||||||
|
raise ValueError("ffi.def_extern() is only available on API-mode FFI "
|
||||||
|
"objects")
|
||||||
|
|
||||||
|
def list_types(self):
|
||||||
|
"""Returns the user type names known to this FFI instance.
|
||||||
|
This returns a tuple containing three lists of names:
|
||||||
|
(typedef_names, names_of_structs, names_of_unions)
|
||||||
|
"""
|
||||||
|
typedefs = []
|
||||||
|
structs = []
|
||||||
|
unions = []
|
||||||
|
for key in self._parser._declarations:
|
||||||
|
if key.startswith('typedef '):
|
||||||
|
typedefs.append(key[8:])
|
||||||
|
elif key.startswith('struct '):
|
||||||
|
structs.append(key[7:])
|
||||||
|
elif key.startswith('union '):
|
||||||
|
unions.append(key[6:])
|
||||||
|
typedefs.sort()
|
||||||
|
structs.sort()
|
||||||
|
unions.sort()
|
||||||
|
return (typedefs, structs, unions)
|
||||||
|
|
||||||
|
|
||||||
|
def _load_backend_lib(backend, name, flags):
|
||||||
|
if name is None:
|
||||||
|
if sys.platform != "win32":
|
||||||
|
return backend.load_library(None, flags)
|
||||||
|
name = "c" # Windows: load_library(None) fails, but this works
|
||||||
|
# (backward compatibility hack only)
|
||||||
|
try:
|
||||||
|
if '.' not in name and '/' not in name:
|
||||||
|
raise OSError("library not found: %r" % (name,))
|
||||||
|
return backend.load_library(name, flags)
|
||||||
|
except OSError:
|
||||||
|
import ctypes.util
|
||||||
|
path = ctypes.util.find_library(name)
|
||||||
|
if path is None:
|
||||||
|
raise # propagate the original OSError
|
||||||
|
return backend.load_library(path, flags)
|
||||||
|
|
||||||
|
def _make_ffi_library(ffi, libname, flags):
    """Build the lazy 'lib' object returned by ffi.dlopen(libname).

    Returns (library, library.__dict__).  Attributes are resolved on
    first access by per-kind accessor closures registered below.
    """
    import os      # NOTE(review): 'os' looks unused in this version -- confirm
    backend = ffi._backend
    backendlib = _load_backend_lib(backend, libname, flags)
    #
    def accessor_function(name):
        # resolve a C function and cache it on the instance dict
        key = 'function ' + name
        tp, _ = ffi._parser._declarations[key]
        BType = ffi._get_cached_btype(tp)
        try:
            value = backendlib.load_function(BType, name)
        except KeyError as e:
            raise AttributeError('%s: %s' % (name, e))
        library.__dict__[name] = value
    #
    def accessor_variable(name):
        # expose a global C variable as a read/write property on the class
        key = 'variable ' + name
        tp, _ = ffi._parser._declarations[key]
        BType = ffi._get_cached_btype(tp)
        read_variable = backendlib.read_variable
        write_variable = backendlib.write_variable
        setattr(FFILibrary, name, property(
            lambda self: read_variable(BType, name),
            lambda self, value: write_variable(BType, name, value)))
    #
    def accessor_constant(name):
        raise NotImplementedError("non-integer constant '%s' cannot be "
                                  "accessed from a dlopen() library" % (name,))
    #
    def accessor_int_constant(name):
        library.__dict__[name] = ffi._parser._int_constants[name]
    #
    accessors = {}
    # one-element list so the closures can rebind it; compared by identity
    # against ffi._cdef_version to detect later cdef() calls
    accessors_version = [False]
    #
    def update_accessors():
        # (re)build the name -> accessor map from the parser declarations
        if accessors_version[0] is ffi._cdef_version:
            return
        #
        from . import model
        for key, (tp, _) in ffi._parser._declarations.items():
            if not isinstance(tp, model.EnumType):
                tag, name = key.split(' ', 1)
                if tag == 'function':
                    accessors[name] = accessor_function
                elif tag == 'variable':
                    accessors[name] = accessor_variable
                elif tag == 'constant':
                    accessors[name] = accessor_constant
            else:
                # each enumerator becomes its own accessor; tp and i are
                # bound as defaults to avoid the late-binding-closure trap
                for i, enumname in enumerate(tp.enumerators):
                    def accessor_enum(name, tp=tp, i=i):
                        tp.check_not_partial()
                        library.__dict__[name] = tp.enumvalues[i]
                    accessors[enumname] = accessor_enum
        for name in ffi._parser._int_constants:
            accessors.setdefault(name, accessor_int_constant)
        accessors_version[0] = ffi._cdef_version
    #
    def make_accessor(name):
        with ffi._lock:
            if name in library.__dict__ or name in FFILibrary.__dict__:
                return    # added by another thread while waiting for the lock
            if name not in accessors:
                update_accessors()
                if name not in accessors:
                    raise AttributeError(name)
            accessors[name](name)
    #
    class FFILibrary(object):
        def __getattr__(self, name):
            # only reached on a miss: materialize the attribute, then retry
            make_accessor(name)
            return getattr(self, name)
        def __setattr__(self, name, value):
            try:
                property = getattr(self.__class__, name)
            except AttributeError:
                make_accessor(name)
                setattr(self, name, value)
            else:
                # variable accessors install real properties; delegate
                property.__set__(self, value)
        def __dir__(self):
            with ffi._lock:
                update_accessors()
                return accessors.keys()
    #
    if libname is not None:
        try:
            if not isinstance(libname, str):    # unicode, on Python 2
                libname = libname.encode('utf-8')
            FFILibrary.__name__ = 'FFILibrary_%s' % libname
        except UnicodeError:
            pass
    library = FFILibrary()
    return library, library.__dict__
|
||||||
|
|
||||||
|
def _builtin_function_type(func):
|
||||||
|
# a hack to make at least ffi.typeof(builtin_function) work,
|
||||||
|
# if the builtin function was obtained by 'vengine_cpy'.
|
||||||
|
import sys
|
||||||
|
try:
|
||||||
|
module = sys.modules[func.__module__]
|
||||||
|
ffi = module._cffi_original_ffi
|
||||||
|
types_of_builtin_funcs = module._cffi_types_of_builtin_funcs
|
||||||
|
tp = types_of_builtin_funcs[func]
|
||||||
|
except (KeyError, AttributeError, TypeError):
|
||||||
|
return None
|
||||||
|
else:
|
||||||
|
with ffi._lock:
|
||||||
|
return ffi._get_cached_btype(tp)
|
1097
lib/python3.4/site-packages/cffi/backend_ctypes.py
Normal file
1097
lib/python3.4/site-packages/cffi/backend_ctypes.py
Normal file
File diff suppressed because it is too large
Load diff
179
lib/python3.4/site-packages/cffi/cffi_opcode.py
Normal file
179
lib/python3.4/site-packages/cffi/cffi_opcode.py
Normal file
|
@ -0,0 +1,179 @@
|
||||||
|
|
||||||
|
class CffiOp(object):
    """A single (opcode, argument) pair of the cffi type table.

    An op of None means 'arg' is a raw C expression string.
    """

    def __init__(self, op, arg):
        self.op = op
        self.arg = arg

    def as_c_expr(self):
        """Render this op as a C '_cffi_opcode_t' expression."""
        if self.op is None:
            assert isinstance(self.arg, str)
            return '(_cffi_opcode_t)(%s)' % (self.arg,)
        return '_CFFI_OP(_CFFI_OP_%s, %s)' % (CLASS_NAME[self.op], self.arg)

    def as_python_bytes(self):
        """Render this op as four escaped bytes for the Python backend."""
        if self.op is None and self.arg.isdigit():
            value = int(self.arg)     # non-negative: '-' not in self.arg
            if value >= 2**31:
                raise OverflowError("cannot emit %r: limited to 2**31-1"
                                    % (self.arg,))
            return format_four_bytes(value)
        if isinstance(self.arg, str):
            # arbitrary C expressions cannot be encoded for Python
            from .ffiplatform import VerificationError
            raise VerificationError("cannot emit to Python: %r" % (self.arg,))
        return format_four_bytes((self.arg << 8) | self.op)

    def __str__(self):
        label = CLASS_NAME.get(self.op, self.op)
        return '(%s %s)' % (label, self.arg)
|
||||||
|
|
||||||
|
def format_four_bytes(num):
    """Encode 'num' as four big-endian escaped bytes, e.g. '\\x00\\x01\\x02\\x03'."""
    return ''.join('\\x%02X' % ((num >> shift) & 0xFF)
                   for shift in (24, 16, 8, 0))
|
||||||
|
|
||||||
|
# ____________________________________________________________
# Opcode numbers.  Note they are all odd; as_python_bytes() packs
# '(arg << 8) | op' into four bytes.

OP_PRIMITIVE       = 1
OP_POINTER         = 3
OP_ARRAY           = 5
OP_OPEN_ARRAY      = 7
OP_STRUCT_UNION    = 9
OP_ENUM            = 11
OP_FUNCTION        = 13
OP_FUNCTION_END    = 15
OP_NOOP            = 17
OP_BITFIELD        = 19
OP_TYPENAME        = 21
OP_CPYTHON_BLTN_V  = 23   # varargs
OP_CPYTHON_BLTN_N  = 25   # noargs
OP_CPYTHON_BLTN_O  = 27   # O  (i.e. a single arg)
OP_CONSTANT        = 29
OP_CONSTANT_INT    = 31
OP_GLOBAL_VAR      = 33
OP_DLOPEN_FUNC     = 35
OP_DLOPEN_CONST    = 37
OP_GLOBAL_VAR_F    = 39
OP_EXTERN_PYTHON   = 41

# Primitive type indices (arguments of OP_PRIMITIVE).

PRIM_VOID          = 0
PRIM_BOOL          = 1
PRIM_CHAR          = 2
PRIM_SCHAR         = 3
PRIM_UCHAR         = 4
PRIM_SHORT         = 5
PRIM_USHORT        = 6
PRIM_INT           = 7
PRIM_UINT          = 8
PRIM_LONG          = 9
PRIM_ULONG         = 10
PRIM_LONGLONG      = 11
PRIM_ULONGLONG     = 12
PRIM_FLOAT         = 13
PRIM_DOUBLE        = 14
PRIM_LONGDOUBLE    = 15

PRIM_WCHAR         = 16
PRIM_INT8          = 17
PRIM_UINT8         = 18
PRIM_INT16         = 19
PRIM_UINT16        = 20
PRIM_INT32         = 21
PRIM_UINT32        = 22
PRIM_INT64         = 23
PRIM_UINT64        = 24
PRIM_INTPTR        = 25
PRIM_UINTPTR       = 26
PRIM_PTRDIFF       = 27
PRIM_SIZE          = 28
PRIM_SSIZE         = 29
PRIM_INT_LEAST8    = 30
PRIM_UINT_LEAST8   = 31
PRIM_INT_LEAST16   = 32
PRIM_UINT_LEAST16  = 33
PRIM_INT_LEAST32   = 34
PRIM_UINT_LEAST32  = 35
PRIM_INT_LEAST64   = 36
PRIM_UINT_LEAST64  = 37
PRIM_INT_FAST8     = 38
PRIM_UINT_FAST8    = 39
PRIM_INT_FAST16    = 40
PRIM_UINT_FAST16   = 41
PRIM_INT_FAST32    = 42
PRIM_UINT_FAST32   = 43
PRIM_INT_FAST64    = 44
PRIM_UINT_FAST64   = 45
PRIM_INTMAX        = 46
PRIM_UINTMAX       = 47

_NUM_PRIM            = 48
# negative sentinels for primitives whose exact kind is resolved later
_UNKNOWN_PRIM        = -1
_UNKNOWN_FLOAT_PRIM  = -2
_UNKNOWN_LONG_DOUBLE = -3

_IO_FILE_STRUCT = -1

# C type name -> PRIM_* index
PRIMITIVE_TO_INDEX = {
    'char':               PRIM_CHAR,
    'short':              PRIM_SHORT,
    'int':                PRIM_INT,
    'long':               PRIM_LONG,
    'long long':          PRIM_LONGLONG,
    'signed char':        PRIM_SCHAR,
    'unsigned char':      PRIM_UCHAR,
    'unsigned short':     PRIM_USHORT,
    'unsigned int':       PRIM_UINT,
    'unsigned long':      PRIM_ULONG,
    'unsigned long long': PRIM_ULONGLONG,
    'float':              PRIM_FLOAT,
    'double':             PRIM_DOUBLE,
    'long double':        PRIM_LONGDOUBLE,
    '_Bool':              PRIM_BOOL,
    'wchar_t':            PRIM_WCHAR,
    'int8_t':             PRIM_INT8,
    'uint8_t':            PRIM_UINT8,
    'int16_t':            PRIM_INT16,
    'uint16_t':           PRIM_UINT16,
    'int32_t':            PRIM_INT32,
    'uint32_t':           PRIM_UINT32,
    'int64_t':            PRIM_INT64,
    'uint64_t':           PRIM_UINT64,
    'intptr_t':           PRIM_INTPTR,
    'uintptr_t':          PRIM_UINTPTR,
    'ptrdiff_t':          PRIM_PTRDIFF,
    'size_t':             PRIM_SIZE,
    'ssize_t':            PRIM_SSIZE,
    'int_least8_t':       PRIM_INT_LEAST8,
    'uint_least8_t':      PRIM_UINT_LEAST8,
    'int_least16_t':      PRIM_INT_LEAST16,
    'uint_least16_t':     PRIM_UINT_LEAST16,
    'int_least32_t':      PRIM_INT_LEAST32,
    'uint_least32_t':     PRIM_UINT_LEAST32,
    'int_least64_t':      PRIM_INT_LEAST64,
    'uint_least64_t':     PRIM_UINT_LEAST64,
    'int_fast8_t':        PRIM_INT_FAST8,
    'uint_fast8_t':       PRIM_UINT_FAST8,
    'int_fast16_t':       PRIM_INT_FAST16,
    'uint_fast16_t':      PRIM_UINT_FAST16,
    'int_fast32_t':       PRIM_INT_FAST32,
    'uint_fast32_t':      PRIM_UINT_FAST32,
    'int_fast64_t':       PRIM_INT_FAST64,
    'uint_fast64_t':      PRIM_UINT_FAST64,
    'intmax_t':           PRIM_INTMAX,
    'uintmax_t':          PRIM_UINTMAX,
    }

# struct/union flag bits
F_UNION         = 0x01
F_CHECK_FIELDS  = 0x02
F_PACKED        = 0x04
F_EXTERNAL      = 0x08
F_OPAQUE        = 0x10

# same flags keyed by their C macro name, e.g. '_CFFI_F_UNION'
G_FLAGS = dict([('_CFFI_' + _key, globals()[_key])
                for _key in ['F_UNION', 'F_CHECK_FIELDS', 'F_PACKED',
                             'F_EXTERNAL', 'F_OPAQUE']])

# opcode number -> short name, e.g. 1 -> 'PRIMITIVE'
CLASS_NAME = {}
for _name, _value in list(globals().items()):
    if _name.startswith('OP_') and isinstance(_value, int):
        CLASS_NAME[_value] = _name[3:]
|
79
lib/python3.4/site-packages/cffi/commontypes.py
Normal file
79
lib/python3.4/site-packages/cffi/commontypes.py
Normal file
|
@ -0,0 +1,79 @@
|
||||||
|
import sys
|
||||||
|
from . import api, model
|
||||||
|
|
||||||
|
|
||||||
|
# Mapping of "common" C type names to either a cdecl string or an
# already-built model.BaseType; consulted by resolve_common_type().
COMMON_TYPES = {}

try:
    # fetch "bool" and all simple Windows types
    from _cffi_backend import _get_common_types
    _get_common_types(COMMON_TYPES)
except ImportError:
    pass

COMMON_TYPES['FILE'] = model.unknown_type('FILE', '_IO_FILE')
COMMON_TYPES['bool'] = '_Bool'    # in case we got ImportError above

# every primitive whose name already ends in '_t' maps to itself
for _type in model.PrimitiveType.ALL_PRIMITIVE_TYPES:
    if _type.endswith('_t'):
        COMMON_TYPES[_type] = _type
del _type

# memoization cache for resolve_common_type(): name -> (result, quals)
_CACHE = {}
|
||||||
|
|
||||||
|
def resolve_common_type(parser, commontype):
    """Resolve 'commontype' to a (model type, qualifiers) pair, memoized.

    Raises api.FFIError for Windows-unicode-only types and for names
    that are not supported at all.
    """
    try:
        return _CACHE[commontype]
    except KeyError:
        cdecl = COMMON_TYPES.get(commontype, commontype)
        if not isinstance(cdecl, str):
            result, quals = cdecl, 0    # cdecl is already a BaseType
        elif cdecl in model.PrimitiveType.ALL_PRIMITIVE_TYPES:
            result, quals = model.PrimitiveType(cdecl), 0
        elif cdecl == 'set-unicode-needed':
            # placeholder installed for TCHAR-like types; see win_common_types()
            raise api.FFIError("The Windows type %r is only available after "
                               "you call ffi.set_unicode()" % (commontype,))
        else:
            if commontype == cdecl:
                # no alias found and not a primitive: truly unknown
                raise api.FFIError(
                    "Unsupported type: %r.  Please look at "
        "http://cffi.readthedocs.io/en/latest/cdef.html#ffi-cdef-limitations "
                    "and file an issue if you think this type should really "
                    "be supported." % (commontype,))
            result, quals = parser.parse_type_and_quals(cdecl)  # recursive

        assert isinstance(result, model.BaseTypeByIdentity)
        _CACHE[commontype] = result, quals
        return result, quals
|
||||||
|
|
||||||
|
|
||||||
|
# ____________________________________________________________
|
||||||
|
# extra types for Windows (most of them are in commontypes.c)
|
||||||
|
|
||||||
|
|
||||||
|
def win_common_types():
    """Extra common types for Windows (most others live in commontypes.c).

    TCHAR-family names map to the 'set-unicode-needed' placeholder, which
    resolve_common_type() turns into an explanatory FFIError until
    ffi.set_unicode() is called.
    """
    return {
        "UNICODE_STRING": model.StructType(
            "_UNICODE_STRING",
            ["Length",
             "MaximumLength",
             "Buffer"],
            [model.PrimitiveType("unsigned short"),
             model.PrimitiveType("unsigned short"),
             model.PointerType(model.PrimitiveType("wchar_t"))],
            [-1, -1, -1]),
        "PUNICODE_STRING": "UNICODE_STRING *",
        "PCUNICODE_STRING": "const UNICODE_STRING *",

        "TBYTE": "set-unicode-needed",
        "TCHAR": "set-unicode-needed",
        "LPCTSTR": "set-unicode-needed",
        "PCTSTR": "set-unicode-needed",
        "LPTSTR": "set-unicode-needed",
        "PTSTR": "set-unicode-needed",
        "PTBYTE": "set-unicode-needed",
        "PTCHAR": "set-unicode-needed",
        }

if sys.platform == 'win32':
    COMMON_TYPES.update(win_common_types())
|
849
lib/python3.4/site-packages/cffi/cparser.py
Normal file
849
lib/python3.4/site-packages/cffi/cparser.py
Normal file
|
@ -0,0 +1,849 @@
|
||||||
|
from . import api, model
|
||||||
|
from .commontypes import COMMON_TYPES, resolve_common_type
|
||||||
|
try:
|
||||||
|
from . import _pycparser as pycparser
|
||||||
|
except ImportError:
|
||||||
|
import pycparser
|
||||||
|
import weakref, re, sys
|
||||||
|
|
||||||
|
try:
    # pycparser is not thread-safe: _parse() serializes on this lock
    # (Python 2 spells the module 'thread', Python 3 '_thread')
    if sys.version_info < (3,):
        import thread as _thread
    else:
        import _thread
    lock = _thread.allocate_lock()
except ImportError:
    lock = None

# strip C comments (block and line)
_r_comment = re.compile(r"/\*.*?\*/|//([^\n\\]|\\.)*?$",
                        re.DOTALL | re.MULTILINE)
# match '#define NAME value' lines, honoring backslash continuations
_r_define  = re.compile(r"^\s*#\s*define\s+([A-Za-z_][A-Za-z_0-9]*)"
                        r"\b((?:[^\n\\]|\\.)*?)$",
                        re.DOTALL | re.MULTILINE)
# '= ...' or trailing '...' inside an enum body
_r_partial_enum = re.compile(r"=\s*\.\.\.\s*[,}]|\.\.\.\s*\}")
_r_enum_dotdotdot = re.compile(r"__dotdotdot\d+__$")
# '[...]' array declarators
_r_partial_array = re.compile(r"\[\s*\.\.\.\s*\]")
_r_words = re.compile(r"\w+|\S")
_parser_cache = None
# C integer literals, with optional sign and l/u suffixes
_r_int_literal = re.compile(r"-?0?x?[0-9a-f]+[lu]*$", re.IGNORECASE)
# calling-convention markers rewritten by _preprocess()
_r_stdcall1 = re.compile(r"\b(__stdcall|WINAPI)\b")
_r_stdcall2 = re.compile(r"[(]\s*(__stdcall|WINAPI)\b")
_r_cdecl = re.compile(r"\b__cdecl\b")
# 'extern "Python"' / 'extern "Python+C"' blocks
_r_extern_python = re.compile(r'\bextern\s*"'
                              r'(Python|Python\s*\+\s*C|C\s*\+\s*Python)"\s*.')
_r_star_const_space = re.compile(       # matches "* const "
    r"[*]\s*((const|volatile|restrict)\b\s*)+")
|
||||||
|
|
||||||
|
def _get_parser():
    """Return the shared pycparser.CParser instance, creating it lazily."""
    global _parser_cache
    if _parser_cache is not None:
        return _parser_cache
    _parser_cache = pycparser.CParser()
    return _parser_cache
|
||||||
|
|
||||||
|
def _workaround_for_old_pycparser(csource):
    """Insert parentheses so old pycparser parses 'char*const***' correctly.

    Workaround for a pycparser issue (fixed between pycparser 2.10 and
    2.14): "char*const***" gives us a wrong syntax tree, the same as
    for "char***(*const)".  This means we can't tell the difference
    afterwards.  But "char(*const(***))" gives us the right syntax
    tree.  The issue only occurs if there are several stars in
    sequence with no parenthesis inbetween, just possibly qualifiers.
    Attempt to fix it by adding some parentheses in the source: each
    time we see "* const" or "* const *", we add an opening
    parenthesis before each star---the hard part is figuring out where
    to close them.
    """
    parts = []
    while True:
        match = _r_star_const_space.search(csource)
        if not match:
            break
        #print repr(''.join(parts)+csource), '=>',
        parts.append(csource[:match.start()])
        parts.append('('); closing = ')'
        parts.append(match.group())   # e.g. "* const "
        endpos = match.end()
        if csource.startswith('*', endpos):
            # a second star right after: open one more paren
            parts.append('('); closing += ')'
        # scan forward to the end of the declarator: stop at the
        # matching ')' of the enclosing parens, or at ',', ';' or '='
        # at nesting level 0, and insert the closing paren(s) there
        level = 0
        i = endpos
        while i < len(csource):
            c = csource[i]
            if c == '(':
                level += 1
            elif c == ')':
                if level == 0:
                    break
                level -= 1
            elif c in ',;=':
                if level == 0:
                    break
            i += 1
        csource = csource[endpos:i] + closing + csource[i:]
        #print repr(''.join(parts)+csource)
    parts.append(csource)
    return ''.join(parts)
|
||||||
|
|
||||||
|
def _preprocess_extern_python(csource):
    """Rewrite 'extern "Python"' sections into start/stop marker decls.

    input: `extern "Python" int foo(int);` or
           `extern "Python" { int foo(int); }`
    output:
        void __cffi_extern_python_start;
        int foo(int);
        void __cffi_extern_python_stop;

    input: `extern "Python+C" int foo(int);`
    output:
        void __cffi_extern_python_plus_c_start;
        int foo(int);
        void __cffi_extern_python_stop;
    """
    parts = []
    while True:
        match = _r_extern_python.search(csource)
        if not match:
            break
        endpos = match.end() - 1    # index of the char after the '"...emacs"'
        #print
        #print ''.join(parts)+csource
        #print '=>'
        parts.append(csource[:match.start()])
        if 'C' in match.group(1):
            parts.append('void __cffi_extern_python_plus_c_start; ')
        else:
            parts.append('void __cffi_extern_python_start; ')
        if csource[endpos] == '{':
            # grouping variant
            closing = csource.find('}', endpos)
            if closing < 0:
                raise api.CDefError("'extern \"Python\" {': no '}' found")
            if csource.find('{', endpos + 1, closing) >= 0:
                raise NotImplementedError("cannot use { } inside a block "
                                          "'extern \"Python\" { ... }'")
            parts.append(csource[endpos+1:closing])
            csource = csource[closing+1:]
        else:
            # non-grouping variant
            semicolon = csource.find(';', endpos)
            if semicolon < 0:
                raise api.CDefError("'extern \"Python\": no ';' found")
            parts.append(csource[endpos:semicolon+1])
            csource = csource[semicolon+1:]
        parts.append(' void __cffi_extern_python_stop;')
        #print ''.join(parts)+csource
        #print
    parts.append(csource)
    return ''.join(parts)
|
||||||
|
|
||||||
|
def _preprocess(csource):
    """Normalize a cdef() source string before handing it to pycparser.

    Returns (csource, macros) where macros maps '#define' names to their
    raw value strings.
    """
    # Remove comments.  NOTE: this only work because the cdef() section
    # should not contain any string literal!
    csource = _r_comment.sub(' ', csource)
    # Remove the "#define FOO x" lines
    macros = {}
    for match in _r_define.finditer(csource):
        macroname, macrovalue = match.groups()
        macrovalue = macrovalue.replace('\\\n', '').strip()
        macros[macroname] = macrovalue
    csource = _r_define.sub('', csource)
    #
    if pycparser.__version__ < '2.14':
        csource = _workaround_for_old_pycparser(csource)
    #
    # BIG HACK: replace WINAPI or __stdcall with "volatile const".
    # It doesn't make sense for the return type of a function to be
    # "volatile volatile const", so we abuse it to detect __stdcall...
    # Hack number 2 is that "int(volatile *fptr)();" is not valid C
    # syntax, so we place the "volatile" before the opening parenthesis.
    csource = _r_stdcall2.sub(' volatile volatile const(', csource)
    csource = _r_stdcall1.sub(' volatile volatile const ', csource)
    csource = _r_cdecl.sub(' ', csource)
    #
    # Replace `extern "Python"` with start/end markers
    csource = _preprocess_extern_python(csource)
    #
    # Replace "[...]" with "[__dotdotdotarray__]"
    csource = _r_partial_array.sub('[__dotdotdotarray__]', csource)
    #
    # Replace "...}" with "__dotdotdotNUM__}".  This construction should
    # occur only at the end of enums; at the end of structs we have "...;}"
    # and at the end of vararg functions "...);".  Also replace "=...[,}]"
    # with ",__dotdotdotNUM__[,}]": this occurs in the enums too, when
    # giving an unknown value.
    matches = list(_r_partial_enum.finditer(csource))
    # iterate in reverse so earlier match positions stay valid while we
    # splice replacement text into 'csource'
    for number, match in enumerate(reversed(matches)):
        p = match.start()
        if csource[p] == '=':
            p2 = csource.find('...', p, match.end())
            assert p2 > p
            csource = '%s,__dotdotdot%d__ %s' % (csource[:p], number,
                                                 csource[p2+3:])
        else:
            assert csource[p:p+3] == '...'
            csource = '%s __dotdotdot%d__ %s' % (csource[:p], number,
                                                 csource[p+3:])
    # Replace all remaining "..." with the same name, "__dotdotdot__",
    # which is declared with a typedef for the purpose of C parsing.
    return csource.replace('...', ' __dotdotdot__ '), macros
|
||||||
|
|
||||||
|
def _common_type_names(csource):
    """Return the set of COMMON_TYPES names that appear used in 'csource'.

    Look in the source for what looks like usages of types from the
    list of common types.  A "usage" is approximated here as the
    appearance of the word, minus a "definition" of the type, which
    is the last word in a "typedef" statement.  Approximative only
    but should be fine for all the common types.
    """
    look_for_words = set(COMMON_TYPES)
    look_for_words.add(';')
    look_for_words.add(',')
    look_for_words.add('(')
    look_for_words.add(')')
    look_for_words.add('typedef')
    words_used = set()
    is_typedef = False
    paren = 0
    previous_word = ''
    for word in _r_words.findall(csource):
        if word in look_for_words:
            if word == ';':
                if is_typedef:
                    # the word before ';' is the name being defined,
                    # not a usage: drop it again
                    words_used.discard(previous_word)
                    look_for_words.discard(previous_word)
                    is_typedef = False
            elif word == 'typedef':
                is_typedef = True
                paren = 0
            elif word == '(':
                paren += 1
            elif word == ')':
                paren -= 1
            elif word == ',':
                if is_typedef and paren == 0:
                    # comma-separated typedef names are definitions too
                    words_used.discard(previous_word)
                    look_for_words.discard(previous_word)
            else:   # word in COMMON_TYPES
                words_used.add(word)
        previous_word = word
    return words_used
|
||||||
|
|
||||||
|
|
||||||
|
class Parser(object):
|
||||||
|
|
||||||
|
    def __init__(self):
        # 'kind name' keys (e.g. 'typedef foo', 'struct bar') -> values;
        # presumably (type, quals) pairs -- see the unpacking in
        # _internal_parse() and in api.py's accessors
        self._declarations = {}
        # keys of declarations that came from ffi.include()
        self._included_declarations = set()
        # counter used to name anonymous structs/unions
        self._anonymous_counter = 0
        # pycparser struct node -> already-built model type
        self._structnode2type = weakref.WeakKeyDictionary()
        # per-parse() options: 'override', 'packed', 'dllexport'
        self._options = {}
        # integer '#define' constants: name -> int value
        self._int_constants = {}
        self._recomplete = []
        self._uses_new_feature = None
|
||||||
|
|
||||||
|
    def _parse(self, csource):
        """Run pycparser over the preprocessed source.

        Returns (ast, macros, csource); 'csource' is the final text that
        was parsed, kept so error reporting can quote the buggy line.
        """
        csource, macros = _preprocess(csource)
        # XXX: for more efficiency we would need to poke into the
        # internals of CParser...  the following registers the
        # typedefs, because their presence or absence influences the
        # parsing itself (but what they are typedef'ed to plays no role)
        ctn = _common_type_names(csource)
        typenames = []
        for name in sorted(self._declarations):
            if name.startswith('typedef '):
                name = name[8:]
                typenames.append(name)
                ctn.discard(name)
        typenames += sorted(ctn)
        #
        # prepend dummy 'typedef int NAME;' lines so pycparser treats
        # these names as types
        csourcelines = ['typedef int %s;' % typename for typename in typenames]
        csourcelines.append('typedef int __dotdotdot__;')
        csourcelines.append(csource)
        csource = '\n'.join(csourcelines)
        if lock is not None:
            lock.acquire()     # pycparser is not thread-safe...
        try:
            ast = _get_parser().parse(csource)
        except pycparser.c_parser.ParseError as e:
            self.convert_pycparser_error(e, csource)
        finally:
            if lock is not None:
                lock.release()
        # csource will be used to find buggy source text
        return ast, macros, csource
|
||||||
|
|
||||||
|
def _convert_pycparser_error(self, e, csource):
|
||||||
|
# xxx look for ":NUM:" at the start of str(e) and try to interpret
|
||||||
|
# it as a line number
|
||||||
|
line = None
|
||||||
|
msg = str(e)
|
||||||
|
if msg.startswith(':') and ':' in msg[1:]:
|
||||||
|
linenum = msg[1:msg.find(':',1)]
|
||||||
|
if linenum.isdigit():
|
||||||
|
linenum = int(linenum, 10)
|
||||||
|
csourcelines = csource.splitlines()
|
||||||
|
if 1 <= linenum <= len(csourcelines):
|
||||||
|
line = csourcelines[linenum-1]
|
||||||
|
return line
|
||||||
|
|
||||||
|
def convert_pycparser_error(self, e, csource):
|
||||||
|
line = self._convert_pycparser_error(e, csource)
|
||||||
|
|
||||||
|
msg = str(e)
|
||||||
|
if line:
|
||||||
|
msg = 'cannot parse "%s"\n%s' % (line.strip(), msg)
|
||||||
|
else:
|
||||||
|
msg = 'parse error\n%s' % (msg,)
|
||||||
|
raise api.CDefError(msg)
|
||||||
|
|
||||||
|
def parse(self, csource, override=False, packed=False, dllexport=False):
|
||||||
|
prev_options = self._options
|
||||||
|
try:
|
||||||
|
self._options = {'override': override,
|
||||||
|
'packed': packed,
|
||||||
|
'dllexport': dllexport}
|
||||||
|
self._internal_parse(csource)
|
||||||
|
finally:
|
||||||
|
self._options = prev_options
|
||||||
|
|
||||||
|
    def _internal_parse(self, csource):
        """Walk the pycparser AST and register every declaration found."""
        ast, macros, csource = self._parse(csource)
        # add the macros
        self._process_macros(macros)
        # find the first "__dotdotdot__" and use that as a separator
        # between the repeated typedefs and the real csource
        iterator = iter(ast.ext)
        for decl in iterator:
            if decl.name == '__dotdotdot__':
                break
        #
        try:
            self._inside_extern_python = '__cffi_extern_python_stop'
            for decl in iterator:
                if isinstance(decl, pycparser.c_ast.Decl):
                    self._parse_decl(decl)
                elif isinstance(decl, pycparser.c_ast.Typedef):
                    if not decl.name:
                        raise api.CDefError("typedef does not declare any name",
                                            decl)
                    quals = 0
                    # 'typedef ... foo;' -> an unknown opaque type
                    if (isinstance(decl.type.type, pycparser.c_ast.IdentifierType)
                            and decl.type.type.names[-1] == '__dotdotdot__'):
                        realtype = self._get_unknown_type(decl)
                    # 'typedef ... *foo;' -> an unknown pointer type
                    elif (isinstance(decl.type, pycparser.c_ast.PtrDecl) and
                          isinstance(decl.type.type, pycparser.c_ast.TypeDecl) and
                          isinstance(decl.type.type.type,
                                     pycparser.c_ast.IdentifierType) and
                          decl.type.type.type.names == ['__dotdotdot__']):
                        realtype = model.unknown_ptr_type(decl.name)
                    else:
                        realtype, quals = self._get_type_and_quals(
                            decl.type, name=decl.name)
                    self._declare('typedef ' + decl.name, realtype, quals=quals)
                else:
                    raise api.CDefError("unrecognized construct", decl)
        except api.FFIError as e:
            # enrich the error with the offending source line, if findable
            msg = self._convert_pycparser_error(e, csource)
            if msg:
                e.args = (e.args[0] + "\n    *** Err: %s" % msg,)
            raise
|
||||||
|
|
||||||
|
def _add_constants(self, key, val):
|
||||||
|
if key in self._int_constants:
|
||||||
|
if self._int_constants[key] == val:
|
||||||
|
return # ignore identical double declarations
|
||||||
|
raise api.FFIError(
|
||||||
|
"multiple declarations of constant: %s" % (key,))
|
||||||
|
self._int_constants[key] = val
|
||||||
|
|
||||||
|
def _add_integer_constant(self, name, int_str):
|
||||||
|
int_str = int_str.lower().rstrip("ul")
|
||||||
|
neg = int_str.startswith('-')
|
||||||
|
if neg:
|
||||||
|
int_str = int_str[1:]
|
||||||
|
# "010" is not valid oct in py3
|
||||||
|
if (int_str.startswith("0") and int_str != '0'
|
||||||
|
and not int_str.startswith("0x")):
|
||||||
|
int_str = "0o" + int_str[1:]
|
||||||
|
pyvalue = int(int_str, 0)
|
||||||
|
if neg:
|
||||||
|
pyvalue = -pyvalue
|
||||||
|
self._add_constants(name, pyvalue)
|
||||||
|
self._declare('macro ' + name, pyvalue)
|
||||||
|
|
||||||
|
def _process_macros(self, macros):
    """Register every '#define' collected during preprocessing.

    Only two forms are supported: '#define NAME ...' (dot-dot-dot) and
    '#define NAME <integer literal>'; anything else raises CDefError.
    """
    for key, value in macros.items():
        value = value.strip()
        if _r_int_literal.match(value):
            # an integer literal: becomes a real constant
            self._add_integer_constant(key, value)
        elif value == '...':
            # value to be filled in later by the verifier
            self._declare('macro ' + key, value)
        else:
            raise api.CDefError(
                'only supports one of the following syntax:\n'
                ' #define %s ... (literally dot-dot-dot)\n'
                ' #define %s NUMBER (with NUMBER an integer'
                ' constant, decimal/hex/octal)\n'
                'got:\n'
                ' #define %s %s'
                % (key, key, key, value))
|
||||||
|
|
||||||
|
def _declare_function(self, tp, quals, decl):
|
||||||
|
tp = self._get_type_pointer(tp, quals)
|
||||||
|
if self._options.get('dllexport'):
|
||||||
|
tag = 'dllexport_python '
|
||||||
|
elif self._inside_extern_python == '__cffi_extern_python_start':
|
||||||
|
tag = 'extern_python '
|
||||||
|
elif self._inside_extern_python == '__cffi_extern_python_plus_c_start':
|
||||||
|
tag = 'extern_python_plus_c '
|
||||||
|
else:
|
||||||
|
tag = 'function '
|
||||||
|
self._declare(tag + decl.name, tp)
|
||||||
|
|
||||||
|
def _parse_decl(self, decl):
    """Process one top-level pycparser Decl node: a function prototype,
    a struct/union/enum (possibly anonymous), an integer constant, or a
    variable declaration.  Dispatches to the appropriate _declare_* path.
    """
    node = decl.type
    if isinstance(node, pycparser.c_ast.FuncDecl):
        tp, quals = self._get_type_and_quals(node, name=decl.name)
        assert isinstance(tp, model.RawFunctionType)
        self._declare_function(tp, quals, decl)
    else:
        # first register any struct/union/enum named in the declaration
        if isinstance(node, pycparser.c_ast.Struct):
            self._get_struct_union_enum_type('struct', node)
        elif isinstance(node, pycparser.c_ast.Union):
            self._get_struct_union_enum_type('union', node)
        elif isinstance(node, pycparser.c_ast.Enum):
            self._get_struct_union_enum_type('enum', node)
        elif not decl.name:
            raise api.CDefError("construct does not declare any variable",
                                decl)
        #
        if decl.name:
            tp, quals = self._get_type_and_quals(node,
                                                 partial_length_ok=True)
            if tp.is_raw_function:
                # e.g. a typedef'ed function type used as a declaration
                self._declare_function(tp, quals, decl)
            elif (tp.is_integer_type() and
                    hasattr(decl, 'init') and
                    hasattr(decl.init, 'value') and
                    _r_int_literal.match(decl.init.value)):
                # 'int X = 42;' -> integer constant
                self._add_integer_constant(decl.name, decl.init.value)
            elif (tp.is_integer_type() and
                    isinstance(decl.init, pycparser.c_ast.UnaryOp) and
                    decl.init.op == '-' and
                    hasattr(decl.init.expr, 'value') and
                    _r_int_literal.match(decl.init.expr.value)):
                # 'int X = -42;' -> negative integer constant
                self._add_integer_constant(decl.name,
                                           '-' + decl.init.expr.value)
            elif (tp is model.void_type and
                  decl.name.startswith('__cffi_extern_python_')):
                # hack: `extern "Python"` in the C source is replaced
                # with "void __cffi_extern_python_start;" and
                # "void __cffi_extern_python_stop;"
                self._inside_extern_python = decl.name
            else:
                if self._inside_extern_python != '__cffi_extern_python_stop':
                    raise api.CDefError(
                        "cannot declare constants or "
                        "variables with 'extern \"Python\"'")
                # const non-array declarations are 'constant', the rest
                # are 'variable'
                if (quals & model.Q_CONST) and not tp.is_array_type:
                    self._declare('constant ' + decl.name, tp, quals=quals)
                else:
                    self._declare('variable ' + decl.name, tp, quals=quals)
|
||||||
|
|
||||||
|
def parse_type(self, cdecl):
    """Parse the C type string *cdecl*, discarding its qualifiers."""
    tp, _ = self.parse_type_and_quals(cdecl)
    return tp
|
||||||
|
|
||||||
|
def parse_type_and_quals(self, cdecl):
    """Parse the C type string *cdecl* and return (model type, quals).

    Implemented by wrapping *cdecl* as the single parameter of a dummy
    function prototype and extracting its parsed type back from the AST.
    """
    ast, macros = self._parse('void __dummy(\n%s\n);' % cdecl)[:2]
    assert not macros
    exprnode = ast.ext[-1].type.args.params[0]
    if isinstance(exprnode, pycparser.c_ast.ID):
        # a bare identifier: the type name was not recognized
        raise api.CDefError("unknown identifier '%s'" % (exprnode.name,))
    return self._get_type_and_quals(exprnode.type)
|
||||||
|
|
||||||
|
def _declare(self, name, obj, included=False, quals=0):
|
||||||
|
if name in self._declarations:
|
||||||
|
prevobj, prevquals = self._declarations[name]
|
||||||
|
if prevobj is obj and prevquals == quals:
|
||||||
|
return
|
||||||
|
if not self._options.get('override'):
|
||||||
|
raise api.FFIError(
|
||||||
|
"multiple declarations of %s (for interactive usage, "
|
||||||
|
"try cdef(xx, override=True))" % (name,))
|
||||||
|
assert '__dotdotdot__' not in name.split()
|
||||||
|
self._declarations[name] = (obj, quals)
|
||||||
|
if included:
|
||||||
|
self._included_declarations.add(obj)
|
||||||
|
|
||||||
|
def _extract_quals(self, type):
    """Map the C qualifier keywords on a TypeDecl/PtrDecl node to the
    corresponding Q_* bit flags; other node kinds yield 0."""
    quals = 0
    if isinstance(type, (pycparser.c_ast.TypeDecl,
                         pycparser.c_ast.PtrDecl)):
        for keyword, flag in (('const', model.Q_CONST),
                              ('volatile', model.Q_VOLATILE),
                              ('restrict', model.Q_RESTRICT)):
            if keyword in type.quals:
                quals |= flag
    return quals
|
||||||
|
|
||||||
|
def _get_type_pointer(self, type, quals, declname=None):
    """Build the pointer-to-*type* model type.

    Raw function types become function pointers; a pointer to a still-
    anonymous struct/union/enum ('$<digits>' name) gets a NamedPointerType
    carrying *declname* so it can later be given a readable name.
    """
    if isinstance(type, model.RawFunctionType):
        return type.as_function_pointer()
    if (isinstance(type, model.StructOrUnionOrEnum) and
            type.name.startswith('$') and type.name[1:].isdigit() and
            type.forcename is None and declname is not None):
        return model.NamedPointerType(type, declname, quals)
    return model.PointerType(type, quals)
|
||||||
|
|
||||||
|
def _get_type_and_quals(self, typenode, name=None, partial_length_ok=False):
    """Convert a pycparser type node into (model type, quals bit flags).

    Handles, in order: already-parsed typedefs, array declarators,
    pointer declarators, primitive/struct/union/enum TypeDecls, function
    declarators, and nested anonymous structs/unions.  *name* is used to
    name otherwise-anonymous types; *partial_length_ok* allows '...'
    array lengths.
    """
    # first, dereference typedefs, if we have it already parsed, we're good
    if (isinstance(typenode, pycparser.c_ast.TypeDecl) and
        isinstance(typenode.type, pycparser.c_ast.IdentifierType) and
        len(typenode.type.names) == 1 and
        ('typedef ' + typenode.type.names[0]) in self._declarations):
        tp, quals = self._declarations['typedef ' + typenode.type.names[0]]
        quals |= self._extract_quals(typenode)
        return tp, quals
    #
    if isinstance(typenode, pycparser.c_ast.ArrayDecl):
        # array type
        if typenode.dim is None:
            length = None      # unspecified length, e.g. 'int a[]'
        else:
            length = self._parse_constant(
                typenode.dim, partial_length_ok=partial_length_ok)
        tp, quals = self._get_type_and_quals(typenode.type,
                            partial_length_ok=partial_length_ok)
        return model.ArrayType(tp, length), quals
    #
    if isinstance(typenode, pycparser.c_ast.PtrDecl):
        # pointer type
        itemtype, itemquals = self._get_type_and_quals(typenode.type)
        tp = self._get_type_pointer(itemtype, itemquals, declname=name)
        quals = self._extract_quals(typenode)
        return tp, quals
    #
    if isinstance(typenode, pycparser.c_ast.TypeDecl):
        quals = self._extract_quals(typenode)
        type = typenode.type
        if isinstance(type, pycparser.c_ast.IdentifierType):
            # assume a primitive type.  get it from .names, but reduce
            # synonyms to a single chosen combination
            names = list(type.names)
            if names != ['signed', 'char']:    # keep this unmodified
                # count the 'short'/'long'/'signed'/'unsigned' prefixes
                prefixes = {}
                while names:
                    name = names[0]
                    if name in ('short', 'long', 'signed', 'unsigned'):
                        prefixes[name] = prefixes.get(name, 0) + 1
                        del names[0]
                    else:
                        break
                # ignore the 'signed' prefix below, and reorder the others
                newnames = []
                for prefix in ('unsigned', 'short', 'long'):
                    for i in range(prefixes.get(prefix, 0)):
                        newnames.append(prefix)
                if not names:
                    names = ['int']    # implicitly
                if names == ['int']:   # but kill it if 'short' or 'long'
                    if 'short' in prefixes or 'long' in prefixes:
                        names = []
                names = newnames + names
            ident = ' '.join(names)
            if ident == 'void':
                return model.void_type, quals
            if ident == '__dotdotdot__':
                raise api.FFIError(':%d: bad usage of "..."' %
                                   typenode.coord.line)
            tp0, quals0 = resolve_common_type(self, ident)
            return tp0, (quals | quals0)
        #
        if isinstance(type, pycparser.c_ast.Struct):
            # 'struct foobar'
            tp = self._get_struct_union_enum_type('struct', type, name)
            return tp, quals
        #
        if isinstance(type, pycparser.c_ast.Union):
            # 'union foobar'
            tp = self._get_struct_union_enum_type('union', type, name)
            return tp, quals
        #
        if isinstance(type, pycparser.c_ast.Enum):
            # 'enum foobar'
            tp = self._get_struct_union_enum_type('enum', type, name)
            return tp, quals
    #
    if isinstance(typenode, pycparser.c_ast.FuncDecl):
        # a function type
        return self._parse_function_type(typenode, name), 0
    #
    # nested anonymous structs or unions end up here
    if isinstance(typenode, pycparser.c_ast.Struct):
        return self._get_struct_union_enum_type('struct', typenode, name,
                                                nested=True), 0
    if isinstance(typenode, pycparser.c_ast.Union):
        return self._get_struct_union_enum_type('union', typenode, name,
                                                nested=True), 0
    #
    raise api.FFIError(":%d: bad or unsupported type declaration" %
                       typenode.coord.line)
|
||||||
|
|
||||||
|
def _parse_function_type(self, typenode, funcname=None):
    """Convert a pycparser FuncDecl node into a model.RawFunctionType.

    Detects a trailing '...' (varargs), collapses '(void)' to an empty
    argument list, applies array/function argument decay, and recognizes
    the __stdcall marker (see HACK below).
    """
    params = list(getattr(typenode.args, 'params', []))
    for i, arg in enumerate(params):
        if not hasattr(arg, 'type'):
            raise api.CDefError("%s arg %d: unknown type '%s'"
                " (if you meant to use the old C syntax of giving"
                " untyped arguments, it is not supported)"
                % (funcname or 'in expression', i + 1,
                   getattr(arg, 'name', '?')))
    # a last parameter '__dotdotdot__' marks a varargs function
    ellipsis = (
        len(params) > 0 and
        isinstance(params[-1].type, pycparser.c_ast.TypeDecl) and
        isinstance(params[-1].type.type,
                   pycparser.c_ast.IdentifierType) and
        params[-1].type.type.names == ['__dotdotdot__'])
    if ellipsis:
        params.pop()
        if not params:
            raise api.CDefError(
                "%s: a function with only '(...)' as argument"
                " is not correct C" % (funcname or 'in expression'))
    args = [self._as_func_arg(*self._get_type_and_quals(argdeclnode.type))
            for argdeclnode in params]
    if not ellipsis and args == [model.void_type]:
        args = []      # 'f(void)' really means no arguments
    result, quals = self._get_type_and_quals(typenode.type)
    # the 'quals' on the result type are ignored.  HACK: we abuse them
    # to detect __stdcall functions: we textually replace "__stdcall"
    # with "volatile volatile const" above.
    abi = None
    if hasattr(typenode.type, 'quals'): # else, probable syntax error anyway
        if typenode.type.quals[-3:] == ['volatile', 'volatile', 'const']:
            abi = '__stdcall'
    return model.RawFunctionType(tuple(args), result, ellipsis, abi)
|
||||||
|
|
||||||
|
def _as_func_arg(self, type, quals):
    """Apply C parameter decay: arrays become pointers to their item
    type, function types become function pointers; anything else is
    returned unchanged."""
    if isinstance(type, model.ArrayType):
        return model.PointerType(type.item, quals)
    if isinstance(type, model.RawFunctionType):
        return type.as_function_pointer()
    return type
|
||||||
|
|
||||||
|
def _get_struct_union_enum_type(self, kind, type, name=None, nested=False):
    """Return the model type for a 'struct/union/enum foo' AST node.

    Creates and registers the type if needed, and fills in its fields
    when the node carries a '{ ... }' definition.  *name* forces a more
    readable name onto anonymous types; *nested* is True for anonymous
    structs/unions appearing inside another struct.

    BUG FIX: the '__dotdotdot__' enum check previously did
    'raise CDefError(...)' — a NameError, since only 'api.CDefError'
    is in scope in this module; it now raises api.CDefError.
    Also renamed the field-loop variable so it no longer shadows the
    'type' parameter.
    """
    # First, a level of caching on the exact 'type' node of the AST.
    # This is obscure, but needed because pycparser "unrolls" declarations
    # such as "typedef struct { } foo_t, *foo_p" and we end up with
    # an AST that is not a tree, but a DAG, with the "type" node of the
    # two branches foo_t and foo_p of the trees being the same node.
    # It's a bit silly but detecting "DAG-ness" in the AST tree seems
    # to be the only way to distinguish this case from two independent
    # structs.  See test_struct_with_two_usages.
    try:
        return self._structnode2type[type]
    except KeyError:
        pass
    #
    # Note that this must handle parsing "struct foo" any number of
    # times and always return the same StructType object.  Additionally,
    # one of these times (not necessarily the first), the fields of
    # the struct can be specified with "struct foo { ...fields... }".
    # If no name is given, then we have to create a new anonymous struct
    # with no caching; in this case, the fields are either specified
    # right now or never.
    #
    force_name = name
    name = type.name
    #
    # get the type or create it if needed
    if name is None:
        # 'force_name' is used to guess a more readable name for
        # anonymous structs, for the common case "typedef struct { } foo".
        if force_name is not None:
            explicit_name = '$%s' % force_name
        else:
            self._anonymous_counter += 1
            explicit_name = '$%d' % self._anonymous_counter
        tp = None
    else:
        explicit_name = name
        key = '%s %s' % (kind, name)
        tp, _ = self._declarations.get(key, (None, None))
    #
    if tp is None:
        if kind == 'struct':
            tp = model.StructType(explicit_name, None, None, None)
        elif kind == 'union':
            tp = model.UnionType(explicit_name, None, None, None)
        elif kind == 'enum':
            if explicit_name == '__dotdotdot__':
                raise api.CDefError("Enums cannot be declared with ...")
            tp = self._build_enum_type(explicit_name, type.values)
        else:
            raise AssertionError("kind = %r" % (kind,))
        if name is not None:
            self._declare(key, tp)
    else:
        if kind == 'enum' and type.values is not None:
            raise NotImplementedError(
                "enum %s: the '{}' declaration should appear on the first "
                "time the enum is mentioned, not later" % explicit_name)
    if not tp.forcename:
        tp.force_the_name(force_name)
    if tp.forcename and '$' in tp.name:
        self._declare('anonymous %s' % tp.forcename, tp)
    #
    self._structnode2type[type] = tp
    #
    # enums: done here
    if kind == 'enum':
        return tp
    #
    # is there a 'type.decls'?  If yes, then this is the place in the
    # C sources that declare the fields.  If no, then just return the
    # existing type, possibly still incomplete.
    if type.decls is None:
        return tp
    #
    if tp.fldnames is not None:
        raise api.CDefError("duplicate declaration of struct %s" % name)
    fldnames = []
    fldtypes = []
    fldbitsize = []
    fldquals = []
    for decl in type.decls:
        if (isinstance(decl.type, pycparser.c_ast.IdentifierType) and
                ''.join(decl.type.names) == '__dotdotdot__'):
            # XXX pycparser is inconsistent: 'names' should be a list
            # of strings, but is sometimes just one string.  Use
            # str.join() as a way to cope with both.
            self._make_partial(tp, nested)
            continue
        if decl.bitsize is None:
            bitsize = -1
        else:
            bitsize = self._parse_constant(decl.bitsize)
        self._partial_length = False
        ftype, fqual = self._get_type_and_quals(decl.type,
                                                partial_length_ok=True)
        if self._partial_length:
            self._make_partial(tp, nested)
        if isinstance(ftype, model.StructType) and ftype.partial:
            self._make_partial(tp, nested)
        fldnames.append(decl.name or '')
        fldtypes.append(ftype)
        fldbitsize.append(bitsize)
        fldquals.append(fqual)
    tp.fldnames = tuple(fldnames)
    tp.fldtypes = tuple(fldtypes)
    tp.fldbitsize = tuple(fldbitsize)
    tp.fldquals = tuple(fldquals)
    if fldbitsize != [-1] * len(fldbitsize):
        # bitfields present somewhere in the struct
        if isinstance(tp, model.StructType) and tp.partial:
            raise NotImplementedError("%s: using both bitfields and '...;'"
                                      % (tp,))
    tp.packed = self._options.get('packed')
    if tp.completed:    # must be re-completed: it is not opaque any more
        tp.completed = 0
        self._recomplete.append(tp)
    return tp
|
||||||
|
|
||||||
|
def _make_partial(self, tp, nested):
    """Mark *tp* as a partial struct/union (one declared with '...')."""
    if not isinstance(tp, model.StructOrUnion):
        raise api.CDefError("%s cannot be partial" % (tp,))
    # a partial type must be nameable in C, unless it is nested
    if not (tp.has_c_name() or nested):
        raise NotImplementedError("%s is partial but has no C name" %(tp,))
    tp.partial = True
|
||||||
|
|
||||||
|
def _parse_constant(self, exprnode, partial_length_ok=False):
    """Evaluate a constant expression node to a Python int.

    Supports integer literals (decimal/hex/octal), simple character
    constants, unary +/-, and previously-defined integer constants.
    With *partial_length_ok*, the special '__dotdotdotarray__' marker
    evaluates to the string '...' and flags a partial array length.
    """
    # for now, limited to expressions that are an immediate number
    # or positive/negative number
    if isinstance(exprnode, pycparser.c_ast.Constant):
        s = exprnode.value
        if s.startswith('0'):
            if s.startswith('0x') or s.startswith('0X'):
                return int(s, 16)
            return int(s, 8)       # C-style leading-zero octal (or plain '0')
        elif '1' <= s[0] <= '9':
            return int(s, 10)
        elif s[0] == "'" and s[-1] == "'" and (
                len(s) == 3 or (len(s) == 4 and s[1] == "\\")):
            # character constant, e.g. 'a' or a simple escape like '\\'
            return ord(s[-2])
        else:
            raise api.CDefError("invalid constant %r" % (s,))
    #
    if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and
            exprnode.op == '+'):
        return self._parse_constant(exprnode.expr)
    #
    if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and
            exprnode.op == '-'):
        return -self._parse_constant(exprnode.expr)
    # load previously defined int constant
    if (isinstance(exprnode, pycparser.c_ast.ID) and
            exprnode.name in self._int_constants):
        return self._int_constants[exprnode.name]
    #
    if partial_length_ok:
        if (isinstance(exprnode, pycparser.c_ast.ID) and
                exprnode.name == '__dotdotdotarray__'):
            self._partial_length = True
            return '...'
    #
    raise api.FFIError(":%d: unsupported expression: expected a "
                       "simple numeric constant" % exprnode.coord.line)
|
||||||
|
|
||||||
|
def _build_enum_type(self, explicit_name, decls):
    """Build a model.EnumType from pycparser enumerator declarations.

    *decls* is None for an opaque 'enum foo;'.  Enumerators without an
    explicit value continue counting from the previous one, as in C.
    An enumerator matching the '...' pattern marks the enum as partial.
    """
    if decls is not None:
        partial = False
        enumerators = []
        enumvalues = []
        nextenumvalue = 0
        for enum in decls.enumerators:
            if _r_enum_dotdotdot.match(enum.name):
                partial = True
                continue
            if enum.value is not None:
                nextenumvalue = self._parse_constant(enum.value)
            enumerators.append(enum.name)
            enumvalues.append(nextenumvalue)
            # enumerators also become plain integer constants
            self._add_constants(enum.name, nextenumvalue)
            nextenumvalue += 1
        enumerators = tuple(enumerators)
        enumvalues = tuple(enumvalues)
        tp = model.EnumType(explicit_name, enumerators, enumvalues)
        tp.partial = partial
    else:   # opaque enum
        tp = model.EnumType(explicit_name, (), ())
    return tp
|
||||||
|
|
||||||
|
def include(self, other):
    """Import the declarations and integer constants of another parser
    (used by ffi.include())."""
    wanted_kinds = ('struct', 'union', 'enum', 'anonymous', 'typedef')
    for name, (tp, quals) in other._declarations.items():
        # skip anonymous-enum entries (fix for test_anonymous_enum_include)
        if name.startswith('anonymous $enum_$'):
            continue
        if name.split(' ', 1)[0] in wanted_kinds:
            self._declare(name, tp, included=True, quals=quals)
    for key, value in other._int_constants.items():
        self._add_constants(key, value)
|
||||||
|
|
||||||
|
def _get_unknown_type(self, decl):
    """Handle 'typedef int... t;'-style declarations: a typedef whose
    exact primitive type is unknown until the verifier fills it in.

    Returns an UnknownFloatType for 'float...'/'double...', an
    UnknownIntegerType for integer prefixes, or a fully unknown type for
    a bare '...'.  Records that this new cdef feature was used.
    """
    typenames = decl.type.type.names
    assert typenames[-1] == '__dotdotdot__'
    if len(typenames) == 1:
        # bare 'typedef ... t;'
        return model.unknown_type(decl.name)

    if (typenames[:-1] == ['float'] or
        typenames[:-1] == ['double']):
        # not for 'long double' so far
        result = model.UnknownFloatType(decl.name)
    else:
        # only integer-ish prefixes are accepted before the '...'
        for t in typenames[:-1]:
            if t not in ['int', 'short', 'long', 'signed',
                         'unsigned', 'char']:
                raise api.FFIError(':%d: bad usage of "..."' %
                                   decl.coord.line)
        result = model.UnknownIntegerType(decl.name)

    if self._uses_new_feature is None:
        # remember (for error messages) the first use of this feature
        self._uses_new_feature = "'typedef %s... %s'" % (
            ' '.join(typenames[:-1]), decl.name)

    return result
|
121
lib/python3.4/site-packages/cffi/ffiplatform.py
Normal file
121
lib/python3.4/site-packages/cffi/ffiplatform.py
Normal file
|
@ -0,0 +1,121 @@
|
||||||
|
import sys, os
|
||||||
|
|
||||||
|
|
||||||
|
class VerificationError(Exception):
    """An error raised when verification fails, i.e. when the C source
    produced from a cdef cannot be compiled or does not match.
    """
|
||||||
|
|
||||||
|
class VerificationMissing(Exception):
    """An error raised when incomplete structures are passed into
    cdef, but no verification has been done to complete them.
    """
|
||||||
|
|
||||||
|
|
||||||
|
# keyword arguments to verify() whose values are lists of file names
LIST_OF_FILE_NAMES = ['sources', 'include_dirs', 'library_dirs',
                      'extra_objects', 'depends']
|
||||||
|
|
||||||
|
def get_extension(srcfilename, modname, sources=(), **kwds):
    """Build a distutils Extension named *modname* that compiles
    *srcfilename* together with the extra *sources*; extra keyword
    arguments are passed through to Extension."""
    from distutils.core import Extension
    allsources = [srcfilename]
    allsources.extend(os.path.normpath(src) for src in sources)
    return Extension(name=modname, sources=allsources, **kwds)
|
||||||
|
|
||||||
|
def compile(tmpdir, ext, compiler_verbose=0):
    """Compile a C extension module using distutils.

    Builds *ext* into *tmpdir* and returns the absolute path of the
    produced shared object.  NOTE: shadows the builtin compile(); kept
    for API compatibility.
    """
    saved_environ = os.environ.copy()
    try:
        outputfilename = _build(tmpdir, ext, compiler_verbose)
        outputfilename = os.path.abspath(outputfilename)
    finally:
        # workaround for a distutils bugs where some env vars can
        # become longer and longer every time it is used
        for key, value in saved_environ.items():
            if os.environ.get(key) != value:
                os.environ[key] = value
    return outputfilename
|
||||||
|
|
||||||
|
def _build(tmpdir, ext, compiler_verbose=0):
    """Run distutils' build_ext on *ext*, building into *tmpdir*, and
    return the path of the produced shared object.

    Compiler or linker failures are re-raised as VerificationError.
    """
    # XXX compact but horrible :-(
    from distutils.core import Distribution
    import distutils.errors, distutils.log
    #
    dist = Distribution({'ext_modules': [ext]})
    dist.parse_config_files()
    options = dist.get_option_dict('build_ext')
    # force a rebuild, and redirect all build outputs into tmpdir
    options['force'] = ('ffiplatform', True)
    options['build_lib'] = ('ffiplatform', tmpdir)
    options['build_temp'] = ('ffiplatform', tmpdir)
    #
    try:
        # temporarily lower the log threshold so 'compiler_verbose' works
        old_level = distutils.log.set_threshold(0) or 0
        try:
            distutils.log.set_verbosity(compiler_verbose)
            dist.run_command('build_ext')
            cmd_obj = dist.get_command_obj('build_ext')
            [soname] = cmd_obj.get_outputs()
        finally:
            distutils.log.set_threshold(old_level)
    except (distutils.errors.CompileError,
            distutils.errors.LinkError) as e:
        raise VerificationError('%s: %s' % (e.__class__.__name__, e))
    #
    return soname
|
||||||
|
|
||||||
|
try:
    from os.path import samefile
except ImportError:
    # fallback for platforms without os.path.samefile: compare by
    # absolute path only (no inode comparison available)
    def samefile(f1, f2):
        return os.path.abspath(f1) == os.path.abspath(f2)
|
||||||
|
|
||||||
|
def maybe_relative_path(path):
    """Return *path* relative to the current directory when it lies
    below it; otherwise return it unchanged."""
    if not os.path.isabs(path):
        return path      # already relative
    current = path
    components = []
    while True:
        parent, tail = os.path.split(current)
        if parent == current or not parent:
            return path  # reached the filesystem root: cannot relativize
        components.append(tail)
        try:
            if samefile(parent, os.curdir):
                return os.path.join(*reversed(components))
        except OSError:
            # a parent may not exist; keep climbing
            pass
        current = parent
|
||||||
|
|
||||||
|
# ____________________________________________________________
|
||||||
|
|
||||||
|
# Python 2/3 compatibility: 'long' and 'cStringIO' only exist on Python 2.
try:
    int_or_long = (int, long)
    import cStringIO
except NameError:
    int_or_long = int      # Python 3
    import io as cStringIO
|
||||||
|
|
||||||
|
def _flatten(x, f):
    """Serialize *x* (str, dict, list/tuple, or int) into the file-like
    object *f* as a canonical, order-independent string.

    Used by flatten() to turn the keyword arguments of verify() into a
    stable key.  NOTE: the str check must come before the int check so
    that bools (ints in Python) never mask strings; dict keys are sorted
    for determinism.
    """
    if isinstance(x, str):
        f.write('%ds%s' % (len(x), x))
    elif isinstance(x, dict):
        keys = sorted(x.keys())
        f.write('%dd' % len(keys))
        for key in keys:
            _flatten(key, f)
            _flatten(x[key], f)
    elif isinstance(x, (list, tuple)):
        f.write('%dl' % len(x))
        for value in x:
            _flatten(value, f)
    elif isinstance(x, int_or_long):
        f.write('%di' % (x,))
    else:
        raise TypeError(
            "the keywords to verify() contains unsupported object %r" % (x,))
|
||||||
|
|
||||||
|
def flatten(x):
    """Return the canonical string serialization of *x* (see _flatten)."""
    f = cStringIO.StringIO()
    _flatten(x, f)
    return f.getvalue()
|
30
lib/python3.4/site-packages/cffi/lock.py
Normal file
30
lib/python3.4/site-packages/cffi/lock.py
Normal file
|
@ -0,0 +1,30 @@
|
||||||
|
import sys
|
||||||
|
|
||||||
|
if sys.version_info < (3,):
|
||||||
|
try:
|
||||||
|
from thread import allocate_lock
|
||||||
|
except ImportError:
|
||||||
|
from dummy_thread import allocate_lock
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
from _thread import allocate_lock
|
||||||
|
except ImportError:
|
||||||
|
from _dummy_thread import allocate_lock
|
||||||
|
|
||||||
|
|
||||||
|
##import sys
|
||||||
|
##l1 = allocate_lock
|
||||||
|
|
||||||
|
##class allocate_lock(object):
|
||||||
|
## def __init__(self):
|
||||||
|
## self._real = l1()
|
||||||
|
## def __enter__(self):
|
||||||
|
## for i in range(4, 0, -1):
|
||||||
|
## print sys._getframe(i).f_code
|
||||||
|
## print
|
||||||
|
## return self._real.__enter__()
|
||||||
|
## def __exit__(self, *args):
|
||||||
|
## return self._real.__exit__(*args)
|
||||||
|
## def acquire(self, f):
|
||||||
|
## assert f is False
|
||||||
|
## return self._real.acquire(f)
|
602
lib/python3.4/site-packages/cffi/model.py
Normal file
602
lib/python3.4/site-packages/cffi/model.py
Normal file
|
@ -0,0 +1,602 @@
|
||||||
|
import types, sys
|
||||||
|
import weakref
|
||||||
|
|
||||||
|
from .lock import allocate_lock
|
||||||
|
|
||||||
|
|
||||||
|
# type qualifiers, combined as bit flags in a 'quals' integer
Q_CONST = 0x01
Q_RESTRICT = 0x02
Q_VOLATILE = 0x04
|
||||||
|
|
||||||
|
def qualify(quals, replace_with):
    """Prefix *replace_with* with the C qualifier keywords named by the
    Q_* bits set in *quals*."""
    # NOTE about restrict: it seems that __restrict is supported by gcc
    # and msvc.  If you hit some different compiler, add a #define in
    # _cffi_include.h for it (and in its copies, documented there)
    for flag, keyword in ((Q_CONST, ' const '),
                          (Q_VOLATILE, ' volatile '),
                          (Q_RESTRICT, ' __restrict ')):
        if quals & flag:
            replace_with = keyword + replace_with.lstrip()
    return replace_with
|
||||||
|
|
||||||
|
|
||||||
|
class BaseTypeByIdentity(object):
    """Base class for model types that compare *by identity* (in contrast
    to BaseType below, which compares by value via _attrs_).

    Subclasses set 'c_name_with_marker': the C declaration text with a
    single '&' marking where a variable name would be inserted.
    """
    is_array_type = False       # overridden to True by array types
    is_raw_function = False     # overridden to True by raw function types

    def get_c_name(self, replace_with='', context='a C file', quals=0):
        """Return this type's C declaration with *replace_with* inserted
        at the variable-name position, qualified per the Q_* bits in
        *quals*.  Raises VerificationError if the name contains '$'
        (an internal/anonymous placeholder that cannot appear in C)."""
        result = self.c_name_with_marker
        assert result.count('&') == 1
        # some logic duplication with ffi.getctype()... :-(
        replace_with = replace_with.strip()
        if replace_with:
            if replace_with.startswith('*') and '&[' in result:
                # '*x' inserted into an array type needs parentheses
                replace_with = '(%s)' % replace_with
            elif not replace_with[0] in '[(':
                replace_with = ' ' + replace_with
        replace_with = qualify(quals, replace_with)
        result = result.replace('&', replace_with)
        if '$' in result:
            from .ffiplatform import VerificationError
            raise VerificationError(
                "cannot generate '%s' in %s: unknown type name"
                % (self._get_c_name(), context))
        return result

    def _get_c_name(self):
        # the C name with the '&' insertion marker removed
        return self.c_name_with_marker.replace('&', '')

    def has_c_name(self):
        # False for anonymous/internal types (their names contain '$')
        return '$' not in self._get_c_name()

    def is_integer_type(self):
        # overridden by integer-like subclasses
        return False

    def get_cached_btype(self, ffi, finishlist, can_delay=False):
        """Return the backend type for this model type, building and
        caching it in ffi._cached_btypes on first use."""
        try:
            BType = ffi._cached_btypes[self]
        except KeyError:
            BType = self.build_backend_type(ffi, finishlist)
            BType2 = ffi._cached_btypes.setdefault(self, BType)
            assert BType2 is BType
        return BType

    def __repr__(self):
        return '<%s>' % (self._get_c_name(),)

    def _get_items(self):
        # (attribute name, value) pairs used by BaseType for eq/hash
        return [(name, getattr(self, name)) for name in self._attrs_]
|
||||||
|
|
||||||
|
|
||||||
|
class BaseType(BaseTypeByIdentity):
    """Base class for model types that compare *by value*: two instances
    of the same class with equal _attrs_ items are interchangeable."""

    def __eq__(self, other):
        return (self.__class__ == other.__class__ and
                self._get_items() == other._get_items())

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        # consistent with __eq__: class plus the _attrs_ values
        return hash((self.__class__, tuple(self._get_items())))
|
||||||
|
|
||||||
|
|
||||||
|
class VoidType(BaseType):
    """The C type 'void'."""
    _attrs_ = ()

    def __init__(self):
        self.c_name_with_marker = 'void&'

    def build_backend_type(self, ffi, finishlist):
        return global_cache(self, ffi, 'new_void_type')

# the single shared 'void' instance used throughout the model
void_type = VoidType()
|
||||||
|
|
||||||
|
|
||||||
|
class BasePrimitiveType(BaseType):
    # common marker base class for primitive C types (see PrimitiveType)
    pass
|
||||||
|
|
||||||
|
|
||||||
|
class PrimitiveType(BasePrimitiveType):
    """A known C primitive type, identified by name.

    ALL_PRIMITIVE_TYPES maps each accepted name to a category letter:
    'c' = character type, 'i' = integer type, 'f' = floating-point type.
    """
    _attrs_ = ('name',)

    ALL_PRIMITIVE_TYPES = {
        'char': 'c',
        'short': 'i',
        'int': 'i',
        'long': 'i',
        'long long': 'i',
        'signed char': 'i',
        'unsigned char': 'i',
        'unsigned short': 'i',
        'unsigned int': 'i',
        'unsigned long': 'i',
        'unsigned long long': 'i',
        'float': 'f',
        'double': 'f',
        'long double': 'f',
        '_Bool': 'i',
        # the following types are not primitive in the C sense
        'wchar_t': 'c',
        'int8_t': 'i',
        'uint8_t': 'i',
        'int16_t': 'i',
        'uint16_t': 'i',
        'int32_t': 'i',
        'uint32_t': 'i',
        'int64_t': 'i',
        'uint64_t': 'i',
        'int_least8_t': 'i',
        'uint_least8_t': 'i',
        'int_least16_t': 'i',
        'uint_least16_t': 'i',
        'int_least32_t': 'i',
        'uint_least32_t': 'i',
        'int_least64_t': 'i',
        'uint_least64_t': 'i',
        'int_fast8_t': 'i',
        'uint_fast8_t': 'i',
        'int_fast16_t': 'i',
        'uint_fast16_t': 'i',
        'int_fast32_t': 'i',
        'uint_fast32_t': 'i',
        'int_fast64_t': 'i',
        'uint_fast64_t': 'i',
        'intptr_t': 'i',
        'uintptr_t': 'i',
        'intmax_t': 'i',
        'uintmax_t': 'i',
        'ptrdiff_t': 'i',
        'size_t': 'i',
        'ssize_t': 'i',
        }

    def __init__(self, name):
        # Only names from the table above are valid primitives.
        assert name in self.ALL_PRIMITIVE_TYPES
        self.name = name
        self.c_name_with_marker = name + '&'

    def is_char_type(self):
        return self.ALL_PRIMITIVE_TYPES[self.name] == 'c'
    def is_integer_type(self):
        return self.ALL_PRIMITIVE_TYPES[self.name] == 'i'
    def is_float_type(self):
        return self.ALL_PRIMITIVE_TYPES[self.name] == 'f'

    def build_backend_type(self, ffi, finishlist):
        return global_cache(self, ffi, 'new_primitive_type', self.name)
|
||||||
|
|
||||||
|
|
||||||
|
class UnknownIntegerType(BasePrimitiveType):
    """An integer type whose size/signedness is only discovered at
    compile time (e.g. declared with 'typedef int... foo_t;')."""
    _attrs_ = ('name',)

    def __init__(self, name):
        self.name = name
        self.c_name_with_marker = '%s&' % (name,)

    def is_integer_type(self):
        # Always an integer, by construction.
        return True

    def build_backend_type(self, ffi, finishlist):
        raise NotImplementedError(
            "integer type '%s' can only be used after compilation"
            % self.name)
|
||||||
|
|
||||||
|
class UnknownFloatType(BasePrimitiveType):
    """A floating-point type resolvable only after compilation
    (e.g. declared with 'typedef float... foo_t;')."""
    _attrs_ = ('name', )

    def __init__(self, name):
        self.name = name
        self.c_name_with_marker = '%s&' % (name,)

    def build_backend_type(self, ffi, finishlist):
        raise NotImplementedError(
            "float type '%s' can only be used after compilation"
            % self.name)
|
||||||
|
|
||||||
|
|
||||||
|
class BaseFunctionType(BaseType):
    """Shared behaviour for function types.

    'args' holds the argument model types, 'result' the return model type,
    'ellipsis' is True for '...'-variadic functions, and 'abi' is an
    optional calling-convention string (e.g. '__stdcall').
    """
    _attrs_ = ('args', 'result', 'ellipsis', 'abi')

    def __init__(self, args, result, ellipsis, abi=None):
        self.args = args
        self.result = result
        self.ellipsis = ellipsis
        self.abi = abi
        # Render the display name: argument list inside the subclass's
        # pattern, substituted at the result type's '&' marker.
        shown = [arg._get_c_name() for arg in self.args]
        if self.ellipsis:
            shown.append('...')
        if not shown:
            shown = ['void']
        replacement = self._base_pattern % (', '.join(shown),)
        if abi is not None:
            # Splice the calling convention just after the opening '('.
            replacement = replacement[:1] + abi + ' ' + replacement[1:]
        self.c_name_with_marker = (
            self.result.c_name_with_marker.replace('&', replacement))
|
||||||
|
|
||||||
|
|
||||||
|
class RawFunctionType(BaseFunctionType):
    # Corresponds to a C type like 'int(int)', which is the C type of
    # a function, but not a pointer-to-function.  The backend has no
    # notion of such a type; it's used temporarily by parsing.
    _base_pattern = '(&)(%s)'
    is_raw_function = True

    def build_backend_type(self, ffi, finishlist):
        # A plain function type can never be materialized by the backend.
        from . import api
        raise api.CDefError("cannot render the type %r: it is a function "
                            "type, not a pointer-to-function type" % (self,))

    def as_function_pointer(self):
        """Return the equivalent pointer-to-function model type."""
        return FunctionPtrType(self.args, self.result, self.ellipsis, self.abi)
|
||||||
|
|
||||||
|
|
||||||
|
class FunctionPtrType(BaseFunctionType):
    """A pointer-to-function type, e.g. 'int(*)(int)'."""
    _base_pattern = '(*&)(%s)'

    def build_backend_type(self, ffi, finishlist):
        result = self.result.get_cached_btype(ffi, finishlist)
        args = []
        for tp in self.args:
            args.append(tp.get_cached_btype(ffi, finishlist))
        abi_args = ()
        if self.abi == "__stdcall":
            if not self.ellipsis:    # __stdcall ignored for variadic funcs
                try:
                    abi_args = (ffi._backend.FFI_STDCALL,)
                except AttributeError:
                    # backend without __stdcall support: silently fall back
                    pass
        return global_cache(self, ffi, 'new_function_type',
                            tuple(args), result, self.ellipsis, *abi_args)

    def as_raw_function(self):
        """Return the corresponding plain (non-pointer) function type."""
        return RawFunctionType(self.args, self.result, self.ellipsis, self.abi)
|
||||||
|
|
||||||
|
|
||||||
|
class PointerType(BaseType):
    """A pointer type, possibly qualified (const/volatile bits in 'quals')."""
    _attrs_ = ('totype', 'quals')

    def __init__(self, totype, quals=0):
        self.totype = totype
        self.quals = quals
        # qualify() renders the qualifier bits into the " *&" suffix.
        extra = qualify(quals, " *&")
        if totype.is_array_type:
            # Pointer-to-array needs parentheses: 'int (*&)[5]'.
            extra = "(%s)" % (extra.lstrip(),)
        self.c_name_with_marker = totype.c_name_with_marker.replace('&', extra)

    def build_backend_type(self, ffi, finishlist):
        # can_delay=True: a pointer can point to a not-yet-completed struct.
        BItem = self.totype.get_cached_btype(ffi, finishlist, can_delay=True)
        return global_cache(self, ffi, 'new_pointer_type', BItem)
|
||||||
|
|
||||||
|
# Shared singleton for 'void *'.
voidp_type = PointerType(void_type)
|
||||||
|
|
||||||
|
def ConstPointerType(totype):
    """Factory for a 'const'-qualified pointer to 'totype'."""
    return PointerType(totype, Q_CONST)
|
||||||
|
|
||||||
|
# Shared singleton for 'const void *'.
const_voidp_type = ConstPointerType(void_type)
|
||||||
|
|
||||||
|
|
||||||
|
class NamedPointerType(PointerType):
    """A pointer type known through a typedef name (the name, not the
    pointed-to structure, is what identifies it)."""
    _attrs_ = ('totype', 'name')

    def __init__(self, totype, name, quals=0):
        PointerType.__init__(self, totype, quals)
        self.name = name
        # Override the generated 'T *' display name with the typedef name.
        self.c_name_with_marker = name + '&'
|
||||||
|
|
||||||
|
|
||||||
|
class ArrayType(BaseType):
    """An array type.  'length' is None for '[]', the string '...' for a
    length to be discovered at compile time, or an integer."""
    _attrs_ = ('item', 'length')
    is_array_type = True

    def __init__(self, item, length):
        self.item = item
        self.length = length
        #
        if length is None:
            brackets = '&[]'
        elif length == '...':
            brackets = '&[/*...*/]'
        else:
            brackets = '&[%s]' % length
        self.c_name_with_marker = (
            self.item.c_name_with_marker.replace('&', brackets))

    def resolve_length(self, newlength):
        """Return a copy of this array type with a concrete length."""
        return ArrayType(self.item, newlength)

    def build_backend_type(self, ffi, finishlist):
        if self.length == '...':
            # '...' lengths must have been resolved before reaching here.
            from . import api
            raise api.CDefError("cannot render the type %r: unknown length" %
                                (self,))
        self.item.get_cached_btype(ffi, finishlist)   # force the item BType
        BPtrItem = PointerType(self.item).get_cached_btype(ffi, finishlist)
        return global_cache(self, ffi, 'new_array_type', BPtrItem, self.length)
|
||||||
|
|
||||||
|
# Shared singleton for 'char[]' (unknown length).
char_array_type = ArrayType(PrimitiveType('char'), None)
|
||||||
|
|
||||||
|
|
||||||
|
class StructOrUnionOrEnum(BaseTypeByIdentity):
    """Common base for struct, union and enum model types.

    Subclasses define 'kind' ('struct', 'union' or 'enum'); 'forcename'
    optionally overrides the generated 'kind name' display form.
    """
    _attrs_ = ('name',)
    forcename = None

    def build_c_name_with_marker(self):
        # Either the forced name or e.g. "struct foo", plus the '&' marker.
        name = self.forcename or '%s %s' % (self.kind, self.name)
        self.c_name_with_marker = name + '&'

    def force_the_name(self, forcename):
        """Set an explicit display name (e.g. from a typedef) and rebuild."""
        self.forcename = forcename
        self.build_c_name_with_marker()

    def get_official_name(self):
        """Return the display name without the trailing '&' marker."""
        assert self.c_name_with_marker.endswith('&')
        return self.c_name_with_marker[:-1]
|
||||||
|
|
||||||
|
|
||||||
|
class StructOrUnion(StructOrUnionOrEnum):
    """A struct or union type, possibly opaque or partially known.

    Completion state machine: 'completed' is 0 (not started), 1 (in
    progress — seeing it again means a recursive declaration), or 2 (done).
    'fixedlayout' is None for layouts computed by the backend, or a tuple
    (fieldofs, fieldsize, totalsize, totalalignment) measured externally.
    """
    fixedlayout = None
    completed = 0
    partial = False
    packed = False

    def __init__(self, name, fldnames, fldtypes, fldbitsize, fldquals=None):
        # fldtypes is None for an opaque struct/union.
        self.name = name
        self.fldnames = fldnames
        self.fldtypes = fldtypes
        self.fldbitsize = fldbitsize
        self.fldquals = fldquals
        self.build_c_name_with_marker()

    def has_anonymous_struct_fields(self):
        """True if any field is an unnamed nested struct/union."""
        if self.fldtypes is None:
            return False
        for name, type in zip(self.fldnames, self.fldtypes):
            if name == '' and isinstance(type, StructOrUnion):
                return True
        return False

    def enumfields(self):
        """Yield (name, type, bitsize, quals) for every field, recursing
        into anonymous nested structs/unions."""
        fldquals = self.fldquals
        if fldquals is None:
            fldquals = (0,) * len(self.fldnames)
        for name, type, bitsize, quals in zip(self.fldnames, self.fldtypes,
                                              self.fldbitsize, fldquals):
            if name == '' and isinstance(type, StructOrUnion):
                # nested anonymous struct/union
                for result in type.enumfields():
                    yield result
            else:
                yield (name, type, bitsize, quals)

    def force_flatten(self):
        # force the struct or union to have a declaration that lists
        # directly all fields returned by enumfields(), flattening
        # nested anonymous structs/unions.
        names = []
        types = []
        bitsizes = []
        fldquals = []
        for name, type, bitsize, quals in self.enumfields():
            names.append(name)
            types.append(type)
            bitsizes.append(bitsize)
            fldquals.append(quals)
        self.fldnames = tuple(names)
        self.fldtypes = tuple(types)
        self.fldbitsize = tuple(bitsizes)
        self.fldquals = tuple(fldquals)

    def get_cached_btype(self, ffi, finishlist, can_delay=False):
        # Unless delayed, immediately complete the struct layout too.
        BType = StructOrUnionOrEnum.get_cached_btype(self, ffi, finishlist,
                                                     can_delay)
        if not can_delay:
            self.finish_backend_type(ffi, finishlist)
        return BType

    def finish_backend_type(self, ffi, finishlist):
        """Fill in the backend struct/union layout (idempotent)."""
        if self.completed:
            if self.completed != 2:
                # completed == 1: we re-entered while still completing.
                raise NotImplementedError("recursive structure declaration "
                                          "for '%s'" % (self.name,))
            return
        BType = ffi._cached_btypes[self]
        # Mark "in progress" to detect recursion.
        self.completed = 1
        #
        if self.fldtypes is None:
            pass    # not completing it: it's an opaque struct
        #
        elif self.fixedlayout is None:
            # Normal case: let the backend compute offsets/sizes itself.
            fldtypes = [tp.get_cached_btype(ffi, finishlist)
                        for tp in self.fldtypes]
            lst = list(zip(self.fldnames, fldtypes, self.fldbitsize))
            sflags = 0
            if self.packed:
                sflags = 8    # SF_PACKED
            ffi._backend.complete_struct_or_union(BType, lst, self,
                                                  -1, -1, sflags)
        #
        else:
            # Fixed layout: offsets/sizes were measured externally; verify
            # each field's declared size against the measured one.
            fldtypes = []
            fieldofs, fieldsize, totalsize, totalalignment = self.fixedlayout
            for i in range(len(self.fldnames)):
                fsize = fieldsize[i]
                ftype = self.fldtypes[i]
                #
                if isinstance(ftype, ArrayType) and ftype.length == '...':
                    # fix the length to match the total size
                    BItemType = ftype.item.get_cached_btype(ffi, finishlist)
                    nlen, nrest = divmod(fsize, ffi.sizeof(BItemType))
                    if nrest != 0:
                        self._verification_error(
                            "field '%s.%s' has a bogus size?" % (
                            self.name, self.fldnames[i] or '{}'))
                    ftype = ftype.resolve_length(nlen)
                    self.fldtypes = (self.fldtypes[:i] + (ftype,) +
                                     self.fldtypes[i+1:])
                #
                BFieldType = ftype.get_cached_btype(ffi, finishlist)
                if isinstance(ftype, ArrayType) and ftype.length is None:
                    # Open-ended array field: measured as size 0.
                    assert fsize == 0
                else:
                    bitemsize = ffi.sizeof(BFieldType)
                    if bitemsize != fsize:
                        self._verification_error(
                            "field '%s.%s' is declared as %d bytes, but is "
                            "really %d bytes" % (self.name,
                                                 self.fldnames[i] or '{}',
                                                 bitemsize, fsize))
                fldtypes.append(BFieldType)
            #
            lst = list(zip(self.fldnames, fldtypes, self.fldbitsize, fieldofs))
            ffi._backend.complete_struct_or_union(BType, lst, self,
                                                  totalsize, totalalignment)
        self.completed = 2

    def _verification_error(self, msg):
        # Raised when the declared layout disagrees with the measured one.
        from .ffiplatform import VerificationError
        raise VerificationError(msg)

    def check_not_partial(self):
        # A '...;' struct with no externally-measured layout cannot be used.
        if self.partial and self.fixedlayout is None:
            from . import ffiplatform
            raise ffiplatform.VerificationMissing(self._get_c_name())

    def build_backend_type(self, ffi, finishlist):
        self.check_not_partial()
        # Queue ourselves so the layout is completed in a later pass.
        finishlist.append(self)
        #
        return global_cache(self, ffi, 'new_%s_type' % self.kind,
                            self.get_official_name(), key=self)
|
||||||
|
|
||||||
|
|
||||||
|
class StructType(StructOrUnion):
    """A C 'struct' type."""
    kind = 'struct'
|
||||||
|
|
||||||
|
|
||||||
|
class UnionType(StructOrUnion):
    """A C 'union' type."""
    kind = 'union'
|
||||||
|
|
||||||
|
|
||||||
|
class EnumType(StructOrUnionOrEnum):
    """A C 'enum' type with its enumerators and their values."""
    kind = 'enum'
    partial = False
    partial_resolved = False

    def __init__(self, name, enumerators, enumvalues, baseinttype=None):
        self.name = name
        self.enumerators = enumerators
        self.enumvalues = enumvalues
        # Optional explicit underlying integer type; otherwise guessed in
        # build_baseinttype().
        self.baseinttype = baseinttype
        self.build_c_name_with_marker()

    def force_the_name(self, forcename):
        StructOrUnionOrEnum.force_the_name(self, forcename)
        if self.forcename is None:
            # Anonymous enum: synthesize a '$'-prefixed placeholder name.
            name = self.get_official_name()
            self.forcename = '$' + name.replace(' ', '_')

    def check_not_partial(self):
        # 'enum foo { ...; }' must have been resolved before use.
        if self.partial and not self.partial_resolved:
            from . import ffiplatform
            raise ffiplatform.VerificationMissing(self._get_c_name())

    def build_backend_type(self, ffi, finishlist):
        self.check_not_partial()
        base_btype = self.build_baseinttype(ffi, finishlist)
        return global_cache(self, ffi, 'new_enum_type',
                            self.get_official_name(),
                            self.enumerators, self.enumvalues,
                            base_btype, key=self)

    def build_baseinttype(self, ffi, finishlist):
        """Pick the underlying integer backend type.

        Uses the explicit base type if given; otherwise guesses the
        smallest of (unsigned) int / (unsigned) long that fits every
        enumerator value.
        """
        if self.baseinttype is not None:
            return self.baseinttype.get_cached_btype(ffi, finishlist)
        #
        from . import api
        if self.enumvalues:
            smallest_value = min(self.enumvalues)
            largest_value = max(self.enumvalues)
        else:
            import warnings
            warnings.warn("%r has no values explicitly defined; next version "
                          "will refuse to guess which integer type it is "
                          "meant to be (unsigned/signed, int/long)"
                          % self._get_c_name())
            smallest_value = largest_value = 0
        if smallest_value < 0:   # needs a signed type
            sign = 1
            candidate1 = PrimitiveType("int")
            candidate2 = PrimitiveType("long")
        else:
            sign = 0
            candidate1 = PrimitiveType("unsigned int")
            candidate2 = PrimitiveType("unsigned long")
        btype1 = candidate1.get_cached_btype(ffi, finishlist)
        btype2 = candidate2.get_cached_btype(ffi, finishlist)
        size1 = ffi.sizeof(btype1)
        size2 = ffi.sizeof(btype2)
        # Range checks: signed types lose one value bit (sign == 1).
        if (smallest_value >= ((-1) << (8*size1-1)) and
            largest_value < (1 << (8*size1-sign))):
            return btype1
        if (smallest_value >= ((-1) << (8*size2-1)) and
            largest_value < (1 << (8*size2-sign))):
            return btype2
        raise api.CDefError("%s values don't all fit into either 'long' "
                            "or 'unsigned long'" % self._get_c_name())
|
||||||
|
|
||||||
|
def unknown_type(name, structname=None):
    """Return an opaque StructType standing in for the unknown type *name*.

    'structname' defaults to '$name', marking it as not a real C name.
    """
    real_name = '$%s' % name if structname is None else structname
    tp = StructType(real_name, None, None, None)
    tp.force_the_name(name)
    tp.origin = "unknown_type"
    return tp
|
||||||
|
|
||||||
|
def unknown_ptr_type(name, structname=None):
    """Return a NamedPointerType to an opaque struct, standing in for the
    unknown pointer typedef *name* ('structname' defaults to '$$name')."""
    if structname is None:
        structname = '$$%s' % name
    opaque = StructType(structname, None, None, None)
    return NamedPointerType(opaque, name)
|
||||||
|
|
||||||
|
|
||||||
|
# Protects the manual setdefault dance in global_cache() (see note there).
global_lock = allocate_lock()
|
||||||
|
|
||||||
|
def global_cache(srctype, ffi, funcname, *args, **kwds):
|
||||||
|
key = kwds.pop('key', (funcname, args))
|
||||||
|
assert not kwds
|
||||||
|
try:
|
||||||
|
return ffi._backend.__typecache[key]
|
||||||
|
except KeyError:
|
||||||
|
pass
|
||||||
|
except AttributeError:
|
||||||
|
# initialize the __typecache attribute, either at the module level
|
||||||
|
# if ffi._backend is a module, or at the class level if ffi._backend
|
||||||
|
# is some instance.
|
||||||
|
if isinstance(ffi._backend, types.ModuleType):
|
||||||
|
ffi._backend.__typecache = weakref.WeakValueDictionary()
|
||||||
|
else:
|
||||||
|
type(ffi._backend).__typecache = weakref.WeakValueDictionary()
|
||||||
|
try:
|
||||||
|
res = getattr(ffi._backend, funcname)(*args)
|
||||||
|
except NotImplementedError as e:
|
||||||
|
raise NotImplementedError("%s: %r: %s" % (funcname, srctype, e))
|
||||||
|
# note that setdefault() on WeakValueDictionary is not atomic
|
||||||
|
# and contains a rare bug (http://bugs.python.org/issue19542);
|
||||||
|
# we have to use a lock and do it ourselves
|
||||||
|
cache = ffi._backend.__typecache
|
||||||
|
with global_lock:
|
||||||
|
res1 = cache.get(key)
|
||||||
|
if res1 is None:
|
||||||
|
cache[key] = res
|
||||||
|
return res
|
||||||
|
else:
|
||||||
|
return res1
|
||||||
|
|
||||||
|
def pointer_cache(ffi, BType):
    # Shortcut: cached "pointer to BType" backend type.
    return global_cache('?', ffi, 'new_pointer_type', BType)
|
||||||
|
|
||||||
|
def attach_exception_info(e, name):
    """Prefix 'name: ' onto the first argument of exception *e*, in place.

    Only applies when e.args[0] is exactly a str; other exceptions are
    left untouched.
    """
    current = e.args
    if current and type(current[0]) is str:
        e.args = ('%s: %s' % (name, current[0]),) + current[1:]
|
177
lib/python3.4/site-packages/cffi/parse_c_type.h
Normal file
177
lib/python3.4/site-packages/cffi/parse_c_type.h
Normal file
|
@ -0,0 +1,177 @@
|
||||||
|
|
||||||
|
/* This part is from file 'cffi/parse_c_type.h'.  It is copied at the
   beginning of C sources generated by CFFI's ffi.set_source(). */

/* An opcode is a tagged value packed into one pointer-sized word:
   low 8 bits = operation, remaining bits = argument. */
typedef void *_cffi_opcode_t;

#define _CFFI_OP(opcode, arg)   (_cffi_opcode_t)(opcode | (((uintptr_t)(arg)) << 8))
#define _CFFI_GETOP(cffi_opcode)    ((unsigned char)(uintptr_t)cffi_opcode)
#define _CFFI_GETARG(cffi_opcode)   (((intptr_t)cffi_opcode) >> 8)

/* Opcode numbers (all odd, so they can't be mistaken for aligned
   pointers). */
#define _CFFI_OP_PRIMITIVE       1
#define _CFFI_OP_POINTER         3
#define _CFFI_OP_ARRAY           5
#define _CFFI_OP_OPEN_ARRAY      7
#define _CFFI_OP_STRUCT_UNION    9
#define _CFFI_OP_ENUM           11
#define _CFFI_OP_FUNCTION       13
#define _CFFI_OP_FUNCTION_END   15
#define _CFFI_OP_NOOP           17
#define _CFFI_OP_BITFIELD       19
#define _CFFI_OP_TYPENAME       21
#define _CFFI_OP_CPYTHON_BLTN_V 23   // varargs
#define _CFFI_OP_CPYTHON_BLTN_N 25   // noargs
#define _CFFI_OP_CPYTHON_BLTN_O 27   // O  (i.e. a single arg)
#define _CFFI_OP_CONSTANT       29
#define _CFFI_OP_CONSTANT_INT   31
#define _CFFI_OP_GLOBAL_VAR     33
#define _CFFI_OP_DLOPEN_FUNC    35
#define _CFFI_OP_DLOPEN_CONST   37
#define _CFFI_OP_GLOBAL_VAR_F   39
#define _CFFI_OP_EXTERN_PYTHON  41

/* Primitive-type codes, used as the argument of _CFFI_OP_PRIMITIVE. */
#define _CFFI_PRIM_VOID          0
#define _CFFI_PRIM_BOOL          1
#define _CFFI_PRIM_CHAR          2
#define _CFFI_PRIM_SCHAR         3
#define _CFFI_PRIM_UCHAR         4
#define _CFFI_PRIM_SHORT         5
#define _CFFI_PRIM_USHORT        6
#define _CFFI_PRIM_INT           7
#define _CFFI_PRIM_UINT          8
#define _CFFI_PRIM_LONG          9
#define _CFFI_PRIM_ULONG        10
#define _CFFI_PRIM_LONGLONG     11
#define _CFFI_PRIM_ULONGLONG    12
#define _CFFI_PRIM_FLOAT        13
#define _CFFI_PRIM_DOUBLE       14
#define _CFFI_PRIM_LONGDOUBLE   15

#define _CFFI_PRIM_WCHAR        16
#define _CFFI_PRIM_INT8         17
#define _CFFI_PRIM_UINT8        18
#define _CFFI_PRIM_INT16        19
#define _CFFI_PRIM_UINT16       20
#define _CFFI_PRIM_INT32        21
#define _CFFI_PRIM_UINT32       22
#define _CFFI_PRIM_INT64        23
#define _CFFI_PRIM_UINT64       24
#define _CFFI_PRIM_INTPTR       25
#define _CFFI_PRIM_UINTPTR      26
#define _CFFI_PRIM_PTRDIFF      27
#define _CFFI_PRIM_SIZE         28
#define _CFFI_PRIM_SSIZE        29
#define _CFFI_PRIM_INT_LEAST8   30
#define _CFFI_PRIM_UINT_LEAST8  31
#define _CFFI_PRIM_INT_LEAST16  32
#define _CFFI_PRIM_UINT_LEAST16 33
#define _CFFI_PRIM_INT_LEAST32  34
#define _CFFI_PRIM_UINT_LEAST32 35
#define _CFFI_PRIM_INT_LEAST64  36
#define _CFFI_PRIM_UINT_LEAST64 37
#define _CFFI_PRIM_INT_FAST8    38
#define _CFFI_PRIM_UINT_FAST8   39
#define _CFFI_PRIM_INT_FAST16   40
#define _CFFI_PRIM_UINT_FAST16  41
#define _CFFI_PRIM_INT_FAST32   42
#define _CFFI_PRIM_UINT_FAST32  43
#define _CFFI_PRIM_INT_FAST64   44
#define _CFFI_PRIM_UINT_FAST64  45
#define _CFFI_PRIM_INTMAX       46
#define _CFFI_PRIM_UINTMAX      47

#define _CFFI__NUM_PRIM         48
/* Negative pseudo-primitives: placeholders resolved at compile time. */
#define _CFFI__UNKNOWN_PRIM           (-1)
#define _CFFI__UNKNOWN_FLOAT_PRIM     (-2)
#define _CFFI__UNKNOWN_LONG_DOUBLE    (-3)

#define _CFFI__IO_FILE_STRUCT         (-1)


/* One exported global: a function, constant or variable. */
struct _cffi_global_s {
    const char *name;
    void *address;
    _cffi_opcode_t type_op;
    void *size_or_direct_fn;  // OP_GLOBAL_VAR: size, or 0 if unknown
                              // OP_CPYTHON_BLTN_*: addr of direct function
};

/* Lazily-fetched integer constant. */
struct _cffi_getconst_s {
    unsigned long long value;
    const struct _cffi_type_context_s *ctx;
    int gindex;
};

/* Layout description of one struct or union. */
struct _cffi_struct_union_s {
    const char *name;
    int type_index;          // -> _cffi_types, on a OP_STRUCT_UNION
    int flags;               // _CFFI_F_* flags below
    size_t size;
    int alignment;
    int first_field_index;   // -> _cffi_fields array
    int num_fields;
};
#define _CFFI_F_UNION         0x01   // is a union, not a struct
#define _CFFI_F_CHECK_FIELDS  0x02   // complain if fields are not in the
                                     // "standard layout" or if some are missing
#define _CFFI_F_PACKED        0x04   // for CHECK_FIELDS, assume a packed struct
#define _CFFI_F_EXTERNAL      0x08   // in some other ffi.include()
#define _CFFI_F_OPAQUE        0x10   // opaque

struct _cffi_field_s {
    const char *name;
    size_t field_offset;
    size_t field_size;
    _cffi_opcode_t field_type_op;
};

struct _cffi_enum_s {
    const char *name;
    int type_index;          // -> _cffi_types, on a OP_ENUM
    int type_prim;           // _CFFI_PRIM_xxx
    const char *enumerators; // comma-delimited string
};

struct _cffi_typename_s {
    const char *name;
    int type_index;   /* if opaque, points to a possibly artificial
                         OP_STRUCT which is itself opaque */
};

/* Everything the generated module exports, gathered in one table. */
struct _cffi_type_context_s {
    _cffi_opcode_t *types;
    const struct _cffi_global_s *globals;
    const struct _cffi_field_s *fields;
    const struct _cffi_struct_union_s *struct_unions;
    const struct _cffi_enum_s *enums;
    const struct _cffi_typename_s *typenames;
    int num_globals;
    int num_struct_unions;
    int num_enums;
    int num_typenames;
    const char *const *includes;
    int num_types;
    int flags;      /* future extension */
};

/* In/out parameters for parse_c_type(). */
struct _cffi_parse_info_s {
    const struct _cffi_type_context_s *ctx;
    _cffi_opcode_t *output;
    unsigned int output_size;
    size_t error_location;
    const char *error_message;
};

/* Descriptor for one 'extern "Python"' callback. */
struct _cffi_externpy_s {
    const char *name;
    size_t size_of_result;
    void *reserved1, *reserved2;
};

#ifdef _CFFI_INTERNAL
static int parse_c_type(struct _cffi_parse_info_s *info, const char *input);
static int search_in_globals(const struct _cffi_type_context_s *ctx,
                             const char *search, size_t search_len);
static int search_in_struct_unions(const struct _cffi_type_context_s *ctx,
                                   const char *search, size_t search_len);
#endif
|
1501
lib/python3.4/site-packages/cffi/recompiler.py
Normal file
1501
lib/python3.4/site-packages/cffi/recompiler.py
Normal file
File diff suppressed because it is too large
Load diff
161
lib/python3.4/site-packages/cffi/setuptools_ext.py
Normal file
161
lib/python3.4/site-packages/cffi/setuptools_ext.py
Normal file
|
@ -0,0 +1,161 @@
|
||||||
|
import os
|
||||||
|
|
||||||
|
# Compatibility shim: 'basestring' exists on Python 2 only; on Python 3
# fall back to 'str' so isinstance() checks below work on both.
try:
    basestring
except NameError:
    # Python 3.x
    basestring = str
|
||||||
|
|
||||||
|
def error(msg):
    """Abort the setup() run with a DistutilsSetupError carrying *msg*."""
    # imported lazily so this module can be loaded without distutils
    from distutils.errors import DistutilsSetupError
    raise DistutilsSetupError(msg)
|
||||||
|
|
||||||
|
|
||||||
|
def execfile(filename, glob):
    """Execute the Python file *filename* using *glob* as its globals.

    Re-implementation of Python 2's execfile() for Python 3.  Used instead
    of __import__() because a normal import of the build script could make
    intermediate __init__.py files import the very module we are about to
    generate.
    """
    with open(filename) as f:
        src = f.read()
    # A trailing newline keeps compile() happy on Python 2.6.
    code = compile(src + '\n', filename, 'exec')
    exec(code, glob, glob)
|
||||||
|
|
||||||
|
|
||||||
|
def add_cffi_module(dist, mod_spec):
    """Register one 'path/build.py:ffi_var' spec on the distribution.

    Loads the build script, extracts the FFI object (calling it first if
    it is a factory function), and wires either a generated C extension or
    a generated pure-Python module into 'dist'.
    """
    from cffi.api import FFI

    if not isinstance(mod_spec, basestring):
        error("argument to 'cffi_modules=...' must be a str or a list of str,"
              " not %r" % (type(mod_spec).__name__,))
    mod_spec = str(mod_spec)
    try:
        build_file_name, ffi_var_name = mod_spec.split(':')
    except ValueError:
        error("%r must be of the form 'path/build.py:ffi_variable'" %
              (mod_spec,))
    if not os.path.exists(build_file_name):
        ext = ''
        # Helpful hint if the user wrote a dotted module path by mistake.
        rewritten = build_file_name.replace('.', '/') + '.py'
        if os.path.exists(rewritten):
            ext = ' (rewrite cffi_modules to [%r])' % (
                rewritten + ':' + ffi_var_name,)
        error("%r does not name an existing file%s" % (build_file_name, ext))

    # Run the build script in a fresh namespace (see execfile() above).
    mod_vars = {'__name__': '__cffi__', '__file__': build_file_name}
    execfile(build_file_name, mod_vars)

    try:
        ffi = mod_vars[ffi_var_name]
    except KeyError:
        error("%r: object %r not found in module" % (mod_spec,
                                                     ffi_var_name))
    if not isinstance(ffi, FFI):
        ffi = ffi()  # maybe it's a function instead of directly an ffi
    if not isinstance(ffi, FFI):
        error("%r is not an FFI instance (got %r)" % (mod_spec,
                                                      type(ffi).__name__))
    if not hasattr(ffi, '_assigned_source'):
        error("%r: the set_source() method was not called" % (mod_spec,))
    module_name, source, source_extension, kwds = ffi._assigned_source
    if ffi._windows_unicode:
        # presumably adds UNICODE/_UNICODE macros to the build keywords
        kwds = kwds.copy()
        ffi._apply_windows_unicode(kwds)

    if source is None:
        # ABI mode: generate a pure-Python module.
        _add_py_module(dist, ffi, module_name)
    else:
        # API mode: generate and compile a C extension module.
        _add_c_module(dist, ffi, module_name, source, source_extension, kwds)
|
||||||
|
|
||||||
|
|
||||||
|
def _add_c_module(dist, ffi, module_name, source, source_extension, kwds):
    """Register a generated C extension module on the distribution.

    The C source file does not exist yet at setup() time, so a
    '$PLACEHOLDER' entry is put in the Extension's sources and replaced
    with the real generated file path during the build_ext command.
    """
    from distutils.core import Extension
    from distutils.command.build_ext import build_ext
    from distutils.dir_util import mkpath
    from distutils import log
    from cffi import recompiler

    allsources = ['$PLACEHOLDER']
    allsources.extend(kwds.pop('sources', []))
    ext = Extension(name=module_name, sources=allsources, **kwds)

    def make_mod(tmpdir, pre_run=None):
        # Generate the C source into 'tmpdir' and return its path.
        c_file = os.path.join(tmpdir, module_name + source_extension)
        log.info("generating cffi module %r" % c_file)
        mkpath(tmpdir)
        # a setuptools-only, API-only hook: called with the "ext" and "ffi"
        # arguments just before we turn the ffi into C code.  To use it,
        # subclass the 'distutils.command.build_ext.build_ext' class and
        # add a method 'def pre_run(self, ext, ffi)'.
        if pre_run is not None:
            pre_run(ext, ffi)
        updated = recompiler.make_c_source(ffi, module_name, source, c_file)
        if not updated:
            log.info("already up-to-date")
        return c_file

    if dist.ext_modules is None:
        dist.ext_modules = []
    dist.ext_modules.append(ext)

    # Subclass whatever build_ext is already installed so other plugins'
    # customizations are preserved.
    base_class = dist.cmdclass.get('build_ext', build_ext)
    class build_ext_make_mod(base_class):
        def run(self):
            # Swap the placeholder for the real generated file, once.
            if ext.sources[0] == '$PLACEHOLDER':
                pre_run = getattr(self, 'pre_run', None)
                ext.sources[0] = make_mod(self.build_temp, pre_run)
            base_class.run(self)
    dist.cmdclass['build_ext'] = build_ext_make_mod
    # NB. multiple runs here will create multiple 'build_ext_make_mod'
    # classes.  Even in this case the 'build_ext' command should be
    # run once; but just in case, the logic above does nothing if
    # called again.
||||||
|
|
||||||
|
def _add_py_module(dist, ffi, module_name):
    """Register a generated pure-Python (ABI-mode) module on the distribution.

    Hooks both build_py (normal builds) and build_ext (for
    'build_ext --inplace') so the generated .py lands next to the rest of
    the package in either workflow.
    """
    from distutils.dir_util import mkpath
    from distutils.command.build_py import build_py
    from distutils.command.build_ext import build_ext
    from distutils import log
    from cffi import recompiler

    def generate_mod(py_file):
        # Emit (or refresh) the generated module at 'py_file'.
        log.info("generating cffi module %r" % py_file)
        mkpath(os.path.dirname(py_file))
        updated = recompiler.make_py_source(ffi, module_name, py_file)
        if not updated:
            log.info("already up-to-date")

    base_class = dist.cmdclass.get('build_py', build_py)
    class build_py_make_mod(base_class):
        def run(self):
            base_class.run(self)
            # Write the generated module into the build_lib tree.
            module_path = module_name.split('.')
            module_path[-1] += '.py'
            generate_mod(os.path.join(self.build_lib, *module_path))
    dist.cmdclass['build_py'] = build_py_make_mod

    # the following is only for "build_ext -i"
    base_class_2 = dist.cmdclass.get('build_ext', build_ext)
    class build_ext_make_mod(base_class_2):
        def run(self):
            base_class_2.run(self)
            if self.inplace:
                # from get_ext_fullpath() in distutils/command/build_ext.py
                module_path = module_name.split('.')
                package = '.'.join(module_path[:-1])
                build_py = self.get_finalized_command('build_py')
                package_dir = build_py.get_package_dir(package)
                file_name = module_path[-1] + '.py'
                generate_mod(os.path.join(package_dir, file_name))
    dist.cmdclass['build_ext'] = build_ext_make_mod
|
||||||
|
|
||||||
|
def cffi_modules(dist, attr, value):
    """setuptools entry point for the 'cffi_modules' keyword: accept a
    single module spec or a list of them and register each on 'dist'."""
    assert attr == 'cffi_modules'
    # NOTE(review): 'basestring' is Python 2 only — presumably a
    # basestring = str shim exists earlier in this file for Python 3;
    # confirm before relying on string values here under Python 3.
    if isinstance(value, basestring):
        value = [value]

    for cffi_module in value:
        add_cffi_module(dist, cffi_module)
|
1010
lib/python3.4/site-packages/cffi/vengine_cpy.py
Normal file
1010
lib/python3.4/site-packages/cffi/vengine_cpy.py
Normal file
File diff suppressed because it is too large
Load diff
671
lib/python3.4/site-packages/cffi/vengine_gen.py
Normal file
671
lib/python3.4/site-packages/cffi/vengine_gen.py
Normal file
|
@ -0,0 +1,671 @@
|
||||||
|
#
|
||||||
|
# DEPRECATED: implementation for ffi.verify()
|
||||||
|
#
|
||||||
|
import sys, os
|
||||||
|
import types
|
||||||
|
|
||||||
|
from . import model, ffiplatform
|
||||||
|
|
||||||
|
|
||||||
|
class VGenericEngine(object):
    """Generic (non-CPython-API) engine for the deprecated ffi.verify():
    compiles the user's C source into a plain shared library loaded with
    the cffi backend, rather than a CPython extension module."""
    _class_key = 'g'               # distinguishes generated module names
    _gen_python_module = False     # the .so is not an importable module

    def __init__(self, verifier):
        self.verifier = verifier
        self.ffi = verifier.ffi
        # C symbols that must be exported from the shared library.
        self.export_symbols = []
        # struct/union types whose compiler-reported layout still needs
        # checking against ours in _loaded_struct_or_union().
        self._struct_pending_verification = {}
|
||||||
|
|
||||||
|
def patch_extension_kwds(self, kwds):
    """Adjust the distutils Extension keyword dict for this engine."""
    # add 'export_symbols' to the dictionary.  Note that we add the
    # list before filling it.  When we fill it, it will thus also show
    # up in kwds['export_symbols'].
    kwds.setdefault('export_symbols', self.export_symbols)
|
||||||
|
|
||||||
|
def find_module(self, module_name, path, so_suffixes):
    """Search 'path' (or sys.path when None) for a file named
    module_name + suffix, trying each suffix in order.

    Returns the full filename of the first match, or None.
    """
    search_dirs = path if path is not None else sys.path
    for suffix in so_suffixes:
        wanted = module_name + suffix
        for directory in search_dirs:
            candidate = os.path.join(directory, wanted)
            if os.path.isfile(candidate):
                return candidate
|
||||||
|
|
||||||
|
def collect_types(self):
    """Engine hook called by the verifier; the generic engine has no
    type-collection pass."""
    pass # not needed in the generic engine

def _prnt(self, what=''):
    # Append one line of generated C source to the output file self._f.
    self._f.write(what + '\n')
|
||||||
|
|
||||||
|
def write_source_to_f(self):
    """Write the complete generated C source to self._f: standard
    header, user preamble, then one helper function per declaration."""
    prnt = self._prnt
    # first paste some standard set of lines that are mostly '#include'
    prnt(cffimod_header)
    # then paste the C source given by the user, verbatim.
    prnt(self.verifier.preamble)
    #
    # call generate_gen_xxx_decl(), for every xxx found from
    # ffi._parser._declarations.  This generates all the functions.
    self._generate('decl')
    #
    # on Windows, distutils insists on putting init_cffi_xyz in
    # 'export_symbols', so instead of fighting it, just give up and
    # give it one
    if sys.platform == 'win32':
        if sys.version_info >= (3,):
            prefix = 'PyInit_'
        else:
            prefix = 'init'
        modname = self.verifier.get_module_name()
        # Empty stub so the linker finds the symbol distutils exports.
        prnt("void %s%s(void) { }\n" % (prefix, modname))
|
||||||
|
|
||||||
|
def load_library(self, flags=0):
    """Load the compiled shared library and return an FFILibrary
    module-like object exposing the verified functions/constants."""
    # import it with the CFFI backend
    backend = self.ffi._backend
    # needs to make a path that contains '/', on Posix
    filename = os.path.join(os.curdir, self.verifier.modulefilename)
    module = backend.load_library(filename, flags)
    #
    # call loading_gen_struct() to get the struct layout inferred by
    # the C compiler
    self._load(module, 'loading')

    # build the FFILibrary class and instance, this is a module subclass
    # because modules are expected to have usually-constant-attributes and
    # in PyPy this means the JIT is able to treat attributes as constant,
    # which we want.
    class FFILibrary(types.ModuleType):
        _cffi_generic_module = module
        _cffi_ffi = self.ffi
        _cffi_dir = []
        def __dir__(self):
            return FFILibrary._cffi_dir
    library = FFILibrary("")
    #
    # finally, call the loaded_gen_xxx() functions.  This will set
    # up the 'library' object.
    self._load(module, 'loaded', library=library)
    return library
|
||||||
|
|
||||||
|
def _get_declarations(self):
|
||||||
|
lst = [(key, tp) for (key, (tp, qual)) in
|
||||||
|
self.ffi._parser._declarations.items()]
|
||||||
|
lst.sort()
|
||||||
|
return lst
|
||||||
|
|
||||||
|
def _generate(self, step_name):
    """Dispatch each declaration to _generate_gen_<kind>_<step_name>;
    declaration names look like 'function foo' / 'struct bar'."""
    for name, tp in self._get_declarations():
        kind, realname = name.split(' ', 1)
        try:
            method = getattr(self, '_generate_gen_%s_%s' % (kind,
                                                            step_name))
        except AttributeError:
            raise ffiplatform.VerificationError(
                "not implemented in verify(): %r" % name)
        try:
            method(tp, realname)
        except Exception as e:
            # Tag the exception with the declaration it came from,
            # then let it propagate.
            model.attach_exception_info(e, name)
            raise
|
||||||
|
|
||||||
|
def _load(self, module, step_name, **kwds):
    """Dispatch each declaration to _<step_name>_gen_<kind>(tp,
    realname, module, **kwds) — the load-time counterpart of
    _generate()."""
    for name, tp in self._get_declarations():
        kind, realname = name.split(' ', 1)
        method = getattr(self, '_%s_gen_%s' % (step_name, kind))
        try:
            method(tp, realname, module, **kwds)
        except Exception as e:
            # Tag the exception with the declaration it came from.
            model.attach_exception_info(e, name)
            raise
|
||||||
|
|
||||||
|
def _generate_nothing(self, tp, name):
    # Shared no-op for declaration kinds that emit no C code.
    pass

def _loaded_noop(self, tp, name, module, **kwds):
    # Shared no-op for declaration kinds with nothing to do at load time.
    pass

# ----------
# typedefs: generates no code so far

_generate_gen_typedef_decl = _generate_nothing
_loading_gen_typedef = _loaded_noop
_loaded_gen_typedef = _loaded_noop
|
||||||
|
|
||||||
|
# ----------
|
||||||
|
# function declarations
|
||||||
|
|
||||||
|
def _generate_gen_function_decl(self, tp, name):
    """Emit a C wrapper '_cffi_f_<name>' for the declared function.

    Struct/union arguments are passed by pointer and a struct/union
    result is returned through an extra leading '*r' out-parameter,
    because the generic backend cannot pass them by value.
    """
    assert isinstance(tp, model.FunctionPtrType)
    if tp.ellipsis:
        # cannot support vararg functions better than this: check for its
        # exact type (including the fixed arguments), and build it as a
        # constant function pointer (no _cffi_f_%s wrapper)
        self._generate_gen_const(False, name, tp)
        return
    prnt = self._prnt
    numargs = len(tp.args)
    argnames = []
    for i, type in enumerate(tp.args):
        indirection = ''
        if isinstance(type, model.StructOrUnion):
            indirection = '*'
        argnames.append('%sx%d' % (indirection, i))
    context = 'argument of %s' % name
    arglist = [type.get_c_name(' %s' % arg, context)
               for type, arg in zip(tp.args, argnames)]
    tpresult = tp.result
    if isinstance(tpresult, model.StructOrUnion):
        # struct result becomes a '*r' out-parameter; wrapper returns void
        arglist.insert(0, tpresult.get_c_name(' *r', context))
        tpresult = model.void_type
    arglist = ', '.join(arglist) or 'void'
    wrappername = '_cffi_f_%s' % name
    self.export_symbols.append(wrappername)
    if tp.abi:
        abi = tp.abi + ' '
    else:
        abi = ''
    funcdecl = ' %s%s(%s)' % (abi, wrappername, arglist)
    context = 'result of %s' % name
    prnt(tpresult.get_c_name(funcdecl, context))
    prnt('{')
    #
    if isinstance(tp.result, model.StructOrUnion):
        result_code = '*r = '
    elif not isinstance(tp.result, model.VoidType):
        result_code = 'return '
    else:
        result_code = ''
    prnt('  %s%s(%s);' % (result_code, name, ', '.join(argnames)))
    prnt('}')
    prnt()
|
||||||
|
|
||||||
|
_loading_gen_function = _loaded_noop

def _loaded_gen_function(self, tp, name, module, library):
    """Load '_cffi_f_<name>' from the shared library and install it on
    'library', adding Python-side wrappers that re-introduce by-value
    struct/union arguments/results over the by-pointer C wrapper."""
    assert isinstance(tp, model.FunctionPtrType)
    if tp.ellipsis:
        # vararg: was exported as a constant function pointer
        newfunction = self._load_constant(False, tp, name, module)
    else:
        indirections = []
        base_tp = tp
        if (any(isinstance(typ, model.StructOrUnion) for typ in tp.args)
                or isinstance(tp.result, model.StructOrUnion)):
            # mirror the signature rewriting done in
            # _generate_gen_function_decl(): structs become pointers
            indirect_args = []
            for i, typ in enumerate(tp.args):
                if isinstance(typ, model.StructOrUnion):
                    typ = model.PointerType(typ)
                    indirections.append((i, typ))
                indirect_args.append(typ)
            indirect_result = tp.result
            if isinstance(indirect_result, model.StructOrUnion):
                if indirect_result.fldtypes is None:
                    raise TypeError("'%s' is used as result type, "
                                    "but is opaque" % (
                                        indirect_result._get_c_name(),))
                indirect_result = model.PointerType(indirect_result)
                indirect_args.insert(0, indirect_result)
                indirections.insert(0, ("result", indirect_result))
                indirect_result = model.void_type
            tp = model.FunctionPtrType(tuple(indirect_args),
                                       indirect_result, tp.ellipsis)
        BFunc = self.ffi._get_cached_btype(tp)
        wrappername = '_cffi_f_%s' % name
        newfunction = module.load_function(BFunc, wrappername)
        for i, typ in indirections:
            newfunction = self._make_struct_wrapper(newfunction, i, typ,
                                                    base_tp)
    setattr(library, name, newfunction)
    type(library)._cffi_dir.append(name)
|
||||||
|
|
||||||
|
def _make_struct_wrapper(self, oldfunc, i, tp, base_tp):
    """Wrap 'oldfunc' so argument i (or the result, when i == "result")
    is converted between by-value struct and the pointer the C wrapper
    actually takes/fills."""
    backend = self.ffi._backend
    BType = self.ffi._get_cached_btype(tp)
    if i == "result":
        ffi = self.ffi
        def newfunc(*args):
            # allocate the result struct, let C fill it, return by value
            res = ffi.new(BType)
            oldfunc(res, *args)
            return res[0]
    else:
        def newfunc(*args):
            # copy the i-th argument into a fresh pointer-to-struct
            args = args[:i] + (backend.newp(BType, args[i]),) + args[i+1:]
            return oldfunc(*args)
    # keep the original (pre-rewrite) function type for introspection
    newfunc._cffi_base_type = base_tp
    return newfunc
|
||||||
|
|
||||||
|
# ----------
|
||||||
|
# named structs
|
||||||
|
|
||||||
|
def _generate_gen_struct_decl(self, tp, name):
    # Named struct: delegate to the shared struct/union code generator.
    assert name == tp.name
    self._generate_struct_or_union_decl(tp, 'struct', name)

def _loading_gen_struct(self, tp, name, module):
    self._loading_struct_or_union(tp, 'struct', name, module)

def _loaded_gen_struct(self, tp, name, module, **kwds):
    self._loaded_struct_or_union(tp)

def _generate_gen_union_decl(self, tp, name):
    # Named union: same machinery, 'union' prefix.
    assert name == tp.name
    self._generate_struct_or_union_decl(tp, 'union', name)

def _loading_gen_union(self, tp, name, module):
    self._loading_struct_or_union(tp, 'union', name, module)

def _loaded_gen_union(self, tp, name, module, **kwds):
    self._loaded_struct_or_union(tp)
|
||||||
|
|
||||||
|
def _generate_struct_or_union_decl(self, tp, prefix, name):
    """Emit two C functions for a non-opaque struct/union: a never-run
    check function producing compile-time diagnostics on field-type
    mismatches, and '_cffi_layout_...' returning size/alignment and
    per-field offset/size, terminated by -1."""
    if tp.fldnames is None:
        return # nothing to do with opaque structs
    checkfuncname = '_cffi_check_%s_%s' % (prefix, name)
    layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
    cname = ('%s %s' % (prefix, name)).strip()
    #
    prnt = self._prnt
    prnt('static void %s(%s *p)' % (checkfuncname, cname))
    prnt('{')
    prnt('  /* only to generate compile-time warnings or errors */')
    prnt('  (void)p;')
    for fname, ftype, fbitsize, fqual in tp.enumfields():
        if (isinstance(ftype, model.PrimitiveType)
            and ftype.is_integer_type()) or fbitsize >= 0:
            # accept all integers, but complain on float or double
            prnt('  (void)((p->%s) << 1);' % fname)
        else:
            # only accept exactly the type declared.
            try:
                prnt('  { %s = &p->%s; (void)tmp; }' % (
                    ftype.get_c_name('*tmp', 'field %r'%fname, quals=fqual),
                    fname))
            except ffiplatform.VerificationError as e:
                prnt('  /* %s */' % str(e))   # cannot verify it, ignore
    prnt('}')
    self.export_symbols.append(layoutfuncname)
    prnt('intptr_t %s(intptr_t i)' % (layoutfuncname,))
    prnt('{')
    prnt('  struct _cffi_aligncheck { char x; %s y; };' % cname)
    prnt('  static intptr_t nums[] = {')
    prnt('    sizeof(%s),' % cname)
    prnt('    offsetof(struct _cffi_aligncheck, y),')
    for fname, ftype, fbitsize, fqual in tp.enumfields():
        if fbitsize >= 0:
            continue      # xxx ignore fbitsize for now
        prnt('    offsetof(%s, %s),' % (cname, fname))
        if isinstance(ftype, model.ArrayType) and ftype.length is None:
            # open-ended array: size unknown, report 0
            prnt('    0,  /* %s */' % ftype._get_c_name())
        else:
            prnt('    sizeof(((%s *)0)->%s),' % (cname, fname))
    prnt('    -1')
    prnt('  };')
    prnt('  return nums[i];')
    prnt('  /* the next line is not executed, but compiled */')
    prnt('  %s(0);' % (checkfuncname,))
    prnt('}')
    prnt()
|
||||||
|
|
||||||
|
def _loading_struct_or_union(self, tp, prefix, name, module):
    """Call the generated '_cffi_layout_...' function and either adopt
    the compiler's layout (partial struct) or queue it for checking."""
    if tp.fldnames is None:
        return # nothing to do with opaque structs
    layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
    #
    BFunc = self.ffi._typeof_locked("intptr_t(*)(intptr_t)")[0]
    function = module.load_function(BFunc, layoutfuncname)
    layout = []
    num = 0
    # read values until the -1 terminator
    while True:
        x = function(num)
        if x < 0: break
        layout.append(x)
        num += 1
    if isinstance(tp, model.StructOrUnion) and tp.partial:
        # use the function()'s sizes and offsets to guide the
        # layout of the struct
        totalsize = layout[0]
        totalalignment = layout[1]
        fieldofs = layout[2::2]
        fieldsize = layout[3::2]
        tp.force_flatten()
        assert len(fieldofs) == len(fieldsize) == len(tp.fldnames)
        tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment
    else:
        # full declaration: verify later in _loaded_struct_or_union()
        cname = ('%s %s' % (prefix, name)).strip()
        self._struct_pending_verification[tp] = layout, cname
|
||||||
|
|
||||||
|
def _loaded_struct_or_union(self, tp):
    """Compare our computed layout with the C compiler's reported one
    (queued by _loading_struct_or_union) and raise on any mismatch."""
    if tp.fldnames is None:
        return # nothing to do with opaque structs
    self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered

    if tp in self._struct_pending_verification:
        # check that the layout sizes and offsets match the real ones
        def check(realvalue, expectedvalue, msg):
            if realvalue != expectedvalue:
                raise ffiplatform.VerificationError(
                    "%s (we have %d, but C compiler says %d)"
                    % (msg, expectedvalue, realvalue))
        ffi = self.ffi
        BStruct = ffi._get_cached_btype(tp)
        layout, cname = self._struct_pending_verification.pop(tp)
        check(layout[0], ffi.sizeof(BStruct), "wrong total size")
        check(layout[1], ffi.alignof(BStruct), "wrong total alignment")
        i = 2
        for fname, ftype, fbitsize, fqual in tp.enumfields():
            if fbitsize >= 0:
                continue        # xxx ignore fbitsize for now
            check(layout[i], ffi.offsetof(BStruct, fname),
                  "wrong offset for field %r" % (fname,))
            if layout[i+1] != 0:
                # 0 means "open-ended array, size unknown" — skip it
                BField = ffi._get_cached_btype(ftype)
                check(layout[i+1], ffi.sizeof(BField),
                      "wrong size for field %r" % (fname,))
            i += 2
        assert i == len(layout)
|
||||||
|
|
||||||
|
# ----------
|
||||||
|
# 'anonymous' declarations. These are produced for anonymous structs
|
||||||
|
# or unions; the 'name' is obtained by a typedef.
|
||||||
|
|
||||||
|
def _generate_gen_anonymous_decl(self, tp, name):
    # Anonymous struct/union/enum reached through a typedef: reuse the
    # named-type machinery with an empty prefix.
    if isinstance(tp, model.EnumType):
        self._generate_gen_enum_decl(tp, name, '')
    else:
        self._generate_struct_or_union_decl(tp, '', name)

def _loading_gen_anonymous(self, tp, name, module):
    if isinstance(tp, model.EnumType):
        self._loading_gen_enum(tp, name, module, '')
    else:
        self._loading_struct_or_union(tp, '', name, module)

def _loaded_gen_anonymous(self, tp, name, module, **kwds):
    if isinstance(tp, model.EnumType):
        self._loaded_gen_enum(tp, name, module, **kwds)
    else:
        self._loaded_struct_or_union(tp)
|
||||||
|
|
||||||
|
# ----------
|
||||||
|
# constants, likely declared with '#define'
|
||||||
|
|
||||||
|
def _generate_gen_const(self, is_int, name, tp=None, category='const',
                        check_value=None):
    """Emit the exported accessor '_cffi_<category>_<name>'.

    Three shapes: with 'check_value' a checker returning 0/-1 into an
    error buffer; integer constants return the value via a 'long long*'
    out-param (return value flags "was <= 0" for sign recovery); all
    other types are returned directly (structs by const pointer, 'var'
    category by address).
    """
    prnt = self._prnt
    funcname = '_cffi_%s_%s' % (category, name)
    self.export_symbols.append(funcname)
    if check_value is not None:
        assert is_int
        assert category == 'const'
        prnt('int %s(char *out_error)' % funcname)
        prnt('{')
        self._check_int_constant_value(name, check_value)
        prnt('  return 0;')
        prnt('}')
    elif is_int:
        assert category == 'const'
        prnt('int %s(long long *out_value)' % funcname)
        prnt('{')
        prnt('  *out_value = (long long)(%s);' % (name,))
        prnt('  return (%s) <= 0;' % (name,))
        prnt('}')
    else:
        assert tp is not None
        assert check_value is None
        if category == 'var':
            ampersand = '&'
        else:
            ampersand = ''
        extra = ''
        if category == 'const' and isinstance(tp, model.StructOrUnion):
            extra = 'const *'
            ampersand = '&'
        prnt(tp.get_c_name(' %s%s(void)' % (extra, funcname), name))
        prnt('{')
        prnt('  return (%s%s);' % (ampersand, name))
        prnt('}')
    prnt()
|
||||||
|
|
||||||
|
def _generate_gen_constant_decl(self, tp, name):
    # Constants use the integer fast path only for primitive int types.
    is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type()
    self._generate_gen_const(is_int, name, tp)

_loading_gen_constant = _loaded_noop
|
||||||
|
|
||||||
|
def _load_constant(self, is_int, tp, name, module, check_value=None):
    """Call the generated '_cffi_const_<name>' accessor and return the
    Python-level value of the constant."""
    funcname = '_cffi_const_%s' % name
    if check_value is not None:
        assert is_int
        # value already known from the cdef; only verify it matches C
        self._load_known_int_constant(module, funcname)
        value = check_value
    elif is_int:
        BType = self.ffi._typeof_locked("long long*")[0]
        BFunc = self.ffi._typeof_locked("int(*)(long long*)")[0]
        function = module.load_function(BFunc, funcname)
        p = self.ffi.new(BType)
        negative = function(p)
        value = int(p[0])
        if value < 0 and not negative:
            # C said the value is not <= 0 but it read back negative:
            # it was an unsigned 64-bit value; undo the wrap-around.
            BLongLong = self.ffi._typeof_locked("long long")[0]
            value += (1 << (8*self.ffi.sizeof(BLongLong)))
    else:
        assert check_value is None
        fntypeextra = '(*)(void)'
        if isinstance(tp, model.StructOrUnion):
            # struct constants are returned by pointer; dereference below
            fntypeextra = '*' + fntypeextra
        BFunc = self.ffi._typeof_locked(tp.get_c_name(fntypeextra, name))[0]
        function = module.load_function(BFunc, funcname)
        value = function()
        if isinstance(tp, model.StructOrUnion):
            value = value[0]
    return value
|
||||||
|
|
||||||
|
def _loaded_gen_constant(self, tp, name, module, library):
    # Read the constant from the shared library and expose it on the
    # FFILibrary instance.
    is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type()
    value = self._load_constant(is_int, tp, name, module)
    setattr(library, name, value)
    type(library)._cffi_dir.append(name)
|
||||||
|
|
||||||
|
# ----------
|
||||||
|
# enums
|
||||||
|
|
||||||
|
def _check_int_constant_value(self, name, value):
    """Emit C code that compares the macro/enumerator 'name' against
    the expected integer 'value' at runtime, writing a message into
    'out_error' and returning -1 on mismatch.

    The comparison is done as signed long for value <= 0 and unsigned
    long otherwise, so the full unsigned range is representable.
    """
    prnt = self._prnt
    if value <= 0:
        prnt('  if ((%s) > 0 || (long)(%s) != %dL) {' % (
            name, name, value))
    else:
        prnt('  if ((%s) <= 0 || (unsigned long)(%s) != %dUL) {' % (
            name, name, value))
    prnt('    char buf[64];')
    prnt('    if ((%s) <= 0)' % name)
    prnt('        sprintf(buf, "%%ld", (long)(%s));' % name)
    prnt('    else')
    prnt('        sprintf(buf, "%%lu", (unsigned long)(%s));' %
         name)
    prnt('    sprintf(out_error, "%s has the real value %s, not %s",')
    # name is truncated to 100 chars so the message fits the caller's buffer
    prnt('            "%s", buf, "%d");' % (name[:100], value))
    prnt('    return -1;')
    prnt('  }')
|
||||||
|
|
||||||
|
def _load_known_int_constant(self, module, funcname):
    """Run the generated checker 'funcname'; it returns < 0 and fills
    the char buffer with a message when the compiled value differs
    from the one declared in the cdef."""
    BType = self.ffi._typeof_locked("char[]")[0]
    BFunc = self.ffi._typeof_locked("int(*)(char*)")[0]
    function = module.load_function(BFunc, funcname)
    p = self.ffi.new(BType, 256)
    if function(p) < 0:
        error = self.ffi.string(p)
        if sys.version_info >= (3,):
            error = str(error, 'utf-8')
        raise ffiplatform.VerificationError(error)
|
||||||
|
|
||||||
|
def _enum_funcname(self, prefix, name):
|
||||||
|
# "$enum_$1" => "___D_enum____D_1"
|
||||||
|
name = name.replace('$', '___D_')
|
||||||
|
return '_cffi_e_%s_%s' % (prefix, name)
|
||||||
|
|
||||||
|
def _generate_gen_enum_decl(self, tp, name, prefix='enum'):
    """Emit checking code for an enum: per-enumerator constant readers
    when the enum is partial ('...'), else one checker verifying every
    declared enumerator value."""
    if tp.partial:
        for enumerator in tp.enumerators:
            self._generate_gen_const(True, enumerator)
        return
    #
    funcname = self._enum_funcname(prefix, name)
    self.export_symbols.append(funcname)
    prnt = self._prnt
    prnt('int %s(char *out_error)' % funcname)
    prnt('{')
    for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
        self._check_int_constant_value(enumerator, enumvalue)
    prnt('  return 0;')
    prnt('}')
    prnt()
|
||||||
|
|
||||||
|
def _loading_gen_enum(self, tp, name, module, prefix='enum'):
    """Resolve a partial enum's values from the compiled library, or
    run the generated checker for a fully-declared enum."""
    if tp.partial:
        enumvalues = [self._load_constant(True, tp, enumerator, module)
                      for enumerator in tp.enumerators]
        tp.enumvalues = tuple(enumvalues)
        tp.partial_resolved = True
    else:
        funcname = self._enum_funcname(prefix, name)
        self._load_known_int_constant(module, funcname)
|
||||||
|
|
||||||
|
def _loaded_gen_enum(self, tp, name, module, library):
    # Expose every enumerator as an integer attribute on the library.
    for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues):
        setattr(library, enumerator, enumvalue)
        type(library)._cffi_dir.append(enumerator)
|
||||||
|
|
||||||
|
# ----------
|
||||||
|
# macros: for now only for integers
|
||||||
|
|
||||||
|
def _generate_gen_macro_decl(self, tp, name):
|
||||||
|
if tp == '...':
|
||||||
|
check_value = None
|
||||||
|
else:
|
||||||
|
check_value = tp # an integer
|
||||||
|
self._generate_gen_const(True, name, check_value=check_value)
|
||||||
|
|
||||||
|
_loading_gen_macro = _loaded_noop

def _loaded_gen_macro(self, tp, name, module, library):
    # '...' means the value was unknown at cdef time; otherwise 'tp'
    # is the declared integer value and must be verified against C.
    if tp == '...':
        check_value = None
    else:
        check_value = tp # an integer
    value = self._load_constant(True, tp, name, module,
                                check_value=check_value)
    setattr(library, name, value)
    type(library)._cffi_dir.append(name)
|
||||||
|
|
||||||
|
# ----------
|
||||||
|
# global variables
|
||||||
|
|
||||||
|
def _generate_gen_variable_decl(self, tp, name):
    """Emit accessors for a global variable: arrays decay to a pointer
    constant (plus a '_cffi_sizeof_...' helper when the length is
    '...'); other types get an address-returning 'var' accessor."""
    if isinstance(tp, model.ArrayType):
        if tp.length == '...':
            prnt = self._prnt
            funcname = '_cffi_sizeof_%s' % (name,)
            self.export_symbols.append(funcname)
            prnt("size_t %s(void)" % funcname)
            prnt("{")
            prnt("  return sizeof(%s);" % (name,))
            prnt("}")
        tp_ptr = model.PointerType(tp.item)
        self._generate_gen_const(False, name, tp_ptr)
    else:
        tp_ptr = model.PointerType(tp)
        self._generate_gen_const(False, name, tp_ptr, category='var')
|
||||||
|
|
||||||
|
_loading_gen_variable = _loaded_noop

def _loaded_gen_variable(self, tp, name, module, library):
    """Install the global variable on 'library': arrays become a cdata
    attribute; scalars become a read/write property over ptr[0]."""
    if isinstance(tp, model.ArrayType):   # int a[5] is "constant" in the
                                          # sense that "a=..." is forbidden
        if tp.length == '...':
            # recover the actual length from sizeof(array)/sizeof(item)
            funcname = '_cffi_sizeof_%s' % (name,)
            BFunc = self.ffi._typeof_locked('size_t(*)(void)')[0]
            function = module.load_function(BFunc, funcname)
            size = function()
            BItemType = self.ffi._get_cached_btype(tp.item)
            length, rest = divmod(size, self.ffi.sizeof(BItemType))
            if rest != 0:
                raise ffiplatform.VerificationError(
                    "bad size: %r does not seem to be an array of %s" %
                    (name, tp.item))
            tp = tp.resolve_length(length)
        tp_ptr = model.PointerType(tp.item)
        value = self._load_constant(False, tp_ptr, name, module)
        # 'value' is a <cdata 'type *'> which we have to replace with
        # a <cdata 'type[N]'> if the N is actually known
        if tp.length is not None:
            BArray = self.ffi._get_cached_btype(tp)
            value = self.ffi.cast(BArray, value)
        setattr(library, name, value)
        type(library)._cffi_dir.append(name)
        return
    # remove ptr=<cdata 'int *'> from the library instance, and replace
    # it by a property on the class, which reads/writes into ptr[0].
    funcname = '_cffi_var_%s' % name
    BFunc = self.ffi._typeof_locked(tp.get_c_name('*(*)(void)', name))[0]
    function = module.load_function(BFunc, funcname)
    ptr = function()
    def getter(library):
        return ptr[0]
    def setter(library, value):
        ptr[0] = value
    setattr(type(library), name, property(getter, setter))
    type(library)._cffi_dir.append(name)
|
||||||
|
|
||||||
|
cffimod_header = r'''
|
||||||
|
#include <stdio.h>
|
||||||
|
#include <stddef.h>
|
||||||
|
#include <stdarg.h>
|
||||||
|
#include <errno.h>
|
||||||
|
#include <sys/types.h> /* XXX for ssize_t on some platforms */
|
||||||
|
|
||||||
|
/* this block of #ifs should be kept exactly identical between
|
||||||
|
c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py */
|
||||||
|
#if defined(_MSC_VER)
|
||||||
|
# include <malloc.h> /* for alloca() */
|
||||||
|
# if _MSC_VER < 1600 /* MSVC < 2010 */
|
||||||
|
typedef __int8 int8_t;
|
||||||
|
typedef __int16 int16_t;
|
||||||
|
typedef __int32 int32_t;
|
||||||
|
typedef __int64 int64_t;
|
||||||
|
typedef unsigned __int8 uint8_t;
|
||||||
|
typedef unsigned __int16 uint16_t;
|
||||||
|
typedef unsigned __int32 uint32_t;
|
||||||
|
typedef unsigned __int64 uint64_t;
|
||||||
|
typedef __int8 int_least8_t;
|
||||||
|
typedef __int16 int_least16_t;
|
||||||
|
typedef __int32 int_least32_t;
|
||||||
|
typedef __int64 int_least64_t;
|
||||||
|
typedef unsigned __int8 uint_least8_t;
|
||||||
|
typedef unsigned __int16 uint_least16_t;
|
||||||
|
typedef unsigned __int32 uint_least32_t;
|
||||||
|
typedef unsigned __int64 uint_least64_t;
|
||||||
|
typedef __int8 int_fast8_t;
|
||||||
|
typedef __int16 int_fast16_t;
|
||||||
|
typedef __int32 int_fast32_t;
|
||||||
|
typedef __int64 int_fast64_t;
|
||||||
|
typedef unsigned __int8 uint_fast8_t;
|
||||||
|
typedef unsigned __int16 uint_fast16_t;
|
||||||
|
typedef unsigned __int32 uint_fast32_t;
|
||||||
|
typedef unsigned __int64 uint_fast64_t;
|
||||||
|
typedef __int64 intmax_t;
|
||||||
|
typedef unsigned __int64 uintmax_t;
|
||||||
|
# else
|
||||||
|
# include <stdint.h>
|
||||||
|
# endif
|
||||||
|
# if _MSC_VER < 1800 /* MSVC < 2013 */
|
||||||
|
typedef unsigned char _Bool;
|
||||||
|
# endif
|
||||||
|
#else
|
||||||
|
# include <stdint.h>
|
||||||
|
# if (defined (__SVR4) && defined (__sun)) || defined(_AIX)
|
||||||
|
# include <alloca.h>
|
||||||
|
# endif
|
||||||
|
#endif
|
||||||
|
'''
|
316
lib/python3.4/site-packages/cffi/verifier.py
Normal file
316
lib/python3.4/site-packages/cffi/verifier.py
Normal file
|
@ -0,0 +1,316 @@
|
||||||
|
#
|
||||||
|
# DEPRECATED: implementation for ffi.verify()
|
||||||
|
#
|
||||||
|
import sys, os, binascii, shutil, io
|
||||||
|
from . import __version_verifier_modules__
|
||||||
|
from . import ffiplatform
|
||||||
|
|
||||||
|
# Pick the right way to list C-extension filename suffixes: the 'imp'
# module is deprecated from 3.3 in favour of importlib.machinery.
if sys.version_info >= (3, 3):
    import importlib.machinery
    def _extension_suffixes():
        # copy so callers cannot mutate the interpreter's list
        return importlib.machinery.EXTENSION_SUFFIXES[:]
else:
    import imp
    def _extension_suffixes():
        return [suffix for suffix, _, type in imp.get_suffixes()
                if type == imp.C_EXTENSION]
|
||||||
|
|
||||||
|
|
||||||
|
# NativeIO: an in-memory stream holding the interpreter's native 'str'
# type — StringIO on Python 3, a BytesIO that ASCII-encodes unicode
# writes on Python 2.
if sys.version_info >= (3,):
    NativeIO = io.StringIO
else:
    class NativeIO(io.BytesIO):
        def write(self, s):
            if isinstance(s, unicode):
                s = s.encode('ascii')
            super(NativeIO, self).write(s)
|
||||||
|
|
||||||
|
def _hack_at_distutils():
|
||||||
|
# Windows-only workaround for some configurations: see
|
||||||
|
# https://bugs.python.org/issue23246 (Python 2.7 with
|
||||||
|
# a specific MS compiler suite download)
|
||||||
|
if sys.platform == "win32":
|
||||||
|
try:
|
||||||
|
import setuptools # for side-effects, patches distutils
|
||||||
|
except ImportError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class Verifier(object):
|
||||||
|
|
||||||
|
def __init__(self, ffi, preamble, tmpdir=None, modulename=None,
             ext_package=None, tag='', force_generic_engine=False,
             source_extension='.c', flags=None, relative_to=None, **kwds):
    """Set up a Verifier for 'ffi' with the C 'preamble'.

    When no explicit 'modulename' is given, one is derived from a CRC
    of the Python version, cffi version, preamble, extension keywords
    and cdef sources, so a matching cached module can be reused.
    Extra **kwds are passed through to the distutils Extension.
    """
    if ffi._parser._uses_new_feature:
        raise ffiplatform.VerificationError(
            "feature not supported with ffi.verify(), but only "
            "with ffi.set_source(): %s" % (ffi._parser._uses_new_feature,))
    self.ffi = ffi
    self.preamble = preamble
    # flatten BEFORE the engine patches kwds below, so the hash covers
    # only the caller-supplied keywords (used only when no modulename).
    if not modulename:
        flattened_kwds = ffiplatform.flatten(kwds)
    vengine_class = _locate_engine_class(ffi, force_generic_engine)
    self._vengine = vengine_class(self)
    self._vengine.patch_extension_kwds(kwds)
    self.flags = flags
    self.kwds = self.make_relative_to(kwds, relative_to)
    #
    if modulename:
        if tag:
            raise TypeError("can't specify both 'modulename' and 'tag'")
    else:
        key = '\x00'.join([sys.version[:3], __version_verifier_modules__,
                           preamble, flattened_kwds] +
                          ffi._cdefsources)
        if sys.version_info >= (3,):
            key = key.encode('utf-8')
        k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff)
        # NOTE(review): lstrip('0x') strips any leading '0'/'x' chars,
        # not just the '0x' prefix (k2 strips only '0') — looks like a
        # long-standing quirk; changing it would rename every cached
        # module, so it is kept as-is.
        k1 = k1.lstrip('0x').rstrip('L')
        k2 = hex(binascii.crc32(key[1::2]) & 0xffffffff)
        k2 = k2.lstrip('0').rstrip('L')
        modulename = '_cffi_%s_%s%s%s' % (tag, self._vengine._class_key,
                                          k1, k2)
    suffix = _get_so_suffixes()[0]
    self.tmpdir = tmpdir or _caller_dir_pycache()
    self.sourcefilename = os.path.join(self.tmpdir, modulename + source_extension)
    self.modulefilename = os.path.join(self.tmpdir, modulename + suffix)
    self.ext_package = ext_package
    self._has_source = False
    self._has_module = False
|
||||||
|
def write_source(self, file=None):
|
||||||
|
"""Write the C source code. It is produced in 'self.sourcefilename',
|
||||||
|
which can be tweaked beforehand."""
|
||||||
|
with self.ffi._lock:
|
||||||
|
if self._has_source and file is None:
|
||||||
|
raise ffiplatform.VerificationError(
|
||||||
|
"source code already written")
|
||||||
|
self._write_source(file)
|
||||||
|
|
||||||
|
def compile_module(self):
|
||||||
|
"""Write the C source code (if not done already) and compile it.
|
||||||
|
This produces a dynamic link library in 'self.modulefilename'."""
|
||||||
|
with self.ffi._lock:
|
||||||
|
if self._has_module:
|
||||||
|
raise ffiplatform.VerificationError("module already compiled")
|
||||||
|
if not self._has_source:
|
||||||
|
self._write_source()
|
||||||
|
self._compile_module()
|
||||||
|
|
||||||
|
def load_library(self):
|
||||||
|
"""Get a C module from this Verifier instance.
|
||||||
|
Returns an instance of a FFILibrary class that behaves like the
|
||||||
|
objects returned by ffi.dlopen(), but that delegates all
|
||||||
|
operations to the C module. If necessary, the C code is written
|
||||||
|
and compiled first.
|
||||||
|
"""
|
||||||
|
with self.ffi._lock:
|
||||||
|
if not self._has_module:
|
||||||
|
self._locate_module()
|
||||||
|
if not self._has_module:
|
||||||
|
if not self._has_source:
|
||||||
|
self._write_source()
|
||||||
|
self._compile_module()
|
||||||
|
return self._load_library()
|
||||||
|
|
||||||
|
def get_module_name(self):
|
||||||
|
basename = os.path.basename(self.modulefilename)
|
||||||
|
# kill both the .so extension and the other .'s, as introduced
|
||||||
|
# by Python 3: 'basename.cpython-33m.so'
|
||||||
|
basename = basename.split('.', 1)[0]
|
||||||
|
# and the _d added in Python 2 debug builds --- but try to be
|
||||||
|
# conservative and not kill a legitimate _d
|
||||||
|
if basename.endswith('_d') and hasattr(sys, 'gettotalrefcount'):
|
||||||
|
basename = basename[:-2]
|
||||||
|
return basename
|
||||||
|
|
||||||
|
def get_extension(self):
|
||||||
|
_hack_at_distutils() # backward compatibility hack
|
||||||
|
if not self._has_source:
|
||||||
|
with self.ffi._lock:
|
||||||
|
if not self._has_source:
|
||||||
|
self._write_source()
|
||||||
|
sourcename = ffiplatform.maybe_relative_path(self.sourcefilename)
|
||||||
|
modname = self.get_module_name()
|
||||||
|
return ffiplatform.get_extension(sourcename, modname, **self.kwds)
|
||||||
|
|
||||||
|
def generates_python_module(self):
|
||||||
|
return self._vengine._gen_python_module
|
||||||
|
|
||||||
|
def make_relative_to(self, kwds, relative_to):
|
||||||
|
if relative_to and os.path.dirname(relative_to):
|
||||||
|
dirname = os.path.dirname(relative_to)
|
||||||
|
kwds = kwds.copy()
|
||||||
|
for key in ffiplatform.LIST_OF_FILE_NAMES:
|
||||||
|
if key in kwds:
|
||||||
|
lst = kwds[key]
|
||||||
|
if not isinstance(lst, (list, tuple)):
|
||||||
|
raise TypeError("keyword '%s' should be a list or tuple"
|
||||||
|
% (key,))
|
||||||
|
lst = [os.path.join(dirname, fn) for fn in lst]
|
||||||
|
kwds[key] = lst
|
||||||
|
return kwds
|
||||||
|
|
||||||
|
# ----------
|
||||||
|
|
||||||
|
def _locate_module(self):
|
||||||
|
if not os.path.isfile(self.modulefilename):
|
||||||
|
if self.ext_package:
|
||||||
|
try:
|
||||||
|
pkg = __import__(self.ext_package, None, None, ['__doc__'])
|
||||||
|
except ImportError:
|
||||||
|
return # cannot import the package itself, give up
|
||||||
|
# (e.g. it might be called differently before installation)
|
||||||
|
path = pkg.__path__
|
||||||
|
else:
|
||||||
|
path = None
|
||||||
|
filename = self._vengine.find_module(self.get_module_name(), path,
|
||||||
|
_get_so_suffixes())
|
||||||
|
if filename is None:
|
||||||
|
return
|
||||||
|
self.modulefilename = filename
|
||||||
|
self._vengine.collect_types()
|
||||||
|
self._has_module = True
|
||||||
|
|
||||||
|
def _write_source_to(self, file):
|
||||||
|
self._vengine._f = file
|
||||||
|
try:
|
||||||
|
self._vengine.write_source_to_f()
|
||||||
|
finally:
|
||||||
|
del self._vengine._f
|
||||||
|
|
||||||
|
def _write_source(self, file=None):
|
||||||
|
if file is not None:
|
||||||
|
self._write_source_to(file)
|
||||||
|
else:
|
||||||
|
# Write our source file to an in memory file.
|
||||||
|
f = NativeIO()
|
||||||
|
self._write_source_to(f)
|
||||||
|
source_data = f.getvalue()
|
||||||
|
|
||||||
|
# Determine if this matches the current file
|
||||||
|
if os.path.exists(self.sourcefilename):
|
||||||
|
with open(self.sourcefilename, "r") as fp:
|
||||||
|
needs_written = not (fp.read() == source_data)
|
||||||
|
else:
|
||||||
|
needs_written = True
|
||||||
|
|
||||||
|
# Actually write the file out if it doesn't match
|
||||||
|
if needs_written:
|
||||||
|
_ensure_dir(self.sourcefilename)
|
||||||
|
with open(self.sourcefilename, "w") as fp:
|
||||||
|
fp.write(source_data)
|
||||||
|
|
||||||
|
# Set this flag
|
||||||
|
self._has_source = True
|
||||||
|
|
||||||
|
def _compile_module(self):
|
||||||
|
# compile this C source
|
||||||
|
tmpdir = os.path.dirname(self.sourcefilename)
|
||||||
|
outputfilename = ffiplatform.compile(tmpdir, self.get_extension())
|
||||||
|
try:
|
||||||
|
same = ffiplatform.samefile(outputfilename, self.modulefilename)
|
||||||
|
except OSError:
|
||||||
|
same = False
|
||||||
|
if not same:
|
||||||
|
_ensure_dir(self.modulefilename)
|
||||||
|
shutil.move(outputfilename, self.modulefilename)
|
||||||
|
self._has_module = True
|
||||||
|
|
||||||
|
def _load_library(self):
|
||||||
|
assert self._has_module
|
||||||
|
if self.flags is not None:
|
||||||
|
return self._vengine.load_library(self.flags)
|
||||||
|
else:
|
||||||
|
return self._vengine.load_library()
|
||||||
|
|
||||||
|
# ____________________________________________________________

_FORCE_GENERIC_ENGINE = False      # for tests


def _locate_engine_class(ffi, force_generic_engine):
    """Choose the code-generation engine class for this ffi.

    The CPython-specific engine is used only when the ffi is backed by the
    real ``_cffi_backend`` extension on CPython; otherwise (PyPy, a missing
    or different backend, or an explicit/test override) the generic engine
    is selected.
    """
    use_generic = force_generic_engine or _FORCE_GENERIC_ENGINE
    if not use_generic:
        if '__pypy__' in sys.builtin_module_names:
            use_generic = True
        else:
            try:
                import _cffi_backend
            except ImportError:
                _cffi_backend = '?'     # sentinel: never 'is' the ffi backend
            use_generic = ffi._backend is not _cffi_backend
    if use_generic:
        from . import vengine_gen
        return vengine_gen.VGenericEngine
    from . import vengine_cpy
    return vengine_cpy.VCPythonEngine
|
||||||
|
|
||||||
|
# ____________________________________________________________

# Module-level override for the output directory; None means "use the
# '__pycache__' directory next to the caller" (see _caller_dir_pycache()
# and set_tmpdir()).
_TMPDIR = None
|
||||||
|
|
||||||
|
def _caller_dir_pycache():
    """Return the default output directory for generated files.

    Precedence: the set_tmpdir() override, then the CFFI_TMPDIR environment
    variable, then a '__pycache__' directory next to the calling module.
    """
    if _TMPDIR:
        return _TMPDIR
    result = os.environ.get('CFFI_TMPDIR')
    if result:
        return result
    # Look exactly two frames up the stack to find the module that invoked
    # ffi.verify().  NOTE: this is call-depth sensitive -- do not insert
    # intermediate calls between the public API and this helper.
    filename = sys._getframe(2).f_code.co_filename
    return os.path.abspath(os.path.join(os.path.dirname(filename),
                                        '__pycache__'))
|
||||||
|
|
||||||
|
def set_tmpdir(dirname):
    """Select *dirname* as the output directory for generated files,
    overriding the default '__pycache__' location used by
    _caller_dir_pycache()."""
    global _TMPDIR
    _TMPDIR = dirname
|
||||||
|
|
||||||
|
def cleanup_tmpdir(tmpdir=None, keep_so=False):
    """Clean up the temporary directory by removing all files in it
    called `_cffi_*.{c,so}` as well as the `build` subdirectory.

    With keep_so=True only the generated '.c' files are removed.
    All removal errors are deliberately ignored (best-effort cleanup).
    """
    tmpdir = tmpdir or _caller_dir_pycache()
    try:
        filelist = os.listdir(tmpdir)
    except OSError:
        return      # directory missing or unreadable: nothing to clean
    if keep_so:
        suffix = '.c'   # only remove .c files
    else:
        suffix = _get_so_suffixes()[0].lower()
    for fn in filelist:
        # Case-insensitive match: '_cffi_*' files with the chosen suffix;
        # plain '.c' files are always candidates regardless of 'suffix'.
        if fn.lower().startswith('_cffi_') and (
                fn.lower().endswith(suffix) or fn.lower().endswith('.c')):
            try:
                os.unlink(os.path.join(tmpdir, fn))
            except OSError:
                pass
    clean_dir = [os.path.join(tmpdir, 'build')]
    for dir in clean_dir:
        # Breadth-first walk: subdirectories found are appended to
        # 'clean_dir' and processed in turn.  Files are unlinked; the
        # directories themselves are left in place.
        try:
            for fn in os.listdir(dir):
                fn = os.path.join(dir, fn)
                if os.path.isdir(fn):
                    clean_dir.append(fn)
                else:
                    os.unlink(fn)
        except OSError:
            pass
|
||||||
|
|
||||||
|
def _get_so_suffixes():
    """Return the list of C-extension filename suffixes for this
    interpreter, falling back to a platform default when none are
    reported (e.g. PyPy without cpyext)."""
    suffixes = _extension_suffixes()
    if suffixes:
        return suffixes
    # bah, no C_EXTENSION available.  Occurs on pypy without cpyext
    return [".pyd" if sys.platform == 'win32' else ".so"]
|
||||||
|
|
||||||
|
def _ensure_dir(filename):
    """Best-effort creation of the parent directory of *filename*."""
    dirname = os.path.dirname(filename)
    try:
        os.makedirs(dirname)
    except OSError:
        # Directory already exists, or cannot be created -- in the latter
        # case the subsequent open() will raise a clearer error anyway.
        pass
|
|
@ -0,0 +1,85 @@
|
||||||
|
Metadata-Version: 1.1
|
||||||
|
Name: cryptography
|
||||||
|
Version: 1.4
|
||||||
|
Summary: cryptography is a package which provides cryptographic recipes and primitives to Python developers.
|
||||||
|
Home-page: https://github.com/pyca/cryptography
|
||||||
|
Author: The cryptography developers
|
||||||
|
Author-email: cryptography-dev@python.org
|
||||||
|
License: BSD or Apache License, Version 2.0
|
||||||
|
Description: Cryptography
|
||||||
|
============
|
||||||
|
|
||||||
|
.. image:: https://img.shields.io/pypi/v/cryptography.svg
|
||||||
|
:target: https://pypi.python.org/pypi/cryptography/
|
||||||
|
:alt: Latest Version
|
||||||
|
|
||||||
|
.. image:: https://readthedocs.org/projects/cryptography/badge/?version=latest
|
||||||
|
:target: https://cryptography.io
|
||||||
|
:alt: Latest Docs
|
||||||
|
|
||||||
|
.. image:: https://travis-ci.org/pyca/cryptography.svg?branch=master
|
||||||
|
:target: https://travis-ci.org/pyca/cryptography
|
||||||
|
|
||||||
|
.. image:: https://codecov.io/github/pyca/cryptography/coverage.svg?branch=master
|
||||||
|
:target: https://codecov.io/github/pyca/cryptography?branch=master
|
||||||
|
|
||||||
|
|
||||||
|
``cryptography`` is a package which provides cryptographic recipes and
|
||||||
|
primitives to Python developers. Our goal is for it to be your "cryptographic
|
||||||
|
standard library". It supports Python 2.6-2.7, Python 3.3+, and PyPy 2.6+.
|
||||||
|
|
||||||
|
``cryptography`` includes both high level recipes, and low level interfaces to
|
||||||
|
common cryptographic algorithms such as symmetric ciphers, message digests and
|
||||||
|
key derivation functions. For example, to encrypt something with
|
||||||
|
``cryptography``'s high level symmetric encryption recipe:
|
||||||
|
|
||||||
|
.. code-block:: pycon
|
||||||
|
|
||||||
|
>>> from cryptography.fernet import Fernet
|
||||||
|
>>> # Put this somewhere safe!
|
||||||
|
>>> key = Fernet.generate_key()
|
||||||
|
>>> f = Fernet(key)
|
||||||
|
>>> token = f.encrypt(b"A really secret message. Not for prying eyes.")
|
||||||
|
>>> token
|
||||||
|
'...'
|
||||||
|
>>> f.decrypt(token)
|
||||||
|
'A really secret message. Not for prying eyes.'
|
||||||
|
|
||||||
|
You can find more information in the `documentation`_.
|
||||||
|
|
||||||
|
Discussion
|
||||||
|
~~~~~~~~~~
|
||||||
|
|
||||||
|
If you run into bugs, you can file them in our `issue tracker`_.
|
||||||
|
|
||||||
|
We maintain a `cryptography-dev`_ mailing list for development discussion.
|
||||||
|
|
||||||
|
You can also join ``#cryptography-dev`` on Freenode to ask questions or get
|
||||||
|
involved.
|
||||||
|
|
||||||
|
|
||||||
|
.. _`documentation`: https://cryptography.io/
|
||||||
|
.. _`issue tracker`: https://github.com/pyca/cryptography/issues
|
||||||
|
.. _`cryptography-dev`: https://mail.python.org/mailman/listinfo/cryptography-dev
|
||||||
|
|
||||||
|
Platform: UNKNOWN
|
||||||
|
Classifier: Intended Audience :: Developers
|
||||||
|
Classifier: License :: OSI Approved :: Apache Software License
|
||||||
|
Classifier: License :: OSI Approved :: BSD License
|
||||||
|
Classifier: Natural Language :: English
|
||||||
|
Classifier: Operating System :: MacOS :: MacOS X
|
||||||
|
Classifier: Operating System :: POSIX
|
||||||
|
Classifier: Operating System :: POSIX :: BSD
|
||||||
|
Classifier: Operating System :: POSIX :: Linux
|
||||||
|
Classifier: Operating System :: Microsoft :: Windows
|
||||||
|
Classifier: Programming Language :: Python
|
||||||
|
Classifier: Programming Language :: Python :: 2
|
||||||
|
Classifier: Programming Language :: Python :: 2.6
|
||||||
|
Classifier: Programming Language :: Python :: 2.7
|
||||||
|
Classifier: Programming Language :: Python :: 3
|
||||||
|
Classifier: Programming Language :: Python :: 3.3
|
||||||
|
Classifier: Programming Language :: Python :: 3.4
|
||||||
|
Classifier: Programming Language :: Python :: 3.5
|
||||||
|
Classifier: Programming Language :: Python :: Implementation :: CPython
|
||||||
|
Classifier: Programming Language :: Python :: Implementation :: PyPy
|
||||||
|
Classifier: Topic :: Security :: Cryptography
|
|
@ -0,0 +1,280 @@
|
||||||
|
AUTHORS.rst
|
||||||
|
CHANGELOG.rst
|
||||||
|
CONTRIBUTING.rst
|
||||||
|
LICENSE
|
||||||
|
LICENSE.APACHE
|
||||||
|
LICENSE.BSD
|
||||||
|
MANIFEST.in
|
||||||
|
README.rst
|
||||||
|
setup.cfg
|
||||||
|
setup.py
|
||||||
|
docs/Makefile
|
||||||
|
docs/api-stability.rst
|
||||||
|
docs/changelog.rst
|
||||||
|
docs/community.rst
|
||||||
|
docs/conf.py
|
||||||
|
docs/cryptography-docs.py
|
||||||
|
docs/doing-a-release.rst
|
||||||
|
docs/exceptions.rst
|
||||||
|
docs/faq.rst
|
||||||
|
docs/fernet.rst
|
||||||
|
docs/glossary.rst
|
||||||
|
docs/index.rst
|
||||||
|
docs/installation.rst
|
||||||
|
docs/limitations.rst
|
||||||
|
docs/make.bat
|
||||||
|
docs/random-numbers.rst
|
||||||
|
docs/security.rst
|
||||||
|
docs/spelling_wordlist.txt
|
||||||
|
docs/_static/.keep
|
||||||
|
docs/development/c-bindings.rst
|
||||||
|
docs/development/getting-started.rst
|
||||||
|
docs/development/index.rst
|
||||||
|
docs/development/reviewing-patches.rst
|
||||||
|
docs/development/submitting-patches.rst
|
||||||
|
docs/development/test-vectors.rst
|
||||||
|
docs/development/custom-vectors/arc4.rst
|
||||||
|
docs/development/custom-vectors/cast5.rst
|
||||||
|
docs/development/custom-vectors/idea.rst
|
||||||
|
docs/development/custom-vectors/rsa-oaep-sha2.rst
|
||||||
|
docs/development/custom-vectors/secp256k1.rst
|
||||||
|
docs/development/custom-vectors/seed.rst
|
||||||
|
docs/development/custom-vectors/arc4/generate_arc4.py
|
||||||
|
docs/development/custom-vectors/arc4/verify_arc4.go
|
||||||
|
docs/development/custom-vectors/cast5/generate_cast5.py
|
||||||
|
docs/development/custom-vectors/cast5/verify_cast5.go
|
||||||
|
docs/development/custom-vectors/idea/generate_idea.py
|
||||||
|
docs/development/custom-vectors/idea/verify_idea.py
|
||||||
|
docs/development/custom-vectors/rsa-oaep-sha2/VerifyRSAOAEPSHA2.java
|
||||||
|
docs/development/custom-vectors/rsa-oaep-sha2/generate_rsa_oaep_sha2.py
|
||||||
|
docs/development/custom-vectors/secp256k1/generate_secp256k1.py
|
||||||
|
docs/development/custom-vectors/secp256k1/verify_secp256k1.py
|
||||||
|
docs/development/custom-vectors/seed/generate_seed.py
|
||||||
|
docs/development/custom-vectors/seed/verify_seed.py
|
||||||
|
docs/hazmat/backends/commoncrypto.rst
|
||||||
|
docs/hazmat/backends/index.rst
|
||||||
|
docs/hazmat/backends/interfaces.rst
|
||||||
|
docs/hazmat/backends/multibackend.rst
|
||||||
|
docs/hazmat/backends/openssl.rst
|
||||||
|
docs/hazmat/bindings/commoncrypto.rst
|
||||||
|
docs/hazmat/bindings/index.rst
|
||||||
|
docs/hazmat/bindings/openssl.rst
|
||||||
|
docs/hazmat/primitives/constant-time.rst
|
||||||
|
docs/hazmat/primitives/cryptographic-hashes.rst
|
||||||
|
docs/hazmat/primitives/index.rst
|
||||||
|
docs/hazmat/primitives/interfaces.rst
|
||||||
|
docs/hazmat/primitives/key-derivation-functions.rst
|
||||||
|
docs/hazmat/primitives/keywrap.rst
|
||||||
|
docs/hazmat/primitives/padding.rst
|
||||||
|
docs/hazmat/primitives/symmetric-encryption.rst
|
||||||
|
docs/hazmat/primitives/twofactor.rst
|
||||||
|
docs/hazmat/primitives/asymmetric/dh.rst
|
||||||
|
docs/hazmat/primitives/asymmetric/dsa.rst
|
||||||
|
docs/hazmat/primitives/asymmetric/ec.rst
|
||||||
|
docs/hazmat/primitives/asymmetric/index.rst
|
||||||
|
docs/hazmat/primitives/asymmetric/interfaces.rst
|
||||||
|
docs/hazmat/primitives/asymmetric/rsa.rst
|
||||||
|
docs/hazmat/primitives/asymmetric/serialization.rst
|
||||||
|
docs/hazmat/primitives/asymmetric/utils.rst
|
||||||
|
docs/hazmat/primitives/mac/cmac.rst
|
||||||
|
docs/hazmat/primitives/mac/hmac.rst
|
||||||
|
docs/hazmat/primitives/mac/index.rst
|
||||||
|
docs/x509/index.rst
|
||||||
|
docs/x509/reference.rst
|
||||||
|
docs/x509/tutorial.rst
|
||||||
|
src/_cffi_src/__init__.py
|
||||||
|
src/_cffi_src/build_commoncrypto.py
|
||||||
|
src/_cffi_src/build_constant_time.py
|
||||||
|
src/_cffi_src/build_openssl.py
|
||||||
|
src/_cffi_src/build_padding.py
|
||||||
|
src/_cffi_src/utils.py
|
||||||
|
src/_cffi_src/commoncrypto/__init__.py
|
||||||
|
src/_cffi_src/commoncrypto/cf.py
|
||||||
|
src/_cffi_src/commoncrypto/common_cryptor.py
|
||||||
|
src/_cffi_src/commoncrypto/common_digest.py
|
||||||
|
src/_cffi_src/commoncrypto/common_hmac.py
|
||||||
|
src/_cffi_src/commoncrypto/common_key_derivation.py
|
||||||
|
src/_cffi_src/commoncrypto/common_symmetric_key_wrap.py
|
||||||
|
src/_cffi_src/commoncrypto/seccertificate.py
|
||||||
|
src/_cffi_src/commoncrypto/secimport.py
|
||||||
|
src/_cffi_src/commoncrypto/secitem.py
|
||||||
|
src/_cffi_src/commoncrypto/seckey.py
|
||||||
|
src/_cffi_src/commoncrypto/seckeychain.py
|
||||||
|
src/_cffi_src/commoncrypto/secpolicy.py
|
||||||
|
src/_cffi_src/commoncrypto/sectransform.py
|
||||||
|
src/_cffi_src/commoncrypto/sectrust.py
|
||||||
|
src/_cffi_src/hazmat_src/constant_time.c
|
||||||
|
src/_cffi_src/hazmat_src/constant_time.h
|
||||||
|
src/_cffi_src/hazmat_src/padding.c
|
||||||
|
src/_cffi_src/hazmat_src/padding.h
|
||||||
|
src/_cffi_src/openssl/__init__.py
|
||||||
|
src/_cffi_src/openssl/aes.py
|
||||||
|
src/_cffi_src/openssl/asn1.py
|
||||||
|
src/_cffi_src/openssl/bignum.py
|
||||||
|
src/_cffi_src/openssl/bio.py
|
||||||
|
src/_cffi_src/openssl/callbacks.py
|
||||||
|
src/_cffi_src/openssl/cmac.py
|
||||||
|
src/_cffi_src/openssl/cms.py
|
||||||
|
src/_cffi_src/openssl/crypto.py
|
||||||
|
src/_cffi_src/openssl/dh.py
|
||||||
|
src/_cffi_src/openssl/dsa.py
|
||||||
|
src/_cffi_src/openssl/ec.py
|
||||||
|
src/_cffi_src/openssl/ecdh.py
|
||||||
|
src/_cffi_src/openssl/ecdsa.py
|
||||||
|
src/_cffi_src/openssl/engine.py
|
||||||
|
src/_cffi_src/openssl/err.py
|
||||||
|
src/_cffi_src/openssl/evp.py
|
||||||
|
src/_cffi_src/openssl/hmac.py
|
||||||
|
src/_cffi_src/openssl/nid.py
|
||||||
|
src/_cffi_src/openssl/objects.py
|
||||||
|
src/_cffi_src/openssl/ocsp.py
|
||||||
|
src/_cffi_src/openssl/opensslv.py
|
||||||
|
src/_cffi_src/openssl/pem.py
|
||||||
|
src/_cffi_src/openssl/pkcs12.py
|
||||||
|
src/_cffi_src/openssl/pkcs7.py
|
||||||
|
src/_cffi_src/openssl/rand.py
|
||||||
|
src/_cffi_src/openssl/rsa.py
|
||||||
|
src/_cffi_src/openssl/ssl.py
|
||||||
|
src/_cffi_src/openssl/x509.py
|
||||||
|
src/_cffi_src/openssl/x509_vfy.py
|
||||||
|
src/_cffi_src/openssl/x509name.py
|
||||||
|
src/_cffi_src/openssl/x509v3.py
|
||||||
|
src/cryptography/__about__.py
|
||||||
|
src/cryptography/__init__.py
|
||||||
|
src/cryptography/exceptions.py
|
||||||
|
src/cryptography/fernet.py
|
||||||
|
src/cryptography/utils.py
|
||||||
|
src/cryptography.egg-info/PKG-INFO
|
||||||
|
src/cryptography.egg-info/SOURCES.txt
|
||||||
|
src/cryptography.egg-info/dependency_links.txt
|
||||||
|
src/cryptography.egg-info/entry_points.txt
|
||||||
|
src/cryptography.egg-info/not-zip-safe
|
||||||
|
src/cryptography.egg-info/requires.txt
|
||||||
|
src/cryptography.egg-info/top_level.txt
|
||||||
|
src/cryptography/hazmat/__init__.py
|
||||||
|
src/cryptography/hazmat/backends/__init__.py
|
||||||
|
src/cryptography/hazmat/backends/interfaces.py
|
||||||
|
src/cryptography/hazmat/backends/multibackend.py
|
||||||
|
src/cryptography/hazmat/backends/commoncrypto/__init__.py
|
||||||
|
src/cryptography/hazmat/backends/commoncrypto/backend.py
|
||||||
|
src/cryptography/hazmat/backends/commoncrypto/ciphers.py
|
||||||
|
src/cryptography/hazmat/backends/commoncrypto/hashes.py
|
||||||
|
src/cryptography/hazmat/backends/commoncrypto/hmac.py
|
||||||
|
src/cryptography/hazmat/backends/openssl/__init__.py
|
||||||
|
src/cryptography/hazmat/backends/openssl/backend.py
|
||||||
|
src/cryptography/hazmat/backends/openssl/ciphers.py
|
||||||
|
src/cryptography/hazmat/backends/openssl/cmac.py
|
||||||
|
src/cryptography/hazmat/backends/openssl/decode_asn1.py
|
||||||
|
src/cryptography/hazmat/backends/openssl/dsa.py
|
||||||
|
src/cryptography/hazmat/backends/openssl/ec.py
|
||||||
|
src/cryptography/hazmat/backends/openssl/encode_asn1.py
|
||||||
|
src/cryptography/hazmat/backends/openssl/hashes.py
|
||||||
|
src/cryptography/hazmat/backends/openssl/hmac.py
|
||||||
|
src/cryptography/hazmat/backends/openssl/rsa.py
|
||||||
|
src/cryptography/hazmat/backends/openssl/utils.py
|
||||||
|
src/cryptography/hazmat/backends/openssl/x509.py
|
||||||
|
src/cryptography/hazmat/bindings/__init__.py
|
||||||
|
src/cryptography/hazmat/bindings/commoncrypto/__init__.py
|
||||||
|
src/cryptography/hazmat/bindings/commoncrypto/binding.py
|
||||||
|
src/cryptography/hazmat/bindings/openssl/__init__.py
|
||||||
|
src/cryptography/hazmat/bindings/openssl/_conditional.py
|
||||||
|
src/cryptography/hazmat/bindings/openssl/binding.py
|
||||||
|
src/cryptography/hazmat/primitives/__init__.py
|
||||||
|
src/cryptography/hazmat/primitives/cmac.py
|
||||||
|
src/cryptography/hazmat/primitives/constant_time.py
|
||||||
|
src/cryptography/hazmat/primitives/hashes.py
|
||||||
|
src/cryptography/hazmat/primitives/hmac.py
|
||||||
|
src/cryptography/hazmat/primitives/keywrap.py
|
||||||
|
src/cryptography/hazmat/primitives/padding.py
|
||||||
|
src/cryptography/hazmat/primitives/serialization.py
|
||||||
|
src/cryptography/hazmat/primitives/asymmetric/__init__.py
|
||||||
|
src/cryptography/hazmat/primitives/asymmetric/dh.py
|
||||||
|
src/cryptography/hazmat/primitives/asymmetric/dsa.py
|
||||||
|
src/cryptography/hazmat/primitives/asymmetric/ec.py
|
||||||
|
src/cryptography/hazmat/primitives/asymmetric/padding.py
|
||||||
|
src/cryptography/hazmat/primitives/asymmetric/rsa.py
|
||||||
|
src/cryptography/hazmat/primitives/asymmetric/utils.py
|
||||||
|
src/cryptography/hazmat/primitives/ciphers/__init__.py
|
||||||
|
src/cryptography/hazmat/primitives/ciphers/algorithms.py
|
||||||
|
src/cryptography/hazmat/primitives/ciphers/base.py
|
||||||
|
src/cryptography/hazmat/primitives/ciphers/modes.py
|
||||||
|
src/cryptography/hazmat/primitives/interfaces/__init__.py
|
||||||
|
src/cryptography/hazmat/primitives/kdf/__init__.py
|
||||||
|
src/cryptography/hazmat/primitives/kdf/concatkdf.py
|
||||||
|
src/cryptography/hazmat/primitives/kdf/hkdf.py
|
||||||
|
src/cryptography/hazmat/primitives/kdf/kbkdf.py
|
||||||
|
src/cryptography/hazmat/primitives/kdf/pbkdf2.py
|
||||||
|
src/cryptography/hazmat/primitives/kdf/x963kdf.py
|
||||||
|
src/cryptography/hazmat/primitives/twofactor/__init__.py
|
||||||
|
src/cryptography/hazmat/primitives/twofactor/hotp.py
|
||||||
|
src/cryptography/hazmat/primitives/twofactor/totp.py
|
||||||
|
src/cryptography/hazmat/primitives/twofactor/utils.py
|
||||||
|
src/cryptography/x509/__init__.py
|
||||||
|
src/cryptography/x509/base.py
|
||||||
|
src/cryptography/x509/extensions.py
|
||||||
|
src/cryptography/x509/general_name.py
|
||||||
|
src/cryptography/x509/name.py
|
||||||
|
src/cryptography/x509/oid.py
|
||||||
|
tests/__init__.py
|
||||||
|
tests/conftest.py
|
||||||
|
tests/doubles.py
|
||||||
|
tests/test_fernet.py
|
||||||
|
tests/test_interfaces.py
|
||||||
|
tests/test_utils.py
|
||||||
|
tests/test_warnings.py
|
||||||
|
tests/test_x509.py
|
||||||
|
tests/test_x509_crlbuilder.py
|
||||||
|
tests/test_x509_ext.py
|
||||||
|
tests/test_x509_revokedcertbuilder.py
|
||||||
|
tests/utils.py
|
||||||
|
tests/hazmat/__init__.py
|
||||||
|
tests/hazmat/backends/__init__.py
|
||||||
|
tests/hazmat/backends/test_commoncrypto.py
|
||||||
|
tests/hazmat/backends/test_multibackend.py
|
||||||
|
tests/hazmat/backends/test_openssl.py
|
||||||
|
tests/hazmat/bindings/test_commoncrypto.py
|
||||||
|
tests/hazmat/bindings/test_openssl.py
|
||||||
|
tests/hazmat/primitives/__init__.py
|
||||||
|
tests/hazmat/primitives/fixtures_dsa.py
|
||||||
|
tests/hazmat/primitives/fixtures_ec.py
|
||||||
|
tests/hazmat/primitives/fixtures_rsa.py
|
||||||
|
tests/hazmat/primitives/test_3des.py
|
||||||
|
tests/hazmat/primitives/test_aes.py
|
||||||
|
tests/hazmat/primitives/test_arc4.py
|
||||||
|
tests/hazmat/primitives/test_asym_utils.py
|
||||||
|
tests/hazmat/primitives/test_block.py
|
||||||
|
tests/hazmat/primitives/test_blowfish.py
|
||||||
|
tests/hazmat/primitives/test_camellia.py
|
||||||
|
tests/hazmat/primitives/test_cast5.py
|
||||||
|
tests/hazmat/primitives/test_ciphers.py
|
||||||
|
tests/hazmat/primitives/test_cmac.py
|
||||||
|
tests/hazmat/primitives/test_concatkdf.py
|
||||||
|
tests/hazmat/primitives/test_constant_time.py
|
||||||
|
tests/hazmat/primitives/test_dh.py
|
||||||
|
tests/hazmat/primitives/test_dsa.py
|
||||||
|
tests/hazmat/primitives/test_ec.py
|
||||||
|
tests/hazmat/primitives/test_hash_vectors.py
|
||||||
|
tests/hazmat/primitives/test_hashes.py
|
||||||
|
tests/hazmat/primitives/test_hkdf.py
|
||||||
|
tests/hazmat/primitives/test_hkdf_vectors.py
|
||||||
|
tests/hazmat/primitives/test_hmac.py
|
||||||
|
tests/hazmat/primitives/test_hmac_vectors.py
|
||||||
|
tests/hazmat/primitives/test_idea.py
|
||||||
|
tests/hazmat/primitives/test_kbkdf.py
|
||||||
|
tests/hazmat/primitives/test_kbkdf_vectors.py
|
||||||
|
tests/hazmat/primitives/test_keywrap.py
|
||||||
|
tests/hazmat/primitives/test_padding.py
|
||||||
|
tests/hazmat/primitives/test_pbkdf2hmac.py
|
||||||
|
tests/hazmat/primitives/test_pbkdf2hmac_vectors.py
|
||||||
|
tests/hazmat/primitives/test_rsa.py
|
||||||
|
tests/hazmat/primitives/test_seed.py
|
||||||
|
tests/hazmat/primitives/test_serialization.py
|
||||||
|
tests/hazmat/primitives/test_x963_vectors.py
|
||||||
|
tests/hazmat/primitives/test_x963kdf.py
|
||||||
|
tests/hazmat/primitives/utils.py
|
||||||
|
tests/hazmat/primitives/twofactor/__init__.py
|
||||||
|
tests/hazmat/primitives/twofactor/test_hotp.py
|
||||||
|
tests/hazmat/primitives/twofactor/test_totp.py
|
||||||
|
tests/hypothesis/__init__.py
|
||||||
|
tests/hypothesis/test_fernet.py
|
||||||
|
tests/hypothesis/test_padding.py
|
|
@ -0,0 +1 @@
|
||||||
|
|
|
@ -0,0 +1,3 @@
|
||||||
|
[cryptography.backends]
|
||||||
|
openssl = cryptography.hazmat.backends.openssl:backend
|
||||||
|
|
|
@ -0,0 +1,149 @@
|
||||||
|
../cryptography/__about__.py
|
||||||
|
../cryptography/__init__.py
|
||||||
|
../cryptography/fernet.py
|
||||||
|
../cryptography/exceptions.py
|
||||||
|
../cryptography/utils.py
|
||||||
|
../cryptography/x509/oid.py
|
||||||
|
../cryptography/x509/__init__.py
|
||||||
|
../cryptography/x509/extensions.py
|
||||||
|
../cryptography/x509/name.py
|
||||||
|
../cryptography/x509/base.py
|
||||||
|
../cryptography/x509/general_name.py
|
||||||
|
../cryptography/hazmat/__init__.py
|
||||||
|
../cryptography/hazmat/backends/multibackend.py
|
||||||
|
../cryptography/hazmat/backends/interfaces.py
|
||||||
|
../cryptography/hazmat/backends/__init__.py
|
||||||
|
../cryptography/hazmat/primitives/constant_time.py
|
||||||
|
../cryptography/hazmat/primitives/keywrap.py
|
||||||
|
../cryptography/hazmat/primitives/__init__.py
|
||||||
|
../cryptography/hazmat/primitives/hmac.py
|
||||||
|
../cryptography/hazmat/primitives/cmac.py
|
||||||
|
../cryptography/hazmat/primitives/padding.py
|
||||||
|
../cryptography/hazmat/primitives/serialization.py
|
||||||
|
../cryptography/hazmat/primitives/hashes.py
|
||||||
|
../cryptography/hazmat/bindings/__init__.py
|
||||||
|
../cryptography/hazmat/backends/openssl/x509.py
|
||||||
|
../cryptography/hazmat/backends/openssl/encode_asn1.py
|
||||||
|
../cryptography/hazmat/backends/openssl/decode_asn1.py
|
||||||
|
../cryptography/hazmat/backends/openssl/ec.py
|
||||||
|
../cryptography/hazmat/backends/openssl/__init__.py
|
||||||
|
../cryptography/hazmat/backends/openssl/hmac.py
|
||||||
|
../cryptography/hazmat/backends/openssl/cmac.py
|
||||||
|
../cryptography/hazmat/backends/openssl/ciphers.py
|
||||||
|
../cryptography/hazmat/backends/openssl/utils.py
|
||||||
|
../cryptography/hazmat/backends/openssl/backend.py
|
||||||
|
../cryptography/hazmat/backends/openssl/dsa.py
|
||||||
|
../cryptography/hazmat/backends/openssl/rsa.py
|
||||||
|
../cryptography/hazmat/backends/openssl/hashes.py
|
||||||
|
../cryptography/hazmat/backends/commoncrypto/__init__.py
|
||||||
|
../cryptography/hazmat/backends/commoncrypto/hmac.py
|
||||||
|
../cryptography/hazmat/backends/commoncrypto/ciphers.py
|
||||||
|
../cryptography/hazmat/backends/commoncrypto/backend.py
|
||||||
|
../cryptography/hazmat/backends/commoncrypto/hashes.py
|
||||||
|
../cryptography/hazmat/primitives/ciphers/modes.py
|
||||||
|
../cryptography/hazmat/primitives/ciphers/__init__.py
|
||||||
|
../cryptography/hazmat/primitives/ciphers/base.py
|
||||||
|
../cryptography/hazmat/primitives/ciphers/algorithms.py
|
||||||
|
../cryptography/hazmat/primitives/kdf/hkdf.py
|
||||||
|
../cryptography/hazmat/primitives/kdf/kbkdf.py
|
||||||
|
../cryptography/hazmat/primitives/kdf/pbkdf2.py
|
||||||
|
../cryptography/hazmat/primitives/kdf/__init__.py
|
||||||
|
../cryptography/hazmat/primitives/kdf/concatkdf.py
|
||||||
|
../cryptography/hazmat/primitives/kdf/x963kdf.py
|
||||||
|
../cryptography/hazmat/primitives/asymmetric/dh.py
|
||||||
|
../cryptography/hazmat/primitives/asymmetric/ec.py
|
||||||
|
../cryptography/hazmat/primitives/asymmetric/__init__.py
|
||||||
|
../cryptography/hazmat/primitives/asymmetric/utils.py
|
||||||
|
../cryptography/hazmat/primitives/asymmetric/dsa.py
|
||||||
|
../cryptography/hazmat/primitives/asymmetric/padding.py
|
||||||
|
../cryptography/hazmat/primitives/asymmetric/rsa.py
|
||||||
|
../cryptography/hazmat/primitives/interfaces/__init__.py
|
||||||
|
../cryptography/hazmat/primitives/twofactor/totp.py
|
||||||
|
../cryptography/hazmat/primitives/twofactor/hotp.py
|
||||||
|
../cryptography/hazmat/primitives/twofactor/__init__.py
|
||||||
|
../cryptography/hazmat/primitives/twofactor/utils.py
|
||||||
|
../cryptography/hazmat/bindings/openssl/__init__.py
|
||||||
|
../cryptography/hazmat/bindings/openssl/binding.py
|
||||||
|
../cryptography/hazmat/bindings/openssl/_conditional.py
|
||||||
|
../cryptography/hazmat/bindings/commoncrypto/__init__.py
|
||||||
|
../cryptography/hazmat/bindings/commoncrypto/binding.py
|
||||||
|
../cryptography/__pycache__/__about__.cpython-34.pyc
|
||||||
|
../cryptography/__pycache__/__init__.cpython-34.pyc
|
||||||
|
../cryptography/__pycache__/fernet.cpython-34.pyc
|
||||||
|
../cryptography/__pycache__/exceptions.cpython-34.pyc
|
||||||
|
../cryptography/__pycache__/utils.cpython-34.pyc
|
||||||
|
../cryptography/x509/__pycache__/oid.cpython-34.pyc
|
||||||
|
../cryptography/x509/__pycache__/__init__.cpython-34.pyc
|
||||||
|
../cryptography/x509/__pycache__/extensions.cpython-34.pyc
|
||||||
|
../cryptography/x509/__pycache__/name.cpython-34.pyc
|
||||||
|
../cryptography/x509/__pycache__/base.cpython-34.pyc
|
||||||
|
../cryptography/x509/__pycache__/general_name.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/__pycache__/__init__.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/backends/__pycache__/multibackend.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/backends/__pycache__/interfaces.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/backends/__pycache__/__init__.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/primitives/__pycache__/constant_time.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/primitives/__pycache__/keywrap.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/primitives/__pycache__/__init__.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/primitives/__pycache__/hmac.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/primitives/__pycache__/cmac.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/primitives/__pycache__/padding.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/primitives/__pycache__/serialization.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/primitives/__pycache__/hashes.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/bindings/__pycache__/__init__.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/backends/openssl/__pycache__/x509.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/backends/openssl/__pycache__/encode_asn1.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/backends/openssl/__pycache__/decode_asn1.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/backends/openssl/__pycache__/ec.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/backends/openssl/__pycache__/__init__.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/backends/openssl/__pycache__/hmac.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/backends/openssl/__pycache__/cmac.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/backends/openssl/__pycache__/ciphers.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/backends/openssl/__pycache__/utils.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/backends/openssl/__pycache__/backend.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/backends/openssl/__pycache__/dsa.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/backends/openssl/__pycache__/rsa.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/backends/openssl/__pycache__/hashes.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/backends/commoncrypto/__pycache__/__init__.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/backends/commoncrypto/__pycache__/hmac.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/backends/commoncrypto/__pycache__/ciphers.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/backends/commoncrypto/__pycache__/backend.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/backends/commoncrypto/__pycache__/hashes.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/primitives/ciphers/__pycache__/modes.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/primitives/ciphers/__pycache__/__init__.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/primitives/ciphers/__pycache__/base.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/primitives/ciphers/__pycache__/algorithms.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/primitives/kdf/__pycache__/hkdf.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/primitives/kdf/__pycache__/kbkdf.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/primitives/kdf/__pycache__/pbkdf2.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/primitives/kdf/__pycache__/__init__.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/primitives/kdf/__pycache__/concatkdf.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/primitives/kdf/__pycache__/x963kdf.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/primitives/asymmetric/__pycache__/dh.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/primitives/asymmetric/__pycache__/ec.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/primitives/asymmetric/__pycache__/__init__.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/primitives/asymmetric/__pycache__/utils.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/primitives/asymmetric/__pycache__/dsa.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/primitives/asymmetric/__pycache__/padding.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/primitives/asymmetric/__pycache__/rsa.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/primitives/interfaces/__pycache__/__init__.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/primitives/twofactor/__pycache__/totp.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/primitives/twofactor/__pycache__/hotp.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/primitives/twofactor/__pycache__/__init__.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/primitives/twofactor/__pycache__/utils.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/bindings/openssl/__pycache__/__init__.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/bindings/openssl/__pycache__/binding.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/bindings/openssl/__pycache__/_conditional.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/bindings/commoncrypto/__pycache__/__init__.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/bindings/commoncrypto/__pycache__/binding.cpython-34.pyc
|
||||||
|
../cryptography/hazmat/bindings/_openssl.cpython-34m.so
|
||||||
|
../cryptography/hazmat/bindings/_constant_time.cpython-34m.so
|
||||||
|
../cryptography/hazmat/bindings/_padding.cpython-34m.so
|
||||||
|
./
|
||||||
|
PKG-INFO
|
||||||
|
requires.txt
|
||||||
|
not-zip-safe
|
||||||
|
dependency_links.txt
|
||||||
|
SOURCES.txt
|
||||||
|
top_level.txt
|
||||||
|
entry_points.txt
|
|
@ -0,0 +1 @@
|
||||||
|
|
|
@ -0,0 +1,26 @@
|
||||||
|
idna>=2.0
|
||||||
|
pyasn1>=0.1.8
|
||||||
|
six>=1.4.1
|
||||||
|
setuptools>=11.3
|
||||||
|
cffi>=1.4.1
|
||||||
|
|
||||||
|
[docstest]
|
||||||
|
doc8
|
||||||
|
pyenchant
|
||||||
|
readme_renderer
|
||||||
|
sphinx
|
||||||
|
sphinx_rtd_theme
|
||||||
|
sphinxcontrib-spelling
|
||||||
|
|
||||||
|
[pep8test]
|
||||||
|
flake8
|
||||||
|
flake8-import-order
|
||||||
|
pep8-naming
|
||||||
|
|
||||||
|
[test]
|
||||||
|
pytest
|
||||||
|
pretend
|
||||||
|
iso8601
|
||||||
|
pyasn1_modules
|
||||||
|
hypothesis>=1.11.4
|
||||||
|
cryptography_vectors==1.4
|
|
@ -0,0 +1,4 @@
|
||||||
|
_openssl
|
||||||
|
_constant_time
|
||||||
|
_padding
|
||||||
|
cryptography
|
23
lib/python3.4/site-packages/cryptography/__about__.py
Normal file
23
lib/python3.4/site-packages/cryptography/__about__.py
Normal file
|
@ -0,0 +1,23 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"__title__", "__summary__", "__uri__", "__version__", "__author__",
|
||||||
|
"__email__", "__license__", "__copyright__",
|
||||||
|
]
|
||||||
|
|
||||||
|
__title__ = "cryptography"
|
||||||
|
__summary__ = ("cryptography is a package which provides cryptographic recipes"
|
||||||
|
" and primitives to Python developers.")
|
||||||
|
__uri__ = "https://github.com/pyca/cryptography"
|
||||||
|
|
||||||
|
__version__ = "1.4"
|
||||||
|
|
||||||
|
__author__ = "The cryptography developers"
|
||||||
|
__email__ = "cryptography-dev@python.org"
|
||||||
|
|
||||||
|
__license__ = "BSD or Apache License, Version 2.0"
|
||||||
|
__copyright__ = "Copyright 2013-2016 {0}".format(__author__)
|
27
lib/python3.4/site-packages/cryptography/__init__.py
Normal file
27
lib/python3.4/site-packages/cryptography/__init__.py
Normal file
|
@ -0,0 +1,27 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import warnings
|
||||||
|
|
||||||
|
from cryptography.__about__ import (
|
||||||
|
__author__, __copyright__, __email__, __license__, __summary__, __title__,
|
||||||
|
__uri__, __version__
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"__title__", "__summary__", "__uri__", "__version__", "__author__",
|
||||||
|
"__email__", "__license__", "__copyright__",
|
||||||
|
]
|
||||||
|
|
||||||
|
if sys.version_info[:2] == (2, 6):
|
||||||
|
warnings.warn(
|
||||||
|
"Python 2.6 is no longer supported by the Python core team, please "
|
||||||
|
"upgrade your Python. A future version of cryptography will drop "
|
||||||
|
"support for Python 2.6",
|
||||||
|
DeprecationWarning
|
||||||
|
)
|
56
lib/python3.4/site-packages/cryptography/exceptions.py
Normal file
56
lib/python3.4/site-packages/cryptography/exceptions.py
Normal file
|
@ -0,0 +1,56 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
from enum import Enum
|
||||||
|
|
||||||
|
|
||||||
|
class _Reasons(Enum):
|
||||||
|
BACKEND_MISSING_INTERFACE = 0
|
||||||
|
UNSUPPORTED_HASH = 1
|
||||||
|
UNSUPPORTED_CIPHER = 2
|
||||||
|
UNSUPPORTED_PADDING = 3
|
||||||
|
UNSUPPORTED_MGF = 4
|
||||||
|
UNSUPPORTED_PUBLIC_KEY_ALGORITHM = 5
|
||||||
|
UNSUPPORTED_ELLIPTIC_CURVE = 6
|
||||||
|
UNSUPPORTED_SERIALIZATION = 7
|
||||||
|
UNSUPPORTED_X509 = 8
|
||||||
|
UNSUPPORTED_EXCHANGE_ALGORITHM = 9
|
||||||
|
|
||||||
|
|
||||||
|
class UnsupportedAlgorithm(Exception):
|
||||||
|
def __init__(self, message, reason=None):
|
||||||
|
super(UnsupportedAlgorithm, self).__init__(message)
|
||||||
|
self._reason = reason
|
||||||
|
|
||||||
|
|
||||||
|
class AlreadyFinalized(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class AlreadyUpdated(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class NotYetFinalized(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class InvalidTag(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class InvalidSignature(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class InternalError(Exception):
|
||||||
|
def __init__(self, msg, err_code):
|
||||||
|
super(InternalError, self).__init__(msg)
|
||||||
|
self.err_code = err_code
|
||||||
|
|
||||||
|
|
||||||
|
class InvalidKey(Exception):
|
||||||
|
pass
|
143
lib/python3.4/site-packages/cryptography/fernet.py
Normal file
143
lib/python3.4/site-packages/cryptography/fernet.py
Normal file
|
@ -0,0 +1,143 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
import base64
|
||||||
|
import binascii
|
||||||
|
import os
|
||||||
|
import struct
|
||||||
|
import time
|
||||||
|
|
||||||
|
import six
|
||||||
|
|
||||||
|
from cryptography.exceptions import InvalidSignature
|
||||||
|
from cryptography.hazmat.backends import default_backend
|
||||||
|
from cryptography.hazmat.primitives import hashes, padding
|
||||||
|
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
|
||||||
|
from cryptography.hazmat.primitives.hmac import HMAC
|
||||||
|
|
||||||
|
|
||||||
|
class InvalidToken(Exception):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
_MAX_CLOCK_SKEW = 60
|
||||||
|
|
||||||
|
|
||||||
|
class Fernet(object):
|
||||||
|
def __init__(self, key, backend=None):
|
||||||
|
if backend is None:
|
||||||
|
backend = default_backend()
|
||||||
|
|
||||||
|
key = base64.urlsafe_b64decode(key)
|
||||||
|
if len(key) != 32:
|
||||||
|
raise ValueError(
|
||||||
|
"Fernet key must be 32 url-safe base64-encoded bytes."
|
||||||
|
)
|
||||||
|
|
||||||
|
self._signing_key = key[:16]
|
||||||
|
self._encryption_key = key[16:]
|
||||||
|
self._backend = backend
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def generate_key(cls):
|
||||||
|
return base64.urlsafe_b64encode(os.urandom(32))
|
||||||
|
|
||||||
|
def encrypt(self, data):
|
||||||
|
current_time = int(time.time())
|
||||||
|
iv = os.urandom(16)
|
||||||
|
return self._encrypt_from_parts(data, current_time, iv)
|
||||||
|
|
||||||
|
def _encrypt_from_parts(self, data, current_time, iv):
|
||||||
|
if not isinstance(data, bytes):
|
||||||
|
raise TypeError("data must be bytes.")
|
||||||
|
|
||||||
|
padder = padding.PKCS7(algorithms.AES.block_size).padder()
|
||||||
|
padded_data = padder.update(data) + padder.finalize()
|
||||||
|
encryptor = Cipher(
|
||||||
|
algorithms.AES(self._encryption_key), modes.CBC(iv), self._backend
|
||||||
|
).encryptor()
|
||||||
|
ciphertext = encryptor.update(padded_data) + encryptor.finalize()
|
||||||
|
|
||||||
|
basic_parts = (
|
||||||
|
b"\x80" + struct.pack(">Q", current_time) + iv + ciphertext
|
||||||
|
)
|
||||||
|
|
||||||
|
h = HMAC(self._signing_key, hashes.SHA256(), backend=self._backend)
|
||||||
|
h.update(basic_parts)
|
||||||
|
hmac = h.finalize()
|
||||||
|
return base64.urlsafe_b64encode(basic_parts + hmac)
|
||||||
|
|
||||||
|
def decrypt(self, token, ttl=None):
|
||||||
|
if not isinstance(token, bytes):
|
||||||
|
raise TypeError("token must be bytes.")
|
||||||
|
|
||||||
|
current_time = int(time.time())
|
||||||
|
|
||||||
|
try:
|
||||||
|
data = base64.urlsafe_b64decode(token)
|
||||||
|
except (TypeError, binascii.Error):
|
||||||
|
raise InvalidToken
|
||||||
|
|
||||||
|
if not data or six.indexbytes(data, 0) != 0x80:
|
||||||
|
raise InvalidToken
|
||||||
|
|
||||||
|
try:
|
||||||
|
timestamp, = struct.unpack(">Q", data[1:9])
|
||||||
|
except struct.error:
|
||||||
|
raise InvalidToken
|
||||||
|
if ttl is not None:
|
||||||
|
if timestamp + ttl < current_time:
|
||||||
|
raise InvalidToken
|
||||||
|
|
||||||
|
if current_time + _MAX_CLOCK_SKEW < timestamp:
|
||||||
|
raise InvalidToken
|
||||||
|
|
||||||
|
h = HMAC(self._signing_key, hashes.SHA256(), backend=self._backend)
|
||||||
|
h.update(data[:-32])
|
||||||
|
try:
|
||||||
|
h.verify(data[-32:])
|
||||||
|
except InvalidSignature:
|
||||||
|
raise InvalidToken
|
||||||
|
|
||||||
|
iv = data[9:25]
|
||||||
|
ciphertext = data[25:-32]
|
||||||
|
decryptor = Cipher(
|
||||||
|
algorithms.AES(self._encryption_key), modes.CBC(iv), self._backend
|
||||||
|
).decryptor()
|
||||||
|
plaintext_padded = decryptor.update(ciphertext)
|
||||||
|
try:
|
||||||
|
plaintext_padded += decryptor.finalize()
|
||||||
|
except ValueError:
|
||||||
|
raise InvalidToken
|
||||||
|
unpadder = padding.PKCS7(algorithms.AES.block_size).unpadder()
|
||||||
|
|
||||||
|
unpadded = unpadder.update(plaintext_padded)
|
||||||
|
try:
|
||||||
|
unpadded += unpadder.finalize()
|
||||||
|
except ValueError:
|
||||||
|
raise InvalidToken
|
||||||
|
return unpadded
|
||||||
|
|
||||||
|
|
||||||
|
class MultiFernet(object):
|
||||||
|
def __init__(self, fernets):
|
||||||
|
fernets = list(fernets)
|
||||||
|
if not fernets:
|
||||||
|
raise ValueError(
|
||||||
|
"MultiFernet requires at least one Fernet instance"
|
||||||
|
)
|
||||||
|
self._fernets = fernets
|
||||||
|
|
||||||
|
def encrypt(self, msg):
|
||||||
|
return self._fernets[0].encrypt(msg)
|
||||||
|
|
||||||
|
def decrypt(self, msg, ttl=None):
|
||||||
|
for f in self._fernets:
|
||||||
|
try:
|
||||||
|
return f.decrypt(msg, ttl)
|
||||||
|
except InvalidToken:
|
||||||
|
pass
|
||||||
|
raise InvalidToken
|
11
lib/python3.4/site-packages/cryptography/hazmat/__init__.py
Normal file
11
lib/python3.4/site-packages/cryptography/hazmat/__init__.py
Normal file
|
@ -0,0 +1,11 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
"""
|
||||||
|
Hazardous Materials
|
||||||
|
|
||||||
|
This is a "Hazardous Materials" module. You should ONLY use it if you're
|
||||||
|
100% absolutely sure that you know what you're doing because this module
|
||||||
|
is full of land mines, dragons, and dinosaurs with laser guns.
|
||||||
|
"""
|
||||||
|
from __future__ import absolute_import, division, print_function
|
|
@ -0,0 +1,37 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
import pkg_resources
|
||||||
|
|
||||||
|
from cryptography.hazmat.backends.multibackend import MultiBackend
|
||||||
|
|
||||||
|
|
||||||
|
_available_backends_list = None
|
||||||
|
|
||||||
|
|
||||||
|
def _available_backends():
|
||||||
|
global _available_backends_list
|
||||||
|
|
||||||
|
if _available_backends_list is None:
|
||||||
|
_available_backends_list = [
|
||||||
|
ep.resolve()
|
||||||
|
for ep in pkg_resources.iter_entry_points(
|
||||||
|
"cryptography.backends"
|
||||||
|
)
|
||||||
|
]
|
||||||
|
|
||||||
|
return _available_backends_list
|
||||||
|
|
||||||
|
_default_backend = None
|
||||||
|
|
||||||
|
|
||||||
|
def default_backend():
|
||||||
|
global _default_backend
|
||||||
|
|
||||||
|
if _default_backend is None:
|
||||||
|
_default_backend = MultiBackend(_available_backends())
|
||||||
|
|
||||||
|
return _default_backend
|
|
@ -0,0 +1,10 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
from cryptography.hazmat.backends.commoncrypto.backend import backend
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ["backend"]
|
|
@ -0,0 +1,245 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
from collections import namedtuple
|
||||||
|
|
||||||
|
from cryptography import utils
|
||||||
|
from cryptography.exceptions import InternalError
|
||||||
|
from cryptography.hazmat.backends.commoncrypto.ciphers import (
|
||||||
|
_CipherContext, _GCMCipherContext
|
||||||
|
)
|
||||||
|
from cryptography.hazmat.backends.commoncrypto.hashes import _HashContext
|
||||||
|
from cryptography.hazmat.backends.commoncrypto.hmac import _HMACContext
|
||||||
|
from cryptography.hazmat.backends.interfaces import (
|
||||||
|
CipherBackend, HMACBackend, HashBackend, PBKDF2HMACBackend
|
||||||
|
)
|
||||||
|
from cryptography.hazmat.bindings.commoncrypto.binding import Binding
|
||||||
|
from cryptography.hazmat.primitives.ciphers.algorithms import (
|
||||||
|
AES, ARC4, Blowfish, CAST5, TripleDES
|
||||||
|
)
|
||||||
|
from cryptography.hazmat.primitives.ciphers.modes import (
|
||||||
|
CBC, CFB, CFB8, CTR, ECB, GCM, OFB
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
HashMethods = namedtuple(
|
||||||
|
"HashMethods", ["ctx", "hash_init", "hash_update", "hash_final"]
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@utils.register_interface(CipherBackend)
|
||||||
|
@utils.register_interface(HashBackend)
|
||||||
|
@utils.register_interface(HMACBackend)
|
||||||
|
@utils.register_interface(PBKDF2HMACBackend)
|
||||||
|
class Backend(object):
|
||||||
|
"""
|
||||||
|
CommonCrypto API wrapper.
|
||||||
|
"""
|
||||||
|
name = "commoncrypto"
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self._binding = Binding()
|
||||||
|
self._ffi = self._binding.ffi
|
||||||
|
self._lib = self._binding.lib
|
||||||
|
|
||||||
|
self._cipher_registry = {}
|
||||||
|
self._register_default_ciphers()
|
||||||
|
self._hash_mapping = {
|
||||||
|
"md5": HashMethods(
|
||||||
|
"CC_MD5_CTX *", self._lib.CC_MD5_Init,
|
||||||
|
self._lib.CC_MD5_Update, self._lib.CC_MD5_Final
|
||||||
|
),
|
||||||
|
"sha1": HashMethods(
|
||||||
|
"CC_SHA1_CTX *", self._lib.CC_SHA1_Init,
|
||||||
|
self._lib.CC_SHA1_Update, self._lib.CC_SHA1_Final
|
||||||
|
),
|
||||||
|
"sha224": HashMethods(
|
||||||
|
"CC_SHA256_CTX *", self._lib.CC_SHA224_Init,
|
||||||
|
self._lib.CC_SHA224_Update, self._lib.CC_SHA224_Final
|
||||||
|
),
|
||||||
|
"sha256": HashMethods(
|
||||||
|
"CC_SHA256_CTX *", self._lib.CC_SHA256_Init,
|
||||||
|
self._lib.CC_SHA256_Update, self._lib.CC_SHA256_Final
|
||||||
|
),
|
||||||
|
"sha384": HashMethods(
|
||||||
|
"CC_SHA512_CTX *", self._lib.CC_SHA384_Init,
|
||||||
|
self._lib.CC_SHA384_Update, self._lib.CC_SHA384_Final
|
||||||
|
),
|
||||||
|
"sha512": HashMethods(
|
||||||
|
"CC_SHA512_CTX *", self._lib.CC_SHA512_Init,
|
||||||
|
self._lib.CC_SHA512_Update, self._lib.CC_SHA512_Final
|
||||||
|
),
|
||||||
|
}
|
||||||
|
|
||||||
|
self._supported_hmac_algorithms = {
|
||||||
|
"md5": self._lib.kCCHmacAlgMD5,
|
||||||
|
"sha1": self._lib.kCCHmacAlgSHA1,
|
||||||
|
"sha224": self._lib.kCCHmacAlgSHA224,
|
||||||
|
"sha256": self._lib.kCCHmacAlgSHA256,
|
||||||
|
"sha384": self._lib.kCCHmacAlgSHA384,
|
||||||
|
"sha512": self._lib.kCCHmacAlgSHA512,
|
||||||
|
}
|
||||||
|
|
||||||
|
self._supported_pbkdf2_hmac_algorithms = {
|
||||||
|
"sha1": self._lib.kCCPRFHmacAlgSHA1,
|
||||||
|
"sha224": self._lib.kCCPRFHmacAlgSHA224,
|
||||||
|
"sha256": self._lib.kCCPRFHmacAlgSHA256,
|
||||||
|
"sha384": self._lib.kCCPRFHmacAlgSHA384,
|
||||||
|
"sha512": self._lib.kCCPRFHmacAlgSHA512,
|
||||||
|
}
|
||||||
|
|
||||||
|
def hash_supported(self, algorithm):
|
||||||
|
return algorithm.name in self._hash_mapping
|
||||||
|
|
||||||
|
def hmac_supported(self, algorithm):
|
||||||
|
return algorithm.name in self._supported_hmac_algorithms
|
||||||
|
|
||||||
|
def create_hash_ctx(self, algorithm):
|
||||||
|
return _HashContext(self, algorithm)
|
||||||
|
|
||||||
|
def create_hmac_ctx(self, key, algorithm):
|
||||||
|
return _HMACContext(self, key, algorithm)
|
||||||
|
|
||||||
|
def cipher_supported(self, cipher, mode):
|
||||||
|
return (type(cipher), type(mode)) in self._cipher_registry
|
||||||
|
|
||||||
|
def create_symmetric_encryption_ctx(self, cipher, mode):
|
||||||
|
if isinstance(mode, GCM):
|
||||||
|
return _GCMCipherContext(
|
||||||
|
self, cipher, mode, self._lib.kCCEncrypt
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
return _CipherContext(self, cipher, mode, self._lib.kCCEncrypt)
|
||||||
|
|
||||||
|
def create_symmetric_decryption_ctx(self, cipher, mode):
|
||||||
|
if isinstance(mode, GCM):
|
||||||
|
return _GCMCipherContext(
|
||||||
|
self, cipher, mode, self._lib.kCCDecrypt
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
return _CipherContext(self, cipher, mode, self._lib.kCCDecrypt)
|
||||||
|
|
||||||
|
def pbkdf2_hmac_supported(self, algorithm):
|
||||||
|
return algorithm.name in self._supported_pbkdf2_hmac_algorithms
|
||||||
|
|
||||||
|
def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations,
|
||||||
|
key_material):
|
||||||
|
alg_enum = self._supported_pbkdf2_hmac_algorithms[algorithm.name]
|
||||||
|
buf = self._ffi.new("char[]", length)
|
||||||
|
res = self._lib.CCKeyDerivationPBKDF(
|
||||||
|
self._lib.kCCPBKDF2,
|
||||||
|
key_material,
|
||||||
|
len(key_material),
|
||||||
|
salt,
|
||||||
|
len(salt),
|
||||||
|
alg_enum,
|
||||||
|
iterations,
|
||||||
|
buf,
|
||||||
|
length
|
||||||
|
)
|
||||||
|
self._check_cipher_response(res)
|
||||||
|
|
||||||
|
return self._ffi.buffer(buf)[:]
|
||||||
|
|
||||||
|
def _register_cipher_adapter(self, cipher_cls, cipher_const, mode_cls,
|
||||||
|
mode_const):
|
||||||
|
if (cipher_cls, mode_cls) in self._cipher_registry:
|
||||||
|
raise ValueError("Duplicate registration for: {0} {1}.".format(
|
||||||
|
cipher_cls, mode_cls)
|
||||||
|
)
|
||||||
|
self._cipher_registry[cipher_cls, mode_cls] = (cipher_const,
|
||||||
|
mode_const)
|
||||||
|
|
||||||
|
def _register_default_ciphers(self):
|
||||||
|
for mode_cls, mode_const in [
|
||||||
|
(CBC, self._lib.kCCModeCBC),
|
||||||
|
(ECB, self._lib.kCCModeECB),
|
||||||
|
(CFB, self._lib.kCCModeCFB),
|
||||||
|
(CFB8, self._lib.kCCModeCFB8),
|
||||||
|
(OFB, self._lib.kCCModeOFB),
|
||||||
|
(CTR, self._lib.kCCModeCTR),
|
||||||
|
(GCM, self._lib.kCCModeGCM),
|
||||||
|
]:
|
||||||
|
self._register_cipher_adapter(
|
||||||
|
AES,
|
||||||
|
self._lib.kCCAlgorithmAES128,
|
||||||
|
mode_cls,
|
||||||
|
mode_const
|
||||||
|
)
|
||||||
|
for mode_cls, mode_const in [
|
||||||
|
(CBC, self._lib.kCCModeCBC),
|
||||||
|
(ECB, self._lib.kCCModeECB),
|
||||||
|
(CFB, self._lib.kCCModeCFB),
|
||||||
|
(CFB8, self._lib.kCCModeCFB8),
|
||||||
|
(OFB, self._lib.kCCModeOFB),
|
||||||
|
]:
|
||||||
|
self._register_cipher_adapter(
|
||||||
|
TripleDES,
|
||||||
|
self._lib.kCCAlgorithm3DES,
|
||||||
|
mode_cls,
|
||||||
|
mode_const
|
||||||
|
)
|
||||||
|
for mode_cls, mode_const in [
|
||||||
|
(CBC, self._lib.kCCModeCBC),
|
||||||
|
(ECB, self._lib.kCCModeECB),
|
||||||
|
(CFB, self._lib.kCCModeCFB),
|
||||||
|
(OFB, self._lib.kCCModeOFB)
|
||||||
|
]:
|
||||||
|
self._register_cipher_adapter(
|
||||||
|
Blowfish,
|
||||||
|
self._lib.kCCAlgorithmBlowfish,
|
||||||
|
mode_cls,
|
||||||
|
mode_const
|
||||||
|
)
|
||||||
|
for mode_cls, mode_const in [
|
||||||
|
(CBC, self._lib.kCCModeCBC),
|
||||||
|
(ECB, self._lib.kCCModeECB),
|
||||||
|
(CFB, self._lib.kCCModeCFB),
|
||||||
|
(OFB, self._lib.kCCModeOFB),
|
||||||
|
(CTR, self._lib.kCCModeCTR)
|
||||||
|
]:
|
||||||
|
self._register_cipher_adapter(
|
||||||
|
CAST5,
|
||||||
|
self._lib.kCCAlgorithmCAST,
|
||||||
|
mode_cls,
|
||||||
|
mode_const
|
||||||
|
)
|
||||||
|
self._register_cipher_adapter(
|
||||||
|
ARC4,
|
||||||
|
self._lib.kCCAlgorithmRC4,
|
||||||
|
type(None),
|
||||||
|
self._lib.kCCModeRC4
|
||||||
|
)
|
||||||
|
|
||||||
|
def _check_cipher_response(self, response):
|
||||||
|
if response == self._lib.kCCSuccess:
|
||||||
|
return
|
||||||
|
elif response == self._lib.kCCAlignmentError:
|
||||||
|
# This error is not currently triggered due to a bug filed as
|
||||||
|
# rdar://15589470
|
||||||
|
raise ValueError(
|
||||||
|
"The length of the provided data is not a multiple of "
|
||||||
|
"the block length."
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
raise InternalError(
|
||||||
|
"The backend returned an unknown error, consider filing a bug."
|
||||||
|
" Code: {0}.".format(response),
|
||||||
|
response
|
||||||
|
)
|
||||||
|
|
||||||
|
def _release_cipher_ctx(self, ctx):
|
||||||
|
"""
|
||||||
|
Called by the garbage collector and used to safely dereference and
|
||||||
|
release the context.
|
||||||
|
"""
|
||||||
|
if ctx[0] != self._ffi.NULL:
|
||||||
|
res = self._lib.CCCryptorRelease(ctx[0])
|
||||||
|
self._check_cipher_response(res)
|
||||||
|
ctx[0] = self._ffi.NULL
|
||||||
|
|
||||||
|
|
||||||
|
backend = Backend()
|
|
@ -0,0 +1,193 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
from cryptography import utils
|
||||||
|
from cryptography.exceptions import (
|
||||||
|
InvalidTag, UnsupportedAlgorithm, _Reasons
|
||||||
|
)
|
||||||
|
from cryptography.hazmat.primitives import ciphers, constant_time
|
||||||
|
from cryptography.hazmat.primitives.ciphers import modes
|
||||||
|
from cryptography.hazmat.primitives.ciphers.modes import (
|
||||||
|
CFB, CFB8, CTR, OFB
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@utils.register_interface(ciphers.CipherContext)
|
||||||
|
class _CipherContext(object):
|
||||||
|
def __init__(self, backend, cipher, mode, operation):
|
||||||
|
self._backend = backend
|
||||||
|
self._cipher = cipher
|
||||||
|
self._mode = mode
|
||||||
|
self._operation = operation
|
||||||
|
# There is a bug in CommonCrypto where block ciphers do not raise
|
||||||
|
# kCCAlignmentError when finalizing if you supply non-block aligned
|
||||||
|
# data. To work around this we need to keep track of the block
|
||||||
|
# alignment ourselves, but only for alg+mode combos that require
|
||||||
|
# block alignment. OFB, CFB, and CTR make a block cipher algorithm
|
||||||
|
# into a stream cipher so we don't need to track them (and thus their
|
||||||
|
# block size is effectively 1 byte just like OpenSSL/CommonCrypto
|
||||||
|
# treat RC4 and other stream cipher block sizes).
|
||||||
|
# This bug has been filed as rdar://15589470
|
||||||
|
self._bytes_processed = 0
|
||||||
|
if (isinstance(cipher, ciphers.BlockCipherAlgorithm) and not
|
||||||
|
isinstance(mode, (OFB, CFB, CFB8, CTR))):
|
||||||
|
self._byte_block_size = cipher.block_size // 8
|
||||||
|
else:
|
||||||
|
self._byte_block_size = 1
|
||||||
|
|
||||||
|
registry = self._backend._cipher_registry
|
||||||
|
try:
|
||||||
|
cipher_enum, mode_enum = registry[type(cipher), type(mode)]
|
||||||
|
except KeyError:
|
||||||
|
raise UnsupportedAlgorithm(
|
||||||
|
"cipher {0} in {1} mode is not supported "
|
||||||
|
"by this backend.".format(
|
||||||
|
cipher.name, mode.name if mode else mode),
|
||||||
|
_Reasons.UNSUPPORTED_CIPHER
|
||||||
|
)
|
||||||
|
|
||||||
|
ctx = self._backend._ffi.new("CCCryptorRef *")
|
||||||
|
ctx = self._backend._ffi.gc(ctx, self._backend._release_cipher_ctx)
|
||||||
|
|
||||||
|
if isinstance(mode, modes.ModeWithInitializationVector):
|
||||||
|
iv_nonce = mode.initialization_vector
|
||||||
|
elif isinstance(mode, modes.ModeWithNonce):
|
||||||
|
iv_nonce = mode.nonce
|
||||||
|
else:
|
||||||
|
iv_nonce = self._backend._ffi.NULL
|
||||||
|
|
||||||
|
if isinstance(mode, CTR):
|
||||||
|
mode_option = self._backend._lib.kCCModeOptionCTR_BE
|
||||||
|
else:
|
||||||
|
mode_option = 0
|
||||||
|
|
||||||
|
res = self._backend._lib.CCCryptorCreateWithMode(
|
||||||
|
operation,
|
||||||
|
mode_enum, cipher_enum,
|
||||||
|
self._backend._lib.ccNoPadding, iv_nonce,
|
||||||
|
cipher.key, len(cipher.key),
|
||||||
|
self._backend._ffi.NULL, 0, 0, mode_option, ctx)
|
||||||
|
self._backend._check_cipher_response(res)
|
||||||
|
|
||||||
|
self._ctx = ctx
|
||||||
|
|
||||||
|
def update(self, data):
|
||||||
|
# Count bytes processed to handle block alignment.
|
||||||
|
self._bytes_processed += len(data)
|
||||||
|
buf = self._backend._ffi.new(
|
||||||
|
"unsigned char[]", len(data) + self._byte_block_size - 1)
|
||||||
|
outlen = self._backend._ffi.new("size_t *")
|
||||||
|
res = self._backend._lib.CCCryptorUpdate(
|
||||||
|
self._ctx[0], data, len(data), buf,
|
||||||
|
len(data) + self._byte_block_size - 1, outlen)
|
||||||
|
self._backend._check_cipher_response(res)
|
||||||
|
return self._backend._ffi.buffer(buf)[:outlen[0]]
|
||||||
|
|
||||||
|
def finalize(self):
|
||||||
|
# Raise error if block alignment is wrong.
|
||||||
|
if self._bytes_processed % self._byte_block_size:
|
||||||
|
raise ValueError(
|
||||||
|
"The length of the provided data is not a multiple of "
|
||||||
|
"the block length."
|
||||||
|
)
|
||||||
|
buf = self._backend._ffi.new("unsigned char[]", self._byte_block_size)
|
||||||
|
outlen = self._backend._ffi.new("size_t *")
|
||||||
|
res = self._backend._lib.CCCryptorFinal(
|
||||||
|
self._ctx[0], buf, len(buf), outlen)
|
||||||
|
self._backend._check_cipher_response(res)
|
||||||
|
self._backend._release_cipher_ctx(self._ctx)
|
||||||
|
return self._backend._ffi.buffer(buf)[:outlen[0]]
|
||||||
|
|
||||||
|
|
||||||
|
@utils.register_interface(ciphers.AEADCipherContext)
@utils.register_interface(ciphers.AEADEncryptionContext)
class _GCMCipherContext(object):
    """
    AEAD (GCM) cipher context backed by CommonCrypto's CCCryptorGCM* API.

    NOTE(review): this class deliberately works around two CommonCrypto
    bugs (see the rdar references inline); the exact ordering of the FFI
    calls below is load-bearing and must not be changed.
    """

    def __init__(self, backend, cipher, mode, operation):
        # operation is kCCEncrypt or kCCDecrypt; it selects which GCM
        # entry point update() calls and whether finalize() checks the tag.
        self._backend = backend
        self._cipher = cipher
        self._mode = mode
        self._operation = operation
        # Authentication tag; populated by finalize().
        self._tag = None

        registry = self._backend._cipher_registry
        try:
            cipher_enum, mode_enum = registry[type(cipher), type(mode)]
        except KeyError:
            raise UnsupportedAlgorithm(
                "cipher {0} in {1} mode is not supported "
                "by this backend.".format(
                    cipher.name, mode.name if mode else mode),
            _Reasons.UNSUPPORTED_CIPHER
            )

        ctx = self._backend._ffi.new("CCCryptorRef *")
        # Ensure the native cryptor is released even if we raise below.
        ctx = self._backend._ffi.gc(ctx, self._backend._release_cipher_ctx)

        self._ctx = ctx

        res = self._backend._lib.CCCryptorCreateWithMode(
            operation,
            mode_enum, cipher_enum,
            self._backend._lib.ccNoPadding,
            self._backend._ffi.NULL,
            cipher.key, len(cipher.key),
            self._backend._ffi.NULL, 0, 0, 0, self._ctx)
        self._backend._check_cipher_response(res)

        # The IV is supplied via a separate GCM-specific call, not
        # through CCCryptorCreateWithMode.
        res = self._backend._lib.CCCryptorGCMAddIV(
            self._ctx[0],
            mode.initialization_vector,
            len(mode.initialization_vector)
        )
        self._backend._check_cipher_response(res)
        # CommonCrypto has a bug where calling update without at least one
        # call to authenticate_additional_data will result in null byte output
        # for ciphertext. The following empty byte string call prevents the
        # issue, which is present in at least 10.8 and 10.9.
        # Filed as rdar://18314544
        self.authenticate_additional_data(b"")

    def update(self, data):
        """Encrypt or decrypt *data* and return the resulting bytes."""
        buf = self._backend._ffi.new("unsigned char[]", len(data))
        args = (self._ctx[0], data, len(data), buf)
        if self._operation == self._backend._lib.kCCEncrypt:
            res = self._backend._lib.CCCryptorGCMEncrypt(*args)
        else:
            res = self._backend._lib.CCCryptorGCMDecrypt(*args)

        self._backend._check_cipher_response(res)
        # GCM is a stream construction: output length always equals input
        # length, so the whole buffer is returned.
        return self._backend._ffi.buffer(buf)[:]

    def finalize(self):
        """
        Complete the operation, compute/verify the tag, and return b"".

        When decrypting, raises InvalidTag if the computed tag does not
        match the tag supplied in the mode (compared in constant time).
        """
        # CommonCrypto has a yet another bug where you must make at least one
        # call to update. If you pass just AAD and call finalize without a call
        # to update you'll get null bytes for tag. The following update call
        # prevents this issue, which is present in at least 10.8 and 10.9.
        # Filed as rdar://18314580
        self.update(b"")
        tag_size = self._cipher.block_size // 8
        tag_buf = self._backend._ffi.new("unsigned char[]", tag_size)
        tag_len = self._backend._ffi.new("size_t *", tag_size)
        res = self._backend._lib.CCCryptorGCMFinal(
            self._ctx[0], tag_buf, tag_len
        )
        self._backend._check_cipher_response(res)
        self._backend._release_cipher_ctx(self._ctx)
        self._tag = self._backend._ffi.buffer(tag_buf)[:]
        # Constant-time comparison against the caller-supplied tag,
        # truncated to the caller's tag length.
        if (self._operation == self._backend._lib.kCCDecrypt and
                not constant_time.bytes_eq(
                    self._tag[:len(self._mode.tag)], self._mode.tag
                )):
            raise InvalidTag
        return b""

    def authenticate_additional_data(self, data):
        """Feed additional authenticated data (AAD) into the GCM state."""
        res = self._backend._lib.CCCryptorGCMAddAAD(
            self._ctx[0], data, len(data)
        )
        self._backend._check_cipher_response(res)

    # Read-only public accessor for the authentication tag.
    tag = utils.read_only_property("_tag")
|
@ -0,0 +1,55 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
from cryptography import utils
|
||||||
|
from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
|
||||||
|
from cryptography.hazmat.primitives import hashes
|
||||||
|
|
||||||
|
|
||||||
|
@utils.register_interface(hashes.HashContext)
class _HashContext(object):
    """
    Message-digest context backed by CommonCrypto's CC_<alg>_* functions.

    The per-algorithm ctx type and init/update/final function pointers are
    looked up in the backend's ``_hash_mapping`` table.
    """

    def __init__(self, backend, algorithm, ctx=None):
        # ctx may be supplied by copy(); otherwise a fresh native context
        # is allocated and initialized here.
        self._algorithm = algorithm
        self._backend = backend

        if ctx is None:
            try:
                methods = self._backend._hash_mapping[self.algorithm.name]
            except KeyError:
                raise UnsupportedAlgorithm(
                    "{0} is not a supported hash on this backend.".format(
                        algorithm.name),
                    _Reasons.UNSUPPORTED_HASH
                )
            ctx = self._backend._ffi.new(methods.ctx)
            res = methods.hash_init(ctx)
            # CC_<alg>_Init returns 1 on success.
            assert res == 1

        self._ctx = ctx

    algorithm = utils.read_only_property("_algorithm")

    def copy(self):
        """Return an independent copy of this hash context."""
        methods = self._backend._hash_mapping[self.algorithm.name]
        new_ctx = self._backend._ffi.new(methods.ctx)
        # CommonCrypto has no APIs for copying hashes, so we have to copy the
        # underlying struct.
        new_ctx[0] = self._ctx[0]

        return _HashContext(self._backend, self.algorithm, ctx=new_ctx)

    def update(self, data):
        """Absorb *data* into the digest state."""
        methods = self._backend._hash_mapping[self.algorithm.name]
        res = methods.hash_update(self._ctx, data, len(data))
        assert res == 1

    def finalize(self):
        """Return the final digest as bytes."""
        methods = self._backend._hash_mapping[self.algorithm.name]
        buf = self._backend._ffi.new("unsigned char[]",
                                     self.algorithm.digest_size)
        res = methods.hash_final(buf, self._ctx)
        assert res == 1
        return self._backend._ffi.buffer(buf)[:]
|
@ -0,0 +1,59 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
from cryptography import utils
|
||||||
|
from cryptography.exceptions import (
|
||||||
|
InvalidSignature, UnsupportedAlgorithm, _Reasons
|
||||||
|
)
|
||||||
|
from cryptography.hazmat.primitives import constant_time, hashes, interfaces
|
||||||
|
|
||||||
|
|
||||||
|
@utils.register_interface(interfaces.MACContext)
@utils.register_interface(hashes.HashContext)
class _HMACContext(object):
    """
    HMAC context backed by CommonCrypto's CCHmac* API.

    The key is retained only so that copy() can construct an equivalent
    context object.
    """

    def __init__(self, backend, key, algorithm, ctx=None):
        # ctx may be supplied by copy(); otherwise a fresh native context
        # is allocated and initialized with the key here.
        self._algorithm = algorithm
        self._backend = backend
        if ctx is None:
            ctx = self._backend._ffi.new("CCHmacContext *")
            try:
                alg = self._backend._supported_hmac_algorithms[algorithm.name]
            except KeyError:
                raise UnsupportedAlgorithm(
                    "{0} is not a supported HMAC hash on this backend.".format(
                        algorithm.name),
                    _Reasons.UNSUPPORTED_HASH
                )

            self._backend._lib.CCHmacInit(ctx, alg, key, len(key))

        self._ctx = ctx
        self._key = key

    algorithm = utils.read_only_property("_algorithm")

    def copy(self):
        """Return an independent copy of this HMAC context."""
        copied_ctx = self._backend._ffi.new("CCHmacContext *")
        # CommonCrypto has no APIs for copying HMACs, so we have to copy the
        # underlying struct.
        copied_ctx[0] = self._ctx[0]
        return _HMACContext(
            self._backend, self._key, self.algorithm, ctx=copied_ctx
        )

    def update(self, data):
        """Absorb *data* into the MAC state."""
        self._backend._lib.CCHmacUpdate(self._ctx, data, len(data))

    def finalize(self):
        """Return the final MAC as bytes."""
        buf = self._backend._ffi.new("unsigned char[]",
                                     self.algorithm.digest_size)
        self._backend._lib.CCHmacFinal(self._ctx, buf)
        return self._backend._ffi.buffer(buf)[:]

    def verify(self, signature):
        """
        Compare *signature* to the computed MAC in constant time.

        Raises InvalidSignature on mismatch. Note this calls finalize(),
        so the context cannot be used afterwards.
        """
        digest = self.finalize()
        if not constant_time.bytes_eq(digest, signature):
            raise InvalidSignature("Signature did not match digest.")
|
@ -0,0 +1,359 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
import abc
|
||||||
|
|
||||||
|
import six
|
||||||
|
|
||||||
|
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
class CipherBackend(object):
    """Abstract interface for backends providing symmetric ciphers."""

    @abc.abstractmethod
    def cipher_supported(self, cipher, mode):
        """
        Return True if the given cipher and mode are supported.
        """

    @abc.abstractmethod
    def create_symmetric_encryption_ctx(self, cipher, mode):
        """
        Get a CipherContext that can be used for encryption.
        """

    @abc.abstractmethod
    def create_symmetric_decryption_ctx(self, cipher, mode):
        """
        Get a CipherContext that can be used for decryption.
        """
||||||
|
|
||||||
|
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
class HashBackend(object):
    """Abstract interface for backends providing message digests."""

    @abc.abstractmethod
    def hash_supported(self, algorithm):
        """
        Return True if the hash algorithm is supported by this backend.
        """

    @abc.abstractmethod
    def create_hash_ctx(self, algorithm):
        """
        Create a HashContext for calculating a message digest.
        """
||||||
|
|
||||||
|
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
class HMACBackend(object):
    """Abstract interface for backends providing HMAC."""

    @abc.abstractmethod
    def hmac_supported(self, algorithm):
        """
        Return True if the hash algorithm is supported for HMAC by this
        backend.
        """

    @abc.abstractmethod
    def create_hmac_ctx(self, key, algorithm):
        """
        Create a MACContext for calculating a message authentication code.
        """
||||||
|
|
||||||
|
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
class CMACBackend(object):
    """Abstract interface for backends providing CMAC."""

    @abc.abstractmethod
    def cmac_algorithm_supported(self, algorithm):
        """
        Returns True if the block cipher is supported for CMAC by this backend
        """

    @abc.abstractmethod
    def create_cmac_ctx(self, algorithm):
        """
        Create a MACContext for calculating a message authentication code.
        """
||||||
|
|
||||||
|
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
class PBKDF2HMACBackend(object):
    """Abstract interface for backends providing PBKDF2-HMAC key derivation."""

    @abc.abstractmethod
    def pbkdf2_hmac_supported(self, algorithm):
        """
        Return True if the hash algorithm is supported for PBKDF2 by this
        backend.
        """

    @abc.abstractmethod
    def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations,
                           key_material):
        """
        Return length bytes derived from provided PBKDF2 parameters.
        """
||||||
|
|
||||||
|
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
class RSABackend(object):
    """Abstract interface for backends providing RSA."""

    @abc.abstractmethod
    def generate_rsa_private_key(self, public_exponent, key_size):
        """
        Generate an RSAPrivateKey instance with public_exponent and a modulus
        of key_size bits.
        """

    @abc.abstractmethod
    def rsa_padding_supported(self, padding):
        """
        Returns True if the backend supports the given padding options.
        """

    @abc.abstractmethod
    def generate_rsa_parameters_supported(self, public_exponent, key_size):
        """
        Returns True if the backend supports the given parameters for key
        generation.
        """

    @abc.abstractmethod
    def load_rsa_private_numbers(self, numbers):
        """
        Returns an RSAPrivateKey provider.
        """

    @abc.abstractmethod
    def load_rsa_public_numbers(self, numbers):
        """
        Returns an RSAPublicKey provider.
        """
||||||
|
|
||||||
|
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
class DSABackend(object):
    """Abstract interface for backends providing DSA."""

    @abc.abstractmethod
    def generate_dsa_parameters(self, key_size):
        """
        Generate a DSAParameters instance with a modulus of key_size bits.
        """

    @abc.abstractmethod
    def generate_dsa_private_key(self, parameters):
        """
        Generate a DSAPrivateKey instance with parameters as a DSAParameters
        object.
        """

    @abc.abstractmethod
    def generate_dsa_private_key_and_parameters(self, key_size):
        """
        Generate a DSAPrivateKey instance using key size only.
        """

    @abc.abstractmethod
    def dsa_hash_supported(self, algorithm):
        """
        Return True if the hash algorithm is supported by the backend for DSA.
        """

    @abc.abstractmethod
    def dsa_parameters_supported(self, p, q, g):
        """
        Return True if the parameters are supported by the backend for DSA.
        """

    @abc.abstractmethod
    def load_dsa_private_numbers(self, numbers):
        """
        Returns a DSAPrivateKey provider.
        """

    @abc.abstractmethod
    def load_dsa_public_numbers(self, numbers):
        """
        Returns a DSAPublicKey provider.
        """

    @abc.abstractmethod
    def load_dsa_parameter_numbers(self, numbers):
        """
        Returns a DSAParameters provider.
        """
||||||
|
|
||||||
|
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
class EllipticCurveBackend(object):
    """Abstract interface for backends providing elliptic-curve crypto."""

    @abc.abstractmethod
    def elliptic_curve_signature_algorithm_supported(
        self, signature_algorithm, curve
    ):
        """
        Returns True if the backend supports the named elliptic curve with the
        specified signature algorithm.
        """

    @abc.abstractmethod
    def elliptic_curve_supported(self, curve):
        """
        Returns True if the backend supports the named elliptic curve.
        """

    @abc.abstractmethod
    def generate_elliptic_curve_private_key(self, curve):
        """
        Return an object conforming to the EllipticCurvePrivateKey interface.
        """

    @abc.abstractmethod
    def load_elliptic_curve_public_numbers(self, numbers):
        """
        Return an EllipticCurvePublicKey provider using the given numbers.
        """

    @abc.abstractmethod
    def load_elliptic_curve_private_numbers(self, numbers):
        """
        Return an EllipticCurvePrivateKey provider using the given numbers.
        """

    @abc.abstractmethod
    def elliptic_curve_exchange_algorithm_supported(self, algorithm, curve):
        """
        Returns whether the exchange algorithm is supported by this backend.
        """
||||||
|
|
||||||
|
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
class PEMSerializationBackend(object):
    """Abstract interface for backends loading PEM-serialized keys."""

    @abc.abstractmethod
    def load_pem_private_key(self, data, password):
        """
        Loads a private key from PEM encoded data, using the provided password
        if the data is encrypted.
        """

    @abc.abstractmethod
    def load_pem_public_key(self, data):
        """
        Loads a public key from PEM encoded data.
        """
||||||
|
|
||||||
|
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
class DERSerializationBackend(object):
    """Abstract interface for backends loading DER-serialized keys."""

    @abc.abstractmethod
    def load_der_private_key(self, data, password):
        """
        Loads a private key from DER encoded data. Uses the provided password
        if the data is encrypted.
        """

    @abc.abstractmethod
    def load_der_public_key(self, data):
        """
        Loads a public key from DER encoded data.
        """
||||||
|
|
||||||
|
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
class X509Backend(object):
    """Abstract interface for backends providing X.509 operations."""

    @abc.abstractmethod
    def load_pem_x509_certificate(self, data):
        """
        Load an X.509 certificate from PEM encoded data.
        """

    @abc.abstractmethod
    def load_der_x509_certificate(self, data):
        """
        Load an X.509 certificate from DER encoded data.
        """

    @abc.abstractmethod
    def load_der_x509_csr(self, data):
        """
        Load an X.509 CSR from DER encoded data.
        """

    @abc.abstractmethod
    def load_pem_x509_csr(self, data):
        """
        Load an X.509 CSR from PEM encoded data.
        """

    @abc.abstractmethod
    def create_x509_csr(self, builder, private_key, algorithm):
        """
        Create and sign an X.509 CSR from a CSR builder object.
        """

    @abc.abstractmethod
    def create_x509_certificate(self, builder, private_key, algorithm):
        """
        Create and sign an X.509 certificate from a CertificateBuilder object.
        """

    @abc.abstractmethod
    def create_x509_crl(self, builder, private_key, algorithm):
        """
        Create and sign an X.509 CertificateRevocationList from a
        CertificateRevocationListBuilder object.
        """

    @abc.abstractmethod
    def create_x509_revoked_certificate(self, builder):
        """
        Create a RevokedCertificate object from a RevokedCertificateBuilder
        object.
        """
||||||
|
|
||||||
|
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
class DHBackend(object):
    """Abstract interface for backends providing Diffie-Hellman."""

    @abc.abstractmethod
    def generate_dh_parameters(self, key_size):
        """
        Generate a DHParameters instance with a modulus of key_size bits.
        """

    @abc.abstractmethod
    def generate_dh_private_key(self, parameters):
        """
        Generate a DHPrivateKey instance with parameters as a DHParameters
        object.
        """

    @abc.abstractmethod
    def generate_dh_private_key_and_parameters(self, key_size):
        """
        Generate a DHPrivateKey instance using key size only.
        """

    @abc.abstractmethod
    def load_dh_private_numbers(self, numbers):
        """
        Returns a DHPrivateKey provider.
        """

    @abc.abstractmethod
    def load_dh_public_numbers(self, numbers):
        """
        Returns a DHPublicKey provider.
        """

    @abc.abstractmethod
    def load_dh_parameter_numbers(self, numbers):
        """
        Returns a DHParameters provider.
        """

    @abc.abstractmethod
    def dh_exchange_algorithm_supported(self, exchange_algorithm):
        """
        Returns whether the exchange algorithm is supported by this backend.
        """

    @abc.abstractmethod
    def dh_parameters_supported(self, p, g):
        """
        Returns whether the backend supports DH with these parameter values.
        """
|
@ -0,0 +1,411 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
from cryptography import utils
|
||||||
|
from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
|
||||||
|
from cryptography.hazmat.backends.interfaces import (
|
||||||
|
CMACBackend, CipherBackend, DERSerializationBackend, DSABackend,
|
||||||
|
EllipticCurveBackend, HMACBackend, HashBackend, PBKDF2HMACBackend,
|
||||||
|
PEMSerializationBackend, RSABackend, X509Backend
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@utils.register_interface(CMACBackend)
|
||||||
|
@utils.register_interface(CipherBackend)
|
||||||
|
@utils.register_interface(DERSerializationBackend)
|
||||||
|
@utils.register_interface(HashBackend)
|
||||||
|
@utils.register_interface(HMACBackend)
|
||||||
|
@utils.register_interface(PBKDF2HMACBackend)
|
||||||
|
@utils.register_interface(RSABackend)
|
||||||
|
@utils.register_interface(DSABackend)
|
||||||
|
@utils.register_interface(EllipticCurveBackend)
|
||||||
|
@utils.register_interface(PEMSerializationBackend)
|
||||||
|
@utils.register_interface(X509Backend)
|
||||||
|
class MultiBackend(object):
|
||||||
|
name = "multibackend"
|
||||||
|
|
||||||
|
def __init__(self, backends):
|
||||||
|
if len(backends) == 0:
|
||||||
|
raise ValueError(
|
||||||
|
"Multibackend cannot be initialized with no backends. If you "
|
||||||
|
"are seeing this error when trying to use default_backend() "
|
||||||
|
"please try uninstalling and reinstalling cryptography."
|
||||||
|
)
|
||||||
|
|
||||||
|
self._backends = backends
|
||||||
|
|
||||||
|
def _filtered_backends(self, interface):
|
||||||
|
for b in self._backends:
|
||||||
|
if isinstance(b, interface):
|
||||||
|
yield b
|
||||||
|
|
||||||
|
def cipher_supported(self, cipher, mode):
|
||||||
|
return any(
|
||||||
|
b.cipher_supported(cipher, mode)
|
||||||
|
for b in self._filtered_backends(CipherBackend)
|
||||||
|
)
|
||||||
|
|
||||||
|
def create_symmetric_encryption_ctx(self, cipher, mode):
|
||||||
|
for b in self._filtered_backends(CipherBackend):
|
||||||
|
try:
|
||||||
|
return b.create_symmetric_encryption_ctx(cipher, mode)
|
||||||
|
except UnsupportedAlgorithm:
|
||||||
|
pass
|
||||||
|
raise UnsupportedAlgorithm(
|
||||||
|
"cipher {0} in {1} mode is not supported by this backend.".format(
|
||||||
|
cipher.name, mode.name if mode else mode),
|
||||||
|
_Reasons.UNSUPPORTED_CIPHER
|
||||||
|
)
|
||||||
|
|
||||||
|
def create_symmetric_decryption_ctx(self, cipher, mode):
|
||||||
|
for b in self._filtered_backends(CipherBackend):
|
||||||
|
try:
|
||||||
|
return b.create_symmetric_decryption_ctx(cipher, mode)
|
||||||
|
except UnsupportedAlgorithm:
|
||||||
|
pass
|
||||||
|
raise UnsupportedAlgorithm(
|
||||||
|
"cipher {0} in {1} mode is not supported by this backend.".format(
|
||||||
|
cipher.name, mode.name if mode else mode),
|
||||||
|
_Reasons.UNSUPPORTED_CIPHER
|
||||||
|
)
|
||||||
|
|
||||||
|
def hash_supported(self, algorithm):
|
||||||
|
return any(
|
||||||
|
b.hash_supported(algorithm)
|
||||||
|
for b in self._filtered_backends(HashBackend)
|
||||||
|
)
|
||||||
|
|
||||||
|
def create_hash_ctx(self, algorithm):
|
||||||
|
for b in self._filtered_backends(HashBackend):
|
||||||
|
try:
|
||||||
|
return b.create_hash_ctx(algorithm)
|
||||||
|
except UnsupportedAlgorithm:
|
||||||
|
pass
|
||||||
|
raise UnsupportedAlgorithm(
|
||||||
|
"{0} is not a supported hash on this backend.".format(
|
||||||
|
algorithm.name),
|
||||||
|
_Reasons.UNSUPPORTED_HASH
|
||||||
|
)
|
||||||
|
|
||||||
|
def hmac_supported(self, algorithm):
|
||||||
|
return any(
|
||||||
|
b.hmac_supported(algorithm)
|
||||||
|
for b in self._filtered_backends(HMACBackend)
|
||||||
|
)
|
||||||
|
|
||||||
|
def create_hmac_ctx(self, key, algorithm):
|
||||||
|
for b in self._filtered_backends(HMACBackend):
|
||||||
|
try:
|
||||||
|
return b.create_hmac_ctx(key, algorithm)
|
||||||
|
except UnsupportedAlgorithm:
|
||||||
|
pass
|
||||||
|
raise UnsupportedAlgorithm(
|
||||||
|
"{0} is not a supported hash on this backend.".format(
|
||||||
|
algorithm.name),
|
||||||
|
_Reasons.UNSUPPORTED_HASH
|
||||||
|
)
|
||||||
|
|
||||||
|
def pbkdf2_hmac_supported(self, algorithm):
|
||||||
|
return any(
|
||||||
|
b.pbkdf2_hmac_supported(algorithm)
|
||||||
|
for b in self._filtered_backends(PBKDF2HMACBackend)
|
||||||
|
)
|
||||||
|
|
||||||
|
def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations,
|
||||||
|
key_material):
|
||||||
|
for b in self._filtered_backends(PBKDF2HMACBackend):
|
||||||
|
try:
|
||||||
|
return b.derive_pbkdf2_hmac(
|
||||||
|
algorithm, length, salt, iterations, key_material
|
||||||
|
)
|
||||||
|
except UnsupportedAlgorithm:
|
||||||
|
pass
|
||||||
|
raise UnsupportedAlgorithm(
|
||||||
|
"{0} is not a supported hash on this backend.".format(
|
||||||
|
algorithm.name),
|
||||||
|
_Reasons.UNSUPPORTED_HASH
|
||||||
|
)
|
||||||
|
|
||||||
|
def generate_rsa_private_key(self, public_exponent, key_size):
|
||||||
|
for b in self._filtered_backends(RSABackend):
|
||||||
|
return b.generate_rsa_private_key(public_exponent, key_size)
|
||||||
|
raise UnsupportedAlgorithm("RSA is not supported by the backend.",
|
||||||
|
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
||||||
|
|
||||||
|
def generate_rsa_parameters_supported(self, public_exponent, key_size):
|
||||||
|
for b in self._filtered_backends(RSABackend):
|
||||||
|
return b.generate_rsa_parameters_supported(
|
||||||
|
public_exponent, key_size
|
||||||
|
)
|
||||||
|
raise UnsupportedAlgorithm("RSA is not supported by the backend.",
|
||||||
|
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
||||||
|
|
||||||
|
def rsa_padding_supported(self, padding):
|
||||||
|
for b in self._filtered_backends(RSABackend):
|
||||||
|
return b.rsa_padding_supported(padding)
|
||||||
|
raise UnsupportedAlgorithm("RSA is not supported by the backend.",
|
||||||
|
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
||||||
|
|
||||||
|
def load_rsa_private_numbers(self, numbers):
|
||||||
|
for b in self._filtered_backends(RSABackend):
|
||||||
|
return b.load_rsa_private_numbers(numbers)
|
||||||
|
|
||||||
|
raise UnsupportedAlgorithm("RSA is not supported by the backend",
|
||||||
|
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
||||||
|
|
||||||
|
def load_rsa_public_numbers(self, numbers):
|
||||||
|
for b in self._filtered_backends(RSABackend):
|
||||||
|
return b.load_rsa_public_numbers(numbers)
|
||||||
|
|
||||||
|
raise UnsupportedAlgorithm("RSA is not supported by the backend",
|
||||||
|
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
||||||
|
|
||||||
|
def generate_dsa_parameters(self, key_size):
|
||||||
|
for b in self._filtered_backends(DSABackend):
|
||||||
|
return b.generate_dsa_parameters(key_size)
|
||||||
|
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
|
||||||
|
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
||||||
|
|
||||||
|
def generate_dsa_private_key(self, parameters):
|
||||||
|
for b in self._filtered_backends(DSABackend):
|
||||||
|
return b.generate_dsa_private_key(parameters)
|
||||||
|
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
|
||||||
|
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
||||||
|
|
||||||
|
def generate_dsa_private_key_and_parameters(self, key_size):
|
||||||
|
for b in self._filtered_backends(DSABackend):
|
||||||
|
return b.generate_dsa_private_key_and_parameters(key_size)
|
||||||
|
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
|
||||||
|
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
||||||
|
|
||||||
|
def dsa_hash_supported(self, algorithm):
|
||||||
|
for b in self._filtered_backends(DSABackend):
|
||||||
|
return b.dsa_hash_supported(algorithm)
|
||||||
|
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
|
||||||
|
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
||||||
|
|
||||||
|
def dsa_parameters_supported(self, p, q, g):
|
||||||
|
for b in self._filtered_backends(DSABackend):
|
||||||
|
return b.dsa_parameters_supported(p, q, g)
|
||||||
|
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
|
||||||
|
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
||||||
|
|
||||||
|
def load_dsa_public_numbers(self, numbers):
|
||||||
|
for b in self._filtered_backends(DSABackend):
|
||||||
|
return b.load_dsa_public_numbers(numbers)
|
||||||
|
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
|
||||||
|
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
||||||
|
|
||||||
|
def load_dsa_private_numbers(self, numbers):
|
||||||
|
for b in self._filtered_backends(DSABackend):
|
||||||
|
return b.load_dsa_private_numbers(numbers)
|
||||||
|
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
|
||||||
|
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
||||||
|
|
||||||
|
def load_dsa_parameter_numbers(self, numbers):
|
||||||
|
for b in self._filtered_backends(DSABackend):
|
||||||
|
return b.load_dsa_parameter_numbers(numbers)
|
||||||
|
raise UnsupportedAlgorithm("DSA is not supported by the backend.",
|
||||||
|
_Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)
|
||||||
|
|
||||||
|
def cmac_algorithm_supported(self, algorithm):
|
||||||
|
return any(
|
||||||
|
b.cmac_algorithm_supported(algorithm)
|
||||||
|
for b in self._filtered_backends(CMACBackend)
|
||||||
|
)
|
||||||
|
|
||||||
|
def create_cmac_ctx(self, algorithm):
|
||||||
|
for b in self._filtered_backends(CMACBackend):
|
||||||
|
try:
|
||||||
|
return b.create_cmac_ctx(algorithm)
|
||||||
|
except UnsupportedAlgorithm:
|
||||||
|
pass
|
||||||
|
raise UnsupportedAlgorithm("This backend does not support CMAC.",
|
||||||
|
_Reasons.UNSUPPORTED_CIPHER)
|
||||||
|
|
||||||
|
def elliptic_curve_supported(self, curve):
    """Return True if any wrapped backend supports *curve*."""
    return any(
        candidate.elliptic_curve_supported(curve)
        for candidate in self._filtered_backends(EllipticCurveBackend)
    )

def elliptic_curve_signature_algorithm_supported(
    self, signature_algorithm, curve
):
    """Return True if any wrapped backend supports the given signature
    algorithm / curve combination.
    """
    return any(
        candidate.elliptic_curve_signature_algorithm_supported(
            signature_algorithm, curve
        )
        for candidate in self._filtered_backends(EllipticCurveBackend)
    )

def generate_elliptic_curve_private_key(self, curve):
    """Generate an EC private key using the first backend that supports
    *curve*; raise ``UnsupportedAlgorithm`` when none do.
    """
    for candidate in self._filtered_backends(EllipticCurveBackend):
        try:
            return candidate.generate_elliptic_curve_private_key(curve)
        except UnsupportedAlgorithm:
            continue

    raise UnsupportedAlgorithm(
        "This backend does not support this elliptic curve.",
        _Reasons.UNSUPPORTED_ELLIPTIC_CURVE
    )

def load_elliptic_curve_private_numbers(self, numbers):
    """Build an EC private key from *numbers* on the first backend that
    supports its curve; raise ``UnsupportedAlgorithm`` when none do.
    """
    for candidate in self._filtered_backends(EllipticCurveBackend):
        try:
            return candidate.load_elliptic_curve_private_numbers(numbers)
        except UnsupportedAlgorithm:
            continue

    raise UnsupportedAlgorithm(
        "This backend does not support this elliptic curve.",
        _Reasons.UNSUPPORTED_ELLIPTIC_CURVE
    )

def load_elliptic_curve_public_numbers(self, numbers):
    """Build an EC public key from *numbers* on the first backend that
    supports its curve; raise ``UnsupportedAlgorithm`` when none do.
    """
    for candidate in self._filtered_backends(EllipticCurveBackend):
        try:
            return candidate.load_elliptic_curve_public_numbers(numbers)
        except UnsupportedAlgorithm:
            continue

    raise UnsupportedAlgorithm(
        "This backend does not support this elliptic curve.",
        _Reasons.UNSUPPORTED_ELLIPTIC_CURVE
    )

def elliptic_curve_exchange_algorithm_supported(self, algorithm, curve):
    """Return True if any wrapped backend supports the given key
    exchange algorithm on *curve*.
    """
    return any(
        candidate.elliptic_curve_exchange_algorithm_supported(
            algorithm, curve
        )
        for candidate in self._filtered_backends(EllipticCurveBackend)
    )
|
||||||
|
|
||||||
|
def load_pem_private_key(self, data, password):
    """Delegate PEM private key loading to the first backend offering
    PEM serialization; raise ``UnsupportedAlgorithm`` if there is none.
    """
    # NOTE: only the first matching backend is consulted; its result
    # (or exception) is returned directly.
    for candidate in self._filtered_backends(PEMSerializationBackend):
        return candidate.load_pem_private_key(data, password)

    raise UnsupportedAlgorithm(
        "This backend does not support this key serialization.",
        _Reasons.UNSUPPORTED_SERIALIZATION
    )

def load_pem_public_key(self, data):
    """Delegate PEM public key loading to the first backend offering
    PEM serialization; raise ``UnsupportedAlgorithm`` if there is none.
    """
    for candidate in self._filtered_backends(PEMSerializationBackend):
        return candidate.load_pem_public_key(data)

    raise UnsupportedAlgorithm(
        "This backend does not support this key serialization.",
        _Reasons.UNSUPPORTED_SERIALIZATION
    )

def load_der_private_key(self, data, password):
    """Delegate DER private key loading to the first backend offering
    DER serialization; raise ``UnsupportedAlgorithm`` if there is none.
    """
    for candidate in self._filtered_backends(DERSerializationBackend):
        return candidate.load_der_private_key(data, password)

    raise UnsupportedAlgorithm(
        "This backend does not support this key serialization.",
        _Reasons.UNSUPPORTED_SERIALIZATION
    )

def load_der_public_key(self, data):
    """Delegate DER public key loading to the first backend offering
    DER serialization; raise ``UnsupportedAlgorithm`` if there is none.
    """
    for candidate in self._filtered_backends(DERSerializationBackend):
        return candidate.load_der_public_key(data)

    raise UnsupportedAlgorithm(
        "This backend does not support this key serialization.",
        _Reasons.UNSUPPORTED_SERIALIZATION
    )
|
||||||
|
|
||||||
|
def load_pem_x509_certificate(self, data):
    """Delegate to the first X.509-capable backend; raise
    ``UnsupportedAlgorithm`` if none is available.
    """
    for candidate in self._filtered_backends(X509Backend):
        return candidate.load_pem_x509_certificate(data)

    raise UnsupportedAlgorithm(
        "This backend does not support X.509.",
        _Reasons.UNSUPPORTED_X509
    )

def load_der_x509_certificate(self, data):
    """Delegate to the first X.509-capable backend; raise
    ``UnsupportedAlgorithm`` if none is available.
    """
    for candidate in self._filtered_backends(X509Backend):
        return candidate.load_der_x509_certificate(data)

    raise UnsupportedAlgorithm(
        "This backend does not support X.509.",
        _Reasons.UNSUPPORTED_X509
    )

def load_pem_x509_crl(self, data):
    """Delegate to the first X.509-capable backend; raise
    ``UnsupportedAlgorithm`` if none is available.
    """
    for candidate in self._filtered_backends(X509Backend):
        return candidate.load_pem_x509_crl(data)

    raise UnsupportedAlgorithm(
        "This backend does not support X.509.",
        _Reasons.UNSUPPORTED_X509
    )

def load_der_x509_crl(self, data):
    """Delegate to the first X.509-capable backend; raise
    ``UnsupportedAlgorithm`` if none is available.
    """
    for candidate in self._filtered_backends(X509Backend):
        return candidate.load_der_x509_crl(data)

    raise UnsupportedAlgorithm(
        "This backend does not support X.509.",
        _Reasons.UNSUPPORTED_X509
    )

def load_der_x509_csr(self, data):
    """Delegate to the first X.509-capable backend; raise
    ``UnsupportedAlgorithm`` if none is available.
    """
    for candidate in self._filtered_backends(X509Backend):
        return candidate.load_der_x509_csr(data)

    raise UnsupportedAlgorithm(
        "This backend does not support X.509.",
        _Reasons.UNSUPPORTED_X509
    )

def load_pem_x509_csr(self, data):
    """Delegate to the first X.509-capable backend; raise
    ``UnsupportedAlgorithm`` if none is available.
    """
    for candidate in self._filtered_backends(X509Backend):
        return candidate.load_pem_x509_csr(data)

    raise UnsupportedAlgorithm(
        "This backend does not support X.509.",
        _Reasons.UNSUPPORTED_X509
    )

def create_x509_csr(self, builder, private_key, algorithm):
    """Sign a CSR with the first X.509-capable backend; raise
    ``UnsupportedAlgorithm`` if none is available.
    """
    for candidate in self._filtered_backends(X509Backend):
        return candidate.create_x509_csr(builder, private_key, algorithm)

    raise UnsupportedAlgorithm(
        "This backend does not support X.509.",
        _Reasons.UNSUPPORTED_X509
    )

def create_x509_certificate(self, builder, private_key, algorithm):
    """Sign a certificate with the first X.509-capable backend; raise
    ``UnsupportedAlgorithm`` if none is available.
    """
    for candidate in self._filtered_backends(X509Backend):
        return candidate.create_x509_certificate(
            builder, private_key, algorithm
        )

    raise UnsupportedAlgorithm(
        "This backend does not support X.509.",
        _Reasons.UNSUPPORTED_X509
    )

def create_x509_crl(self, builder, private_key, algorithm):
    """Sign a CRL with the first X.509-capable backend; raise
    ``UnsupportedAlgorithm`` if none is available.
    """
    for candidate in self._filtered_backends(X509Backend):
        return candidate.create_x509_crl(builder, private_key, algorithm)

    raise UnsupportedAlgorithm(
        "This backend does not support X.509.",
        _Reasons.UNSUPPORTED_X509
    )

def create_x509_revoked_certificate(self, builder):
    """Build a revoked-certificate object with the first X.509-capable
    backend; raise ``UnsupportedAlgorithm`` if none is available.
    """
    for candidate in self._filtered_backends(X509Backend):
        return candidate.create_x509_revoked_certificate(builder)

    raise UnsupportedAlgorithm(
        "This backend does not support X.509.",
        _Reasons.UNSUPPORTED_X509
    )
|
|
@ -0,0 +1,10 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

from __future__ import absolute_import, division, print_function

from cryptography.hazmat.backends.openssl.backend import backend


# Re-export the module-level OpenSSL backend singleton.
__all__ = ["backend"]
|
File diff suppressed because it is too large
Load diff
|
@ -0,0 +1,213 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
from cryptography import utils
|
||||||
|
from cryptography.exceptions import InvalidTag, UnsupportedAlgorithm, _Reasons
|
||||||
|
from cryptography.hazmat.primitives import ciphers
|
||||||
|
from cryptography.hazmat.primitives.ciphers import modes
|
||||||
|
|
||||||
|
|
||||||
|
@utils.register_interface(ciphers.CipherContext)
@utils.register_interface(ciphers.AEADCipherContext)
@utils.register_interface(ciphers.AEADEncryptionContext)
class _CipherContext(object):
    """Symmetric cipher context backed by OpenSSL's EVP interface.

    Wraps an ``EVP_CIPHER_CTX`` for a single encrypt or decrypt
    operation, including GCM tag handling for AEAD modes.
    """

    _ENCRYPT = 1
    _DECRYPT = 0

    def __init__(self, backend, cipher, mode, operation):
        self._backend = backend
        self._cipher = cipher
        self._mode = mode
        self._operation = operation
        self._tag = None

        # Stream ciphers have no meaningful block size; treat them as 1.
        if isinstance(self._cipher, ciphers.BlockCipherAlgorithm):
            self._block_size = self._cipher.block_size
        else:
            self._block_size = 1

        ctx = self._backend._lib.EVP_CIPHER_CTX_new()
        ctx = self._backend._ffi.gc(
            ctx, self._backend._lib.EVP_CIPHER_CTX_free
        )

        registry = self._backend._cipher_registry
        try:
            adapter = registry[type(cipher), type(mode)]
        except KeyError:
            raise UnsupportedAlgorithm(
                "cipher {0} in {1} mode is not supported "
                "by this backend.".format(
                    cipher.name, mode.name if mode else mode),
                _Reasons.UNSUPPORTED_CIPHER
            )

        evp_cipher = adapter(self._backend, cipher, mode)
        if evp_cipher == self._backend._ffi.NULL:
            raise UnsupportedAlgorithm(
                "cipher {0} in {1} mode is not supported "
                "by this backend.".format(
                    cipher.name, mode.name if mode else mode),
                _Reasons.UNSUPPORTED_CIPHER
            )

        if isinstance(mode, modes.ModeWithInitializationVector):
            iv_nonce = mode.initialization_vector
        elif isinstance(mode, modes.ModeWithNonce):
            iv_nonce = mode.nonce
        else:
            iv_nonce = self._backend._ffi.NULL
        # begin init with cipher and operation type
        res = self._backend._lib.EVP_CipherInit_ex(ctx, evp_cipher,
                                                   self._backend._ffi.NULL,
                                                   self._backend._ffi.NULL,
                                                   self._backend._ffi.NULL,
                                                   operation)
        self._backend.openssl_assert(res != 0)
        # set the key length to handle variable key ciphers
        res = self._backend._lib.EVP_CIPHER_CTX_set_key_length(
            ctx, len(cipher.key)
        )
        self._backend.openssl_assert(res != 0)
        if isinstance(mode, modes.GCM):
            res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
                ctx, self._backend._lib.EVP_CTRL_GCM_SET_IVLEN,
                len(iv_nonce), self._backend._ffi.NULL
            )
            self._backend.openssl_assert(res != 0)
            if operation == self._DECRYPT:
                # Decryption needs the expected tag up front so that
                # finalize() can verify it.
                res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
                    ctx, self._backend._lib.EVP_CTRL_GCM_SET_TAG,
                    len(mode.tag), mode.tag
                )
                self._backend.openssl_assert(res != 0)

        # pass key/iv
        res = self._backend._lib.EVP_CipherInit_ex(
            ctx,
            self._backend._ffi.NULL,
            self._backend._ffi.NULL,
            cipher.key,
            iv_nonce,
            operation
        )
        self._backend.openssl_assert(res != 0)
        # We purposely disable padding here as it's handled higher up in the
        # API.
        self._backend._lib.EVP_CIPHER_CTX_set_padding(ctx, 0)
        self._ctx = ctx

    def update(self, data):
        """Feed *data* through the cipher and return the produced bytes."""
        # OpenSSL 0.9.8e has an assertion in its EVP code that causes it
        # to SIGABRT if you call update with an empty byte string. This can be
        # removed when we drop support for 0.9.8e (CentOS/RHEL 5). This branch
        # should be taken only when length is zero and mode is not GCM because
        # AES GCM can return improper tag values if you don't call update
        # with empty plaintext when authenticating AAD for ...reasons.
        if len(data) == 0 and not isinstance(self._mode, modes.GCM):
            return b""

        buf = self._backend._ffi.new("unsigned char[]",
                                     len(data) + self._block_size - 1)
        outlen = self._backend._ffi.new("int *")
        res = self._backend._lib.EVP_CipherUpdate(self._ctx, buf, outlen, data,
                                                  len(data))
        self._backend.openssl_assert(res != 0)
        return self._backend._ffi.buffer(buf)[:outlen[0]]

    def finalize(self):
        """Finish the operation and return any remaining bytes.

        For GCM encryption this also captures the authentication tag;
        for GCM decryption a tag mismatch raises ``InvalidTag``.
        """
        # OpenSSL 1.0.1 on Ubuntu 12.04 (and possibly other distributions)
        # appears to have a bug where you must make at least one call to update
        # even if you are only using authenticate_additional_data or the
        # GCM tag will be wrong. An (empty) call to update resolves this
        # and is harmless for all other versions of OpenSSL.
        if isinstance(self._mode, modes.GCM):
            self.update(b"")

        buf = self._backend._ffi.new("unsigned char[]", self._block_size)
        outlen = self._backend._ffi.new("int *")
        res = self._backend._lib.EVP_CipherFinal_ex(self._ctx, buf, outlen)
        if res == 0:
            errors = self._backend._consume_errors()

            if not errors and isinstance(self._mode, modes.GCM):
                raise InvalidTag

            # The only other error we tolerate here is a partial final
            # block; anything else is a hard failure.
            self._backend.openssl_assert(
                errors[0][1:] == (
                    self._backend._lib.ERR_LIB_EVP,
                    self._backend._lib.EVP_F_EVP_ENCRYPTFINAL_EX,
                    self._backend._lib.EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH
                ) or errors[0][1:] == (
                    self._backend._lib.ERR_LIB_EVP,
                    self._backend._lib.EVP_F_EVP_DECRYPTFINAL_EX,
                    self._backend._lib.EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH
                )
            )
            raise ValueError(
                "The length of the provided data is not a multiple of "
                "the block length."
            )

        if (isinstance(self._mode, modes.GCM) and
                self._operation == self._ENCRYPT):
            block_byte_size = self._block_size // 8
            tag_buf = self._backend._ffi.new(
                "unsigned char[]", block_byte_size
            )
            res = self._backend._lib.EVP_CIPHER_CTX_ctrl(
                self._ctx, self._backend._lib.EVP_CTRL_GCM_GET_TAG,
                block_byte_size, tag_buf
            )
            self._backend.openssl_assert(res != 0)
            self._tag = self._backend._ffi.buffer(tag_buf)[:]

        res = self._backend._lib.EVP_CIPHER_CTX_cleanup(self._ctx)
        self._backend.openssl_assert(res == 1)
        return self._backend._ffi.buffer(buf)[:outlen[0]]

    def authenticate_additional_data(self, data):
        """Authenticate (but do not encrypt) *data* for AEAD modes."""
        outlen = self._backend._ffi.new("int *")
        res = self._backend._lib.EVP_CipherUpdate(
            self._ctx, self._backend._ffi.NULL, outlen, data, len(data)
        )
        self._backend.openssl_assert(res != 0)

    # GCM tag produced by finalize() during encryption; None before then.
    tag = utils.read_only_property("_tag")
||||||
|
|
||||||
|
@utils.register_interface(ciphers.CipherContext)
class _AESCTRCipherContext(object):
    """
    This is needed to provide support for AES CTR mode in OpenSSL 0.9.8. It can
    be removed when we drop 0.9.8 support (RHEL5 extended life ends 2020).
    """

    def __init__(self, backend, cipher, mode):
        self._backend = backend

        # CTR mode uses the encryption schedule for both directions.
        self._key = self._backend._ffi.new("AES_KEY *")
        res = self._backend._lib.AES_set_encrypt_key(
            cipher.key, len(cipher.key) * 8, self._key
        )
        # AES_set_encrypt_key returns 0 on success.
        self._backend.openssl_assert(res == 0)
        self._ecount = self._backend._ffi.new("char[]", 16)
        self._nonce = self._backend._ffi.new("char[16]", mode.nonce)
        self._num = self._backend._ffi.new("unsigned int *", 0)

    def update(self, data):
        """Encrypt/decrypt *data* (CTR is symmetric) and return the result."""
        out = self._backend._ffi.new("unsigned char[]", len(data))
        self._backend._lib.AES_ctr128_encrypt(
            data, out, len(data), self._key, self._nonce,
            self._ecount, self._num
        )
        return self._backend._ffi.buffer(out)[:]

    def finalize(self):
        """Drop the keystream state and return the (empty) final block."""
        self._key = None
        self._ecount = None
        self._nonce = None
        self._num = None
        return b""
|
|
@ -0,0 +1,80 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
|
||||||
|
from cryptography import utils
|
||||||
|
from cryptography.exceptions import (
|
||||||
|
InvalidSignature, UnsupportedAlgorithm, _Reasons
|
||||||
|
)
|
||||||
|
from cryptography.hazmat.primitives import constant_time, interfaces
|
||||||
|
from cryptography.hazmat.primitives.ciphers.modes import CBC
|
||||||
|
|
||||||
|
|
||||||
|
@utils.register_interface(interfaces.MACContext)
class _CMACContext(object):
    """CMAC (OMAC1) context backed by OpenSSL's ``CMAC_*`` API.

    *algorithm* is a block cipher algorithm instance (e.g. AES); the MAC
    output length equals the cipher's block size in bytes.
    """

    def __init__(self, backend, algorithm, ctx=None):
        if not backend.cmac_algorithm_supported(algorithm):
            raise UnsupportedAlgorithm("This backend does not support CMAC.",
                                       _Reasons.UNSUPPORTED_CIPHER)

        self._backend = backend
        self._key = algorithm.key
        self._algorithm = algorithm
        self._output_length = algorithm.block_size // 8

        if ctx is None:
            # CMAC is defined over the cipher in CBC mode.
            registry = self._backend._cipher_registry
            adapter = registry[type(algorithm), CBC]

            evp_cipher = adapter(self._backend, algorithm, CBC)

            ctx = self._backend._lib.CMAC_CTX_new()

            self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
            ctx = self._backend._ffi.gc(ctx, self._backend._lib.CMAC_CTX_free)

            # BUGFIX: the return value of CMAC_Init was previously ignored;
            # it returns 1 on success and 0 on failure, so check it.
            res = self._backend._lib.CMAC_Init(
                ctx, self._key, len(self._key),
                evp_cipher, self._backend._ffi.NULL
            )
            self._backend.openssl_assert(res == 1)

        self._ctx = ctx

    algorithm = utils.read_only_property("_algorithm")

    def update(self, data):
        """Feed *data* into the MAC computation."""
        res = self._backend._lib.CMAC_Update(self._ctx, data, len(data))
        self._backend.openssl_assert(res == 1)

    def finalize(self):
        """Return the MAC bytes and invalidate this context."""
        buf = self._backend._ffi.new("unsigned char[]", self._output_length)
        length = self._backend._ffi.new("size_t *", self._output_length)
        res = self._backend._lib.CMAC_Final(
            self._ctx, buf, length
        )
        self._backend.openssl_assert(res == 1)

        self._ctx = None

        return self._backend._ffi.buffer(buf)[:]

    def copy(self):
        """Return an independent copy of this context's running state."""
        copied_ctx = self._backend._lib.CMAC_CTX_new()
        copied_ctx = self._backend._ffi.gc(
            copied_ctx, self._backend._lib.CMAC_CTX_free
        )
        res = self._backend._lib.CMAC_CTX_copy(
            copied_ctx, self._ctx
        )
        self._backend.openssl_assert(res == 1)
        return _CMACContext(
            self._backend, self._algorithm, ctx=copied_ctx
        )

    def verify(self, signature):
        """Constant-time comparison of *signature* against the computed MAC;
        raises ``InvalidSignature`` on mismatch.
        """
        digest = self.finalize()
        if not constant_time.bytes_eq(digest, signature):
            raise InvalidSignature("Signature did not match digest.")
|
@ -0,0 +1,804 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
import datetime
|
||||||
|
import ipaddress
|
||||||
|
|
||||||
|
from email.utils import parseaddr
|
||||||
|
|
||||||
|
import idna
|
||||||
|
|
||||||
|
import six
|
||||||
|
|
||||||
|
from six.moves import urllib_parse
|
||||||
|
|
||||||
|
from cryptography import x509
|
||||||
|
from cryptography.x509.oid import (
|
||||||
|
CRLEntryExtensionOID, CertificatePoliciesOID, ExtensionOID
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _obj2txt(backend, obj):
    """Render an ASN1_OBJECT as its dotted-decimal OID string."""
    # Set to 80 on the recommendation of
    # https://www.openssl.org/docs/crypto/OBJ_nid2ln.html#return_values
    buf_len = 80
    buf = backend._ffi.new("char[]", buf_len)
    res = backend._lib.OBJ_obj2txt(buf, buf_len, obj, 1)
    backend.openssl_assert(res > 0)
    return backend._ffi.buffer(buf, res)[:].decode()
|
||||||
|
|
||||||
|
|
||||||
|
def _decode_x509_name_entry(backend, x509_name_entry):
    """Convert an ``X509_NAME_ENTRY`` into an ``x509.NameAttribute``."""
    obj = backend._lib.X509_NAME_ENTRY_get_object(x509_name_entry)
    backend.openssl_assert(obj != backend._ffi.NULL)
    data = backend._lib.X509_NAME_ENTRY_get_data(x509_name_entry)
    backend.openssl_assert(data != backend._ffi.NULL)
    value = _asn1_string_to_utf8(backend, data)
    oid = _obj2txt(backend, obj)

    return x509.NameAttribute(x509.ObjectIdentifier(oid), value)
|
||||||
|
|
||||||
|
|
||||||
|
def _decode_x509_name(backend, x509_name):
    """Convert an ``X509_NAME`` into an ``x509.Name`` of its entries."""
    entry_total = backend._lib.X509_NAME_entry_count(x509_name)
    attributes = [
        _decode_x509_name_entry(
            backend, backend._lib.X509_NAME_get_entry(x509_name, idx)
        )
        for idx in range(entry_total)
    ]

    return x509.Name(attributes)
|
||||||
|
|
||||||
|
|
||||||
|
def _decode_general_names(backend, gns):
    """Decode a ``STACK_OF(GENERAL_NAME)`` into a list of GeneralName
    objects.
    """
    total = backend._lib.sk_GENERAL_NAME_num(gns)
    names = []
    for idx in range(total):
        gn = backend._lib.sk_GENERAL_NAME_value(gns, idx)
        backend.openssl_assert(gn != backend._ffi.NULL)
        names.append(_decode_general_name(backend, gn))

    return names
|
||||||
|
|
||||||
|
|
||||||
|
def _decode_general_name(backend, gn):
    """Decode a single ``GENERAL_NAME`` into the matching x509 type.

    Raises ``x509.UnsupportedGeneralNameType`` for x400Address and
    ediPartyName, and ``ValueError`` for malformed values.
    """
    if gn.type == backend._lib.GEN_DNS:
        data = _asn1_string_to_bytes(backend, gn.d.dNSName)
        if not data:
            decoded = u""
        elif data.startswith(b"*."):
            # This is a wildcard name. We need to remove the leading wildcard,
            # IDNA decode, then re-add the wildcard. Wildcard characters should
            # always be left-most (RFC 2595 section 2.4).
            decoded = u"*." + idna.decode(data[2:])
        else:
            # Not a wildcard, decode away. If the string has a * in it anywhere
            # invalid this will raise an InvalidCodePoint
            decoded = idna.decode(data)
            if data.startswith(b"."):
                # idna strips leading periods. Name constraints can have that
                # so we need to re-add it. Sigh.
                decoded = u"." + decoded

        return x509.DNSName(decoded)
    elif gn.type == backend._lib.GEN_URI:
        data = _asn1_string_to_ascii(backend, gn.d.uniformResourceIdentifier)
        parsed = urllib_parse.urlparse(data)
        if parsed.hostname:
            hostname = idna.decode(parsed.hostname)
        else:
            hostname = ""
        if parsed.port:
            netloc = hostname + u":" + six.text_type(parsed.port)
        else:
            netloc = hostname

        # Note that building a URL in this fashion means it should be
        # semantically indistinguishable from the original but is not
        # guaranteed to be exactly the same.
        uri = urllib_parse.urlunparse((
            parsed.scheme,
            netloc,
            parsed.path,
            parsed.params,
            parsed.query,
            parsed.fragment
        ))
        return x509.UniformResourceIdentifier(uri)
    elif gn.type == backend._lib.GEN_RID:
        oid = _obj2txt(backend, gn.d.registeredID)
        return x509.RegisteredID(x509.ObjectIdentifier(oid))
    elif gn.type == backend._lib.GEN_IPADD:
        data = _asn1_string_to_bytes(backend, gn.d.iPAddress)
        data_len = len(data)
        if data_len == 8 or data_len == 32:
            # This is an IPv4 or IPv6 Network and not a single IP. This
            # type of data appears in Name Constraints. Unfortunately,
            # ipaddress doesn't support packed bytes + netmask. Additionally,
            # IPv6Network can only handle CIDR rather than the full 16 byte
            # netmask. To handle this we convert the netmask to integer, then
            # find the first 0 bit, which will be the prefix. If another 1
            # bit is present after that the netmask is invalid.
            base = ipaddress.ip_address(data[:data_len // 2])
            netmask = ipaddress.ip_address(data[data_len // 2:])
            bits = bin(int(netmask))[2:]
            prefix = bits.find('0')
            # If no 0 bits are found it is a /32 or /128
            if prefix == -1:
                prefix = len(bits)

            if "1" in bits[prefix:]:
                raise ValueError("Invalid netmask")

            ip = ipaddress.ip_network(base.exploded + u"/{0}".format(prefix))
        else:
            ip = ipaddress.ip_address(data)

        return x509.IPAddress(ip)
    elif gn.type == backend._lib.GEN_DIRNAME:
        return x509.DirectoryName(
            _decode_x509_name(backend, gn.d.directoryName)
        )
    elif gn.type == backend._lib.GEN_EMAIL:
        data = _asn1_string_to_ascii(backend, gn.d.rfc822Name)
        name, address = parseaddr(data)
        parts = address.split(u"@")
        if name or not address:
            # parseaddr has found a name (e.g. Name <email>) or the entire
            # value is an empty string.
            raise ValueError("Invalid rfc822name value")
        elif len(parts) == 1:
            # Single label email name. This is valid for local delivery. No
            # IDNA decoding can be done since there is no domain component.
            return x509.RFC822Name(address)
        else:
            # A normal email of the form user@domain.com. Let's attempt to
            # decode the domain component and return the entire address.
            return x509.RFC822Name(
                parts[0] + u"@" + idna.decode(parts[1])
            )
    elif gn.type == backend._lib.GEN_OTHERNAME:
        type_id = _obj2txt(backend, gn.d.otherName.type_id)
        value = _asn1_to_der(backend, gn.d.otherName.value)
        return x509.OtherName(x509.ObjectIdentifier(type_id), value)
    else:
        # x400Address or ediPartyName
        raise x509.UnsupportedGeneralNameType(
            "{0} is not a supported type".format(
                x509._GENERAL_NAMES.get(gn.type, gn.type)
            ),
            gn.type
        )
|
||||||
|
|
||||||
|
|
||||||
|
def _decode_ocsp_no_check(backend, ext):
    """The OCSPNoCheck extension carries no payload; just return it."""
    return x509.OCSPNoCheck()
|
||||||
|
|
||||||
|
|
||||||
|
def _decode_crl_number(backend, ext):
    """Decode an ASN1_INTEGER extension payload into ``x509.CRLNumber``."""
    asn1_int = backend._ffi.cast("ASN1_INTEGER *", ext)
    asn1_int = backend._ffi.gc(asn1_int, backend._lib.ASN1_INTEGER_free)
    return x509.CRLNumber(_asn1_integer_to_int(backend, asn1_int))
|
||||||
|
|
||||||
|
|
||||||
|
class _X509ExtensionParser(object):
    """Generic parser that walks the extensions of an X.509 object.

    Parameterized with FFI accessors so the same logic serves
    certificates, CSRs and CRLs:

    - ``ext_count(backend, obj)``: number of extensions
    - ``get_ext(backend, obj, i)``: i-th ``X509_EXTENSION``
    - ``handlers``: mapping of ObjectIdentifier -> decode callable
    - ``unsupported_exts``: OIDs whose raw extension object (rather than
      the OpenSSL-parsed structure) is handed to the handler
    """

    def __init__(self, ext_count, get_ext, handlers, unsupported_exts=None):
        self.ext_count = ext_count
        self.get_ext = get_ext
        self.handlers = handlers
        self.unsupported_exts = unsupported_exts

    def parse(self, backend, x509_obj):
        """Return an ``x509.Extensions`` for *x509_obj*.

        Raises ``x509.DuplicateExtension`` on repeated OIDs,
        ``x509.UnsupportedExtension`` for unknown critical extensions,
        and ``ValueError`` for unparseable payloads.
        """
        extensions = []
        seen_oids = set()
        for i in range(self.ext_count(backend, x509_obj)):
            ext = self.get_ext(backend, x509_obj, i)
            backend.openssl_assert(ext != backend._ffi.NULL)
            crit = backend._lib.X509_EXTENSION_get_critical(ext)
            critical = crit == 1
            oid = x509.ObjectIdentifier(
                _obj2txt(backend, backend._lib.X509_EXTENSION_get_object(ext))
            )
            if oid in seen_oids:
                raise x509.DuplicateExtension(
                    "Duplicate {0} extension found".format(oid), oid
                )
            try:
                handler = self.handlers[oid]
            except KeyError:
                if critical:
                    raise x509.UnsupportedExtension(
                        "Critical extension {0} is not currently supported"
                        .format(oid), oid
                    )
                else:
                    # Dump the DER payload into an UnrecognizedExtension object
                    data = backend._lib.X509_EXTENSION_get_data(ext)
                    backend.openssl_assert(data != backend._ffi.NULL)
                    der = backend._ffi.buffer(data.data, data.length)[:]
                    unrecognized = x509.UnrecognizedExtension(oid, der)
                    extensions.append(
                        x509.Extension(oid, critical, unrecognized)
                    )
            else:
                # For extensions which are not supported by OpenSSL we pass the
                # extension object directly to the parsing routine so it can
                # be decoded manually.
                if self.unsupported_exts and oid in self.unsupported_exts:
                    ext_data = ext
                else:
                    ext_data = backend._lib.X509V3_EXT_d2i(ext)
                    if ext_data == backend._ffi.NULL:
                        backend._consume_errors()
                        raise ValueError(
                            "The {0} extension is invalid and can't be "
                            "parsed".format(oid)
                        )

                value = handler(backend, ext_data)
                extensions.append(x509.Extension(oid, critical, value))

            seen_oids.add(oid)

        return x509.Extensions(extensions)
|
||||||
|
|
||||||
|
|
||||||
|
def _decode_certificate_policies(backend, cp):
    """Decode a CertificatePolicies extension payload.

    Each PolicyInformation may carry CPS URI and/or UserNotice
    qualifiers.
    """
    cp = backend._ffi.cast("Cryptography_STACK_OF_POLICYINFO *", cp)
    cp = backend._ffi.gc(cp, backend._lib.sk_POLICYINFO_free)
    policy_total = backend._lib.sk_POLICYINFO_num(cp)
    certificate_policies = []
    for i in range(policy_total):
        qualifiers = None
        pi = backend._lib.sk_POLICYINFO_value(cp, i)
        oid = x509.ObjectIdentifier(_obj2txt(backend, pi.policyid))
        if pi.qualifiers != backend._ffi.NULL:
            qualifier_total = backend._lib.sk_POLICYQUALINFO_num(pi.qualifiers)
            qualifiers = []
            for j in range(qualifier_total):
                pqi = backend._lib.sk_POLICYQUALINFO_value(
                    pi.qualifiers, j
                )
                pqualid = x509.ObjectIdentifier(
                    _obj2txt(backend, pqi.pqualid)
                )
                if pqualid == CertificatePoliciesOID.CPS_QUALIFIER:
                    cpsuri = backend._ffi.buffer(
                        pqi.d.cpsuri.data, pqi.d.cpsuri.length
                    )[:].decode('ascii')
                    qualifiers.append(cpsuri)
                else:
                    # RFC 5280 only defines these two qualifier types.
                    assert pqualid == CertificatePoliciesOID.CPS_USER_NOTICE
                    user_notice = _decode_user_notice(
                        backend, pqi.d.usernotice
                    )
                    qualifiers.append(user_notice)

        certificate_policies.append(
            x509.PolicyInformation(oid, qualifiers)
        )

    return x509.CertificatePolicies(certificate_policies)
|
||||||
|
|
||||||
|
|
||||||
|
def _decode_user_notice(backend, un):
    """Decode a USERNOTICE structure into ``x509.UserNotice``.

    Both the explicit text and the notice reference are optional.
    """
    explicit_text = None
    notice_reference = None

    if un.exptext != backend._ffi.NULL:
        explicit_text = _asn1_string_to_utf8(backend, un.exptext)

    if un.noticeref != backend._ffi.NULL:
        organization = _asn1_string_to_utf8(
            backend, un.noticeref.organization
        )

        notice_total = backend._lib.sk_ASN1_INTEGER_num(
            un.noticeref.noticenos
        )
        notice_numbers = []
        for idx in range(notice_total):
            asn1_int = backend._lib.sk_ASN1_INTEGER_value(
                un.noticeref.noticenos, idx
            )
            notice_numbers.append(_asn1_integer_to_int(backend, asn1_int))

        notice_reference = x509.NoticeReference(
            organization, notice_numbers
        )

    return x509.UserNotice(notice_reference, explicit_text)
|
||||||
|
|
||||||
|
|
||||||
|
def _decode_basic_constraints(backend, bc_st):
    """Decode a BASIC_CONSTRAINTS payload into ``x509.BasicConstraints``."""
    basic_constraints = backend._ffi.cast("BASIC_CONSTRAINTS *", bc_st)
    basic_constraints = backend._ffi.gc(
        basic_constraints, backend._lib.BASIC_CONSTRAINTS_free
    )
    # The byte representation of an ASN.1 boolean true is \xff. OpenSSL
    # chooses to just map this to its ordinal value, so true is 255 and
    # false is 0.
    ca = basic_constraints.ca == 255
    path_length = _asn1_integer_to_int_or_none(
        backend, basic_constraints.pathlen
    )

    return x509.BasicConstraints(ca, path_length)
|
||||||
|
|
||||||
|
|
||||||
|
def _decode_subject_key_identifier(backend, asn1_string):
    """Decode an ASN1_OCTET_STRING into ``x509.SubjectKeyIdentifier``."""
    asn1_string = backend._ffi.cast("ASN1_OCTET_STRING *", asn1_string)
    asn1_string = backend._ffi.gc(
        asn1_string, backend._lib.ASN1_OCTET_STRING_free
    )
    # The [:] copies the buffer into a Python bytes object before the
    # underlying OpenSSL memory can be freed.
    return x509.SubjectKeyIdentifier(
        backend._ffi.buffer(asn1_string.data, asn1_string.length)[:]
    )
|
||||||
|
|
||||||
|
|
||||||
|
def _decode_authority_key_identifier(backend, akid):
    """Decode an AUTHORITY_KEYID struct into ``x509.AuthorityKeyIdentifier``.

    keyid, issuer and serial are all optional; missing fields map to None.
    """
    akid = backend._ffi.cast("AUTHORITY_KEYID *", akid)
    akid = backend._ffi.gc(akid, backend._lib.AUTHORITY_KEYID_free)
    key_identifier = None
    authority_cert_issuer = None

    if akid.keyid != backend._ffi.NULL:
        # Copy the raw key identifier bytes out of OpenSSL-owned memory.
        key_identifier = backend._ffi.buffer(
            akid.keyid.data, akid.keyid.length
        )[:]

    if akid.issuer != backend._ffi.NULL:
        authority_cert_issuer = _decode_general_names(
            backend, akid.issuer
        )

    authority_cert_serial_number = _asn1_integer_to_int_or_none(
        backend, akid.serial
    )

    return x509.AuthorityKeyIdentifier(
        key_identifier, authority_cert_issuer, authority_cert_serial_number
    )
|
||||||
|
|
||||||
|
|
||||||
|
def _decode_authority_information_access(backend, aia):
    """Decode a stack of ACCESS_DESCRIPTION entries into
    ``x509.AuthorityInformationAccess``.
    """
    aia = backend._ffi.cast("Cryptography_STACK_OF_ACCESS_DESCRIPTION *", aia)
    aia = backend._ffi.gc(aia, backend._lib.sk_ACCESS_DESCRIPTION_free)
    num = backend._lib.sk_ACCESS_DESCRIPTION_num(aia)
    access_descriptions = []
    for i in range(num):
        ad = backend._lib.sk_ACCESS_DESCRIPTION_value(aia, i)
        backend.openssl_assert(ad.method != backend._ffi.NULL)
        oid = x509.ObjectIdentifier(_obj2txt(backend, ad.method))
        backend.openssl_assert(ad.location != backend._ffi.NULL)
        gn = _decode_general_name(backend, ad.location)
        access_descriptions.append(x509.AccessDescription(oid, gn))

    return x509.AuthorityInformationAccess(access_descriptions)
|
||||||
|
|
||||||
|
|
||||||
|
def _decode_key_usage(backend, bit_string):
    """Decode an ASN1_BIT_STRING of KeyUsage flags into ``x509.KeyUsage``.

    Bit positions follow RFC 5280's KeyUsage BIT STRING definition
    (bit 0 = digitalSignature ... bit 8 = decipherOnly).
    """
    bit_string = backend._ffi.cast("ASN1_BIT_STRING *", bit_string)
    bit_string = backend._ffi.gc(bit_string, backend._lib.ASN1_BIT_STRING_free)
    get_bit = backend._lib.ASN1_BIT_STRING_get_bit
    digital_signature = get_bit(bit_string, 0) == 1
    content_commitment = get_bit(bit_string, 1) == 1
    key_encipherment = get_bit(bit_string, 2) == 1
    data_encipherment = get_bit(bit_string, 3) == 1
    key_agreement = get_bit(bit_string, 4) == 1
    key_cert_sign = get_bit(bit_string, 5) == 1
    crl_sign = get_bit(bit_string, 6) == 1
    encipher_only = get_bit(bit_string, 7) == 1
    decipher_only = get_bit(bit_string, 8) == 1
    return x509.KeyUsage(
        digital_signature,
        content_commitment,
        key_encipherment,
        data_encipherment,
        key_agreement,
        key_cert_sign,
        crl_sign,
        encipher_only,
        decipher_only
    )
|
||||||
|
|
||||||
|
|
||||||
|
def _decode_general_names_extension(backend, gns):
    """Cast a raw extension payload to GENERAL_NAMES and decode it."""
    stack = backend._ffi.gc(
        backend._ffi.cast("GENERAL_NAMES *", gns),
        backend._lib.GENERAL_NAMES_free,
    )
    return _decode_general_names(backend, stack)
|
||||||
|
|
||||||
|
|
||||||
|
def _decode_subject_alt_name(backend, ext):
    """Decode a subjectAltName payload into ``x509.SubjectAlternativeName``."""
    names = _decode_general_names_extension(backend, ext)
    return x509.SubjectAlternativeName(names)
|
||||||
|
|
||||||
|
|
||||||
|
def _decode_issuer_alt_name(backend, ext):
    """Decode an issuerAltName payload into ``x509.IssuerAlternativeName``."""
    names = _decode_general_names_extension(backend, ext)
    return x509.IssuerAlternativeName(names)
|
||||||
|
|
||||||
|
|
||||||
|
def _decode_name_constraints(backend, nc):
    """Decode a NAME_CONSTRAINTS struct into ``x509.NameConstraints``."""
    nc = backend._ffi.cast("NAME_CONSTRAINTS *", nc)
    nc = backend._ffi.gc(nc, backend._lib.NAME_CONSTRAINTS_free)
    # Either subtree list may be absent (NULL), which decodes to None.
    permitted = _decode_general_subtrees(backend, nc.permittedSubtrees)
    excluded = _decode_general_subtrees(backend, nc.excludedSubtrees)
    return x509.NameConstraints(
        permitted_subtrees=permitted, excluded_subtrees=excluded
    )
|
||||||
|
|
||||||
|
|
||||||
|
def _decode_general_subtrees(backend, stack_subtrees):
    """Decode a STACK_OF(GENERAL_SUBTREE) into a list of general names.

    Returns ``None`` when OpenSSL hands us a NULL stack (the subtree list
    is optional in NameConstraints).
    """
    if stack_subtrees == backend._ffi.NULL:
        return None

    count = backend._lib.sk_GENERAL_SUBTREE_num(stack_subtrees)
    decoded = []
    for idx in range(count):
        subtree = backend._lib.sk_GENERAL_SUBTREE_value(stack_subtrees, idx)
        backend.openssl_assert(subtree != backend._ffi.NULL)
        decoded.append(_decode_general_name(backend, subtree.base))

    return decoded
|
||||||
|
|
||||||
|
|
||||||
|
def _decode_policy_constraints(backend, pc):
    """Decode a POLICY_CONSTRAINTS struct into ``x509.PolicyConstraints``."""
    pc = backend._ffi.cast("POLICY_CONSTRAINTS *", pc)
    pc = backend._ffi.gc(pc, backend._lib.POLICY_CONSTRAINTS_free)

    # Both fields are optional ASN1_INTEGERs; NULL maps to None.
    require_explicit_policy = _asn1_integer_to_int_or_none(
        backend, pc.requireExplicitPolicy
    )
    inhibit_policy_mapping = _asn1_integer_to_int_or_none(
        backend, pc.inhibitPolicyMapping
    )

    return x509.PolicyConstraints(
        require_explicit_policy, inhibit_policy_mapping
    )
|
||||||
|
|
||||||
|
|
||||||
|
def _decode_extended_key_usage(backend, sk):
    """Decode a STACK_OF(ASN1_OBJECT) of usage OIDs into
    ``x509.ExtendedKeyUsage``.
    """
    sk = backend._ffi.cast("Cryptography_STACK_OF_ASN1_OBJECT *", sk)
    sk = backend._ffi.gc(sk, backend._lib.sk_ASN1_OBJECT_free)
    num = backend._lib.sk_ASN1_OBJECT_num(sk)
    ekus = []

    for i in range(num):
        obj = backend._lib.sk_ASN1_OBJECT_value(sk, i)
        backend.openssl_assert(obj != backend._ffi.NULL)
        oid = x509.ObjectIdentifier(_obj2txt(backend, obj))
        ekus.append(oid)

    return x509.ExtendedKeyUsage(ekus)
|
||||||
|
|
||||||
|
|
||||||
|
# DIST_POINT_NAME.type discriminator values. OpenSSL defines no named
# constants for these, so we mirror the raw values here.
_DISTPOINT_TYPE_FULLNAME = 0
_DISTPOINT_TYPE_RELATIVENAME = 1
|
||||||
|
|
||||||
|
|
||||||
|
def _decode_crl_distribution_points(backend, cdps):
    """Decode a STACK_OF(DIST_POINT) into ``x509.CRLDistributionPoints``.

    Each distribution point may carry any combination of a full name, a
    relative name, reason flags and a CRL issuer; absent fields are None.
    """
    cdps = backend._ffi.cast("Cryptography_STACK_OF_DIST_POINT *", cdps)
    cdps = backend._ffi.gc(cdps, backend._lib.sk_DIST_POINT_free)
    num = backend._lib.sk_DIST_POINT_num(cdps)

    dist_points = []
    for i in range(num):
        full_name = None
        relative_name = None
        crl_issuer = None
        reasons = None
        cdp = backend._lib.sk_DIST_POINT_value(cdps, i)
        if cdp.reasons != backend._ffi.NULL:
            # ReasonFlags ::= BIT STRING (RFC 5280). Bit 0 is "unused" and
            # is never surfaced; bits 1-8 map to the enum members below.
            get_bit = backend._lib.ASN1_BIT_STRING_get_bit
            reasons = frozenset(
                flag
                for bit, flag in (
                    (1, x509.ReasonFlags.key_compromise),
                    (2, x509.ReasonFlags.ca_compromise),
                    (3, x509.ReasonFlags.affiliation_changed),
                    (4, x509.ReasonFlags.superseded),
                    (5, x509.ReasonFlags.cessation_of_operation),
                    (6, x509.ReasonFlags.certificate_hold),
                    (7, x509.ReasonFlags.privilege_withdrawn),
                    (8, x509.ReasonFlags.aa_compromise),
                )
                if get_bit(cdp.reasons, bit)
            )

        if cdp.CRLissuer != backend._ffi.NULL:
            crl_issuer = _decode_general_names(backend, cdp.CRLissuer)

        # Certificates may have a crl_issuer/reasons and no distribution
        # point so make sure it's not null.
        if cdp.distpoint != backend._ffi.NULL:
            # Type 0 is fullName, there is no #define for it in the code.
            if cdp.distpoint.type == _DISTPOINT_TYPE_FULLNAME:
                full_name = _decode_general_names(
                    backend, cdp.distpoint.name.fullname
                )
            # OpenSSL code doesn't test for a specific type for
            # relativename, everything that isn't fullname is considered
            # relativename.
            else:
                rns = cdp.distpoint.name.relativename
                rnum = backend._lib.sk_X509_NAME_ENTRY_num(rns)
                attributes = []
                # NOTE: use a distinct index name so the outer loop
                # variable ``i`` is not shadowed.
                for j in range(rnum):
                    rn = backend._lib.sk_X509_NAME_ENTRY_value(
                        rns, j
                    )
                    backend.openssl_assert(rn != backend._ffi.NULL)
                    attributes.append(
                        _decode_x509_name_entry(backend, rn)
                    )

                relative_name = x509.Name(attributes)

        dist_points.append(
            x509.DistributionPoint(
                full_name, relative_name, reasons, crl_issuer
            )
        )

    return x509.CRLDistributionPoints(dist_points)
|
||||||
|
|
||||||
|
|
||||||
|
def _decode_inhibit_any_policy(backend, asn1_int):
    """Decode an ASN1_INTEGER skip-cert count into ``x509.InhibitAnyPolicy``."""
    asn1_int = backend._ffi.cast("ASN1_INTEGER *", asn1_int)
    asn1_int = backend._ffi.gc(asn1_int, backend._lib.ASN1_INTEGER_free)
    skip_certs = _asn1_integer_to_int(backend, asn1_int)
    return x509.InhibitAnyPolicy(skip_certs)
|
||||||
|
|
||||||
|
|
||||||
|
# CRLReason ::= ENUMERATED {
#     unspecified             (0),
#     keyCompromise           (1),
#     cACompromise            (2),
#     affiliationChanged      (3),
#     superseded              (4),
#     cessationOfOperation    (5),
#     certificateHold         (6),
#          -- value 7 is not used
#     removeFromCRL           (8),
#     privilegeWithdrawn      (9),
#     aACompromise           (10) }
_CRL_ENTRY_REASON_CODE_TO_ENUM = {
    0: x509.ReasonFlags.unspecified,
    1: x509.ReasonFlags.key_compromise,
    2: x509.ReasonFlags.ca_compromise,
    3: x509.ReasonFlags.affiliation_changed,
    4: x509.ReasonFlags.superseded,
    5: x509.ReasonFlags.cessation_of_operation,
    6: x509.ReasonFlags.certificate_hold,
    8: x509.ReasonFlags.remove_from_crl,
    9: x509.ReasonFlags.privilege_withdrawn,
    10: x509.ReasonFlags.aa_compromise,
}


# Inverse mapping (enum -> RFC 5280 code). Derived from the table above so
# the two mappings can never drift out of sync.
_CRL_ENTRY_REASON_ENUM_TO_CODE = {
    enum: code for code, enum in _CRL_ENTRY_REASON_CODE_TO_ENUM.items()
}
|
||||||
|
|
||||||
|
|
||||||
|
def _decode_crl_reason(backend, enum):
    """Decode an ASN1_ENUMERATED revocation reason into ``x509.CRLReason``.

    Raises ValueError for codes outside the RFC 5280 CRLReason set.
    """
    enum = backend._ffi.cast("ASN1_ENUMERATED *", enum)
    enum = backend._ffi.gc(enum, backend._lib.ASN1_ENUMERATED_free)
    code = backend._lib.ASN1_ENUMERATED_get(enum)

    try:
        return x509.CRLReason(_CRL_ENTRY_REASON_CODE_TO_ENUM[code])
    except KeyError:
        raise ValueError("Unsupported reason code: {0}".format(code))
|
||||||
|
|
||||||
|
|
||||||
|
def _decode_invalidity_date(backend, inv_date):
    """Decode an ASN1_GENERALIZEDTIME into ``x509.InvalidityDate``."""
    generalized_time = backend._ffi.cast(
        "ASN1_GENERALIZEDTIME *", inv_date
    )
    generalized_time = backend._ffi.gc(
        generalized_time, backend._lib.ASN1_GENERALIZEDTIME_free
    )
    return x509.InvalidityDate(
        _parse_asn1_generalized_time(backend, generalized_time)
    )
|
||||||
|
|
||||||
|
|
||||||
|
def _decode_cert_issuer(backend, ext):
    """
    This handler decodes the CertificateIssuer entry extension directly
    from the X509_EXTENSION object. This is necessary because this entry
    extension is not directly supported by OpenSSL 0.9.8.
    """

    data_ptr_ptr = backend._ffi.new("const unsigned char **")
    value = backend._lib.X509_EXTENSION_get_data(ext)
    data_ptr_ptr[0] = value.data
    gns = backend._lib.d2i_GENERAL_NAMES(
        backend._ffi.NULL, data_ptr_ptr, value.length
    )

    # Check the result of d2i_GENERAL_NAMES() is valid. Usually this is
    # covered in _X509ExtensionParser but since we are responsible for
    # decoding this entry extension ourselves, we have to do this here.
    if gns == backend._ffi.NULL:
        backend._consume_errors()
        raise ValueError(
            "The {0} extension is corrupted and can't be parsed".format(
                CRLEntryExtensionOID.CERTIFICATE_ISSUER))

    gns = backend._ffi.gc(gns, backend._lib.GENERAL_NAMES_free)
    return x509.CertificateIssuer(_decode_general_names(backend, gns))
|
||||||
|
|
||||||
|
|
||||||
|
def _asn1_to_der(backend, asn1_type):
    """Serialize an ASN1_TYPE to DER and return it as bytes."""
    buf = backend._ffi.new("unsigned char **")
    # i2d allocates the output buffer; res is the encoded length.
    res = backend._lib.i2d_ASN1_TYPE(asn1_type, buf)
    backend.openssl_assert(res >= 0)
    backend.openssl_assert(buf[0] != backend._ffi.NULL)
    # The buffer was allocated by OpenSSL, so it must be released with
    # OPENSSL_free, not the cffi default.
    buf = backend._ffi.gc(
        buf, lambda buffer: backend._lib.OPENSSL_free(buffer[0])
    )
    return backend._ffi.buffer(buf[0], res)[:]
|
||||||
|
|
||||||
|
|
||||||
|
def _asn1_integer_to_int(backend, asn1_int):
    """Convert an ASN1_INTEGER to a Python int via an OpenSSL BIGNUM."""
    bn = backend._lib.ASN1_INTEGER_to_BN(asn1_int, backend._ffi.NULL)
    backend.openssl_assert(bn != backend._ffi.NULL)
    bn = backend._ffi.gc(bn, backend._lib.BN_free)
    return backend._bn_to_int(bn)
|
||||||
|
|
||||||
|
|
||||||
|
def _asn1_integer_to_int_or_none(backend, asn1_int):
    """Like ``_asn1_integer_to_int`` but maps an OpenSSL NULL to ``None``."""
    return (
        None
        if asn1_int == backend._ffi.NULL
        else _asn1_integer_to_int(backend, asn1_int)
    )
|
||||||
|
|
||||||
|
|
||||||
|
def _asn1_string_to_bytes(backend, asn1_string):
    """Copy an ASN1_STRING's contents into a Python bytes object."""
    raw = backend._ffi.buffer(asn1_string.data, asn1_string.length)
    return raw[:]
|
||||||
|
|
||||||
|
|
||||||
|
def _asn1_string_to_ascii(backend, asn1_string):
    """Decode an ASN1_STRING's bytes as an ASCII text string."""
    return _asn1_string_to_bytes(backend, asn1_string).decode("ascii")
|
||||||
|
|
||||||
|
|
||||||
|
def _asn1_string_to_utf8(backend, asn1_string):
    """Convert an ASN1_STRING of any supported type to a UTF-8 ``str``.

    Raises ValueError when OpenSSL cannot convert the string's type.
    """
    buf = backend._ffi.new("unsigned char **")
    # ASN1_STRING_to_UTF8 allocates the output; res is the byte length,
    # or -1 on unsupported string types.
    res = backend._lib.ASN1_STRING_to_UTF8(buf, asn1_string)
    if res == -1:
        raise ValueError(
            "Unsupported ASN1 string type. Type: {0}".format(asn1_string.type)
        )

    backend.openssl_assert(buf[0] != backend._ffi.NULL)
    # OpenSSL allocated the buffer, so free it with OPENSSL_free.
    buf = backend._ffi.gc(
        buf, lambda buffer: backend._lib.OPENSSL_free(buffer[0])
    )
    return backend._ffi.buffer(buf[0], res)[:].decode('utf8')
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_asn1_time(backend, asn1_time):
    """Parse an ASN1_TIME (UTCTime or GeneralizedTime) into a datetime.

    Normalizes via ASN1_TIME_to_generalizedtime so both encodings share
    one parsing path.
    """
    backend.openssl_assert(asn1_time != backend._ffi.NULL)
    generalized_time = backend._lib.ASN1_TIME_to_generalizedtime(
        asn1_time, backend._ffi.NULL
    )
    backend.openssl_assert(generalized_time != backend._ffi.NULL)
    generalized_time = backend._ffi.gc(
        generalized_time, backend._lib.ASN1_GENERALIZEDTIME_free
    )
    return _parse_asn1_generalized_time(backend, generalized_time)
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_asn1_generalized_time(backend, generalized_time):
    """Parse an ASN1_GENERALIZEDTIME ("YYYYMMDDHHMMSSZ") into a datetime.

    The returned datetime is naive; the trailing 'Z' means the value is
    UTC by the GeneralizedTime definition.
    """
    time = _asn1_string_to_ascii(
        backend, backend._ffi.cast("ASN1_STRING *", generalized_time)
    )
    return datetime.datetime.strptime(time, "%Y%m%d%H%M%SZ")
|
||||||
|
|
||||||
|
|
||||||
|
# Dispatch table: extension OID -> decoder for certificate/CSR extensions.
_EXTENSION_HANDLERS = {
    ExtensionOID.BASIC_CONSTRAINTS: _decode_basic_constraints,
    ExtensionOID.SUBJECT_KEY_IDENTIFIER: _decode_subject_key_identifier,
    ExtensionOID.KEY_USAGE: _decode_key_usage,
    ExtensionOID.SUBJECT_ALTERNATIVE_NAME: _decode_subject_alt_name,
    ExtensionOID.EXTENDED_KEY_USAGE: _decode_extended_key_usage,
    ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _decode_authority_key_identifier,
    ExtensionOID.AUTHORITY_INFORMATION_ACCESS: (
        _decode_authority_information_access
    ),
    ExtensionOID.CERTIFICATE_POLICIES: _decode_certificate_policies,
    ExtensionOID.CRL_DISTRIBUTION_POINTS: _decode_crl_distribution_points,
    ExtensionOID.OCSP_NO_CHECK: _decode_ocsp_no_check,
    ExtensionOID.INHIBIT_ANY_POLICY: _decode_inhibit_any_policy,
    ExtensionOID.ISSUER_ALTERNATIVE_NAME: _decode_issuer_alt_name,
    ExtensionOID.NAME_CONSTRAINTS: _decode_name_constraints,
    ExtensionOID.POLICY_CONSTRAINTS: _decode_policy_constraints,
}

# Dispatch table for CRL *entry* (revoked certificate) extensions.
_REVOKED_EXTENSION_HANDLERS = {
    CRLEntryExtensionOID.CRL_REASON: _decode_crl_reason,
    CRLEntryExtensionOID.INVALIDITY_DATE: _decode_invalidity_date,
    CRLEntryExtensionOID.CERTIFICATE_ISSUER: _decode_cert_issuer,
}

# Entry extensions the parser must decode itself (see _decode_cert_issuer).
_REVOKED_UNSUPPORTED_EXTENSIONS = {
    CRLEntryExtensionOID.CERTIFICATE_ISSUER,
}

# Dispatch table for CRL-level extensions.
_CRL_EXTENSION_HANDLERS = {
    ExtensionOID.CRL_NUMBER: _decode_crl_number,
    ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _decode_authority_key_identifier,
    ExtensionOID.ISSUER_ALTERNATIVE_NAME: _decode_issuer_alt_name,
    ExtensionOID.AUTHORITY_INFORMATION_ACCESS: (
        _decode_authority_information_access
    ),
}

_CERTIFICATE_EXTENSION_PARSER = _X509ExtensionParser(
    ext_count=lambda backend, x: backend._lib.X509_get_ext_count(x),
    get_ext=lambda backend, x, i: backend._lib.X509_get_ext(x, i),
    handlers=_EXTENSION_HANDLERS
)

_CSR_EXTENSION_PARSER = _X509ExtensionParser(
    ext_count=lambda backend, x: backend._lib.sk_X509_EXTENSION_num(x),
    get_ext=lambda backend, x, i: backend._lib.sk_X509_EXTENSION_value(x, i),
    handlers=_EXTENSION_HANDLERS
)

_REVOKED_CERTIFICATE_EXTENSION_PARSER = _X509ExtensionParser(
    ext_count=lambda backend, x: backend._lib.X509_REVOKED_get_ext_count(x),
    get_ext=lambda backend, x, i: backend._lib.X509_REVOKED_get_ext(x, i),
    handlers=_REVOKED_EXTENSION_HANDLERS,
    unsupported_exts=_REVOKED_UNSUPPORTED_EXTENSIONS
)

_CRL_EXTENSION_PARSER = _X509ExtensionParser(
    ext_count=lambda backend, x: backend._lib.X509_CRL_get_ext_count(x),
    get_ext=lambda backend, x, i: backend._lib.X509_CRL_get_ext(x, i),
    handlers=_CRL_EXTENSION_HANDLERS,
)
|
|
@ -0,0 +1,303 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
from cryptography import utils
|
||||||
|
from cryptography.exceptions import InvalidSignature
|
||||||
|
from cryptography.hazmat.backends.openssl.utils import _truncate_digest
|
||||||
|
from cryptography.hazmat.primitives import hashes, serialization
|
||||||
|
from cryptography.hazmat.primitives.asymmetric import (
|
||||||
|
AsymmetricSignatureContext, AsymmetricVerificationContext, dsa
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _truncate_digest_for_dsa(dsa_cdata, digest, backend):
    """
    This function truncates digests that are longer than a given DSA
    key's length so they can be signed. OpenSSL does this for us in
    1.0.0c+ and it isn't needed in 0.9.8, but that leaves us with three
    releases (1.0.0, 1.0.0a, and 1.0.0b) where this is a problem. This
    truncation is not required in 0.9.8 because DSA is limited to SHA-1.
    """

    # Fetch only q (the subgroup order); p and g are not needed here.
    q = backend._ffi.new("BIGNUM **")
    backend._lib.DSA_get0_pqg(
        dsa_cdata, backend._ffi.NULL, q, backend._ffi.NULL
    )
    backend.openssl_assert(q[0] != backend._ffi.NULL)

    # The digest must not exceed the bit length of q.
    order_bits = backend._lib.BN_num_bits(q[0])
    return _truncate_digest(digest, order_bits)
|
||||||
|
|
||||||
|
|
||||||
|
@utils.register_interface(AsymmetricVerificationContext)
class _DSAVerificationContext(object):
    """Incremental DSA signature verification over a hash context."""

    def __init__(self, backend, public_key, signature, algorithm):
        self._backend = backend
        self._public_key = public_key
        self._signature = signature
        self._algorithm = algorithm

        # Hash is accumulated incrementally via update(); the digest is
        # finalized and verified in verify().
        self._hash_ctx = hashes.Hash(self._algorithm, self._backend)

    def update(self, data):
        """Feed more message bytes into the hash."""
        self._hash_ctx.update(data)

    def verify(self):
        """Finalize the digest and verify it; raises InvalidSignature on
        failure."""
        data_to_verify = self._hash_ctx.finalize()

        # See _truncate_digest_for_dsa: some OpenSSL releases do not
        # truncate long digests themselves.
        data_to_verify = _truncate_digest_for_dsa(
            self._public_key._dsa_cdata, data_to_verify, self._backend
        )

        # The first parameter passed to DSA_verify is unused by OpenSSL but
        # must be an integer.
        res = self._backend._lib.DSA_verify(
            0, data_to_verify, len(data_to_verify), self._signature,
            len(self._signature), self._public_key._dsa_cdata)

        if res != 1:
            # Clear OpenSSL's error queue before surfacing the failure.
            self._backend._consume_errors()
            raise InvalidSignature
|
||||||
|
|
||||||
|
|
||||||
|
@utils.register_interface(AsymmetricSignatureContext)
class _DSASignatureContext(object):
    """Incremental DSA signing over a hash context."""

    def __init__(self, backend, private_key, algorithm):
        self._backend = backend
        self._private_key = private_key
        self._algorithm = algorithm
        # Hash is accumulated via update(); signing happens in finalize().
        self._hash_ctx = hashes.Hash(self._algorithm, self._backend)

    def update(self, data):
        """Feed more message bytes into the hash."""
        self._hash_ctx.update(data)

    def finalize(self):
        """Finalize the digest and return the DER-encoded DSA signature."""
        data_to_sign = self._hash_ctx.finalize()
        data_to_sign = _truncate_digest_for_dsa(
            self._private_key._dsa_cdata, data_to_sign, self._backend
        )
        # DSA_size gives the maximum signature length for this key.
        sig_buf_len = self._backend._lib.DSA_size(self._private_key._dsa_cdata)
        sig_buf = self._backend._ffi.new("unsigned char[]", sig_buf_len)
        buflen = self._backend._ffi.new("unsigned int *")

        # The first parameter passed to DSA_sign is unused by OpenSSL but
        # must be an integer.
        res = self._backend._lib.DSA_sign(
            0, data_to_sign, len(data_to_sign), sig_buf,
            buflen, self._private_key._dsa_cdata)
        self._backend.openssl_assert(res == 1)
        self._backend.openssl_assert(buflen[0])

        # Trim the buffer to the actual signature length reported in buflen.
        return self._backend._ffi.buffer(sig_buf)[:buflen[0]]
|
||||||
|
|
||||||
|
|
||||||
|
@utils.register_interface(dsa.DSAParametersWithNumbers)
class _DSAParameters(object):
    """Wrapper around an OpenSSL DSA struct carrying only domain
    parameters (p, q, g)."""

    def __init__(self, backend, dsa_cdata):
        self._backend = backend
        self._dsa_cdata = dsa_cdata

    def parameter_numbers(self):
        """Return the p/q/g values as a ``dsa.DSAParameterNumbers``."""
        p = self._backend._ffi.new("BIGNUM **")
        q = self._backend._ffi.new("BIGNUM **")
        g = self._backend._ffi.new("BIGNUM **")
        self._backend._lib.DSA_get0_pqg(self._dsa_cdata, p, q, g)
        self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
        self._backend.openssl_assert(q[0] != self._backend._ffi.NULL)
        self._backend.openssl_assert(g[0] != self._backend._ffi.NULL)
        return dsa.DSAParameterNumbers(
            p=self._backend._bn_to_int(p[0]),
            q=self._backend._bn_to_int(q[0]),
            g=self._backend._bn_to_int(g[0])
        )

    def generate_private_key(self):
        """Generate a new private key from these domain parameters."""
        return self._backend.generate_dsa_private_key(self)
|
||||||
|
|
||||||
|
|
||||||
|
@utils.register_interface(dsa.DSAPrivateKeyWithSerialization)
class _DSAPrivateKey(object):
    """DSA private key backed by an OpenSSL DSA struct and EVP_PKEY."""

    def __init__(self, backend, dsa_cdata, evp_pkey):
        self._backend = backend
        self._dsa_cdata = dsa_cdata
        self._evp_pkey = evp_pkey

        # Cache the key size (bit length of p) up front.
        p = self._backend._ffi.new("BIGNUM **")
        self._backend._lib.DSA_get0_pqg(
            dsa_cdata, p, self._backend._ffi.NULL, self._backend._ffi.NULL
        )
        self._backend.openssl_assert(p[0] != backend._ffi.NULL)
        self._key_size = self._backend._lib.BN_num_bits(p[0])

    key_size = utils.read_only_property("_key_size")

    def signer(self, signature_algorithm):
        """Return an incremental signing context for this key."""
        return _DSASignatureContext(self._backend, self, signature_algorithm)

    def private_numbers(self):
        """Return the full key material as ``dsa.DSAPrivateNumbers``."""
        p = self._backend._ffi.new("BIGNUM **")
        q = self._backend._ffi.new("BIGNUM **")
        g = self._backend._ffi.new("BIGNUM **")
        pub_key = self._backend._ffi.new("BIGNUM **")
        priv_key = self._backend._ffi.new("BIGNUM **")
        self._backend._lib.DSA_get0_pqg(self._dsa_cdata, p, q, g)
        self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
        self._backend.openssl_assert(q[0] != self._backend._ffi.NULL)
        self._backend.openssl_assert(g[0] != self._backend._ffi.NULL)
        self._backend._lib.DSA_get0_key(self._dsa_cdata, pub_key, priv_key)
        self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
        self._backend.openssl_assert(priv_key[0] != self._backend._ffi.NULL)
        return dsa.DSAPrivateNumbers(
            public_numbers=dsa.DSAPublicNumbers(
                parameter_numbers=dsa.DSAParameterNumbers(
                    p=self._backend._bn_to_int(p[0]),
                    q=self._backend._bn_to_int(q[0]),
                    g=self._backend._bn_to_int(g[0])
                ),
                y=self._backend._bn_to_int(pub_key[0])
            ),
            x=self._backend._bn_to_int(priv_key[0])
        )

    def public_key(self):
        """Return the corresponding public key as a new ``_DSAPublicKey``."""
        dsa_cdata = self._backend._lib.DSA_new()
        self._backend.openssl_assert(dsa_cdata != self._backend._ffi.NULL)
        dsa_cdata = self._backend._ffi.gc(
            dsa_cdata, self._backend._lib.DSA_free
        )
        p = self._backend._ffi.new("BIGNUM **")
        q = self._backend._ffi.new("BIGNUM **")
        g = self._backend._ffi.new("BIGNUM **")
        self._backend._lib.DSA_get0_pqg(self._dsa_cdata, p, q, g)
        self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
        self._backend.openssl_assert(q[0] != self._backend._ffi.NULL)
        self._backend.openssl_assert(g[0] != self._backend._ffi.NULL)
        # DSA_set0_* takes ownership of its BIGNUM arguments, so we must
        # hand it duplicates rather than the originals owned by
        # self._dsa_cdata.
        p_dup = self._backend._lib.BN_dup(p[0])
        q_dup = self._backend._lib.BN_dup(q[0])
        g_dup = self._backend._lib.BN_dup(g[0])
        res = self._backend._lib.DSA_set0_pqg(dsa_cdata, p_dup, q_dup, g_dup)
        self._backend.openssl_assert(res == 1)
        pub_key = self._backend._ffi.new("BIGNUM **")
        self._backend._lib.DSA_get0_key(
            self._dsa_cdata, pub_key, self._backend._ffi.NULL
        )
        self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
        pub_key_dup = self._backend._lib.BN_dup(pub_key[0])
        res = self._backend._lib.DSA_set0_key(
            dsa_cdata, pub_key_dup, self._backend._ffi.NULL
        )
        self._backend.openssl_assert(res == 1)
        evp_pkey = self._backend._dsa_cdata_to_evp_pkey(dsa_cdata)
        return _DSAPublicKey(self._backend, dsa_cdata, evp_pkey)

    def parameters(self):
        """Return the domain parameters as a new ``_DSAParameters``."""
        dsa_cdata = self._backend._lib.DSA_new()
        self._backend.openssl_assert(dsa_cdata != self._backend._ffi.NULL)
        dsa_cdata = self._backend._ffi.gc(
            dsa_cdata, self._backend._lib.DSA_free
        )
        p = self._backend._ffi.new("BIGNUM **")
        q = self._backend._ffi.new("BIGNUM **")
        g = self._backend._ffi.new("BIGNUM **")
        self._backend._lib.DSA_get0_pqg(self._dsa_cdata, p, q, g)
        self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
        self._backend.openssl_assert(q[0] != self._backend._ffi.NULL)
        self._backend.openssl_assert(g[0] != self._backend._ffi.NULL)
        # Duplicate before DSA_set0_pqg; it takes ownership (see
        # public_key above).
        p_dup = self._backend._lib.BN_dup(p[0])
        q_dup = self._backend._lib.BN_dup(q[0])
        g_dup = self._backend._lib.BN_dup(g[0])
        res = self._backend._lib.DSA_set0_pqg(dsa_cdata, p_dup, q_dup, g_dup)
        self._backend.openssl_assert(res == 1)
        return _DSAParameters(self._backend, dsa_cdata)

    def private_bytes(self, encoding, format, encryption_algorithm):
        """Serialize the private key via the backend's generic serializer."""
        return self._backend._private_key_bytes(
            encoding,
            format,
            encryption_algorithm,
            self._evp_pkey,
            self._dsa_cdata
        )
|
||||||
|
|
||||||
|
|
||||||
|
@utils.register_interface(dsa.DSAPublicKeyWithSerialization)
class _DSAPublicKey(object):
    """DSA public key backed by an OpenSSL DSA struct and EVP_PKEY."""

    def __init__(self, backend, dsa_cdata, evp_pkey):
        self._backend = backend
        self._dsa_cdata = dsa_cdata
        self._evp_pkey = evp_pkey
        # Cache the key size (bit length of p) up front.
        p = self._backend._ffi.new("BIGNUM **")
        self._backend._lib.DSA_get0_pqg(
            dsa_cdata, p, self._backend._ffi.NULL, self._backend._ffi.NULL
        )
        self._backend.openssl_assert(p[0] != backend._ffi.NULL)
        self._key_size = self._backend._lib.BN_num_bits(p[0])

    key_size = utils.read_only_property("_key_size")

    def verifier(self, signature, signature_algorithm):
        """Return an incremental verification context for ``signature``."""
        if not isinstance(signature, bytes):
            raise TypeError("signature must be bytes.")

        return _DSAVerificationContext(
            self._backend, self, signature, signature_algorithm
        )

    def public_numbers(self):
        """Return the public key material as ``dsa.DSAPublicNumbers``."""
        p = self._backend._ffi.new("BIGNUM **")
        q = self._backend._ffi.new("BIGNUM **")
        g = self._backend._ffi.new("BIGNUM **")
        pub_key = self._backend._ffi.new("BIGNUM **")
        self._backend._lib.DSA_get0_pqg(self._dsa_cdata, p, q, g)
        self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
        self._backend.openssl_assert(q[0] != self._backend._ffi.NULL)
        self._backend.openssl_assert(g[0] != self._backend._ffi.NULL)
        self._backend._lib.DSA_get0_key(
            self._dsa_cdata, pub_key, self._backend._ffi.NULL
        )
        self._backend.openssl_assert(pub_key[0] != self._backend._ffi.NULL)
        return dsa.DSAPublicNumbers(
            parameter_numbers=dsa.DSAParameterNumbers(
                p=self._backend._bn_to_int(p[0]),
                q=self._backend._bn_to_int(q[0]),
                g=self._backend._bn_to_int(g[0])
            ),
            y=self._backend._bn_to_int(pub_key[0])
        )

    def parameters(self):
        """Return the domain parameters as a new ``_DSAParameters``."""
        dsa_cdata = self._backend._lib.DSA_new()
        self._backend.openssl_assert(dsa_cdata != self._backend._ffi.NULL)
        dsa_cdata = self._backend._ffi.gc(
            dsa_cdata, self._backend._lib.DSA_free
        )
        p = self._backend._ffi.new("BIGNUM **")
        q = self._backend._ffi.new("BIGNUM **")
        g = self._backend._ffi.new("BIGNUM **")
        self._backend._lib.DSA_get0_pqg(self._dsa_cdata, p, q, g)
        self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
        self._backend.openssl_assert(q[0] != self._backend._ffi.NULL)
        self._backend.openssl_assert(g[0] != self._backend._ffi.NULL)
        # DSA_set0_pqg takes ownership of its BIGNUM arguments, so hand it
        # duplicates rather than the BIGNUMs owned by self._dsa_cdata.
        p_dup = self._backend._lib.BN_dup(p[0])
        q_dup = self._backend._lib.BN_dup(q[0])
        g_dup = self._backend._lib.BN_dup(g[0])
        res = self._backend._lib.DSA_set0_pqg(dsa_cdata, p_dup, q_dup, g_dup)
        self._backend.openssl_assert(res == 1)
        return _DSAParameters(self._backend, dsa_cdata)

    def public_bytes(self, encoding, format):
        """Serialize the public key; PKCS1 is rejected because it is not
        defined for DSA keys."""
        if format is serialization.PublicFormat.PKCS1:
            raise ValueError(
                "DSA public keys do not support PKCS1 serialization"
            )

        return self._backend._public_key_bytes(
            encoding,
            format,
            self,
            self._evp_pkey,
            None
        )
|
|
@ -0,0 +1,305 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
from cryptography import utils
|
||||||
|
from cryptography.exceptions import (
|
||||||
|
InvalidSignature, UnsupportedAlgorithm, _Reasons
|
||||||
|
)
|
||||||
|
from cryptography.hazmat.backends.openssl.utils import _truncate_digest
|
||||||
|
from cryptography.hazmat.primitives import hashes, serialization
|
||||||
|
from cryptography.hazmat.primitives.asymmetric import (
|
||||||
|
AsymmetricSignatureContext, AsymmetricVerificationContext, ec
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _truncate_digest_for_ecdsa(ec_key_cdata, digest, backend):
    """
    This function truncates digests that are longer than a given elliptic
    curve key's length so they can be signed. Since elliptic curve keys are
    much shorter than RSA keys many digests (e.g. SHA-512) may require
    truncation.
    """

    _lib = backend._lib
    _ffi = backend._ffi

    group = _lib.EC_KEY_get0_group(ec_key_cdata)

    with backend._tmp_bn_ctx() as bn_ctx:
        order = _lib.BN_CTX_get(bn_ctx)
        backend.openssl_assert(order != _ffi.NULL)

        res = _lib.EC_GROUP_get_order(group, order, bn_ctx)
        backend.openssl_assert(res == 1)

        # ECDSA only uses as many leftmost bits of the digest as the
        # group order has.
        order_bits = _lib.BN_num_bits(order)

    return _truncate_digest(digest, order_bits)
|
||||||
|
|
||||||
|
|
||||||
|
def _ec_key_curve_sn(backend, ec_key):
    """Return the short name (SN) of the named curve backing *ec_key*."""
    lib = backend._lib
    ffi = backend._ffi

    group = lib.EC_KEY_get0_group(ec_key)
    backend.openssl_assert(group != ffi.NULL)

    nid = lib.EC_GROUP_get_curve_name(group)
    # Keys whose curve is given by explicit parameters have no NID; they
    # are rejected for now rather than silently mishandled.
    if nid == lib.NID_undef:
        raise NotImplementedError(
            "ECDSA certificates with unnamed curves are unsupported "
            "at this time"
        )

    curve_name = lib.OBJ_nid2sn(nid)
    backend.openssl_assert(curve_name != ffi.NULL)

    return ffi.string(curve_name).decode('ascii')
|
||||||
|
|
||||||
|
|
||||||
|
def _mark_asn1_named_ec_curve(backend, ec_cdata):
    """
    Set the named curve flag on the EC_KEY. This causes OpenSSL to
    serialize EC keys along with their curve OID which makes
    deserialization easier.
    """
    named_curve_flag = backend._lib.OPENSSL_EC_NAMED_CURVE
    backend._lib.EC_KEY_set_asn1_flag(ec_cdata, named_curve_flag)
|
||||||
|
|
||||||
|
|
||||||
|
def _sn_to_elliptic_curve(backend, sn):
    """Map an OpenSSL curve short name to a cryptography curve instance."""
    curve_cls = ec._CURVE_TYPES.get(sn)
    if curve_cls is None:
        raise UnsupportedAlgorithm(
            "{0} is not a supported elliptic curve".format(sn),
            _Reasons.UNSUPPORTED_ELLIPTIC_CURVE
        )
    return curve_cls()
|
||||||
|
|
||||||
|
|
||||||
|
@utils.register_interface(AsymmetricSignatureContext)
class _ECDSASignatureContext(object):
    """Streaming ECDSA signing context: hash incrementally, sign on finalize."""

    def __init__(self, backend, private_key, algorithm):
        self._backend = backend
        self._private_key = private_key
        # Running hash over all data fed via update().
        self._digest = hashes.Hash(algorithm, backend)

    def update(self, data):
        self._digest.update(data)

    def finalize(self):
        """Return the DER-encoded ECDSA signature over the hashed data."""
        ec_key = self._private_key._ec_key

        digest = self._digest.finalize()

        # The digest may only carry as many bits as the curve's group order.
        digest = _truncate_digest_for_ecdsa(ec_key, digest, self._backend)

        # ECDSA_size gives an upper bound on the DER signature length.
        max_size = self._backend._lib.ECDSA_size(ec_key)
        self._backend.openssl_assert(max_size > 0)

        sigbuf = self._backend._ffi.new("char[]", max_size)
        siglen_ptr = self._backend._ffi.new("unsigned int[]", 1)
        res = self._backend._lib.ECDSA_sign(
            0,
            digest,
            len(digest),
            sigbuf,
            siglen_ptr,
            ec_key
        )
        self._backend.openssl_assert(res == 1)
        # Trim the over-allocated buffer to the actual signature length.
        return self._backend._ffi.buffer(sigbuf)[:siglen_ptr[0]]
|
||||||
|
|
||||||
|
|
||||||
|
@utils.register_interface(AsymmetricVerificationContext)
class _ECDSAVerificationContext(object):
    """Streaming ECDSA verification context for a fixed signature."""

    def __init__(self, backend, public_key, signature, algorithm):
        self._backend = backend
        self._public_key = public_key
        # DER-encoded signature to check against the hashed data.
        self._signature = signature
        self._digest = hashes.Hash(algorithm, backend)

    def update(self, data):
        self._digest.update(data)

    def verify(self):
        """Verify the signature; raise InvalidSignature on mismatch.

        :returns: True on success.
        :raises cryptography.exceptions.InvalidSignature: if the
            signature does not match.
        """
        ec_key = self._public_key._ec_key

        digest = self._digest.finalize()

        # Must truncate exactly as the signer did.
        digest = _truncate_digest_for_ecdsa(ec_key, digest, self._backend)

        res = self._backend._lib.ECDSA_verify(
            0,
            digest,
            len(digest),
            self._signature,
            len(self._signature),
            ec_key
        )
        if res != 1:
            # Clear the OpenSSL error queue before raising so later calls
            # don't see stale errors.
            self._backend._consume_errors()
            raise InvalidSignature
        return True
|
||||||
|
|
||||||
|
|
||||||
|
@utils.register_interface(ec.EllipticCurvePrivateKeyWithSerialization)
class _EllipticCurvePrivateKey(object):
    """EC private key backed by OpenSSL ``EC_KEY``/``EVP_PKEY`` cdata."""

    def __init__(self, backend, ec_key_cdata, evp_pkey):
        self._backend = backend
        # Ensure the key serializes with its curve OID embedded.
        _mark_asn1_named_ec_curve(backend, ec_key_cdata)
        self._ec_key = ec_key_cdata
        self._evp_pkey = evp_pkey

        sn = _ec_key_curve_sn(backend, ec_key_cdata)
        self._curve = _sn_to_elliptic_curve(backend, sn)

    # The curve instance this key lives on, exposed read-only.
    curve = utils.read_only_property("_curve")

    def signer(self, signature_algorithm):
        """Return a signing context for the given ECDSA algorithm.

        :raises UnsupportedAlgorithm: for non-ECDSA algorithms.
        """
        if isinstance(signature_algorithm, ec.ECDSA):
            return _ECDSASignatureContext(
                self._backend, self, signature_algorithm.algorithm
            )
        else:
            raise UnsupportedAlgorithm(
                "Unsupported elliptic curve signature algorithm.",
                _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)

    def exchange(self, algorithm, peer_public_key):
        """Perform an ECDH key exchange and return the shared secret bytes.

        :raises UnsupportedAlgorithm: if the backend cannot do ECDH for
            this curve/algorithm combination.
        :raises ValueError: if the peer key is on a different curve.
        """
        if not (
            self._backend.elliptic_curve_exchange_algorithm_supported(
                algorithm, self.curve
            )
        ):
            raise UnsupportedAlgorithm(
                "This backend does not support the ECDH algorithm.",
                _Reasons.UNSUPPORTED_EXCHANGE_ALGORITHM
            )

        if peer_public_key.curve.name != self.curve.name:
            raise ValueError(
                "peer_public_key and self are not on the same curve"
            )

        group = self._backend._lib.EC_KEY_get0_group(self._ec_key)
        # Shared secret length is the field size in bytes (rounded up).
        z_len = (self._backend._lib.EC_GROUP_get_degree(group) + 7) // 8
        self._backend.openssl_assert(z_len > 0)
        z_buf = self._backend._ffi.new("uint8_t[]", z_len)
        peer_key = self._backend._lib.EC_KEY_get0_public_key(
            peer_public_key._ec_key
        )

        r = self._backend._lib.ECDH_compute_key(
            z_buf, z_len, peer_key, self._ec_key, self._backend._ffi.NULL
        )
        self._backend.openssl_assert(r > 0)
        return self._backend._ffi.buffer(z_buf)[:z_len]

    def public_key(self):
        """Return the corresponding ``_EllipticCurvePublicKey``.

        A fresh EC_KEY is created on the same named curve and only the
        public point is copied into it.
        """
        group = self._backend._lib.EC_KEY_get0_group(self._ec_key)
        self._backend.openssl_assert(group != self._backend._ffi.NULL)

        curve_nid = self._backend._lib.EC_GROUP_get_curve_name(group)

        public_ec_key = self._backend._lib.EC_KEY_new_by_curve_name(curve_nid)
        self._backend.openssl_assert(public_ec_key != self._backend._ffi.NULL)
        public_ec_key = self._backend._ffi.gc(
            public_ec_key, self._backend._lib.EC_KEY_free
        )

        point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key)
        self._backend.openssl_assert(point != self._backend._ffi.NULL)

        res = self._backend._lib.EC_KEY_set_public_key(public_ec_key, point)
        self._backend.openssl_assert(res == 1)

        evp_pkey = self._backend._ec_cdata_to_evp_pkey(public_ec_key)

        return _EllipticCurvePublicKey(self._backend, public_ec_key, evp_pkey)

    def private_numbers(self):
        """Return the key as ``ec.EllipticCurvePrivateNumbers``."""
        bn = self._backend._lib.EC_KEY_get0_private_key(self._ec_key)
        private_value = self._backend._bn_to_int(bn)
        return ec.EllipticCurvePrivateNumbers(
            private_value=private_value,
            public_numbers=self.public_key().public_numbers()
        )

    def private_bytes(self, encoding, format, encryption_algorithm):
        """Serialize the private key in the requested encoding/format."""
        return self._backend._private_key_bytes(
            encoding,
            format,
            encryption_algorithm,
            self._evp_pkey,
            self._ec_key
        )
|
||||||
|
|
||||||
|
|
||||||
|
@utils.register_interface(ec.EllipticCurvePublicKeyWithSerialization)
class _EllipticCurvePublicKey(object):
    """EC public key backed by OpenSSL ``EC_KEY``/``EVP_PKEY`` cdata."""

    def __init__(self, backend, ec_key_cdata, evp_pkey):
        self._backend = backend
        # Ensure the key serializes with its curve OID embedded.
        _mark_asn1_named_ec_curve(backend, ec_key_cdata)
        self._ec_key = ec_key_cdata
        self._evp_pkey = evp_pkey

        sn = _ec_key_curve_sn(backend, ec_key_cdata)
        self._curve = _sn_to_elliptic_curve(backend, sn)

    # The curve instance this key lives on, exposed read-only.
    curve = utils.read_only_property("_curve")

    def verifier(self, signature, signature_algorithm):
        """Return a verification context for ``signature``.

        :raises TypeError: if ``signature`` is not bytes.
        :raises UnsupportedAlgorithm: for non-ECDSA algorithms.
        """
        if not isinstance(signature, bytes):
            raise TypeError("signature must be bytes.")

        if isinstance(signature_algorithm, ec.ECDSA):
            return _ECDSAVerificationContext(
                self._backend, self, signature, signature_algorithm.algorithm
            )
        else:
            raise UnsupportedAlgorithm(
                "Unsupported elliptic curve signature algorithm.",
                _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM)

    def public_numbers(self):
        """Return the affine point (x, y) as ``ec.EllipticCurvePublicNumbers``."""
        # The backend picks prime-field vs binary-field coordinate getters.
        set_func, get_func, group = (
            self._backend._ec_key_determine_group_get_set_funcs(self._ec_key)
        )
        point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key)
        self._backend.openssl_assert(point != self._backend._ffi.NULL)

        with self._backend._tmp_bn_ctx() as bn_ctx:
            bn_x = self._backend._lib.BN_CTX_get(bn_ctx)
            bn_y = self._backend._lib.BN_CTX_get(bn_ctx)

            res = get_func(group, point, bn_x, bn_y, bn_ctx)
            self._backend.openssl_assert(res == 1)

            x = self._backend._bn_to_int(bn_x)
            y = self._backend._bn_to_int(bn_y)

        return ec.EllipticCurvePublicNumbers(
            x=x,
            y=y,
            curve=self._curve
        )

    def public_bytes(self, encoding, format):
        """Serialize the public key.

        :raises ValueError: if PKCS1 format is requested (not defined
            for EC keys).
        """
        if format is serialization.PublicFormat.PKCS1:
            raise ValueError(
                "EC public keys do not support PKCS1 serialization"
            )

        return self._backend._public_key_bytes(
            encoding,
            format,
            self,
            self._evp_pkey,
            None
        )
|
|
@ -0,0 +1,592 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
import calendar
|
||||||
|
|
||||||
|
import idna
|
||||||
|
|
||||||
|
import six
|
||||||
|
|
||||||
|
from cryptography import x509
|
||||||
|
from cryptography.hazmat.backends.openssl.decode_asn1 import (
|
||||||
|
_CRL_ENTRY_REASON_ENUM_TO_CODE, _DISTPOINT_TYPE_FULLNAME,
|
||||||
|
_DISTPOINT_TYPE_RELATIVENAME
|
||||||
|
)
|
||||||
|
from cryptography.x509.oid import CRLEntryExtensionOID, ExtensionOID, NameOID
|
||||||
|
|
||||||
|
|
||||||
|
def _encode_asn1_int(backend, x):
    """
    Converts a python integer to an ASN1_INTEGER. The returned ASN1_INTEGER
    will not be garbage collected (to support adding them to structs that take
    ownership of the object). Be sure to register it for GC if it will be
    discarded after use.

    """
    # Convert Python integer to OpenSSL "bignum" in case value exceeds
    # machine's native integer limits (note: `int_to_bn` doesn't automatically
    # GC).
    i = backend._int_to_bn(x)
    i = backend._ffi.gc(i, backend._lib.BN_free)

    # Wrap in an ASN.1 integer. Don't GC -- as documented.
    i = backend._lib.BN_to_ASN1_INTEGER(i, backend._ffi.NULL)
    backend.openssl_assert(i != backend._ffi.NULL)
    return i
|
||||||
|
|
||||||
|
|
||||||
|
def _encode_asn1_int_gc(backend, x):
    """Like :func:`_encode_asn1_int` but registers the result for GC."""
    return backend._ffi.gc(
        _encode_asn1_int(backend, x), backend._lib.ASN1_INTEGER_free
    )
|
||||||
|
|
||||||
|
|
||||||
|
def _encode_asn1_str(backend, data, length):
    """
    Create an ASN1_OCTET_STRING from a Python byte string.
    """
    octet_string = backend._lib.ASN1_OCTET_STRING_new()
    res = backend._lib.ASN1_OCTET_STRING_set(octet_string, data, length)
    backend.openssl_assert(res == 1)
    return octet_string
|
||||||
|
|
||||||
|
|
||||||
|
def _encode_asn1_utf8_str(backend, string):
    """
    Create an ASN1_UTF8STRING from a Python unicode string.
    This object will be an ASN1_STRING with UTF8 type in OpenSSL and
    can be decoded with ASN1_STRING_to_UTF8.

    The result is not registered for GC; callers typically hand ownership
    to an OpenSSL struct.
    """
    # Encode once; the original encoded the string twice (once for the
    # data pointer and once for the length).
    data = string.encode("utf8")
    s = backend._lib.ASN1_UTF8STRING_new()
    res = backend._lib.ASN1_STRING_set(s, data, len(data))
    backend.openssl_assert(res == 1)
    return s
|
||||||
|
|
||||||
|
|
||||||
|
def _encode_asn1_str_gc(backend, data, length):
    """Like :func:`_encode_asn1_str` but registers the result for GC."""
    return backend._ffi.gc(
        _encode_asn1_str(backend, data, length),
        backend._lib.ASN1_OCTET_STRING_free
    )
|
||||||
|
|
||||||
|
|
||||||
|
def _encode_inhibit_any_policy(backend, inhibit_any_policy):
    """Encode InhibitAnyPolicy: its skip_certs count as a GC'd ASN1_INTEGER."""
    skip_certs = inhibit_any_policy.skip_certs
    return _encode_asn1_int_gc(backend, skip_certs)
|
||||||
|
|
||||||
|
|
||||||
|
def _encode_name(backend, attributes):
    """
    Build an X509_NAME from an iterable of name attributes.

    The X509_NAME created will not be gc'd. Use _encode_name_gc if needed.
    """
    subject = backend._lib.X509_NAME_new()
    for attr in attributes:
        entry = _encode_name_entry(backend, attr)
        # loc=-1 appends; set=0 starts a new RDN for each attribute.
        res = backend._lib.X509_NAME_add_entry(subject, entry, -1, 0)
        backend.openssl_assert(res == 1)
    return subject
|
||||||
|
|
||||||
|
|
||||||
|
def _encode_name_gc(backend, attributes):
    """GC'd variant of :func:`_encode_name`."""
    return backend._ffi.gc(
        _encode_name(backend, attributes), backend._lib.X509_NAME_free
    )
|
||||||
|
|
||||||
|
|
||||||
|
def _encode_sk_name_entry(backend, attributes):
    """
    Build a STACK_OF(X509_NAME_ENTRY) from an iterable of name attributes.

    The sk_X509_NAME_ENTRY created will not be gc'd.
    """
    stack = backend._lib.sk_X509_NAME_ENTRY_new_null()
    for attribute in attributes:
        name_entry = _encode_name_entry(backend, attribute)
        res = backend._lib.sk_X509_NAME_ENTRY_push(stack, name_entry)
        # sk_*_push returns the new number of elements on success (0 on
        # failure), so the second push returns 2 -- asserting `res == 1`
        # as before would incorrectly fail for multi-attribute names.
        backend.openssl_assert(res >= 1)
    return stack
|
||||||
|
|
||||||
|
|
||||||
|
def _encode_name_entry(backend, attribute):
    """Build an X509_NAME_ENTRY for a single name attribute.

    The returned entry is not registered for GC; callers hand ownership
    to an X509_NAME or a stack.
    """
    value = attribute.value.encode('utf8')
    obj = _txt2obj_gc(backend, attribute.oid.dotted_string)
    # Renamed local from ``type`` to avoid shadowing the builtin.
    if attribute.oid == NameOID.COUNTRY_NAME:
        # Per RFC5280 Appendix A.1 countryName should be encoded as
        # PrintableString, not UTF8String
        string_type = backend._lib.MBSTRING_ASC
    else:
        string_type = backend._lib.MBSTRING_UTF8
    name_entry = backend._lib.X509_NAME_ENTRY_create_by_OBJ(
        backend._ffi.NULL, obj, string_type, value, -1
    )
    return name_entry
|
||||||
|
|
||||||
|
|
||||||
|
def _encode_crl_number(backend, crl_number):
    """Encode a CRLNumber extension as a GC'd ASN1_INTEGER."""
    number = crl_number.crl_number
    return _encode_asn1_int_gc(backend, number)
|
||||||
|
|
||||||
|
|
||||||
|
def _encode_crl_reason(backend, crl_reason):
    """Encode a CRLReason extension value as a GC'd ASN1_ENUMERATED."""
    reason_code = _CRL_ENTRY_REASON_ENUM_TO_CODE[crl_reason.reason]

    asn1enum = backend._lib.ASN1_ENUMERATED_new()
    backend.openssl_assert(asn1enum != backend._ffi.NULL)
    asn1enum = backend._ffi.gc(asn1enum, backend._lib.ASN1_ENUMERATED_free)

    res = backend._lib.ASN1_ENUMERATED_set(asn1enum, reason_code)
    backend.openssl_assert(res == 1)
    return asn1enum
|
||||||
|
|
||||||
|
|
||||||
|
def _encode_invalidity_date(backend, invalidity_date):
    """Encode an InvalidityDate extension as a GC'd ASN1_GENERALIZEDTIME."""
    # Convert the datetime to seconds since the epoch (treated as UTC).
    epoch_seconds = calendar.timegm(
        invalidity_date.invalidity_date.timetuple()
    )
    time = backend._lib.ASN1_GENERALIZEDTIME_set(
        backend._ffi.NULL, epoch_seconds
    )
    backend.openssl_assert(time != backend._ffi.NULL)
    return backend._ffi.gc(time, backend._lib.ASN1_GENERALIZEDTIME_free)
|
||||||
|
|
||||||
|
|
||||||
|
def _encode_certificate_policies(backend, certificate_policies):
    """Encode a CertificatePolicies extension as a GC'd STACK_OF(POLICYINFO).

    Sub-objects (POLICYINFO, POLICYQUALINFO, ...) are pushed into their
    parent stacks immediately after creation so the parent owns them and
    they are freed with the top-level stack.
    """
    cp = backend._lib.sk_POLICYINFO_new_null()
    backend.openssl_assert(cp != backend._ffi.NULL)
    cp = backend._ffi.gc(cp, backend._lib.sk_POLICYINFO_free)
    for policy_info in certificate_policies:
        pi = backend._lib.POLICYINFO_new()
        backend.openssl_assert(pi != backend._ffi.NULL)
        # Push first so the stack owns pi even if a later step fails.
        res = backend._lib.sk_POLICYINFO_push(cp, pi)
        # sk_*_push returns the new element count on success.
        backend.openssl_assert(res >= 1)
        oid = _txt2obj(backend, policy_info.policy_identifier.dotted_string)
        pi.policyid = oid
        if policy_info.policy_qualifiers:
            pqis = backend._lib.sk_POLICYQUALINFO_new_null()
            backend.openssl_assert(pqis != backend._ffi.NULL)
            for qualifier in policy_info.policy_qualifiers:
                pqi = backend._lib.POLICYQUALINFO_new()
                backend.openssl_assert(pqi != backend._ffi.NULL)
                res = backend._lib.sk_POLICYQUALINFO_push(pqis, pqi)
                backend.openssl_assert(res >= 1)
                if isinstance(qualifier, six.text_type):
                    # A plain string qualifier is a CPS URI.
                    pqi.pqualid = _txt2obj(
                        backend, x509.OID_CPS_QUALIFIER.dotted_string
                    )
                    pqi.d.cpsuri = _encode_asn1_str(
                        backend,
                        qualifier.encode("ascii"),
                        len(qualifier.encode("ascii"))
                    )
                else:
                    # Otherwise it must be a UserNotice qualifier.
                    assert isinstance(qualifier, x509.UserNotice)
                    pqi.pqualid = _txt2obj(
                        backend, x509.OID_CPS_USER_NOTICE.dotted_string
                    )
                    un = backend._lib.USERNOTICE_new()
                    backend.openssl_assert(un != backend._ffi.NULL)
                    pqi.d.usernotice = un
                    if qualifier.explicit_text:
                        un.exptext = _encode_asn1_utf8_str(
                            backend, qualifier.explicit_text
                        )

                    # NULL when notice_reference is None.
                    un.noticeref = _encode_notice_reference(
                        backend, qualifier.notice_reference
                    )

            pi.qualifiers = pqis

    return cp
|
||||||
|
|
||||||
|
|
||||||
|
def _encode_notice_reference(backend, notice):
    """Encode an x509.NoticeReference (or None) as a NOTICEREF.

    Returns ffi.NULL when *notice* is None. The NOTICEREF is not GC'd;
    callers assign it into a struct that takes ownership.
    """
    if notice is None:
        return backend._ffi.NULL
    else:
        nr = backend._lib.NOTICEREF_new()
        backend.openssl_assert(nr != backend._ffi.NULL)
        # organization is a required field
        nr.organization = _encode_asn1_utf8_str(backend, notice.organization)

        notice_stack = backend._lib.sk_ASN1_INTEGER_new_null()
        nr.noticenos = notice_stack
        for number in notice.notice_numbers:
            num = _encode_asn1_int(backend, number)
            res = backend._lib.sk_ASN1_INTEGER_push(notice_stack, num)
            backend.openssl_assert(res >= 1)

        return nr
|
||||||
|
|
||||||
|
|
||||||
|
def _txt2obj(backend, name):
    """
    Converts a Python string with an ASN.1 object ID in dotted form to a
    ASN1_OBJECT.
    """
    # The second argument (1) restricts OBJ_txt2obj to numerical dotted
    # form; no short/long name lookup is attempted.
    encoded = name.encode('ascii')
    obj = backend._lib.OBJ_txt2obj(encoded, 1)
    backend.openssl_assert(obj != backend._ffi.NULL)
    return obj
|
||||||
|
|
||||||
|
|
||||||
|
def _txt2obj_gc(backend, name):
    """Like :func:`_txt2obj` but registers the object for GC."""
    return backend._ffi.gc(
        _txt2obj(backend, name), backend._lib.ASN1_OBJECT_free
    )
|
||||||
|
|
||||||
|
|
||||||
|
def _encode_ocsp_nocheck(backend, ext):
    """
    The OCSP No Check extension is defined as a null ASN.1 value embedded in
    an ASN.1 string.
    """
    # b"\x05\x00" is the DER encoding of ASN.1 NULL.
    null_der = b"\x05\x00"
    return _encode_asn1_str_gc(backend, null_der, len(null_der))
|
||||||
|
|
||||||
|
|
||||||
|
def _encode_key_usage(backend, key_usage):
    """Encode a KeyUsage extension as a GC'd ASN1_BIT_STRING.

    Bit positions follow the RFC 5280 KeyUsage BIT STRING definition
    (0 = digitalSignature ... 8 = decipherOnly).
    """
    set_bit = backend._lib.ASN1_BIT_STRING_set_bit
    ku = backend._lib.ASN1_BIT_STRING_new()
    ku = backend._ffi.gc(ku, backend._lib.ASN1_BIT_STRING_free)
    res = set_bit(ku, 0, key_usage.digital_signature)
    backend.openssl_assert(res == 1)
    res = set_bit(ku, 1, key_usage.content_commitment)
    backend.openssl_assert(res == 1)
    res = set_bit(ku, 2, key_usage.key_encipherment)
    backend.openssl_assert(res == 1)
    res = set_bit(ku, 3, key_usage.data_encipherment)
    backend.openssl_assert(res == 1)
    res = set_bit(ku, 4, key_usage.key_agreement)
    backend.openssl_assert(res == 1)
    res = set_bit(ku, 5, key_usage.key_cert_sign)
    backend.openssl_assert(res == 1)
    res = set_bit(ku, 6, key_usage.crl_sign)
    backend.openssl_assert(res == 1)
    # encipher_only/decipher_only are only meaningful when key_agreement
    # is set; otherwise force them to 0.
    if key_usage.key_agreement:
        res = set_bit(ku, 7, key_usage.encipher_only)
        backend.openssl_assert(res == 1)
        res = set_bit(ku, 8, key_usage.decipher_only)
        backend.openssl_assert(res == 1)
    else:
        res = set_bit(ku, 7, 0)
        backend.openssl_assert(res == 1)
        res = set_bit(ku, 8, 0)
        backend.openssl_assert(res == 1)

    return ku
|
||||||
|
|
||||||
|
|
||||||
|
def _encode_authority_key_identifier(backend, authority_keyid):
    """Encode an AuthorityKeyIdentifier extension as a GC'd AUTHORITY_KEYID.

    All three components are optional; unset ones are left NULL in the
    struct. Sub-objects are owned by the AUTHORITY_KEYID and freed with it.
    """
    akid = backend._lib.AUTHORITY_KEYID_new()
    backend.openssl_assert(akid != backend._ffi.NULL)
    akid = backend._ffi.gc(akid, backend._lib.AUTHORITY_KEYID_free)
    if authority_keyid.key_identifier is not None:
        akid.keyid = _encode_asn1_str(
            backend,
            authority_keyid.key_identifier,
            len(authority_keyid.key_identifier)
        )

    if authority_keyid.authority_cert_issuer is not None:
        akid.issuer = _encode_general_names(
            backend, authority_keyid.authority_cert_issuer
        )

    if authority_keyid.authority_cert_serial_number is not None:
        akid.serial = _encode_asn1_int(
            backend, authority_keyid.authority_cert_serial_number
        )

    return akid
|
||||||
|
|
||||||
|
|
||||||
|
def _encode_basic_constraints(backend, basic_constraints):
    """Encode a BasicConstraints extension as a GC'd BASIC_CONSTRAINTS."""
    constraints = backend._lib.BASIC_CONSTRAINTS_new()
    constraints = backend._ffi.gc(
        constraints, backend._lib.BASIC_CONSTRAINTS_free
    )
    # DER encodes ASN.1 BOOLEAN TRUE as 0xff (255).
    constraints.ca = 255 if basic_constraints.ca else 0
    # pathlen is only meaningful (and only encoded) for CA certificates.
    if basic_constraints.ca and basic_constraints.path_length is not None:
        constraints.pathlen = _encode_asn1_int(
            backend, basic_constraints.path_length
        )

    return constraints
|
||||||
|
|
||||||
|
|
||||||
|
def _encode_authority_information_access(backend, authority_info_access):
    """Encode AuthorityInformationAccess as a GC'd STACK_OF(ACCESS_DESCRIPTION).

    Each access description pairs an access-method OID with a GeneralName
    location; both are owned by the ACCESS_DESCRIPTION once assigned.
    """
    aia = backend._lib.sk_ACCESS_DESCRIPTION_new_null()
    backend.openssl_assert(aia != backend._ffi.NULL)
    aia = backend._ffi.gc(
        aia, backend._lib.sk_ACCESS_DESCRIPTION_free
    )
    for access_description in authority_info_access:
        ad = backend._lib.ACCESS_DESCRIPTION_new()
        method = _txt2obj(
            backend, access_description.access_method.dotted_string
        )
        gn = _encode_general_name(backend, access_description.access_location)
        ad.method = method
        ad.location = gn
        res = backend._lib.sk_ACCESS_DESCRIPTION_push(aia, ad)
        # sk_*_push returns the new element count on success.
        backend.openssl_assert(res >= 1)

    return aia
|
||||||
|
|
||||||
|
|
||||||
|
def _encode_general_names(backend, names):
    """Build a GENERAL_NAMES stack (not GC'd) from an iterable of names."""
    general_names = backend._lib.GENERAL_NAMES_new()
    backend.openssl_assert(general_names != backend._ffi.NULL)
    for name in names:
        encoded = _encode_general_name(backend, name)
        res = backend._lib.sk_GENERAL_NAME_push(general_names, encoded)
        backend.openssl_assert(res != 0)

    return general_names
|
||||||
|
|
||||||
|
|
||||||
|
def _encode_alt_name(backend, san):
    """Encode a Subject/IssuerAlternativeName as GC'd GENERAL_NAMES."""
    return backend._ffi.gc(
        _encode_general_names(backend, san),
        backend._lib.GENERAL_NAMES_free
    )
|
||||||
|
|
||||||
|
|
||||||
|
def _encode_subject_key_identifier(backend, ski):
    """Encode a SubjectKeyIdentifier as a GC'd octet string of its digest."""
    digest = ski.digest
    return _encode_asn1_str_gc(backend, digest, len(digest))
|
||||||
|
|
||||||
|
|
||||||
|
def _encode_general_name(backend, name):
    """Encode one x509 GeneralName as an OpenSSL GENERAL_NAME (not GC'd).

    Dispatches on the concrete name type; the value object assigned into
    ``gn.d.*`` is owned by the GENERAL_NAME once set.

    :raises ValueError: for unknown GeneralName types or invalid
        OtherName ASN.1 payloads.
    """
    if isinstance(name, x509.DNSName):
        gn = backend._lib.GENERAL_NAME_new()
        backend.openssl_assert(gn != backend._ffi.NULL)
        gn.type = backend._lib.GEN_DNS

        ia5 = backend._lib.ASN1_IA5STRING_new()
        backend.openssl_assert(ia5 != backend._ffi.NULL)

        # IDNA-encode the hostname, preserving a leading wildcard label
        # which idna.encode would reject.
        if name.value.startswith(u"*."):
            value = b"*." + idna.encode(name.value[2:])
        else:
            value = idna.encode(name.value)

        res = backend._lib.ASN1_STRING_set(ia5, value, len(value))
        backend.openssl_assert(res == 1)
        gn.d.dNSName = ia5
    elif isinstance(name, x509.RegisteredID):
        gn = backend._lib.GENERAL_NAME_new()
        backend.openssl_assert(gn != backend._ffi.NULL)
        gn.type = backend._lib.GEN_RID
        obj = backend._lib.OBJ_txt2obj(
            name.value.dotted_string.encode('ascii'), 1
        )
        backend.openssl_assert(obj != backend._ffi.NULL)
        gn.d.registeredID = obj
    elif isinstance(name, x509.DirectoryName):
        gn = backend._lib.GENERAL_NAME_new()
        backend.openssl_assert(gn != backend._ffi.NULL)
        dir_name = _encode_name(backend, name.value)
        gn.type = backend._lib.GEN_DIRNAME
        gn.d.directoryName = dir_name
    elif isinstance(name, x509.IPAddress):
        gn = backend._lib.GENERAL_NAME_new()
        backend.openssl_assert(gn != backend._ffi.NULL)
        # .packed is the raw 4- or 16-byte network representation.
        ipaddr = _encode_asn1_str(
            backend, name.value.packed, len(name.value.packed)
        )
        gn.type = backend._lib.GEN_IPADD
        gn.d.iPAddress = ipaddr
    elif isinstance(name, x509.OtherName):
        gn = backend._lib.GENERAL_NAME_new()
        backend.openssl_assert(gn != backend._ffi.NULL)
        other_name = backend._lib.OTHERNAME_new()
        backend.openssl_assert(other_name != backend._ffi.NULL)

        type_id = backend._lib.OBJ_txt2obj(
            name.type_id.dotted_string.encode('ascii'), 1
        )
        backend.openssl_assert(type_id != backend._ffi.NULL)
        # d2i_ASN1_TYPE parses the caller-supplied DER bytes; it advances
        # the data pointer, hence the extra level of indirection.
        data = backend._ffi.new("unsigned char[]", name.value)
        data_ptr_ptr = backend._ffi.new("unsigned char **")
        data_ptr_ptr[0] = data
        value = backend._lib.d2i_ASN1_TYPE(
            backend._ffi.NULL, data_ptr_ptr, len(name.value)
        )
        if value == backend._ffi.NULL:
            backend._consume_errors()
            raise ValueError("Invalid ASN.1 data")
        other_name.type_id = type_id
        other_name.value = value
        gn.type = backend._lib.GEN_OTHERNAME
        gn.d.otherName = other_name
    elif isinstance(name, x509.RFC822Name):
        gn = backend._lib.GENERAL_NAME_new()
        backend.openssl_assert(gn != backend._ffi.NULL)
        # name._encoded is the pre-encoded ASCII form of the address.
        asn1_str = _encode_asn1_str(
            backend, name._encoded, len(name._encoded)
        )
        gn.type = backend._lib.GEN_EMAIL
        gn.d.rfc822Name = asn1_str
    elif isinstance(name, x509.UniformResourceIdentifier):
        gn = backend._lib.GENERAL_NAME_new()
        backend.openssl_assert(gn != backend._ffi.NULL)
        asn1_str = _encode_asn1_str(
            backend, name._encoded, len(name._encoded)
        )
        gn.type = backend._lib.GEN_URI
        gn.d.uniformResourceIdentifier = asn1_str
    else:
        raise ValueError(
            "{0} is an unknown GeneralName type".format(name)
        )

    return gn
|
||||||
|
|
||||||
|
|
||||||
|
def _encode_extended_key_usage(backend, extended_key_usage):
    """Encode ExtendedKeyUsage as a GC'd stack of ASN1_OBJECTs."""
    eku = backend._ffi.gc(
        backend._lib.sk_ASN1_OBJECT_new_null(),
        backend._lib.sk_ASN1_OBJECT_free
    )
    for oid in extended_key_usage:
        obj = _txt2obj(backend, oid.dotted_string)
        res = backend._lib.sk_ASN1_OBJECT_push(eku, obj)
        backend.openssl_assert(res >= 1)

    return eku
|
||||||
|
|
||||||
|
|
||||||
|
# Bit positions of each x509.ReasonFlags member within the ASN.1
# ReasonFlags BIT STRING used in CRL distribution points
# (see RFC 5280 section 4.2.1.13). ``unused`` (bit 0) is not mapped.
_CRLREASONFLAGS = {
    x509.ReasonFlags.key_compromise: 1,
    x509.ReasonFlags.ca_compromise: 2,
    x509.ReasonFlags.affiliation_changed: 3,
    x509.ReasonFlags.superseded: 4,
    x509.ReasonFlags.cessation_of_operation: 5,
    x509.ReasonFlags.certificate_hold: 6,
    x509.ReasonFlags.privilege_withdrawn: 7,
    x509.ReasonFlags.aa_compromise: 8,
}
|
||||||
|
|
||||||
|
|
||||||
|
def _encode_crl_distribution_points(backend, crl_distribution_points):
    """Encode an x509 CRLDistributionPoints value as a STACK_OF(DIST_POINT).

    Only the outer stack is wrapped with ffi.gc; each DIST_POINT's ownership
    is transferred to the stack by sk_DIST_POINT_push, and the sub-structures
    (reasons bitmask, distpoint name, CRLissuer) are owned by the DIST_POINT
    they are assigned to.
    """
    cdp = backend._lib.sk_DIST_POINT_new_null()
    cdp = backend._ffi.gc(cdp, backend._lib.sk_DIST_POINT_free)
    for point in crl_distribution_points:
        dp = backend._lib.DIST_POINT_new()
        backend.openssl_assert(dp != backend._ffi.NULL)

        if point.reasons:
            # One bit per ReasonFlags member, positions per _CRLREASONFLAGS.
            bitmask = backend._lib.ASN1_BIT_STRING_new()
            backend.openssl_assert(bitmask != backend._ffi.NULL)
            dp.reasons = bitmask
            for reason in point.reasons:
                res = backend._lib.ASN1_BIT_STRING_set_bit(
                    bitmask, _CRLREASONFLAGS[reason], 1
                )
                backend.openssl_assert(res == 1)

        if point.full_name:
            dpn = backend._lib.DIST_POINT_NAME_new()
            backend.openssl_assert(dpn != backend._ffi.NULL)
            dpn.type = _DISTPOINT_TYPE_FULLNAME
            dpn.name.fullname = _encode_general_names(backend, point.full_name)
            dp.distpoint = dpn

        # NOTE(review): full_name and relative_name both assign dp.distpoint;
        # mutual exclusivity is presumably enforced by the x509 layer —
        # confirm against x509.DistributionPoint's validation.
        if point.relative_name:
            dpn = backend._lib.DIST_POINT_NAME_new()
            backend.openssl_assert(dpn != backend._ffi.NULL)
            dpn.type = _DISTPOINT_TYPE_RELATIVENAME
            relativename = _encode_sk_name_entry(backend, point.relative_name)
            backend.openssl_assert(relativename != backend._ffi.NULL)
            dpn.name.relativename = relativename
            dp.distpoint = dpn

        if point.crl_issuer:
            dp.CRLissuer = _encode_general_names(backend, point.crl_issuer)

        res = backend._lib.sk_DIST_POINT_push(cdp, dp)
        backend.openssl_assert(res >= 1)

    return cdp
|
||||||
|
|
||||||
|
|
||||||
|
def _encode_name_constraints(backend, name_constraints):
    """Encode an x509.NameConstraints value as a NAME_CONSTRAINTS struct."""
    constraints = backend._lib.NAME_CONSTRAINTS_new()
    backend.openssl_assert(constraints != backend._ffi.NULL)
    constraints = backend._ffi.gc(
        constraints, backend._lib.NAME_CONSTRAINTS_free
    )
    # Either subtree list may be None; _encode_general_subtree then yields
    # ffi NULL, which is the correct "absent" value for the struct fields.
    constraints.permittedSubtrees = _encode_general_subtree(
        backend, name_constraints.permitted_subtrees
    )
    constraints.excludedSubtrees = _encode_general_subtree(
        backend, name_constraints.excluded_subtrees
    )

    return constraints
|
||||||
|
|
||||||
|
|
||||||
|
def _encode_policy_constraints(backend, policy_constraints):
    """Encode an x509.PolicyConstraints value as a POLICY_CONSTRAINTS struct.

    Each optional field is only populated when the corresponding attribute
    is not None; absent fields stay NULL in the struct.
    """
    encoded = backend._lib.POLICY_CONSTRAINTS_new()
    backend.openssl_assert(encoded != backend._ffi.NULL)
    encoded = backend._ffi.gc(encoded, backend._lib.POLICY_CONSTRAINTS_free)

    require_explicit = policy_constraints.require_explicit_policy
    if require_explicit is not None:
        encoded.requireExplicitPolicy = _encode_asn1_int(
            backend, require_explicit
        )

    inhibit_mapping = policy_constraints.inhibit_policy_mapping
    if inhibit_mapping is not None:
        encoded.inhibitPolicyMapping = _encode_asn1_int(
            backend, inhibit_mapping
        )

    return encoded
|
||||||
|
|
||||||
|
|
||||||
|
def _encode_general_subtree(backend, subtrees):
    """Encode an iterable of GeneralName values as STACK_OF(GENERAL_SUBTREE).

    Returns ffi NULL when *subtrees* is None so callers can assign the result
    directly to an optional NAME_CONSTRAINTS field.
    """
    if subtrees is None:
        return backend._ffi.NULL

    general_subtrees = backend._lib.sk_GENERAL_SUBTREE_new_null()
    for name in subtrees:
        gs = backend._lib.GENERAL_SUBTREE_new()
        gs.base = _encode_general_name(backend, name)
        res = backend._lib.sk_GENERAL_SUBTREE_push(general_subtrees, gs)
        # Use openssl_assert rather than a bare assert: asserts are stripped
        # under "python -O", and this matches the error checking style of
        # every other encoder in this module.
        backend.openssl_assert(res >= 1)

    return general_subtrees
|
||||||
|
|
||||||
|
|
||||||
|
# Dispatch table mapping certificate/CSR extension OIDs to the encoder
# function that converts the high-level x509 extension value into its
# OpenSSL struct representation.
_EXTENSION_ENCODE_HANDLERS = {
    ExtensionOID.BASIC_CONSTRAINTS: _encode_basic_constraints,
    ExtensionOID.SUBJECT_KEY_IDENTIFIER: _encode_subject_key_identifier,
    ExtensionOID.KEY_USAGE: _encode_key_usage,
    ExtensionOID.SUBJECT_ALTERNATIVE_NAME: _encode_alt_name,
    ExtensionOID.ISSUER_ALTERNATIVE_NAME: _encode_alt_name,
    ExtensionOID.EXTENDED_KEY_USAGE: _encode_extended_key_usage,
    ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _encode_authority_key_identifier,
    ExtensionOID.CERTIFICATE_POLICIES: _encode_certificate_policies,
    ExtensionOID.AUTHORITY_INFORMATION_ACCESS: (
        _encode_authority_information_access
    ),
    ExtensionOID.CRL_DISTRIBUTION_POINTS: _encode_crl_distribution_points,
    ExtensionOID.INHIBIT_ANY_POLICY: _encode_inhibit_any_policy,
    ExtensionOID.OCSP_NO_CHECK: _encode_ocsp_nocheck,
    ExtensionOID.NAME_CONSTRAINTS: _encode_name_constraints,
    ExtensionOID.POLICY_CONSTRAINTS: _encode_policy_constraints,
}
|
||||||
|
|
||||||
|
# Dispatch table for extensions attached to a CRL itself (as opposed to
# individual revoked-certificate entries below).
_CRL_EXTENSION_ENCODE_HANDLERS = {
    ExtensionOID.ISSUER_ALTERNATIVE_NAME: _encode_alt_name,
    ExtensionOID.AUTHORITY_KEY_IDENTIFIER: _encode_authority_key_identifier,
    ExtensionOID.AUTHORITY_INFORMATION_ACCESS: (
        _encode_authority_information_access
    ),
    ExtensionOID.CRL_NUMBER: _encode_crl_number,
}
|
||||||
|
|
||||||
|
# Dispatch table for extensions attached to a single revoked-certificate
# entry within a CRL.
_CRL_ENTRY_EXTENSION_ENCODE_HANDLERS = {
    CRLEntryExtensionOID.CERTIFICATE_ISSUER: _encode_alt_name,
    CRLEntryExtensionOID.CRL_REASON: _encode_crl_reason,
    CRLEntryExtensionOID.INVALIDITY_DATE: _encode_invalidity_date,
}
|
|
@ -0,0 +1,61 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
|
||||||
|
from cryptography import utils
|
||||||
|
from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
|
||||||
|
from cryptography.hazmat.primitives import hashes
|
||||||
|
|
||||||
|
|
||||||
|
@utils.register_interface(hashes.HashContext)
class _HashContext(object):
    """OpenSSL EVP_MD_CTX-backed implementation of hashes.HashContext.

    An already-initialized *ctx* may be passed (used by copy()); otherwise a
    fresh context is allocated, tied to the garbage collector via ffi.gc,
    and initialized for *algorithm*.
    """

    def __init__(self, backend, algorithm, ctx=None):
        self._algorithm = algorithm

        self._backend = backend

        if ctx is None:
            ctx = self._backend._lib.Cryptography_EVP_MD_CTX_new()
            # Freed automatically when the Python wrapper is collected.
            ctx = self._backend._ffi.gc(
                ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free
            )
            evp_md = self._backend._lib.EVP_get_digestbyname(
                algorithm.name.encode("ascii"))
            if evp_md == self._backend._ffi.NULL:
                raise UnsupportedAlgorithm(
                    "{0} is not a supported hash on this backend.".format(
                        algorithm.name),
                    _Reasons.UNSUPPORTED_HASH
                )
            res = self._backend._lib.EVP_DigestInit_ex(ctx, evp_md,
                                                       self._backend._ffi.NULL)
            self._backend.openssl_assert(res != 0)

        self._ctx = ctx

    algorithm = utils.read_only_property("_algorithm")

    def copy(self):
        """Return an independent _HashContext sharing this digest's state."""
        copied_ctx = self._backend._lib.Cryptography_EVP_MD_CTX_new()
        copied_ctx = self._backend._ffi.gc(
            copied_ctx, self._backend._lib.Cryptography_EVP_MD_CTX_free
        )
        res = self._backend._lib.EVP_MD_CTX_copy_ex(copied_ctx, self._ctx)
        self._backend.openssl_assert(res != 0)
        return _HashContext(self._backend, self.algorithm, ctx=copied_ctx)

    def update(self, data):
        """Feed *data* (bytes) into the running digest."""
        res = self._backend._lib.EVP_DigestUpdate(self._ctx, data, len(data))
        self._backend.openssl_assert(res != 0)

    def finalize(self):
        """Return the final digest as bytes of algorithm.digest_size length."""
        buf = self._backend._ffi.new("unsigned char[]",
                                     self._backend._lib.EVP_MAX_MD_SIZE)
        outlen = self._backend._ffi.new("unsigned int *")
        res = self._backend._lib.EVP_DigestFinal_ex(self._ctx, buf, outlen)
        self._backend.openssl_assert(res != 0)
        self._backend.openssl_assert(outlen[0] == self.algorithm.digest_size)
        return self._backend._ffi.buffer(buf)[:outlen[0]]
|
|
@ -0,0 +1,80 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
|
||||||
|
from cryptography import utils
|
||||||
|
from cryptography.exceptions import (
|
||||||
|
InvalidSignature, UnsupportedAlgorithm, _Reasons
|
||||||
|
)
|
||||||
|
from cryptography.hazmat.primitives import constant_time, hashes, interfaces
|
||||||
|
|
||||||
|
|
||||||
|
@utils.register_interface(interfaces.MACContext)
@utils.register_interface(hashes.HashContext)
class _HMACContext(object):
    """OpenSSL HMAC_CTX-backed MAC/hash context.

    An already-initialized *ctx* may be passed (used by copy()); otherwise a
    fresh HMAC context is allocated, gc-tied, and keyed with *key* for
    *algorithm*.
    """

    def __init__(self, backend, key, algorithm, ctx=None):
        self._algorithm = algorithm
        self._backend = backend

        if ctx is None:
            ctx = self._backend._lib.Cryptography_HMAC_CTX_new()
            self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
            # Freed automatically when the Python wrapper is collected.
            ctx = self._backend._ffi.gc(
                ctx, self._backend._lib.Cryptography_HMAC_CTX_free
            )
            evp_md = self._backend._lib.EVP_get_digestbyname(
                algorithm.name.encode('ascii'))
            if evp_md == self._backend._ffi.NULL:
                raise UnsupportedAlgorithm(
                    "{0} is not a supported hash on this backend.".format(
                        algorithm.name),
                    _Reasons.UNSUPPORTED_HASH
                )
            res = self._backend._lib.Cryptography_HMAC_Init_ex(
                ctx, key, len(key), evp_md, self._backend._ffi.NULL
            )
            self._backend.openssl_assert(res != 0)

        self._ctx = ctx
        # Retained so copy() can construct the duplicate wrapper.
        self._key = key

    algorithm = utils.read_only_property("_algorithm")

    def copy(self):
        """Return an independent _HMACContext sharing this MAC's state."""
        copied_ctx = self._backend._lib.Cryptography_HMAC_CTX_new()
        self._backend.openssl_assert(copied_ctx != self._backend._ffi.NULL)
        copied_ctx = self._backend._ffi.gc(
            copied_ctx, self._backend._lib.Cryptography_HMAC_CTX_free
        )
        res = self._backend._lib.Cryptography_HMAC_CTX_copy(
            copied_ctx, self._ctx
        )
        self._backend.openssl_assert(res != 0)
        return _HMACContext(
            self._backend, self._key, self.algorithm, ctx=copied_ctx
        )

    def update(self, data):
        """Feed *data* (bytes) into the running MAC."""
        res = self._backend._lib.Cryptography_HMAC_Update(
            self._ctx, data, len(data)
        )
        self._backend.openssl_assert(res != 0)

    def finalize(self):
        """Return the final MAC as bytes of algorithm.digest_size length."""
        buf = self._backend._ffi.new("unsigned char[]",
                                     self._backend._lib.EVP_MAX_MD_SIZE)
        outlen = self._backend._ffi.new("unsigned int *")
        res = self._backend._lib.Cryptography_HMAC_Final(
            self._ctx, buf, outlen
        )
        self._backend.openssl_assert(res != 0)
        self._backend.openssl_assert(outlen[0] == self.algorithm.digest_size)
        return self._backend._ffi.buffer(buf)[:outlen[0]]

    def verify(self, signature):
        """Finalize and compare against *signature* in constant time.

        Raises InvalidSignature on mismatch.
        """
        digest = self.finalize()
        if not constant_time.bytes_eq(digest, signature):
            raise InvalidSignature("Signature did not match digest.")
|
|
@ -0,0 +1,674 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
import math
|
||||||
|
|
||||||
|
from cryptography import utils
|
||||||
|
from cryptography.exceptions import (
|
||||||
|
AlreadyFinalized, InvalidSignature, UnsupportedAlgorithm, _Reasons
|
||||||
|
)
|
||||||
|
from cryptography.hazmat.primitives import hashes
|
||||||
|
from cryptography.hazmat.primitives.asymmetric import (
|
||||||
|
AsymmetricSignatureContext, AsymmetricVerificationContext, rsa
|
||||||
|
)
|
||||||
|
from cryptography.hazmat.primitives.asymmetric.padding import (
|
||||||
|
AsymmetricPadding, MGF1, OAEP, PKCS1v15, PSS
|
||||||
|
)
|
||||||
|
from cryptography.hazmat.primitives.asymmetric.rsa import (
|
||||||
|
RSAPrivateKeyWithSerialization, RSAPublicKeyWithSerialization
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _get_rsa_pss_salt_length(pss, key_size, digest_size):
    """Return the PSS salt length in bytes for *pss* padding.

    An explicit integer salt length is returned as-is; the MAX_LENGTH
    sentinel yields the largest salt permitted by RFC 3447 for a key of
    *key_size* bits and a digest of *digest_size* bytes.
    """
    requested = pss._salt_length
    if requested is not MGF1.MAX_LENGTH and requested is not PSS.MAX_LENGTH:
        return requested

    # emLen = ceil((modulus bit length - 1) / 8) per RFC 3447; maximum salt
    # is then emLen - hLen - 2.
    em_len = (key_size - 1 + 7) // 8
    max_salt = em_len - digest_size - 2
    assert max_salt >= 0
    return max_salt
|
||||||
|
|
||||||
|
|
||||||
|
def _enc_dec_rsa(backend, key, data, padding):
    # Shared entry point for RSA encryption (public key) and decryption
    # (private key): validates the padding choice, then dispatches to the
    # EVP_PKEY_CTX implementation or the legacy 0.9.8-era fallback.
    if not isinstance(padding, AsymmetricPadding):
        raise TypeError("Padding must be an instance of AsymmetricPadding.")

    if isinstance(padding, PKCS1v15):
        padding_enum = backend._lib.RSA_PKCS1_PADDING
    elif isinstance(padding, OAEP):
        padding_enum = backend._lib.RSA_PKCS1_OAEP_PADDING

        if not isinstance(padding._mgf, MGF1):
            raise UnsupportedAlgorithm(
                "Only MGF1 is supported by this backend.",
                _Reasons.UNSUPPORTED_MGF
            )

        if not backend.rsa_padding_supported(padding):
            raise UnsupportedAlgorithm(
                "This combination of padding and hash algorithm is not "
                "supported by this backend.",
                _Reasons.UNSUPPORTED_PADDING
            )

        # An empty label is treated the same as no label; anything else is
        # rejected because this backend does not implement OAEP labels.
        if padding._label is not None and padding._label != b"":
            raise ValueError("This backend does not support OAEP labels.")

    else:
        raise UnsupportedAlgorithm(
            "{0} is not supported by this backend.".format(
                padding.name
            ),
            _Reasons.UNSUPPORTED_PADDING
        )

    if backend._lib.Cryptography_HAS_PKEY_CTX:
        return _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum, padding)
    else:
        return _enc_dec_rsa_098(backend, key, data, padding_enum)
|
||||||
|
|
||||||
|
|
||||||
|
def _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum, padding):
    # EVP_PKEY_CTX-based RSA encrypt/decrypt. Public keys encrypt; any
    # other key type takes the decrypt path.
    if isinstance(key, _RSAPublicKey):
        init = backend._lib.EVP_PKEY_encrypt_init
        crypt = backend._lib.Cryptography_EVP_PKEY_encrypt
    else:
        init = backend._lib.EVP_PKEY_decrypt_init
        crypt = backend._lib.Cryptography_EVP_PKEY_decrypt

    pkey_ctx = backend._lib.EVP_PKEY_CTX_new(
        key._evp_pkey, backend._ffi.NULL
    )
    backend.openssl_assert(pkey_ctx != backend._ffi.NULL)
    pkey_ctx = backend._ffi.gc(pkey_ctx, backend._lib.EVP_PKEY_CTX_free)
    res = init(pkey_ctx)
    backend.openssl_assert(res == 1)
    res = backend._lib.EVP_PKEY_CTX_set_rsa_padding(
        pkey_ctx, padding_enum)
    backend.openssl_assert(res > 0)
    # EVP_PKEY_size bounds the output length for both directions.
    buf_size = backend._lib.EVP_PKEY_size(key._evp_pkey)
    backend.openssl_assert(buf_size > 0)
    if (
        isinstance(padding, OAEP) and
        backend._lib.Cryptography_HAS_RSA_OAEP_MD
    ):
        # Configure the OAEP and MGF1 digests explicitly when the OpenSSL
        # build exposes the setters.
        mgf1_md = backend._lib.EVP_get_digestbyname(
            padding._mgf._algorithm.name.encode("ascii"))
        backend.openssl_assert(mgf1_md != backend._ffi.NULL)
        res = backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md(pkey_ctx, mgf1_md)
        backend.openssl_assert(res > 0)
        oaep_md = backend._lib.EVP_get_digestbyname(
            padding._algorithm.name.encode("ascii"))
        backend.openssl_assert(oaep_md != backend._ffi.NULL)
        res = backend._lib.EVP_PKEY_CTX_set_rsa_oaep_md(pkey_ctx, oaep_md)
        backend.openssl_assert(res > 0)

    outlen = backend._ffi.new("size_t *", buf_size)
    buf = backend._ffi.new("char[]", buf_size)
    res = crypt(pkey_ctx, buf, outlen, data, len(data))
    if res <= 0:
        # Consumes the OpenSSL error queue and raises ValueError.
        _handle_rsa_enc_dec_error(backend, key)

    return backend._ffi.buffer(buf)[:outlen[0]]
|
||||||
|
|
||||||
|
|
||||||
|
def _enc_dec_rsa_098(backend, key, data, padding_enum):
    """RSA encrypt/decrypt via the legacy RSA_* API (no EVP_PKEY_CTX).

    Public keys encrypt; any other key type takes the decrypt path.
    """
    if isinstance(key, _RSAPublicKey):
        transform = backend._lib.RSA_public_encrypt
    else:
        transform = backend._lib.RSA_private_decrypt

    modulus_len = backend._lib.RSA_size(key._rsa_cdata)
    backend.openssl_assert(modulus_len > 0)
    outbuf = backend._ffi.new("unsigned char[]", modulus_len)
    written = transform(len(data), data, outbuf, key._rsa_cdata, padding_enum)
    if written < 0:
        # Consumes the OpenSSL error queue and raises ValueError.
        _handle_rsa_enc_dec_error(backend, key)

    return backend._ffi.buffer(outbuf)[:written]
|
||||||
|
|
||||||
|
|
||||||
|
def _handle_rsa_enc_dec_error(backend, key):
    """Translate a failed RSA encrypt/decrypt into a ValueError.

    Consumes the OpenSSL error queue and always raises: ValueError with a
    user-facing message for the expected RSA error codes, or the backend's
    internal-error via openssl_assert for anything unexpected. Using
    openssl_assert instead of bare asserts keeps these checks active under
    "python -O" (where asserts are stripped, an empty error queue would
    otherwise surface as an IndexError) and matches the rest of the file.
    """
    errors = backend._consume_errors()
    backend.openssl_assert(errors)
    backend.openssl_assert(errors[0].lib == backend._lib.ERR_LIB_RSA)
    if isinstance(key, _RSAPublicKey):
        # Encryption failure: the only expected cause is oversized plaintext.
        backend.openssl_assert(
            errors[0].reason == backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE
        )
        raise ValueError(
            "Data too long for key size. Encrypt less data or use a "
            "larger key size."
        )
    else:
        decoding_errors = [
            backend._lib.RSA_R_BLOCK_TYPE_IS_NOT_01,
            backend._lib.RSA_R_BLOCK_TYPE_IS_NOT_02,
            backend._lib.RSA_R_OAEP_DECODING_ERROR,
            # Though this error looks similar to the
            # RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE, this occurs on decrypts,
            # rather than on encrypts
            backend._lib.RSA_R_DATA_TOO_LARGE_FOR_MODULUS,
        ]
        if backend._lib.Cryptography_HAS_RSA_R_PKCS_DECODING_ERROR:
            decoding_errors.append(backend._lib.RSA_R_PKCS_DECODING_ERROR)

        backend.openssl_assert(errors[0].reason in decoding_errors)
        raise ValueError("Decryption failed.")
|
||||||
|
|
||||||
|
|
||||||
|
@utils.register_interface(AsymmetricSignatureContext)
class _RSASignatureContext(object):
    """Streaming RSA signing context for PKCS1v15 and PSS padding.

    __init__ validates the padding and selects one of three finalize
    strategies: EVP_PKEY_CTX (modern OpenSSL), EVP_SignFinal (legacy
    PKCS1v15), or manual PSS padding + raw private encrypt (legacy PSS).
    """

    def __init__(self, backend, private_key, padding, algorithm):
        self._backend = backend
        self._private_key = private_key

        if not isinstance(padding, AsymmetricPadding):
            raise TypeError("Expected provider of AsymmetricPadding.")

        self._pkey_size = self._backend._lib.EVP_PKEY_size(
            self._private_key._evp_pkey
        )
        self._backend.openssl_assert(self._pkey_size > 0)

        if isinstance(padding, PKCS1v15):
            if self._backend._lib.Cryptography_HAS_PKEY_CTX:
                self._finalize_method = self._finalize_pkey_ctx
                self._padding_enum = self._backend._lib.RSA_PKCS1_PADDING
            else:
                self._finalize_method = self._finalize_pkcs1
        elif isinstance(padding, PSS):
            if not isinstance(padding._mgf, MGF1):
                raise UnsupportedAlgorithm(
                    "Only MGF1 is supported by this backend.",
                    _Reasons.UNSUPPORTED_MGF
                )

            # Size of key in bytes - 2 is the maximum
            # PSS signature length (salt length is checked later)
            if self._pkey_size - algorithm.digest_size - 2 < 0:
                raise ValueError("Digest too large for key size. Use a larger "
                                 "key.")

            if not self._backend._pss_mgf1_hash_supported(
                padding._mgf._algorithm
            ):
                raise UnsupportedAlgorithm(
                    "When OpenSSL is older than 1.0.1 then only SHA1 is "
                    "supported with MGF1.",
                    _Reasons.UNSUPPORTED_HASH
                )

            if self._backend._lib.Cryptography_HAS_PKEY_CTX:
                self._finalize_method = self._finalize_pkey_ctx
                self._padding_enum = self._backend._lib.RSA_PKCS1_PSS_PADDING
            else:
                self._finalize_method = self._finalize_pss
        else:
            raise UnsupportedAlgorithm(
                "{0} is not supported by this backend.".format(padding.name),
                _Reasons.UNSUPPORTED_PADDING
            )

        self._padding = padding
        self._algorithm = algorithm
        # The message is hashed incrementally; finalize() signs the digest.
        self._hash_ctx = hashes.Hash(self._algorithm, self._backend)

    def update(self, data):
        """Feed more message bytes into the hash being signed."""
        self._hash_ctx.update(data)

    def finalize(self):
        """Finish hashing and return the signature bytes."""
        evp_md = self._backend._lib.EVP_get_digestbyname(
            self._algorithm.name.encode("ascii"))
        self._backend.openssl_assert(evp_md != self._backend._ffi.NULL)

        return self._finalize_method(evp_md)

    def _finalize_pkey_ctx(self, evp_md):
        # Modern path: configure an EVP_PKEY_CTX and sign the digest.
        pkey_ctx = self._backend._lib.EVP_PKEY_CTX_new(
            self._private_key._evp_pkey, self._backend._ffi.NULL
        )
        self._backend.openssl_assert(pkey_ctx != self._backend._ffi.NULL)
        pkey_ctx = self._backend._ffi.gc(pkey_ctx,
                                         self._backend._lib.EVP_PKEY_CTX_free)
        res = self._backend._lib.EVP_PKEY_sign_init(pkey_ctx)
        self._backend.openssl_assert(res == 1)
        res = self._backend._lib.EVP_PKEY_CTX_set_signature_md(
            pkey_ctx, evp_md)
        self._backend.openssl_assert(res > 0)

        res = self._backend._lib.EVP_PKEY_CTX_set_rsa_padding(
            pkey_ctx, self._padding_enum)
        self._backend.openssl_assert(res > 0)
        if isinstance(self._padding, PSS):
            res = self._backend._lib.EVP_PKEY_CTX_set_rsa_pss_saltlen(
                pkey_ctx,
                _get_rsa_pss_salt_length(
                    self._padding,
                    self._private_key.key_size,
                    self._hash_ctx.algorithm.digest_size
                )
            )
            self._backend.openssl_assert(res > 0)

            if self._backend._lib.Cryptography_HAS_MGF1_MD:
                # MGF1 MD is configurable in OpenSSL 1.0.1+
                mgf1_md = self._backend._lib.EVP_get_digestbyname(
                    self._padding._mgf._algorithm.name.encode("ascii"))
                self._backend.openssl_assert(
                    mgf1_md != self._backend._ffi.NULL
                )
                res = self._backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md(
                    pkey_ctx, mgf1_md
                )
                self._backend.openssl_assert(res > 0)
        data_to_sign = self._hash_ctx.finalize()
        # First EVP_PKEY_sign call (NULL buffer) queries the required size.
        buflen = self._backend._ffi.new("size_t *")
        res = self._backend._lib.EVP_PKEY_sign(
            pkey_ctx,
            self._backend._ffi.NULL,
            buflen,
            data_to_sign,
            len(data_to_sign)
        )
        self._backend.openssl_assert(res == 1)
        buf = self._backend._ffi.new("unsigned char[]", buflen[0])
        res = self._backend._lib.EVP_PKEY_sign(
            pkey_ctx, buf, buflen, data_to_sign, len(data_to_sign))
        if res != 1:
            # Map the two expected OpenSSL failure reasons to user-facing
            # ValueErrors; anything else fails the assert.
            errors = self._backend._consume_errors()
            assert errors[0].lib == self._backend._lib.ERR_LIB_RSA
            reason = None
            if (errors[0].reason ==
                    self._backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE):
                reason = ("Salt length too long for key size. Try using "
                          "MAX_LENGTH instead.")
            else:
                assert (errors[0].reason ==
                        self._backend._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY)
                reason = "Digest too large for key size. Use a larger key."
            assert reason is not None
            raise ValueError(reason)

        return self._backend._ffi.buffer(buf)[:]

    def _finalize_pkcs1(self, evp_md):
        # Legacy path: EVP_SignFinal against the raw hash context.
        if self._hash_ctx._ctx is None:
            raise AlreadyFinalized("Context has already been finalized.")

        sig_buf = self._backend._ffi.new("char[]", self._pkey_size)
        sig_len = self._backend._ffi.new("unsigned int *")
        res = self._backend._lib.EVP_SignFinal(
            self._hash_ctx._ctx._ctx,
            sig_buf,
            sig_len,
            self._private_key._evp_pkey
        )
        # Finalize the wrapper regardless of the outcome so the context is
        # marked consumed.
        self._hash_ctx.finalize()
        if res == 0:
            errors = self._backend._consume_errors()
            assert errors[0].lib == self._backend._lib.ERR_LIB_RSA
            assert (errors[0].reason ==
                    self._backend._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY)
            raise ValueError("Digest too large for key size. Use a larger "
                             "key.")

        return self._backend._ffi.buffer(sig_buf)[:sig_len[0]]

    def _finalize_pss(self, evp_md):
        # Legacy PSS path: apply PSS padding manually, then raw-sign it.
        data_to_sign = self._hash_ctx.finalize()
        padded = self._backend._ffi.new("unsigned char[]", self._pkey_size)
        res = self._backend._lib.RSA_padding_add_PKCS1_PSS(
            self._private_key._rsa_cdata,
            padded,
            data_to_sign,
            evp_md,
            _get_rsa_pss_salt_length(
                self._padding,
                self._private_key.key_size,
                len(data_to_sign)
            )
        )
        if res != 1:
            errors = self._backend._consume_errors()
            assert errors[0].lib == self._backend._lib.ERR_LIB_RSA
            assert (errors[0].reason ==
                    self._backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE)
            raise ValueError("Salt length too long for key size. Try using "
                             "MAX_LENGTH instead.")

        sig_buf = self._backend._ffi.new("char[]", self._pkey_size)
        sig_len = self._backend._lib.RSA_private_encrypt(
            self._pkey_size,
            padded,
            sig_buf,
            self._private_key._rsa_cdata,
            self._backend._lib.RSA_NO_PADDING
        )
        self._backend.openssl_assert(sig_len != -1)
        return self._backend._ffi.buffer(sig_buf)[:sig_len]
|
||||||
|
|
||||||
|
|
||||||
|
@utils.register_interface(AsymmetricVerificationContext)
class _RSAVerificationContext(object):
    """Streaming RSA signature verification for PKCS1v15 and PSS padding.

    Mirrors _RSASignatureContext: __init__ validates the padding and
    selects one of three verify strategies (EVP_PKEY_CTX, legacy
    EVP_VerifyFinal, or raw public decrypt + RSA_verify_PKCS1_PSS).
    verify() raises InvalidSignature on mismatch and returns None on
    success.
    """

    def __init__(self, backend, public_key, signature, padding, algorithm):
        self._backend = backend
        self._public_key = public_key
        self._signature = signature

        if not isinstance(padding, AsymmetricPadding):
            raise TypeError("Expected provider of AsymmetricPadding.")

        self._pkey_size = self._backend._lib.EVP_PKEY_size(
            self._public_key._evp_pkey
        )
        self._backend.openssl_assert(self._pkey_size > 0)

        if isinstance(padding, PKCS1v15):
            if self._backend._lib.Cryptography_HAS_PKEY_CTX:
                self._verify_method = self._verify_pkey_ctx
                self._padding_enum = self._backend._lib.RSA_PKCS1_PADDING
            else:
                self._verify_method = self._verify_pkcs1
        elif isinstance(padding, PSS):
            if not isinstance(padding._mgf, MGF1):
                raise UnsupportedAlgorithm(
                    "Only MGF1 is supported by this backend.",
                    _Reasons.UNSUPPORTED_MGF
                )

            # Size of key in bytes - 2 is the maximum
            # PSS signature length (salt length is checked later)
            if self._pkey_size - algorithm.digest_size - 2 < 0:
                raise ValueError(
                    "Digest too large for key size. Check that you have the "
                    "correct key and digest algorithm."
                )

            if not self._backend._pss_mgf1_hash_supported(
                padding._mgf._algorithm
            ):
                raise UnsupportedAlgorithm(
                    "When OpenSSL is older than 1.0.1 then only SHA1 is "
                    "supported with MGF1.",
                    _Reasons.UNSUPPORTED_HASH
                )

            if self._backend._lib.Cryptography_HAS_PKEY_CTX:
                self._verify_method = self._verify_pkey_ctx
                self._padding_enum = self._backend._lib.RSA_PKCS1_PSS_PADDING
            else:
                self._verify_method = self._verify_pss
        else:
            raise UnsupportedAlgorithm(
                "{0} is not supported by this backend.".format(padding.name),
                _Reasons.UNSUPPORTED_PADDING
            )

        self._padding = padding
        self._algorithm = algorithm
        # The message is hashed incrementally; verify() checks the digest.
        self._hash_ctx = hashes.Hash(self._algorithm, self._backend)

    def update(self, data):
        """Feed more message bytes into the hash being verified."""
        self._hash_ctx.update(data)

    def verify(self):
        """Finish hashing and check the signature; raises InvalidSignature."""
        evp_md = self._backend._lib.EVP_get_digestbyname(
            self._algorithm.name.encode("ascii"))
        self._backend.openssl_assert(evp_md != self._backend._ffi.NULL)

        self._verify_method(evp_md)

    def _verify_pkey_ctx(self, evp_md):
        # Modern path: configure an EVP_PKEY_CTX and verify the digest.
        pkey_ctx = self._backend._lib.EVP_PKEY_CTX_new(
            self._public_key._evp_pkey, self._backend._ffi.NULL
        )
        self._backend.openssl_assert(pkey_ctx != self._backend._ffi.NULL)
        pkey_ctx = self._backend._ffi.gc(pkey_ctx,
                                         self._backend._lib.EVP_PKEY_CTX_free)
        res = self._backend._lib.EVP_PKEY_verify_init(pkey_ctx)
        self._backend.openssl_assert(res == 1)
        res = self._backend._lib.EVP_PKEY_CTX_set_signature_md(
            pkey_ctx, evp_md)
        self._backend.openssl_assert(res > 0)

        res = self._backend._lib.EVP_PKEY_CTX_set_rsa_padding(
            pkey_ctx, self._padding_enum)
        self._backend.openssl_assert(res > 0)
        if isinstance(self._padding, PSS):
            res = self._backend._lib.EVP_PKEY_CTX_set_rsa_pss_saltlen(
                pkey_ctx,
                _get_rsa_pss_salt_length(
                    self._padding,
                    self._public_key.key_size,
                    self._hash_ctx.algorithm.digest_size
                )
            )
            self._backend.openssl_assert(res > 0)
            if self._backend._lib.Cryptography_HAS_MGF1_MD:
                # MGF1 MD is configurable in OpenSSL 1.0.1+
                mgf1_md = self._backend._lib.EVP_get_digestbyname(
                    self._padding._mgf._algorithm.name.encode("ascii"))
                self._backend.openssl_assert(
                    mgf1_md != self._backend._ffi.NULL
                )
                res = self._backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md(
                    pkey_ctx, mgf1_md
                )
                self._backend.openssl_assert(res > 0)

        data_to_verify = self._hash_ctx.finalize()
        res = self._backend._lib.EVP_PKEY_verify(
            pkey_ctx,
            self._signature,
            len(self._signature),
            data_to_verify,
            len(data_to_verify)
        )
        # The previous call can return negative numbers in the event of an
        # error. This is not a signature failure but we need to fail if it
        # occurs.
        self._backend.openssl_assert(res >= 0)
        if res == 0:
            errors = self._backend._consume_errors()
            assert errors
            raise InvalidSignature

    def _verify_pkcs1(self, evp_md):
        # Legacy path: EVP_VerifyFinal against the raw hash context.
        if self._hash_ctx._ctx is None:
            raise AlreadyFinalized("Context has already been finalized.")

        res = self._backend._lib.EVP_VerifyFinal(
            self._hash_ctx._ctx._ctx,
            self._signature,
            len(self._signature),
            self._public_key._evp_pkey
        )
        # Finalize the wrapper regardless of the outcome so the context is
        # marked consumed.
        self._hash_ctx.finalize()
        # The previous call can return negative numbers in the event of an
        # error. This is not a signature failure but we need to fail if it
        # occurs.
        self._backend.openssl_assert(res >= 0)
        if res == 0:
            errors = self._backend._consume_errors()
            assert errors
            raise InvalidSignature

    def _verify_pss(self, evp_md):
        # Legacy PSS path: raw public decrypt, then check the PSS encoding.
        buf = self._backend._ffi.new("unsigned char[]", self._pkey_size)
        res = self._backend._lib.RSA_public_decrypt(
            len(self._signature),
            self._signature,
            buf,
            self._public_key._rsa_cdata,
            self._backend._lib.RSA_NO_PADDING
        )
        if res != self._pkey_size:
            errors = self._backend._consume_errors()
            assert errors
            raise InvalidSignature

        data_to_verify = self._hash_ctx.finalize()
        res = self._backend._lib.RSA_verify_PKCS1_PSS(
            self._public_key._rsa_cdata,
            data_to_verify,
            evp_md,
            buf,
            _get_rsa_pss_salt_length(
                self._padding,
                self._public_key.key_size,
                len(data_to_verify)
            )
        )
        if res != 1:
            errors = self._backend._consume_errors()
            assert errors
            raise InvalidSignature
|
||||||
|
|
||||||
|
|
||||||
|
@utils.register_interface(RSAPrivateKeyWithSerialization)
|
||||||
|
class _RSAPrivateKey(object):
|
||||||
|
def __init__(self, backend, rsa_cdata, evp_pkey):
|
||||||
|
self._backend = backend
|
||||||
|
self._rsa_cdata = rsa_cdata
|
||||||
|
self._evp_pkey = evp_pkey
|
||||||
|
|
||||||
|
n = self._backend._ffi.new("BIGNUM **")
|
||||||
|
self._backend._lib.RSA_get0_key(
|
||||||
|
self._rsa_cdata, n, self._backend._ffi.NULL,
|
||||||
|
self._backend._ffi.NULL
|
||||||
|
)
|
||||||
|
self._backend.openssl_assert(n[0] != self._backend._ffi.NULL)
|
||||||
|
self._key_size = self._backend._lib.BN_num_bits(n[0])
|
||||||
|
|
||||||
|
key_size = utils.read_only_property("_key_size")
|
||||||
|
|
||||||
|
def signer(self, padding, algorithm):
|
||||||
|
return _RSASignatureContext(self._backend, self, padding, algorithm)
|
||||||
|
|
||||||
|
def decrypt(self, ciphertext, padding):
|
||||||
|
key_size_bytes = int(math.ceil(self.key_size / 8.0))
|
||||||
|
if key_size_bytes != len(ciphertext):
|
||||||
|
raise ValueError("Ciphertext length must be equal to key size.")
|
||||||
|
|
||||||
|
return _enc_dec_rsa(self._backend, self, ciphertext, padding)
|
||||||
|
|
||||||
|
def public_key(self):
|
||||||
|
ctx = self._backend._lib.RSAPublicKey_dup(self._rsa_cdata)
|
||||||
|
self._backend.openssl_assert(ctx != self._backend._ffi.NULL)
|
||||||
|
ctx = self._backend._ffi.gc(ctx, self._backend._lib.RSA_free)
|
||||||
|
res = self._backend._lib.RSA_blinding_on(ctx, self._backend._ffi.NULL)
|
||||||
|
self._backend.openssl_assert(res == 1)
|
||||||
|
evp_pkey = self._backend._rsa_cdata_to_evp_pkey(ctx)
|
||||||
|
return _RSAPublicKey(self._backend, ctx, evp_pkey)
|
||||||
|
|
||||||
|
def private_numbers(self):
|
||||||
|
n = self._backend._ffi.new("BIGNUM **")
|
||||||
|
e = self._backend._ffi.new("BIGNUM **")
|
||||||
|
d = self._backend._ffi.new("BIGNUM **")
|
||||||
|
p = self._backend._ffi.new("BIGNUM **")
|
||||||
|
q = self._backend._ffi.new("BIGNUM **")
|
||||||
|
dmp1 = self._backend._ffi.new("BIGNUM **")
|
||||||
|
dmq1 = self._backend._ffi.new("BIGNUM **")
|
||||||
|
iqmp = self._backend._ffi.new("BIGNUM **")
|
||||||
|
self._backend._lib.RSA_get0_key(self._rsa_cdata, n, e, d)
|
||||||
|
self._backend.openssl_assert(n[0] != self._backend._ffi.NULL)
|
||||||
|
self._backend.openssl_assert(e[0] != self._backend._ffi.NULL)
|
||||||
|
self._backend.openssl_assert(d[0] != self._backend._ffi.NULL)
|
||||||
|
self._backend._lib.RSA_get0_factors(self._rsa_cdata, p, q)
|
||||||
|
self._backend.openssl_assert(p[0] != self._backend._ffi.NULL)
|
||||||
|
self._backend.openssl_assert(q[0] != self._backend._ffi.NULL)
|
||||||
|
self._backend._lib.RSA_get0_crt_params(
|
||||||
|
self._rsa_cdata, dmp1, dmq1, iqmp
|
||||||
|
)
|
||||||
|
self._backend.openssl_assert(dmp1[0] != self._backend._ffi.NULL)
|
||||||
|
self._backend.openssl_assert(dmq1[0] != self._backend._ffi.NULL)
|
||||||
|
self._backend.openssl_assert(iqmp[0] != self._backend._ffi.NULL)
|
||||||
|
return rsa.RSAPrivateNumbers(
|
||||||
|
p=self._backend._bn_to_int(p[0]),
|
||||||
|
q=self._backend._bn_to_int(q[0]),
|
||||||
|
d=self._backend._bn_to_int(d[0]),
|
||||||
|
dmp1=self._backend._bn_to_int(dmp1[0]),
|
||||||
|
dmq1=self._backend._bn_to_int(dmq1[0]),
|
||||||
|
iqmp=self._backend._bn_to_int(iqmp[0]),
|
||||||
|
public_numbers=rsa.RSAPublicNumbers(
|
||||||
|
e=self._backend._bn_to_int(e[0]),
|
||||||
|
n=self._backend._bn_to_int(n[0]),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
def private_bytes(self, encoding, format, encryption_algorithm):
|
||||||
|
return self._backend._private_key_bytes(
|
||||||
|
encoding,
|
||||||
|
format,
|
||||||
|
encryption_algorithm,
|
||||||
|
self._evp_pkey,
|
||||||
|
self._rsa_cdata
|
||||||
|
)
|
||||||
|
|
||||||
|
def sign(self, data, padding, algorithm):
|
||||||
|
signer = self.signer(padding, algorithm)
|
||||||
|
signer.update(data)
|
||||||
|
signature = signer.finalize()
|
||||||
|
return signature
|
||||||
|
|
||||||
|
|
||||||
|
@utils.register_interface(RSAPublicKeyWithSerialization)
|
||||||
|
class _RSAPublicKey(object):
|
||||||
|
def __init__(self, backend, rsa_cdata, evp_pkey):
|
||||||
|
self._backend = backend
|
||||||
|
self._rsa_cdata = rsa_cdata
|
||||||
|
self._evp_pkey = evp_pkey
|
||||||
|
|
||||||
|
n = self._backend._ffi.new("BIGNUM **")
|
||||||
|
self._backend._lib.RSA_get0_key(
|
||||||
|
self._rsa_cdata, n, self._backend._ffi.NULL,
|
||||||
|
self._backend._ffi.NULL
|
||||||
|
)
|
||||||
|
self._backend.openssl_assert(n[0] != self._backend._ffi.NULL)
|
||||||
|
self._key_size = self._backend._lib.BN_num_bits(n[0])
|
||||||
|
|
||||||
|
key_size = utils.read_only_property("_key_size")
|
||||||
|
|
||||||
|
def verifier(self, signature, padding, algorithm):
|
||||||
|
if not isinstance(signature, bytes):
|
||||||
|
raise TypeError("signature must be bytes.")
|
||||||
|
|
||||||
|
return _RSAVerificationContext(
|
||||||
|
self._backend, self, signature, padding, algorithm
|
||||||
|
)
|
||||||
|
|
||||||
|
def encrypt(self, plaintext, padding):
|
||||||
|
return _enc_dec_rsa(self._backend, self, plaintext, padding)
|
||||||
|
|
||||||
|
def public_numbers(self):
|
||||||
|
n = self._backend._ffi.new("BIGNUM **")
|
||||||
|
e = self._backend._ffi.new("BIGNUM **")
|
||||||
|
self._backend._lib.RSA_get0_key(
|
||||||
|
self._rsa_cdata, n, e, self._backend._ffi.NULL
|
||||||
|
)
|
||||||
|
self._backend.openssl_assert(n[0] != self._backend._ffi.NULL)
|
||||||
|
self._backend.openssl_assert(e[0] != self._backend._ffi.NULL)
|
||||||
|
return rsa.RSAPublicNumbers(
|
||||||
|
e=self._backend._bn_to_int(e[0]),
|
||||||
|
n=self._backend._bn_to_int(n[0]),
|
||||||
|
)
|
||||||
|
|
||||||
|
def public_bytes(self, encoding, format):
|
||||||
|
return self._backend._public_key_bytes(
|
||||||
|
encoding,
|
||||||
|
format,
|
||||||
|
self,
|
||||||
|
self._evp_pkey,
|
||||||
|
self._rsa_cdata
|
||||||
|
)
|
||||||
|
|
||||||
|
def verify(self, signature, data, padding, algorithm):
|
||||||
|
verifier = self.verifier(signature, padding, algorithm)
|
||||||
|
verifier.update(data)
|
||||||
|
verifier.verify()
|
|
@ -0,0 +1,26 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
import six
|
||||||
|
|
||||||
|
|
||||||
|
def _truncate_digest(digest, order_bits):
|
||||||
|
digest_len = len(digest)
|
||||||
|
|
||||||
|
if 8 * digest_len > order_bits:
|
||||||
|
digest_len = (order_bits + 7) // 8
|
||||||
|
digest = digest[:digest_len]
|
||||||
|
|
||||||
|
if 8 * digest_len > order_bits:
|
||||||
|
rshift = 8 - (order_bits & 0x7)
|
||||||
|
assert 0 < rshift < 8
|
||||||
|
|
||||||
|
mask = 0xFF >> rshift << rshift
|
||||||
|
|
||||||
|
# Set the bottom rshift bits to 0
|
||||||
|
digest = digest[:-1] + six.int2byte(six.indexbytes(digest, -1) & mask)
|
||||||
|
|
||||||
|
return digest
|
|
@ -0,0 +1,420 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
import operator
|
||||||
|
import warnings
|
||||||
|
|
||||||
|
from cryptography import utils, x509
|
||||||
|
from cryptography.exceptions import UnsupportedAlgorithm
|
||||||
|
from cryptography.hazmat.backends.openssl.decode_asn1 import (
|
||||||
|
_CERTIFICATE_EXTENSION_PARSER, _CRL_EXTENSION_PARSER,
|
||||||
|
_CSR_EXTENSION_PARSER, _REVOKED_CERTIFICATE_EXTENSION_PARSER,
|
||||||
|
_asn1_integer_to_int, _asn1_string_to_bytes, _decode_x509_name, _obj2txt,
|
||||||
|
_parse_asn1_time
|
||||||
|
)
|
||||||
|
from cryptography.hazmat.primitives import hashes, serialization
|
||||||
|
|
||||||
|
|
||||||
|
@utils.register_interface(x509.Certificate)
|
||||||
|
class _Certificate(object):
|
||||||
|
def __init__(self, backend, x509):
|
||||||
|
self._backend = backend
|
||||||
|
self._x509 = x509
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return "<Certificate(subject={0}, ...)>".format(self.subject)
|
||||||
|
|
||||||
|
def __eq__(self, other):
|
||||||
|
if not isinstance(other, x509.Certificate):
|
||||||
|
return NotImplemented
|
||||||
|
|
||||||
|
res = self._backend._lib.X509_cmp(self._x509, other._x509)
|
||||||
|
return res == 0
|
||||||
|
|
||||||
|
def __ne__(self, other):
|
||||||
|
return not self == other
|
||||||
|
|
||||||
|
def __hash__(self):
|
||||||
|
return hash(self.public_bytes(serialization.Encoding.DER))
|
||||||
|
|
||||||
|
def fingerprint(self, algorithm):
|
||||||
|
h = hashes.Hash(algorithm, self._backend)
|
||||||
|
h.update(self.public_bytes(serialization.Encoding.DER))
|
||||||
|
return h.finalize()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def version(self):
|
||||||
|
version = self._backend._lib.X509_get_version(self._x509)
|
||||||
|
if version == 0:
|
||||||
|
return x509.Version.v1
|
||||||
|
elif version == 2:
|
||||||
|
return x509.Version.v3
|
||||||
|
else:
|
||||||
|
raise x509.InvalidVersion(
|
||||||
|
"{0} is not a valid X509 version".format(version), version
|
||||||
|
)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def serial(self):
|
||||||
|
warnings.warn(
|
||||||
|
"Certificate serial is deprecated, use serial_number instead.",
|
||||||
|
utils.DeprecatedIn14,
|
||||||
|
stacklevel=2
|
||||||
|
)
|
||||||
|
return self.serial_number
|
||||||
|
|
||||||
|
@property
|
||||||
|
def serial_number(self):
|
||||||
|
asn1_int = self._backend._lib.X509_get_serialNumber(self._x509)
|
||||||
|
self._backend.openssl_assert(asn1_int != self._backend._ffi.NULL)
|
||||||
|
return _asn1_integer_to_int(self._backend, asn1_int)
|
||||||
|
|
||||||
|
def public_key(self):
|
||||||
|
pkey = self._backend._lib.X509_get_pubkey(self._x509)
|
||||||
|
if pkey == self._backend._ffi.NULL:
|
||||||
|
# Remove errors from the stack.
|
||||||
|
self._backend._consume_errors()
|
||||||
|
raise ValueError("Certificate public key is of an unknown type")
|
||||||
|
|
||||||
|
pkey = self._backend._ffi.gc(pkey, self._backend._lib.EVP_PKEY_free)
|
||||||
|
|
||||||
|
return self._backend._evp_pkey_to_public_key(pkey)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def not_valid_before(self):
|
||||||
|
asn1_time = self._backend._lib.X509_get_notBefore(self._x509)
|
||||||
|
return _parse_asn1_time(self._backend, asn1_time)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def not_valid_after(self):
|
||||||
|
asn1_time = self._backend._lib.X509_get_notAfter(self._x509)
|
||||||
|
return _parse_asn1_time(self._backend, asn1_time)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def issuer(self):
|
||||||
|
issuer = self._backend._lib.X509_get_issuer_name(self._x509)
|
||||||
|
self._backend.openssl_assert(issuer != self._backend._ffi.NULL)
|
||||||
|
return _decode_x509_name(self._backend, issuer)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def subject(self):
|
||||||
|
subject = self._backend._lib.X509_get_subject_name(self._x509)
|
||||||
|
self._backend.openssl_assert(subject != self._backend._ffi.NULL)
|
||||||
|
return _decode_x509_name(self._backend, subject)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def signature_hash_algorithm(self):
|
||||||
|
alg = self._backend._ffi.new("X509_ALGOR **")
|
||||||
|
self._backend._lib.X509_get0_signature(
|
||||||
|
self._backend._ffi.NULL, alg, self._x509
|
||||||
|
)
|
||||||
|
self._backend.openssl_assert(alg[0] != self._backend._ffi.NULL)
|
||||||
|
oid = _obj2txt(self._backend, alg[0].algorithm)
|
||||||
|
try:
|
||||||
|
return x509._SIG_OIDS_TO_HASH[oid]
|
||||||
|
except KeyError:
|
||||||
|
raise UnsupportedAlgorithm(
|
||||||
|
"Signature algorithm OID:{0} not recognized".format(oid)
|
||||||
|
)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def extensions(self):
|
||||||
|
return _CERTIFICATE_EXTENSION_PARSER.parse(self._backend, self._x509)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def signature(self):
|
||||||
|
sig = self._backend._ffi.new("ASN1_BIT_STRING **")
|
||||||
|
self._backend._lib.X509_get0_signature(
|
||||||
|
sig, self._backend._ffi.NULL, self._x509
|
||||||
|
)
|
||||||
|
self._backend.openssl_assert(sig[0] != self._backend._ffi.NULL)
|
||||||
|
return _asn1_string_to_bytes(self._backend, sig[0])
|
||||||
|
|
||||||
|
@property
|
||||||
|
def tbs_certificate_bytes(self):
|
||||||
|
pp = self._backend._ffi.new("unsigned char **")
|
||||||
|
res = self._backend._lib.i2d_re_X509_tbs(self._x509, pp)
|
||||||
|
self._backend.openssl_assert(res > 0)
|
||||||
|
pp = self._backend._ffi.gc(
|
||||||
|
pp, lambda pointer: self._backend._lib.OPENSSL_free(pointer[0])
|
||||||
|
)
|
||||||
|
return self._backend._ffi.buffer(pp[0], res)[:]
|
||||||
|
|
||||||
|
def public_bytes(self, encoding):
|
||||||
|
bio = self._backend._create_mem_bio_gc()
|
||||||
|
if encoding is serialization.Encoding.PEM:
|
||||||
|
res = self._backend._lib.PEM_write_bio_X509(bio, self._x509)
|
||||||
|
elif encoding is serialization.Encoding.DER:
|
||||||
|
res = self._backend._lib.i2d_X509_bio(bio, self._x509)
|
||||||
|
else:
|
||||||
|
raise TypeError("encoding must be an item from the Encoding enum")
|
||||||
|
|
||||||
|
self._backend.openssl_assert(res == 1)
|
||||||
|
return self._backend._read_mem_bio(bio)
|
||||||
|
|
||||||
|
|
||||||
|
@utils.register_interface(x509.RevokedCertificate)
|
||||||
|
class _RevokedCertificate(object):
|
||||||
|
def __init__(self, backend, crl, x509_revoked):
|
||||||
|
self._backend = backend
|
||||||
|
# The X509_REVOKED_value is a X509_REVOKED * that has
|
||||||
|
# no reference counting. This means when X509_CRL_free is
|
||||||
|
# called then the CRL and all X509_REVOKED * are freed. Since
|
||||||
|
# you can retain a reference to a single revoked certificate
|
||||||
|
# and let the CRL fall out of scope we need to retain a
|
||||||
|
# private reference to the CRL inside the RevokedCertificate
|
||||||
|
# object to prevent the gc from being called inappropriately.
|
||||||
|
self._crl = crl
|
||||||
|
self._x509_revoked = x509_revoked
|
||||||
|
|
||||||
|
@property
|
||||||
|
def serial_number(self):
|
||||||
|
asn1_int = self._backend._lib.X509_REVOKED_get0_serialNumber(
|
||||||
|
self._x509_revoked
|
||||||
|
)
|
||||||
|
self._backend.openssl_assert(asn1_int != self._backend._ffi.NULL)
|
||||||
|
return _asn1_integer_to_int(self._backend, asn1_int)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def revocation_date(self):
|
||||||
|
return _parse_asn1_time(
|
||||||
|
self._backend,
|
||||||
|
self._backend._lib.X509_REVOKED_get0_revocationDate(
|
||||||
|
self._x509_revoked
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def extensions(self):
|
||||||
|
return _REVOKED_CERTIFICATE_EXTENSION_PARSER.parse(
|
||||||
|
self._backend, self._x509_revoked
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@utils.register_interface(x509.CertificateRevocationList)
|
||||||
|
class _CertificateRevocationList(object):
|
||||||
|
def __init__(self, backend, x509_crl):
|
||||||
|
self._backend = backend
|
||||||
|
self._x509_crl = x509_crl
|
||||||
|
|
||||||
|
def __eq__(self, other):
|
||||||
|
if not isinstance(other, x509.CertificateRevocationList):
|
||||||
|
return NotImplemented
|
||||||
|
|
||||||
|
res = self._backend._lib.X509_CRL_cmp(self._x509_crl, other._x509_crl)
|
||||||
|
return res == 0
|
||||||
|
|
||||||
|
def __ne__(self, other):
|
||||||
|
return not self == other
|
||||||
|
|
||||||
|
def fingerprint(self, algorithm):
|
||||||
|
h = hashes.Hash(algorithm, self._backend)
|
||||||
|
bio = self._backend._create_mem_bio_gc()
|
||||||
|
res = self._backend._lib.i2d_X509_CRL_bio(
|
||||||
|
bio, self._x509_crl
|
||||||
|
)
|
||||||
|
self._backend.openssl_assert(res == 1)
|
||||||
|
der = self._backend._read_mem_bio(bio)
|
||||||
|
h.update(der)
|
||||||
|
return h.finalize()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def signature_hash_algorithm(self):
|
||||||
|
alg = self._backend._ffi.new("X509_ALGOR **")
|
||||||
|
self._backend._lib.X509_CRL_get0_signature(
|
||||||
|
self._backend._ffi.NULL, alg, self._x509_crl
|
||||||
|
)
|
||||||
|
self._backend.openssl_assert(alg[0] != self._backend._ffi.NULL)
|
||||||
|
oid = _obj2txt(self._backend, alg[0].algorithm)
|
||||||
|
try:
|
||||||
|
return x509._SIG_OIDS_TO_HASH[oid]
|
||||||
|
except KeyError:
|
||||||
|
raise UnsupportedAlgorithm(
|
||||||
|
"Signature algorithm OID:{0} not recognized".format(oid)
|
||||||
|
)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def issuer(self):
|
||||||
|
issuer = self._backend._lib.X509_CRL_get_issuer(self._x509_crl)
|
||||||
|
self._backend.openssl_assert(issuer != self._backend._ffi.NULL)
|
||||||
|
return _decode_x509_name(self._backend, issuer)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def next_update(self):
|
||||||
|
nu = self._backend._lib.X509_CRL_get_nextUpdate(self._x509_crl)
|
||||||
|
self._backend.openssl_assert(nu != self._backend._ffi.NULL)
|
||||||
|
return _parse_asn1_time(self._backend, nu)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def last_update(self):
|
||||||
|
lu = self._backend._lib.X509_CRL_get_lastUpdate(self._x509_crl)
|
||||||
|
self._backend.openssl_assert(lu != self._backend._ffi.NULL)
|
||||||
|
return _parse_asn1_time(self._backend, lu)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def signature(self):
|
||||||
|
sig = self._backend._ffi.new("ASN1_BIT_STRING **")
|
||||||
|
self._backend._lib.X509_CRL_get0_signature(
|
||||||
|
sig, self._backend._ffi.NULL, self._x509_crl
|
||||||
|
)
|
||||||
|
self._backend.openssl_assert(sig[0] != self._backend._ffi.NULL)
|
||||||
|
return _asn1_string_to_bytes(self._backend, sig[0])
|
||||||
|
|
||||||
|
@property
|
||||||
|
def tbs_certlist_bytes(self):
|
||||||
|
pp = self._backend._ffi.new("unsigned char **")
|
||||||
|
res = self._backend._lib.i2d_re_X509_CRL_tbs(self._x509_crl, pp)
|
||||||
|
self._backend.openssl_assert(res > 0)
|
||||||
|
pp = self._backend._ffi.gc(
|
||||||
|
pp, lambda pointer: self._backend._lib.OPENSSL_free(pointer[0])
|
||||||
|
)
|
||||||
|
return self._backend._ffi.buffer(pp[0], res)[:]
|
||||||
|
|
||||||
|
def public_bytes(self, encoding):
|
||||||
|
bio = self._backend._create_mem_bio_gc()
|
||||||
|
if encoding is serialization.Encoding.PEM:
|
||||||
|
res = self._backend._lib.PEM_write_bio_X509_CRL(
|
||||||
|
bio, self._x509_crl
|
||||||
|
)
|
||||||
|
elif encoding is serialization.Encoding.DER:
|
||||||
|
res = self._backend._lib.i2d_X509_CRL_bio(bio, self._x509_crl)
|
||||||
|
else:
|
||||||
|
raise TypeError("encoding must be an item from the Encoding enum")
|
||||||
|
|
||||||
|
self._backend.openssl_assert(res == 1)
|
||||||
|
return self._backend._read_mem_bio(bio)
|
||||||
|
|
||||||
|
def _revoked_cert(self, idx):
|
||||||
|
revoked = self._backend._lib.X509_CRL_get_REVOKED(self._x509_crl)
|
||||||
|
r = self._backend._lib.sk_X509_REVOKED_value(revoked, idx)
|
||||||
|
self._backend.openssl_assert(r != self._backend._ffi.NULL)
|
||||||
|
return _RevokedCertificate(self._backend, self, r)
|
||||||
|
|
||||||
|
def __iter__(self):
|
||||||
|
for i in range(len(self)):
|
||||||
|
yield self._revoked_cert(i)
|
||||||
|
|
||||||
|
def __getitem__(self, idx):
|
||||||
|
if isinstance(idx, slice):
|
||||||
|
start, stop, step = idx.indices(len(self))
|
||||||
|
return [self._revoked_cert(i) for i in range(start, stop, step)]
|
||||||
|
else:
|
||||||
|
idx = operator.index(idx)
|
||||||
|
if idx < 0:
|
||||||
|
idx += len(self)
|
||||||
|
if not 0 <= idx < len(self):
|
||||||
|
raise IndexError
|
||||||
|
return self._revoked_cert(idx)
|
||||||
|
|
||||||
|
def __len__(self):
|
||||||
|
revoked = self._backend._lib.X509_CRL_get_REVOKED(self._x509_crl)
|
||||||
|
if revoked == self._backend._ffi.NULL:
|
||||||
|
return 0
|
||||||
|
else:
|
||||||
|
return self._backend._lib.sk_X509_REVOKED_num(revoked)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def extensions(self):
|
||||||
|
return _CRL_EXTENSION_PARSER.parse(self._backend, self._x509_crl)
|
||||||
|
|
||||||
|
|
||||||
|
@utils.register_interface(x509.CertificateSigningRequest)
|
||||||
|
class _CertificateSigningRequest(object):
|
||||||
|
def __init__(self, backend, x509_req):
|
||||||
|
self._backend = backend
|
||||||
|
self._x509_req = x509_req
|
||||||
|
|
||||||
|
def __eq__(self, other):
|
||||||
|
if not isinstance(other, _CertificateSigningRequest):
|
||||||
|
return NotImplemented
|
||||||
|
|
||||||
|
self_bytes = self.public_bytes(serialization.Encoding.DER)
|
||||||
|
other_bytes = other.public_bytes(serialization.Encoding.DER)
|
||||||
|
return self_bytes == other_bytes
|
||||||
|
|
||||||
|
def __ne__(self, other):
|
||||||
|
return not self == other
|
||||||
|
|
||||||
|
def __hash__(self):
|
||||||
|
return hash(self.public_bytes(serialization.Encoding.DER))
|
||||||
|
|
||||||
|
def public_key(self):
|
||||||
|
pkey = self._backend._lib.X509_REQ_get_pubkey(self._x509_req)
|
||||||
|
self._backend.openssl_assert(pkey != self._backend._ffi.NULL)
|
||||||
|
pkey = self._backend._ffi.gc(pkey, self._backend._lib.EVP_PKEY_free)
|
||||||
|
return self._backend._evp_pkey_to_public_key(pkey)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def subject(self):
|
||||||
|
subject = self._backend._lib.X509_REQ_get_subject_name(self._x509_req)
|
||||||
|
self._backend.openssl_assert(subject != self._backend._ffi.NULL)
|
||||||
|
return _decode_x509_name(self._backend, subject)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def signature_hash_algorithm(self):
|
||||||
|
alg = self._backend._ffi.new("X509_ALGOR **")
|
||||||
|
self._backend._lib.X509_REQ_get0_signature(
|
||||||
|
self._backend._ffi.NULL, alg, self._x509_req
|
||||||
|
)
|
||||||
|
self._backend.openssl_assert(alg[0] != self._backend._ffi.NULL)
|
||||||
|
oid = _obj2txt(self._backend, alg[0].algorithm)
|
||||||
|
try:
|
||||||
|
return x509._SIG_OIDS_TO_HASH[oid]
|
||||||
|
except KeyError:
|
||||||
|
raise UnsupportedAlgorithm(
|
||||||
|
"Signature algorithm OID:{0} not recognized".format(oid)
|
||||||
|
)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def extensions(self):
|
||||||
|
x509_exts = self._backend._lib.X509_REQ_get_extensions(self._x509_req)
|
||||||
|
return _CSR_EXTENSION_PARSER.parse(self._backend, x509_exts)
|
||||||
|
|
||||||
|
def public_bytes(self, encoding):
|
||||||
|
bio = self._backend._create_mem_bio_gc()
|
||||||
|
if encoding is serialization.Encoding.PEM:
|
||||||
|
res = self._backend._lib.PEM_write_bio_X509_REQ(
|
||||||
|
bio, self._x509_req
|
||||||
|
)
|
||||||
|
elif encoding is serialization.Encoding.DER:
|
||||||
|
res = self._backend._lib.i2d_X509_REQ_bio(bio, self._x509_req)
|
||||||
|
else:
|
||||||
|
raise TypeError("encoding must be an item from the Encoding enum")
|
||||||
|
|
||||||
|
self._backend.openssl_assert(res == 1)
|
||||||
|
return self._backend._read_mem_bio(bio)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def tbs_certrequest_bytes(self):
|
||||||
|
pp = self._backend._ffi.new("unsigned char **")
|
||||||
|
res = self._backend._lib.i2d_re_X509_REQ_tbs(self._x509_req, pp)
|
||||||
|
self._backend.openssl_assert(res > 0)
|
||||||
|
pp = self._backend._ffi.gc(
|
||||||
|
pp, lambda pointer: self._backend._lib.OPENSSL_free(pointer[0])
|
||||||
|
)
|
||||||
|
return self._backend._ffi.buffer(pp[0], res)[:]
|
||||||
|
|
||||||
|
@property
|
||||||
|
def signature(self):
|
||||||
|
sig = self._backend._ffi.new("ASN1_BIT_STRING **")
|
||||||
|
self._backend._lib.X509_REQ_get0_signature(
|
||||||
|
sig, self._backend._ffi.NULL, self._x509_req
|
||||||
|
)
|
||||||
|
self._backend.openssl_assert(sig[0] != self._backend._ffi.NULL)
|
||||||
|
return _asn1_string_to_bytes(self._backend, sig[0])
|
||||||
|
|
||||||
|
@property
|
||||||
|
def is_signature_valid(self):
|
||||||
|
pkey = self._backend._lib.X509_REQ_get_pubkey(self._x509_req)
|
||||||
|
self._backend.openssl_assert(pkey != self._backend._ffi.NULL)
|
||||||
|
pkey = self._backend._ffi.gc(pkey, self._backend._lib.EVP_PKEY_free)
|
||||||
|
res = self._backend._lib.X509_REQ_verify(self._x509_req, pkey)
|
||||||
|
|
||||||
|
if res != 1:
|
||||||
|
self._backend._consume_errors()
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
|
@ -0,0 +1,5 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
Binary file not shown.
BIN
lib/python3.4/site-packages/cryptography/hazmat/bindings/_openssl.cpython-34m.so
Executable file
BIN
lib/python3.4/site-packages/cryptography/hazmat/bindings/_openssl.cpython-34m.so
Executable file
Binary file not shown.
BIN
lib/python3.4/site-packages/cryptography/hazmat/bindings/_padding.cpython-34m.so
Executable file
BIN
lib/python3.4/site-packages/cryptography/hazmat/bindings/_padding.cpython-34m.so
Executable file
Binary file not shown.
|
@ -0,0 +1,5 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
|
@ -0,0 +1,15 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
from cryptography.hazmat.bindings._commoncrypto import ffi, lib
|
||||||
|
|
||||||
|
|
||||||
|
class Binding(object):
|
||||||
|
"""
|
||||||
|
CommonCrypto API wrapper.
|
||||||
|
"""
|
||||||
|
lib = lib
|
||||||
|
ffi = ffi
|
|
@ -0,0 +1,5 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
|
@ -0,0 +1,431 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
# This is a temporary copy of all the CONDITIONAL_NAMES from _cffi_src so
|
||||||
|
# we can loop over them and delete them at runtime. It will be removed when
|
||||||
|
# cffi supports #if in cdef
|
||||||
|
|
||||||
|
CONDITIONAL_NAMES = {
|
||||||
|
"Cryptography_HAS_AES_WRAP": [
|
||||||
|
"AES_wrap_key",
|
||||||
|
"AES_unwrap_key",
|
||||||
|
],
|
||||||
|
"Cryptography_HAS_CMAC": [
|
||||||
|
"CMAC_CTX_new",
|
||||||
|
"CMAC_Init",
|
||||||
|
"CMAC_Update",
|
||||||
|
"CMAC_Final",
|
||||||
|
"CMAC_CTX_copy",
|
||||||
|
"CMAC_CTX_free",
|
||||||
|
],
|
||||||
|
"Cryptography_HAS_CMS": [
|
||||||
|
"BIO_new_CMS",
|
||||||
|
"i2d_CMS_bio_stream",
|
||||||
|
"PEM_write_bio_CMS_stream",
|
||||||
|
"CMS_final",
|
||||||
|
"CMS_sign",
|
||||||
|
"CMS_verify",
|
||||||
|
"CMS_encrypt",
|
||||||
|
"CMS_decrypt",
|
||||||
|
"CMS_add1_signer",
|
||||||
|
"CMS_TEXT",
|
||||||
|
"CMS_NOCERTS",
|
||||||
|
"CMS_NO_CONTENT_VERIFY",
|
||||||
|
"CMS_NO_ATTR_VERIFY",
|
||||||
|
"CMS_NOSIGS",
|
||||||
|
"CMS_NOINTERN",
|
||||||
|
"CMS_NO_SIGNER_CERT_VERIFY",
|
||||||
|
"CMS_NOVERIFY",
|
||||||
|
"CMS_DETACHED",
|
||||||
|
"CMS_BINARY",
|
||||||
|
"CMS_NOATTR",
|
||||||
|
"CMS_NOSMIMECAP",
|
||||||
|
"CMS_NOOLDMIMETYPE",
|
||||||
|
"CMS_CRLFEOL",
|
||||||
|
"CMS_STREAM",
|
||||||
|
"CMS_NOCRL",
|
||||||
|
"CMS_PARTIAL",
|
||||||
|
"CMS_REUSE_DIGEST",
|
||||||
|
"CMS_USE_KEYID",
|
||||||
|
"CMS_DEBUG_DECRYPT",
|
||||||
|
],
|
||||||
|
"Cryptography_HAS_CMS_BIO_FUNCTIONS": [
|
||||||
|
"BIO_new_CMS",
|
||||||
|
"i2d_CMS_bio_stream",
|
||||||
|
"PEM_write_bio_CMS_stream",
|
||||||
|
],
|
||||||
|
"Cryptography_HAS_EC": [
|
||||||
|
"OPENSSL_EC_NAMED_CURVE",
|
||||||
|
"EC_GROUP_new",
|
||||||
|
"EC_GROUP_free",
|
||||||
|
"EC_GROUP_clear_free",
|
||||||
|
"EC_GROUP_new_curve_GFp",
|
||||||
|
"EC_GROUP_new_by_curve_name",
|
||||||
|
"EC_GROUP_set_curve_GFp",
|
||||||
|
"EC_GROUP_get_curve_GFp",
|
||||||
|
"EC_GROUP_method_of",
|
||||||
|
"EC_GROUP_get0_generator",
|
||||||
|
"EC_GROUP_get_curve_name",
|
||||||
|
"EC_GROUP_get_degree",
|
||||||
|
"EC_GROUP_set_asn1_flag",
|
||||||
|
"EC_GROUP_set_point_conversion_form",
|
||||||
|
"EC_KEY_new",
|
||||||
|
"EC_KEY_free",
|
||||||
|
"EC_get_builtin_curves",
|
||||||
|
"EC_KEY_new_by_curve_name",
|
||||||
|
"EC_KEY_copy",
|
||||||
|
"EC_KEY_dup",
|
||||||
|
"EC_KEY_up_ref",
|
||||||
|
"EC_KEY_set_group",
|
||||||
|
"EC_KEY_get0_private_key",
|
||||||
|
"EC_KEY_set_private_key",
|
||||||
|
"EC_KEY_set_public_key",
|
||||||
|
"EC_KEY_get_enc_flags",
|
||||||
|
"EC_KEY_set_enc_flags",
|
||||||
|
"EC_KEY_set_conv_form",
|
||||||
|
"EC_KEY_set_asn1_flag",
|
||||||
|
"EC_KEY_precompute_mult",
|
||||||
|
"EC_KEY_generate_key",
|
||||||
|
"EC_KEY_check_key",
|
||||||
|
"EC_POINT_new",
|
||||||
|
"EC_POINT_free",
|
||||||
|
"EC_POINT_clear_free",
|
||||||
|
"EC_POINT_copy",
|
||||||
|
"EC_POINT_dup",
|
||||||
|
"EC_POINT_method_of",
|
||||||
|
"EC_POINT_set_to_infinity",
|
||||||
|
"EC_POINT_set_Jprojective_coordinates_GFp",
|
||||||
|
"EC_POINT_get_Jprojective_coordinates_GFp",
|
||||||
|
"EC_POINT_set_affine_coordinates_GFp",
|
||||||
|
"EC_POINT_get_affine_coordinates_GFp",
|
||||||
|
"EC_POINT_set_compressed_coordinates_GFp",
|
||||||
|
"EC_POINT_point2oct",
|
||||||
|
"EC_POINT_oct2point",
|
||||||
|
"EC_POINT_point2bn",
|
||||||
|
"EC_POINT_bn2point",
|
||||||
|
"EC_POINT_point2hex",
|
||||||
|
"EC_POINT_hex2point",
|
||||||
|
"EC_POINT_add",
|
||||||
|
"EC_POINT_dbl",
|
||||||
|
"EC_POINT_invert",
|
||||||
|
"EC_POINT_is_at_infinity",
|
||||||
|
"EC_POINT_is_on_curve",
|
||||||
|
"EC_POINT_cmp",
|
||||||
|
"EC_POINT_make_affine",
|
||||||
|
"EC_POINTs_make_affine",
|
||||||
|
"EC_POINTs_mul",
|
||||||
|
"EC_POINT_mul",
|
||||||
|
"EC_GROUP_precompute_mult",
|
||||||
|
"EC_GROUP_have_precompute_mult",
|
||||||
|
"EC_GFp_simple_method",
|
||||||
|
"EC_GFp_mont_method",
|
||||||
|
"EC_GFp_nist_method",
|
||||||
|
"EC_METHOD_get_field_type",
|
||||||
|
"EVP_PKEY_assign_EC_KEY",
|
||||||
|
"EVP_PKEY_get1_EC_KEY",
|
||||||
|
"EVP_PKEY_set1_EC_KEY",
|
||||||
|
"PEM_write_bio_ECPrivateKey",
|
||||||
|
"i2d_EC_PUBKEY",
|
||||||
|
"d2i_EC_PUBKEY",
|
||||||
|
"d2i_EC_PUBKEY_bio",
|
||||||
|
"i2d_EC_PUBKEY_bio",
|
||||||
|
"d2i_ECPrivateKey",
|
||||||
|
"d2i_ECPrivateKey_bio",
|
||||||
|
"i2d_ECPrivateKey",
|
||||||
|
"i2d_ECPrivateKey_bio",
|
||||||
|
"i2o_ECPublicKey",
|
||||||
|
"o2i_ECPublicKey",
|
||||||
|
"SSL_CTX_set_tmp_ecdh",
|
||||||
|
"POINT_CONVERSION_COMPRESSED",
|
||||||
|
"POINT_CONVERSION_UNCOMPRESSED",
|
||||||
|
"POINT_CONVERSION_HYBRID",
|
||||||
|
],
|
||||||
|
|
||||||
|
"Cryptography_HAS_EC_1_0_1": [
|
||||||
|
"EC_KEY_get_flags",
|
||||||
|
"EC_KEY_set_flags",
|
||||||
|
"EC_KEY_clear_flags",
|
||||||
|
"EC_KEY_set_public_key_affine_coordinates",
|
||||||
|
],
|
||||||
|
|
||||||
|
"Cryptography_HAS_EC2M": [
|
||||||
|
"EC_GF2m_simple_method",
|
||||||
|
"EC_POINT_set_affine_coordinates_GF2m",
|
||||||
|
"EC_POINT_get_affine_coordinates_GF2m",
|
||||||
|
"EC_POINT_set_compressed_coordinates_GF2m",
|
||||||
|
"EC_GROUP_set_curve_GF2m",
|
||||||
|
"EC_GROUP_get_curve_GF2m",
|
||||||
|
"EC_GROUP_new_curve_GF2m",
|
||||||
|
],
|
||||||
|
|
||||||
|
"Cryptography_HAS_EC_1_0_2": [
|
||||||
|
"EC_curve_nid2nist",
|
||||||
|
],
|
||||||
|
"Cryptography_HAS_ECDH": [
|
||||||
|
"ECDH_compute_key",
|
||||||
|
],
|
||||||
|
"Cryptography_HAS_ECDSA": [
|
||||||
|
"ECDSA_SIG_new",
|
||||||
|
"ECDSA_SIG_free",
|
||||||
|
"i2d_ECDSA_SIG",
|
||||||
|
"d2i_ECDSA_SIG",
|
||||||
|
"ECDSA_do_sign",
|
||||||
|
"ECDSA_do_sign_ex",
|
||||||
|
"ECDSA_do_verify",
|
||||||
|
"ECDSA_sign_setup",
|
||||||
|
"ECDSA_sign",
|
||||||
|
"ECDSA_sign_ex",
|
||||||
|
"ECDSA_verify",
|
||||||
|
"ECDSA_size",
|
||||||
|
],
|
||||||
|
"Cryptography_HAS_ENGINE_CRYPTODEV": [
|
||||||
|
"ENGINE_load_cryptodev"
|
||||||
|
],
|
||||||
|
"Cryptography_HAS_098H_ERROR_CODES": [
|
||||||
|
"ASN1_F_B64_READ_ASN1",
|
||||||
|
"ASN1_F_B64_WRITE_ASN1",
|
||||||
|
"ASN1_F_SMIME_READ_ASN1",
|
||||||
|
"ASN1_F_SMIME_TEXT",
|
||||||
|
"ASN1_R_NO_CONTENT_TYPE",
|
||||||
|
"ASN1_R_NO_MULTIPART_BODY_FAILURE",
|
||||||
|
"ASN1_R_NO_MULTIPART_BOUNDARY",
|
||||||
|
],
|
||||||
|
"Cryptography_HAS_098C_CAMELLIA_CODES": [
|
||||||
|
"EVP_F_CAMELLIA_INIT_KEY",
|
||||||
|
"EVP_R_CAMELLIA_KEY_SETUP_FAILED"
|
||||||
|
],
|
||||||
|
"Cryptography_HAS_EC_CODES": [
|
||||||
|
"EC_R_UNKNOWN_GROUP",
|
||||||
|
"EC_F_EC_GROUP_NEW_BY_CURVE_NAME"
|
||||||
|
],
|
||||||
|
"Cryptography_HAS_TLSEXT_ERROR_CODES": [
|
||||||
|
"SSL_TLSEXT_ERR_OK",
|
||||||
|
"SSL_TLSEXT_ERR_ALERT_WARNING",
|
||||||
|
"SSL_TLSEXT_ERR_ALERT_FATAL",
|
||||||
|
"SSL_TLSEXT_ERR_NOACK",
|
||||||
|
],
|
||||||
|
"Cryptography_HAS_RSA_R_PKCS_DECODING_ERROR": [
|
||||||
|
"RSA_R_PKCS_DECODING_ERROR"
|
||||||
|
],
|
||||||
|
"Cryptography_HAS_GCM": [
|
||||||
|
"EVP_CTRL_GCM_GET_TAG",
|
||||||
|
"EVP_CTRL_GCM_SET_TAG",
|
||||||
|
"EVP_CTRL_GCM_SET_IVLEN",
|
||||||
|
],
|
||||||
|
"Cryptography_HAS_PBKDF2_HMAC": [
|
||||||
|
"PKCS5_PBKDF2_HMAC"
|
||||||
|
],
|
||||||
|
"Cryptography_HAS_PKEY_CTX": [
|
||||||
|
"EVP_PKEY_CTX_new",
|
||||||
|
"EVP_PKEY_CTX_new_id",
|
||||||
|
"EVP_PKEY_CTX_dup",
|
||||||
|
"EVP_PKEY_CTX_free",
|
||||||
|
"EVP_PKEY_sign",
|
||||||
|
"EVP_PKEY_sign_init",
|
||||||
|
"EVP_PKEY_verify",
|
||||||
|
"EVP_PKEY_verify_init",
|
||||||
|
"Cryptography_EVP_PKEY_encrypt",
|
||||||
|
"EVP_PKEY_encrypt_init",
|
||||||
|
"Cryptography_EVP_PKEY_decrypt",
|
||||||
|
"EVP_PKEY_decrypt_init",
|
||||||
|
"EVP_PKEY_CTX_set_signature_md",
|
||||||
|
"EVP_PKEY_id",
|
||||||
|
"EVP_PKEY_CTX_set_rsa_padding",
|
||||||
|
"EVP_PKEY_CTX_set_rsa_pss_saltlen",
|
||||||
|
],
|
||||||
|
"Cryptography_HAS_ECDSA_SHA2_NIDS": [
|
||||||
|
"NID_ecdsa_with_SHA224",
|
||||||
|
"NID_ecdsa_with_SHA256",
|
||||||
|
"NID_ecdsa_with_SHA384",
|
||||||
|
"NID_ecdsa_with_SHA512",
|
||||||
|
],
|
||||||
|
"Cryptography_HAS_EGD": [
|
||||||
|
"RAND_egd",
|
||||||
|
"RAND_egd_bytes",
|
||||||
|
"RAND_query_egd_bytes",
|
||||||
|
],
|
||||||
|
"Cryptography_HAS_PSS_PADDING": [
|
||||||
|
"RSA_PKCS1_PSS_PADDING",
|
||||||
|
],
|
||||||
|
"Cryptography_HAS_MGF1_MD": [
|
||||||
|
"EVP_PKEY_CTX_set_rsa_mgf1_md",
|
||||||
|
],
|
||||||
|
"Cryptography_HAS_RSA_OAEP_MD": [
|
||||||
|
"EVP_PKEY_CTX_set_rsa_oaep_md",
|
||||||
|
],
|
||||||
|
"Cryptography_HAS_TLSv1_1": [
|
||||||
|
"SSL_OP_NO_TLSv1_1",
|
||||||
|
"TLSv1_1_method",
|
||||||
|
"TLSv1_1_server_method",
|
||||||
|
"TLSv1_1_client_method",
|
||||||
|
],
|
||||||
|
|
||||||
|
"Cryptography_HAS_TLSv1_2": [
|
||||||
|
"SSL_OP_NO_TLSv1_2",
|
||||||
|
"TLSv1_2_method",
|
||||||
|
"TLSv1_2_server_method",
|
||||||
|
"TLSv1_2_client_method",
|
||||||
|
],
|
||||||
|
|
||||||
|
"Cryptography_HAS_SSL3_METHOD": [
|
||||||
|
"SSLv3_method",
|
||||||
|
"SSLv3_client_method",
|
||||||
|
"SSLv3_server_method",
|
||||||
|
],
|
||||||
|
|
||||||
|
"Cryptography_HAS_TLSEXT_HOSTNAME": [
|
||||||
|
"SSL_set_tlsext_host_name",
|
||||||
|
"SSL_get_servername",
|
||||||
|
"SSL_CTX_set_tlsext_servername_callback",
|
||||||
|
],
|
||||||
|
|
||||||
|
"Cryptography_HAS_TLSEXT_STATUS_REQ_CB": [
|
||||||
|
"SSL_CTX_set_tlsext_status_cb",
|
||||||
|
"SSL_CTX_set_tlsext_status_arg"
|
||||||
|
],
|
||||||
|
|
||||||
|
"Cryptography_HAS_STATUS_REQ_OCSP_RESP": [
|
||||||
|
"SSL_set_tlsext_status_ocsp_resp",
|
||||||
|
"SSL_get_tlsext_status_ocsp_resp",
|
||||||
|
],
|
||||||
|
|
||||||
|
"Cryptography_HAS_TLSEXT_STATUS_REQ_TYPE": [
|
||||||
|
"SSL_set_tlsext_status_type",
|
||||||
|
],
|
||||||
|
|
||||||
|
"Cryptography_HAS_RELEASE_BUFFERS": [
|
||||||
|
"SSL_MODE_RELEASE_BUFFERS",
|
||||||
|
],
|
||||||
|
|
||||||
|
"Cryptography_HAS_OP_NO_COMPRESSION": [
|
||||||
|
"SSL_OP_NO_COMPRESSION",
|
||||||
|
],
|
||||||
|
|
||||||
|
"Cryptography_HAS_SSL_OP_MSIE_SSLV2_RSA_PADDING": [
|
||||||
|
"SSL_OP_MSIE_SSLV2_RSA_PADDING",
|
||||||
|
],
|
||||||
|
|
||||||
|
"Cryptography_HAS_SSL_OP_NO_TICKET": [
|
||||||
|
"SSL_OP_NO_TICKET",
|
||||||
|
],
|
||||||
|
|
||||||
|
"Cryptography_HAS_SSL_SET_SSL_CTX": [
|
||||||
|
"SSL_set_SSL_CTX",
|
||||||
|
"TLSEXT_NAMETYPE_host_name",
|
||||||
|
],
|
||||||
|
|
||||||
|
"Cryptography_HAS_NETBSD_D1_METH": [
|
||||||
|
"DTLSv1_method",
|
||||||
|
],
|
||||||
|
|
||||||
|
"Cryptography_HAS_NEXTPROTONEG": [
|
||||||
|
"SSL_CTX_set_next_protos_advertised_cb",
|
||||||
|
"SSL_CTX_set_next_proto_select_cb",
|
||||||
|
"SSL_select_next_proto",
|
||||||
|
"SSL_get0_next_proto_negotiated",
|
||||||
|
],
|
||||||
|
|
||||||
|
"Cryptography_HAS_SECURE_RENEGOTIATION": [
|
||||||
|
"SSL_OP_ALLOW_UNSAFE_LEGACY_RENEGOTIATION",
|
||||||
|
"SSL_OP_LEGACY_SERVER_CONNECT",
|
||||||
|
"SSL_get_secure_renegotiation_support",
|
||||||
|
],
|
||||||
|
|
||||||
|
"Cryptography_HAS_ALPN": [
|
||||||
|
"SSL_CTX_set_alpn_protos",
|
||||||
|
"SSL_set_alpn_protos",
|
||||||
|
"SSL_CTX_set_alpn_select_cb",
|
||||||
|
"SSL_get0_alpn_selected",
|
||||||
|
],
|
||||||
|
|
||||||
|
"Cryptography_HAS_COMPRESSION": [
|
||||||
|
"SSL_get_current_compression",
|
||||||
|
"SSL_get_current_expansion",
|
||||||
|
"SSL_COMP_get_name",
|
||||||
|
],
|
||||||
|
|
||||||
|
"Cryptography_HAS_GET_SERVER_TMP_KEY": [
|
||||||
|
"SSL_get_server_tmp_key",
|
||||||
|
],
|
||||||
|
|
||||||
|
"Cryptography_HAS_SSL_CTX_SET_CLIENT_CERT_ENGINE": [
|
||||||
|
"SSL_CTX_set_client_cert_engine",
|
||||||
|
],
|
||||||
|
"Cryptography_HAS_SSL_CTX_CLEAR_OPTIONS": [
|
||||||
|
"SSL_CTX_clear_options",
|
||||||
|
],
|
||||||
|
"Cryptography_HAS_102_VERIFICATION_ERROR_CODES": [
|
||||||
|
'X509_V_ERR_SUITE_B_INVALID_VERSION',
|
||||||
|
'X509_V_ERR_SUITE_B_INVALID_ALGORITHM',
|
||||||
|
'X509_V_ERR_SUITE_B_INVALID_CURVE',
|
||||||
|
'X509_V_ERR_SUITE_B_INVALID_SIGNATURE_ALGORITHM',
|
||||||
|
'X509_V_ERR_SUITE_B_LOS_NOT_ALLOWED',
|
||||||
|
'X509_V_ERR_SUITE_B_CANNOT_SIGN_P_384_WITH_P_256',
|
||||||
|
'X509_V_ERR_HOSTNAME_MISMATCH',
|
||||||
|
'X509_V_ERR_EMAIL_MISMATCH',
|
||||||
|
'X509_V_ERR_IP_ADDRESS_MISMATCH'
|
||||||
|
],
|
||||||
|
"Cryptography_HAS_102_VERIFICATION_PARAMS": [
|
||||||
|
"X509_V_FLAG_SUITEB_128_LOS_ONLY",
|
||||||
|
"X509_V_FLAG_SUITEB_192_LOS",
|
||||||
|
"X509_V_FLAG_SUITEB_128_LOS",
|
||||||
|
"X509_VERIFY_PARAM_set1_host",
|
||||||
|
"X509_VERIFY_PARAM_set1_email",
|
||||||
|
"X509_VERIFY_PARAM_set1_ip",
|
||||||
|
"X509_VERIFY_PARAM_set1_ip_asc",
|
||||||
|
"X509_VERIFY_PARAM_set_hostflags",
|
||||||
|
],
|
||||||
|
"Cryptography_HAS_X509_V_FLAG_TRUSTED_FIRST": [
|
||||||
|
"X509_V_FLAG_TRUSTED_FIRST",
|
||||||
|
],
|
||||||
|
"Cryptography_HAS_X509_V_FLAG_PARTIAL_CHAIN": [
|
||||||
|
"X509_V_FLAG_PARTIAL_CHAIN",
|
||||||
|
],
|
||||||
|
"Cryptography_HAS_100_VERIFICATION_ERROR_CODES": [
|
||||||
|
'X509_V_ERR_DIFFERENT_CRL_SCOPE',
|
||||||
|
'X509_V_ERR_UNSUPPORTED_EXTENSION_FEATURE',
|
||||||
|
'X509_V_ERR_UNNESTED_RESOURCE',
|
||||||
|
'X509_V_ERR_PERMITTED_VIOLATION',
|
||||||
|
'X509_V_ERR_EXCLUDED_VIOLATION',
|
||||||
|
'X509_V_ERR_SUBTREE_MINMAX',
|
||||||
|
'X509_V_ERR_UNSUPPORTED_CONSTRAINT_TYPE',
|
||||||
|
'X509_V_ERR_UNSUPPORTED_CONSTRAINT_SYNTAX',
|
||||||
|
'X509_V_ERR_UNSUPPORTED_NAME_SYNTAX',
|
||||||
|
'X509_V_ERR_CRL_PATH_VALIDATION_ERROR',
|
||||||
|
],
|
||||||
|
"Cryptography_HAS_100_VERIFICATION_PARAMS": [
|
||||||
|
"Cryptography_HAS_100_VERIFICATION_PARAMS",
|
||||||
|
"X509_V_FLAG_EXTENDED_CRL_SUPPORT",
|
||||||
|
"X509_V_FLAG_USE_DELTAS",
|
||||||
|
],
|
||||||
|
"Cryptography_HAS_X509_V_FLAG_CHECK_SS_SIGNATURE": [
|
||||||
|
"X509_V_FLAG_CHECK_SS_SIGNATURE",
|
||||||
|
],
|
||||||
|
"Cryptography_HAS_SET_CERT_CB": [
|
||||||
|
"SSL_CTX_set_cert_cb",
|
||||||
|
"SSL_set_cert_cb",
|
||||||
|
],
|
||||||
|
"Cryptography_HAS_AES_CTR128_ENCRYPT": [
|
||||||
|
"AES_ctr128_encrypt",
|
||||||
|
],
|
||||||
|
"Cryptography_HAS_SSL_ST": [
|
||||||
|
"SSL_ST_BEFORE",
|
||||||
|
"SSL_ST_OK",
|
||||||
|
"SSL_ST_INIT",
|
||||||
|
"SSL_ST_RENEGOTIATE",
|
||||||
|
],
|
||||||
|
"Cryptography_HAS_TLS_ST": [
|
||||||
|
"TLS_ST_BEFORE",
|
||||||
|
"TLS_ST_OK",
|
||||||
|
],
|
||||||
|
"Cryptography_HAS_LOCKING_CALLBACKS": [
|
||||||
|
"CRYPTO_LOCK",
|
||||||
|
"CRYPTO_UNLOCK",
|
||||||
|
"CRYPTO_READ",
|
||||||
|
"CRYPTO_LOCK_SSL",
|
||||||
|
"CRYPTO_lock",
|
||||||
|
]
|
||||||
|
}
|
|
@ -0,0 +1,250 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
import collections
|
||||||
|
import os
|
||||||
|
import threading
|
||||||
|
import types
|
||||||
|
import warnings
|
||||||
|
|
||||||
|
from cryptography.exceptions import InternalError
|
||||||
|
from cryptography.hazmat.bindings._openssl import ffi, lib
|
||||||
|
from cryptography.hazmat.bindings.openssl._conditional import CONDITIONAL_NAMES
|
||||||
|
|
||||||
|
_OpenSSLError = collections.namedtuple("_OpenSSLError",
|
||||||
|
["code", "lib", "func", "reason"])
|
||||||
|
_OpenSSLErrorWithText = collections.namedtuple(
|
||||||
|
"_OpenSSLErrorWithText", ["code", "lib", "func", "reason", "reason_text"]
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _consume_errors(lib):
|
||||||
|
errors = []
|
||||||
|
while True:
|
||||||
|
code = lib.ERR_get_error()
|
||||||
|
if code == 0:
|
||||||
|
break
|
||||||
|
|
||||||
|
err_lib = lib.ERR_GET_LIB(code)
|
||||||
|
err_func = lib.ERR_GET_FUNC(code)
|
||||||
|
err_reason = lib.ERR_GET_REASON(code)
|
||||||
|
|
||||||
|
errors.append(_OpenSSLError(code, err_lib, err_func, err_reason))
|
||||||
|
|
||||||
|
return errors
|
||||||
|
|
||||||
|
|
||||||
|
def _openssl_assert(lib, ok):
|
||||||
|
if not ok:
|
||||||
|
errors = _consume_errors(lib)
|
||||||
|
errors_with_text = []
|
||||||
|
for err in errors:
|
||||||
|
err_text_reason = ffi.string(
|
||||||
|
lib.ERR_error_string(err.code, ffi.NULL)
|
||||||
|
)
|
||||||
|
errors_with_text.append(
|
||||||
|
_OpenSSLErrorWithText(
|
||||||
|
err.code, err.lib, err.func, err.reason, err_text_reason
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
raise InternalError(
|
||||||
|
"Unknown OpenSSL error. This error is commonly encountered when "
|
||||||
|
"another library is not cleaning up the OpenSSL error stack. If "
|
||||||
|
"you are using cryptography with another library that uses "
|
||||||
|
"OpenSSL try disabling it before reporting a bug. Otherwise "
|
||||||
|
"please file an issue at https://github.com/pyca/cryptography/"
|
||||||
|
"issues with information on how to reproduce "
|
||||||
|
"this. ({0!r})".format(errors_with_text),
|
||||||
|
errors_with_text
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def ffi_callback(signature, name, **kwargs):
|
||||||
|
"""Callback dispatcher
|
||||||
|
|
||||||
|
The ffi_callback() dispatcher keeps callbacks compatible between dynamic
|
||||||
|
and static callbacks.
|
||||||
|
"""
|
||||||
|
def wrapper(func):
|
||||||
|
if lib.Cryptography_STATIC_CALLBACKS:
|
||||||
|
# def_extern() returns a decorator that sets the internal
|
||||||
|
# function pointer and returns the original function unmodified.
|
||||||
|
ffi.def_extern(name=name, **kwargs)(func)
|
||||||
|
callback = getattr(lib, name)
|
||||||
|
else:
|
||||||
|
# callback() wraps the function in a cdata function.
|
||||||
|
callback = ffi.callback(signature, **kwargs)(func)
|
||||||
|
return callback
|
||||||
|
return wrapper
|
||||||
|
|
||||||
|
|
||||||
|
@ffi_callback("int (*)(unsigned char *, int)",
|
||||||
|
name="Cryptography_rand_bytes",
|
||||||
|
error=-1)
|
||||||
|
def _osrandom_rand_bytes(buf, size):
|
||||||
|
signed = ffi.cast("char *", buf)
|
||||||
|
result = os.urandom(size)
|
||||||
|
signed[0:size] = result
|
||||||
|
return 1
|
||||||
|
|
||||||
|
|
||||||
|
@ffi_callback("int (*)(void)", name="Cryptography_rand_status")
|
||||||
|
def _osrandom_rand_status():
|
||||||
|
return 1
|
||||||
|
|
||||||
|
|
||||||
|
def build_conditional_library(lib, conditional_names):
|
||||||
|
conditional_lib = types.ModuleType("lib")
|
||||||
|
excluded_names = set()
|
||||||
|
for condition, names in conditional_names.items():
|
||||||
|
if not getattr(lib, condition):
|
||||||
|
excluded_names |= set(names)
|
||||||
|
|
||||||
|
for attr in dir(lib):
|
||||||
|
if attr not in excluded_names:
|
||||||
|
setattr(conditional_lib, attr, getattr(lib, attr))
|
||||||
|
|
||||||
|
return conditional_lib
|
||||||
|
|
||||||
|
|
||||||
|
class Binding(object):
|
||||||
|
"""
|
||||||
|
OpenSSL API wrapper.
|
||||||
|
"""
|
||||||
|
lib = None
|
||||||
|
ffi = ffi
|
||||||
|
_lib_loaded = False
|
||||||
|
_locks = None
|
||||||
|
_lock_cb_handle = None
|
||||||
|
_init_lock = threading.Lock()
|
||||||
|
_lock_init_lock = threading.Lock()
|
||||||
|
|
||||||
|
_osrandom_engine_id = ffi.new("const char[]", b"osrandom")
|
||||||
|
_osrandom_engine_name = ffi.new("const char[]", b"osrandom_engine")
|
||||||
|
_osrandom_method = ffi.new(
|
||||||
|
"RAND_METHOD *",
|
||||||
|
dict(bytes=_osrandom_rand_bytes,
|
||||||
|
pseudorand=_osrandom_rand_bytes,
|
||||||
|
status=_osrandom_rand_status)
|
||||||
|
)
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self._ensure_ffi_initialized()
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def _register_osrandom_engine(cls):
|
||||||
|
_openssl_assert(cls.lib, cls.lib.ERR_peek_error() == 0)
|
||||||
|
|
||||||
|
engine = cls.lib.ENGINE_new()
|
||||||
|
_openssl_assert(cls.lib, engine != cls.ffi.NULL)
|
||||||
|
try:
|
||||||
|
result = cls.lib.ENGINE_set_id(engine, cls._osrandom_engine_id)
|
||||||
|
_openssl_assert(cls.lib, result == 1)
|
||||||
|
result = cls.lib.ENGINE_set_name(engine, cls._osrandom_engine_name)
|
||||||
|
_openssl_assert(cls.lib, result == 1)
|
||||||
|
result = cls.lib.ENGINE_set_RAND(engine, cls._osrandom_method)
|
||||||
|
_openssl_assert(cls.lib, result == 1)
|
||||||
|
result = cls.lib.ENGINE_add(engine)
|
||||||
|
if result != 1:
|
||||||
|
errors = _consume_errors(cls.lib)
|
||||||
|
_openssl_assert(
|
||||||
|
cls.lib,
|
||||||
|
errors[0].reason == cls.lib.ENGINE_R_CONFLICTING_ENGINE_ID
|
||||||
|
)
|
||||||
|
|
||||||
|
finally:
|
||||||
|
result = cls.lib.ENGINE_free(engine)
|
||||||
|
_openssl_assert(cls.lib, result == 1)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def _ensure_ffi_initialized(cls):
|
||||||
|
with cls._init_lock:
|
||||||
|
if not cls._lib_loaded:
|
||||||
|
cls.lib = build_conditional_library(lib, CONDITIONAL_NAMES)
|
||||||
|
cls._lib_loaded = True
|
||||||
|
# initialize the SSL library
|
||||||
|
cls.lib.SSL_library_init()
|
||||||
|
# adds all ciphers/digests for EVP
|
||||||
|
cls.lib.OpenSSL_add_all_algorithms()
|
||||||
|
# loads error strings for libcrypto and libssl functions
|
||||||
|
cls.lib.SSL_load_error_strings()
|
||||||
|
cls._register_osrandom_engine()
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def init_static_locks(cls):
|
||||||
|
with cls._lock_init_lock:
|
||||||
|
cls._ensure_ffi_initialized()
|
||||||
|
|
||||||
|
if not cls._lock_cb_handle:
|
||||||
|
wrapper = ffi_callback(
|
||||||
|
"void(int, int, const char *, int)",
|
||||||
|
name="Cryptography_locking_cb",
|
||||||
|
)
|
||||||
|
cls._lock_cb_handle = wrapper(cls._lock_cb)
|
||||||
|
|
||||||
|
# Use Python's implementation if available, importing _ssl triggers
|
||||||
|
# the setup for this.
|
||||||
|
__import__("_ssl")
|
||||||
|
|
||||||
|
if cls.lib.CRYPTO_get_locking_callback() != cls.ffi.NULL:
|
||||||
|
return
|
||||||
|
|
||||||
|
# If nothing else has setup a locking callback already, we set up
|
||||||
|
# our own
|
||||||
|
num_locks = cls.lib.CRYPTO_num_locks()
|
||||||
|
cls._locks = [threading.Lock() for n in range(num_locks)]
|
||||||
|
|
||||||
|
cls.lib.CRYPTO_set_locking_callback(cls._lock_cb_handle)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def _lock_cb(cls, mode, n, file, line):
|
||||||
|
lock = cls._locks[n]
|
||||||
|
|
||||||
|
if mode & cls.lib.CRYPTO_LOCK:
|
||||||
|
lock.acquire()
|
||||||
|
elif mode & cls.lib.CRYPTO_UNLOCK:
|
||||||
|
lock.release()
|
||||||
|
else:
|
||||||
|
raise RuntimeError(
|
||||||
|
"Unknown lock mode {0}: lock={1}, file={2}, line={3}.".format(
|
||||||
|
mode, n, file, line
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _verify_openssl_version(version):
|
||||||
|
if version < 0x10000000:
|
||||||
|
if os.environ.get("CRYPTOGRAPHY_ALLOW_OPENSSL_098"):
|
||||||
|
warnings.warn(
|
||||||
|
"OpenSSL version 0.9.8 is no longer supported by the OpenSSL "
|
||||||
|
"project, please upgrade. The next version of cryptography "
|
||||||
|
"will completely remove support for it.",
|
||||||
|
DeprecationWarning
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
raise RuntimeError(
|
||||||
|
"You are linking against OpenSSL 0.9.8, which is no longer "
|
||||||
|
"support by the OpenSSL project. You need to upgrade to a "
|
||||||
|
"newer version of OpenSSL."
|
||||||
|
)
|
||||||
|
elif version < 0x10001000:
|
||||||
|
warnings.warn(
|
||||||
|
"OpenSSL versions less than 1.0.1 are no longer supported by the "
|
||||||
|
"OpenSSL project, please upgrade. A future version of "
|
||||||
|
"cryptography will drop support for these versions of OpenSSL.",
|
||||||
|
DeprecationWarning
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# OpenSSL is not thread safe until the locks are initialized. We call this
|
||||||
|
# method in module scope so that it executes with the import lock. On
|
||||||
|
# Pythons < 3.4 this import lock is a global lock, which can prevent a race
|
||||||
|
# condition registering the OpenSSL locks. On Python 3.4+ the import lock
|
||||||
|
# is per module so this approach will not work.
|
||||||
|
Binding.init_static_locks()
|
||||||
|
|
||||||
|
_verify_openssl_version(Binding.lib.SSLeay())
|
|
@ -0,0 +1,5 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
|
@ -0,0 +1,40 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
import abc
|
||||||
|
|
||||||
|
import six
|
||||||
|
|
||||||
|
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
|
||||||
|
class AsymmetricSignatureContext(object):
|
||||||
|
@abc.abstractmethod
|
||||||
|
def update(self, data):
|
||||||
|
"""
|
||||||
|
Processes the provided bytes and returns nothing.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def finalize(self):
|
||||||
|
"""
|
||||||
|
Returns the signature as bytes.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
|
||||||
|
class AsymmetricVerificationContext(object):
|
||||||
|
@abc.abstractmethod
|
||||||
|
def update(self, data):
|
||||||
|
"""
|
||||||
|
Processes the provided bytes and returns nothing.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def verify(self):
|
||||||
|
"""
|
||||||
|
Raises an exception if the bytes provided to update do not match the
|
||||||
|
signature or the signature does not match the public key.
|
||||||
|
"""
|
|
@ -0,0 +1,166 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
import abc
|
||||||
|
|
||||||
|
import six
|
||||||
|
|
||||||
|
from cryptography import utils
|
||||||
|
|
||||||
|
|
||||||
|
class DHPrivateNumbers(object):
|
||||||
|
def __init__(self, x, public_numbers):
|
||||||
|
if not isinstance(x, six.integer_types):
|
||||||
|
raise TypeError("x must be an integer.")
|
||||||
|
|
||||||
|
if not isinstance(public_numbers, DHPublicNumbers):
|
||||||
|
raise TypeError("public_numbers must be an instance of "
|
||||||
|
"DHPublicNumbers.")
|
||||||
|
|
||||||
|
self._x = x
|
||||||
|
self._public_numbers = public_numbers
|
||||||
|
|
||||||
|
def __eq__(self, other):
|
||||||
|
if not isinstance(other, DHPrivateNumbers):
|
||||||
|
return NotImplemented
|
||||||
|
|
||||||
|
return (
|
||||||
|
self._x == other._x and
|
||||||
|
self._public_numbers == other._public_numbers
|
||||||
|
)
|
||||||
|
|
||||||
|
def __ne__(self, other):
|
||||||
|
return not self == other
|
||||||
|
|
||||||
|
public_numbers = utils.read_only_property("_public_numbers")
|
||||||
|
x = utils.read_only_property("_x")
|
||||||
|
|
||||||
|
|
||||||
|
class DHPublicNumbers(object):
|
||||||
|
def __init__(self, y, parameter_numbers):
|
||||||
|
if not isinstance(y, six.integer_types):
|
||||||
|
raise TypeError("y must be an integer.")
|
||||||
|
|
||||||
|
if not isinstance(parameter_numbers, DHParameterNumbers):
|
||||||
|
raise TypeError(
|
||||||
|
"parameters must be an instance of DHParameterNumbers.")
|
||||||
|
|
||||||
|
self._y = y
|
||||||
|
self._parameter_numbers = parameter_numbers
|
||||||
|
|
||||||
|
def __eq__(self, other):
|
||||||
|
if not isinstance(other, DHPublicNumbers):
|
||||||
|
return NotImplemented
|
||||||
|
|
||||||
|
return (
|
||||||
|
self._y == other._y and
|
||||||
|
self._parameter_numbers == other._parameter_numbers
|
||||||
|
)
|
||||||
|
|
||||||
|
def __ne__(self, other):
|
||||||
|
return not self == other
|
||||||
|
|
||||||
|
y = utils.read_only_property("_y")
|
||||||
|
parameter_numbers = utils.read_only_property("_parameter_numbers")
|
||||||
|
|
||||||
|
|
||||||
|
class DHParameterNumbers(object):
|
||||||
|
def __init__(self, p, g):
|
||||||
|
if (
|
||||||
|
not isinstance(p, six.integer_types) or
|
||||||
|
not isinstance(g, six.integer_types)
|
||||||
|
):
|
||||||
|
raise TypeError("p and g must be integers")
|
||||||
|
|
||||||
|
self._p = p
|
||||||
|
self._g = g
|
||||||
|
|
||||||
|
def __eq__(self, other):
|
||||||
|
if not isinstance(other, DHParameterNumbers):
|
||||||
|
return NotImplemented
|
||||||
|
|
||||||
|
return (
|
||||||
|
self._p == other._p and
|
||||||
|
self._g == other._g
|
||||||
|
)
|
||||||
|
|
||||||
|
def __ne__(self, other):
|
||||||
|
return not self == other
|
||||||
|
|
||||||
|
p = utils.read_only_property("_p")
|
||||||
|
g = utils.read_only_property("_g")
|
||||||
|
|
||||||
|
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
|
||||||
|
class DHParameters(object):
|
||||||
|
@abc.abstractmethod
|
||||||
|
def generate_private_key(self):
|
||||||
|
"""
|
||||||
|
Generates and returns a DHPrivateKey.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
|
||||||
|
class DHParametersWithSerialization(DHParameters):
|
||||||
|
@abc.abstractmethod
|
||||||
|
def parameter_numbers(self):
|
||||||
|
"""
|
||||||
|
Returns a DHParameterNumbers.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
|
||||||
|
class DHPrivateKey(object):
|
||||||
|
@abc.abstractproperty
|
||||||
|
def key_size(self):
|
||||||
|
"""
|
||||||
|
The bit length of the prime modulus.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def public_key(self):
|
||||||
|
"""
|
||||||
|
The DHPublicKey associated with this private key.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def parameters(self):
|
||||||
|
"""
|
||||||
|
The DHParameters object associated with this private key.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
|
||||||
|
class DHPrivateKeyWithSerialization(DHPrivateKey):
|
||||||
|
@abc.abstractmethod
|
||||||
|
def private_numbers(self):
|
||||||
|
"""
|
||||||
|
Returns a DHPrivateNumbers.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
|
||||||
|
class DHPublicKey(object):
|
||||||
|
@abc.abstractproperty
|
||||||
|
def key_size(self):
|
||||||
|
"""
|
||||||
|
The bit length of the prime modulus.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abc.abstractmethod
|
||||||
|
def parameters(self):
|
||||||
|
"""
|
||||||
|
The DHParameters object associated with this public key.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
|
||||||
|
class DHPublicKeyWithSerialization(DHPublicKey):
|
||||||
|
@abc.abstractmethod
|
||||||
|
def public_numbers(self):
|
||||||
|
"""
|
||||||
|
Returns a DHPublicNumbers.
|
||||||
|
"""
|
|
@ -0,0 +1,242 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
import abc
|
||||||
|
|
||||||
|
import six
|
||||||
|
|
||||||
|
from cryptography import utils
|
||||||
|
|
||||||
|
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
|
||||||
|
class DSAParameters(object):
|
||||||
|
@abc.abstractmethod
|
||||||
|
def generate_private_key(self):
|
||||||
|
"""
|
||||||
|
Generates and returns a DSAPrivateKey.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
|
||||||
|
class DSAParametersWithNumbers(DSAParameters):
|
||||||
|
@abc.abstractmethod
|
||||||
|
def parameter_numbers(self):
|
||||||
|
"""
|
||||||
|
Returns a DSAParameterNumbers.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
class DSAPrivateKey(object):
    """
    Abstract interface for DSA private keys.
    """

    # Modernized: @property + @abc.abstractmethod replaces the deprecated
    # abc.abstractproperty (deprecated since Python 3.3).
    @property
    @abc.abstractmethod
    def key_size(self):
        """
        The bit length of the prime modulus.
        """

    @abc.abstractmethod
    def public_key(self):
        """
        The DSAPublicKey associated with this private key.
        """

    @abc.abstractmethod
    def parameters(self):
        """
        The DSAParameters object associated with this private key.
        """

    @abc.abstractmethod
    def signer(self, signature_algorithm):
        """
        Returns an AsymmetricSignatureContext used for signing data.
        """
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
class DSAPrivateKeyWithSerialization(DSAPrivateKey):
    """
    DSAPrivateKey extended with raw-number access and serialization.
    """

    @abc.abstractmethod
    def private_numbers(self):
        """
        Returns a DSAPrivateNumbers.
        """

    @abc.abstractmethod
    def private_bytes(self, encoding, format, encryption_algorithm):
        """
        Returns the key serialized as bytes.
        """
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
class DSAPublicKey(object):
    """
    Abstract interface for DSA public keys.
    """

    # Modernized: @property + @abc.abstractmethod replaces the deprecated
    # abc.abstractproperty (deprecated since Python 3.3).
    @property
    @abc.abstractmethod
    def key_size(self):
        """
        The bit length of the prime modulus.
        """

    @abc.abstractmethod
    def parameters(self):
        """
        The DSAParameters object associated with this public key.
        """

    @abc.abstractmethod
    def verifier(self, signature, signature_algorithm):
        """
        Returns an AsymmetricVerificationContext used for verifying
        signatures.
        """
        # DOC FIX: the original docstring said "used for signing data",
        # but a verification context verifies signatures.

    @abc.abstractmethod
    def public_numbers(self):
        """
        Returns a DSAPublicNumbers.
        """

    @abc.abstractmethod
    def public_bytes(self, encoding, format):
        """
        Returns the key serialized as bytes.
        """
|
||||||
|
# Every DSAPublicKey already exposes its numbers/bytes, so the
# "WithSerialization" interface is a plain alias.
DSAPublicKeyWithSerialization = DSAPublicKey
|
||||||
|
|
||||||
|
|
||||||
|
def generate_parameters(key_size, backend):
    """
    Generate fresh DSA domain parameters of *key_size* bits via *backend*.
    """
    return backend.generate_dsa_parameters(key_size)
|
||||||
|
def generate_private_key(key_size, backend):
    """
    Generate fresh parameters and a DSA private key via *backend*.
    """
    return backend.generate_dsa_private_key_and_parameters(key_size)
|
||||||
|
def _check_dsa_parameters(parameters):
    """
    Validate DSA domain parameters (p, q, g) against the FIPS 186 sizes.
    """
    if utils.bit_length(parameters.p) not in (1024, 2048, 3072):
        raise ValueError("p must be exactly 1024, 2048, or 3072 bits long")
    if utils.bit_length(parameters.q) not in (160, 256):
        raise ValueError("q must be exactly 160 or 256 bits long")

    # The generator must lie strictly between 1 and p.
    if not 1 < parameters.g < parameters.p:
        raise ValueError("g, p don't satisfy 1 < g < p.")
|
||||||
|
def _check_dsa_private_numbers(numbers):
    """
    Validate a DSAPrivateNumbers instance for internal consistency.
    """
    params = numbers.public_numbers.parameter_numbers
    _check_dsa_parameters(params)

    if not 0 < numbers.x < params.q:
        raise ValueError("x must be > 0 and < q.")

    # The public value must match the private exponent.
    if pow(params.g, numbers.x, params.p) != numbers.public_numbers.y:
        raise ValueError("y must be equal to (g ** x % p).")
|
||||||
|
class DSAParameterNumbers(object):
    """
    Holds the DSA domain parameters p, q and g as plain integers.
    """

    def __init__(self, p, q, g):
        for value in (p, q, g):
            if not isinstance(value, six.integer_types):
                raise TypeError(
                    "DSAParameterNumbers p, q, and g arguments must be integers."
                )

        self._p = p
        self._q = q
        self._g = g

    p = utils.read_only_property("_p")
    q = utils.read_only_property("_q")
    g = utils.read_only_property("_g")

    def parameters(self, backend):
        """Load these numbers into a backend DSA parameters object."""
        return backend.load_dsa_parameter_numbers(self)

    def __eq__(self, other):
        if not isinstance(other, DSAParameterNumbers):
            return NotImplemented
        return (self.p, self.q, self.g) == (other.p, other.q, other.g)

    def __ne__(self, other):
        return not self == other

    def __repr__(self):
        return (
            "<DSAParameterNumbers(p={0.p}, q={0.q}, g={0.g})>".format(self)
        )
|
|
||||||
|
class DSAPublicNumbers(object):
    """
    Holds the DSA public value y plus its domain parameters.
    """

    def __init__(self, y, parameter_numbers):
        if not isinstance(y, six.integer_types):
            raise TypeError("DSAPublicNumbers y argument must be an integer.")

        if not isinstance(parameter_numbers, DSAParameterNumbers):
            raise TypeError(
                "parameter_numbers must be a DSAParameterNumbers instance."
            )

        self._y = y
        self._parameter_numbers = parameter_numbers

    y = utils.read_only_property("_y")
    parameter_numbers = utils.read_only_property("_parameter_numbers")

    def public_key(self, backend):
        """Load these numbers into a backend DSA public key object."""
        return backend.load_dsa_public_numbers(self)

    def __eq__(self, other):
        if not isinstance(other, DSAPublicNumbers):
            return NotImplemented
        return (
            (self.y, self.parameter_numbers) ==
            (other.y, other.parameter_numbers)
        )

    def __ne__(self, other):
        return not self == other

    def __repr__(self):
        return (
            "<DSAPublicNumbers(y={self.y}, "
            "parameter_numbers={self.parameter_numbers})>".format(self=self)
        )
|
|
||||||
|
class DSAPrivateNumbers(object):
    """
    Holds the DSA private exponent x plus the matching public numbers.
    """

    def __init__(self, x, public_numbers):
        if not isinstance(x, six.integer_types):
            raise TypeError("DSAPrivateNumbers x argument must be an integer.")

        if not isinstance(public_numbers, DSAPublicNumbers):
            raise TypeError(
                "public_numbers must be a DSAPublicNumbers instance."
            )
        self._public_numbers = public_numbers
        self._x = x

    x = utils.read_only_property("_x")
    public_numbers = utils.read_only_property("_public_numbers")

    def private_key(self, backend):
        """Load these numbers into a backend DSA private key object."""
        return backend.load_dsa_private_numbers(self)

    def __eq__(self, other):
        if not isinstance(other, DSAPrivateNumbers):
            return NotImplemented
        return (
            (self.x, self.public_numbers) == (other.x, other.public_numbers)
        )

    def __ne__(self, other):
        return not self == other
|
@ -0,0 +1,352 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
import abc
|
||||||
|
|
||||||
|
import six
|
||||||
|
|
||||||
|
from cryptography import utils
|
||||||
|
|
||||||
|
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
class EllipticCurve(object):
    """
    Abstract interface describing a named elliptic curve.
    """

    # Modernized: @property + @abc.abstractmethod replaces the deprecated
    # abc.abstractproperty (deprecated since Python 3.3).
    @property
    @abc.abstractmethod
    def name(self):
        """
        The name of the curve. e.g. secp256r1.
        """

    @property
    @abc.abstractmethod
    def key_size(self):
        """
        The bit length of the base point of the curve.
        """
|
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
class EllipticCurveSignatureAlgorithm(object):
    """
    Abstract interface for EC signature algorithms (e.g. ECDSA).
    """

    # Modernized: @property + @abc.abstractmethod replaces the deprecated
    # abc.abstractproperty (deprecated since Python 3.3).
    @property
    @abc.abstractmethod
    def algorithm(self):
        """
        The digest algorithm used with this signature.
        """
|
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
class EllipticCurvePrivateKey(object):
    """
    Abstract interface for elliptic curve private keys.
    """

    @abc.abstractmethod
    def signer(self, signature_algorithm):
        """
        Returns an AsymmetricSignatureContext used for signing data.
        """

    @abc.abstractmethod
    def exchange(self, algorithm, peer_public_key):
        """
        Performs a key exchange operation using the provided algorithm with the
        provided peer's public key.
        """

    @abc.abstractmethod
    def public_key(self):
        """
        The EllipticCurvePublicKey for this private key.
        """

    # Modernized: @property + @abc.abstractmethod replaces the deprecated
    # abc.abstractproperty (deprecated since Python 3.3).
    @property
    @abc.abstractmethod
    def curve(self):
        """
        The EllipticCurve that this key is on.
        """
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
class EllipticCurvePrivateKeyWithSerialization(EllipticCurvePrivateKey):
    """
    EllipticCurvePrivateKey extended with raw-number access and
    serialization.
    """

    @abc.abstractmethod
    def private_numbers(self):
        """
        Returns an EllipticCurvePrivateNumbers.
        """

    @abc.abstractmethod
    def private_bytes(self, encoding, format, encryption_algorithm):
        """
        Returns the key serialized as bytes.
        """
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
class EllipticCurvePublicKey(object):
    """
    Abstract interface for elliptic curve public keys.
    """

    @abc.abstractmethod
    def verifier(self, signature, signature_algorithm):
        """
        Returns an AsymmetricVerificationContext used for verifying
        signatures.
        """
        # DOC FIX: the original docstring said "used for signing data",
        # but a verification context verifies signatures.

    # Modernized: @property + @abc.abstractmethod replaces the deprecated
    # abc.abstractproperty (deprecated since Python 3.3).
    @property
    @abc.abstractmethod
    def curve(self):
        """
        The EllipticCurve that this key is on.
        """

    @abc.abstractmethod
    def public_numbers(self):
        """
        Returns an EllipticCurvePublicNumbers.
        """

    @abc.abstractmethod
    def public_bytes(self, encoding, format):
        """
        Returns the key serialized as bytes.
        """
|
||||||
|
# Every EllipticCurvePublicKey already exposes its numbers/bytes, so the
# "WithSerialization" interface is a plain alias.
EllipticCurvePublicKeyWithSerialization = EllipticCurvePublicKey
|
||||||
|
|
||||||
|
|
||||||
|
# Named-curve marker classes. Each provides the EllipticCurve interface
# via two class attributes: the SEC/X9.62 curve name and its bit size.

@utils.register_interface(EllipticCurve)
class SECT571R1(object):
    name = "sect571r1"
    key_size = 571


@utils.register_interface(EllipticCurve)
class SECT409R1(object):
    name = "sect409r1"
    key_size = 409


@utils.register_interface(EllipticCurve)
class SECT283R1(object):
    name = "sect283r1"
    key_size = 283


@utils.register_interface(EllipticCurve)
class SECT233R1(object):
    name = "sect233r1"
    key_size = 233


@utils.register_interface(EllipticCurve)
class SECT163R2(object):
    name = "sect163r2"
    key_size = 163


@utils.register_interface(EllipticCurve)
class SECT571K1(object):
    name = "sect571k1"
    key_size = 571


@utils.register_interface(EllipticCurve)
class SECT409K1(object):
    name = "sect409k1"
    key_size = 409


@utils.register_interface(EllipticCurve)
class SECT283K1(object):
    name = "sect283k1"
    key_size = 283


@utils.register_interface(EllipticCurve)
class SECT233K1(object):
    name = "sect233k1"
    key_size = 233


@utils.register_interface(EllipticCurve)
class SECT163K1(object):
    name = "sect163k1"
    key_size = 163


@utils.register_interface(EllipticCurve)
class SECP521R1(object):
    name = "secp521r1"
    key_size = 521


@utils.register_interface(EllipticCurve)
class SECP384R1(object):
    name = "secp384r1"
    key_size = 384


@utils.register_interface(EllipticCurve)
class SECP256R1(object):
    name = "secp256r1"
    key_size = 256


@utils.register_interface(EllipticCurve)
class SECP256K1(object):
    name = "secp256k1"
    key_size = 256


@utils.register_interface(EllipticCurve)
class SECP224R1(object):
    name = "secp224r1"
    key_size = 224


@utils.register_interface(EllipticCurve)
class SECP192R1(object):
    name = "secp192r1"
    key_size = 192


# Lookup table from curve name (including X9.62 "prime*" aliases) to the
# marker class above.
_CURVE_TYPES = {
    "prime192v1": SECP192R1,
    "prime256v1": SECP256R1,

    "secp192r1": SECP192R1,
    "secp224r1": SECP224R1,
    "secp256r1": SECP256R1,
    "secp384r1": SECP384R1,
    "secp521r1": SECP521R1,
    "secp256k1": SECP256K1,

    "sect163k1": SECT163K1,
    "sect233k1": SECT233K1,
    "sect283k1": SECT283K1,
    "sect409k1": SECT409K1,
    "sect571k1": SECT571K1,

    "sect163r2": SECT163R2,
    "sect233r1": SECT233R1,
    "sect283r1": SECT283R1,
    "sect409r1": SECT409R1,
    "sect571r1": SECT571R1,
}
|
||||||
|
@utils.register_interface(EllipticCurveSignatureAlgorithm)
class ECDSA(object):
    """
    ECDSA signing/verification parameterized by a hash algorithm.
    """

    def __init__(self, algorithm):
        self._algorithm = algorithm

    algorithm = utils.read_only_property("_algorithm")
|
||||||
|
def generate_private_key(curve, backend):
    """
    Generate a new private key on *curve* using *backend*.
    """
    return backend.generate_elliptic_curve_private_key(curve)
|
||||||
|
class EllipticCurvePublicNumbers(object):
    """
    Holds the affine coordinates (x, y) of a public point on *curve*.
    """

    def __init__(self, x, y, curve):
        if (
            not isinstance(x, six.integer_types) or
            not isinstance(y, six.integer_types)
        ):
            raise TypeError("x and y must be integers.")

        if not isinstance(curve, EllipticCurve):
            raise TypeError("curve must provide the EllipticCurve interface.")

        self._y = y
        self._x = x
        self._curve = curve

    def public_key(self, backend):
        """Load these numbers into a backend EC public key object."""
        return backend.load_elliptic_curve_public_numbers(self)

    def encode_point(self):
        """
        Serialize the point in X9.62 uncompressed form (0x04 || X || Y).
        """
        # key_size is in bits. Convert to bytes and round up.
        byte_length = (self.curve.key_size + 7) // 8
        return (
            b'\x04' + utils.int_to_bytes(self.x, byte_length) +
            utils.int_to_bytes(self.y, byte_length)
        )

    @classmethod
    def from_encoded_point(cls, curve, data):
        """
        Parse an X9.62 uncompressed point for *curve*.

        :raises ValueError: if the point is not in uncompressed form or its
            length does not match the curve size.
        """
        if not isinstance(curve, EllipticCurve):
            raise TypeError("curve must be an EllipticCurve instance")

        if data.startswith(b'\x04'):
            # key_size is in bits. Convert to bytes and round up.
            byte_length = (curve.key_size + 7) // 8
            if len(data) == 2 * byte_length + 1:
                x = utils.int_from_bytes(data[1:byte_length + 1], 'big')
                y = utils.int_from_bytes(data[byte_length + 1:], 'big')
                return cls(x, y, curve)
            else:
                raise ValueError('Invalid elliptic curve point data length')
        else:
            raise ValueError('Unsupported elliptic curve point type')

    curve = utils.read_only_property("_curve")
    x = utils.read_only_property("_x")
    y = utils.read_only_property("_y")

    def __eq__(self, other):
        if not isinstance(other, EllipticCurvePublicNumbers):
            return NotImplemented

        return (
            self.x == other.x and
            self.y == other.y and
            self.curve.name == other.curve.name and
            self.curve.key_size == other.curve.key_size
        )

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        return hash((self.x, self.y, self.curve.name, self.curve.key_size))

    def __repr__(self):
        # BUG FIX: the original format string was missing the closing
        # parenthesis after y={0.y}, producing "...y=N>" instead of
        # "...y=N)>".
        return (
            "<EllipticCurvePublicNumbers(curve={0.curve.name}, x={0.x}, "
            "y={0.y})>".format(self)
        )
||||||
|
|
||||||
|
class EllipticCurvePrivateNumbers(object):
    """
    Holds the secret scalar together with the matching public numbers.
    """

    def __init__(self, private_value, public_numbers):
        if not isinstance(private_value, six.integer_types):
            raise TypeError("private_value must be an integer.")

        if not isinstance(public_numbers, EllipticCurvePublicNumbers):
            raise TypeError(
                "public_numbers must be an EllipticCurvePublicNumbers "
                "instance."
            )

        self._private_value = private_value
        self._public_numbers = public_numbers

    def private_key(self, backend):
        """Load these numbers into a backend EC private key object."""
        return backend.load_elliptic_curve_private_numbers(self)

    private_value = utils.read_only_property("_private_value")
    public_numbers = utils.read_only_property("_public_numbers")

    def __eq__(self, other):
        if not isinstance(other, EllipticCurvePrivateNumbers):
            return NotImplemented
        return (
            (self.private_value, self.public_numbers) ==
            (other.private_value, other.public_numbers)
        )

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        return hash((self.private_value, self.public_numbers))
|
||||||
|
class ECDH(object):
    """
    Marker algorithm for Elliptic Curve Diffie-Hellman key exchange.
    """
|
|
@ -0,0 +1,67 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
import abc
|
||||||
|
|
||||||
|
import six
|
||||||
|
|
||||||
|
from cryptography import utils
|
||||||
|
from cryptography.hazmat.primitives import hashes
|
||||||
|
|
||||||
|
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
class AsymmetricPadding(object):
    """
    Abstract interface for asymmetric padding schemes.
    """

    # Modernized: @property + @abc.abstractmethod replaces the deprecated
    # abc.abstractproperty (deprecated since Python 3.3).
    @property
    @abc.abstractmethod
    def name(self):
        """
        A string naming this padding (e.g. "PSS", "PKCS1").
        """
|
||||||
|
@utils.register_interface(AsymmetricPadding)
class PKCS1v15(object):
    """
    PKCS #1 v1.5 signature padding (EMSA-PKCS1-v1_5).
    """
    name = "EMSA-PKCS1-v1_5"
|
||||||
|
@utils.register_interface(AsymmetricPadding)
class PSS(object):
    """
    Probabilistic Signature Scheme padding (EMSA-PSS).
    """

    # Sentinel requesting the maximum salt length the key allows.
    MAX_LENGTH = object()
    name = "EMSA-PSS"

    def __init__(self, mgf, salt_length):
        self._mgf = mgf

        is_integer = isinstance(salt_length, six.integer_types)
        if not is_integer and salt_length is not self.MAX_LENGTH:
            raise TypeError("salt_length must be an integer.")

        if salt_length is not self.MAX_LENGTH and salt_length < 0:
            raise ValueError("salt_length must be zero or greater.")

        self._salt_length = salt_length
|
||||||
|
@utils.register_interface(AsymmetricPadding)
class OAEP(object):
    """
    Optimal Asymmetric Encryption Padding (EME-OAEP).
    """

    name = "EME-OAEP"

    def __init__(self, mgf, algorithm, label):
        if not isinstance(algorithm, hashes.HashAlgorithm):
            raise TypeError("Expected instance of hashes.HashAlgorithm.")

        self._mgf = mgf
        self._algorithm = algorithm
        self._label = label
|
||||||
|
class MGF1(object):
    """
    Mask generation function MGF1, parameterized by a hash algorithm.
    """

    MAX_LENGTH = object()

    def __init__(self, algorithm):
        if not isinstance(algorithm, hashes.HashAlgorithm):
            raise TypeError("Expected instance of hashes.HashAlgorithm.")

        self._algorithm = algorithm
|
@ -0,0 +1,364 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function

import abc
# ``fractions.gcd`` was deprecated in Python 3.5 and removed in 3.9;
# prefer ``math.gcd`` and keep the old import as a fallback.
try:
    from math import gcd
except ImportError:  # Python < 3.5
    from fractions import gcd

import six

from cryptography import utils
from cryptography.exceptions import UnsupportedAlgorithm, _Reasons
from cryptography.hazmat.backends.interfaces import RSABackend
|
@six.add_metaclass(abc.ABCMeta)
class RSAPrivateKey(object):
    """
    Abstract interface for RSA private keys.
    """

    @abc.abstractmethod
    def signer(self, padding, algorithm):
        """
        Returns an AsymmetricSignatureContext used for signing data.
        """

    @abc.abstractmethod
    def decrypt(self, ciphertext, padding):
        """
        Decrypts the provided ciphertext.
        """

    # Modernized: @property + @abc.abstractmethod replaces the deprecated
    # abc.abstractproperty (deprecated since Python 3.3).
    @property
    @abc.abstractmethod
    def key_size(self):
        """
        The bit length of the public modulus.
        """

    @abc.abstractmethod
    def public_key(self):
        """
        The RSAPublicKey associated with this private key.
        """

    @abc.abstractmethod
    def sign(self, data, padding, algorithm):
        """
        Signs the data.
        """
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
class RSAPrivateKeyWithSerialization(RSAPrivateKey):
    """
    RSAPrivateKey extended with raw-number access and serialization.
    """

    @abc.abstractmethod
    def private_numbers(self):
        """
        Returns an RSAPrivateNumbers.
        """

    @abc.abstractmethod
    def private_bytes(self, encoding, format, encryption_algorithm):
        """
        Returns the key serialized as bytes.
        """
||||||
|
@six.add_metaclass(abc.ABCMeta)
class RSAPublicKey(object):
    """
    Abstract interface for RSA public keys.
    """

    @abc.abstractmethod
    def verifier(self, signature, padding, algorithm):
        """
        Returns an AsymmetricVerificationContext used for verifying signatures.
        """

    @abc.abstractmethod
    def encrypt(self, plaintext, padding):
        """
        Encrypts the given plaintext.
        """

    # Modernized: @property + @abc.abstractmethod replaces the deprecated
    # abc.abstractproperty (deprecated since Python 3.3).
    @property
    @abc.abstractmethod
    def key_size(self):
        """
        The bit length of the public modulus.
        """

    @abc.abstractmethod
    def public_numbers(self):
        """
        Returns an RSAPublicNumbers
        """

    @abc.abstractmethod
    def public_bytes(self, encoding, format):
        """
        Returns the key serialized as bytes.
        """

    @abc.abstractmethod
    def verify(self, signature, data, padding, algorithm):
        """
        Verifies the signature of the data.
        """
||||||
|
# Every RSAPublicKey already exposes its numbers/bytes, so the
# "WithSerialization" interface is a plain alias.
RSAPublicKeyWithSerialization = RSAPublicKey
|
||||||
|
|
||||||
|
|
||||||
|
def generate_private_key(public_exponent, key_size, backend):
    """
    Generate an RSA private key after validating the requested parameters.

    :raises UnsupportedAlgorithm: if *backend* does not implement RSABackend.
    :raises ValueError: if the exponent or key size is unacceptable.
    """
    if not isinstance(backend, RSABackend):
        raise UnsupportedAlgorithm(
            "Backend object does not implement RSABackend.",
            _Reasons.BACKEND_MISSING_INTERFACE
        )

    _verify_rsa_parameters(public_exponent, key_size)
    return backend.generate_rsa_private_key(public_exponent, key_size)
|
||||||
|
def _verify_rsa_parameters(public_exponent, key_size):
|
||||||
|
if public_exponent < 3:
|
||||||
|
raise ValueError("public_exponent must be >= 3.")
|
||||||
|
|
||||||
|
if public_exponent & 1 == 0:
|
||||||
|
raise ValueError("public_exponent must be odd.")
|
||||||
|
|
||||||
|
if key_size < 512:
|
||||||
|
raise ValueError("key_size must be at least 512-bits.")
|
||||||
|
|
||||||
|
|
||||||
|
def _check_private_key_components(p, q, private_exponent, dmp1, dmq1, iqmp,
|
||||||
|
public_exponent, modulus):
|
||||||
|
if modulus < 3:
|
||||||
|
raise ValueError("modulus must be >= 3.")
|
||||||
|
|
||||||
|
if p >= modulus:
|
||||||
|
raise ValueError("p must be < modulus.")
|
||||||
|
|
||||||
|
if q >= modulus:
|
||||||
|
raise ValueError("q must be < modulus.")
|
||||||
|
|
||||||
|
if dmp1 >= modulus:
|
||||||
|
raise ValueError("dmp1 must be < modulus.")
|
||||||
|
|
||||||
|
if dmq1 >= modulus:
|
||||||
|
raise ValueError("dmq1 must be < modulus.")
|
||||||
|
|
||||||
|
if iqmp >= modulus:
|
||||||
|
raise ValueError("iqmp must be < modulus.")
|
||||||
|
|
||||||
|
if private_exponent >= modulus:
|
||||||
|
raise ValueError("private_exponent must be < modulus.")
|
||||||
|
|
||||||
|
if public_exponent < 3 or public_exponent >= modulus:
|
||||||
|
raise ValueError("public_exponent must be >= 3 and < modulus.")
|
||||||
|
|
||||||
|
if public_exponent & 1 == 0:
|
||||||
|
raise ValueError("public_exponent must be odd.")
|
||||||
|
|
||||||
|
if dmp1 & 1 == 0:
|
||||||
|
raise ValueError("dmp1 must be odd.")
|
||||||
|
|
||||||
|
if dmq1 & 1 == 0:
|
||||||
|
raise ValueError("dmq1 must be odd.")
|
||||||
|
|
||||||
|
if p * q != modulus:
|
||||||
|
raise ValueError("p*q must equal modulus.")
|
||||||
|
|
||||||
|
|
||||||
|
def _check_public_key_components(e, n):
|
||||||
|
if n < 3:
|
||||||
|
raise ValueError("n must be >= 3.")
|
||||||
|
|
||||||
|
if e < 3 or e >= n:
|
||||||
|
raise ValueError("e must be >= 3 and < n.")
|
||||||
|
|
||||||
|
if e & 1 == 0:
|
||||||
|
raise ValueError("e must be odd.")
|
||||||
|
|
||||||
|
|
||||||
|
def _modinv(e, m):
|
||||||
|
"""
|
||||||
|
Modular Multiplicative Inverse. Returns x such that: (x*e) mod m == 1
|
||||||
|
"""
|
||||||
|
x1, y1, x2, y2 = 1, 0, 0, 1
|
||||||
|
a, b = e, m
|
||||||
|
while b > 0:
|
||||||
|
q, r = divmod(a, b)
|
||||||
|
xn, yn = x1 - q * x2, y1 - q * y2
|
||||||
|
a, b, x1, y1, x2, y2 = b, r, x2, y2, xn, yn
|
||||||
|
return x1 % m
|
||||||
|
|
||||||
|
|
||||||
|
def rsa_crt_iqmp(p, q):
|
||||||
|
"""
|
||||||
|
Compute the CRT (q ** -1) % p value from RSA primes p and q.
|
||||||
|
"""
|
||||||
|
return _modinv(q, p)
|
||||||
|
|
||||||
|
|
||||||
|
def rsa_crt_dmp1(private_exponent, p):
    """
    Compute the CRT private_exponent % (p - 1) value from the RSA
    private_exponent and p.
    """
    return private_exponent % (p - 1)
|
||||||
|
def rsa_crt_dmq1(private_exponent, q):
    """
    Compute the CRT private_exponent % (q - 1) value from the RSA
    private_exponent and q.
    """
    return private_exponent % (q - 1)
|
||||||
|
# Controls the number of iterations rsa_recover_prime_factors will perform
|
||||||
|
# to obtain the prime factors. Each iteration increments by 2 so the actual
|
||||||
|
# maximum attempts is half this number.
|
||||||
|
_MAX_RECOVERY_ATTEMPTS = 1000
|
||||||
|
|
||||||
|
|
||||||
|
def rsa_recover_prime_factors(n, e, d):
|
||||||
|
"""
|
||||||
|
Compute factors p and q from the private exponent d. We assume that n has
|
||||||
|
no more than two factors. This function is adapted from code in PyCrypto.
|
||||||
|
"""
|
||||||
|
# See 8.2.2(i) in Handbook of Applied Cryptography.
|
||||||
|
ktot = d * e - 1
|
||||||
|
# The quantity d*e-1 is a multiple of phi(n), even,
|
||||||
|
# and can be represented as t*2^s.
|
||||||
|
t = ktot
|
||||||
|
while t % 2 == 0:
|
||||||
|
t = t // 2
|
||||||
|
# Cycle through all multiplicative inverses in Zn.
|
||||||
|
# The algorithm is non-deterministic, but there is a 50% chance
|
||||||
|
# any candidate a leads to successful factoring.
|
||||||
|
# See "Digitalized Signatures and Public Key Functions as Intractable
|
||||||
|
# as Factorization", M. Rabin, 1979
|
||||||
|
spotted = False
|
||||||
|
a = 2
|
||||||
|
while not spotted and a < _MAX_RECOVERY_ATTEMPTS:
|
||||||
|
k = t
|
||||||
|
# Cycle through all values a^{t*2^i}=a^k
|
||||||
|
while k < ktot:
|
||||||
|
cand = pow(a, k, n)
|
||||||
|
# Check if a^k is a non-trivial root of unity (mod n)
|
||||||
|
if cand != 1 and cand != (n - 1) and pow(cand, 2, n) == 1:
|
||||||
|
# We have found a number such that (cand-1)(cand+1)=0 (mod n).
|
||||||
|
# Either of the terms divides n.
|
||||||
|
p = gcd(cand + 1, n)
|
||||||
|
spotted = True
|
||||||
|
break
|
||||||
|
k *= 2
|
||||||
|
# This value was not any good... let's try another!
|
||||||
|
a += 2
|
||||||
|
if not spotted:
|
||||||
|
raise ValueError("Unable to compute factors p and q from exponent d.")
|
||||||
|
# Found !
|
||||||
|
q, r = divmod(n, p)
|
||||||
|
assert r == 0
|
||||||
|
|
||||||
|
return (p, q)
|
||||||
|
|
||||||
|
|
||||||
|
class RSAPrivateNumbers(object):
    """
    Container for the integers making up an RSA private key.
    """

    def __init__(self, p, q, d, dmp1, dmq1, iqmp,
                 public_numbers):
        for component in (p, q, d, dmp1, dmq1, iqmp):
            if not isinstance(component, six.integer_types):
                raise TypeError(
                    "RSAPrivateNumbers p, q, d, dmp1, dmq1, iqmp arguments must"
                    " all be an integers."
                )

        if not isinstance(public_numbers, RSAPublicNumbers):
            raise TypeError(
                "RSAPrivateNumbers public_numbers must be an RSAPublicNumbers"
                " instance."
            )

        self._p = p
        self._q = q
        self._d = d
        self._dmp1 = dmp1
        self._dmq1 = dmq1
        self._iqmp = iqmp
        self._public_numbers = public_numbers

    p = utils.read_only_property("_p")
    q = utils.read_only_property("_q")
    d = utils.read_only_property("_d")
    dmp1 = utils.read_only_property("_dmp1")
    dmq1 = utils.read_only_property("_dmq1")
    iqmp = utils.read_only_property("_iqmp")
    public_numbers = utils.read_only_property("_public_numbers")

    def private_key(self, backend):
        """Load these numbers into a backend RSA private key object."""
        return backend.load_rsa_private_numbers(self)

    def _components(self):
        # Every component, gathered once for comparison and hashing.
        return (self.p, self.q, self.d, self.dmp1, self.dmq1, self.iqmp,
                self.public_numbers)

    def __eq__(self, other):
        if not isinstance(other, RSAPrivateNumbers):
            return NotImplemented
        return self._components() == other._components()

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        return hash(self._components())
|
||||||
|
class RSAPublicNumbers(object):
    """
    Container for the RSA public exponent e and modulus n.
    """

    def __init__(self, e, n):
        if (
            not isinstance(e, six.integer_types) or
            not isinstance(n, six.integer_types)
        ):
            raise TypeError("RSAPublicNumbers arguments must be integers.")

        self._e = e
        self._n = n

    e = utils.read_only_property("_e")
    n = utils.read_only_property("_n")

    def public_key(self, backend):
        """Load these numbers into a backend RSA public key object."""
        return backend.load_rsa_public_numbers(self)

    def __repr__(self):
        return "<RSAPublicNumbers(e={0.e}, n={0.n})>".format(self)

    def __eq__(self, other):
        if not isinstance(other, RSAPublicNumbers):
            return NotImplemented
        return (self.e, self.n) == (other.e, other.n)

    def __ne__(self, other):
        return not self == other

    def __hash__(self):
        return hash((self.e, self.n))
|
@ -0,0 +1,71 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
import warnings
|
||||||
|
|
||||||
|
from pyasn1.codec.der import decoder, encoder
|
||||||
|
from pyasn1.error import PyAsn1Error
|
||||||
|
from pyasn1.type import namedtype, univ
|
||||||
|
|
||||||
|
import six
|
||||||
|
|
||||||
|
from cryptography import utils
|
||||||
|
|
||||||
|
|
||||||
|
class _DSSSigValue(univ.Sequence):
    # ASN.1 schema for a DSA/ECDSA signature value: a DER SEQUENCE of the
    # two signature integers r and s.
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('r', univ.Integer()),
        namedtype.NamedType('s', univ.Integer())
    )
||||||
|
|
||||||
|
|
||||||
|
def decode_rfc6979_signature(signature):
    """Deprecated alias for decode_dss_signature (kept for compatibility)."""
    _message = (
        "decode_rfc6979_signature is deprecated and will "
        "be removed in a future version, use decode_dss_signature instead."
    )
    warnings.warn(_message, utils.DeprecatedIn10, stacklevel=2)
    return decode_dss_signature(signature)
||||||
|
|
||||||
|
|
||||||
|
def decode_dss_signature(signature):
    """Decode a DER-encoded DSS signature into a tuple of ints (r, s).

    Raises ValueError when the bytes are not valid ASN.1 or when trailing
    bytes follow the encoded SEQUENCE.
    """
    try:
        parsed, trailing = decoder.decode(signature, asn1Spec=_DSSSigValue())
    except PyAsn1Error:
        raise ValueError("Invalid signature data. Unable to decode ASN.1")

    if trailing:
        raise ValueError(
            "The signature contains bytes after the end of the ASN.1 sequence."
        )

    return (
        int(parsed.getComponentByName('r')),
        int(parsed.getComponentByName('s')),
    )
|
||||||
|
|
||||||
|
|
||||||
|
def encode_rfc6979_signature(r, s):
    """Deprecated alias for encode_dss_signature (kept for compatibility)."""
    _message = (
        "encode_rfc6979_signature is deprecated and will "
        "be removed in a future version, use encode_dss_signature instead."
    )
    warnings.warn(_message, utils.DeprecatedIn10, stacklevel=2)
    return encode_dss_signature(r, s)
||||||
|
|
||||||
|
|
||||||
|
def encode_dss_signature(r, s):
    """DER-encode the signature integers *r* and *s* as an ASN.1 SEQUENCE."""
    # NOTE: ValueError (not TypeError) is raised here for non-integers;
    # callers rely on this, so the exception type is preserved.
    for value in (r, s):
        if not isinstance(value, six.integer_types):
            raise ValueError("Both r and s must be integers")

    sig = _DSSSigValue()
    sig.setComponentByName('r', r)
    sig.setComponentByName('s', s)
    return encoder.encode(sig)
|
|
@ -0,0 +1,20 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
from cryptography.hazmat.primitives.ciphers.base import (
    AEADCipherContext, AEADEncryptionContext, BlockCipherAlgorithm, Cipher,
    CipherAlgorithm, CipherContext
)


# Public names re-exported by the ciphers package.
__all__ = [
    "Cipher",
    "CipherAlgorithm",
    "BlockCipherAlgorithm",
    "CipherContext",
    "AEADCipherContext",
    "AEADEncryptionContext",
]
|
|
@ -0,0 +1,140 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
from cryptography import utils
|
||||||
|
from cryptography.hazmat.primitives.ciphers import (
|
||||||
|
BlockCipherAlgorithm, CipherAlgorithm
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _verify_key_size(algorithm, key):
|
||||||
|
# Verify that the key size matches the expected key size
|
||||||
|
if len(key) * 8 not in algorithm.key_sizes:
|
||||||
|
raise ValueError("Invalid key size ({0}) for {1}.".format(
|
||||||
|
len(key) * 8, algorithm.name
|
||||||
|
))
|
||||||
|
return key
|
||||||
|
|
||||||
|
|
||||||
|
@utils.register_interface(BlockCipherAlgorithm)
@utils.register_interface(CipherAlgorithm)
class AES(object):
    # AES: 128-bit block cipher accepting 128/192/256-bit keys.
    name = "AES"
    block_size = 128
    key_sizes = frozenset([128, 192, 256])

    def __init__(self, key):
        # Raises ValueError if the key's bit length is not in key_sizes.
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        # Key length in bits, derived from the raw key bytes.
        return len(self.key) * 8


@utils.register_interface(BlockCipherAlgorithm)
@utils.register_interface(CipherAlgorithm)
class Camellia(object):
    # Camellia: 128-bit block cipher accepting 128/192/256-bit keys.
    name = "camellia"
    block_size = 128
    key_sizes = frozenset([128, 192, 256])

    def __init__(self, key):
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        return len(self.key) * 8


@utils.register_interface(BlockCipherAlgorithm)
@utils.register_interface(CipherAlgorithm)
class TripleDES(object):
    # Triple DES: 64-bit block cipher; short keys are expanded below.
    name = "3DES"
    block_size = 64
    key_sizes = frozenset([64, 128, 192])

    def __init__(self, key):
        # Expand shorter keying options to the three-key form:
        # an 8-byte key K becomes K|K|K, a 16-byte key K1|K2 becomes K1|K2|K1.
        if len(key) == 8:
            key += key + key
        elif len(key) == 16:
            key += key[:8]
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        return len(self.key) * 8


@utils.register_interface(BlockCipherAlgorithm)
@utils.register_interface(CipherAlgorithm)
class Blowfish(object):
    # Blowfish: 64-bit block cipher; key may be 32..448 bits in 8-bit steps.
    name = "Blowfish"
    block_size = 64
    key_sizes = frozenset(range(32, 449, 8))

    def __init__(self, key):
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        return len(self.key) * 8


@utils.register_interface(BlockCipherAlgorithm)
@utils.register_interface(CipherAlgorithm)
class CAST5(object):
    # CAST5: 64-bit block cipher; key may be 40..128 bits in 8-bit steps.
    name = "CAST5"
    block_size = 64
    key_sizes = frozenset(range(40, 129, 8))

    def __init__(self, key):
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        return len(self.key) * 8


@utils.register_interface(CipherAlgorithm)
class ARC4(object):
    # RC4 stream cipher; no block_size attribute and no
    # BlockCipherAlgorithm registration.
    name = "RC4"
    key_sizes = frozenset([40, 56, 64, 80, 128, 160, 192, 256])

    def __init__(self, key):
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        return len(self.key) * 8


@utils.register_interface(CipherAlgorithm)
class IDEA(object):
    # NOTE(review): IDEA defines block_size but is only registered as a
    # CipherAlgorithm here (not BlockCipherAlgorithm) — confirm intentional.
    name = "IDEA"
    block_size = 64
    key_sizes = frozenset([128])

    def __init__(self, key):
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        return len(self.key) * 8


@utils.register_interface(BlockCipherAlgorithm)
@utils.register_interface(CipherAlgorithm)
class SEED(object):
    # SEED: 128-bit block cipher with a single 128-bit key size.
    name = "SEED"
    block_size = 128
    key_sizes = frozenset([128])

    def __init__(self, key):
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        return len(self.key) * 8
|
|
@ -0,0 +1,203 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
import abc
|
||||||
|
|
||||||
|
import six
|
||||||
|
|
||||||
|
from cryptography import utils
|
||||||
|
from cryptography.exceptions import (
|
||||||
|
AlreadyFinalized, AlreadyUpdated, NotYetFinalized, UnsupportedAlgorithm,
|
||||||
|
_Reasons
|
||||||
|
)
|
||||||
|
from cryptography.hazmat.backends.interfaces import CipherBackend
|
||||||
|
from cryptography.hazmat.primitives.ciphers import modes
|
||||||
|
|
||||||
|
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
class CipherAlgorithm(object):
    """Abstract interface implemented by all symmetric cipher algorithms."""

    @abc.abstractproperty
    def name(self):
        """
        A string naming this mode (e.g. "AES", "Camellia").
        """

    @abc.abstractproperty
    def key_size(self):
        """
        The size of the key being used as an integer in bits (e.g. 128, 256).
        """


@six.add_metaclass(abc.ABCMeta)
class BlockCipherAlgorithm(object):
    """Abstract interface for cipher algorithms that operate on fixed blocks."""

    @abc.abstractproperty
    def block_size(self):
        """
        The size of a block as an integer in bits (e.g. 64, 128).
        """


@six.add_metaclass(abc.ABCMeta)
class CipherContext(object):
    """Abstract interface for incremental encryption/decryption contexts."""

    @abc.abstractmethod
    def update(self, data):
        """
        Processes the provided bytes through the cipher and returns the results
        as bytes.
        """

    @abc.abstractmethod
    def finalize(self):
        """
        Returns the results of processing the final block as bytes.
        """


@six.add_metaclass(abc.ABCMeta)
class AEADCipherContext(object):
    """Abstract interface for contexts that accept additional
    authenticated data (AAD)."""

    @abc.abstractmethod
    def authenticate_additional_data(self, data):
        """
        Authenticates the provided bytes.
        """


@six.add_metaclass(abc.ABCMeta)
class AEADEncryptionContext(object):
    """Abstract interface for AEAD encryption contexts that expose a tag."""

    @abc.abstractproperty
    def tag(self):
        """
        Returns tag bytes. This is only available after encryption is
        finalized.
        """
|
|
||||||
|
|
||||||
|
class Cipher(object):
    """Binds a cipher algorithm, an operating mode and a backend, and
    produces encryption/decryption contexts on demand.
    """

    def __init__(self, algorithm, mode, backend):
        if not isinstance(backend, CipherBackend):
            raise UnsupportedAlgorithm(
                "Backend object does not implement CipherBackend.",
                _Reasons.BACKEND_MISSING_INTERFACE
            )

        if not isinstance(algorithm, CipherAlgorithm):
            raise TypeError("Expected interface of CipherAlgorithm.")

        if mode is not None:
            # Let the mode reject incompatible algorithms (e.g. wrong IV size).
            mode.validate_for_algorithm(algorithm)

        self.algorithm = algorithm
        self.mode = mode
        self._backend = backend

    def encryptor(self):
        """Return a fresh context for encrypting data with this cipher."""
        if (isinstance(self.mode, modes.ModeWithAuthenticationTag) and
                self.mode.tag is not None):
            raise ValueError(
                "Authentication tag must be None when encrypting."
            )
        ctx = self._backend.create_symmetric_encryption_ctx(
            self.algorithm, self.mode
        )
        return self._wrap_ctx(ctx, encrypt=True)

    def decryptor(self):
        """Return a fresh context for decrypting data with this cipher."""
        if (isinstance(self.mode, modes.ModeWithAuthenticationTag) and
                self.mode.tag is None):
            raise ValueError(
                "Authentication tag must be provided when decrypting."
            )
        ctx = self._backend.create_symmetric_decryption_ctx(
            self.algorithm, self.mode
        )
        return self._wrap_ctx(ctx, encrypt=False)

    def _wrap_ctx(self, ctx, encrypt):
        # AEAD modes get tag-aware wrappers; everything else gets the
        # plain context wrapper.
        if not isinstance(self.mode, modes.ModeWithAuthenticationTag):
            return _CipherContext(ctx)
        if encrypt:
            return _AEADEncryptionContext(ctx)
        return _AEADCipherContext(ctx)
|
||||||
|
|
||||||
|
|
||||||
|
@utils.register_interface(CipherContext)
class _CipherContext(object):
    """Wraps a backend cipher context and enforces the finalize-once rule."""

    def __init__(self, ctx):
        self._ctx = ctx

    def update(self, data):
        """Process *data* through the cipher and return the output bytes."""
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        return self._ctx.update(data)

    def finalize(self):
        """Process the final block and permanently close this context."""
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        result, self._ctx = self._ctx.finalize(), None
        return result
|
||||||
|
|
||||||
|
|
||||||
|
@utils.register_interface(AEADCipherContext)
@utils.register_interface(CipherContext)
class _AEADCipherContext(object):
    """Wraps a backend AEAD context; tracks plaintext and AAD byte counts
    against the mode's limits and enforces AAD-before-data ordering."""

    def __init__(self, ctx):
        self._ctx = ctx
        self._bytes_processed = 0
        self._aad_bytes_processed = 0
        self._tag = None
        self._updated = False

    def update(self, data):
        """Process *data* through the cipher, enforcing the mode's
        per-message plaintext limit."""
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        self._updated = True
        self._bytes_processed += len(data)
        mode = self._ctx._mode
        if self._bytes_processed > mode._MAX_ENCRYPTED_BYTES:
            raise ValueError(
                "{0} has a maximum encrypted byte limit of {1}".format(
                    mode.name, mode._MAX_ENCRYPTED_BYTES
                )
            )

        return self._ctx.update(data)

    def finalize(self):
        """Finish processing; captures the tag before closing the context."""
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        data = self._ctx.finalize()
        self._tag = self._ctx.tag
        self._ctx = None
        return data

    def authenticate_additional_data(self, data):
        """Feed AAD into the MAC; must happen before any update() call."""
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        if self._updated:
            raise AlreadyUpdated("Update has been called on this context.")

        self._aad_bytes_processed += len(data)
        mode = self._ctx._mode
        if self._aad_bytes_processed > mode._MAX_AAD_BYTES:
            raise ValueError(
                "{0} has a maximum AAD byte limit of {1}".format(
                    mode.name, mode._MAX_AAD_BYTES
                )
            )

        self._ctx.authenticate_additional_data(data)
|
||||||
|
|
||||||
|
|
||||||
|
@utils.register_interface(AEADEncryptionContext)
class _AEADEncryptionContext(_AEADCipherContext):
    @property
    def tag(self):
        """Authentication tag bytes; readable only after finalize()."""
        if self._ctx is not None:
            raise NotYetFinalized(
                "You must finalize encryption before getting the tag."
            )
        return self._tag
|
|
@ -0,0 +1,185 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
import abc
|
||||||
|
|
||||||
|
import six
|
||||||
|
|
||||||
|
from cryptography import utils
|
||||||
|
|
||||||
|
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
class Mode(object):
    """Abstract interface implemented by all cipher operating modes."""

    @abc.abstractproperty
    def name(self):
        """
        A string naming this mode (e.g. "ECB", "CBC").
        """

    @abc.abstractmethod
    def validate_for_algorithm(self, algorithm):
        """
        Checks that all the necessary invariants of this (mode, algorithm)
        combination are met.
        """


@six.add_metaclass(abc.ABCMeta)
class ModeWithInitializationVector(object):
    """Abstract interface for modes that carry an initialization vector."""

    @abc.abstractproperty
    def initialization_vector(self):
        """
        The value of the initialization vector for this mode as bytes.
        """


@six.add_metaclass(abc.ABCMeta)
class ModeWithNonce(object):
    """Abstract interface for modes that carry a nonce."""

    @abc.abstractproperty
    def nonce(self):
        """
        The value of the nonce for this mode as bytes.
        """


@six.add_metaclass(abc.ABCMeta)
class ModeWithAuthenticationTag(object):
    """Abstract interface for AEAD modes that carry an authentication tag."""

    @abc.abstractproperty
    def tag(self):
        """
        The value of the tag supplied to the constructor of this mode.
        """
|
||||||
|
|
||||||
|
|
||||||
|
def _check_iv_length(self, algorithm):
|
||||||
|
if len(self.initialization_vector) * 8 != algorithm.block_size:
|
||||||
|
raise ValueError("Invalid IV size ({0}) for {1}.".format(
|
||||||
|
len(self.initialization_vector), self.name
|
||||||
|
))
|
||||||
|
|
||||||
|
|
||||||
|
@utils.register_interface(Mode)
@utils.register_interface(ModeWithInitializationVector)
class CBC(object):
    # Cipher Block Chaining; IV length must equal the algorithm block size
    # (enforced by the shared _check_iv_length validator).
    name = "CBC"

    def __init__(self, initialization_vector):
        if not isinstance(initialization_vector, bytes):
            raise TypeError("initialization_vector must be bytes")

        self._initialization_vector = initialization_vector

    initialization_vector = utils.read_only_property("_initialization_vector")
    validate_for_algorithm = _check_iv_length


@utils.register_interface(Mode)
class ECB(object):
    # Electronic Codebook; carries no IV and performs no validation.
    name = "ECB"

    def validate_for_algorithm(self, algorithm):
        pass


@utils.register_interface(Mode)
@utils.register_interface(ModeWithInitializationVector)
class OFB(object):
    # Output Feedback mode; same IV-length validation as CBC.
    name = "OFB"

    def __init__(self, initialization_vector):
        if not isinstance(initialization_vector, bytes):
            raise TypeError("initialization_vector must be bytes")

        self._initialization_vector = initialization_vector

    initialization_vector = utils.read_only_property("_initialization_vector")
    validate_for_algorithm = _check_iv_length


@utils.register_interface(Mode)
@utils.register_interface(ModeWithInitializationVector)
class CFB(object):
    # Cipher Feedback mode; same IV-length validation as CBC.
    name = "CFB"

    def __init__(self, initialization_vector):
        if not isinstance(initialization_vector, bytes):
            raise TypeError("initialization_vector must be bytes")

        self._initialization_vector = initialization_vector

    initialization_vector = utils.read_only_property("_initialization_vector")
    validate_for_algorithm = _check_iv_length


@utils.register_interface(Mode)
@utils.register_interface(ModeWithInitializationVector)
class CFB8(object):
    # Cipher Feedback mode with 8-bit shift; same IV-length validation.
    name = "CFB8"

    def __init__(self, initialization_vector):
        if not isinstance(initialization_vector, bytes):
            raise TypeError("initialization_vector must be bytes")

        self._initialization_vector = initialization_vector

    initialization_vector = utils.read_only_property("_initialization_vector")
    validate_for_algorithm = _check_iv_length


@utils.register_interface(Mode)
@utils.register_interface(ModeWithNonce)
class CTR(object):
    # Counter mode; nonce length must equal the algorithm block size.
    name = "CTR"

    def __init__(self, nonce):
        if not isinstance(nonce, bytes):
            raise TypeError("nonce must be bytes")

        self._nonce = nonce

    nonce = utils.read_only_property("_nonce")

    def validate_for_algorithm(self, algorithm):
        if len(self.nonce) * 8 != algorithm.block_size:
            raise ValueError("Invalid nonce size ({0}) for {1}.".format(
                len(self.nonce), self.name
            ))
|
||||||
|
|
||||||
|
|
||||||
|
@utils.register_interface(Mode)
@utils.register_interface(ModeWithInitializationVector)
@utils.register_interface(ModeWithAuthenticationTag)
class GCM(object):
    # Galois/Counter Mode: authenticated encryption with optional AAD.
    name = "GCM"
    # Per-message byte limits; enforced elsewhere (_AEADCipherContext reads
    # these attributes when processing data and AAD).
    _MAX_ENCRYPTED_BYTES = (2 ** 39 - 256) // 8
    _MAX_AAD_BYTES = (2 ** 64) // 8

    def __init__(self, initialization_vector, tag=None, min_tag_length=16):
        # len(initialization_vector) must be in [1, 2 ** 64), but it's
        # impossible to actually construct a bytes object that large, so we
        # don't check for it.
        if min_tag_length < 4:
            raise ValueError("min_tag_length must be >= 4")
        # NOTE: the length check runs before the bytes type check below, so
        # a too-short non-bytes sequence raises ValueError, not TypeError.
        if tag is not None and len(tag) < min_tag_length:
            raise ValueError(
                "Authentication tag must be {0} bytes or longer.".format(
                    min_tag_length)
            )

        if not isinstance(initialization_vector, bytes):
            raise TypeError("initialization_vector must be bytes")

        if tag is not None and not isinstance(tag, bytes):
            raise TypeError("tag must be bytes or None")

        self._initialization_vector = initialization_vector
        self._tag = tag

    tag = utils.read_only_property("_tag")
    initialization_vector = utils.read_only_property("_initialization_vector")

    def validate_for_algorithm(self, algorithm):
        # No algorithm-specific validation is performed for GCM here.
        pass
|
|
@ -0,0 +1,66 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
from cryptography import utils
|
||||||
|
from cryptography.exceptions import (
|
||||||
|
AlreadyFinalized, UnsupportedAlgorithm, _Reasons
|
||||||
|
)
|
||||||
|
from cryptography.hazmat.backends.interfaces import CMACBackend
|
||||||
|
from cryptography.hazmat.primitives import ciphers, interfaces
|
||||||
|
|
||||||
|
|
||||||
|
@utils.register_interface(interfaces.MACContext)
class CMAC(object):
    """Cipher-based MAC context over a block cipher algorithm.

    Wraps a backend-provided CMAC implementation and enforces the usual
    finalize-once lifecycle.
    """

    def __init__(self, algorithm, backend, ctx=None):
        if not isinstance(backend, CMACBackend):
            raise UnsupportedAlgorithm(
                "Backend object does not implement CMACBackend.",
                _Reasons.BACKEND_MISSING_INTERFACE
            )

        if not isinstance(algorithm, ciphers.BlockCipherAlgorithm):
            raise TypeError(
                "Expected instance of BlockCipherAlgorithm."
            )
        self._algorithm = algorithm
        self._backend = backend
        if ctx is not None:
            self._ctx = ctx
        else:
            self._ctx = self._backend.create_cmac_ctx(self._algorithm)

    def update(self, data):
        """Feed *data* (bytes) into the MAC computation."""
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        if not isinstance(data, bytes):
            raise TypeError("data must be bytes.")
        self._ctx.update(data)

    def finalize(self):
        """Finish the computation and return the MAC digest bytes."""
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        digest, self._ctx = self._ctx.finalize(), None
        return digest

    def verify(self, signature):
        """Finalize and compare against *signature*; the backend context
        raises on mismatch."""
        if not isinstance(signature, bytes):
            raise TypeError("signature must be bytes.")
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")

        ctx, self._ctx = self._ctx, None
        ctx.verify(signature)

    def copy(self):
        """Return an independent copy of this MAC context."""
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        return CMAC(
            self._algorithm,
            backend=self._backend,
            ctx=self._ctx.copy()
        )
|
|
@ -0,0 +1,26 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
import hmac
|
||||||
|
|
||||||
|
from cryptography.hazmat.bindings._constant_time import lib
|
||||||
|
|
||||||
|
|
||||||
|
if hasattr(hmac, "compare_digest"):
    # Python 2.7.7+/3.3+: delegate to the stdlib's constant-time compare.
    def bytes_eq(a, b):
        """Constant-time equality check between two byte strings."""
        if not (isinstance(a, bytes) and isinstance(b, bytes)):
            raise TypeError("a and b must be bytes.")

        return hmac.compare_digest(a, b)

else:
    # Older interpreters: use the bundled C implementation.
    def bytes_eq(a, b):
        """Constant-time equality check between two byte strings."""
        if not (isinstance(a, bytes) and isinstance(b, bytes)):
            raise TypeError("a and b must be bytes.")

        return lib.Cryptography_constant_time_bytes_eq(
            a, len(a), b, len(b)
        ) == 1
|
|
@ -0,0 +1,163 @@
|
||||||
|
# This file is dual licensed under the terms of the Apache License, Version
|
||||||
|
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
||||||
|
# for complete details.
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function
|
||||||
|
|
||||||
|
import abc
|
||||||
|
|
||||||
|
import six
|
||||||
|
|
||||||
|
from cryptography import utils
|
||||||
|
from cryptography.exceptions import (
|
||||||
|
AlreadyFinalized, UnsupportedAlgorithm, _Reasons
|
||||||
|
)
|
||||||
|
from cryptography.hazmat.backends.interfaces import HashBackend
|
||||||
|
|
||||||
|
|
||||||
|
@six.add_metaclass(abc.ABCMeta)
class HashAlgorithm(object):
    """Abstract interface implemented by all hash algorithms."""

    @abc.abstractproperty
    def name(self):
        """
        A string naming this algorithm (e.g. "sha256", "md5").
        """

    @abc.abstractproperty
    def digest_size(self):
        """
        The size of the resulting digest in bytes.
        """

    @abc.abstractproperty
    def block_size(self):
        """
        The internal block size of the hash algorithm in bytes.
        """


@six.add_metaclass(abc.ABCMeta)
class HashContext(object):
    """Abstract interface for incremental hashing contexts."""

    @abc.abstractproperty
    def algorithm(self):
        """
        A HashAlgorithm that will be used by this context.
        """

    @abc.abstractmethod
    def update(self, data):
        """
        Processes the provided bytes through the hash.
        """

    @abc.abstractmethod
    def finalize(self):
        """
        Finalizes the hash context and returns the hash digest as bytes.
        """

    @abc.abstractmethod
    def copy(self):
        """
        Return a HashContext that is a copy of the current context.
        """
|
||||||
|
|
||||||
|
|
||||||
|
@utils.register_interface(HashContext)
class Hash(object):
    """Incremental hashing context bound to an algorithm and a backend."""

    def __init__(self, algorithm, backend, ctx=None):
        if not isinstance(backend, HashBackend):
            raise UnsupportedAlgorithm(
                "Backend object does not implement HashBackend.",
                _Reasons.BACKEND_MISSING_INTERFACE
            )

        if not isinstance(algorithm, HashAlgorithm):
            raise TypeError("Expected instance of hashes.HashAlgorithm.")
        self._algorithm = algorithm

        self._backend = backend

        if ctx is not None:
            self._ctx = ctx
        else:
            self._ctx = self._backend.create_hash_ctx(self.algorithm)

    algorithm = utils.read_only_property("_algorithm")

    def update(self, data):
        """Feed *data* (bytes) into the hash."""
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        if not isinstance(data, bytes):
            raise TypeError("data must be bytes.")
        self._ctx.update(data)

    def copy(self):
        """Return an independent copy of this hashing context."""
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        return Hash(
            self.algorithm, backend=self._backend, ctx=self._ctx.copy()
        )

    def finalize(self):
        """Finish hashing and return the digest bytes; closes the context."""
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized.")
        digest, self._ctx = self._ctx.finalize(), None
        return digest
|
||||||
|
|
||||||
|
|
||||||
|
@utils.register_interface(HashAlgorithm)
class SHA1(object):
    # SHA-1: 160-bit (20-byte) digest, 64-byte internal block.
    name = "sha1"
    digest_size = 20
    block_size = 64


@utils.register_interface(HashAlgorithm)
class SHA224(object):
    # SHA-224: 28-byte digest, 64-byte internal block.
    name = "sha224"
    digest_size = 28
    block_size = 64


@utils.register_interface(HashAlgorithm)
class SHA256(object):
    # SHA-256: 32-byte digest, 64-byte internal block.
    name = "sha256"
    digest_size = 32
    block_size = 64


@utils.register_interface(HashAlgorithm)
class SHA384(object):
    # SHA-384: 48-byte digest, 128-byte internal block.
    name = "sha384"
    digest_size = 48
    block_size = 128


@utils.register_interface(HashAlgorithm)
class SHA512(object):
    # SHA-512: 64-byte digest, 128-byte internal block.
    name = "sha512"
    digest_size = 64
    block_size = 128


@utils.register_interface(HashAlgorithm)
class RIPEMD160(object):
    # RIPEMD-160: 20-byte digest, 64-byte internal block.
    name = "ripemd160"
    digest_size = 20
    block_size = 64


@utils.register_interface(HashAlgorithm)
class Whirlpool(object):
    # Whirlpool: 64-byte digest, 64-byte internal block.
    name = "whirlpool"
    digest_size = 64
    block_size = 64


@utils.register_interface(HashAlgorithm)
class MD5(object):
    # MD5: 16-byte digest, 64-byte internal block.
    name = "md5"
    digest_size = 16
    block_size = 64
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue