diff --git a/Linux_i686/bin/alembic b/Linux_i686/bin/alembic new file mode 100755 index 0000000..ad61885 --- /dev/null +++ b/Linux_i686/bin/alembic @@ -0,0 +1,10 @@ +#!/usr/bin/python +# EASY-INSTALL-ENTRY-SCRIPT: 'alembic==0.6.5','console_scripts','alembic' +__requires__ = 'alembic==0.6.5' +import sys +from pkg_resources import load_entry_point + +if __name__ == '__main__': + sys.exit( + load_entry_point('alembic==0.6.5', 'console_scripts', 'alembic')() + ) diff --git a/Linux_i686/bin/cftp b/Linux_i686/bin/cftp new file mode 100755 index 0000000..bd7f3db --- /dev/null +++ b/Linux_i686/bin/cftp @@ -0,0 +1,15 @@ +#!/usr/bin/python +# Copyright (c) Twisted Matrix Laboratories. +# See LICENSE for details. + +import sys, os +extra = os.path.dirname(os.path.dirname(sys.argv[0])) +sys.path.insert(0, extra) +try: + import _preamble +except ImportError: + sys.exc_clear() +sys.path.remove(extra) + +from twisted.conch.scripts.cftp import run +run() diff --git a/Linux_i686/bin/ckeygen b/Linux_i686/bin/ckeygen new file mode 100755 index 0000000..bf12fe3 --- /dev/null +++ b/Linux_i686/bin/ckeygen @@ -0,0 +1,15 @@ +#!/usr/bin/python +# Copyright (c) Twisted Matrix Laboratories. +# See LICENSE for details. + +import sys, os +extra = os.path.dirname(os.path.dirname(sys.argv[0])) +sys.path.insert(0, extra) +try: + import _preamble +except ImportError: + sys.exc_clear() +sys.path.remove(extra) + +from twisted.conch.scripts.ckeygen import run +run() diff --git a/Linux_i686/bin/conch b/Linux_i686/bin/conch new file mode 100755 index 0000000..304dd29 --- /dev/null +++ b/Linux_i686/bin/conch @@ -0,0 +1,15 @@ +#!/usr/bin/python +# Copyright (c) Twisted Matrix Laboratories. +# See LICENSE for details. 
+ +import sys, os +extra = os.path.dirname(os.path.dirname(sys.argv[0])) +sys.path.insert(0, extra) +try: + import _preamble +except ImportError: + sys.exc_clear() +sys.path.remove(extra) + +from twisted.conch.scripts.conch import run +run() diff --git a/Linux_i686/bin/edsig b/Linux_i686/bin/edsig new file mode 100755 index 0000000..a7c3974 --- /dev/null +++ b/Linux_i686/bin/edsig @@ -0,0 +1,88 @@ +#!/usr/bin/python + +import os, sys +import ed25519 +from hashlib import sha256 + +def help(): + print """\ +Usage: (ed25519 version %s) + + edsig generate [STEM] + creates keypair, writes to 'STEM.signing.key' and 'STEM.verifying.key' + default is to 'signing.key' and 'verifying.key' + + edsig sign (signing.key|keyfile) message.file + prints signature to stdout + If message.file is "-", reads from stdin. + + edsig verify (verifying.key|keyfile) message.file (signature|sigfile) + prints 'good signature!' or raises exception + If message.file is "-", reads from stdin. + +Key-providing arguments can either be the key itself, or point to a file +containing the key. 
+""" % ed25519.__version__ + +def remove_prefix(prefix, s): + if not s.startswith(prefix): + raise ValueError("no prefix found") + return s[len(prefix):] + +def data_from_arg(arg, prefix, keylen, readable): + if (readable + and arg.startswith(prefix) + and len(remove_prefix(prefix, arg))==keylen): + return arg + if os.path.isfile(arg): + return open(arg,"r").read() + raise ValueError("unable to get data from '%s'" % arg) + +def message_rep(msg_arg): + if msg_arg == "-": + f = sys.stdin + else: + f = open(msg_arg, "rb") + h = sha256() + while True: + data = f.read(16*1024) + if not data: + break + h.update(data) + return h.digest() + +if len(sys.argv) < 2: + help() +elif sys.argv[1] == "generate": + sk,vk = ed25519.create_keypair() + if len(sys.argv) > 2: + sk_outfile = sys.argv[2]+".signing.key" + vk_outfile = sys.argv[2]+".verifying.key" + else: + sk_outfile = "signing.key" + vk_outfile = "verifying.key" + sk_s = sk.to_seed(prefix="sign0-") + vk_s = vk.to_ascii("verf0-", "base32") + open(sk_outfile,"w").write(sk_s) + open(vk_outfile,"w").write(vk_s+"\n") + print "wrote private signing key to", sk_outfile + print "write public verifying key to", vk_outfile +elif sys.argv[1] == "sign": + sk_arg = sys.argv[2] + msg_arg = sys.argv[3] + sk = ed25519.SigningKey(data_from_arg(sk_arg, "sign0-", 52, False), + prefix="sign0-") + sig = sk.sign(message_rep(msg_arg), prefix="sig0-", encoding="base32") + print sig +elif sys.argv[1] == "verify": + vk_arg = sys.argv[2] + msg_arg = sys.argv[3] + sig_arg = sys.argv[4] + vk = ed25519.VerifyingKey(data_from_arg(vk_arg, "verf0-", 52, True), + prefix="verf0-", encoding="base32") + sig = data_from_arg(sig_arg, "sig0-", 103, True) + vk.verify(sig, message_rep(msg_arg), + prefix="sig0-", encoding="base32") # could raise BadSignature + print "good signature!" 
+else: + help() diff --git a/Linux_i686/bin/lore b/Linux_i686/bin/lore new file mode 100755 index 0000000..0b497e3 --- /dev/null +++ b/Linux_i686/bin/lore @@ -0,0 +1,16 @@ +#!/usr/bin/python +# Copyright (c) Twisted Matrix Laboratories. +# See LICENSE for details. + +import sys, os +extra = os.path.dirname(os.path.dirname(sys.argv[0])) +sys.path.insert(0, extra) +try: + import _preamble +except ImportError: + sys.exc_clear() +sys.path.remove(extra) + +from twisted.lore.scripts.lore import run +run() + diff --git a/Linux_i686/bin/mailmail b/Linux_i686/bin/mailmail new file mode 100755 index 0000000..1fc026d --- /dev/null +++ b/Linux_i686/bin/mailmail @@ -0,0 +1,20 @@ +#!/usr/bin/python +# Copyright (c) Twisted Matrix Laboratories. +# See LICENSE for details. + +""" +This script attempts to send some email. +""" + +import sys, os +extra = os.path.dirname(os.path.dirname(sys.argv[0])) +sys.path.insert(0, extra) +try: + import _preamble +except ImportError: + sys.exc_clear() +sys.path.remove(extra) + +from twisted.mail.scripts import mailmail +mailmail.run() + diff --git a/Linux_i686/bin/mako-render b/Linux_i686/bin/mako-render new file mode 100755 index 0000000..62b2386 --- /dev/null +++ b/Linux_i686/bin/mako-render @@ -0,0 +1,46 @@ +#!/usr/bin/python + +def render(data, filename, kw): + from mako.template import Template + from mako.lookup import TemplateLookup + + lookup = TemplateLookup(["."]) + return Template(data, filename, lookup=lookup).render(**kw) + +def varsplit(var): + if "=" not in var: + return (var, "") + return var.split("=", 1) + +def main(argv=None): + from os.path import isfile + from sys import stdin + + if argv is None: + import sys + argv = sys.argv + + from optparse import OptionParser + + parser = OptionParser("usage: %prog [FILENAME]") + parser.add_option("--var", default=[], action="append", + help="variable (can be used multiple times, use name=value)") + + opts, args = parser.parse_args(argv[1:]) + if len(args) not in (0, 1): + 
parser.error("wrong number of arguments") # Will exit + + if (len(args) == 0) or (args[0] == "-"): + fo = stdin + else: + filename = args[0] + if not isfile(filename): + raise SystemExit("error: can't find %s" % filename) + fo = open(filename) + + kw = dict([varsplit(var) for var in opts.var]) + data = fo.read() + print(render(data, filename, kw)) + +if __name__ == "__main__": + main() diff --git a/Linux_i686/bin/manhole b/Linux_i686/bin/manhole new file mode 100755 index 0000000..ff66286 --- /dev/null +++ b/Linux_i686/bin/manhole @@ -0,0 +1,16 @@ +#!/usr/bin/python +# Copyright (c) Twisted Matrix Laboratories. +# See LICENSE for details. + +""" +This script runs GtkManhole, a client for Twisted.Manhole +""" +import sys + +try: + import _preamble +except ImportError: + sys.exc_clear() + +from twisted.scripts import manhole +manhole.run() diff --git a/Linux_i686/bin/pyhtmlizer b/Linux_i686/bin/pyhtmlizer new file mode 100755 index 0000000..430f788 --- /dev/null +++ b/Linux_i686/bin/pyhtmlizer @@ -0,0 +1,12 @@ +#!/usr/bin/python +# Copyright (c) Twisted Matrix Laboratories. +# See LICENSE for details. +import sys + +try: + import _preamble +except ImportError: + sys.exc_clear() + +from twisted.scripts.htmlizer import run +run() diff --git a/Linux_i686/bin/tap2deb b/Linux_i686/bin/tap2deb new file mode 100755 index 0000000..3f90d25 --- /dev/null +++ b/Linux_i686/bin/tap2deb @@ -0,0 +1,16 @@ +#!/usr/bin/python +# Copyright (c) Twisted Matrix Laboratories. +# See LICENSE for details. + +""" +tap2deb +""" +import sys + +try: + import _preamble +except ImportError: + sys.exc_clear() + +from twisted.scripts import tap2deb +tap2deb.run() diff --git a/Linux_i686/bin/tap2rpm b/Linux_i686/bin/tap2rpm new file mode 100755 index 0000000..3667858 --- /dev/null +++ b/Linux_i686/bin/tap2rpm @@ -0,0 +1,19 @@ +#!/usr/bin/python +# Copyright (c) Twisted Matrix Laboratories. +# See LICENSE for details. 
+ +# based off the tap2deb code +# tap2rpm built by Sean Reifschneider, + +""" +tap2rpm +""" +import sys + +try: + import _preamble +except ImportError: + sys.exc_clear() + +from twisted.scripts import tap2rpm +tap2rpm.run() diff --git a/Linux_i686/bin/tapconvert b/Linux_i686/bin/tapconvert new file mode 100755 index 0000000..fb7fe59 --- /dev/null +++ b/Linux_i686/bin/tapconvert @@ -0,0 +1,12 @@ +#!/usr/bin/python +# Copyright (c) Twisted Matrix Laboratories. +# See LICENSE for details. +import sys + +try: + import _preamble +except ImportError: + sys.exc_clear() + +from twisted.scripts.tapconvert import run +run() diff --git a/Linux_i686/bin/tkconch b/Linux_i686/bin/tkconch new file mode 100755 index 0000000..5b123a7 --- /dev/null +++ b/Linux_i686/bin/tkconch @@ -0,0 +1,15 @@ +#!/usr/bin/python +# Copyright (c) Twisted Matrix Laboratories. +# See LICENSE for details. + +import sys, os +extra = os.path.dirname(os.path.dirname(sys.argv[0])) +sys.path.insert(0, extra) +try: + import _preamble +except ImportError: + sys.exc_clear() +sys.path.remove(extra) + +from twisted.conch.scripts.tkconch import run +run() diff --git a/Linux_i686/bin/trial b/Linux_i686/bin/trial new file mode 100755 index 0000000..64a38cf --- /dev/null +++ b/Linux_i686/bin/trial @@ -0,0 +1,18 @@ +#!/usr/bin/python +# Copyright (c) Twisted Matrix Laboratories. +# See LICENSE for details. +import os, sys + +try: + import _preamble +except ImportError: + sys.exc_clear() + +# begin chdir armor +sys.path[:] = map(os.path.abspath, sys.path) +# end chdir armor + +sys.path.insert(0, os.path.abspath(os.getcwd())) + +from twisted.scripts.trial import run +run() diff --git a/Linux_i686/bin/twistd b/Linux_i686/bin/twistd new file mode 100755 index 0000000..8cf908d --- /dev/null +++ b/Linux_i686/bin/twistd @@ -0,0 +1,14 @@ +#!/usr/bin/python +# Copyright (c) Twisted Matrix Laboratories. +# See LICENSE for details. 
+import os, sys + +try: + import _preamble +except ImportError: + sys.exc_clear() + +sys.path.insert(0, os.path.abspath(os.getcwd())) + +from twisted.scripts.twistd import run +run() diff --git a/Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/PKG-INFO b/Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/PKG-INFO new file mode 100644 index 0000000..1ff4f1a --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/PKG-INFO @@ -0,0 +1,58 @@ +Metadata-Version: 1.1 +Name: Flask +Version: 0.10.1 +Summary: A microframework based on Werkzeug, Jinja2 and good intentions +Home-page: http://github.com/mitsuhiko/flask/ +Author: Armin Ronacher +Author-email: armin.ronacher@active-4.com +License: BSD +Description: + Flask + ----- + + Flask is a microframework for Python based on Werkzeug, Jinja 2 and good + intentions. And before you ask: It's BSD licensed! + + Flask is Fun + ```````````` + + .. code:: python + + from flask import Flask + app = Flask(__name__) + + @app.route("/") + def hello(): + return "Hello World!" + + if __name__ == "__main__": + app.run() + + And Easy to Setup + ````````````````` + + .. 
code:: bash + + $ pip install Flask + $ python hello.py + * Running on http://localhost:5000/ + + Links + ````` + + * `website `_ + * `documentation `_ + * `development version + `_ + + +Platform: any +Classifier: Development Status :: 4 - Beta +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Software Development :: Libraries :: Python Modules diff --git a/Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/SOURCES.txt b/Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/SOURCES.txt new file mode 100644 index 0000000..e326cfc --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/SOURCES.txt @@ -0,0 +1,238 @@ +AUTHORS +CHANGES +LICENSE +MANIFEST.in +Makefile +README +run-tests.py +setup.cfg +setup.py +Flask.egg-info/PKG-INFO +Flask.egg-info/SOURCES.txt +Flask.egg-info/dependency_links.txt +Flask.egg-info/not-zip-safe +Flask.egg-info/requires.txt +Flask.egg-info/top_level.txt +artwork/.DS_Store +artwork/LICENSE +artwork/logo-full.svg +docs/.gitignore +docs/Makefile +docs/advanced_foreword.rst +docs/api.rst +docs/appcontext.rst +docs/becomingbig.rst +docs/blueprints.rst +docs/changelog.rst +docs/conf.py +docs/config.rst +docs/contents.rst.inc +docs/design.rst +docs/errorhandling.rst +docs/extensiondev.rst +docs/extensions.rst +docs/flaskdocext.py +docs/flaskext.py +docs/flaskstyle.sty +docs/foreword.rst +docs/htmlfaq.rst +docs/index.rst +docs/installation.rst +docs/latexindex.rst +docs/license.rst +docs/logo.pdf +docs/make.bat +docs/python3.rst +docs/quickstart.rst +docs/reqcontext.rst +docs/security.rst +docs/shell.rst +docs/signals.rst +docs/styleguide.rst +docs/templating.rst +docs/testing.rst 
+docs/unicode.rst +docs/upgrading.rst +docs/views.rst +docs/_static/debugger.png +docs/_static/flask.png +docs/_static/flaskr.png +docs/_static/logo-full.png +docs/_static/no.png +docs/_static/touch-icon.png +docs/_static/yes.png +docs/_templates/sidebarintro.html +docs/_templates/sidebarlogo.html +docs/_themes/.git +docs/_themes/.gitignore +docs/_themes/LICENSE +docs/_themes/README +docs/_themes/flask_theme_support.py +docs/_themes/flask/layout.html +docs/_themes/flask/relations.html +docs/_themes/flask/theme.conf +docs/_themes/flask/static/flasky.css_t +docs/_themes/flask/static/small_flask.css +docs/_themes/flask_small/layout.html +docs/_themes/flask_small/theme.conf +docs/_themes/flask_small/static/flasky.css_t +docs/deploying/cgi.rst +docs/deploying/fastcgi.rst +docs/deploying/index.rst +docs/deploying/mod_wsgi.rst +docs/deploying/uwsgi.rst +docs/deploying/wsgi-standalone.rst +docs/patterns/apierrors.rst +docs/patterns/appdispatch.rst +docs/patterns/appfactories.rst +docs/patterns/caching.rst +docs/patterns/celery.rst +docs/patterns/deferredcallbacks.rst +docs/patterns/distribute.rst +docs/patterns/errorpages.rst +docs/patterns/fabric.rst +docs/patterns/favicon.rst +docs/patterns/fileuploads.rst +docs/patterns/flashing.rst +docs/patterns/index.rst +docs/patterns/jquery.rst +docs/patterns/lazyloading.rst +docs/patterns/methodoverrides.rst +docs/patterns/mongokit.rst +docs/patterns/packages.rst +docs/patterns/requestchecksum.rst +docs/patterns/sqlalchemy.rst +docs/patterns/sqlite3.rst +docs/patterns/streaming.rst +docs/patterns/templateinheritance.rst +docs/patterns/urlprocessors.rst +docs/patterns/viewdecorators.rst +docs/patterns/wtforms.rst +docs/tutorial/css.rst +docs/tutorial/dbcon.rst +docs/tutorial/dbinit.rst +docs/tutorial/folders.rst +docs/tutorial/index.rst +docs/tutorial/introduction.rst +docs/tutorial/schema.rst +docs/tutorial/setup.rst +docs/tutorial/templates.rst +docs/tutorial/testing.rst +docs/tutorial/views.rst +examples/.DS_Store 
+examples/blueprintexample/blueprintexample.py +examples/blueprintexample/blueprintexample_test.py +examples/blueprintexample/simple_page/__init__.py +examples/blueprintexample/simple_page/simple_page.py +examples/blueprintexample/simple_page/templates/pages/hello.html +examples/blueprintexample/simple_page/templates/pages/index.html +examples/blueprintexample/simple_page/templates/pages/layout.html +examples/blueprintexample/simple_page/templates/pages/world.html +examples/flaskr/README +examples/flaskr/flaskr.py +examples/flaskr/flaskr_tests.py +examples/flaskr/schema.sql +examples/flaskr/static/style.css +examples/flaskr/templates/layout.html +examples/flaskr/templates/login.html +examples/flaskr/templates/show_entries.html +examples/jqueryexample/jqueryexample.py +examples/jqueryexample/templates/index.html +examples/jqueryexample/templates/layout.html +examples/minitwit/README +examples/minitwit/minitwit.py +examples/minitwit/minitwit_tests.py +examples/minitwit/schema.sql +examples/minitwit/static/style.css +examples/minitwit/templates/layout.html +examples/minitwit/templates/login.html +examples/minitwit/templates/register.html +examples/minitwit/templates/timeline.html +examples/persona/.DS_Store +examples/persona/persona.py +examples/persona/static/.DS_Store +examples/persona/static/persona.js +examples/persona/static/spinner.png +examples/persona/static/style.css +examples/persona/templates/index.html +examples/persona/templates/layout.html +flask/__init__.py +flask/_compat.py +flask/app.py +flask/blueprints.py +flask/config.py +flask/ctx.py +flask/debughelpers.py +flask/exthook.py +flask/globals.py +flask/helpers.py +flask/json.py +flask/logging.py +flask/module.py +flask/sessions.py +flask/signals.py +flask/templating.py +flask/testing.py +flask/views.py +flask/wrappers.py +flask/ext/__init__.py +flask/testsuite/__init__.py +flask/testsuite/appctx.py +flask/testsuite/basic.py +flask/testsuite/blueprints.py +flask/testsuite/config.py 
+flask/testsuite/deprecations.py +flask/testsuite/examples.py +flask/testsuite/ext.py +flask/testsuite/helpers.py +flask/testsuite/regression.py +flask/testsuite/reqctx.py +flask/testsuite/signals.py +flask/testsuite/subclassing.py +flask/testsuite/templating.py +flask/testsuite/testing.py +flask/testsuite/views.py +flask/testsuite/static/index.html +flask/testsuite/templates/_macro.html +flask/testsuite/templates/context_template.html +flask/testsuite/templates/escaping_template.html +flask/testsuite/templates/mail.txt +flask/testsuite/templates/simple_template.html +flask/testsuite/templates/template_filter.html +flask/testsuite/templates/template_test.html +flask/testsuite/templates/nested/nested.txt +flask/testsuite/test_apps/config_module_app.py +flask/testsuite/test_apps/flask_newext_simple.py +flask/testsuite/test_apps/importerror.py +flask/testsuite/test_apps/main_app.py +flask/testsuite/test_apps/blueprintapp/__init__.py +flask/testsuite/test_apps/blueprintapp/apps/__init__.py +flask/testsuite/test_apps/blueprintapp/apps/admin/__init__.py +flask/testsuite/test_apps/blueprintapp/apps/admin/static/test.txt +flask/testsuite/test_apps/blueprintapp/apps/admin/static/css/test.css +flask/testsuite/test_apps/blueprintapp/apps/admin/templates/admin/index.html +flask/testsuite/test_apps/blueprintapp/apps/frontend/__init__.py +flask/testsuite/test_apps/blueprintapp/apps/frontend/templates/frontend/index.html +flask/testsuite/test_apps/config_package_app/__init__.py +flask/testsuite/test_apps/flask_broken/__init__.py +flask/testsuite/test_apps/flask_broken/b.py +flask/testsuite/test_apps/flask_newext_package/__init__.py +flask/testsuite/test_apps/flask_newext_package/submodule.py +flask/testsuite/test_apps/flaskext/__init__.py +flask/testsuite/test_apps/flaskext/oldext_simple.py +flask/testsuite/test_apps/flaskext/oldext_package/__init__.py +flask/testsuite/test_apps/flaskext/oldext_package/submodule.py 
+flask/testsuite/test_apps/lib/python2.5/site-packages/SiteEgg.egg +flask/testsuite/test_apps/lib/python2.5/site-packages/site_app.py +flask/testsuite/test_apps/lib/python2.5/site-packages/site_package/__init__.py +flask/testsuite/test_apps/moduleapp/__init__.py +flask/testsuite/test_apps/moduleapp/apps/__init__.py +flask/testsuite/test_apps/moduleapp/apps/admin/__init__.py +flask/testsuite/test_apps/moduleapp/apps/admin/static/test.txt +flask/testsuite/test_apps/moduleapp/apps/admin/static/css/test.css +flask/testsuite/test_apps/moduleapp/apps/admin/templates/index.html +flask/testsuite/test_apps/moduleapp/apps/frontend/__init__.py +flask/testsuite/test_apps/moduleapp/apps/frontend/templates/index.html +flask/testsuite/test_apps/path/installed_package/__init__.py +flask/testsuite/test_apps/subdomaintestmodule/__init__.py +flask/testsuite/test_apps/subdomaintestmodule/static/hello.txt \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/dependency_links.txt b/Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/installed-files.txt b/Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/installed-files.txt new file mode 100644 index 0000000..5b4a661 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/installed-files.txt @@ -0,0 +1,148 @@ +../flask/wrappers.py +../flask/_compat.py +../flask/templating.py +../flask/helpers.py +../flask/ctx.py +../flask/views.py +../flask/sessions.py +../flask/blueprints.py +../flask/json.py +../flask/module.py +../flask/signals.py +../flask/logging.py +../flask/globals.py +../flask/__init__.py +../flask/debughelpers.py +../flask/testing.py +../flask/config.py +../flask/app.py 
+../flask/exthook.py +../flask/ext/__init__.py +../flask/testsuite/deprecations.py +../flask/testsuite/regression.py +../flask/testsuite/ext.py +../flask/testsuite/templating.py +../flask/testsuite/helpers.py +../flask/testsuite/views.py +../flask/testsuite/blueprints.py +../flask/testsuite/subclassing.py +../flask/testsuite/signals.py +../flask/testsuite/examples.py +../flask/testsuite/reqctx.py +../flask/testsuite/__init__.py +../flask/testsuite/basic.py +../flask/testsuite/testing.py +../flask/testsuite/config.py +../flask/testsuite/appctx.py +../flask/testsuite/static/index.html +../flask/testsuite/templates/_macro.html +../flask/testsuite/templates/context_template.html +../flask/testsuite/templates/escaping_template.html +../flask/testsuite/templates/mail.txt +../flask/testsuite/templates/simple_template.html +../flask/testsuite/templates/template_filter.html +../flask/testsuite/templates/template_test.html +../flask/testsuite/templates/nested/nested.txt +../flask/testsuite/test_apps/config_module_app.py +../flask/testsuite/test_apps/flask_newext_simple.py +../flask/testsuite/test_apps/importerror.py +../flask/testsuite/test_apps/main_app.py +../flask/testsuite/test_apps/blueprintapp/__init__.py +../flask/testsuite/test_apps/blueprintapp/apps/__init__.py +../flask/testsuite/test_apps/blueprintapp/apps/admin/__init__.py +../flask/testsuite/test_apps/blueprintapp/apps/admin/static/test.txt +../flask/testsuite/test_apps/blueprintapp/apps/admin/static/css/test.css +../flask/testsuite/test_apps/blueprintapp/apps/admin/templates/admin/index.html +../flask/testsuite/test_apps/blueprintapp/apps/frontend/__init__.py +../flask/testsuite/test_apps/blueprintapp/apps/frontend/templates/frontend/index.html +../flask/testsuite/test_apps/config_package_app/__init__.py +../flask/testsuite/test_apps/flask_broken/__init__.py +../flask/testsuite/test_apps/flask_broken/b.py +../flask/testsuite/test_apps/flask_newext_package/__init__.py 
+../flask/testsuite/test_apps/flask_newext_package/submodule.py +../flask/testsuite/test_apps/flaskext/__init__.py +../flask/testsuite/test_apps/flaskext/oldext_simple.py +../flask/testsuite/test_apps/flaskext/oldext_package/__init__.py +../flask/testsuite/test_apps/flaskext/oldext_package/submodule.py +../flask/testsuite/test_apps/lib/python2.5/site-packages/SiteEgg.egg +../flask/testsuite/test_apps/lib/python2.5/site-packages/site_app.py +../flask/testsuite/test_apps/lib/python2.5/site-packages/site_package/__init__.py +../flask/testsuite/test_apps/moduleapp/__init__.py +../flask/testsuite/test_apps/moduleapp/apps/__init__.py +../flask/testsuite/test_apps/moduleapp/apps/admin/__init__.py +../flask/testsuite/test_apps/moduleapp/apps/admin/static/test.txt +../flask/testsuite/test_apps/moduleapp/apps/admin/static/css/test.css +../flask/testsuite/test_apps/moduleapp/apps/admin/templates/index.html +../flask/testsuite/test_apps/moduleapp/apps/frontend/__init__.py +../flask/testsuite/test_apps/moduleapp/apps/frontend/templates/index.html +../flask/testsuite/test_apps/path/installed_package/__init__.py +../flask/testsuite/test_apps/subdomaintestmodule/__init__.py +../flask/testsuite/test_apps/subdomaintestmodule/static/hello.txt +../flask/wrappers.pyc +../flask/_compat.pyc +../flask/templating.pyc +../flask/helpers.pyc +../flask/ctx.pyc +../flask/views.pyc +../flask/sessions.pyc +../flask/blueprints.pyc +../flask/json.pyc +../flask/module.pyc +../flask/signals.pyc +../flask/logging.pyc +../flask/globals.pyc +../flask/__init__.pyc +../flask/debughelpers.pyc +../flask/testing.pyc +../flask/config.pyc +../flask/app.pyc +../flask/exthook.pyc +../flask/ext/__init__.pyc +../flask/testsuite/deprecations.pyc +../flask/testsuite/regression.pyc +../flask/testsuite/ext.pyc +../flask/testsuite/templating.pyc +../flask/testsuite/helpers.pyc +../flask/testsuite/views.pyc +../flask/testsuite/blueprints.pyc +../flask/testsuite/subclassing.pyc +../flask/testsuite/signals.pyc 
+../flask/testsuite/examples.pyc +../flask/testsuite/reqctx.pyc +../flask/testsuite/__init__.pyc +../flask/testsuite/basic.pyc +../flask/testsuite/testing.pyc +../flask/testsuite/config.pyc +../flask/testsuite/appctx.pyc +../flask/testsuite/test_apps/config_module_app.pyc +../flask/testsuite/test_apps/flask_newext_simple.pyc +../flask/testsuite/test_apps/importerror.pyc +../flask/testsuite/test_apps/main_app.pyc +../flask/testsuite/test_apps/blueprintapp/__init__.pyc +../flask/testsuite/test_apps/blueprintapp/apps/__init__.pyc +../flask/testsuite/test_apps/blueprintapp/apps/admin/__init__.pyc +../flask/testsuite/test_apps/blueprintapp/apps/frontend/__init__.pyc +../flask/testsuite/test_apps/config_package_app/__init__.pyc +../flask/testsuite/test_apps/flask_broken/__init__.pyc +../flask/testsuite/test_apps/flask_broken/b.pyc +../flask/testsuite/test_apps/flask_newext_package/__init__.pyc +../flask/testsuite/test_apps/flask_newext_package/submodule.pyc +../flask/testsuite/test_apps/flaskext/__init__.pyc +../flask/testsuite/test_apps/flaskext/oldext_simple.pyc +../flask/testsuite/test_apps/flaskext/oldext_package/__init__.pyc +../flask/testsuite/test_apps/flaskext/oldext_package/submodule.pyc +../flask/testsuite/test_apps/lib/python2.5/site-packages/site_app.pyc +../flask/testsuite/test_apps/lib/python2.5/site-packages/site_package/__init__.pyc +../flask/testsuite/test_apps/moduleapp/__init__.pyc +../flask/testsuite/test_apps/moduleapp/apps/__init__.pyc +../flask/testsuite/test_apps/moduleapp/apps/admin/__init__.pyc +../flask/testsuite/test_apps/moduleapp/apps/frontend/__init__.pyc +../flask/testsuite/test_apps/path/installed_package/__init__.pyc +../flask/testsuite/test_apps/subdomaintestmodule/__init__.pyc +./ +requires.txt +SOURCES.txt +dependency_links.txt +PKG-INFO +not-zip-safe +top_level.txt diff --git a/Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/not-zip-safe b/Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/not-zip-safe new file 
mode 100644 index 0000000..8b13789 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/not-zip-safe @@ -0,0 +1 @@ + diff --git a/Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/requires.txt b/Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/requires.txt new file mode 100644 index 0000000..a7281e1 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/requires.txt @@ -0,0 +1,3 @@ +Werkzeug>=0.7 +Jinja2>=2.4 +itsdangerous>=0.21 \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/top_level.txt b/Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/top_level.txt new file mode 100644 index 0000000..7e10602 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/top_level.txt @@ -0,0 +1 @@ +flask diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/PKG-INFO b/Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/PKG-INFO new file mode 100644 index 0000000..e35f909 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/PKG-INFO @@ -0,0 +1,22 @@ +Metadata-Version: 1.1 +Name: Flask-Migrate +Version: 1.2.0 +Summary: SQLAlchemy database migrations for Flask applications using Alembic +Home-page: http://github.com/miguelgrinberg/flask-migrate/ +Author: Miguel Grinberg +Author-email: miguelgrinberg50@gmail.com +License: MIT +Description: + Flask-Migrate + -------------- + + SQLAlchemy database migrations for Flask applications using Alembic. 
+ +Platform: any +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Software Development :: Libraries :: Python Modules diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/SOURCES.txt b/Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/SOURCES.txt new file mode 100644 index 0000000..e0e8111 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/SOURCES.txt @@ -0,0 +1,24 @@ +LICENSE +MANIFEST.in +README.md +setup.cfg +setup.py +Flask_Migrate.egg-info/PKG-INFO +Flask_Migrate.egg-info/SOURCES.txt +Flask_Migrate.egg-info/dependency_links.txt +Flask_Migrate.egg-info/not-zip-safe +Flask_Migrate.egg-info/requires.txt +Flask_Migrate.egg-info/top_level.txt +flask_migrate/__init__.py +flask_migrate/templates/flask/README +flask_migrate/templates/flask/alembic.ini.mako +flask_migrate/templates/flask/env.py +flask_migrate/templates/flask/script.py.mako +tests/__init__.py +tests/__init__.pyc +tests/app.py +tests/app.pyc +tests/app2.py +tests/test_migrate.py +tests/test_migrate.pyc +tests/test_migrate_custom_directory.py \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/dependency_links.txt b/Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/installed-files.txt b/Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/installed-files.txt new file mode 100644 index 0000000..673345e --- 
/dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/installed-files.txt @@ -0,0 +1,14 @@ +../flask_migrate/__init__.py +../flask_migrate/templates/flask/README +../flask_migrate/templates/flask/alembic.ini.mako +../flask_migrate/templates/flask/env.py +../flask_migrate/templates/flask/script.py.mako +../flask_migrate/__init__.pyc +../flask_migrate/templates/flask/env.pyc +./ +requires.txt +SOURCES.txt +dependency_links.txt +PKG-INFO +not-zip-safe +top_level.txt diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/not-zip-safe b/Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/not-zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/not-zip-safe @@ -0,0 +1 @@ + diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/requires.txt b/Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/requires.txt new file mode 100644 index 0000000..0426413 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/requires.txt @@ -0,0 +1,4 @@ +Flask>=0.9 +Flask-SQLAlchemy>=1.0 +alembic>=0.6 +Flask-Script>=0.6 \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/top_level.txt b/Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/top_level.txt new file mode 100644 index 0000000..0652762 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/top_level.txt @@ -0,0 +1 @@ +flask_migrate diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/PKG-INFO b/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/PKG-INFO new file mode 100644 index 0000000..45e77d9 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/PKG-INFO @@ -0,0 +1,30 @@ +Metadata-Version: 1.1 +Name: 
Flask-SQLAlchemy +Version: 1.0 +Summary: Adds SQLAlchemy support to your Flask application +Home-page: http://github.com/mitsuhiko/flask-sqlalchemy +Author: Armin Ronacher +Author-email: armin.ronacher@active-4.com +License: BSD +Description: + Flask-SQLAlchemy + ---------------- + + Adds SQLAlchemy support to your Flask application. + + Links + ````` + + * `documentation `_ + * `development version + `_ + + +Platform: any +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Software Development :: Libraries :: Python Modules diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/SOURCES.txt b/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/SOURCES.txt new file mode 100644 index 0000000..298e2fe --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/SOURCES.txt @@ -0,0 +1,35 @@ +CHANGES +LICENSE +MANIFEST.in +README +setup.cfg +setup.py +test_sqlalchemy.py +Flask_SQLAlchemy.egg-info/PKG-INFO +Flask_SQLAlchemy.egg-info/SOURCES.txt +Flask_SQLAlchemy.egg-info/dependency_links.txt +Flask_SQLAlchemy.egg-info/not-zip-safe +Flask_SQLAlchemy.egg-info/requires.txt +Flask_SQLAlchemy.egg-info/top_level.txt +docs/Makefile +docs/api.rst +docs/binds.rst +docs/changelog.rst +docs/conf.py +docs/config.rst +docs/contents.rst.inc +docs/contexts.rst +docs/flaskstyle.sty +docs/index.rst +docs/logo.pdf +docs/make.bat +docs/models.rst +docs/queries.rst +docs/quickstart.rst +docs/signals.rst +docs/_static/flask-sqlalchemy-small.png +docs/_static/flask-sqlalchemy.png +docs/_templates/sidebarintro.html +docs/_templates/sidebarlogo.html +flask_sqlalchemy/__init__.py +flask_sqlalchemy/_compat.py \ No newline at end of file diff --git 
a/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/dependency_links.txt b/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/installed-files.txt b/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/installed-files.txt new file mode 100644 index 0000000..ba5a715 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/installed-files.txt @@ -0,0 +1,11 @@ +../flask_sqlalchemy/_compat.py +../flask_sqlalchemy/__init__.py +../flask_sqlalchemy/_compat.pyc +../flask_sqlalchemy/__init__.pyc +./ +requires.txt +SOURCES.txt +dependency_links.txt +PKG-INFO +not-zip-safe +top_level.txt diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/not-zip-safe b/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/not-zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/not-zip-safe @@ -0,0 +1 @@ + diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/requires.txt b/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/requires.txt new file mode 100644 index 0000000..d07e166 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/requires.txt @@ -0,0 +1,3 @@ +setuptools +Flask>=0.10 +SQLAlchemy \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/top_level.txt b/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/top_level.txt new file mode 100644 index 0000000..8a5538e --- /dev/null +++ 
b/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/top_level.txt @@ -0,0 +1 @@ +flask_sqlalchemy diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/PKG-INFO b/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/PKG-INFO new file mode 100644 index 0000000..db853cd --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/PKG-INFO @@ -0,0 +1,35 @@ +Metadata-Version: 1.1 +Name: Flask-Script +Version: 2.0.3 +Summary: Scripting support for Flask +Home-page: http://github.com/smurfix/flask-script +Author: Matthias Urlichs +Author-email: matthias@urlichs.de +License: BSD +Download-URL: https://github.com/smurfix/flask-script/tarball/v2.0.3 +Description: + Flask-Script + -------------- + + Flask support for writing external scripts. + + Links + ````` + + * `documentation `_ + + + +Platform: any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Software Development :: Libraries :: Python Modules diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/SOURCES.txt b/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/SOURCES.txt new file mode 100644 index 0000000..0202fcb --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/SOURCES.txt @@ -0,0 +1,29 @@ +LICENSE +MANIFEST.in +README.rst +setup.cfg +setup.py +tests.py +Flask_Script.egg-info/PKG-INFO +Flask_Script.egg-info/SOURCES.txt 
+Flask_Script.egg-info/dependency_links.txt +Flask_Script.egg-info/not-zip-safe +Flask_Script.egg-info/requires.txt +Flask_Script.egg-info/top_level.txt +docs/Makefile +docs/conf.py +docs/index.rst +docs/make.bat +docs/_static/flask-script.png +docs/_static/index.html +docs/_themes/README +docs/_themes/flask_theme_support.py +docs/_themes/flask/theme.conf +docs/_themes/flask/static/flasky.css_t +docs/_themes/flask_small/layout.html +docs/_themes/flask_small/theme.conf +docs/_themes/flask_small/static/flasky.css_t +flask_script/__init__.py +flask_script/_compat.py +flask_script/cli.py +flask_script/commands.py \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/dependency_links.txt b/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/installed-files.txt b/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/installed-files.txt new file mode 100644 index 0000000..967fb02 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/installed-files.txt @@ -0,0 +1,15 @@ +../flask_script/cli.py +../flask_script/_compat.py +../flask_script/commands.py +../flask_script/__init__.py +../flask_script/cli.pyc +../flask_script/_compat.pyc +../flask_script/commands.pyc +../flask_script/__init__.pyc +./ +requires.txt +SOURCES.txt +dependency_links.txt +PKG-INFO +not-zip-safe +top_level.txt diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/not-zip-safe b/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/not-zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/not-zip-safe @@ 
-0,0 +1 @@ + diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/requires.txt b/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/requires.txt new file mode 100644 index 0000000..2077213 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/requires.txt @@ -0,0 +1 @@ +Flask \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/top_level.txt b/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/top_level.txt new file mode 100644 index 0000000..efd6af0 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/top_level.txt @@ -0,0 +1 @@ +flask_script diff --git a/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/PKG-INFO b/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/PKG-INFO new file mode 100644 index 0000000..2c6a330 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/PKG-INFO @@ -0,0 +1,55 @@ +Metadata-Version: 1.1 +Name: Jinja2 +Version: 2.7.2 +Summary: A small but fast and easy to use stand-alone template engine written in pure python. +Home-page: http://jinja.pocoo.org/ +Author: Armin Ronacher +Author-email: armin.ronacher@active-4.com +License: BSD +Description: + Jinja2 + ~~~~~~ + + Jinja2 is a template engine written in pure Python. It provides a + `Django`_ inspired non-XML syntax but supports inline expressions and + an optional `sandboxed`_ environment. + + Nutshell + -------- + + Here a small example of a Jinja template:: + + {% extends 'base.html' %} + {% block title %}Memberlist{% endblock %} + {% block content %} + + {% endblock %} + + Philosophy + ---------- + + Application logic is for the controller but don't try to make the life + for the template designer too hard by giving him too few functionality. + + For more informations visit the new `Jinja2 webpage`_ and `documentation`_. + + .. 
_sandboxed: http://en.wikipedia.org/wiki/Sandbox_(computer_security) + .. _Django: http://www.djangoproject.com/ + .. _Jinja2 webpage: http://jinja.pocoo.org/ + .. _documentation: http://jinja.pocoo.org/2/documentation/ + +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Text Processing :: Markup :: HTML diff --git a/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/SOURCES.txt b/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/SOURCES.txt new file mode 100644 index 0000000..a27a9c4 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/SOURCES.txt @@ -0,0 +1,126 @@ +AUTHORS +CHANGES +LICENSE +MANIFEST.in +Makefile +README.rst +run-tests.py +setup.cfg +setup.py +Jinja2.egg-info/PKG-INFO +Jinja2.egg-info/SOURCES.txt +Jinja2.egg-info/dependency_links.txt +Jinja2.egg-info/entry_points.txt +Jinja2.egg-info/not-zip-safe +Jinja2.egg-info/requires.txt +Jinja2.egg-info/top_level.txt +artwork/jinjalogo.svg +docs/Makefile +docs/api.rst +docs/cache_extension.py +docs/changelog.rst +docs/conf.py +docs/contents.rst.inc +docs/extensions.rst +docs/faq.rst +docs/index.rst +docs/integration.rst +docs/intro.rst +docs/jinjaext.py +docs/jinjastyle.sty +docs/latexindex.rst +docs/logo.pdf +docs/sandbox.rst +docs/switching.rst +docs/templates.rst +docs/tricks.rst +docs/_static/.ignore +docs/_static/jinja-small.png +docs/_templates/sidebarintro.html +docs/_templates/sidebarlogo.html +docs/_themes/LICENSE +docs/_themes/README +docs/_themes/jinja/layout.html 
+docs/_themes/jinja/relations.html +docs/_themes/jinja/theme.conf +docs/_themes/jinja/static/jinja.css_t +examples/bench.py +examples/profile.py +examples/basic/cycle.py +examples/basic/debugger.py +examples/basic/inheritance.py +examples/basic/test.py +examples/basic/test_filter_and_linestatements.py +examples/basic/test_loop_filter.py +examples/basic/translate.py +examples/basic/templates/broken.html +examples/basic/templates/subbroken.html +examples/rwbench/djangoext.py +examples/rwbench/rwbench.py +examples/rwbench/django/_form.html +examples/rwbench/django/_input_field.html +examples/rwbench/django/_textarea.html +examples/rwbench/django/index.html +examples/rwbench/django/layout.html +examples/rwbench/genshi/helpers.html +examples/rwbench/genshi/index.html +examples/rwbench/genshi/layout.html +examples/rwbench/jinja/helpers.html +examples/rwbench/jinja/index.html +examples/rwbench/jinja/layout.html +examples/rwbench/mako/helpers.html +examples/rwbench/mako/index.html +examples/rwbench/mako/layout.html +ext/djangojinja2.py +ext/inlinegettext.py +ext/jinja.el +ext/Vim/jinja.vim +ext/django2jinja/django2jinja.py +ext/django2jinja/example.py +ext/django2jinja/templates/index.html +ext/django2jinja/templates/layout.html +ext/django2jinja/templates/subtemplate.html +jinja2/__init__.py +jinja2/_compat.py +jinja2/_stringdefs.py +jinja2/bccache.py +jinja2/compiler.py +jinja2/constants.py +jinja2/debug.py +jinja2/defaults.py +jinja2/environment.py +jinja2/exceptions.py +jinja2/ext.py +jinja2/filters.py +jinja2/lexer.py +jinja2/loaders.py +jinja2/meta.py +jinja2/nodes.py +jinja2/optimizer.py +jinja2/parser.py +jinja2/runtime.py +jinja2/sandbox.py +jinja2/tests.py +jinja2/utils.py +jinja2/visitor.py +jinja2/testsuite/__init__.py +jinja2/testsuite/api.py +jinja2/testsuite/bytecode_cache.py +jinja2/testsuite/core_tags.py +jinja2/testsuite/debug.py +jinja2/testsuite/doctests.py +jinja2/testsuite/ext.py +jinja2/testsuite/filters.py +jinja2/testsuite/imports.py 
+jinja2/testsuite/inheritance.py +jinja2/testsuite/lexnparse.py +jinja2/testsuite/loader.py +jinja2/testsuite/regression.py +jinja2/testsuite/security.py +jinja2/testsuite/tests.py +jinja2/testsuite/utils.py +jinja2/testsuite/res/__init__.py +jinja2/testsuite/res/templates/broken.html +jinja2/testsuite/res/templates/syntaxerror.html +jinja2/testsuite/res/templates/test.html +jinja2/testsuite/res/templates/foo/test.html \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/dependency_links.txt b/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/entry_points.txt b/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/entry_points.txt new file mode 100644 index 0000000..32e6b75 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/entry_points.txt @@ -0,0 +1,4 @@ + + [babel.extractors] + jinja2 = jinja2.ext:babel_extract[i18n] + \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/installed-files.txt b/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/installed-files.txt new file mode 100644 index 0000000..b958248 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/installed-files.txt @@ -0,0 +1,92 @@ +../jinja2/bccache.py +../jinja2/_compat.py +../jinja2/ext.py +../jinja2/defaults.py +../jinja2/meta.py +../jinja2/_stringdefs.py +../jinja2/nodes.py +../jinja2/runtime.py +../jinja2/exceptions.py +../jinja2/lexer.py +../jinja2/__init__.py +../jinja2/visitor.py +../jinja2/optimizer.py +../jinja2/sandbox.py +../jinja2/debug.py +../jinja2/filters.py +../jinja2/constants.py +../jinja2/tests.py +../jinja2/utils.py +../jinja2/compiler.py 
+../jinja2/parser.py +../jinja2/loaders.py +../jinja2/environment.py +../jinja2/testsuite/api.py +../jinja2/testsuite/regression.py +../jinja2/testsuite/core_tags.py +../jinja2/testsuite/inheritance.py +../jinja2/testsuite/ext.py +../jinja2/testsuite/security.py +../jinja2/testsuite/doctests.py +../jinja2/testsuite/bytecode_cache.py +../jinja2/testsuite/imports.py +../jinja2/testsuite/lexnparse.py +../jinja2/testsuite/__init__.py +../jinja2/testsuite/debug.py +../jinja2/testsuite/filters.py +../jinja2/testsuite/tests.py +../jinja2/testsuite/loader.py +../jinja2/testsuite/utils.py +../jinja2/testsuite/res/__init__.py +../jinja2/testsuite/res/templates/broken.html +../jinja2/testsuite/res/templates/syntaxerror.html +../jinja2/testsuite/res/templates/test.html +../jinja2/testsuite/res/templates/foo/test.html +../jinja2/bccache.pyc +../jinja2/_compat.pyc +../jinja2/ext.pyc +../jinja2/defaults.pyc +../jinja2/meta.pyc +../jinja2/_stringdefs.pyc +../jinja2/nodes.pyc +../jinja2/runtime.pyc +../jinja2/exceptions.pyc +../jinja2/lexer.pyc +../jinja2/__init__.pyc +../jinja2/visitor.pyc +../jinja2/optimizer.pyc +../jinja2/sandbox.pyc +../jinja2/debug.pyc +../jinja2/filters.pyc +../jinja2/constants.pyc +../jinja2/tests.pyc +../jinja2/utils.pyc +../jinja2/compiler.pyc +../jinja2/parser.pyc +../jinja2/loaders.pyc +../jinja2/environment.pyc +../jinja2/testsuite/api.pyc +../jinja2/testsuite/regression.pyc +../jinja2/testsuite/core_tags.pyc +../jinja2/testsuite/inheritance.pyc +../jinja2/testsuite/ext.pyc +../jinja2/testsuite/security.pyc +../jinja2/testsuite/doctests.pyc +../jinja2/testsuite/bytecode_cache.pyc +../jinja2/testsuite/imports.pyc +../jinja2/testsuite/lexnparse.pyc +../jinja2/testsuite/__init__.pyc +../jinja2/testsuite/debug.pyc +../jinja2/testsuite/filters.pyc +../jinja2/testsuite/tests.pyc +../jinja2/testsuite/loader.pyc +../jinja2/testsuite/utils.pyc +../jinja2/testsuite/res/__init__.pyc +./ +requires.txt +SOURCES.txt +entry_points.txt +dependency_links.txt +PKG-INFO 
+not-zip-safe +top_level.txt diff --git a/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/not-zip-safe b/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/not-zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/not-zip-safe @@ -0,0 +1 @@ + diff --git a/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/requires.txt b/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/requires.txt new file mode 100644 index 0000000..ccd0e92 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/requires.txt @@ -0,0 +1,4 @@ +markupsafe + +[i18n] +Babel>=0.8 \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/top_level.txt b/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/top_level.txt new file mode 100644 index 0000000..7f7afbf --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/top_level.txt @@ -0,0 +1 @@ +jinja2 diff --git a/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/PKG-INFO b/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/PKG-INFO new file mode 100644 index 0000000..91b8ec7 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/PKG-INFO @@ -0,0 +1,71 @@ +Metadata-Version: 1.1 +Name: Mako +Version: 0.9.1 +Summary: A super-fast templating language that borrows the best ideas from the existing templating languages. +Home-page: http://www.makotemplates.org/ +Author: Mike Bayer +Author-email: mike@zzzcomputing.com +License: MIT +Description: ========================= + Mako Templates for Python + ========================= + + Mako is a template library written in Python. It provides a familiar, non-XML + syntax which compiles into Python modules for maximum performance. Mako's + syntax and API borrows from the best ideas of many others, including Django + templates, Cheetah, Myghty, and Genshi. 
Conceptually, Mako is an embedded + Python (i.e. Python Server Page) language, which refines the familiar ideas + of componentized layout and inheritance to produce one of the most + straightforward and flexible models available, while also maintaining close + ties to Python calling and scoping semantics. + + Nutshell + ======== + + :: + + <%inherit file="base.html"/> + <% + rows = [[v for v in range(0,10)] for row in range(0,10)] + %> + + % for row in rows: + ${makerow(row)} + % endfor +
+ + <%def name="makerow(row)"> + + % for name in row: + ${name}\ + % endfor + + + + Philosophy + =========== + + Python is a great scripting language. Don't reinvent the wheel...your templates can handle it ! + + Documentation + ============== + + See documentation for Mako at http://www.makotemplates.org/docs/ + + License + ======== + + Mako is licensed under an MIT-style license (see LICENSE). + Other incorporated projects may be licensed under different licenses. + All licenses allow for non-commercial and commercial use. + +Keywords: templates +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content diff --git a/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/SOURCES.txt b/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/SOURCES.txt new file mode 100644 index 0000000..6f580c3 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/SOURCES.txt @@ -0,0 +1,173 @@ +CHANGES +LICENSE +MANIFEST.in +README.rst +distribute_setup.py +setup.cfg +setup.py +Mako.egg-info/PKG-INFO +Mako.egg-info/SOURCES.txt +Mako.egg-info/dependency_links.txt +Mako.egg-info/entry_points.txt +Mako.egg-info/not-zip-safe +Mako.egg-info/requires.txt +Mako.egg-info/top_level.txt +doc/caching.html +doc/defs.html +doc/filtering.html +doc/genindex.html +doc/index.html +doc/inheritance.html +doc/namespaces.html +doc/runtime.html +doc/search.html +doc/searchindex.js +doc/syntax.html +doc/unicode.html +doc/usage.html +doc/_sources/caching.txt +doc/_sources/defs.txt +doc/_sources/filtering.txt +doc/_sources/index.txt +doc/_sources/inheritance.txt 
+doc/_sources/namespaces.txt +doc/_sources/runtime.txt +doc/_sources/syntax.txt +doc/_sources/unicode.txt +doc/_sources/usage.txt +doc/_static/basic.css +doc/_static/comment-bright.png +doc/_static/comment-close.png +doc/_static/comment.png +doc/_static/default.css +doc/_static/docs.css +doc/_static/doctools.js +doc/_static/down-pressed.png +doc/_static/down.png +doc/_static/file.png +doc/_static/jquery.js +doc/_static/makoLogo.png +doc/_static/minus.png +doc/_static/plus.png +doc/_static/pygments.css +doc/_static/searchtools.js +doc/_static/sidebar.js +doc/_static/site.css +doc/_static/underscore.js +doc/_static/up-pressed.png +doc/_static/up.png +doc/_static/websupport.js +doc/build/Makefile +doc/build/caching.rst +doc/build/conf.py +doc/build/defs.rst +doc/build/filtering.rst +doc/build/index.rst +doc/build/inheritance.rst +doc/build/namespaces.rst +doc/build/runtime.rst +doc/build/syntax.rst +doc/build/unicode.rst +doc/build/usage.rst +doc/build/builder/__init__.py +doc/build/builder/builders.py +doc/build/builder/util.py +doc/build/static/docs.css +doc/build/static/makoLogo.png +doc/build/static/site.css +doc/build/templates/base.mako +doc/build/templates/genindex.mako +doc/build/templates/layout.mako +doc/build/templates/page.mako +doc/build/templates/rtd_layout.mako +doc/build/templates/search.mako +examples/bench/basic.py +examples/bench/cheetah/footer.tmpl +examples/bench/cheetah/header.tmpl +examples/bench/cheetah/template.tmpl +examples/bench/django/templatetags/__init__.py +examples/bench/django/templatetags/bench.py +examples/bench/kid/base.kid +examples/bench/kid/template.kid +examples/bench/myghty/base.myt +examples/bench/myghty/template.myt +examples/wsgi/run_wsgi.py +mako/__init__.py +mako/_ast_util.py +mako/ast.py +mako/cache.py +mako/codegen.py +mako/compat.py +mako/exceptions.py +mako/filters.py +mako/lexer.py +mako/lookup.py +mako/parsetree.py +mako/pygen.py +mako/pyparser.py +mako/runtime.py +mako/template.py +mako/util.py 
+mako/ext/__init__.py +mako/ext/autohandler.py +mako/ext/babelplugin.py +mako/ext/beaker_cache.py +mako/ext/preprocessors.py +mako/ext/pygmentplugin.py +mako/ext/turbogears.py +scripts/mako-render +test/__init__.py +test/sample_module_namespace.py +test/test_ast.py +test/test_babelplugin.py +test/test_block.py +test/test_cache.py +test/test_call.py +test/test_decorators.py +test/test_def.py +test/test_exceptions.py +test/test_filters.py +test/test_inheritance.py +test/test_lexer.py +test/test_lookup.py +test/test_loop.py +test/test_lru.py +test/test_namespace.py +test/test_pygen.py +test/test_runtime.py +test/test_template.py +test/test_tgplugin.py +test/test_util.py +test/util.py +test/foo/__init__.py +test/foo/test_ns.py +test/templates/badbom.html +test/templates/bom.html +test/templates/bommagic.html +test/templates/chs_unicode.html +test/templates/chs_unicode_py3k.html +test/templates/chs_utf8.html +test/templates/crlf.html +test/templates/gettext.mako +test/templates/index.html +test/templates/internationalization.html +test/templates/modtest.html +test/templates/read_unicode.html +test/templates/read_unicode_py3k.html +test/templates/runtimeerr.html +test/templates/runtimeerr_py3k.html +test/templates/unicode.html +test/templates/unicode_arguments.html +test/templates/unicode_arguments_py3k.html +test/templates/unicode_code.html +test/templates/unicode_code_py3k.html +test/templates/unicode_expr.html +test/templates/unicode_expr_py3k.html +test/templates/unicode_runtime_error.html +test/templates/unicode_syntax_error.html +test/templates/foo/modtest.html.py +test/templates/othersubdir/foo.html +test/templates/subdir/incl.html +test/templates/subdir/index.html +test/templates/subdir/modtest.html +test/templates/subdir/foo/modtest.html.py \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/dependency_links.txt b/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/dependency_links.txt new file mode 100644 
index 0000000..8b13789 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/entry_points.txt b/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/entry_points.txt new file mode 100644 index 0000000..3717629 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/entry_points.txt @@ -0,0 +1,14 @@ + + [python.templating.engines] + mako = mako.ext.turbogears:TGPlugin + + [pygments.lexers] + mako = mako.ext.pygmentplugin:MakoLexer + html+mako = mako.ext.pygmentplugin:MakoHtmlLexer + xml+mako = mako.ext.pygmentplugin:MakoXmlLexer + js+mako = mako.ext.pygmentplugin:MakoJavascriptLexer + css+mako = mako.ext.pygmentplugin:MakoCssLexer + + [babel.extractors] + mako = mako.ext.babelplugin:extract + \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/installed-files.txt b/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/installed-files.txt new file mode 100644 index 0000000..3605bd0 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/installed-files.txt @@ -0,0 +1,55 @@ +../mako/compat.py +../mako/ast.py +../mako/codegen.py +../mako/util.py +../mako/pygen.py +../mako/runtime.py +../mako/exceptions.py +../mako/lexer.py +../mako/lookup.py +../mako/template.py +../mako/__init__.py +../mako/_ast_util.py +../mako/pyparser.py +../mako/filters.py +../mako/parsetree.py +../mako/cache.py +../mako/ext/babelplugin.py +../mako/ext/turbogears.py +../mako/ext/preprocessors.py +../mako/ext/autohandler.py +../mako/ext/beaker_cache.py +../mako/ext/__init__.py +../mako/ext/pygmentplugin.py +../mako/compat.pyc +../mako/ast.pyc +../mako/codegen.pyc +../mako/util.pyc +../mako/pygen.pyc +../mako/runtime.pyc +../mako/exceptions.pyc +../mako/lexer.pyc +../mako/lookup.pyc +../mako/template.pyc +../mako/__init__.pyc +../mako/_ast_util.pyc 
+../mako/pyparser.pyc +../mako/filters.pyc +../mako/parsetree.pyc +../mako/cache.pyc +../mako/ext/babelplugin.pyc +../mako/ext/turbogears.pyc +../mako/ext/preprocessors.pyc +../mako/ext/autohandler.pyc +../mako/ext/beaker_cache.pyc +../mako/ext/__init__.pyc +../mako/ext/pygmentplugin.pyc +./ +requires.txt +SOURCES.txt +entry_points.txt +dependency_links.txt +PKG-INFO +not-zip-safe +top_level.txt +../../../../bin/mako-render diff --git a/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/not-zip-safe b/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/not-zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/not-zip-safe @@ -0,0 +1 @@ + diff --git a/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/requires.txt b/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/requires.txt new file mode 100644 index 0000000..8d60d23 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/requires.txt @@ -0,0 +1,4 @@ +MarkupSafe>=0.9.2 + +[beaker] +Beaker>=1.1 \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/top_level.txt b/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/top_level.txt new file mode 100644 index 0000000..2951cdd --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/top_level.txt @@ -0,0 +1 @@ +mako diff --git a/Linux_i686/lib/python2.7/site-packages/MarkupSafe-0.23.egg-info/PKG-INFO b/Linux_i686/lib/python2.7/site-packages/MarkupSafe-0.23.egg-info/PKG-INFO new file mode 100644 index 0000000..12aa93e --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/MarkupSafe-0.23.egg-info/PKG-INFO @@ -0,0 +1,119 @@ +Metadata-Version: 1.1 +Name: MarkupSafe +Version: 0.23 +Summary: Implements a XML/HTML/XHTML Markup safe string for Python +Home-page: http://github.com/mitsuhiko/markupsafe +Author: Armin Ronacher +Author-email: armin.ronacher@active-4.com 
+License: BSD +Description: MarkupSafe + ========== + + Implements a unicode subclass that supports HTML strings: + + >>> from markupsafe import Markup, escape + >>> escape("") + Markup(u'<script>alert(document.cookie);</script>') + >>> tmpl = Markup("%s") + >>> tmpl % "Peter > Lustig" + Markup(u'Peter > Lustig') + + If you want to make an object unicode that is not yet unicode + but don't want to lose the taint information, you can use the + `soft_unicode` function. (On Python 3 you can also use `soft_str` which + is a different name for the same function). + + >>> from markupsafe import soft_unicode + >>> soft_unicode(42) + u'42' + >>> soft_unicode(Markup('foo')) + Markup(u'foo') + + HTML Representations + -------------------- + + Objects can customize their HTML markup equivalent by overriding + the `__html__` function: + + >>> class Foo(object): + ... def __html__(self): + ... return 'Nice' + ... + >>> escape(Foo()) + Markup(u'Nice') + >>> Markup(Foo()) + Markup(u'Nice') + + Silent Escapes + -------------- + + Since MarkupSafe 0.10 there is now also a separate escape function + called `escape_silent` that returns an empty string for `None` for + consistency with other systems that return empty strings for `None` + when escaping (for instance Pylons' webhelpers). + + If you also want to use this for the escape method of the Markup + object, you can create your own subclass that does that:: + + from markupsafe import Markup, escape_silent as escape + + class SilentMarkup(Markup): + __slots__ = () + + @classmethod + def escape(cls, s): + return cls(escape(s)) + + New-Style String Formatting + --------------------------- + + Starting with MarkupSafe 0.21 new style string formats from Python 2.6 and + 3.x are now fully supported. Previously the escape behavior of those + functions was spotty at best. The new implementations operates under the + following algorithm: + + 1. 
if an object has an ``__html_format__`` method it is called as + replacement for ``__format__`` with the format specifier. It either + has to return a string or markup object. + 2. if an object has an ``__html__`` method it is called. + 3. otherwise the default format system of Python kicks in and the result + is HTML escaped. + + Here is how you can implement your own formatting:: + + class User(object): + + def __init__(self, id, username): + self.id = id + self.username = username + + def __html_format__(self, format_spec): + if format_spec == 'link': + return Markup('{1}').format( + self.id, + self.__html__(), + ) + elif format_spec: + raise ValueError('Invalid format spec') + return self.__html__() + + def __html__(self): + return Markup('{0}').format(self.username) + + And to format that user: + + >>> user = User(1, 'foo') + >>> Markup('

User: {0:link}').format(user) + Markup(u'

User: foo') + +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: Text Processing :: Markup :: HTML diff --git a/Linux_i686/lib/python2.7/site-packages/MarkupSafe-0.23.egg-info/SOURCES.txt b/Linux_i686/lib/python2.7/site-packages/MarkupSafe-0.23.egg-info/SOURCES.txt new file mode 100644 index 0000000..dfeb82b --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/MarkupSafe-0.23.egg-info/SOURCES.txt @@ -0,0 +1,17 @@ +AUTHORS +LICENSE +MANIFEST.in +README.rst +setup.cfg +setup.py +MarkupSafe.egg-info/PKG-INFO +MarkupSafe.egg-info/SOURCES.txt +MarkupSafe.egg-info/dependency_links.txt +MarkupSafe.egg-info/not-zip-safe +MarkupSafe.egg-info/top_level.txt +markupsafe/__init__.py +markupsafe/_compat.py +markupsafe/_constants.py +markupsafe/_native.py +markupsafe/_speedups.c +markupsafe/tests.py \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/MarkupSafe-0.23.egg-info/dependency_links.txt b/Linux_i686/lib/python2.7/site-packages/MarkupSafe-0.23.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/MarkupSafe-0.23.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/Linux_i686/lib/python2.7/site-packages/MarkupSafe-0.23.egg-info/installed-files.txt b/Linux_i686/lib/python2.7/site-packages/MarkupSafe-0.23.egg-info/installed-files.txt new file mode 100644 index 0000000..b2eff87 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/MarkupSafe-0.23.egg-info/installed-files.txt @@ -0,0 +1,18 @@ 
+../markupsafe/_compat.py +../markupsafe/_native.py +../markupsafe/__init__.py +../markupsafe/_constants.py +../markupsafe/tests.py +../markupsafe/_speedups.c +../markupsafe/_compat.pyc +../markupsafe/_native.pyc +../markupsafe/__init__.pyc +../markupsafe/_constants.pyc +../markupsafe/tests.pyc +../markupsafe/_speedups.so +./ +SOURCES.txt +dependency_links.txt +PKG-INFO +not-zip-safe +top_level.txt diff --git a/Linux_i686/lib/python2.7/site-packages/MarkupSafe-0.23.egg-info/not-zip-safe b/Linux_i686/lib/python2.7/site-packages/MarkupSafe-0.23.egg-info/not-zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/MarkupSafe-0.23.egg-info/not-zip-safe @@ -0,0 +1 @@ + diff --git a/Linux_i686/lib/python2.7/site-packages/MarkupSafe-0.23.egg-info/top_level.txt b/Linux_i686/lib/python2.7/site-packages/MarkupSafe-0.23.egg-info/top_level.txt new file mode 100644 index 0000000..75bf729 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/MarkupSafe-0.23.egg-info/top_level.txt @@ -0,0 +1 @@ +markupsafe diff --git a/Linux_i686/lib/python2.7/site-packages/OpenSSL/SSL.py b/Linux_i686/lib/python2.7/site-packages/OpenSSL/SSL.py new file mode 100644 index 0000000..a257f16 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/OpenSSL/SSL.py @@ -0,0 +1,1423 @@ +from sys import platform +from functools import wraps, partial +from itertools import count +from weakref import WeakValueDictionary +from errno import errorcode + +from six import text_type as _text_type +from six import integer_types as integer_types + +from OpenSSL._util import ( + ffi as _ffi, + lib as _lib, + exception_from_error_queue as _exception_from_error_queue, + native as _native) + +from OpenSSL.crypto import ( + FILETYPE_PEM, _PassphraseHelper, PKey, X509Name, X509, X509Store) + +_unspecified = object() + +try: + _memoryview = memoryview +except NameError: + class _memoryview(object): + pass + +OPENSSL_VERSION_NUMBER = _lib.OPENSSL_VERSION_NUMBER 
+SSLEAY_VERSION = _lib.SSLEAY_VERSION +SSLEAY_CFLAGS = _lib.SSLEAY_CFLAGS +SSLEAY_PLATFORM = _lib.SSLEAY_PLATFORM +SSLEAY_DIR = _lib.SSLEAY_DIR +SSLEAY_BUILT_ON = _lib.SSLEAY_BUILT_ON + +SENT_SHUTDOWN = _lib.SSL_SENT_SHUTDOWN +RECEIVED_SHUTDOWN = _lib.SSL_RECEIVED_SHUTDOWN + +SSLv2_METHOD = 1 +SSLv3_METHOD = 2 +SSLv23_METHOD = 3 +TLSv1_METHOD = 4 +TLSv1_1_METHOD = 5 +TLSv1_2_METHOD = 6 + +OP_NO_SSLv2 = _lib.SSL_OP_NO_SSLv2 +OP_NO_SSLv3 = _lib.SSL_OP_NO_SSLv3 +OP_NO_TLSv1 = _lib.SSL_OP_NO_TLSv1 + +OP_NO_TLSv1_1 = getattr(_lib, "SSL_OP_NO_TLSv1_1", 0) +OP_NO_TLSv1_2 = getattr(_lib, "SSL_OP_NO_TLSv1_2", 0) + +try: + MODE_RELEASE_BUFFERS = _lib.SSL_MODE_RELEASE_BUFFERS +except AttributeError: + pass + +OP_SINGLE_DH_USE = _lib.SSL_OP_SINGLE_DH_USE +OP_EPHEMERAL_RSA = _lib.SSL_OP_EPHEMERAL_RSA +OP_MICROSOFT_SESS_ID_BUG = _lib.SSL_OP_MICROSOFT_SESS_ID_BUG +OP_NETSCAPE_CHALLENGE_BUG = _lib.SSL_OP_NETSCAPE_CHALLENGE_BUG +OP_NETSCAPE_REUSE_CIPHER_CHANGE_BUG = _lib.SSL_OP_NETSCAPE_REUSE_CIPHER_CHANGE_BUG +OP_SSLREF2_REUSE_CERT_TYPE_BUG = _lib.SSL_OP_SSLREF2_REUSE_CERT_TYPE_BUG +OP_MICROSOFT_BIG_SSLV3_BUFFER = _lib.SSL_OP_MICROSOFT_BIG_SSLV3_BUFFER +try: + OP_MSIE_SSLV2_RSA_PADDING = _lib.SSL_OP_MSIE_SSLV2_RSA_PADDING +except AttributeError: + pass +OP_SSLEAY_080_CLIENT_DH_BUG = _lib.SSL_OP_SSLEAY_080_CLIENT_DH_BUG +OP_TLS_D5_BUG = _lib.SSL_OP_TLS_D5_BUG +OP_TLS_BLOCK_PADDING_BUG = _lib.SSL_OP_TLS_BLOCK_PADDING_BUG +OP_DONT_INSERT_EMPTY_FRAGMENTS = _lib.SSL_OP_DONT_INSERT_EMPTY_FRAGMENTS +OP_CIPHER_SERVER_PREFERENCE = _lib.SSL_OP_CIPHER_SERVER_PREFERENCE +OP_TLS_ROLLBACK_BUG = _lib.SSL_OP_TLS_ROLLBACK_BUG +OP_PKCS1_CHECK_1 = _lib.SSL_OP_PKCS1_CHECK_1 +OP_PKCS1_CHECK_2 = _lib.SSL_OP_PKCS1_CHECK_2 +OP_NETSCAPE_CA_DN_BUG = _lib.SSL_OP_NETSCAPE_CA_DN_BUG +OP_NETSCAPE_DEMO_CIPHER_CHANGE_BUG= _lib.SSL_OP_NETSCAPE_DEMO_CIPHER_CHANGE_BUG +try: + OP_NO_COMPRESSION = _lib.SSL_OP_NO_COMPRESSION +except AttributeError: + pass + +OP_NO_QUERY_MTU = _lib.SSL_OP_NO_QUERY_MTU 
+OP_COOKIE_EXCHANGE = _lib.SSL_OP_COOKIE_EXCHANGE +OP_NO_TICKET = _lib.SSL_OP_NO_TICKET + +OP_ALL = _lib.SSL_OP_ALL + +VERIFY_PEER = _lib.SSL_VERIFY_PEER +VERIFY_FAIL_IF_NO_PEER_CERT = _lib.SSL_VERIFY_FAIL_IF_NO_PEER_CERT +VERIFY_CLIENT_ONCE = _lib.SSL_VERIFY_CLIENT_ONCE +VERIFY_NONE = _lib.SSL_VERIFY_NONE + +SESS_CACHE_OFF = _lib.SSL_SESS_CACHE_OFF +SESS_CACHE_CLIENT = _lib.SSL_SESS_CACHE_CLIENT +SESS_CACHE_SERVER = _lib.SSL_SESS_CACHE_SERVER +SESS_CACHE_BOTH = _lib.SSL_SESS_CACHE_BOTH +SESS_CACHE_NO_AUTO_CLEAR = _lib.SSL_SESS_CACHE_NO_AUTO_CLEAR +SESS_CACHE_NO_INTERNAL_LOOKUP = _lib.SSL_SESS_CACHE_NO_INTERNAL_LOOKUP +SESS_CACHE_NO_INTERNAL_STORE = _lib.SSL_SESS_CACHE_NO_INTERNAL_STORE +SESS_CACHE_NO_INTERNAL = _lib.SSL_SESS_CACHE_NO_INTERNAL + +SSL_ST_CONNECT = _lib.SSL_ST_CONNECT +SSL_ST_ACCEPT = _lib.SSL_ST_ACCEPT +SSL_ST_MASK = _lib.SSL_ST_MASK +SSL_ST_INIT = _lib.SSL_ST_INIT +SSL_ST_BEFORE = _lib.SSL_ST_BEFORE +SSL_ST_OK = _lib.SSL_ST_OK +SSL_ST_RENEGOTIATE = _lib.SSL_ST_RENEGOTIATE + +SSL_CB_LOOP = _lib.SSL_CB_LOOP +SSL_CB_EXIT = _lib.SSL_CB_EXIT +SSL_CB_READ = _lib.SSL_CB_READ +SSL_CB_WRITE = _lib.SSL_CB_WRITE +SSL_CB_ALERT = _lib.SSL_CB_ALERT +SSL_CB_READ_ALERT = _lib.SSL_CB_READ_ALERT +SSL_CB_WRITE_ALERT = _lib.SSL_CB_WRITE_ALERT +SSL_CB_ACCEPT_LOOP = _lib.SSL_CB_ACCEPT_LOOP +SSL_CB_ACCEPT_EXIT = _lib.SSL_CB_ACCEPT_EXIT +SSL_CB_CONNECT_LOOP = _lib.SSL_CB_CONNECT_LOOP +SSL_CB_CONNECT_EXIT = _lib.SSL_CB_CONNECT_EXIT +SSL_CB_HANDSHAKE_START = _lib.SSL_CB_HANDSHAKE_START +SSL_CB_HANDSHAKE_DONE = _lib.SSL_CB_HANDSHAKE_DONE + + +class Error(Exception): + """ + An error occurred in an `OpenSSL.SSL` API. 
+ """ + + + +_raise_current_error = partial(_exception_from_error_queue, Error) + + +class WantReadError(Error): + pass + + + +class WantWriteError(Error): + pass + + + +class WantX509LookupError(Error): + pass + + + +class ZeroReturnError(Error): + pass + + + +class SysCallError(Error): + pass + + + +class _VerifyHelper(object): + def __init__(self, connection, callback): + self._problems = [] + + @wraps(callback) + def wrapper(ok, store_ctx): + cert = X509.__new__(X509) + cert._x509 = _lib.X509_STORE_CTX_get_current_cert(store_ctx) + error_number = _lib.X509_STORE_CTX_get_error(store_ctx) + error_depth = _lib.X509_STORE_CTX_get_error_depth(store_ctx) + + try: + result = callback(connection, cert, error_number, error_depth, ok) + except Exception as e: + self._problems.append(e) + return 0 + else: + if result: + _lib.X509_STORE_CTX_set_error(store_ctx, _lib.X509_V_OK) + return 1 + else: + return 0 + + self.callback = _ffi.callback( + "int (*)(int, X509_STORE_CTX *)", wrapper) + + + def raise_if_problem(self): + if self._problems: + try: + _raise_current_error() + except Error: + pass + raise self._problems.pop(0) + + + +def _asFileDescriptor(obj): + fd = None + if not isinstance(obj, integer_types): + meth = getattr(obj, "fileno", None) + if meth is not None: + obj = meth() + + if isinstance(obj, integer_types): + fd = obj + + if not isinstance(fd, integer_types): + raise TypeError("argument must be an int, or have a fileno() method.") + elif fd < 0: + raise ValueError( + "file descriptor cannot be a negative integer (%i)" % (fd,)) + + return fd + + + +def SSLeay_version(type): + """ + Return a string describing the version of OpenSSL in use. + + :param type: One of the SSLEAY_ constants defined in this module. + """ + return _ffi.string(_lib.SSLeay_version(type)) + + + +class Session(object): + pass + + + +class Context(object): + """ + :py:obj:`OpenSSL.SSL.Context` instances define the parameters for setting up + new SSL connections. 
+ """ + _methods = { + SSLv3_METHOD: "SSLv3_method", + SSLv23_METHOD: "SSLv23_method", + TLSv1_METHOD: "TLSv1_method", + TLSv1_1_METHOD: "TLSv1_1_method", + TLSv1_2_METHOD: "TLSv1_2_method", + } + _methods = dict( + (identifier, getattr(_lib, name)) + for (identifier, name) in _methods.items() + if getattr(_lib, name, None) is not None) + + + def __init__(self, method): + """ + :param method: One of SSLv2_METHOD, SSLv3_METHOD, SSLv23_METHOD, or + TLSv1_METHOD. + """ + if not isinstance(method, integer_types): + raise TypeError("method must be an integer") + + try: + method_func = self._methods[method] + except KeyError: + raise ValueError("No such protocol") + + method_obj = method_func() + if method_obj == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + + context = _lib.SSL_CTX_new(method_obj) + if context == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + context = _ffi.gc(context, _lib.SSL_CTX_free) + + self._context = context + self._passphrase_helper = None + self._passphrase_callback = None + self._passphrase_userdata = None + self._verify_helper = None + self._verify_callback = None + self._info_callback = None + self._tlsext_servername_callback = None + self._app_data = None + + # SSL_CTX_set_app_data(self->ctx, self); + # SSL_CTX_set_mode(self->ctx, SSL_MODE_ENABLE_PARTIAL_WRITE | + # SSL_MODE_ACCEPT_MOVING_WRITE_BUFFER | + # SSL_MODE_AUTO_RETRY); + self.set_mode(_lib.SSL_MODE_ENABLE_PARTIAL_WRITE) + + + def load_verify_locations(self, cafile, capath=None): + """ + Let SSL know where we can find trusted certificates for the certificate + chain + + :param cafile: In which file we can find the certificates + :param capath: In which directory we can find the certificates + :return: None + """ + if cafile is None: + cafile = _ffi.NULL + elif not isinstance(cafile, bytes): + raise TypeError("cafile must be None or a byte string") + + if capath is None: + capath = _ffi.NULL + elif not isinstance(capath, bytes): + raise 
TypeError("capath must be None or a byte string") + + load_result = _lib.SSL_CTX_load_verify_locations(self._context, cafile, capath) + if not load_result: + _raise_current_error() + + + def _wrap_callback(self, callback): + @wraps(callback) + def wrapper(size, verify, userdata): + return callback(size, verify, self._passphrase_userdata) + return _PassphraseHelper( + FILETYPE_PEM, wrapper, more_args=True, truncate=True) + + + def set_passwd_cb(self, callback, userdata=None): + """ + Set the passphrase callback + + :param callback: The Python callback to use + :param userdata: (optional) A Python object which will be given as + argument to the callback + :return: None + """ + if not callable(callback): + raise TypeError("callback must be callable") + + self._passphrase_helper = self._wrap_callback(callback) + self._passphrase_callback = self._passphrase_helper.callback + _lib.SSL_CTX_set_default_passwd_cb( + self._context, self._passphrase_callback) + self._passphrase_userdata = userdata + + + def set_default_verify_paths(self): + """ + Use the platform-specific CA certificate locations + + :return: None + """ + set_result = _lib.SSL_CTX_set_default_verify_paths(self._context) + if not set_result: + # TODO: This is untested. + _raise_current_error() + + + def use_certificate_chain_file(self, certfile): + """ + Load a certificate chain from a file + + :param certfile: The name of the certificate chain file + :return: None + """ + if isinstance(certfile, _text_type): + # Perhaps sys.getfilesystemencoding() could be better? 
+ certfile = certfile.encode("utf-8") + + if not isinstance(certfile, bytes): + raise TypeError("certfile must be bytes or unicode") + + result = _lib.SSL_CTX_use_certificate_chain_file(self._context, certfile) + if not result: + _raise_current_error() + + + def use_certificate_file(self, certfile, filetype=FILETYPE_PEM): + """ + Load a certificate from a file + + :param certfile: The name of the certificate file + :param filetype: (optional) The encoding of the file, default is PEM + :return: None + """ + if isinstance(certfile, _text_type): + # Perhaps sys.getfilesystemencoding() could be better? + certfile = certfile.encode("utf-8") + if not isinstance(certfile, bytes): + raise TypeError("certfile must be bytes or unicode") + if not isinstance(filetype, integer_types): + raise TypeError("filetype must be an integer") + + use_result = _lib.SSL_CTX_use_certificate_file(self._context, certfile, filetype) + if not use_result: + _raise_current_error() + + + def use_certificate(self, cert): + """ + Load a certificate from a X509 object + + :param cert: The X509 object + :return: None + """ + if not isinstance(cert, X509): + raise TypeError("cert must be an X509 instance") + + use_result = _lib.SSL_CTX_use_certificate(self._context, cert._x509) + if not use_result: + _raise_current_error() + + + def add_extra_chain_cert(self, certobj): + """ + Add certificate to chain + + :param certobj: The X509 certificate object to add to the chain + :return: None + """ + if not isinstance(certobj, X509): + raise TypeError("certobj must be an X509 instance") + + copy = _lib.X509_dup(certobj._x509) + add_result = _lib.SSL_CTX_add_extra_chain_cert(self._context, copy) + if not add_result: + # TODO: This is untested. 
+ _lib.X509_free(copy) + _raise_current_error() + + + def _raise_passphrase_exception(self): + if self._passphrase_helper is None: + _raise_current_error() + exception = self._passphrase_helper.raise_if_problem(Error) + if exception is not None: + raise exception + + + def use_privatekey_file(self, keyfile, filetype=_unspecified): + """ + Load a private key from a file + + :param keyfile: The name of the key file + :param filetype: (optional) The encoding of the file, default is PEM + :return: None + """ + if isinstance(keyfile, _text_type): + # Perhaps sys.getfilesystemencoding() could be better? + keyfile = keyfile.encode("utf-8") + + if not isinstance(keyfile, bytes): + raise TypeError("keyfile must be a byte string") + + if filetype is _unspecified: + filetype = FILETYPE_PEM + elif not isinstance(filetype, integer_types): + raise TypeError("filetype must be an integer") + + use_result = _lib.SSL_CTX_use_PrivateKey_file( + self._context, keyfile, filetype) + if not use_result: + self._raise_passphrase_exception() + + + def use_privatekey(self, pkey): + """ + Load a private key from a PKey object + + :param pkey: The PKey object + :return: None + """ + if not isinstance(pkey, PKey): + raise TypeError("pkey must be a PKey instance") + + use_result = _lib.SSL_CTX_use_PrivateKey(self._context, pkey._pkey) + if not use_result: + self._raise_passphrase_exception() + + + def check_privatekey(self): + """ + Check that the private key and certificate match up + + :return: None (raises an exception if something's wrong) + """ + + def load_client_ca(self, cafile): + """ + Load the trusted certificates that will be sent to the client (basically + telling the client "These are the guys I trust"). Does not actually + imply any of the certificates are trusted; that must be configured + separately. + + :param cafile: The name of the certificates file + :return: None + """ + + def set_session_id(self, buf): + """ + Set the session identifier. 
This is needed if you want to do session + resumption. + + :param buf: A Python object that can be safely converted to a string + :returns: None + """ + + def set_session_cache_mode(self, mode): + """ + Enable/disable session caching and specify the mode used. + + :param mode: One or more of the SESS_CACHE_* flags (combine using + bitwise or) + :returns: The previously set caching mode. + """ + if not isinstance(mode, integer_types): + raise TypeError("mode must be an integer") + + return _lib.SSL_CTX_set_session_cache_mode(self._context, mode) + + + def get_session_cache_mode(self): + """ + :returns: The currently used cache mode. + """ + return _lib.SSL_CTX_get_session_cache_mode(self._context) + + + def set_verify(self, mode, callback): + """ + Set the verify mode and verify callback + + :param mode: The verify mode, this is either VERIFY_NONE or + VERIFY_PEER combined with possible other flags + :param callback: The Python callback to use + :return: None + + See SSL_CTX_set_verify(3SSL) for further details. 
+ """ + if not isinstance(mode, integer_types): + raise TypeError("mode must be an integer") + + if not callable(callback): + raise TypeError("callback must be callable") + + self._verify_helper = _VerifyHelper(self, callback) + self._verify_callback = self._verify_helper.callback + _lib.SSL_CTX_set_verify(self._context, mode, self._verify_callback) + + + def set_verify_depth(self, depth): + """ + Set the verify depth + + :param depth: An integer specifying the verify depth + :return: None + """ + if not isinstance(depth, integer_types): + raise TypeError("depth must be an integer") + + _lib.SSL_CTX_set_verify_depth(self._context, depth) + + + def get_verify_mode(self): + """ + Get the verify mode + + :return: The verify mode + """ + return _lib.SSL_CTX_get_verify_mode(self._context) + + + def get_verify_depth(self): + """ + Get the verify depth + + :return: The verify depth + """ + return _lib.SSL_CTX_get_verify_depth(self._context) + + + def load_tmp_dh(self, dhfile): + """ + Load parameters for Ephemeral Diffie-Hellman + + :param dhfile: The file to load EDH parameters from + :return: None + """ + if not isinstance(dhfile, bytes): + raise TypeError("dhfile must be a byte string") + + bio = _lib.BIO_new_file(dhfile, b"r") + if bio == _ffi.NULL: + _raise_current_error() + bio = _ffi.gc(bio, _lib.BIO_free) + + dh = _lib.PEM_read_bio_DHparams(bio, _ffi.NULL, _ffi.NULL, _ffi.NULL) + dh = _ffi.gc(dh, _lib.DH_free) + _lib.SSL_CTX_set_tmp_dh(self._context, dh) + + + def set_cipher_list(self, cipher_list): + """ + Change the cipher list + + :param cipher_list: A cipher list, see ciphers(1) + :return: None + """ + if isinstance(cipher_list, _text_type): + cipher_list = cipher_list.encode("ascii") + + if not isinstance(cipher_list, bytes): + raise TypeError("cipher_list must be bytes or unicode") + + result = _lib.SSL_CTX_set_cipher_list(self._context, cipher_list) + if not result: + _raise_current_error() + + + def set_client_ca_list(self, certificate_authorities): + """ 
+ Set the list of preferred client certificate signers for this server context. + + This list of certificate authorities will be sent to the client when the + server requests a client certificate. + + :param certificate_authorities: a sequence of X509Names. + :return: None + """ + name_stack = _lib.sk_X509_NAME_new_null() + if name_stack == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + + try: + for ca_name in certificate_authorities: + if not isinstance(ca_name, X509Name): + raise TypeError( + "client CAs must be X509Name objects, not %s objects" % ( + type(ca_name).__name__,)) + copy = _lib.X509_NAME_dup(ca_name._name) + if copy == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + push_result = _lib.sk_X509_NAME_push(name_stack, copy) + if not push_result: + _lib.X509_NAME_free(copy) + _raise_current_error() + except: + _lib.sk_X509_NAME_free(name_stack) + raise + + _lib.SSL_CTX_set_client_CA_list(self._context, name_stack) + + + def add_client_ca(self, certificate_authority): + """ + Add the CA certificate to the list of preferred signers for this context. + + The list of certificate authorities will be sent to the client when the + server requests a client certificate. + + :param certificate_authority: certificate authority's X509 certificate. + :return: None + """ + if not isinstance(certificate_authority, X509): + raise TypeError("certificate_authority must be an X509 instance") + + add_result = _lib.SSL_CTX_add_client_CA( + self._context, certificate_authority._x509) + if not add_result: + # TODO: This is untested. 
+ _raise_current_error() + + + def set_timeout(self, timeout): + """ + Set session timeout + + :param timeout: The timeout in seconds + :return: The previous session timeout + """ + if not isinstance(timeout, integer_types): + raise TypeError("timeout must be an integer") + + return _lib.SSL_CTX_set_timeout(self._context, timeout) + + + def get_timeout(self): + """ + Get the session timeout + + :return: The session timeout + """ + return _lib.SSL_CTX_get_timeout(self._context) + + + def set_info_callback(self, callback): + """ + Set the info callback + + :param callback: The Python callback to use + :return: None + """ + @wraps(callback) + def wrapper(ssl, where, return_code): + callback(Connection._reverse_mapping[ssl], where, return_code) + self._info_callback = _ffi.callback( + "void (*)(const SSL *, int, int)", wrapper) + _lib.SSL_CTX_set_info_callback(self._context, self._info_callback) + + + def get_app_data(self): + """ + Get the application data (supplied via set_app_data()) + + :return: The application data + """ + return self._app_data + + + def set_app_data(self, data): + """ + Set the application data (will be returned from get_app_data()) + + :param data: Any Python object + :return: None + """ + self._app_data = data + + + def get_cert_store(self): + """ + Get the certificate store for the context. + + :return: A X509Store object or None if it does not have one. + """ + store = _lib.SSL_CTX_get_cert_store(self._context) + if store == _ffi.NULL: + # TODO: This is untested. + return None + + pystore = X509Store.__new__(X509Store) + pystore._store = store + return pystore + + + def set_options(self, options): + """ + Add options. Options set before are not cleared! + + :param options: The options to add. + :return: The new option bitmask. 
+ """ + if not isinstance(options, integer_types): + raise TypeError("options must be an integer") + + return _lib.SSL_CTX_set_options(self._context, options) + + + def set_mode(self, mode): + """ + Add modes via bitmask. Modes set before are not cleared! + + :param mode: The mode to add. + :return: The new mode bitmask. + """ + if not isinstance(mode, integer_types): + raise TypeError("mode must be an integer") + + return _lib.SSL_CTX_set_mode(self._context, mode) + + + def set_tlsext_servername_callback(self, callback): + """ + Specify a callback function to be called when clients specify a server name. + + :param callback: The callback function. It will be invoked with one + argument, the Connection instance. + """ + @wraps(callback) + def wrapper(ssl, alert, arg): + callback(Connection._reverse_mapping[ssl]) + return 0 + + self._tlsext_servername_callback = _ffi.callback( + "int (*)(const SSL *, int *, void *)", wrapper) + _lib.SSL_CTX_set_tlsext_servername_callback( + self._context, self._tlsext_servername_callback) + +ContextType = Context + + + +class Connection(object): + """ + """ + _reverse_mapping = WeakValueDictionary() + + def __init__(self, context, socket=None): + """ + Create a new Connection object, using the given OpenSSL.SSL.Context + instance and socket. + + :param context: An SSL Context to use for this connection + :param socket: The socket to use for transport layer + """ + if not isinstance(context, Context): + raise TypeError("context must be a Context instance") + + ssl = _lib.SSL_new(context._context) + self._ssl = _ffi.gc(ssl, _lib.SSL_free) + self._context = context + + self._reverse_mapping[self._ssl] = self + + if socket is None: + self._socket = None + # Don't set up any gc for these, SSL_free will take care of them. + self._into_ssl = _lib.BIO_new(_lib.BIO_s_mem()) + self._from_ssl = _lib.BIO_new(_lib.BIO_s_mem()) + + if self._into_ssl == _ffi.NULL or self._from_ssl == _ffi.NULL: + # TODO: This is untested. 
+ _raise_current_error() + + _lib.SSL_set_bio(self._ssl, self._into_ssl, self._from_ssl) + else: + self._into_ssl = None + self._from_ssl = None + self._socket = socket + set_result = _lib.SSL_set_fd(self._ssl, _asFileDescriptor(self._socket)) + if not set_result: + # TODO: This is untested. + _raise_current_error() + + + def __getattr__(self, name): + """ + Look up attributes on the wrapped socket object if they are not found on + the Connection object. + """ + return getattr(self._socket, name) + + + def _raise_ssl_error(self, ssl, result): + if self._context._verify_helper is not None: + self._context._verify_helper.raise_if_problem() + + error = _lib.SSL_get_error(ssl, result) + if error == _lib.SSL_ERROR_WANT_READ: + raise WantReadError() + elif error == _lib.SSL_ERROR_WANT_WRITE: + raise WantWriteError() + elif error == _lib.SSL_ERROR_ZERO_RETURN: + raise ZeroReturnError() + elif error == _lib.SSL_ERROR_WANT_X509_LOOKUP: + # TODO: This is untested. + raise WantX509LookupError() + elif error == _lib.SSL_ERROR_SYSCALL: + if _lib.ERR_peek_error() == 0: + if result < 0: + if platform == "win32": + errno = _ffi.getwinerror()[0] + else: + errno = _ffi.errno + raise SysCallError(errno, errorcode[errno]) + else: + raise SysCallError(-1, "Unexpected EOF") + else: + # TODO: This is untested. + _raise_current_error() + elif error == _lib.SSL_ERROR_NONE: + pass + else: + _raise_current_error() + + + def get_context(self): + """ + Get session context + """ + return self._context + + + def set_context(self, context): + """ + Switch this connection to a new session context + + :param context: A :py:class:`Context` instance giving the new session + context to use. 
+ """ + if not isinstance(context, Context): + raise TypeError("context must be a Context instance") + + _lib.SSL_set_SSL_CTX(self._ssl, context._context) + self._context = context + + + def get_servername(self): + """ + Retrieve the servername extension value if provided in the client hello + message, or None if there wasn't one. + + :return: A byte string giving the server name or :py:data:`None`. + """ + name = _lib.SSL_get_servername(self._ssl, _lib.TLSEXT_NAMETYPE_host_name) + if name == _ffi.NULL: + return None + + return _ffi.string(name) + + + def set_tlsext_host_name(self, name): + """ + Set the value of the servername extension to send in the client hello. + + :param name: A byte string giving the name. + """ + if not isinstance(name, bytes): + raise TypeError("name must be a byte string") + elif b"\0" in name: + raise TypeError("name must not contain NUL byte") + + # XXX I guess this can fail sometimes? + _lib.SSL_set_tlsext_host_name(self._ssl, name) + + + def pending(self): + """ + Get the number of bytes that can be safely read from the connection + + :return: The number of bytes available in the receive buffer. + """ + return _lib.SSL_pending(self._ssl) + + + def send(self, buf, flags=0): + """ + Send data on the connection. NOTE: If you get one of the WantRead, + WantWrite or WantX509Lookup exceptions on this, you have to call the + method again with the SAME buffer. + + :param buf: The string to send + :param flags: (optional) Included for compatibility with the socket + API, the value is ignored + :return: The number of bytes written + """ + if isinstance(buf, _memoryview): + buf = buf.tobytes() + if not isinstance(buf, bytes): + raise TypeError("data must be a byte string") + + result = _lib.SSL_write(self._ssl, buf, len(buf)) + self._raise_ssl_error(self._ssl, result) + return result + write = send + + + def sendall(self, buf, flags=0): + """ + Send "all" data on the connection. This calls send() repeatedly until + all data is sent. 
If an error occurs, it's impossible to tell how much + data has been sent. + + :param buf: The string to send + :param flags: (optional) Included for compatibility with the socket + API, the value is ignored + :return: The number of bytes written + """ + if isinstance(buf, _memoryview): + buf = buf.tobytes() + if not isinstance(buf, bytes): + raise TypeError("buf must be a byte string") + + left_to_send = len(buf) + total_sent = 0 + data = _ffi.new("char[]", buf) + + while left_to_send: + result = _lib.SSL_write(self._ssl, data + total_sent, left_to_send) + self._raise_ssl_error(self._ssl, result) + total_sent += result + left_to_send -= result + + + def recv(self, bufsiz, flags=None): + """ + Receive data on the connection. NOTE: If you get one of the WantRead, + WantWrite or WantX509Lookup exceptions on this, you have to call the + method again with the SAME buffer. + + :param bufsiz: The maximum number of bytes to read + :param flags: (optional) Included for compatibility with the socket + API, the value is ignored + :return: The string read from the Connection + """ + buf = _ffi.new("char[]", bufsiz) + result = _lib.SSL_read(self._ssl, buf, bufsiz) + self._raise_ssl_error(self._ssl, result) + return _ffi.buffer(buf, result)[:] + read = recv + + + def _handle_bio_errors(self, bio, result): + if _lib.BIO_should_retry(bio): + if _lib.BIO_should_read(bio): + raise WantReadError() + elif _lib.BIO_should_write(bio): + # TODO: This is untested. + raise WantWriteError() + elif _lib.BIO_should_io_special(bio): + # TODO: This is untested. I think io_special means the socket + # BIO has a not-yet connected socket. + raise ValueError("BIO_should_io_special") + else: + # TODO: This is untested. + raise ValueError("unknown bio failure") + else: + # TODO: This is untested. + _raise_current_error() + + + def bio_read(self, bufsiz): + """ + When using non-socket connections this function reads the "dirty" data + that would have traveled away on the network. 
+ + :param bufsiz: The maximum number of bytes to read + :return: The string read. + """ + if self._from_ssl is None: + raise TypeError("Connection sock was not None") + + if not isinstance(bufsiz, integer_types): + raise TypeError("bufsiz must be an integer") + + buf = _ffi.new("char[]", bufsiz) + result = _lib.BIO_read(self._from_ssl, buf, bufsiz) + if result <= 0: + self._handle_bio_errors(self._from_ssl, result) + + return _ffi.buffer(buf, result)[:] + + + def bio_write(self, buf): + """ + When using non-socket connections this function sends "dirty" data that + would have traveled in on the network. + + :param buf: The string to put into the memory BIO. + :return: The number of bytes written + """ + if self._into_ssl is None: + raise TypeError("Connection sock was not None") + + if not isinstance(buf, bytes): + raise TypeError("buf must be a byte string") + + result = _lib.BIO_write(self._into_ssl, buf, len(buf)) + if result <= 0: + self._handle_bio_errors(self._into_ssl, result) + return result + + + def renegotiate(self): + """ + Renegotiate the session + + :return: True if the renegotiation can be started, false otherwise + """ + + def do_handshake(self): + """ + Perform an SSL handshake (usually called after renegotiate() or one of + set_*_state()). This can raise the same exceptions as send and recv. + + :return: None. + """ + result = _lib.SSL_do_handshake(self._ssl) + self._raise_ssl_error(self._ssl, result) + + + def renegotiate_pending(self): + """ + Check if there's a renegotiation in progress, it will return false once + a renegotiation is finished. + + :return: Whether there's a renegotiation in progress + """ + + def total_renegotiations(self): + """ + Find out the total number of renegotiations. + + :return: The number of renegotiations. 
+ """ + return _lib.SSL_total_renegotiations(self._ssl) + + + def connect(self, addr): + """ + Connect to remote host and set up client-side SSL + + :param addr: A remote address + :return: What the socket's connect method returns + """ + _lib.SSL_set_connect_state(self._ssl) + return self._socket.connect(addr) + + + def connect_ex(self, addr): + """ + Connect to remote host and set up client-side SSL. Note that if the socket's + connect_ex method doesn't return 0, SSL won't be initialized. + + :param addr: A remove address + :return: What the socket's connect_ex method returns + """ + connect_ex = self._socket.connect_ex + self.set_connect_state() + return connect_ex(addr) + + + def accept(self): + """ + Accept incoming connection and set up SSL on it + + :return: A (conn,addr) pair where conn is a Connection and addr is an + address + """ + client, addr = self._socket.accept() + conn = Connection(self._context, client) + conn.set_accept_state() + return (conn, addr) + + + def bio_shutdown(self): + """ + When using non-socket connections this function signals end of + data on the input for this connection. + + :return: None + """ + if self._from_ssl is None: + raise TypeError("Connection sock was not None") + + _lib.BIO_set_mem_eof_return(self._into_ssl, 0) + + + def shutdown(self): + """ + Send closure alert + + :return: True if the shutdown completed successfully (i.e. both sides + have sent closure alerts), false otherwise (i.e. you have to + wait for a ZeroReturnError on a recv() method call + """ + result = _lib.SSL_shutdown(self._ssl) + if result < 0: + # TODO: This is untested. 
+ _raise_current_error() + elif result > 0: + return True + else: + return False + + + def get_cipher_list(self): + """ + Get the session cipher list + + :return: A list of cipher strings + """ + ciphers = [] + for i in count(): + result = _lib.SSL_get_cipher_list(self._ssl, i) + if result == _ffi.NULL: + break + ciphers.append(_native(_ffi.string(result))) + return ciphers + + + def get_client_ca_list(self): + """ + Get CAs whose certificates are suggested for client authentication. + + :return: If this is a server connection, a list of X509Names representing + the acceptable CAs as set by :py:meth:`OpenSSL.SSL.Context.set_client_ca_list` or + :py:meth:`OpenSSL.SSL.Context.add_client_ca`. If this is a client connection, + the list of such X509Names sent by the server, or an empty list if that + has not yet happened. + """ + ca_names = _lib.SSL_get_client_CA_list(self._ssl) + if ca_names == _ffi.NULL: + # TODO: This is untested. + return [] + + result = [] + for i in range(_lib.sk_X509_NAME_num(ca_names)): + name = _lib.sk_X509_NAME_value(ca_names, i) + copy = _lib.X509_NAME_dup(name) + if copy == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + + pyname = X509Name.__new__(X509Name) + pyname._name = _ffi.gc(copy, _lib.X509_NAME_free) + result.append(pyname) + return result + + + def makefile(self): + """ + The makefile() method is not implemented, since there is no dup semantics + for SSL connections + + :raise NotImplementedError + """ + raise NotImplementedError("Cannot make file object of OpenSSL.SSL.Connection") + + + def get_app_data(self): + """ + Get application data + + :return: The application data + """ + return self._app_data + + + def set_app_data(self, data): + """ + Set application data + + :param data - The application data + :return: None + """ + self._app_data = data + + + def get_shutdown(self): + """ + Get shutdown state + + :return: The shutdown state, a bitvector of SENT_SHUTDOWN, RECEIVED_SHUTDOWN. 
+ """ + return _lib.SSL_get_shutdown(self._ssl) + + + def set_shutdown(self, state): + """ + Set shutdown state + + :param state - bitvector of SENT_SHUTDOWN, RECEIVED_SHUTDOWN. + :return: None + """ + if not isinstance(state, integer_types): + raise TypeError("state must be an integer") + + _lib.SSL_set_shutdown(self._ssl, state) + + + def state_string(self): + """ + Get a verbose state description + + :return: A string representing the state + """ + + def server_random(self): + """ + Get a copy of the server hello nonce. + + :return: A string representing the state + """ + if self._ssl.session == _ffi.NULL: + return None + return _ffi.buffer( + self._ssl.s3.server_random, + _lib.SSL3_RANDOM_SIZE)[:] + + + def client_random(self): + """ + Get a copy of the client hello nonce. + + :return: A string representing the state + """ + if self._ssl.session == _ffi.NULL: + return None + return _ffi.buffer( + self._ssl.s3.client_random, + _lib.SSL3_RANDOM_SIZE)[:] + + + def master_key(self): + """ + Get a copy of the master key. + + :return: A string representing the state + """ + if self._ssl.session == _ffi.NULL: + return None + return _ffi.buffer( + self._ssl.session.master_key, + self._ssl.session.master_key_length)[:] + + + def sock_shutdown(self, *args, **kwargs): + """ + See shutdown(2) + + :return: What the socket's shutdown() method returns + """ + return self._socket.shutdown(*args, **kwargs) + + + def get_peer_certificate(self): + """ + Retrieve the other side's certificate (if any) + + :return: The peer's certificate + """ + cert = _lib.SSL_get_peer_certificate(self._ssl) + if cert != _ffi.NULL: + pycert = X509.__new__(X509) + pycert._x509 = _ffi.gc(cert, _lib.X509_free) + return pycert + return None + + + def get_peer_cert_chain(self): + """ + Retrieve the other side's certificate (if any) + + :return: A list of X509 instances giving the peer's certificate chain, + or None if it does not have one. 
    def get_session(self):
        """
        Returns the Session currently used.

        @return: An instance of :py:class:`OpenSSL.SSL.Session` or :py:obj:`None` if
            no session exists.
        """
        # SSL_get1_session increments the session's reference count, so the
        # wrapper below must pair it with SSL_SESSION_free.
        session = _lib.SSL_get1_session(self._ssl)
        if session == _ffi.NULL:
            return None

        # Bypass Session.__init__; we only need a shell around the pointer.
        pysession = Session.__new__(Session)
        pysession._session = _ffi.gc(session, _lib.SSL_SESSION_free)
        return pysession
def exception_from_error_queue(exceptionType):
    """
    Drain the OpenSSL thread-local error queue and raise *exceptionType*
    carrying every queued error as a (library, function, reason) text triple.
    """
    def _text(charp):
        return native(ffi.string(charp))

    queued = []
    code = lib.ERR_get_error()
    while code != 0:
        queued.append((
            _text(lib.ERR_lib_error_string(code)),
            _text(lib.ERR_func_error_string(code)),
            _text(lib.ERR_reason_error_string(code)),
        ))
        code = lib.ERR_get_error()

    raise exceptionType(queued)
# byte_string converts a native str whose code points are all < 256 into the
# interpreter's bytes type: on Python 3 via the one-to-one "charmap" codec,
# on Python 2 str already is bytes so the value is returned unchanged.
if PY3:
    def byte_string(s):
        return s.encode("charmap")
else:
    def byte_string(s):
        return s
def _set_asn1_time(boundary, when):
    """
    Set the time value of an ASN1 time object.

    @param boundary: An ASN1_GENERALIZEDTIME pointer (or an object safely
        castable to that type) which will have its value set.
    @param when: A string representation of the desired time value.

    @raise TypeError: If C{when} is not a L{bytes} string.
    @raise ValueError: If C{when} does not represent a time in the required
        format.
    @raise RuntimeError: If the time value cannot be set for some other
        (unspecified) reason.
    """
    if not isinstance(when, bytes):
        raise TypeError("when must be a byte string")

    set_result = _lib.ASN1_GENERALIZEDTIME_set_string(
        _ffi.cast('ASN1_GENERALIZEDTIME*', boundary), when)
    if set_result == 0:
        # Distinguish "the string is not a valid time" from "OpenSSL failed
        # for some other reason" by re-validating the string against a
        # scratch ASN1_STRING.
        dummy = _ffi.gc(_lib.ASN1_STRING_new(), _lib.ASN1_STRING_free)
        _lib.ASN1_STRING_set(dummy, when, len(when))
        check_result = _lib.ASN1_GENERALIZEDTIME_check(
            _ffi.cast('ASN1_GENERALIZEDTIME*', dummy))
        if not check_result:
            raise ValueError("Invalid string")
        else:
            # BUG FIX: _untested_error(where) requires the name of the
            # failing API; calling it with no argument raised TypeError
            # instead of the intended RuntimeError.
            _untested_error("ASN1_GENERALIZEDTIME_set_string")
+ + @param timestamp: An ASN1_GENERALIZEDTIME* (or an object safely castable to + that type) from which the time value will be retrieved. + + @return: The time value from C{timestamp} as a L{bytes} string in a certain + format. Or C{None} if the object contains no time value. + """ + string_timestamp = _ffi.cast('ASN1_STRING*', timestamp) + if _lib.ASN1_STRING_length(string_timestamp) == 0: + return None + elif _lib.ASN1_STRING_type(string_timestamp) == _lib.V_ASN1_GENERALIZEDTIME: + return _ffi.string(_lib.ASN1_STRING_data(string_timestamp)) + else: + generalized_timestamp = _ffi.new("ASN1_GENERALIZEDTIME**") + _lib.ASN1_TIME_to_generalizedtime(timestamp, generalized_timestamp) + if generalized_timestamp[0] == _ffi.NULL: + # This may happen: + # - if timestamp was not an ASN1_TIME + # - if allocating memory for the ASN1_GENERALIZEDTIME failed + # - if a copy of the time data from timestamp cannot be made for + # the newly allocated ASN1_GENERALIZEDTIME + # + # These are difficult to test. cffi enforces the ASN1_TIME type. + # Memory allocation failures are a pain to trigger + # deterministically. 
+ _untested_error("ASN1_TIME_to_generalizedtime") + else: + string_timestamp = _ffi.cast( + "ASN1_STRING*", generalized_timestamp[0]) + string_data = _lib.ASN1_STRING_data(string_timestamp) + string_result = _ffi.string(string_data) + _lib.ASN1_GENERALIZEDTIME_free(generalized_timestamp[0]) + return string_result + + + +class PKey(object): + _only_public = False + _initialized = True + + def __init__(self): + pkey = _lib.EVP_PKEY_new() + self._pkey = _ffi.gc(pkey, _lib.EVP_PKEY_free) + self._initialized = False + + + def generate_key(self, type, bits): + """ + Generate a key of a given type, with a given number of a bits + + :param type: The key type (TYPE_RSA or TYPE_DSA) + :param bits: The number of bits + + :return: None + """ + if not isinstance(type, int): + raise TypeError("type must be an integer") + + if not isinstance(bits, int): + raise TypeError("bits must be an integer") + + # TODO Check error return + exponent = _lib.BN_new() + exponent = _ffi.gc(exponent, _lib.BN_free) + _lib.BN_set_word(exponent, _lib.RSA_F4) + + if type == TYPE_RSA: + if bits <= 0: + raise ValueError("Invalid number of bits") + + rsa = _lib.RSA_new() + + result = _lib.RSA_generate_key_ex(rsa, bits, exponent, _ffi.NULL) + if result == 0: + # TODO: The test for this case is commented out. Different + # builds of OpenSSL appear to have different failure modes that + # make it hard to test. Visual inspection of the OpenSSL + # source reveals that a return value of 0 signals an error. + # Manual testing on a particular build of OpenSSL suggests that + # this is probably the appropriate way to handle those errors. + _raise_current_error() + + result = _lib.EVP_PKEY_assign_RSA(self._pkey, rsa) + if not result: + # TODO: It appears as though this can fail if an engine is in + # use which does not support RSA. 
+ _raise_current_error() + + elif type == TYPE_DSA: + dsa = _lib.DSA_generate_parameters( + bits, _ffi.NULL, 0, _ffi.NULL, _ffi.NULL, _ffi.NULL, _ffi.NULL) + if dsa == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + if not _lib.DSA_generate_key(dsa): + # TODO: This is untested. + _raise_current_error() + if not _lib.EVP_PKEY_assign_DSA(self._pkey, dsa): + # TODO: This is untested. + _raise_current_error() + else: + raise Error("No such key type") + + self._initialized = True + + + def check(self): + """ + Check the consistency of an RSA private key. + + :return: True if key is consistent. + :raise Error: if the key is inconsistent. + :raise TypeError: if the key is of a type which cannot be checked. + Only RSA keys can currently be checked. + """ + if self._only_public: + raise TypeError("public key only") + + if _lib.EVP_PKEY_type(self._pkey.type) != _lib.EVP_PKEY_RSA: + raise TypeError("key type unsupported") + + rsa = _lib.EVP_PKEY_get1_RSA(self._pkey) + rsa = _ffi.gc(rsa, _lib.RSA_free) + result = _lib.RSA_check_key(rsa) + if result: + return True + _raise_current_error() + + + def type(self): + """ + Returns the type of the key + + :return: The type of the key. + """ + return self._pkey.type + + + def bits(self): + """ + Returns the number of bits of the key + + :return: The number of bits of the key. + """ + return _lib.EVP_PKEY_bits(self._pkey) +PKeyType = PKey + + + +class X509Name(object): + def __init__(self, name): + """ + Create a new X509Name, copying the given X509Name instance. + + :param name: An X509Name object to copy + """ + name = _lib.X509_NAME_dup(name._name) + self._name = _ffi.gc(name, _lib.X509_NAME_free) + + + def __setattr__(self, name, value): + if name.startswith('_'): + return super(X509Name, self).__setattr__(name, value) + + # Note: we really do not want str subclasses here, so we do not use + # isinstance. 
+ if type(name) is not str: + raise TypeError("attribute name must be string, not '%.200s'" % ( + type(value).__name__,)) + + nid = _lib.OBJ_txt2nid(_byte_string(name)) + if nid == _lib.NID_undef: + try: + _raise_current_error() + except Error: + pass + raise AttributeError("No such attribute") + + # If there's an old entry for this NID, remove it + for i in range(_lib.X509_NAME_entry_count(self._name)): + ent = _lib.X509_NAME_get_entry(self._name, i) + ent_obj = _lib.X509_NAME_ENTRY_get_object(ent) + ent_nid = _lib.OBJ_obj2nid(ent_obj) + if nid == ent_nid: + ent = _lib.X509_NAME_delete_entry(self._name, i) + _lib.X509_NAME_ENTRY_free(ent) + break + + if isinstance(value, _text_type): + value = value.encode('utf-8') + + add_result = _lib.X509_NAME_add_entry_by_NID( + self._name, nid, _lib.MBSTRING_UTF8, value, -1, -1, 0) + if not add_result: + _raise_current_error() + + + def __getattr__(self, name): + """ + Find attribute. An X509Name object has the following attributes: + countryName (alias C), stateOrProvince (alias ST), locality (alias L), + organization (alias O), organizationalUnit (alias OU), commonName (alias + CN) and more... + """ + nid = _lib.OBJ_txt2nid(_byte_string(name)) + if nid == _lib.NID_undef: + # This is a bit weird. OBJ_txt2nid indicated failure, but it seems + # a lower level function, a2d_ASN1_OBJECT, also feels the need to + # push something onto the error queue. If we don't clean that up + # now, someone else will bump into it later and be quite confused. + # See lp#314814. 
+ try: + _raise_current_error() + except Error: + pass + return super(X509Name, self).__getattr__(name) + + entry_index = _lib.X509_NAME_get_index_by_NID(self._name, nid, -1) + if entry_index == -1: + return None + + entry = _lib.X509_NAME_get_entry(self._name, entry_index) + data = _lib.X509_NAME_ENTRY_get_data(entry) + + result_buffer = _ffi.new("unsigned char**") + data_length = _lib.ASN1_STRING_to_UTF8(result_buffer, data) + if data_length < 0: + # TODO: This is untested. + _raise_current_error() + + try: + result = _ffi.buffer(result_buffer[0], data_length)[:].decode('utf-8') + finally: + # XXX untested + _lib.OPENSSL_free(result_buffer[0]) + return result + + + def _cmp(op): + def f(self, other): + if not isinstance(other, X509Name): + return NotImplemented + result = _lib.X509_NAME_cmp(self._name, other._name) + return op(result, 0) + return f + + __eq__ = _cmp(__eq__) + __ne__ = _cmp(__ne__) + + __lt__ = _cmp(__lt__) + __le__ = _cmp(__le__) + + __gt__ = _cmp(__gt__) + __ge__ = _cmp(__ge__) + + def __repr__(self): + """ + String representation of an X509Name + """ + result_buffer = _ffi.new("char[]", 512); + format_result = _lib.X509_NAME_oneline( + self._name, result_buffer, len(result_buffer)) + + if format_result == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + + return "" % ( + _native(_ffi.string(result_buffer)),) + + + def hash(self): + """ + Return the hash value of this name + + :return: None + """ + return _lib.X509_NAME_hash(self._name) + + + def der(self): + """ + Return the DER encoding of this name + + :return: A :py:class:`bytes` instance giving the DER encoded form of + this name. + """ + result_buffer = _ffi.new('unsigned char**') + encode_result = _lib.i2d_X509_NAME(self._name, result_buffer) + if encode_result < 0: + # TODO: This is untested. 
+ _raise_current_error() + + string_result = _ffi.buffer(result_buffer[0], encode_result)[:] + _lib.OPENSSL_free(result_buffer[0]) + return string_result + + + def get_components(self): + """ + Returns the split-up components of this name. + + :return: List of tuples (name, value). + """ + result = [] + for i in range(_lib.X509_NAME_entry_count(self._name)): + ent = _lib.X509_NAME_get_entry(self._name, i) + + fname = _lib.X509_NAME_ENTRY_get_object(ent) + fval = _lib.X509_NAME_ENTRY_get_data(ent) + + nid = _lib.OBJ_obj2nid(fname) + name = _lib.OBJ_nid2sn(nid) + + result.append(( + _ffi.string(name), + _ffi.string( + _lib.ASN1_STRING_data(fval), + _lib.ASN1_STRING_length(fval)))) + + return result +X509NameType = X509Name + + +class X509Extension(object): + def __init__(self, type_name, critical, value, subject=None, issuer=None): + """ + :param typename: The name of the extension to create. + :type typename: :py:data:`str` + + :param critical: A flag indicating whether this is a critical extension. + + :param value: The value of the extension. + :type value: :py:data:`str` + + :param subject: Optional X509 cert to use as subject. + :type subject: :py:class:`X509` + + :param issuer: Optional X509 cert to use as issuer. + :type issuer: :py:class:`X509` + + :return: The X509Extension object + """ + ctx = _ffi.new("X509V3_CTX*") + + # A context is necessary for any extension which uses the r2i conversion + # method. That is, X509V3_EXT_nconf may segfault if passed a NULL ctx. + # Start off by initializing most of the fields to NULL. + _lib.X509V3_set_ctx(ctx, _ffi.NULL, _ffi.NULL, _ffi.NULL, _ffi.NULL, 0) + + # We have no configuration database - but perhaps we should (some + # extensions may require it). + _lib.X509V3_set_ctx_nodb(ctx) + + # Initialize the subject and issuer, if appropriate. ctx is a local, + # and as far as I can tell none of the X509V3_* APIs invoked here steal + # any references, so no need to mess with reference counts or duplicates. 
+ if issuer is not None: + if not isinstance(issuer, X509): + raise TypeError("issuer must be an X509 instance") + ctx.issuer_cert = issuer._x509 + if subject is not None: + if not isinstance(subject, X509): + raise TypeError("subject must be an X509 instance") + ctx.subject_cert = subject._x509 + + if critical: + # There are other OpenSSL APIs which would let us pass in critical + # separately, but they're harder to use, and since value is already + # a pile of crappy junk smuggling a ton of utterly important + # structured data, what's the point of trying to avoid nasty stuff + # with strings? (However, X509V3_EXT_i2d in particular seems like it + # would be a better API to invoke. I do not know where to get the + # ext_struc it desires for its last parameter, though.) + value = b"critical," + value + + extension = _lib.X509V3_EXT_nconf(_ffi.NULL, ctx, type_name, value) + if extension == _ffi.NULL: + _raise_current_error() + self._extension = _ffi.gc(extension, _lib.X509_EXTENSION_free) + + + @property + def _nid(self): + return _lib.OBJ_obj2nid(self._extension.object) + + _prefixes = { + _lib.GEN_EMAIL: "email", + _lib.GEN_DNS: "DNS", + _lib.GEN_URI: "URI", + } + + def _subjectAltNameString(self): + method = _lib.X509V3_EXT_get(self._extension) + if method == _ffi.NULL: + # TODO: This is untested. 
+ _raise_current_error() + payload = self._extension.value.data + length = self._extension.value.length + + payloadptr = _ffi.new("unsigned char**") + payloadptr[0] = payload + + if method.it != _ffi.NULL: + ptr = _lib.ASN1_ITEM_ptr(method.it) + data = _lib.ASN1_item_d2i(_ffi.NULL, payloadptr, length, ptr) + names = _ffi.cast("GENERAL_NAMES*", data) + else: + names = _ffi.cast( + "GENERAL_NAMES*", + method.d2i(_ffi.NULL, payloadptr, length)) + + parts = [] + for i in range(_lib.sk_GENERAL_NAME_num(names)): + name = _lib.sk_GENERAL_NAME_value(names, i) + try: + label = self._prefixes[name.type] + except KeyError: + bio = _new_mem_buf() + _lib.GENERAL_NAME_print(bio, name) + parts.append(_native(_bio_to_string(bio))) + else: + value = _native( + _ffi.buffer(name.d.ia5.data, name.d.ia5.length)[:]) + parts.append(label + ":" + value) + return ", ".join(parts) + + + def __str__(self): + """ + :return: a nice text representation of the extension + """ + if _lib.NID_subject_alt_name == self._nid: + return self._subjectAltNameString() + + bio = _new_mem_buf() + print_result = _lib.X509V3_EXT_print(bio, self._extension, 0, 0) + if not print_result: + # TODO: This is untested. + _raise_current_error() + + return _native(_bio_to_string(bio)) + + + def get_critical(self): + """ + Returns the critical field of the X509Extension + + :return: The critical field. + """ + return _lib.X509_EXTENSION_get_critical(self._extension) + + + def get_short_name(self): + """ + Returns the short version of the type name of the X509Extension + + :return: The short type name. + """ + obj = _lib.X509_EXTENSION_get_object(self._extension) + nid = _lib.OBJ_obj2nid(obj) + return _ffi.string(_lib.OBJ_nid2sn(nid)) + + + def get_data(self): + """ + Returns the data of the X509Extension + + :return: A :py:data:`str` giving the X509Extension's ASN.1 encoded data. 
+ """ + octet_result = _lib.X509_EXTENSION_get_data(self._extension) + string_result = _ffi.cast('ASN1_STRING*', octet_result) + char_result = _lib.ASN1_STRING_data(string_result) + result_length = _lib.ASN1_STRING_length(string_result) + return _ffi.buffer(char_result, result_length)[:] + +X509ExtensionType = X509Extension + + +class X509Req(object): + def __init__(self): + req = _lib.X509_REQ_new() + self._req = _ffi.gc(req, _lib.X509_REQ_free) + + + def set_pubkey(self, pkey): + """ + Set the public key of the certificate request + + :param pkey: The public key to use + :return: None + """ + set_result = _lib.X509_REQ_set_pubkey(self._req, pkey._pkey) + if not set_result: + # TODO: This is untested. + _raise_current_error() + + + def get_pubkey(self): + """ + Get the public key from the certificate request + + :return: The public key + """ + pkey = PKey.__new__(PKey) + pkey._pkey = _lib.X509_REQ_get_pubkey(self._req) + if pkey._pkey == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + pkey._pkey = _ffi.gc(pkey._pkey, _lib.EVP_PKEY_free) + pkey._only_public = True + return pkey + + + def set_version(self, version): + """ + Set the version subfield (RFC 2459, section 4.1.2.1) of the certificate + request. + + :param version: The version number + :return: None + """ + set_result = _lib.X509_REQ_set_version(self._req, version) + if not set_result: + _raise_current_error() + + + def get_version(self): + """ + Get the version subfield (RFC 2459, section 4.1.2.1) of the certificate + request. + + :return: an integer giving the value of the version subfield + """ + return _lib.X509_REQ_get_version(self._req) + + + def get_subject(self): + """ + Create an X509Name object for the subject of the certificate request + + :return: An X509Name object + """ + name = X509Name.__new__(X509Name) + name._name = _lib.X509_REQ_get_subject_name(self._req) + if name._name == _ffi.NULL: + # TODO: This is untested. 
+ _raise_current_error() + + # The name is owned by the X509Req structure. As long as the X509Name + # Python object is alive, keep the X509Req Python object alive. + name._owner = self + + return name + + + def add_extensions(self, extensions): + """ + Add extensions to the request. + + :param extensions: a sequence of X509Extension objects + :return: None + """ + stack = _lib.sk_X509_EXTENSION_new_null() + if stack == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + + stack = _ffi.gc(stack, _lib.sk_X509_EXTENSION_free) + + for ext in extensions: + if not isinstance(ext, X509Extension): + raise ValueError("One of the elements is not an X509Extension") + + # TODO push can fail (here and elsewhere) + _lib.sk_X509_EXTENSION_push(stack, ext._extension) + + add_result = _lib.X509_REQ_add_extensions(self._req, stack) + if not add_result: + # TODO: This is untested. + _raise_current_error() + + + def sign(self, pkey, digest): + """ + Sign the certificate request using the supplied key and digest + + :param pkey: The key to sign with + :param digest: The message digest to use + :return: None + """ + if pkey._only_public: + raise ValueError("Key has only public part") + + if not pkey._initialized: + raise ValueError("Key is uninitialized") + + digest_obj = _lib.EVP_get_digestbyname(_byte_string(digest)) + if digest_obj == _ffi.NULL: + raise ValueError("No such digest method") + + sign_result = _lib.X509_REQ_sign(self._req, pkey._pkey, digest_obj) + if not sign_result: + # TODO: This is untested. + _raise_current_error() + + + def verify(self, pkey): + """ + Verifies a certificate request using the supplied public key + + :param key: a public key + :return: True if the signature is correct. + + :raise OpenSSL.crypto.Error: If the signature is invalid or there is a + problem verifying the signature. 
+ """ + if not isinstance(pkey, PKey): + raise TypeError("pkey must be a PKey instance") + + result = _lib.X509_REQ_verify(self._req, pkey._pkey) + if result <= 0: + _raise_current_error() + + return result + + +X509ReqType = X509Req + + + +class X509(object): + def __init__(self): + # TODO Allocation failure? And why not __new__ instead of __init__? + x509 = _lib.X509_new() + self._x509 = _ffi.gc(x509, _lib.X509_free) + + + def set_version(self, version): + """ + Set version number of the certificate + + :param version: The version number + :type version: :py:class:`int` + + :return: None + """ + if not isinstance(version, int): + raise TypeError("version must be an integer") + + _lib.X509_set_version(self._x509, version) + + + def get_version(self): + """ + Return version number of the certificate + + :return: Version number as a Python integer + """ + return _lib.X509_get_version(self._x509) + + + def get_pubkey(self): + """ + Get the public key of the certificate + + :return: The public key + """ + pkey = PKey.__new__(PKey) + pkey._pkey = _lib.X509_get_pubkey(self._x509) + if pkey._pkey == _ffi.NULL: + _raise_current_error() + pkey._pkey = _ffi.gc(pkey._pkey, _lib.EVP_PKEY_free) + pkey._only_public = True + return pkey + + + def set_pubkey(self, pkey): + """ + Set the public key of the certificate + + :param pkey: The public key + + :return: None + """ + if not isinstance(pkey, PKey): + raise TypeError("pkey must be a PKey instance") + + set_result = _lib.X509_set_pubkey(self._x509, pkey._pkey) + if not set_result: + _raise_current_error() + + + def sign(self, pkey, digest): + """ + Sign the certificate using the supplied key and digest + + :param pkey: The key to sign with + :param digest: The message digest to use + :return: None + """ + if not isinstance(pkey, PKey): + raise TypeError("pkey must be a PKey instance") + + if pkey._only_public: + raise ValueError("Key only has public part") + + if not pkey._initialized: + raise ValueError("Key is 
uninitialized") + + evp_md = _lib.EVP_get_digestbyname(_byte_string(digest)) + if evp_md == _ffi.NULL: + raise ValueError("No such digest method") + + sign_result = _lib.X509_sign(self._x509, pkey._pkey, evp_md) + if not sign_result: + _raise_current_error() + + + def get_signature_algorithm(self): + """ + Retrieve the signature algorithm used in the certificate + + :return: A byte string giving the name of the signature algorithm used in + the certificate. + :raise ValueError: If the signature algorithm is undefined. + """ + alg = self._x509.cert_info.signature.algorithm + nid = _lib.OBJ_obj2nid(alg) + if nid == _lib.NID_undef: + raise ValueError("Undefined signature algorithm") + return _ffi.string(_lib.OBJ_nid2ln(nid)) + + + def digest(self, digest_name): + """ + Return the digest of the X509 object. + + :param digest_name: The name of the digest algorithm to use. + :type digest_name: :py:class:`bytes` + + :return: The digest of the object + """ + digest = _lib.EVP_get_digestbyname(_byte_string(digest_name)) + if digest == _ffi.NULL: + raise ValueError("No such digest method") + + result_buffer = _ffi.new("char[]", _lib.EVP_MAX_MD_SIZE) + result_length = _ffi.new("unsigned int[]", 1) + result_length[0] = len(result_buffer) + + digest_result = _lib.X509_digest( + self._x509, digest, result_buffer, result_length) + + if not digest_result: + # TODO: This is untested. + _raise_current_error() + + return b":".join([ + b16encode(ch).upper() for ch + in _ffi.buffer(result_buffer, result_length[0])]) + + + def subject_name_hash(self): + """ + Return the hash of the X509 subject. + + :return: The hash of the subject. 
+ """ + return _lib.X509_subject_name_hash(self._x509) + + + def set_serial_number(self, serial): + """ + Set serial number of the certificate + + :param serial: The serial number + :type serial: :py:class:`int` + + :return: None + """ + if not isinstance(serial, _integer_types): + raise TypeError("serial must be an integer") + + hex_serial = hex(serial)[2:] + if not isinstance(hex_serial, bytes): + hex_serial = hex_serial.encode('ascii') + + bignum_serial = _ffi.new("BIGNUM**") + + # BN_hex2bn stores the result in &bignum. Unless it doesn't feel like + # it. If bignum is still NULL after this call, then the return value is + # actually the result. I hope. -exarkun + small_serial = _lib.BN_hex2bn(bignum_serial, hex_serial) + + if bignum_serial[0] == _ffi.NULL: + set_result = _lib.ASN1_INTEGER_set( + _lib.X509_get_serialNumber(self._x509), small_serial) + if set_result: + # TODO Not tested + _raise_current_error() + else: + asn1_serial = _lib.BN_to_ASN1_INTEGER(bignum_serial[0], _ffi.NULL) + _lib.BN_free(bignum_serial[0]) + if asn1_serial == _ffi.NULL: + # TODO Not tested + _raise_current_error() + asn1_serial = _ffi.gc(asn1_serial, _lib.ASN1_INTEGER_free) + set_result = _lib.X509_set_serialNumber(self._x509, asn1_serial) + if not set_result: + # TODO Not tested + _raise_current_error() + + + def get_serial_number(self): + """ + Return serial number of the certificate + + :return: Serial number as a Python integer + """ + asn1_serial = _lib.X509_get_serialNumber(self._x509) + bignum_serial = _lib.ASN1_INTEGER_to_BN(asn1_serial, _ffi.NULL) + try: + hex_serial = _lib.BN_bn2hex(bignum_serial) + try: + hexstring_serial = _ffi.string(hex_serial) + serial = int(hexstring_serial, 16) + return serial + finally: + _lib.OPENSSL_free(hex_serial) + finally: + _lib.BN_free(bignum_serial) + + + def gmtime_adj_notAfter(self, amount): + """ + Adjust the time stamp for when the certificate stops being valid + + :param amount: The number of seconds by which to adjust the ending + 
validity time. + :type amount: :py:class:`int` + + :return: None + """ + if not isinstance(amount, int): + raise TypeError("amount must be an integer") + + notAfter = _lib.X509_get_notAfter(self._x509) + _lib.X509_gmtime_adj(notAfter, amount) + + + def gmtime_adj_notBefore(self, amount): + """ + Change the timestamp for when the certificate starts being valid to the current + time plus an offset. + + :param amount: The number of seconds by which to adjust the starting validity + time. + :return: None + """ + if not isinstance(amount, int): + raise TypeError("amount must be an integer") + + notBefore = _lib.X509_get_notBefore(self._x509) + _lib.X509_gmtime_adj(notBefore, amount) + + + def has_expired(self): + """ + Check whether the certificate has expired. + + :return: True if the certificate has expired, false otherwise + """ + now = int(time()) + notAfter = _lib.X509_get_notAfter(self._x509) + return _lib.ASN1_UTCTIME_cmp_time_t( + _ffi.cast('ASN1_UTCTIME*', notAfter), now) < 0 + + + def _get_boundary_time(self, which): + return _get_asn1_time(which(self._x509)) + + + def get_notBefore(self): + """ + Retrieve the time stamp for when the certificate starts being valid + + :return: A string giving the timestamp, in the format:: + + YYYYMMDDhhmmssZ + YYYYMMDDhhmmss+hhmm + YYYYMMDDhhmmss-hhmm + + or None if there is no value set. 
+ """ + return self._get_boundary_time(_lib.X509_get_notBefore) + + + def _set_boundary_time(self, which, when): + return _set_asn1_time(which(self._x509), when) + + + def set_notBefore(self, when): + """ + Set the time stamp for when the certificate starts being valid + + :param when: A string giving the timestamp, in the format: + + YYYYMMDDhhmmssZ + YYYYMMDDhhmmss+hhmm + YYYYMMDDhhmmss-hhmm + :type when: :py:class:`bytes` + + :return: None + """ + return self._set_boundary_time(_lib.X509_get_notBefore, when) + + + def get_notAfter(self): + """ + Retrieve the time stamp for when the certificate stops being valid + + :return: A string giving the timestamp, in the format:: + + YYYYMMDDhhmmssZ + YYYYMMDDhhmmss+hhmm + YYYYMMDDhhmmss-hhmm + + or None if there is no value set. + """ + return self._get_boundary_time(_lib.X509_get_notAfter) + + + def set_notAfter(self, when): + """ + Set the time stamp for when the certificate stops being valid + + :param when: A string giving the timestamp, in the format: + + YYYYMMDDhhmmssZ + YYYYMMDDhhmmss+hhmm + YYYYMMDDhhmmss-hhmm + :type when: :py:class:`bytes` + + :return: None + """ + return self._set_boundary_time(_lib.X509_get_notAfter, when) + + + def _get_name(self, which): + name = X509Name.__new__(X509Name) + name._name = which(self._x509) + if name._name == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + + # The name is owned by the X509 structure. As long as the X509Name + # Python object is alive, keep the X509 Python object alive. + name._owner = self + + return name + + + def _set_name(self, which, name): + if not isinstance(name, X509Name): + raise TypeError("name must be an X509Name") + set_result = which(self._x509, name._name) + if not set_result: + # TODO: This is untested. 
+ _raise_current_error() + + + def get_issuer(self): + """ + Create an X509Name object for the issuer of the certificate + + :return: An X509Name object + """ + return self._get_name(_lib.X509_get_issuer_name) + + + def set_issuer(self, issuer): + """ + Set the issuer of the certificate + + :param issuer: The issuer name + :type issuer: :py:class:`X509Name` + + :return: None + """ + return self._set_name(_lib.X509_set_issuer_name, issuer) + + + def get_subject(self): + """ + Create an X509Name object for the subject of the certificate + + :return: An X509Name object + """ + return self._get_name(_lib.X509_get_subject_name) + + + def set_subject(self, subject): + """ + Set the subject of the certificate + + :param subject: The subject name + :type subject: :py:class:`X509Name` + :return: None + """ + return self._set_name(_lib.X509_set_subject_name, subject) + + + def get_extension_count(self): + """ + Get the number of extensions on the certificate. + + :return: The number of extensions as an integer. + """ + return _lib.X509_get_ext_count(self._x509) + + + def add_extensions(self, extensions): + """ + Add extensions to the certificate. + + :param extensions: a sequence of X509Extension objects + :return: None + """ + for ext in extensions: + if not isinstance(ext, X509Extension): + raise ValueError("One of the elements is not an X509Extension") + + add_result = _lib.X509_add_ext(self._x509, ext._extension, -1) + if not add_result: + _raise_current_error() + + + def get_extension(self, index): + """ + Get a specific extension of the certificate by index. + + :param index: The index of the extension to retrieve. + :return: The X509Extension object at the specified index. 
+ """ + ext = X509Extension.__new__(X509Extension) + ext._extension = _lib.X509_get_ext(self._x509, index) + if ext._extension == _ffi.NULL: + raise IndexError("extension index out of bounds") + + extension = _lib.X509_EXTENSION_dup(ext._extension) + ext._extension = _ffi.gc(extension, _lib.X509_EXTENSION_free) + return ext + +X509Type = X509 + + + +class X509Store(object): + def __init__(self): + store = _lib.X509_STORE_new() + self._store = _ffi.gc(store, _lib.X509_STORE_free) + + + def add_cert(self, cert): + if not isinstance(cert, X509): + raise TypeError() + + result = _lib.X509_STORE_add_cert(self._store, cert._x509) + if not result: + _raise_current_error() + + +X509StoreType = X509Store + + + +def load_certificate(type, buffer): + """ + Load a certificate from a buffer + + :param type: The file type (one of FILETYPE_PEM, FILETYPE_ASN1) + + :param buffer: The buffer the certificate is stored in + :type buffer: :py:class:`bytes` + + :return: The X509 object + """ + if isinstance(buffer, _text_type): + buffer = buffer.encode("ascii") + + bio = _new_mem_buf(buffer) + + if type == FILETYPE_PEM: + x509 = _lib.PEM_read_bio_X509(bio, _ffi.NULL, _ffi.NULL, _ffi.NULL) + elif type == FILETYPE_ASN1: + x509 = _lib.d2i_X509_bio(bio, _ffi.NULL); + else: + raise ValueError( + "type argument must be FILETYPE_PEM or FILETYPE_ASN1") + + if x509 == _ffi.NULL: + _raise_current_error() + + cert = X509.__new__(X509) + cert._x509 = _ffi.gc(x509, _lib.X509_free) + return cert + + +def dump_certificate(type, cert): + """ + Dump a certificate to a buffer + + :param type: The file type (one of FILETYPE_PEM, FILETYPE_ASN1, or + FILETYPE_TEXT) + :param cert: The certificate to dump + :return: The buffer with the dumped certificate in + """ + bio = _new_mem_buf() + + if type == FILETYPE_PEM: + result_code = _lib.PEM_write_bio_X509(bio, cert._x509) + elif type == FILETYPE_ASN1: + result_code = _lib.i2d_X509_bio(bio, cert._x509) + elif type == FILETYPE_TEXT: + result_code = 
def dump_privatekey(type, pkey, cipher=None, passphrase=None):
    """
    Dump a private key to a buffer

    :param type: The file type (one of FILETYPE_PEM, FILETYPE_ASN1, or
        FILETYPE_TEXT)
    :param pkey: The PKey to dump
    :param cipher: (optional) if encrypted PEM format, the cipher to
        use
    :param passphrase: (optional) if encrypted PEM format, this can be either
        the passphrase to use, or a callback for providing the
        passphrase.
    :return: The buffer with the dumped key in
    :rtype: :py:data:`str`
    :raise TypeError: If a cipher is given without a passphrase.
    :raise ValueError: If the cipher name is not recognized.
    """
    bio = _new_mem_buf()

    if cipher is not None:
        if passphrase is None:
            raise TypeError(
                "if a value is given for cipher "
                "one must also be given for passphrase")
        cipher_obj = _lib.EVP_get_cipherbyname(_byte_string(cipher))
        if cipher_obj == _ffi.NULL:
            raise ValueError("Invalid cipher name")
    else:
        cipher_obj = _ffi.NULL

    helper = _PassphraseHelper(type, passphrase)
    if type == FILETYPE_PEM:
        result_code = _lib.PEM_write_bio_PrivateKey(
            bio, pkey._pkey, cipher_obj, _ffi.NULL, 0,
            helper.callback, helper.callback_args)
        # Re-raise any exception the passphrase callback swallowed.
        helper.raise_if_problem()
    elif type == FILETYPE_ASN1:
        result_code = _lib.i2d_PrivateKey_bio(bio, pkey._pkey)
    elif type == FILETYPE_TEXT:
        # EVP_PKEY_get1_RSA increments the reference count, so the RSA
        # structure must be released.  The original leaked it (it carried a
        # "TODO RSA_free(rsa)?" comment); attach the destructor here.
        rsa = _ffi.gc(_lib.EVP_PKEY_get1_RSA(pkey._pkey), _lib.RSA_free)
        result_code = _lib.RSA_print(bio, rsa, 0)
    else:
        raise ValueError(
            "type argument must be FILETYPE_PEM, FILETYPE_ASN1, or "
            "FILETYPE_TEXT")

    if result_code == 0:
        _raise_current_error()

    return _bio_to_string(bio)
def _X509_REVOKED_dup(original):
    # Deep-copy an X509_REVOKED structure so callers can hand out entries
    # whose lifetime is independent of the owning CRL.
    copy = _lib.X509_REVOKED_new()
    if copy == _ffi.NULL:
        # TODO: This is untested.
        _raise_current_error()

    if original.serialNumber != _ffi.NULL:
        copy.serialNumber = _lib.ASN1_INTEGER_dup(original.serialNumber)

    if original.revocationDate != _ffi.NULL:
        copy.revocationDate = _lib.M_ASN1_TIME_dup(original.revocationDate)

    if original.extensions != _ffi.NULL:
        # NOTE(review): sk_X509_EXTENSION_new_null and the per-extension dup
        # results are not NULL-checked here — presumably allocation failure
        # is considered unrecoverable; confirm before hardening.
        extension_stack = _lib.sk_X509_EXTENSION_new_null()
        for i in range(_lib.sk_X509_EXTENSION_num(original.extensions)):
            original_ext = _lib.sk_X509_EXTENSION_value(original.extensions, i)
            copy_ext = _lib.X509_EXTENSION_dup(original_ext)
            _lib.sk_X509_EXTENSION_push(extension_stack, copy_ext)
        copy.extensions = extension_stack

    copy.sequence = original.sequence
    return copy



class Revoked(object):
    """
    A single revoked-certificate entry of a CRL.
    """
    # http://www.openssl.org/docs/apps/x509v3_config.html#CRL_distribution_points_
    # which differs from crl_reasons of crypto/x509v3/v3_enum.c that matches
    # OCSP_crl_reason_str.  We use the latter, just like the command line
    # program.
    _crl_reasons = [
        b"unspecified",
        b"keyCompromise",
        b"CACompromise",
        b"affiliationChanged",
        b"superseded",
        b"cessationOfOperation",
        b"certificateHold",
        # b"removeFromCRL",
    ]

    def __init__(self):
        revoked = _lib.X509_REVOKED_new()
        self._revoked = _ffi.gc(revoked, _lib.X509_REVOKED_free)


    def set_serial(self, hex_str):
        """
        Set the serial number of a revoked Revoked structure

        :param hex_str: The new serial number, as a hexadecimal string.
        :type hex_str: :py:data:`str`
        :return: None
        :raise ValueError: If *hex_str* is not a valid hexadecimal string.
        """
        bignum_serial = _ffi.gc(_lib.BN_new(), _lib.BN_free)
        bignum_ptr = _ffi.new("BIGNUM**")
        bignum_ptr[0] = bignum_serial
        bn_result = _lib.BN_hex2bn(bignum_ptr, hex_str)
        if not bn_result:
            raise ValueError("bad hex string")

        asn1_serial = _ffi.gc(
            _lib.BN_to_ASN1_INTEGER(bignum_serial, _ffi.NULL),
            _lib.ASN1_INTEGER_free)
        _lib.X509_REVOKED_set_serialNumber(self._revoked, asn1_serial)


    def get_serial(self):
        """
        Return the serial number of a Revoked structure

        :return: The serial number as a string
        """
        bio = _new_mem_buf()

        result = _lib.i2a_ASN1_INTEGER(bio, self._revoked.serialNumber)
        if result < 0:
            # TODO: This is untested.
            _raise_current_error()

        return _bio_to_string(bio)


    def _delete_reason(self):
        # Remove any existing crl_reason extension (at most one is kept).
        stack = self._revoked.extensions
        for i in range(_lib.sk_X509_EXTENSION_num(stack)):
            ext = _lib.sk_X509_EXTENSION_value(stack, i)
            if _lib.OBJ_obj2nid(ext.object) == _lib.NID_crl_reason:
                _lib.X509_EXTENSION_free(ext)
                _lib.sk_X509_EXTENSION_delete(stack, i)
                break


    def set_reason(self, reason):
        """
        Set the reason of a Revoked object.

        If :py:data:`reason` is :py:data:`None`, delete the reason instead.

        :param reason: The reason string.
        :type reason: :py:class:`str` or :py:class:`NoneType`
        :return: None
        :raise ValueError: If *reason* is not one of :py:meth:`all_reasons`.
        """
        if reason is None:
            self._delete_reason()
        elif not isinstance(reason, bytes):
            raise TypeError("reason must be None or a byte string")
        else:
            # Normalization: case-insensitive, spaces ignored.
            reason = reason.lower().replace(b' ', b'')
            reason_code = [r.lower() for r in self._crl_reasons].index(reason)

            new_reason_ext = _lib.ASN1_ENUMERATED_new()
            if new_reason_ext == _ffi.NULL:
                # TODO: This is untested.
                _raise_current_error()
            new_reason_ext = _ffi.gc(new_reason_ext, _lib.ASN1_ENUMERATED_free)

            set_result = _lib.ASN1_ENUMERATED_set(new_reason_ext, reason_code)
            # BUG FIX: ASN1_ENUMERATED_set returns an int (1/0), so the
            # original comparison against _ffi.NULL was always false and
            # failures were silently ignored.
            if not set_result:
                # TODO: This is untested.
                _raise_current_error()

            self._delete_reason()
            add_result = _lib.X509_REVOKED_add1_ext_i2d(
                self._revoked, _lib.NID_crl_reason, new_reason_ext, 0, 0)

            if not add_result:
                # TODO: This is untested.
                _raise_current_error()


    def get_reason(self):
        """
        Return the reason of a Revoked object.

        :return: The reason as a string, or None if no reason is set.
        """
        extensions = self._revoked.extensions
        for i in range(_lib.sk_X509_EXTENSION_num(extensions)):
            ext = _lib.sk_X509_EXTENSION_value(extensions, i)
            if _lib.OBJ_obj2nid(ext.object) == _lib.NID_crl_reason:
                bio = _new_mem_buf()

                print_result = _lib.X509V3_EXT_print(bio, ext, 0, 0)
                if not print_result:
                    # Fall back to a raw octet-string dump.
                    print_result = _lib.M_ASN1_OCTET_STRING_print(bio, ext.value)
                    if print_result == 0:
                        # TODO: This is untested.
                        _raise_current_error()

                return _bio_to_string(bio)


    def all_reasons(self):
        """
        Return a list of all the supported reason strings.

        :return: A list of reason strings.
        """
        return self._crl_reasons[:]


    def set_rev_date(self, when):
        """
        Set the revocation timestamp

        :param when: A string giving the timestamp, in the format:

                YYYYMMDDhhmmssZ
                YYYYMMDDhhmmss+hhmm
                YYYYMMDDhhmmss-hhmm

        :return: None
        """
        return _set_asn1_time(self._revoked.revocationDate, when)


    def get_rev_date(self):
        """
        Retrieve the revocation date

        :return: A string giving the timestamp, in the format:

                YYYYMMDDhhmmssZ
                YYYYMMDDhhmmss+hhmm
                YYYYMMDDhhmmss-hhmm
        """
        return _get_asn1_time(self._revoked.revocationDate)



class CRL(object):
    def __init__(self):
        """
        Create a new empty CRL object.
        """
        crl = _lib.X509_CRL_new()
        self._crl = _ffi.gc(crl, _lib.X509_CRL_free)


    def get_revoked(self):
        """
        Return revoked portion of the CRL structure (by value not reference).

        :return: A tuple of Revoked objects, or None when the CRL contains
            no revocations (historical quirk, preserved for compatibility).
        """
        results = []
        revoked_stack = self._crl.crl.revoked
        for i in range(_lib.sk_X509_REVOKED_num(revoked_stack)):
            revoked = _lib.sk_X509_REVOKED_value(revoked_stack, i)
            # Copy so the returned entry does not dangle if the CRL dies.
            revoked_copy = _X509_REVOKED_dup(revoked)
            pyrev = Revoked.__new__(Revoked)
            pyrev._revoked = _ffi.gc(revoked_copy, _lib.X509_REVOKED_free)
            results.append(pyrev)
        if results:
            return tuple(results)


    def add_revoked(self, revoked):
        """
        Add a revoked (by value not reference) to the CRL structure

        :param revoked: The new revoked entry.
        :type revoked: :class:`Revoked`

        :return: None
        """
        copy = _X509_REVOKED_dup(revoked._revoked)
        if copy == _ffi.NULL:
            # TODO: This is untested.
            _raise_current_error()

        # add0: the CRL takes ownership of `copy`.
        add_result = _lib.X509_CRL_add0_revoked(self._crl, copy)
        if add_result == 0:
            # TODO: This is untested.
            _raise_current_error()


    def export(self, cert, key, type=FILETYPE_PEM, days=100):
        """
        export a CRL as a string

        :param cert: Used to sign CRL.
        :type cert: :class:`X509`

        :param key: Used to sign CRL.
        :type key: :class:`PKey`

        :param type: The export format, either :py:data:`FILETYPE_PEM`,
            :py:data:`FILETYPE_ASN1`, or :py:data:`FILETYPE_TEXT`.

        :param days: The number of days until the next update of this CRL.
        :type days: :py:data:`int`

        :return: :py:data:`str`
        """
        if not isinstance(cert, X509):
            raise TypeError("cert must be an X509 instance")
        if not isinstance(key, PKey):
            raise TypeError("key must be a PKey instance")
        if not isinstance(type, int):
            raise TypeError("type must be an integer")

        # Use the gc-wrapped memory BIO helper; the original allocated a
        # raw BIO_new(BIO_s_mem()) and leaked it.
        bio = _new_mem_buf()

        # A scratch time object to give different values to different
        # CRL fields; freed by the gc handler.
        sometime = _lib.ASN1_TIME_new()
        if sometime == _ffi.NULL:
            # TODO: This is untested.
            _raise_current_error()
        sometime = _ffi.gc(sometime, _lib.ASN1_TIME_free)

        _lib.X509_gmtime_adj(sometime, 0)
        _lib.X509_CRL_set_lastUpdate(self._crl, sometime)

        _lib.X509_gmtime_adj(sometime, days * 24 * 60 * 60)
        _lib.X509_CRL_set_nextUpdate(self._crl, sometime)

        _lib.X509_CRL_set_issuer_name(
            self._crl, _lib.X509_get_subject_name(cert._x509))

        # SECURITY NOTE: signs with MD5, which is cryptographically broken.
        # Kept for backward compatibility with existing callers; new code
        # should not rely on this digest.
        sign_result = _lib.X509_CRL_sign(self._crl, key._pkey, _lib.EVP_md5())
        if not sign_result:
            _raise_current_error()

        if type == FILETYPE_PEM:
            ret = _lib.PEM_write_bio_X509_CRL(bio, self._crl)
        elif type == FILETYPE_ASN1:
            ret = _lib.i2d_X509_CRL_bio(bio, self._crl)
        elif type == FILETYPE_TEXT:
            ret = _lib.X509_CRL_print(bio, self._crl)
        else:
            raise ValueError(
                "type argument must be FILETYPE_PEM, FILETYPE_ASN1, or "
                "FILETYPE_TEXT")

        if not ret:
            # TODO: This is untested.
            _raise_current_error()

        return _bio_to_string(bio)
CRLType = CRL



class PKCS7(object):
    """
    A PKCS#7 structure; exposes only type introspection.
    """
    def type_is_signed(self):
        """
        Check if this NID_pkcs7_signed object

        :return: True if the PKCS7 is of type signed
        """
        if _lib.PKCS7_type_is_signed(self._pkcs7):
            return True
        return False


    def type_is_enveloped(self):
        """
        Check if this NID_pkcs7_enveloped object

        :returns: True if the PKCS7 is of type enveloped
        """
        if _lib.PKCS7_type_is_enveloped(self._pkcs7):
            return True
        return False


    def type_is_signedAndEnveloped(self):
        """
        Check if this NID_pkcs7_signedAndEnveloped object

        :returns: True if the PKCS7 is of type signedAndEnveloped
        """
        if _lib.PKCS7_type_is_signedAndEnveloped(self._pkcs7):
            return True
        return False


    def type_is_data(self):
        """
        Check if this NID_pkcs7_data object

        :return: True if the PKCS7 is of type data
        """
        if _lib.PKCS7_type_is_data(self._pkcs7):
            return True
        return False


    def get_type_name(self):
        """
        Returns the type name of the PKCS7 structure

        :return: A string with the typename
        """
        nid = _lib.OBJ_obj2nid(self._pkcs7.type)
        string_type = _lib.OBJ_nid2sn(nid)
        return _ffi.string(string_type)
PKCS7Type = PKCS7



class PKCS12(object):
    """
    A PKCS#12 archive: an optional private key, certificate, CA chain
    and friendly name, assembled in Python and serialized by export().
    """
    def __init__(self):
        self._pkey = None
        self._cert = None
        self._cacerts = None
        self._friendlyname = None


    def get_certificate(self):
        """
        Return certificate portion of the PKCS12 structure

        :return: X509 object containing the certificate, or None if unset
        """
        return self._cert


    def set_certificate(self, cert):
        """
        Replace the certificate portion of the PKCS12 structure

        :param cert: The new certificate.  (Note: unlike the docstring of
            some historical releases suggested, None is not accepted.)
        :type cert: :py:class:`X509`
        :return: None
        """
        if not isinstance(cert, X509):
            raise TypeError("cert must be an X509 instance")
        self._cert = cert


    def get_privatekey(self):
        """
        Return private key portion of the PKCS12 structure

        :returns: PKey object containing the private key, or None if unset
        """
        return self._pkey


    def set_privatekey(self, pkey):
        """
        Replace or set the private key portion of the PKCS12 structure

        :param pkey: The new private key.
        :type pkey: :py:class:`PKey`
        :return: None
        """
        if not isinstance(pkey, PKey):
            raise TypeError("pkey must be a PKey instance")
        self._pkey = pkey


    def get_ca_certificates(self):
        """
        Return CA certificates within of the PKCS12 object

        :return: A newly created tuple containing the CA certificates in the
            chain, if any are present, or None if no CA certificates are
            present.
        """
        if self._cacerts is not None:
            return tuple(self._cacerts)


    def set_ca_certificates(self, cacerts):
        """
        Replace or set the CA certificates within the PKCS12 object.

        :param cacerts: The new CA certificates.
        :type cacerts: :py:data:`None` or an iterable of :py:class:`X509`
        :return: None
        """
        if cacerts is None:
            self._cacerts = None
        else:
            cacerts = list(cacerts)
            for cert in cacerts:
                if not isinstance(cert, X509):
                    raise TypeError("iterable must only contain X509 instances")
            self._cacerts = cacerts


    def set_friendlyname(self, name):
        """
        Replace or set the friendly name of the PKCS12 structure

        :param name: The new friendly name.
        :type name: :py:class:`bytes` or :py:data:`None`
        :return: None
        """
        if name is None:
            self._friendlyname = None
        elif not isinstance(name, bytes):
            raise TypeError(
                "name must be a byte string or None (not %r)" % (name,))
        self._friendlyname = name


    def get_friendlyname(self):
        """
        Return friendly name portion of the PKCS12 structure

        :returns: String containing the friendlyname, or None if unset
        """
        return self._friendlyname


    def export(self, passphrase=None, iter=2048, maciter=1):
        """
        Dump a PKCS12 object as a string.  See also "man PKCS12_create".

        :param passphrase: used to encrypt the PKCS12
        :type passphrase: :py:data:`bytes`

        :param iter: How many times to repeat the encryption
        :type iter: :py:data:`int`

        :param maciter: How many times to repeat the MAC
        :type maciter: :py:data:`int`

        :return: The string containing the PKCS12
        """
        if self._cacerts is None:
            cacerts = _ffi.NULL
        else:
            cacerts = _lib.sk_X509_new_null()
            cacerts = _ffi.gc(cacerts, _lib.sk_X509_free)
            for cert in self._cacerts:
                _lib.sk_X509_push(cacerts, cert._x509)

        if passphrase is None:
            passphrase = _ffi.NULL

        friendlyname = self._friendlyname
        if friendlyname is None:
            friendlyname = _ffi.NULL

        if self._pkey is None:
            pkey = _ffi.NULL
        else:
            pkey = self._pkey._pkey

        if self._cert is None:
            cert = _ffi.NULL
        else:
            cert = self._cert._x509

        pkcs12 = _lib.PKCS12_create(
            passphrase, friendlyname, pkey, cert, cacerts,
            _lib.NID_pbe_WithSHA1And3_Key_TripleDES_CBC,
            _lib.NID_pbe_WithSHA1And3_Key_TripleDES_CBC,
            iter, maciter, 0)
        if pkcs12 == _ffi.NULL:
            _raise_current_error()
        pkcs12 = _ffi.gc(pkcs12, _lib.PKCS12_free)

        bio = _new_mem_buf()
        _lib.i2d_PKCS12_bio(bio, pkcs12)
        return _bio_to_string(bio)

PKCS12Type = PKCS12
class NetscapeSPKI(object):
    """
    A Netscape SPKI (signed public key and challenge) structure.
    """
    def __init__(self):
        spki = _lib.NETSCAPE_SPKI_new()
        self._spki = _ffi.gc(spki, _lib.NETSCAPE_SPKI_free)


    def sign(self, pkey, digest):
        """
        Sign the certificate request using the supplied key and digest

        :param pkey: The key to sign with
        :param digest: The message digest to use
        :return: None
        """
        if pkey._only_public:
            raise ValueError("Key has only public part")

        if not pkey._initialized:
            raise ValueError("Key is uninitialized")

        md = _lib.EVP_get_digestbyname(_byte_string(digest))
        if md == _ffi.NULL:
            raise ValueError("No such digest method")

        if not _lib.NETSCAPE_SPKI_sign(self._spki, pkey._pkey, md):
            # TODO: This is untested.
            _raise_current_error()


    def verify(self, key):
        """
        Verifies a certificate request using the supplied public key

        :param key: a public key
        :return: True if the signature is correct.
        :raise OpenSSL.crypto.Error: If the signature is invalid or there is a
            problem verifying the signature.
        """
        if _lib.NETSCAPE_SPKI_verify(self._spki, key._pkey) <= 0:
            _raise_current_error()
        return True


    def b64_encode(self):
        """
        Generate a base64 encoded string from an SPKI

        :return: The base64 encoded string
        """
        # NETSCAPE_SPKI_b64_encode allocates; copy the result into a Python
        # string, then release the OpenSSL-owned buffer.
        raw = _lib.NETSCAPE_SPKI_b64_encode(self._spki)
        encoded = _ffi.string(raw)
        _lib.CRYPTO_free(raw)
        return encoded


    def get_pubkey(self):
        """
        Get the public key of the certificate

        :return: The public key
        """
        result = PKey.__new__(PKey)
        result._pkey = _lib.NETSCAPE_SPKI_get_pubkey(self._spki)
        if result._pkey == _ffi.NULL:
            # TODO: This is untested.
            _raise_current_error()
        result._pkey = _ffi.gc(result._pkey, _lib.EVP_PKEY_free)
        result._only_public = True
        return result


    def set_pubkey(self, pkey):
        """
        Set the public key of the certificate

        :param pkey: The public key
        :return: None
        """
        if not _lib.NETSCAPE_SPKI_set_pubkey(self._spki, pkey._pkey):
            # TODO: This is untested.
            _raise_current_error()
NetscapeSPKIType = NetscapeSPKI


class _PassphraseHelper(object):
    """
    Adapt a user-supplied passphrase (byte string or callable) to the
    ``pem_password_cb`` interface OpenSSL expects, capturing any exception
    the callback raises so it can be re-raised on the Python side.
    """
    def __init__(self, type, passphrase, more_args=False, truncate=False):
        if type != FILETYPE_PEM and passphrase is not None:
            raise ValueError("only FILETYPE_PEM key format supports encryption")
        self._passphrase = passphrase
        self._more_args = more_args
        self._truncate = truncate
        self._problems = []


    @property
    def callback(self):
        # A literal byte-string passphrase is passed via callback_args
        # instead, so no callback is needed in that case.
        passphrase = self._passphrase
        if passphrase is None or isinstance(passphrase, bytes):
            return _ffi.NULL
        if callable(passphrase):
            return _ffi.callback("pem_password_cb", self._read_passphrase)
        raise TypeError("Last argument must be string or callable")


    @property
    def callback_args(self):
        passphrase = self._passphrase
        if passphrase is None or callable(passphrase):
            return _ffi.NULL
        if isinstance(passphrase, bytes):
            return passphrase
        raise TypeError("Last argument must be string or callable")


    def raise_if_problem(self, exceptionType=Error):
        # Drain the OpenSSL error queue; if the callback itself raised,
        # prefer that exception over the queue contents.
        try:
            _exception_from_error_queue(exceptionType)
        except exceptionType as e:
            from_queue = e
        if self._problems:
            raise self._problems[0]
        return from_queue


    def _read_passphrase(self, buf, size, rwflag, userdata):
        # Invoked from C: must not let exceptions escape; record them and
        # report failure (0) instead.
        try:
            if self._more_args:
                result = self._passphrase(size, rwflag, userdata)
            else:
                result = self._passphrase(rwflag)
            if not isinstance(result, bytes):
                raise ValueError("String expected")
            if len(result) > size:
                if not self._truncate:
                    raise ValueError(
                        "passphrase returned by callback is too long")
                result = result[:size]
            for i in range(len(result)):
                buf[i] = result[i:i + 1]
            return len(result)
        except Exception as e:
            self._problems.append(e)
            return 0
def load_privatekey(type, buffer, passphrase=None):
    """
    Load a private key from a buffer

    :param type: The file type (one of FILETYPE_PEM, FILETYPE_ASN1)
    :param buffer: The buffer the key is stored in
    :param passphrase: (optional) if encrypted PEM format, this can be
        either the passphrase to use, or a callback for
        providing the passphrase.

    :return: The PKey object
    """
    if isinstance(buffer, _text_type):
        buffer = buffer.encode("ascii")

    bio = _new_mem_buf(buffer)

    helper = _PassphraseHelper(type, passphrase)
    if type == FILETYPE_PEM:
        evp_pkey = _lib.PEM_read_bio_PrivateKey(
            bio, _ffi.NULL, helper.callback, helper.callback_args)
        # Re-raise any exception the passphrase callback swallowed.
        helper.raise_if_problem()
    elif type == FILETYPE_ASN1:
        evp_pkey = _lib.d2i_PrivateKey_bio(bio, _ffi.NULL)
    else:
        raise ValueError("type argument must be FILETYPE_PEM or FILETYPE_ASN1")

    if evp_pkey == _ffi.NULL:
        _raise_current_error()

    pkey = PKey.__new__(PKey)
    pkey._pkey = _ffi.gc(evp_pkey, _lib.EVP_PKEY_free)
    return pkey



def dump_certificate_request(type, req):
    """
    Dump a certificate request to a buffer

    :param type: The file type (one of FILETYPE_PEM, FILETYPE_ASN1)
    :param req: The certificate request to dump
    :return: The buffer with the dumped certificate request in
    """
    bio = _new_mem_buf()

    if type == FILETYPE_PEM:
        result_code = _lib.PEM_write_bio_X509_REQ(bio, req._req)
    elif type == FILETYPE_ASN1:
        result_code = _lib.i2d_X509_REQ_bio(bio, req._req)
    elif type == FILETYPE_TEXT:
        result_code = _lib.X509_REQ_print_ex(bio, req._req, 0, 0)
    else:
        raise ValueError(
            "type argument must be FILETYPE_PEM, FILETYPE_ASN1, or "
            "FILETYPE_TEXT")

    if result_code == 0:
        # TODO: This is untested.
        _raise_current_error()

    return _bio_to_string(bio)



def load_certificate_request(type, buffer):
    """
    Load a certificate request from a buffer

    :param type: The file type (one of FILETYPE_PEM, FILETYPE_ASN1)
    :param buffer: The buffer the certificate request is stored in
    :return: The X509Req object
    """
    if isinstance(buffer, _text_type):
        buffer = buffer.encode("ascii")

    bio = _new_mem_buf(buffer)

    if type == FILETYPE_PEM:
        req = _lib.PEM_read_bio_X509_REQ(bio, _ffi.NULL, _ffi.NULL, _ffi.NULL)
    elif type == FILETYPE_ASN1:
        req = _lib.d2i_X509_REQ_bio(bio, _ffi.NULL)
    else:
        raise ValueError("type argument must be FILETYPE_PEM or FILETYPE_ASN1")

    if req == _ffi.NULL:
        # TODO: This is untested.
        _raise_current_error()

    x509req = X509Req.__new__(X509Req)
    x509req._req = _ffi.gc(req, _lib.X509_REQ_free)
    return x509req



def sign(pkey, data, digest):
    """
    Sign data with a digest

    :param pkey: Pkey to sign with
    :param data: data to be signed
    :param digest: message digest to use
    :return: signature
    """
    digest_obj = _lib.EVP_get_digestbyname(_byte_string(digest))
    if digest_obj == _ffi.NULL:
        raise ValueError("No such digest method")

    md_ctx = _ffi.new("EVP_MD_CTX*")
    md_ctx = _ffi.gc(md_ctx, _lib.EVP_MD_CTX_cleanup)

    _lib.EVP_SignInit(md_ctx, digest_obj)
    _lib.EVP_SignUpdate(md_ctx, data, len(data))

    # BUG FIX: size the signature buffer from the key rather than a fixed
    # 512 bytes, which is too small for RSA keys longer than 4096 bits and
    # would make EVP_SignFinal overrun/fail for such keys.
    signature_buffer = _ffi.new(
        "unsigned char[]", _lib.EVP_PKEY_size(pkey._pkey))
    signature_length = _ffi.new("unsigned int*")
    signature_length[0] = len(signature_buffer)
    final_result = _lib.EVP_SignFinal(
        md_ctx, signature_buffer, signature_length, pkey._pkey)

    if final_result != 1:
        # TODO: This is untested.
        _raise_current_error()

    return _ffi.buffer(signature_buffer, signature_length[0])[:]
+ _raise_current_error() + + return _ffi.buffer(signature_buffer, signature_length[0])[:] + + + +def verify(cert, signature, data, digest): + """ + Verify a signature + + :param cert: signing certificate (X509 object) + :param signature: signature returned by sign function + :param data: data to be verified + :param digest: message digest to use + :return: None if the signature is correct, raise exception otherwise + """ + digest_obj = _lib.EVP_get_digestbyname(_byte_string(digest)) + if digest_obj == _ffi.NULL: + raise ValueError("No such digest method") + + pkey = _lib.X509_get_pubkey(cert._x509) + if pkey == _ffi.NULL: + # TODO: This is untested. + _raise_current_error() + pkey = _ffi.gc(pkey, _lib.EVP_PKEY_free) + + md_ctx = _ffi.new("EVP_MD_CTX*") + md_ctx = _ffi.gc(md_ctx, _lib.EVP_MD_CTX_cleanup) + + _lib.EVP_VerifyInit(md_ctx, digest_obj) + _lib.EVP_VerifyUpdate(md_ctx, data, len(data)) + verify_result = _lib.EVP_VerifyFinal(md_ctx, signature, len(signature), pkey) + + if verify_result != 1: + _raise_current_error() + + + +def load_crl(type, buffer): + """ + Load a certificate revocation list from a buffer + + :param type: The file type (one of FILETYPE_PEM, FILETYPE_ASN1) + :param buffer: The buffer the CRL is stored in + + :return: The PKey object + """ + if isinstance(buffer, _text_type): + buffer = buffer.encode("ascii") + + bio = _new_mem_buf(buffer) + + if type == FILETYPE_PEM: + crl = _lib.PEM_read_bio_X509_CRL(bio, _ffi.NULL, _ffi.NULL, _ffi.NULL) + elif type == FILETYPE_ASN1: + crl = _lib.d2i_X509_CRL_bio(bio, _ffi.NULL) + else: + raise ValueError("type argument must be FILETYPE_PEM or FILETYPE_ASN1") + + if crl == _ffi.NULL: + _raise_current_error() + + result = CRL.__new__(CRL) + result._crl = crl + return result + + + +def load_pkcs7_data(type, buffer): + """ + Load pkcs7 data from a buffer + + :param type: The file type (one of FILETYPE_PEM or FILETYPE_ASN1) + :param buffer: The buffer with the pkcs7 data. 
+    :return: The PKCS7 object
+    """
+    if isinstance(buffer, _text_type):
+        buffer = buffer.encode("ascii")
+
+    bio = _new_mem_buf(buffer)
+
+    if type == FILETYPE_PEM:
+        pkcs7 = _lib.PEM_read_bio_PKCS7(bio, _ffi.NULL, _ffi.NULL, _ffi.NULL)
+    elif type == FILETYPE_ASN1:
+        # DER input: the old "pass" here left pkcs7 unbound (NameError below).
+        pkcs7 = _lib.d2i_PKCS7_bio(bio, _ffi.NULL)
+    else:
+        # Reject unknown types up front, matching the other load_* helpers.
+        # Calling _raise_current_error() first made this ValueError unreachable.
+        raise ValueError("type argument must be FILETYPE_PEM or FILETYPE_ASN1")
+
+    if pkcs7 == _ffi.NULL:
+        _raise_current_error()
+
+    pypkcs7 = PKCS7.__new__(PKCS7)
+    pypkcs7._pkcs7 = _ffi.gc(pkcs7, _lib.PKCS7_free)
+    return pypkcs7
+
+
+
+def load_pkcs12(buffer, passphrase):
+    """
+    Load a PKCS12 object from a buffer
+
+    :param buffer: The buffer the certificate is stored in
+    :param passphrase: (Optional) The password to decrypt the PKCS12 lump
+    :returns: The PKCS12 object
+    """
+    if isinstance(buffer, _text_type):
+        buffer = buffer.encode("ascii")
+
+    bio = _new_mem_buf(buffer)
+
+    p12 = _lib.d2i_PKCS12_bio(bio, _ffi.NULL)
+    if p12 == _ffi.NULL:
+        _raise_current_error()
+    p12 = _ffi.gc(p12, _lib.PKCS12_free)
+
+    pkey = _ffi.new("EVP_PKEY**")
+    cert = _ffi.new("X509**")
+    cacerts = _ffi.new("Cryptography_STACK_OF_X509**")
+
+    parse_result = _lib.PKCS12_parse(p12, passphrase, pkey, cert, cacerts)
+    if not parse_result:
+        _raise_current_error()
+
+    cacerts = _ffi.gc(cacerts[0], _lib.sk_X509_free)
+
+    # openssl 1.0.0 sometimes leaves an X509_check_private_key error in the
+    # queue for no particular reason. This error isn't interesting to anyone
+    # outside this function. It's not even interesting to us. Get rid of it.
+    try:
+        _raise_current_error()
+    except Error:
+        pass
+
+    if pkey[0] == _ffi.NULL:
+        pykey = None
+    else:
+        pykey = PKey.__new__(PKey)
+        pykey._pkey = _ffi.gc(pkey[0], _lib.EVP_PKEY_free)
+
+    if cert[0] == _ffi.NULL:
+        pycert = None
+        friendlyname = None
+    else:
+        pycert = X509.__new__(X509)
+        pycert._x509 = _ffi.gc(cert[0], _lib.X509_free)
+
+        friendlyname_length = _ffi.new("int*")
+        friendlyname_buffer = _lib.X509_alias_get0(cert[0], friendlyname_length)
+        # NULL check must happen before wrapping the pointer: ffi.buffer on a
+        # NULL cdata crashes, so the old check-after-read never took effect.
+        friendlyname = (None if friendlyname_buffer == _ffi.NULL else
+                        _ffi.buffer(friendlyname_buffer, friendlyname_length[0])[:])
+
+    pycacerts = []
+    for i in range(_lib.sk_X509_num(cacerts)):
+        pycacert = X509.__new__(X509)
+        pycacert._x509 = _lib.sk_X509_value(cacerts, i)
+        pycacerts.append(pycacert)
+    if not pycacerts:
+        pycacerts = None
+
+    pkcs12 = PKCS12.__new__(PKCS12)
+    pkcs12._pkey = pykey
+    pkcs12._cert = pycert
+    pkcs12._cacerts = pycacerts
+    pkcs12._friendlyname = friendlyname
+    return pkcs12
+
+
+def _initialize_openssl_threads(get_ident, Lock):
+    import _ssl
+    return
+
+    locks = list(Lock() for n in range(_lib.CRYPTO_num_locks()))
+
+    def locking_function(mode, index, filename, line):
+        if mode & _lib.CRYPTO_LOCK:
+            locks[index].acquire()
+        else:
+            locks[index].release()
+
+    _lib.CRYPTO_set_id_callback(
+        _ffi.callback("unsigned long (*)(void)", get_ident))
+
+    _lib.CRYPTO_set_locking_callback(
+        _ffi.callback(
+            "void (*)(int, int, const char*, int)", locking_function))
+
+
+try:
+    from thread import get_ident
+    from threading import Lock
+except ImportError:
+    pass
+else:
+    _initialize_openssl_threads(get_ident, Lock)
+    del get_ident, Lock
+
+# There are no direct unit tests for this initialization. It is tested
+# indirectly since it is necessary for functions like dump_privatekey when
+# using encryption.
+# +# Thus OpenSSL.test.test_crypto.FunctionTests.test_dump_privatekey_passphrase +# and some other similar tests may fail without this (though they may not if +# the Python runtime has already done some initialization of the underlying +# OpenSSL library (and is linked against the same one that cryptography is +# using)). +_lib.OpenSSL_add_all_algorithms() + +# This is similar but exercised mainly by exception_from_error_queue. It calls +# both ERR_load_crypto_strings() and ERR_load_SSL_strings(). +_lib.SSL_load_error_strings() diff --git a/Linux_i686/lib/python2.7/site-packages/OpenSSL/rand.py b/Linux_i686/lib/python2.7/site-packages/OpenSSL/rand.py new file mode 100644 index 0000000..e754378 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/OpenSSL/rand.py @@ -0,0 +1,180 @@ +""" +PRNG management routines, thin wrappers. + +See the file RATIONALE for a short explanation of why this module was written. +""" + +from functools import partial + +from six import integer_types as _integer_types + +from OpenSSL._util import ( + ffi as _ffi, + lib as _lib, + exception_from_error_queue as _exception_from_error_queue) + + +class Error(Exception): + """ + An error occurred in an `OpenSSL.rand` API. + """ + +_raise_current_error = partial(_exception_from_error_queue, Error) + +_unspecified = object() + +_builtin_bytes = bytes + +def bytes(num_bytes): + """ + Get some random bytes as a string. + + :param num_bytes: The number of bytes to fetch + :return: A string of random bytes + """ + if not isinstance(num_bytes, _integer_types): + raise TypeError("num_bytes must be an integer") + + if num_bytes < 0: + raise ValueError("num_bytes must not be negative") + + result_buffer = _ffi.new("char[]", num_bytes) + result_code = _lib.RAND_bytes(result_buffer, num_bytes) + if result_code == -1: + # TODO: No tests for this code path. Triggering a RAND_bytes failure + # might involve supplying a custom ENGINE? That's hard. 
+ _raise_current_error() + + return _ffi.buffer(result_buffer)[:] + + + +def add(buffer, entropy): + """ + Add data with a given entropy to the PRNG + + :param buffer: Buffer with random data + :param entropy: The entropy (in bytes) measurement of the buffer + :return: None + """ + if not isinstance(buffer, _builtin_bytes): + raise TypeError("buffer must be a byte string") + + if not isinstance(entropy, int): + raise TypeError("entropy must be an integer") + + # TODO Nothing tests this call actually being made, or made properly. + _lib.RAND_add(buffer, len(buffer), entropy) + + + +def seed(buffer): + """ + Alias for rand_add, with entropy equal to length + + :param buffer: Buffer with random data + :return: None + """ + if not isinstance(buffer, _builtin_bytes): + raise TypeError("buffer must be a byte string") + + # TODO Nothing tests this call actually being made, or made properly. + _lib.RAND_seed(buffer, len(buffer)) + + + +def status(): + """ + Retrieve the status of the PRNG + + :return: True if the PRNG is seeded enough, false otherwise + """ + return _lib.RAND_status() + + + +def egd(path, bytes=_unspecified): + """ + Query an entropy gathering daemon (EGD) for random data and add it to the + PRNG. I haven't found any problems when the socket is missing, the function + just returns 0. + + :param path: The path to the EGD socket + :param bytes: (optional) The number of bytes to read, default is 255 + :returns: The number of bytes read (NB: a value of 0 isn't necessarily an + error, check rand.status()) + """ + if not isinstance(path, _builtin_bytes): + raise TypeError("path must be a byte string") + + if bytes is _unspecified: + bytes = 255 + elif not isinstance(bytes, int): + raise TypeError("bytes must be an integer") + + return _lib.RAND_egd_bytes(path, bytes) + + + +def cleanup(): + """ + Erase the memory used by the PRNG. + + :return: None + """ + # TODO Nothing tests this call actually being made, or made properly. 
+ _lib.RAND_cleanup() + + + +def load_file(filename, maxbytes=_unspecified): + """ + Seed the PRNG with data from a file + + :param filename: The file to read data from + :param maxbytes: (optional) The number of bytes to read, default is + to read the entire file + :return: The number of bytes read + """ + if not isinstance(filename, _builtin_bytes): + raise TypeError("filename must be a string") + + if maxbytes is _unspecified: + maxbytes = -1 + elif not isinstance(maxbytes, int): + raise TypeError("maxbytes must be an integer") + + return _lib.RAND_load_file(filename, maxbytes) + + + +def write_file(filename): + """ + Save PRNG state to a file + + :param filename: The file to write data to + :return: The number of bytes written + """ + if not isinstance(filename, _builtin_bytes): + raise TypeError("filename must be a string") + + return _lib.RAND_write_file(filename) + + +# TODO There are no tests for screen at all +def screen(): + """ + Add the current contents of the screen to the PRNG state. Availability: + Windows. + + :return: None + """ + _lib.RAND_screen() + +if getattr(_lib, 'RAND_screen', None) is None: + del screen + + +# TODO There are no tests for the RAND strings being loaded, whatever that +# means. +_lib.ERR_load_RAND_strings() diff --git a/Linux_i686/lib/python2.7/site-packages/OpenSSL/test/__init__.py b/Linux_i686/lib/python2.7/site-packages/OpenSSL/test/__init__.py new file mode 100644 index 0000000..9b08060 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/OpenSSL/test/__init__.py @@ -0,0 +1,6 @@ +# Copyright (C) Jean-Paul Calderone +# See LICENSE for details. + +""" +Package containing unit tests for :py:mod:`OpenSSL`. 
+""" diff --git a/Linux_i686/lib/python2.7/site-packages/OpenSSL/test/test_crypto.py b/Linux_i686/lib/python2.7/site-packages/OpenSSL/test/test_crypto.py new file mode 100644 index 0000000..4e42f70 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/OpenSSL/test/test_crypto.py @@ -0,0 +1,3037 @@ +# Copyright (c) Jean-Paul Calderone +# See LICENSE file for details. + +""" +Unit tests for :py:mod:`OpenSSL.crypto`. +""" + +from unittest import main + +import os, re +from subprocess import PIPE, Popen +from datetime import datetime, timedelta + +from six import binary_type + +from OpenSSL.crypto import TYPE_RSA, TYPE_DSA, Error, PKey, PKeyType +from OpenSSL.crypto import X509, X509Type, X509Name, X509NameType +from OpenSSL.crypto import X509Store, X509StoreType, X509Req, X509ReqType +from OpenSSL.crypto import X509Extension, X509ExtensionType +from OpenSSL.crypto import load_certificate, load_privatekey +from OpenSSL.crypto import FILETYPE_PEM, FILETYPE_ASN1, FILETYPE_TEXT +from OpenSSL.crypto import dump_certificate, load_certificate_request +from OpenSSL.crypto import dump_certificate_request, dump_privatekey +from OpenSSL.crypto import PKCS7Type, load_pkcs7_data +from OpenSSL.crypto import PKCS12, PKCS12Type, load_pkcs12 +from OpenSSL.crypto import CRL, Revoked, load_crl +from OpenSSL.crypto import NetscapeSPKI, NetscapeSPKIType +from OpenSSL.crypto import sign, verify +from OpenSSL.test.util import TestCase, b +from OpenSSL._util import native + +def normalize_certificate_pem(pem): + return dump_certificate(FILETYPE_PEM, load_certificate(FILETYPE_PEM, pem)) + + +def normalize_privatekey_pem(pem): + return dump_privatekey(FILETYPE_PEM, load_privatekey(FILETYPE_PEM, pem)) + + +GOOD_CIPHER = "blowfish" +BAD_CIPHER = "zippers" + +GOOD_DIGEST = "MD5" +BAD_DIGEST = "monkeys" + +root_cert_pem = b("""-----BEGIN CERTIFICATE----- +MIIC7TCCAlagAwIBAgIIPQzE4MbeufQwDQYJKoZIhvcNAQEFBQAwWDELMAkGA1UE +BhMCVVMxCzAJBgNVBAgTAklMMRAwDgYDVQQHEwdDaGljYWdvMRAwDgYDVQQKEwdU 
+ZXN0aW5nMRgwFgYDVQQDEw9UZXN0aW5nIFJvb3QgQ0EwIhgPMjAwOTAzMjUxMjM2 +NThaGA8yMDE3MDYxMTEyMzY1OFowWDELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAklM +MRAwDgYDVQQHEwdDaGljYWdvMRAwDgYDVQQKEwdUZXN0aW5nMRgwFgYDVQQDEw9U +ZXN0aW5nIFJvb3QgQ0EwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAPmaQumL +urpE527uSEHdL1pqcDRmWzu+98Y6YHzT/J7KWEamyMCNZ6fRW1JCR782UQ8a07fy +2xXsKy4WdKaxyG8CcatwmXvpvRQ44dSANMihHELpANTdyVp6DCysED6wkQFurHlF +1dshEaJw8b/ypDhmbVIo6Ci1xvCJqivbLFnbAgMBAAGjgbswgbgwHQYDVR0OBBYE +FINVdy1eIfFJDAkk51QJEo3IfgSuMIGIBgNVHSMEgYAwfoAUg1V3LV4h8UkMCSTn +VAkSjch+BK6hXKRaMFgxCzAJBgNVBAYTAlVTMQswCQYDVQQIEwJJTDEQMA4GA1UE +BxMHQ2hpY2FnbzEQMA4GA1UEChMHVGVzdGluZzEYMBYGA1UEAxMPVGVzdGluZyBS +b290IENBggg9DMTgxt659DAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBBQUAA4GB +AGGCDazMJGoWNBpc03u6+smc95dEead2KlZXBATOdFT1VesY3+nUOqZhEhTGlDMi +hkgaZnzoIq/Uamidegk4hirsCT/R+6vsKAAxNTcBjUeZjlykCJWy5ojShGftXIKY +w/njVbKMXrvc83qmTdGl3TAM0fxQIpqgcglFLveEBgzn +-----END CERTIFICATE----- +""") + +root_key_pem = b("""-----BEGIN RSA PRIVATE KEY----- +MIICXQIBAAKBgQD5mkLpi7q6ROdu7khB3S9aanA0Zls7vvfGOmB80/yeylhGpsjA +jWen0VtSQke/NlEPGtO38tsV7CsuFnSmschvAnGrcJl76b0UOOHUgDTIoRxC6QDU +3claegwsrBA+sJEBbqx5RdXbIRGicPG/8qQ4Zm1SKOgotcbwiaor2yxZ2wIDAQAB +AoGBAPCgMpmLxzwDaUmcFbTJUvlLW1hoxNNYSu2jIZm1k/hRAcE60JYwvBkgz3UB +yMEh0AtLxYe0bFk6EHah11tMUPgscbCq73snJ++8koUw+csk22G65hOs51bVb7Aa +6JBe67oLzdtvgCUFAA2qfrKzWRZzAdhUirQUZgySZk+Xq1pBAkEA/kZG0A6roTSM +BVnx7LnPfsycKUsTumorpXiylZJjTi9XtmzxhrYN6wgZlDOOwOLgSQhszGpxVoMD +u3gByT1b2QJBAPtL3mSKdvwRu/+40zaZLwvSJRxaj0mcE4BJOS6Oqs/hS1xRlrNk +PpQ7WJ4yM6ZOLnXzm2mKyxm50Mv64109FtMCQQDOqS2KkjHaLowTGVxwC0DijMfr +I9Lf8sSQk32J5VWCySWf5gGTfEnpmUa41gKTMJIbqZZLucNuDcOtzUaeWZlZAkA8 +ttXigLnCqR486JDPTi9ZscoZkZ+w7y6e/hH8t6d5Vjt48JVyfjPIaJY+km58LcN3 +6AWSeGAdtRFHVzR7oHjVAkB4hutvxiOeiIVQNBhM6RSI9aBPMI21DoX2JRoxvNW2 +cbvAhow217X9V0dVerEOKxnNYspXRrh36h7k4mQA+sDq +-----END RSA PRIVATE KEY----- +""") + +server_cert_pem = b("""-----BEGIN CERTIFICATE----- +MIICKDCCAZGgAwIBAgIJAJn/HpR21r/8MA0GCSqGSIb3DQEBBQUAMFgxCzAJBgNV 
+BAYTAlVTMQswCQYDVQQIEwJJTDEQMA4GA1UEBxMHQ2hpY2FnbzEQMA4GA1UEChMH +VGVzdGluZzEYMBYGA1UEAxMPVGVzdGluZyBSb290IENBMCIYDzIwMDkwMzI1MTIz +NzUzWhgPMjAxNzA2MTExMjM3NTNaMBgxFjAUBgNVBAMTDWxvdmVseSBzZXJ2ZXIw +gZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAL6m+G653V0tpBC/OKl22VxOi2Cv +lK4TYu9LHSDP9uDVTe7V5D5Tl6qzFoRRx5pfmnkqT5B+W9byp2NU3FC5hLm5zSAr +b45meUhjEJ/ifkZgbNUjHdBIGP9MAQUHZa5WKdkGIJvGAvs8UzUqlr4TBWQIB24+ +lJ+Ukk/CRgasrYwdAgMBAAGjNjA0MB0GA1UdDgQWBBS4kC7Ij0W1TZXZqXQFAM2e +gKEG2DATBgNVHSUEDDAKBggrBgEFBQcDATANBgkqhkiG9w0BAQUFAAOBgQBh30Li +dJ+NlxIOx5343WqIBka3UbsOb2kxWrbkVCrvRapCMLCASO4FqiKWM+L0VDBprqIp +2mgpFQ6FHpoIENGvJhdEKpptQ5i7KaGhnDNTfdy3x1+h852G99f1iyj0RmbuFcM8 +uzujnS8YXWvM7DM1Ilozk4MzPug8jzFp5uhKCQ== +-----END CERTIFICATE----- +""") + +server_key_pem = normalize_privatekey_pem(b("""-----BEGIN RSA PRIVATE KEY----- +MIICWwIBAAKBgQC+pvhuud1dLaQQvzipdtlcTotgr5SuE2LvSx0gz/bg1U3u1eQ+ +U5eqsxaEUceaX5p5Kk+QflvW8qdjVNxQuYS5uc0gK2+OZnlIYxCf4n5GYGzVIx3Q +SBj/TAEFB2WuVinZBiCbxgL7PFM1Kpa+EwVkCAduPpSflJJPwkYGrK2MHQIDAQAB +AoGAbwuZ0AR6JveahBaczjfnSpiFHf+mve2UxoQdpyr6ROJ4zg/PLW5K/KXrC48G +j6f3tXMrfKHcpEoZrQWUfYBRCUsGD5DCazEhD8zlxEHahIsqpwA0WWssJA2VOLEN +j6DuV2pCFbw67rfTBkTSo32ahfXxEKev5KswZk0JIzH3ooECQQDgzS9AI89h0gs8 +Dt+1m11Rzqo3vZML7ZIyGApUzVan+a7hbc33nbGRkAXjHaUBJO31it/H6dTO+uwX +msWwNG5ZAkEA2RyFKs5xR5USTFaKLWCgpH/ydV96KPOpBND7TKQx62snDenFNNbn +FwwOhpahld+vqhYk+pfuWWUpQciE+Bu7ZQJASjfT4sQv4qbbKK/scePicnDdx9th +4e1EeB9xwb+tXXXUo/6Bor/AcUNwfiQ6Zt9PZOK9sR3lMZSsP7rMi7kzuQJABie6 +1sXXjFH7nNJvRG4S39cIxq8YRYTy68II/dlB2QzGpKxV/POCxbJ/zu0CU79tuYK7 +NaeNCFfH3aeTrX0LyQJAMBWjWmeKM2G2sCExheeQK0ROnaBC8itCECD4Jsve4nqf +r50+LF74iLXFwqysVCebPKMOpDWp/qQ1BbJQIPs7/A== +-----END RSA PRIVATE KEY----- +""")) + +client_cert_pem = b("""-----BEGIN CERTIFICATE----- +MIICJjCCAY+gAwIBAgIJAKxpFI5lODkjMA0GCSqGSIb3DQEBBQUAMFgxCzAJBgNV +BAYTAlVTMQswCQYDVQQIEwJJTDEQMA4GA1UEBxMHQ2hpY2FnbzEQMA4GA1UEChMH +VGVzdGluZzEYMBYGA1UEAxMPVGVzdGluZyBSb290IENBMCIYDzIwMDkwMzI1MTIz 
+ODA1WhgPMjAxNzA2MTExMjM4MDVaMBYxFDASBgNVBAMTC3VnbHkgY2xpZW50MIGf +MA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDAZh/SRtNm5ntMT4qb6YzEpTroMlq2 +rn+GrRHRiZ+xkCw/CGNhbtPir7/QxaUj26BSmQrHw1bGKEbPsWiW7bdXSespl+xK +iku4G/KvnnmWdeJHqsiXeUZtqurMELcPQAw9xPHEuhqqUJvvEoMTsnCEqGM+7Dtb +oCRajYyHfluARQIDAQABozYwNDAdBgNVHQ4EFgQUNQB+qkaOaEVecf1J3TTUtAff +0fAwEwYDVR0lBAwwCgYIKwYBBQUHAwIwDQYJKoZIhvcNAQEFBQADgYEAyv/Jh7gM +Q3OHvmsFEEvRI+hsW8y66zK4K5de239Y44iZrFYkt7Q5nBPMEWDj4F2hLYWL/qtI +9Zdr0U4UDCU9SmmGYh4o7R4TZ5pGFvBYvjhHbkSFYFQXZxKUi+WUxplP6I0wr2KJ +PSTJCjJOn3xo2NTKRgV1gaoTf2EhL+RG8TQ= +-----END CERTIFICATE----- +""") + +client_key_pem = normalize_privatekey_pem(b("""-----BEGIN RSA PRIVATE KEY----- +MIICXgIBAAKBgQDAZh/SRtNm5ntMT4qb6YzEpTroMlq2rn+GrRHRiZ+xkCw/CGNh +btPir7/QxaUj26BSmQrHw1bGKEbPsWiW7bdXSespl+xKiku4G/KvnnmWdeJHqsiX +eUZtqurMELcPQAw9xPHEuhqqUJvvEoMTsnCEqGM+7DtboCRajYyHfluARQIDAQAB +AoGATkZ+NceY5Glqyl4mD06SdcKfV65814vg2EL7V9t8+/mi9rYL8KztSXGlQWPX +zuHgtRoMl78yQ4ZJYOBVo+nsx8KZNRCEBlE19bamSbQLCeQMenWnpeYyQUZ908gF +h6L9qsFVJepgA9RDgAjyDoS5CaWCdCCPCH2lDkdcqC54SVUCQQDseuduc4wi8h4t +V8AahUn9fn9gYfhoNuM0gdguTA0nPLVWz4hy1yJiWYQe0H7NLNNTmCKiLQaJpAbb +TC6vE8C7AkEA0Ee8CMJUc20BnGEmxwgWcVuqFWaKCo8jTH1X38FlATUsyR3krjW2 +dL3yDD9NwHxsYP7nTKp/U8MV7U9IBn4y/wJBAJl7H0/BcLeRmuJk7IqJ7b635iYB +D/9beFUw3MUXmQXZUfyYz39xf6CDZsu1GEdEC5haykeln3Of4M9d/4Kj+FcCQQCY +si6xwT7GzMDkk/ko684AV3KPc/h6G0yGtFIrMg7J3uExpR/VdH2KgwMkZXisSMvw +JJEQjOMCVsEJlRk54WWjAkEAzoZNH6UhDdBK5F38rVt/y4SEHgbSfJHIAmPS32Kq +f6GGcfNpip0Uk7q7udTKuX7Q/buZi/C4YW7u3VKAquv9NA== +-----END RSA PRIVATE KEY----- +""")) + +cleartextCertificatePEM = b("""-----BEGIN CERTIFICATE----- +MIIC7TCCAlagAwIBAgIIPQzE4MbeufQwDQYJKoZIhvcNAQEFBQAwWDELMAkGA1UE +BhMCVVMxCzAJBgNVBAgTAklMMRAwDgYDVQQHEwdDaGljYWdvMRAwDgYDVQQKEwdU +ZXN0aW5nMRgwFgYDVQQDEw9UZXN0aW5nIFJvb3QgQ0EwIhgPMjAwOTAzMjUxMjM2 +NThaGA8yMDE3MDYxMTEyMzY1OFowWDELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAklM +MRAwDgYDVQQHEwdDaGljYWdvMRAwDgYDVQQKEwdUZXN0aW5nMRgwFgYDVQQDEw9U 
+ZXN0aW5nIFJvb3QgQ0EwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAPmaQumL +urpE527uSEHdL1pqcDRmWzu+98Y6YHzT/J7KWEamyMCNZ6fRW1JCR782UQ8a07fy +2xXsKy4WdKaxyG8CcatwmXvpvRQ44dSANMihHELpANTdyVp6DCysED6wkQFurHlF +1dshEaJw8b/ypDhmbVIo6Ci1xvCJqivbLFnbAgMBAAGjgbswgbgwHQYDVR0OBBYE +FINVdy1eIfFJDAkk51QJEo3IfgSuMIGIBgNVHSMEgYAwfoAUg1V3LV4h8UkMCSTn +VAkSjch+BK6hXKRaMFgxCzAJBgNVBAYTAlVTMQswCQYDVQQIEwJJTDEQMA4GA1UE +BxMHQ2hpY2FnbzEQMA4GA1UEChMHVGVzdGluZzEYMBYGA1UEAxMPVGVzdGluZyBS +b290IENBggg9DMTgxt659DAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBBQUAA4GB +AGGCDazMJGoWNBpc03u6+smc95dEead2KlZXBATOdFT1VesY3+nUOqZhEhTGlDMi +hkgaZnzoIq/Uamidegk4hirsCT/R+6vsKAAxNTcBjUeZjlykCJWy5ojShGftXIKY +w/njVbKMXrvc83qmTdGl3TAM0fxQIpqgcglFLveEBgzn +-----END CERTIFICATE----- +""") + +cleartextPrivateKeyPEM = normalize_privatekey_pem(b("""\ +-----BEGIN RSA PRIVATE KEY----- +MIICXQIBAAKBgQD5mkLpi7q6ROdu7khB3S9aanA0Zls7vvfGOmB80/yeylhGpsjA +jWen0VtSQke/NlEPGtO38tsV7CsuFnSmschvAnGrcJl76b0UOOHUgDTIoRxC6QDU +3claegwsrBA+sJEBbqx5RdXbIRGicPG/8qQ4Zm1SKOgotcbwiaor2yxZ2wIDAQAB +AoGBAPCgMpmLxzwDaUmcFbTJUvlLW1hoxNNYSu2jIZm1k/hRAcE60JYwvBkgz3UB +yMEh0AtLxYe0bFk6EHah11tMUPgscbCq73snJ++8koUw+csk22G65hOs51bVb7Aa +6JBe67oLzdtvgCUFAA2qfrKzWRZzAdhUirQUZgySZk+Xq1pBAkEA/kZG0A6roTSM +BVnx7LnPfsycKUsTumorpXiylZJjTi9XtmzxhrYN6wgZlDOOwOLgSQhszGpxVoMD +u3gByT1b2QJBAPtL3mSKdvwRu/+40zaZLwvSJRxaj0mcE4BJOS6Oqs/hS1xRlrNk +PpQ7WJ4yM6ZOLnXzm2mKyxm50Mv64109FtMCQQDOqS2KkjHaLowTGVxwC0DijMfr +I9Lf8sSQk32J5VWCySWf5gGTfEnpmUa41gKTMJIbqZZLucNuDcOtzUaeWZlZAkA8 +ttXigLnCqR486JDPTi9ZscoZkZ+w7y6e/hH8t6d5Vjt48JVyfjPIaJY+km58LcN3 +6AWSeGAdtRFHVzR7oHjVAkB4hutvxiOeiIVQNBhM6RSI9aBPMI21DoX2JRoxvNW2 +cbvAhow217X9V0dVerEOKxnNYspXRrh36h7k4mQA+sDq +-----END RSA PRIVATE KEY----- +""")) + +cleartextCertificateRequestPEM = b("""-----BEGIN CERTIFICATE REQUEST----- +MIIBnjCCAQcCAQAwXjELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAklMMRAwDgYDVQQH +EwdDaGljYWdvMRcwFQYDVQQKEw5NeSBDb21wYW55IEx0ZDEXMBUGA1UEAxMORnJl +ZGVyaWNrIERlYW4wgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBANp6Y17WzKSw 
+BsUWkXdqg6tnXy8H8hA1msCMWpc+/2KJ4mbv5NyD6UD+/SqagQqulPbF/DFea9nA +E0zhmHJELcM8gUTIlXv/cgDWnmK4xj8YkjVUiCdqKRAKeuzLG1pGmwwF5lGeJpXN +xQn5ecR0UYSOWj6TTGXB9VyUMQzCClcBAgMBAAGgADANBgkqhkiG9w0BAQUFAAOB +gQAAJGuF/R/GGbeC7FbFW+aJgr9ee0Xbl6nlhu7pTe67k+iiKT2dsl2ti68MVTnu +Vrb3HUNqOkiwsJf6kCtq5oPn3QVYzTa76Dt2y3Rtzv6boRSlmlfrgS92GNma8JfR +oICQk3nAudi6zl1Dix3BCv1pUp5KMtGn3MeDEi6QFGy2rA== +-----END CERTIFICATE REQUEST----- +""") + +encryptedPrivateKeyPEM = b("""-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: DES-EDE3-CBC,9573604A18579E9E + +SHOho56WxDkT0ht10UTeKc0F5u8cqIa01kzFAmETw0MAs8ezYtK15NPdCXUm3X/2 +a17G7LSF5bkxOgZ7vpXyMzun/owrj7CzvLxyncyEFZWvtvzaAhPhvTJtTIB3kf8B +8+qRcpTGK7NgXEgYBW5bj1y4qZkD4zCL9o9NQzsKI3Ie8i0239jsDOWR38AxjXBH +mGwAQ4Z6ZN5dnmM4fhMIWsmFf19sNyAML4gHenQCHhmXbjXeVq47aC2ProInJbrm ++00TcisbAQ40V9aehVbcDKtS4ZbMVDwncAjpXpcncC54G76N6j7F7wL7L/FuXa3A +fvSVy9n2VfF/pJ3kYSflLHH2G/DFxjF7dl0GxhKPxJjp3IJi9VtuvmN9R2jZWLQF +tfC8dXgy/P9CfFQhlinqBTEwgH0oZ/d4k4NVFDSdEMaSdmBAjlHpc+Vfdty3HVnV +rKXj//wslsFNm9kIwJGIgKUa/n2jsOiydrsk1mgH7SmNCb3YHgZhbbnq0qLat/HC +gHDt3FHpNQ31QzzL3yrenFB2L9osIsnRsDTPFNi4RX4SpDgNroxOQmyzCCV6H+d4 +o1mcnNiZSdxLZxVKccq0AfRpHqpPAFnJcQHP6xyT9MZp6fBa0XkxDnt9kNU8H3Qw +7SJWZ69VXjBUzMlQViLuaWMgTnL+ZVyFZf9hTF7U/ef4HMLMAVNdiaGG+G+AjCV/ +MbzjS007Oe4qqBnCWaFPSnJX6uLApeTbqAxAeyCql56ULW5x6vDMNC3dwjvS/CEh +11n8RkgFIQA0AhuKSIg3CbuartRsJnWOLwgLTzsrKYL4yRog1RJrtw== +-----END RSA PRIVATE KEY----- +""") + +encryptedPrivateKeyPEMPassphrase = b("foobar") + +# Some PKCS#7 stuff. 
Generated with the openssl command line: +# +# openssl crl2pkcs7 -inform pem -outform pem -certfile s.pem -nocrl +# +# with a certificate and key (but the key should be irrelevant) in s.pem +pkcs7Data = b("""\ +-----BEGIN PKCS7----- +MIIDNwYJKoZIhvcNAQcCoIIDKDCCAyQCAQExADALBgkqhkiG9w0BBwGgggMKMIID +BjCCAm+gAwIBAgIBATANBgkqhkiG9w0BAQQFADB7MQswCQYDVQQGEwJTRzERMA8G +A1UEChMITTJDcnlwdG8xFDASBgNVBAsTC00yQ3J5cHRvIENBMSQwIgYDVQQDExtN +MkNyeXB0byBDZXJ0aWZpY2F0ZSBNYXN0ZXIxHTAbBgkqhkiG9w0BCQEWDm5ncHNA +cG9zdDEuY29tMB4XDTAwMDkxMDA5NTEzMFoXDTAyMDkxMDA5NTEzMFowUzELMAkG +A1UEBhMCU0cxETAPBgNVBAoTCE0yQ3J5cHRvMRIwEAYDVQQDEwlsb2NhbGhvc3Qx +HTAbBgkqhkiG9w0BCQEWDm5ncHNAcG9zdDEuY29tMFwwDQYJKoZIhvcNAQEBBQAD +SwAwSAJBAKy+e3dulvXzV7zoTZWc5TzgApr8DmeQHTYC8ydfzH7EECe4R1Xh5kwI +zOuuFfn178FBiS84gngaNcrFi0Z5fAkCAwEAAaOCAQQwggEAMAkGA1UdEwQCMAAw +LAYJYIZIAYb4QgENBB8WHU9wZW5TU0wgR2VuZXJhdGVkIENlcnRpZmljYXRlMB0G +A1UdDgQWBBTPhIKSvnsmYsBVNWjj0m3M2z0qVTCBpQYDVR0jBIGdMIGagBT7hyNp +65w6kxXlxb8pUU/+7Sg4AaF/pH0wezELMAkGA1UEBhMCU0cxETAPBgNVBAoTCE0y +Q3J5cHRvMRQwEgYDVQQLEwtNMkNyeXB0byBDQTEkMCIGA1UEAxMbTTJDcnlwdG8g +Q2VydGlmaWNhdGUgTWFzdGVyMR0wGwYJKoZIhvcNAQkBFg5uZ3BzQHBvc3QxLmNv +bYIBADANBgkqhkiG9w0BAQQFAAOBgQA7/CqT6PoHycTdhEStWNZde7M/2Yc6BoJu +VwnW8YxGO8Sn6UJ4FeffZNcYZddSDKosw8LtPOeWoK3JINjAk5jiPQ2cww++7QGG +/g5NDjxFZNDJP1dGiLAxPW6JXwov4v0FmdzfLOZ01jDcgQQZqEpYlgpuI5JEWUQ9 +Ho4EzbYCOaEAMQA= +-----END PKCS7----- +""") + +crlData = b("""\ +-----BEGIN X509 CRL----- +MIIBWzCBxTANBgkqhkiG9w0BAQQFADBYMQswCQYDVQQGEwJVUzELMAkGA1UECBMC +SUwxEDAOBgNVBAcTB0NoaWNhZ28xEDAOBgNVBAoTB1Rlc3RpbmcxGDAWBgNVBAMT +D1Rlc3RpbmcgUm9vdCBDQRcNMDkwNzI2MDQzNDU2WhcNMTIwOTI3MDI0MTUyWjA8 +MBUCAgOrGA8yMDA5MDcyNTIzMzQ1NlowIwICAQAYDzIwMDkwNzI1MjMzNDU2WjAM +MAoGA1UdFQQDCgEEMA0GCSqGSIb3DQEBBAUAA4GBAEBt7xTs2htdD3d4ErrcGAw1 +4dKcVnIWTutoI7xxen26Wwvh8VCsT7i/UeP+rBl9rC/kfjWjzQk3/zleaarGTpBT +0yp4HXRFFoRhhSE/hP+eteaPXRgrsNRLHe9ZDd69wmh7J1wMDb0m81RG7kqcbsid +vrzEeLDRiiPl92dyyWmu +-----END X509 CRL----- +""") + + +# A broken RSA private key which 
can be used to test the error path through +# PKey.check. +inconsistentPrivateKeyPEM = b("""-----BEGIN RSA PRIVATE KEY----- +MIIBPAIBAAJBAKy+e3dulvXzV7zoTZWc5TzgApr8DmeQHTYC8ydfzH7EECe4R1Xh +5kwIzOuuFfn178FBiS84gngaNcrFi0Z5fAkCAwEaAQJBAIqm/bz4NA1H++Vx5Ewx +OcKp3w19QSaZAwlGRtsUxrP7436QjnREM3Bm8ygU11BjkPVmtrKm6AayQfCHqJoT +zIECIQDW0BoMoL0HOYM/mrTLhaykYAVqgIeJsPjvkEhTFXWBuQIhAM3deFAvWNu4 +nklUQ37XsCT2c9tmNt1LAT+slG2JOTTRAiAuXDtC/m3NYVwyHfFm+zKHRzHkClk2 +HjubeEgjpj32AQIhAJqMGTaZVOwevTXvvHwNeH+vRWsAYU/gbx+OQB+7VOcBAiEA +oolb6NMg/R3enNPvS1O4UU1H8wpaF77L4yiSWlE0p4w= +-----END RSA PRIVATE KEY----- +""") + +# certificate with NULL bytes in subjectAltName and common name + +nulbyteSubjectAltNamePEM = b("""-----BEGIN CERTIFICATE----- +MIIE2DCCA8CgAwIBAgIBADANBgkqhkiG9w0BAQUFADCBxTELMAkGA1UEBhMCVVMx +DzANBgNVBAgMBk9yZWdvbjESMBAGA1UEBwwJQmVhdmVydG9uMSMwIQYDVQQKDBpQ +eXRob24gU29mdHdhcmUgRm91bmRhdGlvbjEgMB4GA1UECwwXUHl0aG9uIENvcmUg +RGV2ZWxvcG1lbnQxJDAiBgNVBAMMG251bGwucHl0aG9uLm9yZwBleGFtcGxlLm9y +ZzEkMCIGCSqGSIb3DQEJARYVcHl0aG9uLWRldkBweXRob24ub3JnMB4XDTEzMDgw +NzEzMTE1MloXDTEzMDgwNzEzMTI1MlowgcUxCzAJBgNVBAYTAlVTMQ8wDQYDVQQI +DAZPcmVnb24xEjAQBgNVBAcMCUJlYXZlcnRvbjEjMCEGA1UECgwaUHl0aG9uIFNv +ZnR3YXJlIEZvdW5kYXRpb24xIDAeBgNVBAsMF1B5dGhvbiBDb3JlIERldmVsb3Bt +ZW50MSQwIgYDVQQDDBtudWxsLnB5dGhvbi5vcmcAZXhhbXBsZS5vcmcxJDAiBgkq +hkiG9w0BCQEWFXB5dGhvbi1kZXZAcHl0aG9uLm9yZzCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBALXq7cn7Rn1vO3aA3TrzA5QLp6bb7B3f/yN0CJ2XFj+j +pHs+Gw6WWSUDpybiiKnPec33BFawq3kyblnBMjBU61ioy5HwQqVkJ8vUVjGIUq3P +vX/wBmQfzCe4o4uM89gpHyUL9UYGG8oCRa17dgqcv7u5rg0Wq2B1rgY+nHwx3JIv +KRrgSwyRkGzpN8WQ1yrXlxWjgI9de0mPVDDUlywcWze1q2kwaEPTM3hLAmD1PESA +oY/n8A/RXoeeRs9i/Pm/DGUS8ZPINXk/yOzsR/XvvkTVroIeLZqfmFpnZeF0cHzL +08LODkVJJ9zjLdT7SA4vnne4FEbAxDbKAq5qkYzaL4UCAwEAAaOB0DCBzTAMBgNV +HRMBAf8EAjAAMB0GA1UdDgQWBBSIWlXAUv9hzVKjNQ/qWpwkOCL3XDALBgNVHQ8E +BAMCBeAwgZAGA1UdEQSBiDCBhYIeYWx0bnVsbC5weXRob24ub3JnAGV4YW1wbGUu +Y29tgSBudWxsQHB5dGhvbi5vcmcAdXNlckBleGFtcGxlLm9yZ4YpaHR0cDovL251 
+bGwucHl0aG9uLm9yZwBodHRwOi8vZXhhbXBsZS5vcmeHBMAAAgGHECABDbgAAAAA +AAAAAAAAAAEwDQYJKoZIhvcNAQEFBQADggEBAKxPRe99SaghcI6IWT7UNkJw9aO9 +i9eo0Fj2MUqxpKbdb9noRDy2CnHWf7EIYZ1gznXPdwzSN4YCjV5d+Q9xtBaowT0j +HPERs1ZuytCNNJTmhyqZ8q6uzMLoht4IqH/FBfpvgaeC5tBTnTT0rD5A/olXeimk +kX4LxlEx5RAvpGB2zZVRGr6LobD9rVK91xuHYNIxxxfEGE8tCCWjp0+3ksri9SXx +VHWBnbM9YaL32u3hxm8sYB/Yb8WSBavJCWJJqRStVRHM1koZlJmXNx2BX4vPo6iW +RFEIPQsFZRLrtnCAiEhyT8bC2s/Njlu6ly9gtJZWSV46Q3ZjBL4q9sHKqZQ= +-----END CERTIFICATE-----""") + + +class X509ExtTests(TestCase): + """ + Tests for :py:class:`OpenSSL.crypto.X509Extension`. + """ + + def setUp(self): + """ + Create a new private key and start a certificate request (for a test + method to finish in one way or another). + """ + super(X509ExtTests, self).setUp() + # Basic setup stuff to generate a certificate + self.pkey = PKey() + self.pkey.generate_key(TYPE_RSA, 384) + self.req = X509Req() + self.req.set_pubkey(self.pkey) + # Authority good you have. + self.req.get_subject().commonName = "Yoda root CA" + self.x509 = X509() + self.subject = self.x509.get_subject() + self.subject.commonName = self.req.get_subject().commonName + self.x509.set_issuer(self.subject) + self.x509.set_pubkey(self.pkey) + now = b(datetime.now().strftime("%Y%m%d%H%M%SZ")) + expire = b((datetime.now() + timedelta(days=100)).strftime("%Y%m%d%H%M%SZ")) + self.x509.set_notBefore(now) + self.x509.set_notAfter(expire) + + + def tearDown(self): + """ + Forget all of the pyOpenSSL objects so they can be garbage collected, + their memory released, and not interfere with the leak detection code. + """ + self.pkey = self.req = self.x509 = self.subject = None + super(X509ExtTests, self).tearDown() + + + def test_str(self): + """ + The string representation of :py:class:`X509Extension` instances as returned by + :py:data:`str` includes stuff. + """ + # This isn't necessarily the best string representation. Perhaps it + # will be changed/improved in the future. 
+ self.assertEquals( + str(X509Extension(b('basicConstraints'), True, b('CA:false'))), + 'CA:FALSE') + + + def test_type(self): + """ + :py:class:`X509Extension` and :py:class:`X509ExtensionType` refer to the same type object + and can be used to create instances of that type. + """ + self.assertIdentical(X509Extension, X509ExtensionType) + self.assertConsistentType( + X509Extension, + 'X509Extension', b('basicConstraints'), True, b('CA:true')) + + + def test_construction(self): + """ + :py:class:`X509Extension` accepts an extension type name, a critical flag, + and an extension value and returns an :py:class:`X509ExtensionType` instance. + """ + basic = X509Extension(b('basicConstraints'), True, b('CA:true')) + self.assertTrue( + isinstance(basic, X509ExtensionType), + "%r is of type %r, should be %r" % ( + basic, type(basic), X509ExtensionType)) + + comment = X509Extension( + b('nsComment'), False, b('pyOpenSSL unit test')) + self.assertTrue( + isinstance(comment, X509ExtensionType), + "%r is of type %r, should be %r" % ( + comment, type(comment), X509ExtensionType)) + + + def test_invalid_extension(self): + """ + :py:class:`X509Extension` raises something if it is passed a bad extension + name or value. + """ + self.assertRaises( + Error, X509Extension, b('thisIsMadeUp'), False, b('hi')) + self.assertRaises( + Error, X509Extension, b('basicConstraints'), False, b('blah blah')) + + # Exercise a weird one (an extension which uses the r2i method). This + # exercises the codepath that requires a non-NULL ctx to be passed to + # X509V3_EXT_nconf. It can't work now because we provide no + # configuration database. It might be made to work in the future. + self.assertRaises( + Error, X509Extension, b('proxyCertInfo'), True, + b('language:id-ppl-anyLanguage,pathlen:1,policy:text:AB')) + + + def test_get_critical(self): + """ + :py:meth:`X509ExtensionType.get_critical` returns the value of the + extension's critical flag. 
+ """ + ext = X509Extension(b('basicConstraints'), True, b('CA:true')) + self.assertTrue(ext.get_critical()) + ext = X509Extension(b('basicConstraints'), False, b('CA:true')) + self.assertFalse(ext.get_critical()) + + + def test_get_short_name(self): + """ + :py:meth:`X509ExtensionType.get_short_name` returns a string giving the short + type name of the extension. + """ + ext = X509Extension(b('basicConstraints'), True, b('CA:true')) + self.assertEqual(ext.get_short_name(), b('basicConstraints')) + ext = X509Extension(b('nsComment'), True, b('foo bar')) + self.assertEqual(ext.get_short_name(), b('nsComment')) + + + def test_get_data(self): + """ + :py:meth:`X509Extension.get_data` returns a string giving the data of the + extension. + """ + ext = X509Extension(b('basicConstraints'), True, b('CA:true')) + # Expect to get back the DER encoded form of CA:true. + self.assertEqual(ext.get_data(), b('0\x03\x01\x01\xff')) + + + def test_get_data_wrong_args(self): + """ + :py:meth:`X509Extension.get_data` raises :py:exc:`TypeError` if passed any arguments. + """ + ext = X509Extension(b('basicConstraints'), True, b('CA:true')) + self.assertRaises(TypeError, ext.get_data, None) + self.assertRaises(TypeError, ext.get_data, "foo") + self.assertRaises(TypeError, ext.get_data, 7) + + + def test_unused_subject(self): + """ + The :py:data:`subject` parameter to :py:class:`X509Extension` may be provided for an + extension which does not use it and is ignored in this case. + """ + ext1 = X509Extension( + b('basicConstraints'), False, b('CA:TRUE'), subject=self.x509) + self.x509.add_extensions([ext1]) + self.x509.sign(self.pkey, 'sha1') + # This is a little lame. Can we think of a better way? 
+ text = dump_certificate(FILETYPE_TEXT, self.x509) + self.assertTrue(b('X509v3 Basic Constraints:') in text) + self.assertTrue(b('CA:TRUE') in text) + + + def test_subject(self): + """ + If an extension requires a subject, the :py:data:`subject` parameter to + :py:class:`X509Extension` provides its value. + """ + ext3 = X509Extension( + b('subjectKeyIdentifier'), False, b('hash'), subject=self.x509) + self.x509.add_extensions([ext3]) + self.x509.sign(self.pkey, 'sha1') + text = dump_certificate(FILETYPE_TEXT, self.x509) + self.assertTrue(b('X509v3 Subject Key Identifier:') in text) + + + def test_missing_subject(self): + """ + If an extension requires a subject and the :py:data:`subject` parameter is + given no value, something happens. + """ + self.assertRaises( + Error, X509Extension, b('subjectKeyIdentifier'), False, b('hash')) + + + def test_invalid_subject(self): + """ + If the :py:data:`subject` parameter is given a value which is not an + :py:class:`X509` instance, :py:exc:`TypeError` is raised. + """ + for badObj in [True, object(), "hello", [], self]: + self.assertRaises( + TypeError, + X509Extension, + 'basicConstraints', False, 'CA:TRUE', subject=badObj) + + + def test_unused_issuer(self): + """ + The :py:data:`issuer` parameter to :py:class:`X509Extension` may be provided for an + extension which does not use it and is ignored in this case. + """ + ext1 = X509Extension( + b('basicConstraints'), False, b('CA:TRUE'), issuer=self.x509) + self.x509.add_extensions([ext1]) + self.x509.sign(self.pkey, 'sha1') + text = dump_certificate(FILETYPE_TEXT, self.x509) + self.assertTrue(b('X509v3 Basic Constraints:') in text) + self.assertTrue(b('CA:TRUE') in text) + + + def test_issuer(self): + """ + If an extension requires a issuer, the :py:data:`issuer` parameter to + :py:class:`X509Extension` provides its value. 
+ """ + ext2 = X509Extension( + b('authorityKeyIdentifier'), False, b('issuer:always'), + issuer=self.x509) + self.x509.add_extensions([ext2]) + self.x509.sign(self.pkey, 'sha1') + text = dump_certificate(FILETYPE_TEXT, self.x509) + self.assertTrue(b('X509v3 Authority Key Identifier:') in text) + self.assertTrue(b('DirName:/CN=Yoda root CA') in text) + + + def test_missing_issuer(self): + """ + If an extension requires an issue and the :py:data:`issuer` parameter is given + no value, something happens. + """ + self.assertRaises( + Error, + X509Extension, + b('authorityKeyIdentifier'), False, + b('keyid:always,issuer:always')) + + + def test_invalid_issuer(self): + """ + If the :py:data:`issuer` parameter is given a value which is not an + :py:class:`X509` instance, :py:exc:`TypeError` is raised. + """ + for badObj in [True, object(), "hello", [], self]: + self.assertRaises( + TypeError, + X509Extension, + 'authorityKeyIdentifier', False, 'keyid:always,issuer:always', + issuer=badObj) + + + +class PKeyTests(TestCase): + """ + Unit tests for :py:class:`OpenSSL.crypto.PKey`. + """ + def test_type(self): + """ + :py:class:`PKey` and :py:class:`PKeyType` refer to the same type object + and can be used to create instances of that type. + """ + self.assertIdentical(PKey, PKeyType) + self.assertConsistentType(PKey, 'PKey') + + + def test_construction(self): + """ + :py:class:`PKey` takes no arguments and returns a new :py:class:`PKey` instance. + """ + self.assertRaises(TypeError, PKey, None) + key = PKey() + self.assertTrue( + isinstance(key, PKeyType), + "%r is of type %r, should be %r" % (key, type(key), PKeyType)) + + + def test_pregeneration(self): + """ + :py:attr:`PKeyType.bits` and :py:attr:`PKeyType.type` return :py:data:`0` before the key is + generated. :py:attr:`PKeyType.check` raises :py:exc:`TypeError` before the key is + generated. 
+ """ + key = PKey() + self.assertEqual(key.type(), 0) + self.assertEqual(key.bits(), 0) + self.assertRaises(TypeError, key.check) + + + def test_failedGeneration(self): + """ + :py:meth:`PKeyType.generate_key` takes two arguments, the first giving the key + type as one of :py:data:`TYPE_RSA` or :py:data:`TYPE_DSA` and the second giving the + number of bits to generate. If an invalid type is specified or + generation fails, :py:exc:`Error` is raised. If an invalid number of bits is + specified, :py:exc:`ValueError` or :py:exc:`Error` is raised. + """ + key = PKey() + self.assertRaises(TypeError, key.generate_key) + self.assertRaises(TypeError, key.generate_key, 1, 2, 3) + self.assertRaises(TypeError, key.generate_key, "foo", "bar") + self.assertRaises(Error, key.generate_key, -1, 0) + + self.assertRaises(ValueError, key.generate_key, TYPE_RSA, -1) + self.assertRaises(ValueError, key.generate_key, TYPE_RSA, 0) + + # XXX RSA generation for small values of bits is fairly buggy in a wide + # range of OpenSSL versions. I need to figure out what the safe lower + # bound for a reasonable number of OpenSSL versions is and explicitly + # check for that in the wrapper. The failure behavior is typically an + # infinite loop inside OpenSSL. + + # self.assertRaises(Error, key.generate_key, TYPE_RSA, 2) + + # XXX DSA generation seems happy with any number of bits. The DSS + # says bits must be between 512 and 1024 inclusive. OpenSSL's DSA + # generator doesn't seem to care about the upper limit at all. For + # the lower limit, it uses 512 if anything smaller is specified. + # So, it doesn't seem possible to make generate_key fail for + # TYPE_DSA with a bits argument which is at least an int. + + # self.assertRaises(Error, key.generate_key, TYPE_DSA, -7) + + + def test_rsaGeneration(self): + """ + :py:meth:`PKeyType.generate_key` generates an RSA key when passed + :py:data:`TYPE_RSA` as a type and a reasonable number of bits. 
+ """ + bits = 128 + key = PKey() + key.generate_key(TYPE_RSA, bits) + self.assertEqual(key.type(), TYPE_RSA) + self.assertEqual(key.bits(), bits) + self.assertTrue(key.check()) + + + def test_dsaGeneration(self): + """ + :py:meth:`PKeyType.generate_key` generates a DSA key when passed + :py:data:`TYPE_DSA` as a type and a reasonable number of bits. + """ + # 512 is a magic number. The DSS (Digital Signature Standard) + # allows a minimum of 512 bits for DSA. DSA_generate_parameters + # will silently promote any value below 512 to 512. + bits = 512 + key = PKey() + key.generate_key(TYPE_DSA, bits) + # self.assertEqual(key.type(), TYPE_DSA) + # self.assertEqual(key.bits(), bits) + # self.assertRaises(TypeError, key.check) + + + def test_regeneration(self): + """ + :py:meth:`PKeyType.generate_key` can be called multiple times on the same + key to generate new keys. + """ + key = PKey() + for type, bits in [(TYPE_RSA, 512), (TYPE_DSA, 576)]: + key.generate_key(type, bits) + self.assertEqual(key.type(), type) + self.assertEqual(key.bits(), bits) + + + def test_inconsistentKey(self): + """ + :py:`PKeyType.check` returns :py:exc:`Error` if the key is not consistent. + """ + key = load_privatekey(FILETYPE_PEM, inconsistentPrivateKeyPEM) + self.assertRaises(Error, key.check) + + + def test_check_wrong_args(self): + """ + :py:meth:`PKeyType.check` raises :py:exc:`TypeError` if called with any arguments. + """ + self.assertRaises(TypeError, PKey().check, None) + self.assertRaises(TypeError, PKey().check, object()) + self.assertRaises(TypeError, PKey().check, 1) + + + def test_check_public_key(self): + """ + :py:meth:`PKeyType.check` raises :py:exc:`TypeError` if only the public + part of the key is available. 
+ """ + # A trick to get a public-only key + key = PKey() + key.generate_key(TYPE_RSA, 512) + cert = X509() + cert.set_pubkey(key) + pub = cert.get_pubkey() + self.assertRaises(TypeError, pub.check) + + + +class X509NameTests(TestCase): + """ + Unit tests for :py:class:`OpenSSL.crypto.X509Name`. + """ + def _x509name(self, **attrs): + # XXX There's no other way to get a new X509Name yet. + name = X509().get_subject() + attrs = list(attrs.items()) + # Make the order stable - order matters! + def key(attr): + return attr[1] + attrs.sort(key=key) + for k, v in attrs: + setattr(name, k, v) + return name + + + def test_type(self): + """ + The type of X509Name objects is :py:class:`X509NameType`. + """ + self.assertIdentical(X509Name, X509NameType) + self.assertEqual(X509NameType.__name__, 'X509Name') + self.assertTrue(isinstance(X509NameType, type)) + + name = self._x509name() + self.assertTrue( + isinstance(name, X509NameType), + "%r is of type %r, should be %r" % ( + name, type(name), X509NameType)) + + + def test_onlyStringAttributes(self): + """ + Attempting to set a non-:py:data:`str` attribute name on an :py:class:`X509NameType` + instance causes :py:exc:`TypeError` to be raised. + """ + name = self._x509name() + # Beyond these cases, you may also think that unicode should be + # rejected. Sorry, you're wrong. unicode is automatically converted to + # str outside of the control of X509Name, so there's no way to reject + # it. + + # Also, this used to test str subclasses, but that test is less relevant + # now that the implementation is in Python instead of C. Also PyPy + # automatically converts str subclasses to str when they are passed to + # setattr, so we can't test it on PyPy. Apparently CPython does this + # sometimes as well. 
+ self.assertRaises(TypeError, setattr, name, None, "hello") + self.assertRaises(TypeError, setattr, name, 30, "hello") + + + def test_setInvalidAttribute(self): + """ + Attempting to set any attribute name on an :py:class:`X509NameType` instance for + which no corresponding NID is defined causes :py:exc:`AttributeError` to be + raised. + """ + name = self._x509name() + self.assertRaises(AttributeError, setattr, name, "no such thing", None) + + + def test_attributes(self): + """ + :py:class:`X509NameType` instances have attributes for each standard (?) + X509Name field. + """ + name = self._x509name() + name.commonName = "foo" + self.assertEqual(name.commonName, "foo") + self.assertEqual(name.CN, "foo") + name.CN = "baz" + self.assertEqual(name.commonName, "baz") + self.assertEqual(name.CN, "baz") + name.commonName = "bar" + self.assertEqual(name.commonName, "bar") + self.assertEqual(name.CN, "bar") + name.CN = "quux" + self.assertEqual(name.commonName, "quux") + self.assertEqual(name.CN, "quux") + + + def test_copy(self): + """ + :py:class:`X509Name` creates a new :py:class:`X509NameType` instance with all the same + attributes as an existing :py:class:`X509NameType` instance when called with + one. + """ + name = self._x509name(commonName="foo", emailAddress="bar@example.com") + + copy = X509Name(name) + self.assertEqual(copy.commonName, "foo") + self.assertEqual(copy.emailAddress, "bar@example.com") + + # Mutate the copy and ensure the original is unmodified. + copy.commonName = "baz" + self.assertEqual(name.commonName, "foo") + + # Mutate the original and ensure the copy is unmodified. + name.emailAddress = "quux@example.com" + self.assertEqual(copy.emailAddress, "bar@example.com") + + + def test_repr(self): + """ + :py:func:`repr` passed an :py:class:`X509NameType` instance should return a string + containing a description of the type and the NIDs which have been set + on it. 
+ """ + name = self._x509name(commonName="foo", emailAddress="bar") + self.assertEqual( + repr(name), + "") + + + def test_comparison(self): + """ + :py:class:`X509NameType` instances should compare based on their NIDs. + """ + def _equality(a, b, assertTrue, assertFalse): + assertTrue(a == b, "(%r == %r) --> False" % (a, b)) + assertFalse(a != b) + assertTrue(b == a) + assertFalse(b != a) + + def assertEqual(a, b): + _equality(a, b, self.assertTrue, self.assertFalse) + + # Instances compare equal to themselves. + name = self._x509name() + assertEqual(name, name) + + # Empty instances should compare equal to each other. + assertEqual(self._x509name(), self._x509name()) + + # Instances with equal NIDs should compare equal to each other. + assertEqual(self._x509name(commonName="foo"), + self._x509name(commonName="foo")) + + # Instance with equal NIDs set using different aliases should compare + # equal to each other. + assertEqual(self._x509name(commonName="foo"), + self._x509name(CN="foo")) + + # Instances with more than one NID with the same values should compare + # equal to each other. + assertEqual(self._x509name(CN="foo", organizationalUnitName="bar"), + self._x509name(commonName="foo", OU="bar")) + + def assertNotEqual(a, b): + _equality(a, b, self.assertFalse, self.assertTrue) + + # Instances with different values for the same NID should not compare + # equal to each other. + assertNotEqual(self._x509name(CN="foo"), + self._x509name(CN="bar")) + + # Instances with different NIDs should not compare equal to each other. 
+ assertNotEqual(self._x509name(CN="foo"), + self._x509name(OU="foo")) + + def _inequality(a, b, assertTrue, assertFalse): + assertTrue(a < b) + assertTrue(a <= b) + assertTrue(b > a) + assertTrue(b >= a) + assertFalse(a > b) + assertFalse(a >= b) + assertFalse(b < a) + assertFalse(b <= a) + + def assertLessThan(a, b): + _inequality(a, b, self.assertTrue, self.assertFalse) + + # An X509Name with a NID with a value which sorts less than the value + # of the same NID on another X509Name compares less than the other + # X509Name. + assertLessThan(self._x509name(CN="abc"), + self._x509name(CN="def")) + + def assertGreaterThan(a, b): + _inequality(a, b, self.assertFalse, self.assertTrue) + + # An X509Name with a NID with a value which sorts greater than the + # value of the same NID on another X509Name compares greater than the + # other X509Name. + assertGreaterThan(self._x509name(CN="def"), + self._x509name(CN="abc")) + + + def test_hash(self): + """ + :py:meth:`X509Name.hash` returns an integer hash based on the value of the + name. + """ + a = self._x509name(CN="foo") + b = self._x509name(CN="foo") + self.assertEqual(a.hash(), b.hash()) + a.CN = "bar" + self.assertNotEqual(a.hash(), b.hash()) + + + def test_der(self): + """ + :py:meth:`X509Name.der` returns the DER encoded form of the name. + """ + a = self._x509name(CN="foo", C="US") + self.assertEqual( + a.der(), + b('0\x1b1\x0b0\t\x06\x03U\x04\x06\x13\x02US' + '1\x0c0\n\x06\x03U\x04\x03\x13\x03foo')) + + + def test_get_components(self): + """ + :py:meth:`X509Name.get_components` returns a :py:data:`list` of + two-tuples of :py:data:`str` + giving the NIDs and associated values which make up the name. 
+ """ + a = self._x509name() + self.assertEqual(a.get_components(), []) + a.CN = "foo" + self.assertEqual(a.get_components(), [(b("CN"), b("foo"))]) + a.organizationalUnitName = "bar" + self.assertEqual( + a.get_components(), + [(b("CN"), b("foo")), (b("OU"), b("bar"))]) + + + def test_load_nul_byte_attribute(self): + """ + An :py:class:`OpenSSL.crypto.X509Name` from an + :py:class:`OpenSSL.crypto.X509` instance loaded from a file can have a + NUL byte in the value of one of its attributes. + """ + cert = load_certificate(FILETYPE_PEM, nulbyteSubjectAltNamePEM) + subject = cert.get_subject() + self.assertEqual( + "null.python.org\x00example.org", subject.commonName) + + + def test_setAttributeFailure(self): + """ + If the value of an attribute cannot be set for some reason then + :py:class:`OpenSSL.crypto.Error` is raised. + """ + name = self._x509name() + # This value is too long + self.assertRaises(Error, setattr, name, "O", b"x" * 512) + + + +class _PKeyInteractionTestsMixin: + """ + Tests which involve another thing and a PKey. + """ + def signable(self): + """ + Return something with a :py:meth:`set_pubkey`, :py:meth:`set_pubkey`, + and :py:meth:`sign` method. + """ + raise NotImplementedError() + + + def test_signWithUngenerated(self): + """ + :py:meth:`X509Req.sign` raises :py:exc:`ValueError` when pass a + :py:class:`PKey` with no parts. + """ + request = self.signable() + key = PKey() + self.assertRaises(ValueError, request.sign, key, GOOD_DIGEST) + + + def test_signWithPublicKey(self): + """ + :py:meth:`X509Req.sign` raises :py:exc:`ValueError` when pass a + :py:class:`PKey` with no private part as the signing key. 
+ """ + request = self.signable() + key = PKey() + key.generate_key(TYPE_RSA, 512) + request.set_pubkey(key) + pub = request.get_pubkey() + self.assertRaises(ValueError, request.sign, pub, GOOD_DIGEST) + + + def test_signWithUnknownDigest(self): + """ + :py:meth:`X509Req.sign` raises :py:exc:`ValueError` when passed a digest name which is + not known. + """ + request = self.signable() + key = PKey() + key.generate_key(TYPE_RSA, 512) + self.assertRaises(ValueError, request.sign, key, BAD_DIGEST) + + + def test_sign(self): + """ + :py:meth:`X509Req.sign` succeeds when passed a private key object and a valid + digest function. :py:meth:`X509Req.verify` can be used to check the signature. + """ + request = self.signable() + key = PKey() + key.generate_key(TYPE_RSA, 512) + request.set_pubkey(key) + request.sign(key, GOOD_DIGEST) + # If the type has a verify method, cover that too. + if getattr(request, 'verify', None) is not None: + pub = request.get_pubkey() + self.assertTrue(request.verify(pub)) + # Make another key that won't verify. + key = PKey() + key.generate_key(TYPE_RSA, 512) + self.assertRaises(Error, request.verify, key) + + + + +class X509ReqTests(TestCase, _PKeyInteractionTestsMixin): + """ + Tests for :py:class:`OpenSSL.crypto.X509Req`. + """ + def signable(self): + """ + Create and return a new :py:class:`X509Req`. + """ + return X509Req() + + + def test_type(self): + """ + :py:obj:`X509Req` and :py:obj:`X509ReqType` refer to the same type object and can be + used to create instances of that type. + """ + self.assertIdentical(X509Req, X509ReqType) + self.assertConsistentType(X509Req, 'X509Req') + + + def test_construction(self): + """ + :py:obj:`X509Req` takes no arguments and returns an :py:obj:`X509ReqType` instance. 
+ """ + request = X509Req() + self.assertTrue( + isinstance(request, X509ReqType), + "%r is of type %r, should be %r" % (request, type(request), X509ReqType)) + + + def test_version(self): + """ + :py:obj:`X509ReqType.set_version` sets the X.509 version of the certificate + request. :py:obj:`X509ReqType.get_version` returns the X.509 version of + the certificate request. The initial value of the version is 0. + """ + request = X509Req() + self.assertEqual(request.get_version(), 0) + request.set_version(1) + self.assertEqual(request.get_version(), 1) + request.set_version(3) + self.assertEqual(request.get_version(), 3) + + + def test_version_wrong_args(self): + """ + :py:obj:`X509ReqType.set_version` raises :py:obj:`TypeError` if called with the wrong + number of arguments or with a non-:py:obj:`int` argument. + :py:obj:`X509ReqType.get_version` raises :py:obj:`TypeError` if called with any + arguments. + """ + request = X509Req() + self.assertRaises(TypeError, request.set_version) + self.assertRaises(TypeError, request.set_version, "foo") + self.assertRaises(TypeError, request.set_version, 1, 2) + self.assertRaises(TypeError, request.get_version, None) + + + def test_get_subject(self): + """ + :py:obj:`X509ReqType.get_subject` returns an :py:obj:`X509Name` for the subject of + the request and which is valid even after the request object is + otherwise dead. + """ + request = X509Req() + subject = request.get_subject() + self.assertTrue( + isinstance(subject, X509NameType), + "%r is of type %r, should be %r" % (subject, type(subject), X509NameType)) + subject.commonName = "foo" + self.assertEqual(request.get_subject().commonName, "foo") + del request + subject.commonName = "bar" + self.assertEqual(subject.commonName, "bar") + + + def test_get_subject_wrong_args(self): + """ + :py:obj:`X509ReqType.get_subject` raises :py:obj:`TypeError` if called with any + arguments. 
+ """ + request = X509Req() + self.assertRaises(TypeError, request.get_subject, None) + + + def test_add_extensions(self): + """ + :py:obj:`X509Req.add_extensions` accepts a :py:obj:`list` of :py:obj:`X509Extension` + instances and adds them to the X509 request. + """ + request = X509Req() + request.add_extensions([ + X509Extension(b('basicConstraints'), True, b('CA:false'))]) + # XXX Add get_extensions so the rest of this unit test can be written. + + + def test_add_extensions_wrong_args(self): + """ + :py:obj:`X509Req.add_extensions` raises :py:obj:`TypeError` if called with the wrong + number of arguments or with a non-:py:obj:`list`. Or it raises :py:obj:`ValueError` + if called with a :py:obj:`list` containing objects other than :py:obj:`X509Extension` + instances. + """ + request = X509Req() + self.assertRaises(TypeError, request.add_extensions) + self.assertRaises(TypeError, request.add_extensions, object()) + self.assertRaises(ValueError, request.add_extensions, [object()]) + self.assertRaises(TypeError, request.add_extensions, [], None) + + + def test_verify_wrong_args(self): + """ + :py:obj:`X509Req.verify` raises :py:obj:`TypeError` if called with zero + arguments or more than one argument or if passed anything other than a + :py:obj:`PKey` instance as its single argument. + """ + request = X509Req() + self.assertRaises(TypeError, request.verify) + self.assertRaises(TypeError, request.verify, object()) + self.assertRaises(TypeError, request.verify, PKey(), object()) + + + def test_verify_uninitialized_key(self): + """ + :py:obj:`X509Req.verify` raises :py:obj:`OpenSSL.crypto.Error` if called + with a :py:obj:`OpenSSL.crypto.PKey` which contains no key data. 
+ """ + request = X509Req() + pkey = PKey() + self.assertRaises(Error, request.verify, pkey) + + + def test_verify_wrong_key(self): + """ + :py:obj:`X509Req.verify` raises :py:obj:`OpenSSL.crypto.Error` if called + with a :py:obj:`OpenSSL.crypto.PKey` which does not represent the public + part of the key which signed the request. + """ + request = X509Req() + pkey = load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM) + request.sign(pkey, GOOD_DIGEST) + another_pkey = load_privatekey(FILETYPE_PEM, client_key_pem) + self.assertRaises(Error, request.verify, another_pkey) + + + def test_verify_success(self): + """ + :py:obj:`X509Req.verify` returns :py:obj:`True` if called with a + :py:obj:`OpenSSL.crypto.PKey` which represents the public part ofthe key + which signed the request. + """ + request = X509Req() + pkey = load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM) + request.sign(pkey, GOOD_DIGEST) + self.assertEqual(True, request.verify(pkey)) + + + +class X509Tests(TestCase, _PKeyInteractionTestsMixin): + """ + Tests for :py:obj:`OpenSSL.crypto.X509`. 
+ """ + pemData = cleartextCertificatePEM + cleartextPrivateKeyPEM + + extpem = """ +-----BEGIN CERTIFICATE----- +MIIC3jCCAkegAwIBAgIJAJHFjlcCgnQzMA0GCSqGSIb3DQEBBQUAMEcxCzAJBgNV +BAYTAlNFMRUwEwYDVQQIEwxXZXN0ZXJib3R0b20xEjAQBgNVBAoTCUNhdGFsb2dp +eDENMAsGA1UEAxMEUm9vdDAeFw0wODA0MjIxNDQ1MzhaFw0wOTA0MjIxNDQ1Mzha +MFQxCzAJBgNVBAYTAlNFMQswCQYDVQQIEwJXQjEUMBIGA1UEChMLT3Blbk1ldGFk +aXIxIjAgBgNVBAMTGW5vZGUxLm9tMi5vcGVubWV0YWRpci5vcmcwgZ8wDQYJKoZI +hvcNAQEBBQADgY0AMIGJAoGBAPIcQMrwbk2nESF/0JKibj9i1x95XYAOwP+LarwT +Op4EQbdlI9SY+uqYqlERhF19w7CS+S6oyqx0DRZSk4Y9dZ9j9/xgm2u/f136YS1u +zgYFPvfUs6PqYLPSM8Bw+SjJ+7+2+TN+Tkiof9WP1cMjodQwOmdsiRbR0/J7+b1B +hec1AgMBAAGjgcQwgcEwCQYDVR0TBAIwADAsBglghkgBhvhCAQ0EHxYdT3BlblNT +TCBHZW5lcmF0ZWQgQ2VydGlmaWNhdGUwHQYDVR0OBBYEFIdHsBcMVVMbAO7j6NCj +03HgLnHaMB8GA1UdIwQYMBaAFL2h9Bf9Mre4vTdOiHTGAt7BRY/8MEYGA1UdEQQ/ +MD2CDSouZXhhbXBsZS5vcmeCESoub20yLmV4bWFwbGUuY29thwSC7wgKgRNvbTJA +b3Blbm1ldGFkaXIub3JnMA0GCSqGSIb3DQEBBQUAA4GBALd7WdXkp2KvZ7/PuWZA +MPlIxyjS+Ly11+BNE0xGQRp9Wz+2lABtpgNqssvU156+HkKd02rGheb2tj7MX9hG +uZzbwDAZzJPjzDQDD7d3cWsrVcfIdqVU7epHqIadnOF+X0ghJ39pAm6VVadnSXCt +WpOdIpB8KksUTCzV591Nr1wd +-----END CERTIFICATE----- + """ + def signable(self): + """ + Create and return a new :py:obj:`X509`. + """ + return X509() + + + def test_type(self): + """ + :py:obj:`X509` and :py:obj:`X509Type` refer to the same type object and can be used + to create instances of that type. + """ + self.assertIdentical(X509, X509Type) + self.assertConsistentType(X509, 'X509') + + + def test_construction(self): + """ + :py:obj:`X509` takes no arguments and returns an instance of :py:obj:`X509Type`. 
+ """ + certificate = X509() + self.assertTrue( + isinstance(certificate, X509Type), + "%r is of type %r, should be %r" % (certificate, + type(certificate), + X509Type)) + self.assertEqual(type(X509Type).__name__, 'type') + self.assertEqual(type(certificate).__name__, 'X509') + self.assertEqual(type(certificate), X509Type) + self.assertEqual(type(certificate), X509) + + + def test_get_version_wrong_args(self): + """ + :py:obj:`X509.get_version` raises :py:obj:`TypeError` if invoked with any arguments. + """ + cert = X509() + self.assertRaises(TypeError, cert.get_version, None) + + + def test_set_version_wrong_args(self): + """ + :py:obj:`X509.set_version` raises :py:obj:`TypeError` if invoked with the wrong number + of arguments or an argument not of type :py:obj:`int`. + """ + cert = X509() + self.assertRaises(TypeError, cert.set_version) + self.assertRaises(TypeError, cert.set_version, None) + self.assertRaises(TypeError, cert.set_version, 1, None) + + + def test_version(self): + """ + :py:obj:`X509.set_version` sets the certificate version number. + :py:obj:`X509.get_version` retrieves it. + """ + cert = X509() + cert.set_version(1234) + self.assertEquals(cert.get_version(), 1234) + + + def test_get_serial_number_wrong_args(self): + """ + :py:obj:`X509.get_serial_number` raises :py:obj:`TypeError` if invoked with any + arguments. + """ + cert = X509() + self.assertRaises(TypeError, cert.get_serial_number, None) + + + def test_serial_number(self): + """ + The serial number of an :py:obj:`X509Type` can be retrieved and modified with + :py:obj:`X509Type.get_serial_number` and :py:obj:`X509Type.set_serial_number`. 
+ """ + certificate = X509() + self.assertRaises(TypeError, certificate.set_serial_number) + self.assertRaises(TypeError, certificate.set_serial_number, 1, 2) + self.assertRaises(TypeError, certificate.set_serial_number, "1") + self.assertRaises(TypeError, certificate.set_serial_number, 5.5) + self.assertEqual(certificate.get_serial_number(), 0) + certificate.set_serial_number(1) + self.assertEqual(certificate.get_serial_number(), 1) + certificate.set_serial_number(2 ** 32 + 1) + self.assertEqual(certificate.get_serial_number(), 2 ** 32 + 1) + certificate.set_serial_number(2 ** 64 + 1) + self.assertEqual(certificate.get_serial_number(), 2 ** 64 + 1) + certificate.set_serial_number(2 ** 128 + 1) + self.assertEqual(certificate.get_serial_number(), 2 ** 128 + 1) + + + def _setBoundTest(self, which): + """ + :py:obj:`X509Type.set_notBefore` takes a string in the format of an ASN1 + GENERALIZEDTIME and sets the beginning of the certificate's validity + period to it. + """ + certificate = X509() + set = getattr(certificate, 'set_not' + which) + get = getattr(certificate, 'get_not' + which) + + # Starts with no value. + self.assertEqual(get(), None) + + # GMT (Or is it UTC?) -exarkun + when = b("20040203040506Z") + set(when) + self.assertEqual(get(), when) + + # A plus two hours and thirty minutes offset + when = b("20040203040506+0530") + set(when) + self.assertEqual(get(), when) + + # A minus one hour fifteen minutes offset + when = b("20040203040506-0115") + set(when) + self.assertEqual(get(), when) + + # An invalid string results in a ValueError + self.assertRaises(ValueError, set, b("foo bar")) + + # The wrong number of arguments results in a TypeError. 
+ self.assertRaises(TypeError, set) + self.assertRaises(TypeError, set, b("20040203040506Z"), b("20040203040506Z")) + self.assertRaises(TypeError, get, b("foo bar")) + + + # XXX ASN1_TIME (not GENERALIZEDTIME) + + def test_set_notBefore(self): + """ + :py:obj:`X509Type.set_notBefore` takes a string in the format of an ASN1 + GENERALIZEDTIME and sets the beginning of the certificate's validity + period to it. + """ + self._setBoundTest("Before") + + + def test_set_notAfter(self): + """ + :py:obj:`X509Type.set_notAfter` takes a string in the format of an ASN1 + GENERALIZEDTIME and sets the end of the certificate's validity period + to it. + """ + self._setBoundTest("After") + + + def test_get_notBefore(self): + """ + :py:obj:`X509Type.get_notBefore` returns a string in the format of an ASN1 + GENERALIZEDTIME even for certificates which store it as UTCTIME + internally. + """ + cert = load_certificate(FILETYPE_PEM, self.pemData) + self.assertEqual(cert.get_notBefore(), b("20090325123658Z")) + + + def test_get_notAfter(self): + """ + :py:obj:`X509Type.get_notAfter` returns a string in the format of an ASN1 + GENERALIZEDTIME even for certificates which store it as UTCTIME + internally. + """ + cert = load_certificate(FILETYPE_PEM, self.pemData) + self.assertEqual(cert.get_notAfter(), b("20170611123658Z")) + + + def test_gmtime_adj_notBefore_wrong_args(self): + """ + :py:obj:`X509Type.gmtime_adj_notBefore` raises :py:obj:`TypeError` if called with the + wrong number of arguments or a non-:py:obj:`int` argument. + """ + cert = X509() + self.assertRaises(TypeError, cert.gmtime_adj_notBefore) + self.assertRaises(TypeError, cert.gmtime_adj_notBefore, None) + self.assertRaises(TypeError, cert.gmtime_adj_notBefore, 123, None) + + + def test_gmtime_adj_notBefore(self): + """ + :py:obj:`X509Type.gmtime_adj_notBefore` changes the not-before timestamp to be + the current time plus the number of seconds passed in. 
+ """ + cert = load_certificate(FILETYPE_PEM, self.pemData) + now = datetime.utcnow() + timedelta(seconds=100) + cert.gmtime_adj_notBefore(100) + self.assertEqual(cert.get_notBefore(), b(now.strftime("%Y%m%d%H%M%SZ"))) + + + def test_gmtime_adj_notAfter_wrong_args(self): + """ + :py:obj:`X509Type.gmtime_adj_notAfter` raises :py:obj:`TypeError` if called with the + wrong number of arguments or a non-:py:obj:`int` argument. + """ + cert = X509() + self.assertRaises(TypeError, cert.gmtime_adj_notAfter) + self.assertRaises(TypeError, cert.gmtime_adj_notAfter, None) + self.assertRaises(TypeError, cert.gmtime_adj_notAfter, 123, None) + + + def test_gmtime_adj_notAfter(self): + """ + :py:obj:`X509Type.gmtime_adj_notAfter` changes the not-after timestamp to be + the current time plus the number of seconds passed in. + """ + cert = load_certificate(FILETYPE_PEM, self.pemData) + now = datetime.utcnow() + timedelta(seconds=100) + cert.gmtime_adj_notAfter(100) + self.assertEqual(cert.get_notAfter(), b(now.strftime("%Y%m%d%H%M%SZ"))) + + + def test_has_expired_wrong_args(self): + """ + :py:obj:`X509Type.has_expired` raises :py:obj:`TypeError` if called with any + arguments. + """ + cert = X509() + self.assertRaises(TypeError, cert.has_expired, None) + + + def test_has_expired(self): + """ + :py:obj:`X509Type.has_expired` returns :py:obj:`True` if the certificate's not-after + time is in the past. + """ + cert = X509() + cert.gmtime_adj_notAfter(-1) + self.assertTrue(cert.has_expired()) + + + def test_has_not_expired(self): + """ + :py:obj:`X509Type.has_expired` returns :py:obj:`False` if the certificate's not-after + time is in the future. + """ + cert = X509() + cert.gmtime_adj_notAfter(2) + self.assertFalse(cert.has_expired()) + + + def test_digest(self): + """ + :py:obj:`X509.digest` returns a string giving ":"-separated hex-encoded words + of the digest of the certificate. 
+ """ + cert = X509() + self.assertEqual( + # This is MD5 instead of GOOD_DIGEST because the digest algorithm + # actually matters to the assertion (ie, another arbitrary, good + # digest will not product the same digest). + cert.digest("MD5"), + b("A8:EB:07:F8:53:25:0A:F2:56:05:C5:A5:C4:C4:C7:15")) + + + def _extcert(self, pkey, extensions): + cert = X509() + cert.set_pubkey(pkey) + cert.get_subject().commonName = "Unit Tests" + cert.get_issuer().commonName = "Unit Tests" + when = b(datetime.now().strftime("%Y%m%d%H%M%SZ")) + cert.set_notBefore(when) + cert.set_notAfter(when) + + cert.add_extensions(extensions) + return load_certificate( + FILETYPE_PEM, dump_certificate(FILETYPE_PEM, cert)) + + + def test_extension_count(self): + """ + :py:obj:`X509.get_extension_count` returns the number of extensions that are + present in the certificate. + """ + pkey = load_privatekey(FILETYPE_PEM, client_key_pem) + ca = X509Extension(b('basicConstraints'), True, b('CA:FALSE')) + key = X509Extension(b('keyUsage'), True, b('digitalSignature')) + subjectAltName = X509Extension( + b('subjectAltName'), True, b('DNS:example.com')) + + # Try a certificate with no extensions at all. + c = self._extcert(pkey, []) + self.assertEqual(c.get_extension_count(), 0) + + # And a certificate with one + c = self._extcert(pkey, [ca]) + self.assertEqual(c.get_extension_count(), 1) + + # And a certificate with several + c = self._extcert(pkey, [ca, key, subjectAltName]) + self.assertEqual(c.get_extension_count(), 3) + + + def test_get_extension(self): + """ + :py:obj:`X509.get_extension` takes an integer and returns an :py:obj:`X509Extension` + corresponding to the extension at that index. 
+ """ + pkey = load_privatekey(FILETYPE_PEM, client_key_pem) + ca = X509Extension(b('basicConstraints'), True, b('CA:FALSE')) + key = X509Extension(b('keyUsage'), True, b('digitalSignature')) + subjectAltName = X509Extension( + b('subjectAltName'), False, b('DNS:example.com')) + + cert = self._extcert(pkey, [ca, key, subjectAltName]) + + ext = cert.get_extension(0) + self.assertTrue(isinstance(ext, X509Extension)) + self.assertTrue(ext.get_critical()) + self.assertEqual(ext.get_short_name(), b('basicConstraints')) + + ext = cert.get_extension(1) + self.assertTrue(isinstance(ext, X509Extension)) + self.assertTrue(ext.get_critical()) + self.assertEqual(ext.get_short_name(), b('keyUsage')) + + ext = cert.get_extension(2) + self.assertTrue(isinstance(ext, X509Extension)) + self.assertFalse(ext.get_critical()) + self.assertEqual(ext.get_short_name(), b('subjectAltName')) + + self.assertRaises(IndexError, cert.get_extension, -1) + self.assertRaises(IndexError, cert.get_extension, 4) + self.assertRaises(TypeError, cert.get_extension, "hello") + + + def test_nullbyte_subjectAltName(self): + """ + The fields of a `subjectAltName` extension on an X509 may contain NUL + bytes and this value is reflected in the string representation of the + extension object. + """ + cert = load_certificate(FILETYPE_PEM, nulbyteSubjectAltNamePEM) + + ext = cert.get_extension(3) + self.assertEqual(ext.get_short_name(), b('subjectAltName')) + self.assertEqual( + b("DNS:altnull.python.org\x00example.com, " + "email:null@python.org\x00user@example.org, " + "URI:http://null.python.org\x00http://example.org, " + "IP Address:192.0.2.1, IP Address:2001:DB8:0:0:0:0:0:1\n"), + b(str(ext))) + + + def test_invalid_digest_algorithm(self): + """ + :py:obj:`X509.digest` raises :py:obj:`ValueError` if called with an unrecognized hash + algorithm. 
+ """ + cert = X509() + self.assertRaises(ValueError, cert.digest, BAD_DIGEST) + + + def test_get_subject_wrong_args(self): + """ + :py:obj:`X509.get_subject` raises :py:obj:`TypeError` if called with any arguments. + """ + cert = X509() + self.assertRaises(TypeError, cert.get_subject, None) + + + def test_get_subject(self): + """ + :py:obj:`X509.get_subject` returns an :py:obj:`X509Name` instance. + """ + cert = load_certificate(FILETYPE_PEM, self.pemData) + subj = cert.get_subject() + self.assertTrue(isinstance(subj, X509Name)) + self.assertEquals( + subj.get_components(), + [(b('C'), b('US')), (b('ST'), b('IL')), (b('L'), b('Chicago')), + (b('O'), b('Testing')), (b('CN'), b('Testing Root CA'))]) + + + def test_set_subject_wrong_args(self): + """ + :py:obj:`X509.set_subject` raises a :py:obj:`TypeError` if called with the wrong + number of arguments or an argument not of type :py:obj:`X509Name`. + """ + cert = X509() + self.assertRaises(TypeError, cert.set_subject) + self.assertRaises(TypeError, cert.set_subject, None) + self.assertRaises(TypeError, cert.set_subject, cert.get_subject(), None) + + + def test_set_subject(self): + """ + :py:obj:`X509.set_subject` changes the subject of the certificate to the one + passed in. + """ + cert = X509() + name = cert.get_subject() + name.C = 'AU' + name.O = 'Unit Tests' + cert.set_subject(name) + self.assertEquals( + cert.get_subject().get_components(), + [(b('C'), b('AU')), (b('O'), b('Unit Tests'))]) + + + def test_get_issuer_wrong_args(self): + """ + :py:obj:`X509.get_issuer` raises :py:obj:`TypeError` if called with any arguments. + """ + cert = X509() + self.assertRaises(TypeError, cert.get_issuer, None) + + + def test_get_issuer(self): + """ + :py:obj:`X509.get_issuer` returns an :py:obj:`X509Name` instance. 
+ """ + cert = load_certificate(FILETYPE_PEM, self.pemData) + subj = cert.get_issuer() + self.assertTrue(isinstance(subj, X509Name)) + comp = subj.get_components() + self.assertEquals( + comp, + [(b('C'), b('US')), (b('ST'), b('IL')), (b('L'), b('Chicago')), + (b('O'), b('Testing')), (b('CN'), b('Testing Root CA'))]) + + + def test_set_issuer_wrong_args(self): + """ + :py:obj:`X509.set_issuer` raises a :py:obj:`TypeError` if called with the wrong + number of arguments or an argument not of type :py:obj:`X509Name`. + """ + cert = X509() + self.assertRaises(TypeError, cert.set_issuer) + self.assertRaises(TypeError, cert.set_issuer, None) + self.assertRaises(TypeError, cert.set_issuer, cert.get_issuer(), None) + + + def test_set_issuer(self): + """ + :py:obj:`X509.set_issuer` changes the issuer of the certificate to the one + passed in. + """ + cert = X509() + name = cert.get_issuer() + name.C = 'AU' + name.O = 'Unit Tests' + cert.set_issuer(name) + self.assertEquals( + cert.get_issuer().get_components(), + [(b('C'), b('AU')), (b('O'), b('Unit Tests'))]) + + + def test_get_pubkey_uninitialized(self): + """ + When called on a certificate with no public key, :py:obj:`X509.get_pubkey` + raises :py:obj:`OpenSSL.crypto.Error`. + """ + cert = X509() + self.assertRaises(Error, cert.get_pubkey) + + + def test_subject_name_hash_wrong_args(self): + """ + :py:obj:`X509.subject_name_hash` raises :py:obj:`TypeError` if called with any + arguments. + """ + cert = X509() + self.assertRaises(TypeError, cert.subject_name_hash, None) + + + def test_subject_name_hash(self): + """ + :py:obj:`X509.subject_name_hash` returns the hash of the certificate's subject + name. 
+ """ + cert = load_certificate(FILETYPE_PEM, self.pemData) + self.assertIn( + cert.subject_name_hash(), + [3350047874, # OpenSSL 0.9.8, MD5 + 3278919224, # OpenSSL 1.0.0, SHA1 + ]) + + + def test_get_signature_algorithm(self): + """ + :py:obj:`X509Type.get_signature_algorithm` returns a string which means + the algorithm used to sign the certificate. + """ + cert = load_certificate(FILETYPE_PEM, self.pemData) + self.assertEqual( + b("sha1WithRSAEncryption"), cert.get_signature_algorithm()) + + + def test_get_undefined_signature_algorithm(self): + """ + :py:obj:`X509Type.get_signature_algorithm` raises :py:obj:`ValueError` if the + signature algorithm is undefined or unknown. + """ + # This certificate has been modified to indicate a bogus OID in the + # signature algorithm field so that OpenSSL does not recognize it. + certPEM = b("""\ +-----BEGIN CERTIFICATE----- +MIIC/zCCAmigAwIBAgIBATAGBgJ8BQUAMHsxCzAJBgNVBAYTAlNHMREwDwYDVQQK +EwhNMkNyeXB0bzEUMBIGA1UECxMLTTJDcnlwdG8gQ0ExJDAiBgNVBAMTG00yQ3J5 +cHRvIENlcnRpZmljYXRlIE1hc3RlcjEdMBsGCSqGSIb3DQEJARYObmdwc0Bwb3N0 +MS5jb20wHhcNMDAwOTEwMDk1MTMwWhcNMDIwOTEwMDk1MTMwWjBTMQswCQYDVQQG +EwJTRzERMA8GA1UEChMITTJDcnlwdG8xEjAQBgNVBAMTCWxvY2FsaG9zdDEdMBsG +CSqGSIb3DQEJARYObmdwc0Bwb3N0MS5jb20wXDANBgkqhkiG9w0BAQEFAANLADBI +AkEArL57d26W9fNXvOhNlZzlPOACmvwOZ5AdNgLzJ1/MfsQQJ7hHVeHmTAjM664V ++fXvwUGJLziCeBo1ysWLRnl8CQIDAQABo4IBBDCCAQAwCQYDVR0TBAIwADAsBglg +hkgBhvhCAQ0EHxYdT3BlblNTTCBHZW5lcmF0ZWQgQ2VydGlmaWNhdGUwHQYDVR0O +BBYEFM+EgpK+eyZiwFU1aOPSbczbPSpVMIGlBgNVHSMEgZ0wgZqAFPuHI2nrnDqT +FeXFvylRT/7tKDgBoX+kfTB7MQswCQYDVQQGEwJTRzERMA8GA1UEChMITTJDcnlw +dG8xFDASBgNVBAsTC00yQ3J5cHRvIENBMSQwIgYDVQQDExtNMkNyeXB0byBDZXJ0 +aWZpY2F0ZSBNYXN0ZXIxHTAbBgkqhkiG9w0BCQEWDm5ncHNAcG9zdDEuY29tggEA +MA0GCSqGSIb3DQEBBAUAA4GBADv8KpPo+gfJxN2ERK1Y1l17sz/ZhzoGgm5XCdbx +jEY7xKfpQngV599k1xhl11IMqizDwu0855agrckg2MCTmOI9DZzDD77tAYb+Dk0O +PEVk0Mk/V0aIsDE9bolfCi/i/QWZ3N8s5nTWMNyBBBmoSliWCm4jkkRZRD0ejgTN +tgI5 +-----END CERTIFICATE----- +""") + cert = 
load_certificate(FILETYPE_PEM, certPEM) + self.assertRaises(ValueError, cert.get_signature_algorithm) + + + +class X509StoreTests(TestCase): + """ + Test for :py:obj:`OpenSSL.crypto.X509Store`. + """ + def test_type(self): + """ + :py:obj:`X509StoreType` is a type object. + """ + self.assertIdentical(X509Store, X509StoreType) + self.assertConsistentType(X509Store, 'X509Store') + + + def test_add_cert_wrong_args(self): + store = X509Store() + self.assertRaises(TypeError, store.add_cert) + self.assertRaises(TypeError, store.add_cert, object()) + self.assertRaises(TypeError, store.add_cert, X509(), object()) + + + def test_add_cert(self): + """ + :py:obj:`X509Store.add_cert` adds a :py:obj:`X509` instance to the + certificate store. + """ + cert = load_certificate(FILETYPE_PEM, cleartextCertificatePEM) + store = X509Store() + store.add_cert(cert) + + + def test_add_cert_rejects_duplicate(self): + """ + :py:obj:`X509Store.add_cert` raises :py:obj:`OpenSSL.crypto.Error` if an + attempt is made to add the same certificate to the store more than once. + """ + cert = load_certificate(FILETYPE_PEM, cleartextCertificatePEM) + store = X509Store() + store.add_cert(cert) + self.assertRaises(Error, store.add_cert, cert) + + + +class PKCS12Tests(TestCase): + """ + Test for :py:obj:`OpenSSL.crypto.PKCS12` and :py:obj:`OpenSSL.crypto.load_pkcs12`. + """ + pemData = cleartextCertificatePEM + cleartextPrivateKeyPEM + + def test_type(self): + """ + :py:obj:`PKCS12Type` is a type object. + """ + self.assertIdentical(PKCS12, PKCS12Type) + self.assertConsistentType(PKCS12, 'PKCS12') + + + def test_empty_construction(self): + """ + :py:obj:`PKCS12` returns a new instance of :py:obj:`PKCS12` with no certificate, + private key, CA certificates, or friendly name. 
+ """ + p12 = PKCS12() + self.assertEqual(None, p12.get_certificate()) + self.assertEqual(None, p12.get_privatekey()) + self.assertEqual(None, p12.get_ca_certificates()) + self.assertEqual(None, p12.get_friendlyname()) + + + def test_type_errors(self): + """ + The :py:obj:`PKCS12` setter functions (:py:obj:`set_certificate`, :py:obj:`set_privatekey`, + :py:obj:`set_ca_certificates`, and :py:obj:`set_friendlyname`) raise :py:obj:`TypeError` + when passed objects of types other than those expected. + """ + p12 = PKCS12() + self.assertRaises(TypeError, p12.set_certificate, 3) + self.assertRaises(TypeError, p12.set_certificate, PKey()) + self.assertRaises(TypeError, p12.set_certificate, X509) + self.assertRaises(TypeError, p12.set_privatekey, 3) + self.assertRaises(TypeError, p12.set_privatekey, 'legbone') + self.assertRaises(TypeError, p12.set_privatekey, X509()) + self.assertRaises(TypeError, p12.set_ca_certificates, 3) + self.assertRaises(TypeError, p12.set_ca_certificates, X509()) + self.assertRaises(TypeError, p12.set_ca_certificates, (3, 4)) + self.assertRaises(TypeError, p12.set_ca_certificates, ( PKey(), )) + self.assertRaises(TypeError, p12.set_friendlyname, 6) + self.assertRaises(TypeError, p12.set_friendlyname, ('foo', 'bar')) + + + def test_key_only(self): + """ + A :py:obj:`PKCS12` with only a private key can be exported using + :py:obj:`PKCS12.export` and loaded again using :py:obj:`load_pkcs12`. 
+ """ + passwd = b"blah" + p12 = PKCS12() + pkey = load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM) + p12.set_privatekey(pkey) + self.assertEqual(None, p12.get_certificate()) + self.assertEqual(pkey, p12.get_privatekey()) + try: + dumped_p12 = p12.export(passphrase=passwd, iter=2, maciter=3) + except Error: + # Some versions of OpenSSL will throw an exception + # for this nearly useless PKCS12 we tried to generate: + # [('PKCS12 routines', 'PKCS12_create', 'invalid null argument')] + return + p12 = load_pkcs12(dumped_p12, passwd) + self.assertEqual(None, p12.get_ca_certificates()) + self.assertEqual(None, p12.get_certificate()) + + # OpenSSL fails to bring the key back to us. So sad. Perhaps in the + # future this will be improved. + self.assertTrue(isinstance(p12.get_privatekey(), (PKey, type(None)))) + + + def test_cert_only(self): + """ + A :py:obj:`PKCS12` with only a certificate can be exported using + :py:obj:`PKCS12.export` and loaded again using :py:obj:`load_pkcs12`. + """ + passwd = b"blah" + p12 = PKCS12() + cert = load_certificate(FILETYPE_PEM, cleartextCertificatePEM) + p12.set_certificate(cert) + self.assertEqual(cert, p12.get_certificate()) + self.assertEqual(None, p12.get_privatekey()) + try: + dumped_p12 = p12.export(passphrase=passwd, iter=2, maciter=3) + except Error: + # Some versions of OpenSSL will throw an exception + # for this nearly useless PKCS12 we tried to generate: + # [('PKCS12 routines', 'PKCS12_create', 'invalid null argument')] + return + p12 = load_pkcs12(dumped_p12, passwd) + self.assertEqual(None, p12.get_privatekey()) + + # OpenSSL fails to bring the cert back to us. Groany mcgroan. + self.assertTrue(isinstance(p12.get_certificate(), (X509, type(None)))) + + # Oh ho. It puts the certificate into the ca certificates list, in + # fact. Totally bogus, I would think. Nevertheless, let's exploit + # that to check to see if it reconstructed the certificate we expected + # it to. 
At some point, hopefully this will change so that + # p12.get_certificate() is actually what returns the loaded + # certificate. + self.assertEqual( + cleartextCertificatePEM, + dump_certificate(FILETYPE_PEM, p12.get_ca_certificates()[0])) + + + def gen_pkcs12(self, cert_pem=None, key_pem=None, ca_pem=None, friendly_name=None): + """ + Generate a PKCS12 object with components from PEM. Verify that the set + functions return None. + """ + p12 = PKCS12() + if cert_pem: + ret = p12.set_certificate(load_certificate(FILETYPE_PEM, cert_pem)) + self.assertEqual(ret, None) + if key_pem: + ret = p12.set_privatekey(load_privatekey(FILETYPE_PEM, key_pem)) + self.assertEqual(ret, None) + if ca_pem: + ret = p12.set_ca_certificates((load_certificate(FILETYPE_PEM, ca_pem),)) + self.assertEqual(ret, None) + if friendly_name: + ret = p12.set_friendlyname(friendly_name) + self.assertEqual(ret, None) + return p12 + + + def check_recovery(self, p12_str, key=None, cert=None, ca=None, passwd=b"", + extra=()): + """ + Use openssl program to confirm three components are recoverable from a + PKCS12 string. + """ + if key: + recovered_key = _runopenssl( + p12_str, b"pkcs12", b"-nocerts", b"-nodes", b"-passin", + b"pass:" + passwd, *extra) + self.assertEqual(recovered_key[-len(key):], key) + if cert: + recovered_cert = _runopenssl( + p12_str, b"pkcs12", b"-clcerts", b"-nodes", b"-passin", + b"pass:" + passwd, b"-nokeys", *extra) + self.assertEqual(recovered_cert[-len(cert):], cert) + if ca: + recovered_cert = _runopenssl( + p12_str, b"pkcs12", b"-cacerts", b"-nodes", b"-passin", + b"pass:" + passwd, b"-nokeys", *extra) + self.assertEqual(recovered_cert[-len(ca):], ca) + + + def test_load_pkcs12(self): + """ + A PKCS12 string generated using the openssl command line can be loaded + with :py:obj:`load_pkcs12` and its components extracted and examined. 
+ """ + passwd = b"whatever" + pem = client_key_pem + client_cert_pem + p12_str = _runopenssl( + pem, b"pkcs12", b"-export", b"-clcerts", b"-passout", b"pass:" + passwd) + p12 = load_pkcs12(p12_str, passwd) + # verify + self.assertTrue(isinstance(p12, PKCS12)) + cert_pem = dump_certificate(FILETYPE_PEM, p12.get_certificate()) + self.assertEqual(cert_pem, client_cert_pem) + key_pem = dump_privatekey(FILETYPE_PEM, p12.get_privatekey()) + self.assertEqual(key_pem, client_key_pem) + self.assertEqual(None, p12.get_ca_certificates()) + + + def test_load_pkcs12_garbage(self): + """ + :py:obj:`load_pkcs12` raises :py:obj:`OpenSSL.crypto.Error` when passed a string + which is not a PKCS12 dump. + """ + passwd = 'whatever' + e = self.assertRaises(Error, load_pkcs12, b'fruit loops', passwd) + self.assertEqual( e.args[0][0][0], 'asn1 encoding routines') + self.assertEqual( len(e.args[0][0]), 3) + + + def test_replace(self): + """ + :py:obj:`PKCS12.set_certificate` replaces the certificate in a PKCS12 cluster. + :py:obj:`PKCS12.set_privatekey` replaces the private key. + :py:obj:`PKCS12.set_ca_certificates` replaces the CA certificates. 
+ """ + p12 = self.gen_pkcs12(client_cert_pem, client_key_pem, root_cert_pem) + p12.set_certificate(load_certificate(FILETYPE_PEM, server_cert_pem)) + p12.set_privatekey(load_privatekey(FILETYPE_PEM, server_key_pem)) + root_cert = load_certificate(FILETYPE_PEM, root_cert_pem) + client_cert = load_certificate(FILETYPE_PEM, client_cert_pem) + p12.set_ca_certificates([root_cert]) # not a tuple + self.assertEqual(1, len(p12.get_ca_certificates())) + self.assertEqual(root_cert, p12.get_ca_certificates()[0]) + p12.set_ca_certificates([client_cert, root_cert]) + self.assertEqual(2, len(p12.get_ca_certificates())) + self.assertEqual(client_cert, p12.get_ca_certificates()[0]) + self.assertEqual(root_cert, p12.get_ca_certificates()[1]) + + + def test_friendly_name(self): + """ + The *friendlyName* of a PKCS12 can be set and retrieved via + :py:obj:`PKCS12.get_friendlyname` and :py:obj:`PKCS12_set_friendlyname`, and a + :py:obj:`PKCS12` with a friendly name set can be dumped with :py:obj:`PKCS12.export`. + """ + passwd = b'Dogmeat[]{}!@#$%^&*()~`?/.,<>-_+=";:' + p12 = self.gen_pkcs12(server_cert_pem, server_key_pem, root_cert_pem) + for friendly_name in [b('Serverlicious'), None, b('###')]: + p12.set_friendlyname(friendly_name) + self.assertEqual(p12.get_friendlyname(), friendly_name) + dumped_p12 = p12.export(passphrase=passwd, iter=2, maciter=3) + reloaded_p12 = load_pkcs12(dumped_p12, passwd) + self.assertEqual( + p12.get_friendlyname(), reloaded_p12.get_friendlyname()) + # We would use the openssl program to confirm the friendly + # name, but it is not possible. The pkcs12 command + # does not store the friendly name in the cert's + # alias, which we could then extract. + self.check_recovery( + dumped_p12, key=server_key_pem, cert=server_cert_pem, + ca=root_cert_pem, passwd=passwd) + + + def test_various_empty_passphrases(self): + """ + Test that missing, None, and '' passphrases are identical for PKCS12 + export. 
+ """ + p12 = self.gen_pkcs12(client_cert_pem, client_key_pem, root_cert_pem) + passwd = b"" + dumped_p12_empty = p12.export(iter=2, maciter=0, passphrase=passwd) + dumped_p12_none = p12.export(iter=3, maciter=2, passphrase=None) + dumped_p12_nopw = p12.export(iter=9, maciter=4) + for dumped_p12 in [dumped_p12_empty, dumped_p12_none, dumped_p12_nopw]: + self.check_recovery( + dumped_p12, key=client_key_pem, cert=client_cert_pem, + ca=root_cert_pem, passwd=passwd) + + + def test_removing_ca_cert(self): + """ + Passing :py:obj:`None` to :py:obj:`PKCS12.set_ca_certificates` removes all CA + certificates. + """ + p12 = self.gen_pkcs12(server_cert_pem, server_key_pem, root_cert_pem) + p12.set_ca_certificates(None) + self.assertEqual(None, p12.get_ca_certificates()) + + + def test_export_without_mac(self): + """ + Exporting a PKCS12 with a :py:obj:`maciter` of ``-1`` excludes the MAC + entirely. + """ + passwd = b"Lake Michigan" + p12 = self.gen_pkcs12(server_cert_pem, server_key_pem, root_cert_pem) + dumped_p12 = p12.export(maciter=-1, passphrase=passwd, iter=2) + self.check_recovery( + dumped_p12, key=server_key_pem, cert=server_cert_pem, + passwd=passwd, extra=(b"-nomacver",)) + + + def test_load_without_mac(self): + """ + Loading a PKCS12 without a MAC does something other than crash. + """ + passwd = b"Lake Michigan" + p12 = self.gen_pkcs12(server_cert_pem, server_key_pem, root_cert_pem) + dumped_p12 = p12.export(maciter=-1, passphrase=passwd, iter=2) + try: + recovered_p12 = load_pkcs12(dumped_p12, passwd) + # The person who generated this PCKS12 should be flogged, + # or better yet we should have a means to determine + # whether a PCKS12 had a MAC that was verified. + # Anyway, libopenssl chooses to allow it, so the + # pyopenssl binding does as well. + self.assertTrue(isinstance(recovered_p12, PKCS12)) + except Error: + # Failing here with an exception is preferred as some openssl + # versions do. 
+ pass + + + def test_zero_len_list_for_ca(self): + """ + A PKCS12 with an empty CA certificates list can be exported. + """ + passwd = 'Hobie 18' + p12 = self.gen_pkcs12(server_cert_pem, server_key_pem) + # p12.set_ca_certificates([]) + # self.assertEqual((), p12.get_ca_certificates()) + # dumped_p12 = p12.export(passphrase=passwd, iter=3) + # self.check_recovery( + # dumped_p12, key=server_key_pem, cert=server_cert_pem, + # passwd=passwd) + + + def test_export_without_args(self): + """ + All the arguments to :py:obj:`PKCS12.export` are optional. + """ + p12 = self.gen_pkcs12(server_cert_pem, server_key_pem, root_cert_pem) + dumped_p12 = p12.export() # no args + self.check_recovery( + dumped_p12, key=server_key_pem, cert=server_cert_pem, passwd=b"") + + + def test_key_cert_mismatch(self): + """ + :py:obj:`PKCS12.export` raises an exception when a key and certificate + mismatch. + """ + p12 = self.gen_pkcs12(server_cert_pem, client_key_pem, root_cert_pem) + self.assertRaises(Error, p12.export) + + + +# These quoting functions taken directly from Twisted's twisted.python.win32. +_cmdLineQuoteRe = re.compile(br'(\\*)"') +_cmdLineQuoteRe2 = re.compile(br'(\\+)\Z') +def cmdLineQuote(s): + """ + Internal method for quoting a single command-line argument. + + See http://www.perlmonks.org/?node_id=764004 + + :type: :py:obj:`str` + :param s: A single unquoted string to quote for something that is expecting + cmd.exe-style quoting + + :rtype: :py:obj:`str` + :return: A cmd.exe-style quoted string + """ + s = _cmdLineQuoteRe2.sub(br"\1\1", _cmdLineQuoteRe.sub(br'\1\1\\"', s)) + return b'"' + s + b'"' + + + +def quoteArguments(arguments): + """ + Quote an iterable of command-line arguments for passing to CreateProcess or + a similar API. This allows the list passed to :py:obj:`reactor.spawnProcess` to + match the child process's :py:obj:`sys.argv` properly. 
+ + :type arguments: :py:obj:`iterable` of :py:obj:`str` + :param arguments: An iterable of unquoted arguments to quote + + :rtype: :py:obj:`str` + :return: A space-delimited string containing quoted versions of :py:obj:`arguments` + """ + return b' '.join(map(cmdLineQuote, arguments)) + + + +def _runopenssl(pem, *args): + """ + Run the command line openssl tool with the given arguments and write + the given PEM to its stdin. Not safe for quotes. + """ + if os.name == 'posix': + command = b"openssl " + b" ".join([ + (b"'" + arg.replace(b"'", b"'\\''") + b"'") + for arg in args]) + else: + command = b"openssl " + quoteArguments(args) + proc = Popen(native(command), shell=True, stdin=PIPE, stdout=PIPE) + proc.stdin.write(pem) + proc.stdin.close() + output = proc.stdout.read() + proc.stdout.close() + proc.wait() + return output + + + +class FunctionTests(TestCase): + """ + Tests for free-functions in the :py:obj:`OpenSSL.crypto` module. + """ + + def test_load_privatekey_invalid_format(self): + """ + :py:obj:`load_privatekey` raises :py:obj:`ValueError` if passed an unknown filetype. + """ + self.assertRaises(ValueError, load_privatekey, 100, root_key_pem) + + + def test_load_privatekey_invalid_passphrase_type(self): + """ + :py:obj:`load_privatekey` raises :py:obj:`TypeError` if passed a passphrase that is + neither a :py:obj:`str` nor a callable. + """ + self.assertRaises( + TypeError, + load_privatekey, + FILETYPE_PEM, encryptedPrivateKeyPEMPassphrase, object()) + + + def test_load_privatekey_wrong_args(self): + """ + :py:obj:`load_privatekey` raises :py:obj:`TypeError` if called with the wrong number + of arguments. + """ + self.assertRaises(TypeError, load_privatekey) + + + def test_load_privatekey_wrongPassphrase(self): + """ + :py:obj:`load_privatekey` raises :py:obj:`OpenSSL.crypto.Error` when it is passed an + encrypted PEM and an incorrect passphrase. 
+ """ + self.assertRaises( + Error, + load_privatekey, FILETYPE_PEM, encryptedPrivateKeyPEM, b("quack")) + + + def test_load_privatekey_passphraseWrongType(self): + """ + :py:obj:`load_privatekey` raises :py:obj:`ValueError` when it is passed a passphrase + with a private key encoded in a format, that doesn't support + encryption. + """ + key = load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM) + blob = dump_privatekey(FILETYPE_ASN1, key) + self.assertRaises(ValueError, + load_privatekey, FILETYPE_ASN1, blob, "secret") + + + def test_load_privatekey_passphrase(self): + """ + :py:obj:`load_privatekey` can create a :py:obj:`PKey` object from an encrypted PEM + string if given the passphrase. + """ + key = load_privatekey( + FILETYPE_PEM, encryptedPrivateKeyPEM, + encryptedPrivateKeyPEMPassphrase) + self.assertTrue(isinstance(key, PKeyType)) + + + def test_load_privatekey_passphrase_exception(self): + """ + If the passphrase callback raises an exception, that exception is raised + by :py:obj:`load_privatekey`. + """ + def cb(ignored): + raise ArithmeticError + + self.assertRaises(ArithmeticError, + load_privatekey, FILETYPE_PEM, encryptedPrivateKeyPEM, cb) + + + def test_load_privatekey_wrongPassphraseCallback(self): + """ + :py:obj:`load_privatekey` raises :py:obj:`OpenSSL.crypto.Error` when it + is passed an encrypted PEM and a passphrase callback which returns an + incorrect passphrase. + """ + called = [] + def cb(*a): + called.append(None) + return b("quack") + self.assertRaises( + Error, + load_privatekey, FILETYPE_PEM, encryptedPrivateKeyPEM, cb) + self.assertTrue(called) + + + def test_load_privatekey_passphraseCallback(self): + """ + :py:obj:`load_privatekey` can create a :py:obj:`PKey` object from an encrypted PEM + string if given a passphrase callback which returns the correct + password. 
+ """ + called = [] + def cb(writing): + called.append(writing) + return encryptedPrivateKeyPEMPassphrase + key = load_privatekey(FILETYPE_PEM, encryptedPrivateKeyPEM, cb) + self.assertTrue(isinstance(key, PKeyType)) + self.assertEqual(called, [False]) + + + def test_load_privatekey_passphrase_wrong_return_type(self): + """ + :py:obj:`load_privatekey` raises :py:obj:`ValueError` if the passphrase + callback returns something other than a byte string. + """ + self.assertRaises( + ValueError, + load_privatekey, + FILETYPE_PEM, encryptedPrivateKeyPEM, lambda *args: 3) + + + def test_dump_privatekey_wrong_args(self): + """ + :py:obj:`dump_privatekey` raises :py:obj:`TypeError` if called with the wrong number + of arguments. + """ + self.assertRaises(TypeError, dump_privatekey) + # If cipher name is given, password is required. + self.assertRaises( + TypeError, dump_privatekey, FILETYPE_PEM, PKey(), GOOD_CIPHER) + + + def test_dump_privatekey_unknown_cipher(self): + """ + :py:obj:`dump_privatekey` raises :py:obj:`ValueError` if called with an unrecognized + cipher name. + """ + key = PKey() + key.generate_key(TYPE_RSA, 512) + self.assertRaises( + ValueError, dump_privatekey, + FILETYPE_PEM, key, BAD_CIPHER, "passphrase") + + + def test_dump_privatekey_invalid_passphrase_type(self): + """ + :py:obj:`dump_privatekey` raises :py:obj:`TypeError` if called with a passphrase which + is neither a :py:obj:`str` nor a callable. + """ + key = PKey() + key.generate_key(TYPE_RSA, 512) + self.assertRaises( + TypeError, + dump_privatekey, FILETYPE_PEM, key, GOOD_CIPHER, object()) + + + def test_dump_privatekey_invalid_filetype(self): + """ + :py:obj:`dump_privatekey` raises :py:obj:`ValueError` if called with an unrecognized + filetype. 
+ """ + key = PKey() + key.generate_key(TYPE_RSA, 512) + self.assertRaises(ValueError, dump_privatekey, 100, key) + + + def test_load_privatekey_passphraseCallbackLength(self): + """ + :py:obj:`crypto.load_privatekey` should raise an error when the passphrase + provided by the callback is too long, not silently truncate it. + """ + def cb(ignored): + return "a" * 1025 + + self.assertRaises(ValueError, + load_privatekey, FILETYPE_PEM, encryptedPrivateKeyPEM, cb) + + + def test_dump_privatekey_passphrase(self): + """ + :py:obj:`dump_privatekey` writes an encrypted PEM when given a passphrase. + """ + passphrase = b("foo") + key = load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM) + pem = dump_privatekey(FILETYPE_PEM, key, GOOD_CIPHER, passphrase) + self.assertTrue(isinstance(pem, binary_type)) + loadedKey = load_privatekey(FILETYPE_PEM, pem, passphrase) + self.assertTrue(isinstance(loadedKey, PKeyType)) + self.assertEqual(loadedKey.type(), key.type()) + self.assertEqual(loadedKey.bits(), key.bits()) + + + def test_dump_privatekey_passphraseWrongType(self): + """ + :py:obj:`dump_privatekey` raises :py:obj:`ValueError` when it is passed a passphrase + with a private key encoded in a format, that doesn't support + encryption. + """ + key = load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM) + self.assertRaises(ValueError, + dump_privatekey, FILETYPE_ASN1, key, GOOD_CIPHER, "secret") + + + def test_dump_certificate(self): + """ + :py:obj:`dump_certificate` writes PEM, DER, and text. 
+ """ + pemData = cleartextCertificatePEM + cleartextPrivateKeyPEM + cert = load_certificate(FILETYPE_PEM, pemData) + dumped_pem = dump_certificate(FILETYPE_PEM, cert) + self.assertEqual(dumped_pem, cleartextCertificatePEM) + dumped_der = dump_certificate(FILETYPE_ASN1, cert) + good_der = _runopenssl(dumped_pem, b"x509", b"-outform", b"DER") + self.assertEqual(dumped_der, good_der) + cert2 = load_certificate(FILETYPE_ASN1, dumped_der) + dumped_pem2 = dump_certificate(FILETYPE_PEM, cert2) + self.assertEqual(dumped_pem2, cleartextCertificatePEM) + dumped_text = dump_certificate(FILETYPE_TEXT, cert) + good_text = _runopenssl(dumped_pem, b"x509", b"-noout", b"-text") + self.assertEqual(dumped_text, good_text) + + + def test_dump_privatekey_pem(self): + """ + :py:obj:`dump_privatekey` writes a PEM + """ + key = load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM) + self.assertTrue(key.check()) + dumped_pem = dump_privatekey(FILETYPE_PEM, key) + self.assertEqual(dumped_pem, cleartextPrivateKeyPEM) + + + def test_dump_privatekey_asn1(self): + """ + :py:obj:`dump_privatekey` writes a DER + """ + key = load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM) + dumped_pem = dump_privatekey(FILETYPE_PEM, key) + + dumped_der = dump_privatekey(FILETYPE_ASN1, key) + # XXX This OpenSSL call writes "writing RSA key" to standard out. Sad. 
+ good_der = _runopenssl(dumped_pem, b"rsa", b"-outform", b"DER") + self.assertEqual(dumped_der, good_der) + key2 = load_privatekey(FILETYPE_ASN1, dumped_der) + dumped_pem2 = dump_privatekey(FILETYPE_PEM, key2) + self.assertEqual(dumped_pem2, cleartextPrivateKeyPEM) + + + def test_dump_privatekey_text(self): + """ + :py:obj:`dump_privatekey` writes a text + """ + key = load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM) + dumped_pem = dump_privatekey(FILETYPE_PEM, key) + + dumped_text = dump_privatekey(FILETYPE_TEXT, key) + good_text = _runopenssl(dumped_pem, b"rsa", b"-noout", b"-text") + self.assertEqual(dumped_text, good_text) + + + def test_dump_certificate_request(self): + """ + :py:obj:`dump_certificate_request` writes a PEM, DER, and text. + """ + req = load_certificate_request(FILETYPE_PEM, cleartextCertificateRequestPEM) + dumped_pem = dump_certificate_request(FILETYPE_PEM, req) + self.assertEqual(dumped_pem, cleartextCertificateRequestPEM) + dumped_der = dump_certificate_request(FILETYPE_ASN1, req) + good_der = _runopenssl(dumped_pem, b"req", b"-outform", b"DER") + self.assertEqual(dumped_der, good_der) + req2 = load_certificate_request(FILETYPE_ASN1, dumped_der) + dumped_pem2 = dump_certificate_request(FILETYPE_PEM, req2) + self.assertEqual(dumped_pem2, cleartextCertificateRequestPEM) + dumped_text = dump_certificate_request(FILETYPE_TEXT, req) + good_text = _runopenssl(dumped_pem, b"req", b"-noout", b"-text") + self.assertEqual(dumped_text, good_text) + self.assertRaises(ValueError, dump_certificate_request, 100, req) + + + def test_dump_privatekey_passphraseCallback(self): + """ + :py:obj:`dump_privatekey` writes an encrypted PEM when given a callback which + returns the correct passphrase. 
+ """ + passphrase = b("foo") + called = [] + def cb(writing): + called.append(writing) + return passphrase + key = load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM) + pem = dump_privatekey(FILETYPE_PEM, key, GOOD_CIPHER, cb) + self.assertTrue(isinstance(pem, binary_type)) + self.assertEqual(called, [True]) + loadedKey = load_privatekey(FILETYPE_PEM, pem, passphrase) + self.assertTrue(isinstance(loadedKey, PKeyType)) + self.assertEqual(loadedKey.type(), key.type()) + self.assertEqual(loadedKey.bits(), key.bits()) + + + def test_dump_privatekey_passphrase_exception(self): + """ + :py:obj:`dump_privatekey` should not overwrite the exception raised + by the passphrase callback. + """ + def cb(ignored): + raise ArithmeticError + + key = load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM) + self.assertRaises(ArithmeticError, + dump_privatekey, FILETYPE_PEM, key, GOOD_CIPHER, cb) + + + def test_dump_privatekey_passphraseCallbackLength(self): + """ + :py:obj:`crypto.dump_privatekey` should raise an error when the passphrase + provided by the callback is too long, not silently truncate it. + """ + def cb(ignored): + return "a" * 1025 + + key = load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM) + self.assertRaises(ValueError, + dump_privatekey, FILETYPE_PEM, key, GOOD_CIPHER, cb) + + + def test_load_pkcs7_data(self): + """ + :py:obj:`load_pkcs7_data` accepts a PKCS#7 string and returns an instance of + :py:obj:`PKCS7Type`. + """ + pkcs7 = load_pkcs7_data(FILETYPE_PEM, pkcs7Data) + self.assertTrue(isinstance(pkcs7, PKCS7Type)) + + + def test_load_pkcs7_data_invalid(self): + """ + If the data passed to :py:obj:`load_pkcs7_data` is invalid, + :py:obj:`Error` is raised. + """ + self.assertRaises(Error, load_pkcs7_data, FILETYPE_PEM, b"foo") + + + +class LoadCertificateTests(TestCase): + """ + Tests for :py:obj:`load_certificate_request`. 
+ """ + def test_badFileType(self): + """ + If the file type passed to :py:obj:`load_certificate_request` is + neither :py:obj:`FILETYPE_PEM` nor :py:obj:`FILETYPE_ASN1` then + :py:class:`ValueError` is raised. + """ + self.assertRaises(ValueError, load_certificate_request, object(), b"") + + + +class PKCS7Tests(TestCase): + """ + Tests for :py:obj:`PKCS7Type`. + """ + def test_type(self): + """ + :py:obj:`PKCS7Type` is a type object. + """ + self.assertTrue(isinstance(PKCS7Type, type)) + self.assertEqual(PKCS7Type.__name__, 'PKCS7') + + # XXX This doesn't currently work. + # self.assertIdentical(PKCS7, PKCS7Type) + + + # XXX Opposite results for all these following methods + + def test_type_is_signed_wrong_args(self): + """ + :py:obj:`PKCS7Type.type_is_signed` raises :py:obj:`TypeError` if called with any + arguments. + """ + pkcs7 = load_pkcs7_data(FILETYPE_PEM, pkcs7Data) + self.assertRaises(TypeError, pkcs7.type_is_signed, None) + + + def test_type_is_signed(self): + """ + :py:obj:`PKCS7Type.type_is_signed` returns :py:obj:`True` if the PKCS7 object is of + the type *signed*. + """ + pkcs7 = load_pkcs7_data(FILETYPE_PEM, pkcs7Data) + self.assertTrue(pkcs7.type_is_signed()) + + + def test_type_is_enveloped_wrong_args(self): + """ + :py:obj:`PKCS7Type.type_is_enveloped` raises :py:obj:`TypeError` if called with any + arguments. + """ + pkcs7 = load_pkcs7_data(FILETYPE_PEM, pkcs7Data) + self.assertRaises(TypeError, pkcs7.type_is_enveloped, None) + + + def test_type_is_enveloped(self): + """ + :py:obj:`PKCS7Type.type_is_enveloped` returns :py:obj:`False` if the PKCS7 object is + not of the type *enveloped*. + """ + pkcs7 = load_pkcs7_data(FILETYPE_PEM, pkcs7Data) + self.assertFalse(pkcs7.type_is_enveloped()) + + + def test_type_is_signedAndEnveloped_wrong_args(self): + """ + :py:obj:`PKCS7Type.type_is_signedAndEnveloped` raises :py:obj:`TypeError` if called + with any arguments. 
+ """ + pkcs7 = load_pkcs7_data(FILETYPE_PEM, pkcs7Data) + self.assertRaises(TypeError, pkcs7.type_is_signedAndEnveloped, None) + + + def test_type_is_signedAndEnveloped(self): + """ + :py:obj:`PKCS7Type.type_is_signedAndEnveloped` returns :py:obj:`False` if the PKCS7 + object is not of the type *signed and enveloped*. + """ + pkcs7 = load_pkcs7_data(FILETYPE_PEM, pkcs7Data) + self.assertFalse(pkcs7.type_is_signedAndEnveloped()) + + + def test_type_is_data(self): + """ + :py:obj:`PKCS7Type.type_is_data` returns :py:obj:`False` if the PKCS7 object is not of + the type data. + """ + pkcs7 = load_pkcs7_data(FILETYPE_PEM, pkcs7Data) + self.assertFalse(pkcs7.type_is_data()) + + + def test_type_is_data_wrong_args(self): + """ + :py:obj:`PKCS7Type.type_is_data` raises :py:obj:`TypeError` if called with any + arguments. + """ + pkcs7 = load_pkcs7_data(FILETYPE_PEM, pkcs7Data) + self.assertRaises(TypeError, pkcs7.type_is_data, None) + + + def test_get_type_name_wrong_args(self): + """ + :py:obj:`PKCS7Type.get_type_name` raises :py:obj:`TypeError` if called with any + arguments. + """ + pkcs7 = load_pkcs7_data(FILETYPE_PEM, pkcs7Data) + self.assertRaises(TypeError, pkcs7.get_type_name, None) + + + def test_get_type_name(self): + """ + :py:obj:`PKCS7Type.get_type_name` returns a :py:obj:`str` giving the type name. + """ + pkcs7 = load_pkcs7_data(FILETYPE_PEM, pkcs7Data) + self.assertEquals(pkcs7.get_type_name(), b('pkcs7-signedData')) + + + def test_attribute(self): + """ + If an attribute other than one of the methods tested here is accessed on + an instance of :py:obj:`PKCS7Type`, :py:obj:`AttributeError` is raised. + """ + pkcs7 = load_pkcs7_data(FILETYPE_PEM, pkcs7Data) + self.assertRaises(AttributeError, getattr, pkcs7, "foo") + + + +class NetscapeSPKITests(TestCase, _PKeyInteractionTestsMixin): + """ + Tests for :py:obj:`OpenSSL.crypto.NetscapeSPKI`. + """ + def signable(self): + """ + Return a new :py:obj:`NetscapeSPKI` for use with signing tests. 
+ """ + return NetscapeSPKI() + + + def test_type(self): + """ + :py:obj:`NetscapeSPKI` and :py:obj:`NetscapeSPKIType` refer to the same type object + and can be used to create instances of that type. + """ + self.assertIdentical(NetscapeSPKI, NetscapeSPKIType) + self.assertConsistentType(NetscapeSPKI, 'NetscapeSPKI') + + + def test_construction(self): + """ + :py:obj:`NetscapeSPKI` returns an instance of :py:obj:`NetscapeSPKIType`. + """ + nspki = NetscapeSPKI() + self.assertTrue(isinstance(nspki, NetscapeSPKIType)) + + + def test_invalid_attribute(self): + """ + Accessing a non-existent attribute of a :py:obj:`NetscapeSPKI` instance causes + an :py:obj:`AttributeError` to be raised. + """ + nspki = NetscapeSPKI() + self.assertRaises(AttributeError, lambda: nspki.foo) + + + def test_b64_encode(self): + """ + :py:obj:`NetscapeSPKI.b64_encode` encodes the certificate to a base64 blob. + """ + nspki = NetscapeSPKI() + blob = nspki.b64_encode() + self.assertTrue(isinstance(blob, binary_type)) + + + +class RevokedTests(TestCase): + """ + Tests for :py:obj:`OpenSSL.crypto.Revoked` + """ + def test_construction(self): + """ + Confirm we can create :py:obj:`OpenSSL.crypto.Revoked`. Check + that it is empty. + """ + revoked = Revoked() + self.assertTrue(isinstance(revoked, Revoked)) + self.assertEquals(type(revoked), Revoked) + self.assertEquals(revoked.get_serial(), b('00')) + self.assertEquals(revoked.get_rev_date(), None) + self.assertEquals(revoked.get_reason(), None) + + + def test_construction_wrong_args(self): + """ + Calling :py:obj:`OpenSSL.crypto.Revoked` with any arguments results + in a :py:obj:`TypeError` being raised. + """ + self.assertRaises(TypeError, Revoked, None) + self.assertRaises(TypeError, Revoked, 1) + self.assertRaises(TypeError, Revoked, "foo") + + + def test_serial(self): + """ + Confirm we can set and get serial numbers from + :py:obj:`OpenSSL.crypto.Revoked`. Confirm errors are handled + with grace. 
+ """ + revoked = Revoked() + ret = revoked.set_serial(b('10b')) + self.assertEquals(ret, None) + ser = revoked.get_serial() + self.assertEquals(ser, b('010B')) + + revoked.set_serial(b('31ppp')) # a type error would be nice + ser = revoked.get_serial() + self.assertEquals(ser, b('31')) + + self.assertRaises(ValueError, revoked.set_serial, b('pqrst')) + self.assertRaises(TypeError, revoked.set_serial, 100) + self.assertRaises(TypeError, revoked.get_serial, 1) + self.assertRaises(TypeError, revoked.get_serial, None) + self.assertRaises(TypeError, revoked.get_serial, "") + + + def test_date(self): + """ + Confirm we can set and get revocation dates from + :py:obj:`OpenSSL.crypto.Revoked`. Confirm errors are handled + with grace. + """ + revoked = Revoked() + date = revoked.get_rev_date() + self.assertEquals(date, None) + + now = b(datetime.now().strftime("%Y%m%d%H%M%SZ")) + ret = revoked.set_rev_date(now) + self.assertEqual(ret, None) + date = revoked.get_rev_date() + self.assertEqual(date, now) + + + def test_reason(self): + """ + Confirm we can set and get revocation reasons from + :py:obj:`OpenSSL.crypto.Revoked`. The "get" need to work + as "set". Likewise, each reason of all_reasons() must work. + """ + revoked = Revoked() + for r in revoked.all_reasons(): + for x in range(2): + ret = revoked.set_reason(r) + self.assertEquals(ret, None) + reason = revoked.get_reason() + self.assertEquals( + reason.lower().replace(b(' '), b('')), + r.lower().replace(b(' '), b(''))) + r = reason # again with the resp of get + + revoked.set_reason(None) + self.assertEqual(revoked.get_reason(), None) + + + def test_set_reason_wrong_arguments(self): + """ + Calling :py:obj:`OpenSSL.crypto.Revoked.set_reason` with other than + one argument, or an argument which isn't a valid reason, + results in :py:obj:`TypeError` or :py:obj:`ValueError` being raised. 
+ """ + revoked = Revoked() + self.assertRaises(TypeError, revoked.set_reason, 100) + self.assertRaises(ValueError, revoked.set_reason, b('blue')) + + + def test_get_reason_wrong_arguments(self): + """ + Calling :py:obj:`OpenSSL.crypto.Revoked.get_reason` with any + arguments results in :py:obj:`TypeError` being raised. + """ + revoked = Revoked() + self.assertRaises(TypeError, revoked.get_reason, None) + self.assertRaises(TypeError, revoked.get_reason, 1) + self.assertRaises(TypeError, revoked.get_reason, "foo") + + + +class CRLTests(TestCase): + """ + Tests for :py:obj:`OpenSSL.crypto.CRL` + """ + cert = load_certificate(FILETYPE_PEM, cleartextCertificatePEM) + pkey = load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM) + + def test_construction(self): + """ + Confirm we can create :py:obj:`OpenSSL.crypto.CRL`. Check + that it is empty + """ + crl = CRL() + self.assertTrue( isinstance(crl, CRL) ) + self.assertEqual(crl.get_revoked(), None) + + + def test_construction_wrong_args(self): + """ + Calling :py:obj:`OpenSSL.crypto.CRL` with any number of arguments + results in a :py:obj:`TypeError` being raised. + """ + self.assertRaises(TypeError, CRL, 1) + self.assertRaises(TypeError, CRL, "") + self.assertRaises(TypeError, CRL, None) + + + def test_export(self): + """ + Use python to create a simple CRL with a revocation, and export + the CRL in formats of PEM, DER and text. Those outputs are verified + with the openssl program. 
+ """ + crl = CRL() + revoked = Revoked() + now = b(datetime.now().strftime("%Y%m%d%H%M%SZ")) + revoked.set_rev_date(now) + revoked.set_serial(b('3ab')) + revoked.set_reason(b('sUpErSeDEd')) + crl.add_revoked(revoked) + + # PEM format + dumped_crl = crl.export(self.cert, self.pkey, days=20) + text = _runopenssl(dumped_crl, b"crl", b"-noout", b"-text") + text.index(b('Serial Number: 03AB')) + text.index(b('Superseded')) + text.index(b('Issuer: /C=US/ST=IL/L=Chicago/O=Testing/CN=Testing Root CA')) + + # DER format + dumped_crl = crl.export(self.cert, self.pkey, FILETYPE_ASN1) + text = _runopenssl(dumped_crl, b"crl", b"-noout", b"-text", b"-inform", b"DER") + text.index(b('Serial Number: 03AB')) + text.index(b('Superseded')) + text.index(b('Issuer: /C=US/ST=IL/L=Chicago/O=Testing/CN=Testing Root CA')) + + # text format + dumped_text = crl.export(self.cert, self.pkey, type=FILETYPE_TEXT) + self.assertEqual(text, dumped_text) + + + def test_export_invalid(self): + """ + If :py:obj:`CRL.export` is used with an uninitialized :py:obj:`X509` + instance, :py:obj:`OpenSSL.crypto.Error` is raised. + """ + crl = CRL() + self.assertRaises(Error, crl.export, X509(), PKey()) + + + def test_add_revoked_keyword(self): + """ + :py:obj:`OpenSSL.CRL.add_revoked` accepts its single argument as the + ``revoked`` keyword argument. + """ + crl = CRL() + revoked = Revoked() + crl.add_revoked(revoked=revoked) + self.assertTrue(isinstance(crl.get_revoked()[0], Revoked)) + + + def test_export_wrong_args(self): + """ + Calling :py:obj:`OpenSSL.CRL.export` with fewer than two or more than + four arguments, or with arguments other than the certificate, + private key, integer file type, and integer number of days it + expects, results in a :py:obj:`TypeError` being raised. 
+ """ + crl = CRL() + self.assertRaises(TypeError, crl.export) + self.assertRaises(TypeError, crl.export, self.cert) + self.assertRaises(TypeError, crl.export, self.cert, self.pkey, FILETYPE_PEM, 10, "foo") + + self.assertRaises(TypeError, crl.export, None, self.pkey, FILETYPE_PEM, 10) + self.assertRaises(TypeError, crl.export, self.cert, None, FILETYPE_PEM, 10) + self.assertRaises(TypeError, crl.export, self.cert, self.pkey, None, 10) + self.assertRaises(TypeError, crl.export, self.cert, FILETYPE_PEM, None) + + + def test_export_unknown_filetype(self): + """ + Calling :py:obj:`OpenSSL.CRL.export` with a file type other than + :py:obj:`FILETYPE_PEM`, :py:obj:`FILETYPE_ASN1`, or :py:obj:`FILETYPE_TEXT` results + in a :py:obj:`ValueError` being raised. + """ + crl = CRL() + self.assertRaises(ValueError, crl.export, self.cert, self.pkey, 100, 10) + + + def test_get_revoked(self): + """ + Use python to create a simple CRL with two revocations. + Get back the :py:obj:`Revoked` using :py:obj:`OpenSSL.CRL.get_revoked` and + verify them. + """ + crl = CRL() + + revoked = Revoked() + now = b(datetime.now().strftime("%Y%m%d%H%M%SZ")) + revoked.set_rev_date(now) + revoked.set_serial(b('3ab')) + crl.add_revoked(revoked) + revoked.set_serial(b('100')) + revoked.set_reason(b('sUpErSeDEd')) + crl.add_revoked(revoked) + + revs = crl.get_revoked() + self.assertEqual(len(revs), 2) + self.assertEqual(type(revs[0]), Revoked) + self.assertEqual(type(revs[1]), Revoked) + self.assertEqual(revs[0].get_serial(), b('03AB')) + self.assertEqual(revs[1].get_serial(), b('0100')) + self.assertEqual(revs[0].get_rev_date(), now) + self.assertEqual(revs[1].get_rev_date(), now) + + + def test_get_revoked_wrong_args(self): + """ + Calling :py:obj:`OpenSSL.CRL.get_revoked` with any arguments results + in a :py:obj:`TypeError` being raised. 
+ """ + crl = CRL() + self.assertRaises(TypeError, crl.get_revoked, None) + self.assertRaises(TypeError, crl.get_revoked, 1) + self.assertRaises(TypeError, crl.get_revoked, "") + self.assertRaises(TypeError, crl.get_revoked, "", 1, None) + + + def test_add_revoked_wrong_args(self): + """ + Calling :py:obj:`OpenSSL.CRL.add_revoked` with other than one + argument results in a :py:obj:`TypeError` being raised. + """ + crl = CRL() + self.assertRaises(TypeError, crl.add_revoked) + self.assertRaises(TypeError, crl.add_revoked, 1, 2) + self.assertRaises(TypeError, crl.add_revoked, "foo", "bar") + + + def test_load_crl(self): + """ + Load a known CRL and inspect its revocations. Both + PEM and DER formats are loaded. + """ + crl = load_crl(FILETYPE_PEM, crlData) + revs = crl.get_revoked() + self.assertEqual(len(revs), 2) + self.assertEqual(revs[0].get_serial(), b('03AB')) + self.assertEqual(revs[0].get_reason(), None) + self.assertEqual(revs[1].get_serial(), b('0100')) + self.assertEqual(revs[1].get_reason(), b('Superseded')) + + der = _runopenssl(crlData, b"crl", b"-outform", b"DER") + crl = load_crl(FILETYPE_ASN1, der) + revs = crl.get_revoked() + self.assertEqual(len(revs), 2) + self.assertEqual(revs[0].get_serial(), b('03AB')) + self.assertEqual(revs[0].get_reason(), None) + self.assertEqual(revs[1].get_serial(), b('0100')) + self.assertEqual(revs[1].get_reason(), b('Superseded')) + + + def test_load_crl_wrong_args(self): + """ + Calling :py:obj:`OpenSSL.crypto.load_crl` with other than two + arguments results in a :py:obj:`TypeError` being raised. + """ + self.assertRaises(TypeError, load_crl) + self.assertRaises(TypeError, load_crl, FILETYPE_PEM) + self.assertRaises(TypeError, load_crl, FILETYPE_PEM, crlData, None) + + + def test_load_crl_bad_filetype(self): + """ + Calling :py:obj:`OpenSSL.crypto.load_crl` with an unknown file type + raises a :py:obj:`ValueError`. 
+ """ + self.assertRaises(ValueError, load_crl, 100, crlData) + + + def test_load_crl_bad_data(self): + """ + Calling :py:obj:`OpenSSL.crypto.load_crl` with file data which can't + be loaded raises a :py:obj:`OpenSSL.crypto.Error`. + """ + self.assertRaises(Error, load_crl, FILETYPE_PEM, b"hello, world") + + + +class SignVerifyTests(TestCase): + """ + Tests for :py:obj:`OpenSSL.crypto.sign` and :py:obj:`OpenSSL.crypto.verify`. + """ + def test_sign_verify(self): + """ + :py:obj:`sign` generates a cryptographic signature which :py:obj:`verify` can check. + """ + content = b( + "It was a bright cold day in April, and the clocks were striking " + "thirteen. Winston Smith, his chin nuzzled into his breast in an " + "effort to escape the vile wind, slipped quickly through the " + "glass doors of Victory Mansions, though not quickly enough to " + "prevent a swirl of gritty dust from entering along with him.") + + # sign the content with this private key + priv_key = load_privatekey(FILETYPE_PEM, root_key_pem) + # verify the content with this cert + good_cert = load_certificate(FILETYPE_PEM, root_cert_pem) + # certificate unrelated to priv_key, used to trigger an error + bad_cert = load_certificate(FILETYPE_PEM, server_cert_pem) + + for digest in ['md5', 'sha1']: + sig = sign(priv_key, content, digest) + + # Verify the signature of content, will throw an exception if error. + verify(good_cert, sig, content, digest) + + # This should fail because the certificate doesn't match the + # private key that was used to sign the content. + self.assertRaises(Error, verify, bad_cert, sig, content, digest) + + # This should fail because we've "tainted" the content after + # signing it. 
+ self.assertRaises( + Error, verify, + good_cert, sig, content + b("tainted"), digest) + + # test that unknown digest types fail + self.assertRaises( + ValueError, sign, priv_key, content, "strange-digest") + self.assertRaises( + ValueError, verify, good_cert, sig, content, "strange-digest") + + + def test_sign_nulls(self): + """ + :py:obj:`sign` produces a signature for a string with embedded nulls. + """ + content = b("Watch out! \0 Did you see it?") + priv_key = load_privatekey(FILETYPE_PEM, root_key_pem) + good_cert = load_certificate(FILETYPE_PEM, root_cert_pem) + sig = sign(priv_key, content, "sha1") + verify(good_cert, sig, content, "sha1") + + +if __name__ == '__main__': + main() diff --git a/Linux_i686/lib/python2.7/site-packages/OpenSSL/test/test_rand.py b/Linux_i686/lib/python2.7/site-packages/OpenSSL/test/test_rand.py new file mode 100644 index 0000000..c52cb6b --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/OpenSSL/test/test_rand.py @@ -0,0 +1,203 @@ +# Copyright (c) Frederick Dean +# See LICENSE for details. + +""" +Unit tests for :py:obj:`OpenSSL.rand`. +""" + +from unittest import main +import os +import stat +import sys + +from OpenSSL.test.util import TestCase, b +from OpenSSL import rand + + +class RandTests(TestCase): + def test_bytes_wrong_args(self): + """ + :py:obj:`OpenSSL.rand.bytes` raises :py:obj:`TypeError` if called with the wrong + number of arguments or with a non-:py:obj:`int` argument. + """ + self.assertRaises(TypeError, rand.bytes) + self.assertRaises(TypeError, rand.bytes, None) + self.assertRaises(TypeError, rand.bytes, 3, None) + + + def test_insufficientMemory(self): + """ + :py:obj:`OpenSSL.rand.bytes` raises :py:obj:`MemoryError` if more bytes + are requested than will fit in memory. + """ + self.assertRaises(MemoryError, rand.bytes, sys.maxsize) + + + def test_bytes(self): + """ + Verify that we can obtain bytes from rand_bytes() and + that they are different each time. 
Test the parameter + of rand_bytes() for bad values. + """ + b1 = rand.bytes(50) + self.assertEqual(len(b1), 50) + b2 = rand.bytes(num_bytes=50) # parameter by name + self.assertNotEqual(b1, b2) # Hip, Hip, Horay! FIPS complaince + b3 = rand.bytes(num_bytes=0) + self.assertEqual(len(b3), 0) + exc = self.assertRaises(ValueError, rand.bytes, -1) + self.assertEqual(str(exc), "num_bytes must not be negative") + + + def test_add_wrong_args(self): + """ + When called with the wrong number of arguments, or with arguments not of + type :py:obj:`str` and :py:obj:`int`, :py:obj:`OpenSSL.rand.add` raises :py:obj:`TypeError`. + """ + self.assertRaises(TypeError, rand.add) + self.assertRaises(TypeError, rand.add, b("foo"), None) + self.assertRaises(TypeError, rand.add, None, 3) + self.assertRaises(TypeError, rand.add, b("foo"), 3, None) + + + def test_add(self): + """ + :py:obj:`OpenSSL.rand.add` adds entropy to the PRNG. + """ + rand.add(b('hamburger'), 3) + + + def test_seed_wrong_args(self): + """ + When called with the wrong number of arguments, or with a non-:py:obj:`str` + argument, :py:obj:`OpenSSL.rand.seed` raises :py:obj:`TypeError`. + """ + self.assertRaises(TypeError, rand.seed) + self.assertRaises(TypeError, rand.seed, None) + self.assertRaises(TypeError, rand.seed, b("foo"), None) + + + def test_seed(self): + """ + :py:obj:`OpenSSL.rand.seed` adds entropy to the PRNG. + """ + rand.seed(b('milk shake')) + + + def test_status_wrong_args(self): + """ + :py:obj:`OpenSSL.rand.status` raises :py:obj:`TypeError` when called with any + arguments. + """ + self.assertRaises(TypeError, rand.status, None) + + + def test_status(self): + """ + :py:obj:`OpenSSL.rand.status` returns :py:obj:`True` if the PRNG has sufficient + entropy, :py:obj:`False` otherwise. + """ + # It's hard to know what it is actually going to return. Different + # OpenSSL random engines decide differently whether they have enough + # entropy or not. 
+ self.assertTrue(rand.status() in (1, 2)) + + + def test_egd_wrong_args(self): + """ + :py:obj:`OpenSSL.rand.egd` raises :py:obj:`TypeError` when called with the wrong + number of arguments or with arguments not of type :py:obj:`str` and :py:obj:`int`. + """ + self.assertRaises(TypeError, rand.egd) + self.assertRaises(TypeError, rand.egd, None) + self.assertRaises(TypeError, rand.egd, "foo", None) + self.assertRaises(TypeError, rand.egd, None, 3) + self.assertRaises(TypeError, rand.egd, "foo", 3, None) + + + def test_egd_missing(self): + """ + :py:obj:`OpenSSL.rand.egd` returns :py:obj:`0` or :py:obj:`-1` if the + EGD socket passed to it does not exist. + """ + result = rand.egd(self.mktemp()) + expected = (-1, 0) + self.assertTrue( + result in expected, + "%r not in %r" % (result, expected)) + + + def test_egd_missing_and_bytes(self): + """ + :py:obj:`OpenSSL.rand.egd` returns :py:obj:`0` or :py:obj:`-1` if the + EGD socket passed to it does not exist even if a size argument is + explicitly passed. + """ + result = rand.egd(self.mktemp(), 1024) + expected = (-1, 0) + self.assertTrue( + result in expected, + "%r not in %r" % (result, expected)) + + + def test_cleanup_wrong_args(self): + """ + :py:obj:`OpenSSL.rand.cleanup` raises :py:obj:`TypeError` when called with any + arguments. + """ + self.assertRaises(TypeError, rand.cleanup, None) + + + def test_cleanup(self): + """ + :py:obj:`OpenSSL.rand.cleanup` releases the memory used by the PRNG and returns + :py:obj:`None`. + """ + self.assertIdentical(rand.cleanup(), None) + + + def test_load_file_wrong_args(self): + """ + :py:obj:`OpenSSL.rand.load_file` raises :py:obj:`TypeError` when called the wrong + number of arguments or arguments not of type :py:obj:`str` and :py:obj:`int`. 
+ """ + self.assertRaises(TypeError, rand.load_file) + self.assertRaises(TypeError, rand.load_file, "foo", None) + self.assertRaises(TypeError, rand.load_file, None, 1) + self.assertRaises(TypeError, rand.load_file, "foo", 1, None) + + + def test_write_file_wrong_args(self): + """ + :py:obj:`OpenSSL.rand.write_file` raises :py:obj:`TypeError` when called with the + wrong number of arguments or a non-:py:obj:`str` argument. + """ + self.assertRaises(TypeError, rand.write_file) + self.assertRaises(TypeError, rand.write_file, None) + self.assertRaises(TypeError, rand.write_file, "foo", None) + + + def test_files(self): + """ + Test reading and writing of files via rand functions. + """ + # Write random bytes to a file + tmpfile = self.mktemp() + # Make sure it exists (so cleanup definitely succeeds) + fObj = open(tmpfile, 'w') + fObj.close() + try: + rand.write_file(tmpfile) + # Verify length of written file + size = os.stat(tmpfile)[stat.ST_SIZE] + self.assertEqual(1024, size) + # Read random bytes from file + rand.load_file(tmpfile) + rand.load_file(tmpfile, 4) # specify a length + finally: + # Cleanup + os.unlink(tmpfile) + + +if __name__ == '__main__': + main() diff --git a/Linux_i686/lib/python2.7/site-packages/OpenSSL/test/test_ssl.py b/Linux_i686/lib/python2.7/site-packages/OpenSSL/test/test_ssl.py new file mode 100644 index 0000000..a6f0127 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/OpenSSL/test/test_ssl.py @@ -0,0 +1,2736 @@ +# Copyright (C) Jean-Paul Calderone +# See LICENSE for details. + +""" +Unit tests for :py:obj:`OpenSSL.SSL`. 
+""" + +from gc import collect, get_referrers +from errno import ECONNREFUSED, EINPROGRESS, EWOULDBLOCK, EPIPE, ESHUTDOWN +from sys import platform, version_info +from socket import SHUT_RDWR, error, socket +from os import makedirs +from os.path import join +from unittest import main +from weakref import ref + +from six import PY3, u + +from OpenSSL.crypto import TYPE_RSA, FILETYPE_PEM +from OpenSSL.crypto import PKey, X509, X509Extension, X509Store +from OpenSSL.crypto import dump_privatekey, load_privatekey +from OpenSSL.crypto import dump_certificate, load_certificate + +from OpenSSL.SSL import OPENSSL_VERSION_NUMBER, SSLEAY_VERSION, SSLEAY_CFLAGS +from OpenSSL.SSL import SSLEAY_PLATFORM, SSLEAY_DIR, SSLEAY_BUILT_ON +from OpenSSL.SSL import SENT_SHUTDOWN, RECEIVED_SHUTDOWN +from OpenSSL.SSL import ( + SSLv2_METHOD, SSLv3_METHOD, SSLv23_METHOD, TLSv1_METHOD, + TLSv1_1_METHOD, TLSv1_2_METHOD) +from OpenSSL.SSL import OP_SINGLE_DH_USE, OP_NO_SSLv2, OP_NO_SSLv3 +from OpenSSL.SSL import ( + VERIFY_PEER, VERIFY_FAIL_IF_NO_PEER_CERT, VERIFY_CLIENT_ONCE, VERIFY_NONE) + +from OpenSSL.SSL import ( + SESS_CACHE_OFF, SESS_CACHE_CLIENT, SESS_CACHE_SERVER, SESS_CACHE_BOTH, + SESS_CACHE_NO_AUTO_CLEAR, SESS_CACHE_NO_INTERNAL_LOOKUP, + SESS_CACHE_NO_INTERNAL_STORE, SESS_CACHE_NO_INTERNAL) + +from OpenSSL.SSL import ( + Error, SysCallError, WantReadError, WantWriteError, ZeroReturnError) +from OpenSSL.SSL import ( + Context, ContextType, Session, Connection, ConnectionType, SSLeay_version) + +from OpenSSL.test.util import TestCase, b +from OpenSSL.test.test_crypto import ( + cleartextCertificatePEM, cleartextPrivateKeyPEM) +from OpenSSL.test.test_crypto import ( + client_cert_pem, client_key_pem, server_cert_pem, server_key_pem, + root_cert_pem) + +try: + from OpenSSL.SSL import OP_NO_QUERY_MTU +except ImportError: + OP_NO_QUERY_MTU = None +try: + from OpenSSL.SSL import OP_COOKIE_EXCHANGE +except ImportError: + OP_COOKIE_EXCHANGE = None +try: + from OpenSSL.SSL import 
OP_NO_TICKET +except ImportError: + OP_NO_TICKET = None + +try: + from OpenSSL.SSL import OP_NO_COMPRESSION +except ImportError: + OP_NO_COMPRESSION = None + +try: + from OpenSSL.SSL import MODE_RELEASE_BUFFERS +except ImportError: + MODE_RELEASE_BUFFERS = None + +try: + from OpenSSL.SSL import OP_NO_TLSv1, OP_NO_TLSv1_1, OP_NO_TLSv1_2 +except ImportError: + OP_NO_TLSv1 = OP_NO_TLSv1_1 = OP_NO_TLSv1_2 = None + +from OpenSSL.SSL import ( + SSL_ST_CONNECT, SSL_ST_ACCEPT, SSL_ST_MASK, SSL_ST_INIT, SSL_ST_BEFORE, + SSL_ST_OK, SSL_ST_RENEGOTIATE, + SSL_CB_LOOP, SSL_CB_EXIT, SSL_CB_READ, SSL_CB_WRITE, SSL_CB_ALERT, + SSL_CB_READ_ALERT, SSL_CB_WRITE_ALERT, SSL_CB_ACCEPT_LOOP, + SSL_CB_ACCEPT_EXIT, SSL_CB_CONNECT_LOOP, SSL_CB_CONNECT_EXIT, + SSL_CB_HANDSHAKE_START, SSL_CB_HANDSHAKE_DONE) + +# openssl dhparam 128 -out dh-128.pem (note that 128 is a small number of bits +# to use) +dhparam = """\ +-----BEGIN DH PARAMETERS----- +MBYCEQCobsg29c9WZP/54oAPcwiDAgEC +-----END DH PARAMETERS----- +""" + + +def verify_cb(conn, cert, errnum, depth, ok): + return ok + + +def socket_pair(): + """ + Establish and return a pair of network sockets connected to each other. + """ + # Connect a pair of sockets + port = socket() + port.bind(('', 0)) + port.listen(1) + client = socket() + client.setblocking(False) + client.connect_ex(("127.0.0.1", port.getsockname()[1])) + client.setblocking(True) + server = port.accept()[0] + + # Let's pass some unencrypted data to make sure our socket connection is + # fine. Just one byte, so we don't have to worry about buffers getting + # filled up or fragmentation. + server.send(b("x")) + assert client.recv(1024) == b("x") + client.send(b("y")) + assert server.recv(1024) == b("y") + + # Most of our callers want non-blocking sockets, make it easy for them. 
+ server.setblocking(False) + client.setblocking(False) + + return (server, client) + + + +def handshake(client, server): + conns = [client, server] + while conns: + for conn in conns: + try: + conn.do_handshake() + except WantReadError: + pass + else: + conns.remove(conn) + + +def _create_certificate_chain(): + """ + Construct and return a chain of certificates. + + 1. A new self-signed certificate authority certificate (cacert) + 2. A new intermediate certificate signed by cacert (icert) + 3. A new server certificate signed by icert (scert) + """ + caext = X509Extension(b('basicConstraints'), False, b('CA:true')) + + # Step 1 + cakey = PKey() + cakey.generate_key(TYPE_RSA, 512) + cacert = X509() + cacert.get_subject().commonName = "Authority Certificate" + cacert.set_issuer(cacert.get_subject()) + cacert.set_pubkey(cakey) + cacert.set_notBefore(b("20000101000000Z")) + cacert.set_notAfter(b("20200101000000Z")) + cacert.add_extensions([caext]) + cacert.set_serial_number(0) + cacert.sign(cakey, "sha1") + + # Step 2 + ikey = PKey() + ikey.generate_key(TYPE_RSA, 512) + icert = X509() + icert.get_subject().commonName = "Intermediate Certificate" + icert.set_issuer(cacert.get_subject()) + icert.set_pubkey(ikey) + icert.set_notBefore(b("20000101000000Z")) + icert.set_notAfter(b("20200101000000Z")) + icert.add_extensions([caext]) + icert.set_serial_number(0) + icert.sign(cakey, "sha1") + + # Step 3 + skey = PKey() + skey.generate_key(TYPE_RSA, 512) + scert = X509() + scert.get_subject().commonName = "Server Certificate" + scert.set_issuer(icert.get_subject()) + scert.set_pubkey(skey) + scert.set_notBefore(b("20000101000000Z")) + scert.set_notAfter(b("20200101000000Z")) + scert.add_extensions([ + X509Extension(b('basicConstraints'), True, b('CA:false'))]) + scert.set_serial_number(0) + scert.sign(ikey, "sha1") + + return [(cakey, cacert), (ikey, icert), (skey, scert)] + + + +class _LoopbackMixin: + """ + Helper mixin which defines methods for creating a connected socket 
pair and + for forcing two connected SSL sockets to talk to each other via memory BIOs. + """ + def _loopbackClientFactory(self, socket): + client = Connection(Context(TLSv1_METHOD), socket) + client.set_connect_state() + return client + + + def _loopbackServerFactory(self, socket): + ctx = Context(TLSv1_METHOD) + ctx.use_privatekey(load_privatekey(FILETYPE_PEM, server_key_pem)) + ctx.use_certificate(load_certificate(FILETYPE_PEM, server_cert_pem)) + server = Connection(ctx, socket) + server.set_accept_state() + return server + + + def _loopback(self, serverFactory=None, clientFactory=None): + if serverFactory is None: + serverFactory = self._loopbackServerFactory + if clientFactory is None: + clientFactory = self._loopbackClientFactory + + (server, client) = socket_pair() + server = serverFactory(server) + client = clientFactory(client) + + handshake(client, server) + + server.setblocking(True) + client.setblocking(True) + return server, client + + + def _interactInMemory(self, client_conn, server_conn): + """ + Try to read application bytes from each of the two :py:obj:`Connection` + objects. Copy bytes back and forth between their send/receive buffers + for as long as there is anything to copy. When there is nothing more + to copy, return :py:obj:`None`. If one of them actually manages to deliver + some application bytes, return a two-tuple of the connection from which + the bytes were read and the bytes themselves. + """ + wrote = True + while wrote: + # Loop until neither side has anything to say + wrote = False + + # Copy stuff from each side's send buffer to the other side's + # receive buffer. + for (read, write) in [(client_conn, server_conn), + (server_conn, client_conn)]: + + # Give the side a chance to generate some more bytes, or + # succeed. + try: + data = read.recv(2 ** 16) + except WantReadError: + # It didn't succeed, so we'll hope it generated some + # output. 
+ pass + else: + # It did succeed, so we'll stop now and let the caller deal + # with it. + return (read, data) + + while True: + # Keep copying as long as there's more stuff there. + try: + dirty = read.bio_read(4096) + except WantReadError: + # Okay, nothing more waiting to be sent. Stop + # processing this send buffer. + break + else: + # Keep track of the fact that someone generated some + # output. + wrote = True + write.bio_write(dirty) + + + +class VersionTests(TestCase): + """ + Tests for version information exposed by + :py:obj:`OpenSSL.SSL.SSLeay_version` and + :py:obj:`OpenSSL.SSL.OPENSSL_VERSION_NUMBER`. + """ + def test_OPENSSL_VERSION_NUMBER(self): + """ + :py:obj:`OPENSSL_VERSION_NUMBER` is an integer with status in the low + byte and the patch, fix, minor, and major versions in the + nibbles above that. + """ + self.assertTrue(isinstance(OPENSSL_VERSION_NUMBER, int)) + + + def test_SSLeay_version(self): + """ + :py:obj:`SSLeay_version` takes a version type indicator and returns + one of a number of version strings based on that indicator. + """ + versions = {} + for t in [SSLEAY_VERSION, SSLEAY_CFLAGS, SSLEAY_BUILT_ON, + SSLEAY_PLATFORM, SSLEAY_DIR]: + version = SSLeay_version(t) + versions[version] = t + self.assertTrue(isinstance(version, bytes)) + self.assertEqual(len(versions), 5) + + + +class ContextTests(TestCase, _LoopbackMixin): + """ + Unit tests for :py:obj:`OpenSSL.SSL.Context`. + """ + def test_method(self): + """ + :py:obj:`Context` can be instantiated with one of :py:obj:`SSLv2_METHOD`, + :py:obj:`SSLv3_METHOD`, :py:obj:`SSLv23_METHOD`, :py:obj:`TLSv1_METHOD`, + :py:obj:`TLSv1_1_METHOD`, or :py:obj:`TLSv1_2_METHOD`. + """ + methods = [ + SSLv3_METHOD, SSLv23_METHOD, TLSv1_METHOD] + for meth in methods: + Context(meth) + + + maybe = [SSLv2_METHOD, TLSv1_1_METHOD, TLSv1_2_METHOD] + for meth in maybe: + try: + Context(meth) + except (Error, ValueError): + # Some versions of OpenSSL have SSLv2 / TLSv1.1 / TLSv1.2, some + # don't. 
Difficult to say in advance. + pass + + self.assertRaises(TypeError, Context, "") + self.assertRaises(ValueError, Context, 10) + + + if not PY3: + def test_method_long(self): + """ + On Python 2 :py:class:`Context` accepts values of type + :py:obj:`long` as well as :py:obj:`int`. + """ + Context(long(TLSv1_METHOD)) + + + + def test_type(self): + """ + :py:obj:`Context` and :py:obj:`ContextType` refer to the same type object and can be + used to create instances of that type. + """ + self.assertIdentical(Context, ContextType) + self.assertConsistentType(Context, 'Context', TLSv1_METHOD) + + + def test_use_privatekey(self): + """ + :py:obj:`Context.use_privatekey` takes an :py:obj:`OpenSSL.crypto.PKey` instance. + """ + key = PKey() + key.generate_key(TYPE_RSA, 128) + ctx = Context(TLSv1_METHOD) + ctx.use_privatekey(key) + self.assertRaises(TypeError, ctx.use_privatekey, "") + + + def test_use_privatekey_file_missing(self): + """ + :py:obj:`Context.use_privatekey_file` raises :py:obj:`OpenSSL.SSL.Error` + when passed the name of a file which does not exist. + """ + ctx = Context(TLSv1_METHOD) + self.assertRaises(Error, ctx.use_privatekey_file, self.mktemp()) + + + if not PY3: + def test_use_privatekey_file_long(self): + """ + On Python 2 :py:obj:`Context.use_privatekey_file` accepts a + filetype of type :py:obj:`long` as well as :py:obj:`int`. + """ + pemfile = self.mktemp() + + key = PKey() + key.generate_key(TYPE_RSA, 128) + + with open(pemfile, "wt") as pem: + pem.write( + dump_privatekey(FILETYPE_PEM, key).decode("ascii")) + + ctx = Context(TLSv1_METHOD) + ctx.use_privatekey_file(pemfile, long(FILETYPE_PEM)) + + + def test_use_certificate_wrong_args(self): + """ + :py:obj:`Context.use_certificate_wrong_args` raises :py:obj:`TypeError` + when not passed exactly one :py:obj:`OpenSSL.crypto.X509` instance as an + argument. 
+ """ + ctx = Context(TLSv1_METHOD) + self.assertRaises(TypeError, ctx.use_certificate) + self.assertRaises(TypeError, ctx.use_certificate, "hello, world") + self.assertRaises(TypeError, ctx.use_certificate, X509(), "hello, world") + + + def test_use_certificate_uninitialized(self): + """ + :py:obj:`Context.use_certificate` raises :py:obj:`OpenSSL.SSL.Error` + when passed a :py:obj:`OpenSSL.crypto.X509` instance which has not been + initialized (ie, which does not actually have any certificate data). + """ + ctx = Context(TLSv1_METHOD) + self.assertRaises(Error, ctx.use_certificate, X509()) + + + def test_use_certificate(self): + """ + :py:obj:`Context.use_certificate` sets the certificate which will be + used to identify connections created using the context. + """ + # TODO + # Hard to assert anything. But we could set a privatekey then ask + # OpenSSL if the cert and key agree using check_privatekey. Then as + # long as check_privatekey works right we're good... + ctx = Context(TLSv1_METHOD) + ctx.use_certificate(load_certificate(FILETYPE_PEM, cleartextCertificatePEM)) + + + def test_use_certificate_file_wrong_args(self): + """ + :py:obj:`Context.use_certificate_file` raises :py:obj:`TypeError` if + called with zero arguments or more than two arguments, or if the first + argument is not a byte string or the second argumnent is not an integer. + """ + ctx = Context(TLSv1_METHOD) + self.assertRaises(TypeError, ctx.use_certificate_file) + self.assertRaises(TypeError, ctx.use_certificate_file, b"somefile", object()) + self.assertRaises( + TypeError, ctx.use_certificate_file, b"somefile", FILETYPE_PEM, object()) + self.assertRaises( + TypeError, ctx.use_certificate_file, object(), FILETYPE_PEM) + self.assertRaises( + TypeError, ctx.use_certificate_file, b"somefile", object()) + + + def test_use_certificate_file_missing(self): + """ + :py:obj:`Context.use_certificate_file` raises + `:py:obj:`OpenSSL.SSL.Error` if passed the name of a file which does not + exist. 
+ """ + ctx = Context(TLSv1_METHOD) + self.assertRaises(Error, ctx.use_certificate_file, self.mktemp()) + + + def test_use_certificate_file(self): + """ + :py:obj:`Context.use_certificate` sets the certificate which will be + used to identify connections created using the context. + """ + # TODO + # Hard to assert anything. But we could set a privatekey then ask + # OpenSSL if the cert and key agree using check_privatekey. Then as + # long as check_privatekey works right we're good... + pem_filename = self.mktemp() + with open(pem_filename, "wb") as pem_file: + pem_file.write(cleartextCertificatePEM) + + ctx = Context(TLSv1_METHOD) + ctx.use_certificate_file(pem_filename) + + + if not PY3: + def test_use_certificate_file_long(self): + """ + On Python 2 :py:obj:`Context.use_certificate_file` accepts a + filetype of type :py:obj:`long` as well as :py:obj:`int`. + """ + pem_filename = self.mktemp() + with open(pem_filename, "wb") as pem_file: + pem_file.write(cleartextCertificatePEM) + + ctx = Context(TLSv1_METHOD) + ctx.use_certificate_file(pem_filename, long(FILETYPE_PEM)) + + + def test_set_app_data_wrong_args(self): + """ + :py:obj:`Context.set_app_data` raises :py:obj:`TypeError` if called with other than + one argument. + """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.set_app_data) + self.assertRaises(TypeError, context.set_app_data, None, None) + + + def test_get_app_data_wrong_args(self): + """ + :py:obj:`Context.get_app_data` raises :py:obj:`TypeError` if called with any + arguments. + """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.get_app_data, None) + + + def test_app_data(self): + """ + :py:obj:`Context.set_app_data` stores an object for later retrieval using + :py:obj:`Context.get_app_data`. 
+ """ + app_data = object() + context = Context(TLSv1_METHOD) + context.set_app_data(app_data) + self.assertIdentical(context.get_app_data(), app_data) + + + def test_set_options_wrong_args(self): + """ + :py:obj:`Context.set_options` raises :py:obj:`TypeError` if called with the wrong + number of arguments or a non-:py:obj:`int` argument. + """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.set_options) + self.assertRaises(TypeError, context.set_options, None) + self.assertRaises(TypeError, context.set_options, 1, None) + + + def test_set_options(self): + """ + :py:obj:`Context.set_options` returns the new options value. + """ + context = Context(TLSv1_METHOD) + options = context.set_options(OP_NO_SSLv2) + self.assertTrue(OP_NO_SSLv2 & options) + + + if not PY3: + def test_set_options_long(self): + """ + On Python 2 :py:obj:`Context.set_options` accepts values of type + :py:obj:`long` as well as :py:obj:`int`. + """ + context = Context(TLSv1_METHOD) + options = context.set_options(long(OP_NO_SSLv2)) + self.assertTrue(OP_NO_SSLv2 & options) + + + def test_set_mode_wrong_args(self): + """ + :py:obj:`Context.set`mode} raises :py:obj:`TypeError` if called with the wrong + number of arguments or a non-:py:obj:`int` argument. + """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.set_mode) + self.assertRaises(TypeError, context.set_mode, None) + self.assertRaises(TypeError, context.set_mode, 1, None) + + + if MODE_RELEASE_BUFFERS is not None: + def test_set_mode(self): + """ + :py:obj:`Context.set_mode` accepts a mode bitvector and returns the newly + set mode. + """ + context = Context(TLSv1_METHOD) + self.assertTrue( + MODE_RELEASE_BUFFERS & context.set_mode(MODE_RELEASE_BUFFERS)) + + if not PY3: + def test_set_mode_long(self): + """ + On Python 2 :py:obj:`Context.set_mode` accepts values of type + :py:obj:`long` as well as :py:obj:`int`. 
+ """ + context = Context(TLSv1_METHOD) + mode = context.set_mode(long(MODE_RELEASE_BUFFERS)) + self.assertTrue(MODE_RELEASE_BUFFERS & mode) + else: + "MODE_RELEASE_BUFFERS unavailable - OpenSSL version may be too old" + + + def test_set_timeout_wrong_args(self): + """ + :py:obj:`Context.set_timeout` raises :py:obj:`TypeError` if called with the wrong + number of arguments or a non-:py:obj:`int` argument. + """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.set_timeout) + self.assertRaises(TypeError, context.set_timeout, None) + self.assertRaises(TypeError, context.set_timeout, 1, None) + + + def test_get_timeout_wrong_args(self): + """ + :py:obj:`Context.get_timeout` raises :py:obj:`TypeError` if called with any arguments. + """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.get_timeout, None) + + + def test_timeout(self): + """ + :py:obj:`Context.set_timeout` sets the session timeout for all connections + created using the context object. :py:obj:`Context.get_timeout` retrieves this + value. + """ + context = Context(TLSv1_METHOD) + context.set_timeout(1234) + self.assertEquals(context.get_timeout(), 1234) + + + if not PY3: + def test_timeout_long(self): + """ + On Python 2 :py:obj:`Context.set_timeout` accepts values of type + `long` as well as int. + """ + context = Context(TLSv1_METHOD) + context.set_timeout(long(1234)) + self.assertEquals(context.get_timeout(), 1234) + + + def test_set_verify_depth_wrong_args(self): + """ + :py:obj:`Context.set_verify_depth` raises :py:obj:`TypeError` if called with the wrong + number of arguments or a non-:py:obj:`int` argument. 
+ """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.set_verify_depth) + self.assertRaises(TypeError, context.set_verify_depth, None) + self.assertRaises(TypeError, context.set_verify_depth, 1, None) + + + def test_get_verify_depth_wrong_args(self): + """ + :py:obj:`Context.get_verify_depth` raises :py:obj:`TypeError` if called with any arguments. + """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.get_verify_depth, None) + + + def test_verify_depth(self): + """ + :py:obj:`Context.set_verify_depth` sets the number of certificates in a chain + to follow before giving up. The value can be retrieved with + :py:obj:`Context.get_verify_depth`. + """ + context = Context(TLSv1_METHOD) + context.set_verify_depth(11) + self.assertEquals(context.get_verify_depth(), 11) + + + if not PY3: + def test_verify_depth_long(self): + """ + On Python 2 :py:obj:`Context.set_verify_depth` accepts values of + type `long` as well as int. + """ + context = Context(TLSv1_METHOD) + context.set_verify_depth(long(11)) + self.assertEquals(context.get_verify_depth(), 11) + + + def _write_encrypted_pem(self, passphrase): + """ + Write a new private key out to a new file, encrypted using the given + passphrase. Return the path to the new file. + """ + key = PKey() + key.generate_key(TYPE_RSA, 128) + pemFile = self.mktemp() + fObj = open(pemFile, 'w') + pem = dump_privatekey(FILETYPE_PEM, key, "blowfish", passphrase) + fObj.write(pem.decode('ascii')) + fObj.close() + return pemFile + + + def test_set_passwd_cb_wrong_args(self): + """ + :py:obj:`Context.set_passwd_cb` raises :py:obj:`TypeError` if called with the + wrong arguments or with a non-callable first argument. 
+ """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.set_passwd_cb) + self.assertRaises(TypeError, context.set_passwd_cb, None) + self.assertRaises(TypeError, context.set_passwd_cb, lambda: None, None, None) + + + def test_set_passwd_cb(self): + """ + :py:obj:`Context.set_passwd_cb` accepts a callable which will be invoked when + a private key is loaded from an encrypted PEM. + """ + passphrase = b("foobar") + pemFile = self._write_encrypted_pem(passphrase) + calledWith = [] + def passphraseCallback(maxlen, verify, extra): + calledWith.append((maxlen, verify, extra)) + return passphrase + context = Context(TLSv1_METHOD) + context.set_passwd_cb(passphraseCallback) + context.use_privatekey_file(pemFile) + self.assertTrue(len(calledWith), 1) + self.assertTrue(isinstance(calledWith[0][0], int)) + self.assertTrue(isinstance(calledWith[0][1], int)) + self.assertEqual(calledWith[0][2], None) + + + def test_passwd_callback_exception(self): + """ + :py:obj:`Context.use_privatekey_file` propagates any exception raised by the + passphrase callback. + """ + pemFile = self._write_encrypted_pem(b("monkeys are nice")) + def passphraseCallback(maxlen, verify, extra): + raise RuntimeError("Sorry, I am a fail.") + + context = Context(TLSv1_METHOD) + context.set_passwd_cb(passphraseCallback) + self.assertRaises(RuntimeError, context.use_privatekey_file, pemFile) + + + def test_passwd_callback_false(self): + """ + :py:obj:`Context.use_privatekey_file` raises :py:obj:`OpenSSL.SSL.Error` if the + passphrase callback returns a false value. 
+ """ + pemFile = self._write_encrypted_pem(b("monkeys are nice")) + def passphraseCallback(maxlen, verify, extra): + return b"" + + context = Context(TLSv1_METHOD) + context.set_passwd_cb(passphraseCallback) + self.assertRaises(Error, context.use_privatekey_file, pemFile) + + + def test_passwd_callback_non_string(self): + """ + :py:obj:`Context.use_privatekey_file` raises :py:obj:`OpenSSL.SSL.Error` if the + passphrase callback returns a true non-string value. + """ + pemFile = self._write_encrypted_pem(b("monkeys are nice")) + def passphraseCallback(maxlen, verify, extra): + return 10 + + context = Context(TLSv1_METHOD) + context.set_passwd_cb(passphraseCallback) + self.assertRaises(ValueError, context.use_privatekey_file, pemFile) + + + def test_passwd_callback_too_long(self): + """ + If the passphrase returned by the passphrase callback returns a string + longer than the indicated maximum length, it is truncated. + """ + # A priori knowledge! + passphrase = b("x") * 1024 + pemFile = self._write_encrypted_pem(passphrase) + def passphraseCallback(maxlen, verify, extra): + assert maxlen == 1024 + return passphrase + b("y") + + context = Context(TLSv1_METHOD) + context.set_passwd_cb(passphraseCallback) + # This shall succeed because the truncated result is the correct + # passphrase. + context.use_privatekey_file(pemFile) + + + def test_set_info_callback(self): + """ + :py:obj:`Context.set_info_callback` accepts a callable which will be invoked + when certain information about an SSL connection is available. 
+ """ + (server, client) = socket_pair() + + clientSSL = Connection(Context(TLSv1_METHOD), client) + clientSSL.set_connect_state() + + called = [] + def info(conn, where, ret): + called.append((conn, where, ret)) + context = Context(TLSv1_METHOD) + context.set_info_callback(info) + context.use_certificate( + load_certificate(FILETYPE_PEM, cleartextCertificatePEM)) + context.use_privatekey( + load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM)) + + serverSSL = Connection(context, server) + serverSSL.set_accept_state() + + handshake(clientSSL, serverSSL) + + # The callback must always be called with a Connection instance as the + # first argument. It would probably be better to split this into + # separate tests for client and server side info callbacks so we could + # assert it is called with the right Connection instance. It would + # also be good to assert *something* about `where` and `ret`. + notConnections = [ + conn for (conn, where, ret) in called + if not isinstance(conn, Connection)] + self.assertEqual( + [], notConnections, + "Some info callback arguments were not Connection instaces.") + + + def _load_verify_locations_test(self, *args): + """ + Create a client context which will verify the peer certificate and call + its :py:obj:`load_verify_locations` method with the given arguments. + Then connect it to a server and ensure that the handshake succeeds. + """ + (server, client) = socket_pair() + + clientContext = Context(TLSv1_METHOD) + clientContext.load_verify_locations(*args) + # Require that the server certificate verify properly or the + # connection will fail. 
+ clientContext.set_verify( + VERIFY_PEER, + lambda conn, cert, errno, depth, preverify_ok: preverify_ok) + + clientSSL = Connection(clientContext, client) + clientSSL.set_connect_state() + + serverContext = Context(TLSv1_METHOD) + serverContext.use_certificate( + load_certificate(FILETYPE_PEM, cleartextCertificatePEM)) + serverContext.use_privatekey( + load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM)) + + serverSSL = Connection(serverContext, server) + serverSSL.set_accept_state() + + # Without load_verify_locations above, the handshake + # will fail: + # Error: [('SSL routines', 'SSL3_GET_SERVER_CERTIFICATE', + # 'certificate verify failed')] + handshake(clientSSL, serverSSL) + + cert = clientSSL.get_peer_certificate() + self.assertEqual(cert.get_subject().CN, 'Testing Root CA') + + + def test_load_verify_file(self): + """ + :py:obj:`Context.load_verify_locations` accepts a file name and uses the + certificates within for verification purposes. + """ + cafile = self.mktemp() + fObj = open(cafile, 'w') + fObj.write(cleartextCertificatePEM.decode('ascii')) + fObj.close() + + self._load_verify_locations_test(cafile) + + + def test_load_verify_invalid_file(self): + """ + :py:obj:`Context.load_verify_locations` raises :py:obj:`Error` when passed a + non-existent cafile. + """ + clientContext = Context(TLSv1_METHOD) + self.assertRaises( + Error, clientContext.load_verify_locations, self.mktemp()) + + + def test_load_verify_directory(self): + """ + :py:obj:`Context.load_verify_locations` accepts a directory name and uses + the certificates within for verification purposes. + """ + capath = self.mktemp() + makedirs(capath) + # Hash values computed manually with c_rehash to avoid depending on + # c_rehash in the test suite. One is from OpenSSL 0.9.8, the other + # from OpenSSL 1.0.0. 
+ for name in [b'c7adac82.0', b'c3705638.0']: + cafile = join(capath, name) + fObj = open(cafile, 'w') + fObj.write(cleartextCertificatePEM.decode('ascii')) + fObj.close() + + self._load_verify_locations_test(None, capath) + + + def test_load_verify_locations_wrong_args(self): + """ + :py:obj:`Context.load_verify_locations` raises :py:obj:`TypeError` if called with + the wrong number of arguments or with non-:py:obj:`str` arguments. + """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.load_verify_locations) + self.assertRaises(TypeError, context.load_verify_locations, object()) + self.assertRaises(TypeError, context.load_verify_locations, object(), object()) + self.assertRaises(TypeError, context.load_verify_locations, None, None, None) + + + if platform == "win32": + "set_default_verify_paths appears not to work on Windows. " + "See LP#404343 and LP#404344." + else: + def test_set_default_verify_paths(self): + """ + :py:obj:`Context.set_default_verify_paths` causes the platform-specific CA + certificate locations to be used for verification purposes. + """ + # Testing this requires a server with a certificate signed by one of + # the CAs in the platform CA location. Getting one of those costs + # money. Fortunately (or unfortunately, depending on your + # perspective), it's easy to think of a public server on the + # internet which has such a certificate. Connecting to the network + # in a unit test is bad, but it's the only way I can think of to + # really test this. 
-exarkun + + # Arg, verisign.com doesn't speak TLSv1 + context = Context(SSLv3_METHOD) + context.set_default_verify_paths() + context.set_verify( + VERIFY_PEER, + lambda conn, cert, errno, depth, preverify_ok: preverify_ok) + + client = socket() + client.connect(('verisign.com', 443)) + clientSSL = Connection(context, client) + clientSSL.set_connect_state() + clientSSL.do_handshake() + clientSSL.send(b"GET / HTTP/1.0\r\n\r\n") + self.assertTrue(clientSSL.recv(1024)) + + + def test_set_default_verify_paths_signature(self): + """ + :py:obj:`Context.set_default_verify_paths` takes no arguments and raises + :py:obj:`TypeError` if given any. + """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.set_default_verify_paths, None) + self.assertRaises(TypeError, context.set_default_verify_paths, 1) + self.assertRaises(TypeError, context.set_default_verify_paths, "") + + + def test_add_extra_chain_cert_invalid_cert(self): + """ + :py:obj:`Context.add_extra_chain_cert` raises :py:obj:`TypeError` if called with + other than one argument or if called with an object which is not an + instance of :py:obj:`X509`. + """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.add_extra_chain_cert) + self.assertRaises(TypeError, context.add_extra_chain_cert, object()) + self.assertRaises(TypeError, context.add_extra_chain_cert, object(), object()) + + + def _handshake_test(self, serverContext, clientContext): + """ + Verify that a client and server created with the given contexts can + successfully handshake and communicate. + """ + serverSocket, clientSocket = socket_pair() + + server = Connection(serverContext, serverSocket) + server.set_accept_state() + + client = Connection(clientContext, clientSocket) + client.set_connect_state() + + # Make them talk to each other. 
+ # self._interactInMemory(client, server) + for i in range(3): + for s in [client, server]: + try: + s.do_handshake() + except WantReadError: + pass + + + def test_set_verify_callback_exception(self): + """ + If the verify callback passed to :py:obj:`Context.set_verify` raises an + exception, verification fails and the exception is propagated to the + caller of :py:obj:`Connection.do_handshake`. + """ + serverContext = Context(TLSv1_METHOD) + serverContext.use_privatekey( + load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM)) + serverContext.use_certificate( + load_certificate(FILETYPE_PEM, cleartextCertificatePEM)) + + clientContext = Context(TLSv1_METHOD) + def verify_callback(*args): + raise Exception("silly verify failure") + clientContext.set_verify(VERIFY_PEER, verify_callback) + + exc = self.assertRaises( + Exception, self._handshake_test, serverContext, clientContext) + self.assertEqual("silly verify failure", str(exc)) + + + def test_add_extra_chain_cert(self): + """ + :py:obj:`Context.add_extra_chain_cert` accepts an :py:obj:`X509` instance to add to + the certificate chain. + + See :py:obj:`_create_certificate_chain` for the details of the certificate + chain tested. + + The chain is tested by starting a server with scert and connecting + to it with a client which trusts cacert and requires verification to + succeed. + """ + chain = _create_certificate_chain() + [(cakey, cacert), (ikey, icert), (skey, scert)] = chain + + # Dump the CA certificate to a file because that's the only way to load + # it as a trusted CA in the client context. 
+ for cert, name in [(cacert, 'ca.pem'), (icert, 'i.pem'), (scert, 's.pem')]: + fObj = open(name, 'w') + fObj.write(dump_certificate(FILETYPE_PEM, cert).decode('ascii')) + fObj.close() + + for key, name in [(cakey, 'ca.key'), (ikey, 'i.key'), (skey, 's.key')]: + fObj = open(name, 'w') + fObj.write(dump_privatekey(FILETYPE_PEM, key).decode('ascii')) + fObj.close() + + # Create the server context + serverContext = Context(TLSv1_METHOD) + serverContext.use_privatekey(skey) + serverContext.use_certificate(scert) + # The client already has cacert, we only need to give them icert. + serverContext.add_extra_chain_cert(icert) + + # Create the client + clientContext = Context(TLSv1_METHOD) + clientContext.set_verify( + VERIFY_PEER | VERIFY_FAIL_IF_NO_PEER_CERT, verify_cb) + clientContext.load_verify_locations(b"ca.pem") + + # Try it out. + self._handshake_test(serverContext, clientContext) + + + def test_use_certificate_chain_file(self): + """ + :py:obj:`Context.use_certificate_chain_file` reads a certificate chain from + the specified file. + + The chain is tested by starting a server with scert and connecting + to it with a client which trusts cacert and requires verification to + succeed. + """ + chain = _create_certificate_chain() + [(cakey, cacert), (ikey, icert), (skey, scert)] = chain + + # Write out the chain file. + chainFile = self.mktemp() + fObj = open(chainFile, 'wb') + # Most specific to least general. 
+ fObj.write(dump_certificate(FILETYPE_PEM, scert)) + fObj.write(dump_certificate(FILETYPE_PEM, icert)) + fObj.write(dump_certificate(FILETYPE_PEM, cacert)) + fObj.close() + + serverContext = Context(TLSv1_METHOD) + serverContext.use_certificate_chain_file(chainFile) + serverContext.use_privatekey(skey) + + fObj = open('ca.pem', 'w') + fObj.write(dump_certificate(FILETYPE_PEM, cacert).decode('ascii')) + fObj.close() + + clientContext = Context(TLSv1_METHOD) + clientContext.set_verify( + VERIFY_PEER | VERIFY_FAIL_IF_NO_PEER_CERT, verify_cb) + clientContext.load_verify_locations(b"ca.pem") + + self._handshake_test(serverContext, clientContext) + + + def test_use_certificate_chain_file_wrong_args(self): + """ + :py:obj:`Context.use_certificate_chain_file` raises :py:obj:`TypeError` + if passed zero or more than one argument or when passed a non-byte + string single argument. It also raises :py:obj:`OpenSSL.SSL.Error` when + passed a bad chain file name (for example, the name of a file which does + not exist). + """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.use_certificate_chain_file) + self.assertRaises(TypeError, context.use_certificate_chain_file, object()) + self.assertRaises(TypeError, context.use_certificate_chain_file, b"foo", object()) + + self.assertRaises(Error, context.use_certificate_chain_file, self.mktemp()) + + # XXX load_client_ca + # XXX set_session_id + + def test_get_verify_mode_wrong_args(self): + """ + :py:obj:`Context.get_verify_mode` raises :py:obj:`TypeError` if called with any + arguments. + """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.get_verify_mode, None) + + + def test_set_verify_mode(self): + """ + :py:obj:`Context.get_verify_mode` returns the verify mode flags previously + passed to :py:obj:`Context.set_verify`. 
+ """ + context = Context(TLSv1_METHOD) + self.assertEquals(context.get_verify_mode(), 0) + context.set_verify( + VERIFY_PEER | VERIFY_CLIENT_ONCE, lambda *args: None) + self.assertEquals( + context.get_verify_mode(), VERIFY_PEER | VERIFY_CLIENT_ONCE) + + + if not PY3: + def test_set_verify_mode_long(self): + """ + On Python 2 :py:obj:`Context.set_verify_mode` accepts values of + type :py:obj:`long` as well as :py:obj:`int`. + """ + context = Context(TLSv1_METHOD) + self.assertEquals(context.get_verify_mode(), 0) + context.set_verify( + long(VERIFY_PEER | VERIFY_CLIENT_ONCE), lambda *args: None) + self.assertEquals( + context.get_verify_mode(), VERIFY_PEER | VERIFY_CLIENT_ONCE) + + + def test_load_tmp_dh_wrong_args(self): + """ + :py:obj:`Context.load_tmp_dh` raises :py:obj:`TypeError` if called with the wrong + number of arguments or with a non-:py:obj:`str` argument. + """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.load_tmp_dh) + self.assertRaises(TypeError, context.load_tmp_dh, "foo", None) + self.assertRaises(TypeError, context.load_tmp_dh, object()) + + + def test_load_tmp_dh_missing_file(self): + """ + :py:obj:`Context.load_tmp_dh` raises :py:obj:`OpenSSL.SSL.Error` if the specified file + does not exist. + """ + context = Context(TLSv1_METHOD) + self.assertRaises(Error, context.load_tmp_dh, b"hello") + + + def test_load_tmp_dh(self): + """ + :py:obj:`Context.load_tmp_dh` loads Diffie-Hellman parameters from the + specified file. + """ + context = Context(TLSv1_METHOD) + dhfilename = self.mktemp() + dhfile = open(dhfilename, "w") + dhfile.write(dhparam) + dhfile.close() + context.load_tmp_dh(dhfilename) + # XXX What should I assert here? -exarkun + + + def test_set_cipher_list_bytes(self): + """ + :py:obj:`Context.set_cipher_list` accepts a :py:obj:`bytes` naming the + ciphers which connections created with the context object will be able + to choose from. 
+ """ + context = Context(TLSv1_METHOD) + context.set_cipher_list(b"hello world:EXP-RC4-MD5") + conn = Connection(context, None) + self.assertEquals(conn.get_cipher_list(), ["EXP-RC4-MD5"]) + + + def test_set_cipher_list_text(self): + """ + :py:obj:`Context.set_cipher_list` accepts a :py:obj:`unicode` naming + the ciphers which connections created with the context object will be + able to choose from. + """ + context = Context(TLSv1_METHOD) + context.set_cipher_list(u("hello world:EXP-RC4-MD5")) + conn = Connection(context, None) + self.assertEquals(conn.get_cipher_list(), ["EXP-RC4-MD5"]) + + + def test_set_cipher_list_wrong_args(self): + """ + :py:obj:`Context.set_cipher_list` raises :py:obj:`TypeError` when + passed zero arguments or more than one argument or when passed a + non-string single argument and raises :py:obj:`OpenSSL.SSL.Error` when + passed an incorrect cipher list string. + """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.set_cipher_list) + self.assertRaises(TypeError, context.set_cipher_list, object()) + self.assertRaises(TypeError, context.set_cipher_list, b"EXP-RC4-MD5", object()) + + self.assertRaises(Error, context.set_cipher_list, "imaginary-cipher") + + + def test_set_session_cache_mode_wrong_args(self): + """ + :py:obj:`Context.set_session_cache_mode` raises :py:obj:`TypeError` if + called with other than one integer argument. + """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.set_session_cache_mode) + self.assertRaises(TypeError, context.set_session_cache_mode, object()) + + + def test_get_session_cache_mode_wrong_args(self): + """ + :py:obj:`Context.get_session_cache_mode` raises :py:obj:`TypeError` if + called with any arguments. + """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.get_session_cache_mode, 1) + + + def test_session_cache_mode(self): + """ + :py:obj:`Context.set_session_cache_mode` specifies how sessions are + cached. 
The setting can be retrieved via + :py:obj:`Context.get_session_cache_mode`. + """ + context = Context(TLSv1_METHOD) + context.set_session_cache_mode(SESS_CACHE_OFF) + off = context.set_session_cache_mode(SESS_CACHE_BOTH) + self.assertEqual(SESS_CACHE_OFF, off) + self.assertEqual(SESS_CACHE_BOTH, context.get_session_cache_mode()) + + if not PY3: + def test_session_cache_mode_long(self): + """ + On Python 2 :py:obj:`Context.set_session_cache_mode` accepts values + of type :py:obj:`long` as well as :py:obj:`int`. + """ + context = Context(TLSv1_METHOD) + context.set_session_cache_mode(long(SESS_CACHE_BOTH)) + self.assertEqual( + SESS_CACHE_BOTH, context.get_session_cache_mode()) + + + def test_get_cert_store(self): + """ + :py:obj:`Context.get_cert_store` returns a :py:obj:`X509Store` instance. + """ + context = Context(TLSv1_METHOD) + store = context.get_cert_store() + self.assertIsInstance(store, X509Store) + + + +class ServerNameCallbackTests(TestCase, _LoopbackMixin): + """ + Tests for :py:obj:`Context.set_tlsext_servername_callback` and its interaction with + :py:obj:`Connection`. + """ + def test_wrong_args(self): + """ + :py:obj:`Context.set_tlsext_servername_callback` raises :py:obj:`TypeError` if called + with other than one argument. + """ + context = Context(TLSv1_METHOD) + self.assertRaises(TypeError, context.set_tlsext_servername_callback) + self.assertRaises( + TypeError, context.set_tlsext_servername_callback, 1, 2) + + + def test_old_callback_forgotten(self): + """ + If :py:obj:`Context.set_tlsext_servername_callback` is used to specify a new + callback, the one it replaces is dereferenced. + """ + def callback(connection): + pass + + def replacement(connection): + pass + + context = Context(TLSv1_METHOD) + context.set_tlsext_servername_callback(callback) + + tracker = ref(callback) + del callback + + context.set_tlsext_servername_callback(replacement) + + # One run of the garbage collector happens to work on CPython. 
PyPy + # doesn't collect the underlying object until a second run for whatever + # reason. That's fine, it still demonstrates our code has properly + # dropped the reference. + collect() + collect() + + callback = tracker() + if callback is not None: + referrers = get_referrers(callback) + if len(referrers) > 1: + self.fail("Some references remain: %r" % (referrers,)) + + + def test_no_servername(self): + """ + When a client specifies no server name, the callback passed to + :py:obj:`Context.set_tlsext_servername_callback` is invoked and the result of + :py:obj:`Connection.get_servername` is :py:obj:`None`. + """ + args = [] + def servername(conn): + args.append((conn, conn.get_servername())) + context = Context(TLSv1_METHOD) + context.set_tlsext_servername_callback(servername) + + # Lose our reference to it. The Context is responsible for keeping it + # alive now. + del servername + collect() + + # Necessary to actually accept the connection + context.use_privatekey(load_privatekey(FILETYPE_PEM, server_key_pem)) + context.use_certificate(load_certificate(FILETYPE_PEM, server_cert_pem)) + + # Do a little connection to trigger the logic + server = Connection(context, None) + server.set_accept_state() + + client = Connection(Context(TLSv1_METHOD), None) + client.set_connect_state() + + self._interactInMemory(server, client) + + self.assertEqual([(server, None)], args) + + + def test_servername(self): + """ + When a client specifies a server name in its hello message, the callback + passed to :py:obj:`Contexts.set_tlsext_servername_callback` is invoked and the + result of :py:obj:`Connection.get_servername` is that server name. 
+ """ + args = [] + def servername(conn): + args.append((conn, conn.get_servername())) + context = Context(TLSv1_METHOD) + context.set_tlsext_servername_callback(servername) + + # Necessary to actually accept the connection + context.use_privatekey(load_privatekey(FILETYPE_PEM, server_key_pem)) + context.use_certificate(load_certificate(FILETYPE_PEM, server_cert_pem)) + + # Do a little connection to trigger the logic + server = Connection(context, None) + server.set_accept_state() + + client = Connection(Context(TLSv1_METHOD), None) + client.set_connect_state() + client.set_tlsext_host_name(b("foo1.example.com")) + + self._interactInMemory(server, client) + + self.assertEqual([(server, b("foo1.example.com"))], args) + + + +class SessionTests(TestCase): + """ + Unit tests for :py:obj:`OpenSSL.SSL.Session`. + """ + def test_construction(self): + """ + :py:class:`Session` can be constructed with no arguments, creating a new + instance of that type. + """ + new_session = Session() + self.assertTrue(isinstance(new_session, Session)) + + + def test_construction_wrong_args(self): + """ + If any arguments are passed to :py:class:`Session`, :py:obj:`TypeError` + is raised. + """ + self.assertRaises(TypeError, Session, 123) + self.assertRaises(TypeError, Session, "hello") + self.assertRaises(TypeError, Session, object()) + + + +class ConnectionTests(TestCase, _LoopbackMixin): + """ + Unit tests for :py:obj:`OpenSSL.SSL.Connection`. 
+ """ + # XXX get_peer_certificate -> None + # XXX sock_shutdown + # XXX master_key -> TypeError + # XXX server_random -> TypeError + # XXX state_string + # XXX connect -> TypeError + # XXX connect_ex -> TypeError + # XXX set_connect_state -> TypeError + # XXX set_accept_state -> TypeError + # XXX renegotiate_pending + # XXX do_handshake -> TypeError + # XXX bio_read -> TypeError + # XXX recv -> TypeError + # XXX send -> TypeError + # XXX bio_write -> TypeError + + def test_type(self): + """ + :py:obj:`Connection` and :py:obj:`ConnectionType` refer to the same type object and + can be used to create instances of that type. + """ + self.assertIdentical(Connection, ConnectionType) + ctx = Context(TLSv1_METHOD) + self.assertConsistentType(Connection, 'Connection', ctx, None) + + + def test_get_context(self): + """ + :py:obj:`Connection.get_context` returns the :py:obj:`Context` instance used to + construct the :py:obj:`Connection` instance. + """ + context = Context(TLSv1_METHOD) + connection = Connection(context, None) + self.assertIdentical(connection.get_context(), context) + + + def test_get_context_wrong_args(self): + """ + :py:obj:`Connection.get_context` raises :py:obj:`TypeError` if called with any + arguments. + """ + connection = Connection(Context(TLSv1_METHOD), None) + self.assertRaises(TypeError, connection.get_context, None) + + + def test_set_context_wrong_args(self): + """ + :py:obj:`Connection.set_context` raises :py:obj:`TypeError` if called with a + non-:py:obj:`Context` instance argument or with any number of arguments other + than 1. 
+ """ + ctx = Context(TLSv1_METHOD) + connection = Connection(ctx, None) + self.assertRaises(TypeError, connection.set_context) + self.assertRaises(TypeError, connection.set_context, object()) + self.assertRaises(TypeError, connection.set_context, "hello") + self.assertRaises(TypeError, connection.set_context, 1) + self.assertRaises(TypeError, connection.set_context, 1, 2) + self.assertRaises( + TypeError, connection.set_context, Context(TLSv1_METHOD), 2) + self.assertIdentical(ctx, connection.get_context()) + + + def test_set_context(self): + """ + :py:obj:`Connection.set_context` specifies a new :py:obj:`Context` instance to be used + for the connection. + """ + original = Context(SSLv23_METHOD) + replacement = Context(TLSv1_METHOD) + connection = Connection(original, None) + connection.set_context(replacement) + self.assertIdentical(replacement, connection.get_context()) + # Lose our references to the contexts, just in case the Connection isn't + # properly managing its own contributions to their reference counts. + del original, replacement + collect() + + + def test_set_tlsext_host_name_wrong_args(self): + """ + If :py:obj:`Connection.set_tlsext_host_name` is called with a non-byte string + argument or a byte string with an embedded NUL or other than one + argument, :py:obj:`TypeError` is raised. + """ + conn = Connection(Context(TLSv1_METHOD), None) + self.assertRaises(TypeError, conn.set_tlsext_host_name) + self.assertRaises(TypeError, conn.set_tlsext_host_name, object()) + self.assertRaises(TypeError, conn.set_tlsext_host_name, 123, 456) + self.assertRaises( + TypeError, conn.set_tlsext_host_name, b("with\0null")) + + if version_info >= (3,): + # On Python 3.x, don't accidentally implicitly convert from text. + self.assertRaises( + TypeError, + conn.set_tlsext_host_name, b("example.com").decode("ascii")) + + + def test_get_servername_wrong_args(self): + """ + :py:obj:`Connection.get_servername` raises :py:obj:`TypeError` if called with any + arguments. 
+ """ + connection = Connection(Context(TLSv1_METHOD), None) + self.assertRaises(TypeError, connection.get_servername, object()) + self.assertRaises(TypeError, connection.get_servername, 1) + self.assertRaises(TypeError, connection.get_servername, "hello") + + + def test_pending(self): + """ + :py:obj:`Connection.pending` returns the number of bytes available for + immediate read. + """ + connection = Connection(Context(TLSv1_METHOD), None) + self.assertEquals(connection.pending(), 0) + + + def test_pending_wrong_args(self): + """ + :py:obj:`Connection.pending` raises :py:obj:`TypeError` if called with any arguments. + """ + connection = Connection(Context(TLSv1_METHOD), None) + self.assertRaises(TypeError, connection.pending, None) + + + def test_connect_wrong_args(self): + """ + :py:obj:`Connection.connect` raises :py:obj:`TypeError` if called with a non-address + argument or with the wrong number of arguments. + """ + connection = Connection(Context(TLSv1_METHOD), socket()) + self.assertRaises(TypeError, connection.connect, None) + self.assertRaises(TypeError, connection.connect) + self.assertRaises(TypeError, connection.connect, ("127.0.0.1", 1), None) + + + def test_connect_refused(self): + """ + :py:obj:`Connection.connect` raises :py:obj:`socket.error` if the underlying socket + connect method raises it. + """ + client = socket() + context = Context(TLSv1_METHOD) + clientSSL = Connection(context, client) + exc = self.assertRaises(error, clientSSL.connect, ("127.0.0.1", 1)) + self.assertEquals(exc.args[0], ECONNREFUSED) + + + def test_connect(self): + """ + :py:obj:`Connection.connect` establishes a connection to the specified address. + """ + port = socket() + port.bind(('', 0)) + port.listen(3) + + clientSSL = Connection(Context(TLSv1_METHOD), socket()) + clientSSL.connect(('127.0.0.1', port.getsockname()[1])) + # XXX An assertion? Or something? 
+ + + if platform == "darwin": + "connect_ex sometimes causes a kernel panic on OS X 10.6.4" + else: + def test_connect_ex(self): + """ + If there is a connection error, :py:obj:`Connection.connect_ex` returns the + errno instead of raising an exception. + """ + port = socket() + port.bind(('', 0)) + port.listen(3) + + clientSSL = Connection(Context(TLSv1_METHOD), socket()) + clientSSL.setblocking(False) + result = clientSSL.connect_ex(port.getsockname()) + expected = (EINPROGRESS, EWOULDBLOCK) + self.assertTrue( + result in expected, "%r not in %r" % (result, expected)) + + + def test_accept_wrong_args(self): + """ + :py:obj:`Connection.accept` raises :py:obj:`TypeError` if called with any arguments. + """ + connection = Connection(Context(TLSv1_METHOD), socket()) + self.assertRaises(TypeError, connection.accept, None) + + + def test_accept(self): + """ + :py:obj:`Connection.accept` accepts a pending connection attempt and returns a + tuple of a new :py:obj:`Connection` (the accepted client) and the address the + connection originated from. + """ + ctx = Context(TLSv1_METHOD) + ctx.use_privatekey(load_privatekey(FILETYPE_PEM, server_key_pem)) + ctx.use_certificate(load_certificate(FILETYPE_PEM, server_cert_pem)) + port = socket() + portSSL = Connection(ctx, port) + portSSL.bind(('', 0)) + portSSL.listen(3) + + clientSSL = Connection(Context(TLSv1_METHOD), socket()) + + # Calling portSSL.getsockname() here to get the server IP address sounds + # great, but frequently fails on Windows. + clientSSL.connect(('127.0.0.1', portSSL.getsockname()[1])) + + serverSSL, address = portSSL.accept() + + self.assertTrue(isinstance(serverSSL, Connection)) + self.assertIdentical(serverSSL.get_context(), ctx) + self.assertEquals(address, clientSSL.getsockname()) + + + def test_shutdown_wrong_args(self): + """ + :py:obj:`Connection.shutdown` raises :py:obj:`TypeError` if called with the wrong + number of arguments or with arguments other than integers. 
+ """ + connection = Connection(Context(TLSv1_METHOD), None) + self.assertRaises(TypeError, connection.shutdown, None) + self.assertRaises(TypeError, connection.get_shutdown, None) + self.assertRaises(TypeError, connection.set_shutdown) + self.assertRaises(TypeError, connection.set_shutdown, None) + self.assertRaises(TypeError, connection.set_shutdown, 0, 1) + + + def test_shutdown(self): + """ + :py:obj:`Connection.shutdown` performs an SSL-level connection shutdown. + """ + server, client = self._loopback() + self.assertFalse(server.shutdown()) + self.assertEquals(server.get_shutdown(), SENT_SHUTDOWN) + self.assertRaises(ZeroReturnError, client.recv, 1024) + self.assertEquals(client.get_shutdown(), RECEIVED_SHUTDOWN) + client.shutdown() + self.assertEquals(client.get_shutdown(), SENT_SHUTDOWN|RECEIVED_SHUTDOWN) + self.assertRaises(ZeroReturnError, server.recv, 1024) + self.assertEquals(server.get_shutdown(), SENT_SHUTDOWN|RECEIVED_SHUTDOWN) + + + def test_set_shutdown(self): + """ + :py:obj:`Connection.set_shutdown` sets the state of the SSL connection shutdown + process. + """ + connection = Connection(Context(TLSv1_METHOD), socket()) + connection.set_shutdown(RECEIVED_SHUTDOWN) + self.assertEquals(connection.get_shutdown(), RECEIVED_SHUTDOWN) + + + if not PY3: + def test_set_shutdown_long(self): + """ + On Python 2 :py:obj:`Connection.set_shutdown` accepts an argument + of type :py:obj:`long` as well as :py:obj:`int`. + """ + connection = Connection(Context(TLSv1_METHOD), socket()) + connection.set_shutdown(long(RECEIVED_SHUTDOWN)) + self.assertEquals(connection.get_shutdown(), RECEIVED_SHUTDOWN) + + + def test_app_data_wrong_args(self): + """ + :py:obj:`Connection.set_app_data` raises :py:obj:`TypeError` if called with other than + one argument. :py:obj:`Connection.get_app_data` raises :py:obj:`TypeError` if called + with any arguments. 
+ """ + conn = Connection(Context(TLSv1_METHOD), None) + self.assertRaises(TypeError, conn.get_app_data, None) + self.assertRaises(TypeError, conn.set_app_data) + self.assertRaises(TypeError, conn.set_app_data, None, None) + + + def test_app_data(self): + """ + Any object can be set as app data by passing it to + :py:obj:`Connection.set_app_data` and later retrieved with + :py:obj:`Connection.get_app_data`. + """ + conn = Connection(Context(TLSv1_METHOD), None) + app_data = object() + conn.set_app_data(app_data) + self.assertIdentical(conn.get_app_data(), app_data) + + + def test_makefile(self): + """ + :py:obj:`Connection.makefile` is not implemented and calling that method raises + :py:obj:`NotImplementedError`. + """ + conn = Connection(Context(TLSv1_METHOD), None) + self.assertRaises(NotImplementedError, conn.makefile) + + + def test_get_peer_cert_chain_wrong_args(self): + """ + :py:obj:`Connection.get_peer_cert_chain` raises :py:obj:`TypeError` if called with any + arguments. + """ + conn = Connection(Context(TLSv1_METHOD), None) + self.assertRaises(TypeError, conn.get_peer_cert_chain, 1) + self.assertRaises(TypeError, conn.get_peer_cert_chain, "foo") + self.assertRaises(TypeError, conn.get_peer_cert_chain, object()) + self.assertRaises(TypeError, conn.get_peer_cert_chain, []) + + + def test_get_peer_cert_chain(self): + """ + :py:obj:`Connection.get_peer_cert_chain` returns a list of certificates which + the connected server returned for the certification verification. 
+ """ + chain = _create_certificate_chain() + [(cakey, cacert), (ikey, icert), (skey, scert)] = chain + + serverContext = Context(TLSv1_METHOD) + serverContext.use_privatekey(skey) + serverContext.use_certificate(scert) + serverContext.add_extra_chain_cert(icert) + serverContext.add_extra_chain_cert(cacert) + server = Connection(serverContext, None) + server.set_accept_state() + + # Create the client + clientContext = Context(TLSv1_METHOD) + clientContext.set_verify(VERIFY_NONE, verify_cb) + client = Connection(clientContext, None) + client.set_connect_state() + + self._interactInMemory(client, server) + + chain = client.get_peer_cert_chain() + self.assertEqual(len(chain), 3) + self.assertEqual( + "Server Certificate", chain[0].get_subject().CN) + self.assertEqual( + "Intermediate Certificate", chain[1].get_subject().CN) + self.assertEqual( + "Authority Certificate", chain[2].get_subject().CN) + + + def test_get_peer_cert_chain_none(self): + """ + :py:obj:`Connection.get_peer_cert_chain` returns :py:obj:`None` if the peer sends no + certificate chain. + """ + ctx = Context(TLSv1_METHOD) + ctx.use_privatekey(load_privatekey(FILETYPE_PEM, server_key_pem)) + ctx.use_certificate(load_certificate(FILETYPE_PEM, server_cert_pem)) + server = Connection(ctx, None) + server.set_accept_state() + client = Connection(Context(TLSv1_METHOD), None) + client.set_connect_state() + self._interactInMemory(client, server) + self.assertIdentical(None, server.get_peer_cert_chain()) + + + def test_get_session_wrong_args(self): + """ + :py:obj:`Connection.get_session` raises :py:obj:`TypeError` if called + with any arguments. 
+ """ + ctx = Context(TLSv1_METHOD) + server = Connection(ctx, None) + self.assertRaises(TypeError, server.get_session, 123) + self.assertRaises(TypeError, server.get_session, "hello") + self.assertRaises(TypeError, server.get_session, object()) + + + def test_get_session_unconnected(self): + """ + :py:obj:`Connection.get_session` returns :py:obj:`None` when used with + an object which has not been connected. + """ + ctx = Context(TLSv1_METHOD) + server = Connection(ctx, None) + session = server.get_session() + self.assertIdentical(None, session) + + + def test_server_get_session(self): + """ + On the server side of a connection, :py:obj:`Connection.get_session` + returns a :py:class:`Session` instance representing the SSL session for + that connection. + """ + server, client = self._loopback() + session = server.get_session() + self.assertIsInstance(session, Session) + + + def test_client_get_session(self): + """ + On the client side of a connection, :py:obj:`Connection.get_session` + returns a :py:class:`Session` instance representing the SSL session for + that connection. + """ + server, client = self._loopback() + session = client.get_session() + self.assertIsInstance(session, Session) + + + def test_set_session_wrong_args(self): + """ + If called with an object that is not an instance of :py:class:`Session`, + or with other than one argument, :py:obj:`Connection.set_session` raises + :py:obj:`TypeError`. 
+ """ + ctx = Context(TLSv1_METHOD) + connection = Connection(ctx, None) + self.assertRaises(TypeError, connection.set_session) + self.assertRaises(TypeError, connection.set_session, 123) + self.assertRaises(TypeError, connection.set_session, "hello") + self.assertRaises(TypeError, connection.set_session, object()) + self.assertRaises( + TypeError, connection.set_session, Session(), Session()) + + + def test_client_set_session(self): + """ + :py:obj:`Connection.set_session`, when used prior to a connection being + established, accepts a :py:class:`Session` instance and causes an + attempt to re-use the session it represents when the SSL handshake is + performed. + """ + key = load_privatekey(FILETYPE_PEM, server_key_pem) + cert = load_certificate(FILETYPE_PEM, server_cert_pem) + ctx = Context(TLSv1_METHOD) + ctx.use_privatekey(key) + ctx.use_certificate(cert) + ctx.set_session_id("unity-test") + + def makeServer(socket): + server = Connection(ctx, socket) + server.set_accept_state() + return server + + originalServer, originalClient = self._loopback( + serverFactory=makeServer) + originalSession = originalClient.get_session() + + def makeClient(socket): + client = self._loopbackClientFactory(socket) + client.set_session(originalSession) + return client + resumedServer, resumedClient = self._loopback( + serverFactory=makeServer, + clientFactory=makeClient) + + # This is a proxy: in general, we have no access to any unique + # identifier for the session (new enough versions of OpenSSL expose a + # hash which could be usable, but "new enough" is very, very new). + # Instead, exploit the fact that the master key is re-used if the + # session is re-used. As long as the master key for the two connections + # is the same, the session was re-used! 
+ self.assertEqual( + originalServer.master_key(), resumedServer.master_key()) + + + def test_set_session_wrong_method(self): + """ + If :py:obj:`Connection.set_session` is passed a :py:class:`Session` + instance associated with a context using a different SSL method than the + :py:obj:`Connection` is using, a :py:class:`OpenSSL.SSL.Error` is + raised. + """ + key = load_privatekey(FILETYPE_PEM, server_key_pem) + cert = load_certificate(FILETYPE_PEM, server_cert_pem) + ctx = Context(TLSv1_METHOD) + ctx.use_privatekey(key) + ctx.use_certificate(cert) + ctx.set_session_id("unity-test") + + def makeServer(socket): + server = Connection(ctx, socket) + server.set_accept_state() + return server + + originalServer, originalClient = self._loopback( + serverFactory=makeServer) + originalSession = originalClient.get_session() + + def makeClient(socket): + # Intentionally use a different, incompatible method here. + client = Connection(Context(SSLv3_METHOD), socket) + client.set_connect_state() + client.set_session(originalSession) + return client + + self.assertRaises( + Error, + self._loopback, clientFactory=makeClient, serverFactory=makeServer) + + + def test_wantWriteError(self): + """ + :py:obj:`Connection` methods which generate output raise + :py:obj:`OpenSSL.SSL.WantWriteError` if writing to the connection's BIO + fail indicating a should-write state. + """ + client_socket, server_socket = socket_pair() + # Fill up the client's send buffer so Connection won't be able to write + # anything. 
+ msg = b"x" * 512 + for i in range(2048): + try: + client_socket.send(msg) + except error as e: + if e.errno == EWOULDBLOCK: + break + raise + else: + self.fail( + "Failed to fill socket buffer, cannot test BIO want write") + + ctx = Context(TLSv1_METHOD) + conn = Connection(ctx, client_socket) + # Client's speak first, so make it an SSL client + conn.set_connect_state() + self.assertRaises(WantWriteError, conn.do_handshake) + + # XXX want_read + + + +class ConnectionGetCipherListTests(TestCase): + """ + Tests for :py:obj:`Connection.get_cipher_list`. + """ + def test_wrong_args(self): + """ + :py:obj:`Connection.get_cipher_list` raises :py:obj:`TypeError` if called with any + arguments. + """ + connection = Connection(Context(TLSv1_METHOD), None) + self.assertRaises(TypeError, connection.get_cipher_list, None) + + + def test_result(self): + """ + :py:obj:`Connection.get_cipher_list` returns a :py:obj:`list` of + :py:obj:`bytes` giving the names of the ciphers which might be used. + """ + connection = Connection(Context(TLSv1_METHOD), None) + ciphers = connection.get_cipher_list() + self.assertTrue(isinstance(ciphers, list)) + for cipher in ciphers: + self.assertTrue(isinstance(cipher, str)) + + + +class ConnectionSendTests(TestCase, _LoopbackMixin): + """ + Tests for :py:obj:`Connection.send` + """ + def test_wrong_args(self): + """ + When called with arguments other than string argument for its first + parameter or more than two arguments, :py:obj:`Connection.send` raises + :py:obj:`TypeError`. + """ + connection = Connection(Context(TLSv1_METHOD), None) + self.assertRaises(TypeError, connection.send) + self.assertRaises(TypeError, connection.send, object()) + self.assertRaises(TypeError, connection.send, "foo", object(), "bar") + + + def test_short_bytes(self): + """ + When passed a short byte string, :py:obj:`Connection.send` transmits all of it + and returns the number of bytes sent. 
+ """ + server, client = self._loopback() + count = server.send(b('xy')) + self.assertEquals(count, 2) + self.assertEquals(client.recv(2), b('xy')) + + try: + memoryview + except NameError: + "cannot test sending memoryview without memoryview" + else: + def test_short_memoryview(self): + """ + When passed a memoryview onto a small number of bytes, + :py:obj:`Connection.send` transmits all of them and returns the number of + bytes sent. + """ + server, client = self._loopback() + count = server.send(memoryview(b('xy'))) + self.assertEquals(count, 2) + self.assertEquals(client.recv(2), b('xy')) + + + +class ConnectionSendallTests(TestCase, _LoopbackMixin): + """ + Tests for :py:obj:`Connection.sendall`. + """ + def test_wrong_args(self): + """ + When called with arguments other than a string argument for its first + parameter or with more than two arguments, :py:obj:`Connection.sendall` + raises :py:obj:`TypeError`. + """ + connection = Connection(Context(TLSv1_METHOD), None) + self.assertRaises(TypeError, connection.sendall) + self.assertRaises(TypeError, connection.sendall, object()) + self.assertRaises( + TypeError, connection.sendall, "foo", object(), "bar") + + + def test_short(self): + """ + :py:obj:`Connection.sendall` transmits all of the bytes in the string passed to + it. + """ + server, client = self._loopback() + server.sendall(b('x')) + self.assertEquals(client.recv(1), b('x')) + + + try: + memoryview + except NameError: + "cannot test sending memoryview without memoryview" + else: + def test_short_memoryview(self): + """ + When passed a memoryview onto a small number of bytes, + :py:obj:`Connection.sendall` transmits all of them. + """ + server, client = self._loopback() + server.sendall(memoryview(b('x'))) + self.assertEquals(client.recv(1), b('x')) + + + def test_long(self): + """ + :py:obj:`Connection.sendall` transmits all of the bytes in the string passed to + it even if this requires multiple calls of an underlying write function. 
+ """ + server, client = self._loopback() + # Should be enough, underlying SSL_write should only do 16k at a time. + # On Windows, after 32k of bytes the write will block (forever - because + # no one is yet reading). + message = b('x') * (1024 * 32 - 1) + b('y') + server.sendall(message) + accum = [] + received = 0 + while received < len(message): + data = client.recv(1024) + accum.append(data) + received += len(data) + self.assertEquals(message, b('').join(accum)) + + + def test_closed(self): + """ + If the underlying socket is closed, :py:obj:`Connection.sendall` propagates the + write error from the low level write call. + """ + server, client = self._loopback() + server.sock_shutdown(2) + exc = self.assertRaises(SysCallError, server.sendall, b"hello, world") + if platform == "win32": + self.assertEqual(exc.args[0], ESHUTDOWN) + else: + self.assertEqual(exc.args[0], EPIPE) + + + +class ConnectionRenegotiateTests(TestCase, _LoopbackMixin): + """ + Tests for SSL renegotiation APIs. + """ + def test_renegotiate_wrong_args(self): + """ + :py:obj:`Connection.renegotiate` raises :py:obj:`TypeError` if called with any + arguments. + """ + connection = Connection(Context(TLSv1_METHOD), None) + self.assertRaises(TypeError, connection.renegotiate, None) + + + def test_total_renegotiations_wrong_args(self): + """ + :py:obj:`Connection.total_renegotiations` raises :py:obj:`TypeError` if called with + any arguments. + """ + connection = Connection(Context(TLSv1_METHOD), None) + self.assertRaises(TypeError, connection.total_renegotiations, None) + + + def test_total_renegotiations(self): + """ + :py:obj:`Connection.total_renegotiations` returns :py:obj:`0` before any + renegotiations have happened. 
+ """ + connection = Connection(Context(TLSv1_METHOD), None) + self.assertEquals(connection.total_renegotiations(), 0) + + +# def test_renegotiate(self): +# """ +# """ +# server, client = self._loopback() + +# server.send("hello world") +# self.assertEquals(client.recv(len("hello world")), "hello world") + +# self.assertEquals(server.total_renegotiations(), 0) +# self.assertTrue(server.renegotiate()) + +# server.setblocking(False) +# client.setblocking(False) +# while server.renegotiate_pending(): +# client.do_handshake() +# server.do_handshake() + +# self.assertEquals(server.total_renegotiations(), 1) + + + + +class ErrorTests(TestCase): + """ + Unit tests for :py:obj:`OpenSSL.SSL.Error`. + """ + def test_type(self): + """ + :py:obj:`Error` is an exception type. + """ + self.assertTrue(issubclass(Error, Exception)) + self.assertEqual(Error.__name__, 'Error') + + + +class ConstantsTests(TestCase): + """ + Tests for the values of constants exposed in :py:obj:`OpenSSL.SSL`. + + These are values defined by OpenSSL intended only to be used as flags to + OpenSSL APIs. The only assertions it seems can be made about them is + their values. + """ + # unittest.TestCase has no skip mechanism + if OP_NO_QUERY_MTU is not None: + def test_op_no_query_mtu(self): + """ + The value of :py:obj:`OpenSSL.SSL.OP_NO_QUERY_MTU` is 0x1000, the value of + :py:const:`SSL_OP_NO_QUERY_MTU` defined by :file:`openssl/ssl.h`. + """ + self.assertEqual(OP_NO_QUERY_MTU, 0x1000) + else: + "OP_NO_QUERY_MTU unavailable - OpenSSL version may be too old" + + + if OP_COOKIE_EXCHANGE is not None: + def test_op_cookie_exchange(self): + """ + The value of :py:obj:`OpenSSL.SSL.OP_COOKIE_EXCHANGE` is 0x2000, the value + of :py:const:`SSL_OP_COOKIE_EXCHANGE` defined by :file:`openssl/ssl.h`. 
+ """ + self.assertEqual(OP_COOKIE_EXCHANGE, 0x2000) + else: + "OP_COOKIE_EXCHANGE unavailable - OpenSSL version may be too old" + + + if OP_NO_TICKET is not None: + def test_op_no_ticket(self): + """ + The value of :py:obj:`OpenSSL.SSL.OP_NO_TICKET` is 0x4000, the value of + :py:const:`SSL_OP_NO_TICKET` defined by :file:`openssl/ssl.h`. + """ + self.assertEqual(OP_NO_TICKET, 0x4000) + else: + "OP_NO_TICKET unavailable - OpenSSL version may be too old" + + + if OP_NO_COMPRESSION is not None: + def test_op_no_compression(self): + """ + The value of :py:obj:`OpenSSL.SSL.OP_NO_COMPRESSION` is 0x20000, the value + of :py:const:`SSL_OP_NO_COMPRESSION` defined by :file:`openssl/ssl.h`. + """ + self.assertEqual(OP_NO_COMPRESSION, 0x20000) + else: + "OP_NO_COMPRESSION unavailable - OpenSSL version may be too old" + + + def test_sess_cache_off(self): + """ + The value of :py:obj:`OpenSSL.SSL.SESS_CACHE_OFF` 0x0, the value of + :py:obj:`SSL_SESS_CACHE_OFF` defined by ``openssl/ssl.h``. + """ + self.assertEqual(0x0, SESS_CACHE_OFF) + + + def test_sess_cache_client(self): + """ + The value of :py:obj:`OpenSSL.SSL.SESS_CACHE_CLIENT` 0x1, the value of + :py:obj:`SSL_SESS_CACHE_CLIENT` defined by ``openssl/ssl.h``. + """ + self.assertEqual(0x1, SESS_CACHE_CLIENT) + + + def test_sess_cache_server(self): + """ + The value of :py:obj:`OpenSSL.SSL.SESS_CACHE_SERVER` 0x2, the value of + :py:obj:`SSL_SESS_CACHE_SERVER` defined by ``openssl/ssl.h``. + """ + self.assertEqual(0x2, SESS_CACHE_SERVER) + + + def test_sess_cache_both(self): + """ + The value of :py:obj:`OpenSSL.SSL.SESS_CACHE_BOTH` 0x3, the value of + :py:obj:`SSL_SESS_CACHE_BOTH` defined by ``openssl/ssl.h``. + """ + self.assertEqual(0x3, SESS_CACHE_BOTH) + + + def test_sess_cache_no_auto_clear(self): + """ + The value of :py:obj:`OpenSSL.SSL.SESS_CACHE_NO_AUTO_CLEAR` 0x80, the + value of :py:obj:`SSL_SESS_CACHE_NO_AUTO_CLEAR` defined by + ``openssl/ssl.h``. 
+ """ + self.assertEqual(0x80, SESS_CACHE_NO_AUTO_CLEAR) + + + def test_sess_cache_no_internal_lookup(self): + """ + The value of :py:obj:`OpenSSL.SSL.SESS_CACHE_NO_INTERNAL_LOOKUP` 0x100, + the value of :py:obj:`SSL_SESS_CACHE_NO_INTERNAL_LOOKUP` defined by + ``openssl/ssl.h``. + """ + self.assertEqual(0x100, SESS_CACHE_NO_INTERNAL_LOOKUP) + + + def test_sess_cache_no_internal_store(self): + """ + The value of :py:obj:`OpenSSL.SSL.SESS_CACHE_NO_INTERNAL_STORE` 0x200, + the value of :py:obj:`SSL_SESS_CACHE_NO_INTERNAL_STORE` defined by + ``openssl/ssl.h``. + """ + self.assertEqual(0x200, SESS_CACHE_NO_INTERNAL_STORE) + + + def test_sess_cache_no_internal(self): + """ + The value of :py:obj:`OpenSSL.SSL.SESS_CACHE_NO_INTERNAL` 0x300, the + value of :py:obj:`SSL_SESS_CACHE_NO_INTERNAL` defined by + ``openssl/ssl.h``. + """ + self.assertEqual(0x300, SESS_CACHE_NO_INTERNAL) + + + +class MemoryBIOTests(TestCase, _LoopbackMixin): + """ + Tests for :py:obj:`OpenSSL.SSL.Connection` using a memory BIO. + """ + def _server(self, sock): + """ + Create a new server-side SSL :py:obj:`Connection` object wrapped around + :py:obj:`sock`. + """ + # Create the server side Connection. This is mostly setup boilerplate + # - use TLSv1, use a particular certificate, etc. + server_ctx = Context(TLSv1_METHOD) + server_ctx.set_options(OP_NO_SSLv2 | OP_NO_SSLv3 | OP_SINGLE_DH_USE ) + server_ctx.set_verify(VERIFY_PEER|VERIFY_FAIL_IF_NO_PEER_CERT|VERIFY_CLIENT_ONCE, verify_cb) + server_store = server_ctx.get_cert_store() + server_ctx.use_privatekey(load_privatekey(FILETYPE_PEM, server_key_pem)) + server_ctx.use_certificate(load_certificate(FILETYPE_PEM, server_cert_pem)) + server_ctx.check_privatekey() + server_store.add_cert(load_certificate(FILETYPE_PEM, root_cert_pem)) + # Here the Connection is actually created. If None is passed as the 2nd + # parameter, it indicates a memory BIO should be created. 
+ server_conn = Connection(server_ctx, sock) + server_conn.set_accept_state() + return server_conn + + + def _client(self, sock): + """ + Create a new client-side SSL :py:obj:`Connection` object wrapped around + :py:obj:`sock`. + """ + # Now create the client side Connection. Similar boilerplate to the + # above. + client_ctx = Context(TLSv1_METHOD) + client_ctx.set_options(OP_NO_SSLv2 | OP_NO_SSLv3 | OP_SINGLE_DH_USE ) + client_ctx.set_verify(VERIFY_PEER|VERIFY_FAIL_IF_NO_PEER_CERT|VERIFY_CLIENT_ONCE, verify_cb) + client_store = client_ctx.get_cert_store() + client_ctx.use_privatekey(load_privatekey(FILETYPE_PEM, client_key_pem)) + client_ctx.use_certificate(load_certificate(FILETYPE_PEM, client_cert_pem)) + client_ctx.check_privatekey() + client_store.add_cert(load_certificate(FILETYPE_PEM, root_cert_pem)) + client_conn = Connection(client_ctx, sock) + client_conn.set_connect_state() + return client_conn + + + def test_memoryConnect(self): + """ + Two :py:obj:`Connection`s which use memory BIOs can be manually connected by + reading from the output of each and writing those bytes to the input of + the other and in this way establish a connection and exchange + application-level bytes with each other. + """ + server_conn = self._server(None) + client_conn = self._client(None) + + # There should be no key or nonces yet. + self.assertIdentical(server_conn.master_key(), None) + self.assertIdentical(server_conn.client_random(), None) + self.assertIdentical(server_conn.server_random(), None) + + # First, the handshake needs to happen. We'll deliver bytes back and + # forth between the client and server until neither of them feels like + # speaking any more. + self.assertIdentical( + self._interactInMemory(client_conn, server_conn), None) + + # Now that the handshake is done, there should be a key and nonces. 
+ self.assertNotIdentical(server_conn.master_key(), None) + self.assertNotIdentical(server_conn.client_random(), None) + self.assertNotIdentical(server_conn.server_random(), None) + self.assertEquals(server_conn.client_random(), client_conn.client_random()) + self.assertEquals(server_conn.server_random(), client_conn.server_random()) + self.assertNotEquals(server_conn.client_random(), server_conn.server_random()) + self.assertNotEquals(client_conn.client_random(), client_conn.server_random()) + + # Here are the bytes we'll try to send. + important_message = b('One if by land, two if by sea.') + + server_conn.write(important_message) + self.assertEquals( + self._interactInMemory(client_conn, server_conn), + (client_conn, important_message)) + + client_conn.write(important_message[::-1]) + self.assertEquals( + self._interactInMemory(client_conn, server_conn), + (server_conn, important_message[::-1])) + + + def test_socketConnect(self): + """ + Just like :py:obj:`test_memoryConnect` but with an actual socket. + + This is primarily to rule out the memory BIO code as the source of + any problems encountered while passing data over a :py:obj:`Connection` (if + this test fails, there must be a problem outside the memory BIO + code, as no memory BIO is involved here). Even though this isn't a + memory BIO test, it's convenient to have it here. + """ + server_conn, client_conn = self._loopback() + + important_message = b("Help me Obi Wan Kenobi, you're my only hope.") + client_conn.send(important_message) + msg = server_conn.recv(1024) + self.assertEqual(msg, important_message) + + # Again in the other direction, just for fun. 
+ important_message = important_message[::-1] + server_conn.send(important_message) + msg = client_conn.recv(1024) + self.assertEqual(msg, important_message) + + + def test_socketOverridesMemory(self): + """ + Test that :py:obj:`OpenSSL.SSL.bio_read` and :py:obj:`OpenSSL.SSL.bio_write` don't + work on :py:obj:`OpenSSL.SSL.Connection`() that use sockets. + """ + context = Context(SSLv3_METHOD) + client = socket() + clientSSL = Connection(context, client) + self.assertRaises( TypeError, clientSSL.bio_read, 100) + self.assertRaises( TypeError, clientSSL.bio_write, "foo") + self.assertRaises( TypeError, clientSSL.bio_shutdown ) + + + def test_outgoingOverflow(self): + """ + If more bytes than can be written to the memory BIO are passed to + :py:obj:`Connection.send` at once, the number of bytes which were written is + returned and that many bytes from the beginning of the input can be + read from the other end of the connection. + """ + server = self._server(None) + client = self._client(None) + + self._interactInMemory(client, server) + + size = 2 ** 15 + sent = client.send(b"x" * size) + # Sanity check. We're trying to test what happens when the entire + # input can't be sent. If the entire input was sent, this test is + # meaningless. + self.assertTrue(sent < size) + + receiver, received = self._interactInMemory(client, server) + self.assertIdentical(receiver, server) + + # We can rely on all of these bytes being received at once because + # _loopback passes 2 ** 16 to recv - more than 2 ** 15. + self.assertEquals(len(received), sent) + + + def test_shutdown(self): + """ + :py:obj:`Connection.bio_shutdown` signals the end of the data stream from + which the :py:obj:`Connection` reads. + """ + server = self._server(None) + server.bio_shutdown() + e = self.assertRaises(Error, server.recv, 1024) + # We don't want WantReadError or ZeroReturnError or anything - it's a + # handshake failure. 
+ self.assertEquals(e.__class__, Error) + + + def test_unexpectedEndOfFile(self): + """ + If the connection is lost before an orderly SSL shutdown occurs, + :py:obj:`OpenSSL.SSL.SysCallError` is raised with a message of + "Unexpected EOF". + """ + server_conn, client_conn = self._loopback() + client_conn.sock_shutdown(SHUT_RDWR) + exc = self.assertRaises(SysCallError, server_conn.recv, 1024) + self.assertEqual(exc.args, (-1, "Unexpected EOF")) + + + def _check_client_ca_list(self, func): + """ + Verify the return value of the :py:obj:`get_client_ca_list` method for server and client connections. + + :param func: A function which will be called with the server context + before the client and server are connected to each other. This + function should specify a list of CAs for the server to send to the + client and return that same list. The list will be used to verify + that :py:obj:`get_client_ca_list` returns the proper value at various + times. + """ + server = self._server(None) + client = self._client(None) + self.assertEqual(client.get_client_ca_list(), []) + self.assertEqual(server.get_client_ca_list(), []) + ctx = server.get_context() + expected = func(ctx) + self.assertEqual(client.get_client_ca_list(), []) + self.assertEqual(server.get_client_ca_list(), expected) + self._interactInMemory(client, server) + self.assertEqual(client.get_client_ca_list(), expected) + self.assertEqual(server.get_client_ca_list(), expected) + + + def test_set_client_ca_list_errors(self): + """ + :py:obj:`Context.set_client_ca_list` raises a :py:obj:`TypeError` if called with a + non-list or a list that contains objects other than X509Names. 
+ """ + ctx = Context(TLSv1_METHOD) + self.assertRaises(TypeError, ctx.set_client_ca_list, "spam") + self.assertRaises(TypeError, ctx.set_client_ca_list, ["spam"]) + self.assertIdentical(ctx.set_client_ca_list([]), None) + + + def test_set_empty_ca_list(self): + """ + If passed an empty list, :py:obj:`Context.set_client_ca_list` configures the + context to send no CA names to the client and, on both the server and + client sides, :py:obj:`Connection.get_client_ca_list` returns an empty list + after the connection is set up. + """ + def no_ca(ctx): + ctx.set_client_ca_list([]) + return [] + self._check_client_ca_list(no_ca) + + + def test_set_one_ca_list(self): + """ + If passed a list containing a single X509Name, + :py:obj:`Context.set_client_ca_list` configures the context to send that CA + name to the client and, on both the server and client sides, + :py:obj:`Connection.get_client_ca_list` returns a list containing that + X509Name after the connection is set up. + """ + cacert = load_certificate(FILETYPE_PEM, root_cert_pem) + cadesc = cacert.get_subject() + def single_ca(ctx): + ctx.set_client_ca_list([cadesc]) + return [cadesc] + self._check_client_ca_list(single_ca) + + + def test_set_multiple_ca_list(self): + """ + If passed a list containing multiple X509Name objects, + :py:obj:`Context.set_client_ca_list` configures the context to send those CA + names to the client and, on both the server and client sides, + :py:obj:`Connection.get_client_ca_list` returns a list containing those + X509Names after the connection is set up. 
+ """ + secert = load_certificate(FILETYPE_PEM, server_cert_pem) + clcert = load_certificate(FILETYPE_PEM, server_cert_pem) + + sedesc = secert.get_subject() + cldesc = clcert.get_subject() + + def multiple_ca(ctx): + L = [sedesc, cldesc] + ctx.set_client_ca_list(L) + return L + self._check_client_ca_list(multiple_ca) + + + def test_reset_ca_list(self): + """ + If called multiple times, only the X509Names passed to the final call + of :py:obj:`Context.set_client_ca_list` are used to configure the CA names + sent to the client. + """ + cacert = load_certificate(FILETYPE_PEM, root_cert_pem) + secert = load_certificate(FILETYPE_PEM, server_cert_pem) + clcert = load_certificate(FILETYPE_PEM, server_cert_pem) + + cadesc = cacert.get_subject() + sedesc = secert.get_subject() + cldesc = clcert.get_subject() + + def changed_ca(ctx): + ctx.set_client_ca_list([sedesc, cldesc]) + ctx.set_client_ca_list([cadesc]) + return [cadesc] + self._check_client_ca_list(changed_ca) + + + def test_mutated_ca_list(self): + """ + If the list passed to :py:obj:`Context.set_client_ca_list` is mutated + afterwards, this does not affect the list of CA names sent to the + client. + """ + cacert = load_certificate(FILETYPE_PEM, root_cert_pem) + secert = load_certificate(FILETYPE_PEM, server_cert_pem) + + cadesc = cacert.get_subject() + sedesc = secert.get_subject() + + def mutated_ca(ctx): + L = [cadesc] + ctx.set_client_ca_list([cadesc]) + L.append(sedesc) + return [cadesc] + self._check_client_ca_list(mutated_ca) + + + def test_add_client_ca_errors(self): + """ + :py:obj:`Context.add_client_ca` raises :py:obj:`TypeError` if called with a non-X509 + object or with a number of arguments other than one. 
+ """ + ctx = Context(TLSv1_METHOD) + cacert = load_certificate(FILETYPE_PEM, root_cert_pem) + self.assertRaises(TypeError, ctx.add_client_ca) + self.assertRaises(TypeError, ctx.add_client_ca, "spam") + self.assertRaises(TypeError, ctx.add_client_ca, cacert, cacert) + + + def test_one_add_client_ca(self): + """ + A certificate's subject can be added as a CA to be sent to the client + with :py:obj:`Context.add_client_ca`. + """ + cacert = load_certificate(FILETYPE_PEM, root_cert_pem) + cadesc = cacert.get_subject() + def single_ca(ctx): + ctx.add_client_ca(cacert) + return [cadesc] + self._check_client_ca_list(single_ca) + + + def test_multiple_add_client_ca(self): + """ + Multiple CA names can be sent to the client by calling + :py:obj:`Context.add_client_ca` with multiple X509 objects. + """ + cacert = load_certificate(FILETYPE_PEM, root_cert_pem) + secert = load_certificate(FILETYPE_PEM, server_cert_pem) + + cadesc = cacert.get_subject() + sedesc = secert.get_subject() + + def multiple_ca(ctx): + ctx.add_client_ca(cacert) + ctx.add_client_ca(secert) + return [cadesc, sedesc] + self._check_client_ca_list(multiple_ca) + + + def test_set_and_add_client_ca(self): + """ + A call to :py:obj:`Context.set_client_ca_list` followed by a call to + :py:obj:`Context.add_client_ca` results in using the CA names from the first + call and the CA name from the second call. 
+ """ + cacert = load_certificate(FILETYPE_PEM, root_cert_pem) + secert = load_certificate(FILETYPE_PEM, server_cert_pem) + clcert = load_certificate(FILETYPE_PEM, server_cert_pem) + + cadesc = cacert.get_subject() + sedesc = secert.get_subject() + cldesc = clcert.get_subject() + + def mixed_set_add_ca(ctx): + ctx.set_client_ca_list([cadesc, sedesc]) + ctx.add_client_ca(clcert) + return [cadesc, sedesc, cldesc] + self._check_client_ca_list(mixed_set_add_ca) + + + def test_set_after_add_client_ca(self): + """ + A call to :py:obj:`Context.set_client_ca_list` after a call to + :py:obj:`Context.add_client_ca` replaces the CA name specified by the former + call with the names specified by the latter cal. + """ + cacert = load_certificate(FILETYPE_PEM, root_cert_pem) + secert = load_certificate(FILETYPE_PEM, server_cert_pem) + clcert = load_certificate(FILETYPE_PEM, server_cert_pem) + + cadesc = cacert.get_subject() + sedesc = secert.get_subject() + + def set_replaces_add_ca(ctx): + ctx.add_client_ca(clcert) + ctx.set_client_ca_list([cadesc]) + ctx.add_client_ca(secert) + return [cadesc, sedesc] + self._check_client_ca_list(set_replaces_add_ca) + + + +class ConnectionBIOTests(TestCase): + """ + Tests for :py:obj:`Connection.bio_read` and :py:obj:`Connection.bio_write`. + """ + def test_wantReadError(self): + """ + :py:obj:`Connection.bio_read` raises :py:obj:`OpenSSL.SSL.WantReadError` + if there are no bytes available to be read from the BIO. + """ + ctx = Context(TLSv1_METHOD) + conn = Connection(ctx, None) + self.assertRaises(WantReadError, conn.bio_read, 1024) + + + def test_buffer_size(self): + """ + :py:obj:`Connection.bio_read` accepts an integer giving the maximum + number of bytes to read and return. 
+ """ + ctx = Context(TLSv1_METHOD) + conn = Connection(ctx, None) + conn.set_connect_state() + try: + conn.do_handshake() + except WantReadError: + pass + data = conn.bio_read(2) + self.assertEqual(2, len(data)) + + + if not PY3: + def test_buffer_size_long(self): + """ + On Python 2 :py:obj:`Connection.bio_read` accepts values of type + :py:obj:`long` as well as :py:obj:`int`. + """ + ctx = Context(TLSv1_METHOD) + conn = Connection(ctx, None) + conn.set_connect_state() + try: + conn.do_handshake() + except WantReadError: + pass + data = conn.bio_read(long(2)) + self.assertEqual(2, len(data)) + + + + +class InfoConstantTests(TestCase): + """ + Tests for assorted constants exposed for use in info callbacks. + """ + def test_integers(self): + """ + All of the info constants are integers. + + This is a very weak test. It would be nice to have one that actually + verifies that as certain info events happen, the value passed to the + info callback matches up with the constant exposed by OpenSSL.SSL. + """ + for const in [ + SSL_ST_CONNECT, SSL_ST_ACCEPT, SSL_ST_MASK, SSL_ST_INIT, + SSL_ST_BEFORE, SSL_ST_OK, SSL_ST_RENEGOTIATE, + SSL_CB_LOOP, SSL_CB_EXIT, SSL_CB_READ, SSL_CB_WRITE, SSL_CB_ALERT, + SSL_CB_READ_ALERT, SSL_CB_WRITE_ALERT, SSL_CB_ACCEPT_LOOP, + SSL_CB_ACCEPT_EXIT, SSL_CB_CONNECT_LOOP, SSL_CB_CONNECT_EXIT, + SSL_CB_HANDSHAKE_START, SSL_CB_HANDSHAKE_DONE]: + + self.assertTrue(isinstance(const, int)) + + +if __name__ == '__main__': + main() diff --git a/Linux_i686/lib/python2.7/site-packages/OpenSSL/test/util.py b/Linux_i686/lib/python2.7/site-packages/OpenSSL/test/util.py new file mode 100644 index 0000000..4e4d812 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/OpenSSL/test/util.py @@ -0,0 +1,302 @@ +# Copyright (C) Jean-Paul Calderone +# Copyright (C) Twisted Matrix Laboratories. +# See LICENSE for details. + +""" +Helpers for the OpenSSL test suite, largely copied from +U{Twisted}. 
+""" + +import shutil +import traceback +import os, os.path +from tempfile import mktemp +from unittest import TestCase +import sys + +from OpenSSL._util import exception_from_error_queue +from OpenSSL.crypto import Error + +try: + import memdbg +except Exception: + class _memdbg(object): heap = None + memdbg = _memdbg() + +from OpenSSL._util import ffi, lib, byte_string as b + +class TestCase(TestCase): + """ + :py:class:`TestCase` adds useful testing functionality beyond what is available + from the standard library :py:class:`unittest.TestCase`. + """ + def run(self, result): + run = super(TestCase, self).run + if memdbg.heap is None: + return run(result) + + # Run the test as usual + before = set(memdbg.heap) + run(result) + + # Clean up some long-lived allocations so they won't be reported as + # memory leaks. + lib.CRYPTO_cleanup_all_ex_data() + lib.ERR_remove_thread_state(ffi.NULL) + after = set(memdbg.heap) + + if not after - before: + # No leaks, fast succeed + return + + if result.wasSuccessful(): + # If it passed, run it again with memory debugging + before = set(memdbg.heap) + run(result) + + # Clean up some long-lived allocations so they won't be reported as + # memory leaks. + lib.CRYPTO_cleanup_all_ex_data() + lib.ERR_remove_thread_state(ffi.NULL) + + after = set(memdbg.heap) + + self._reportLeaks(after - before, result) + + + def _reportLeaks(self, leaks, result): + def format_leak(p): + stacks = memdbg.heap[p] + # Eventually look at multiple stacks for the realloc() case. For + # now just look at the original allocation location. 
+ (size, python_stack, c_stack) = stacks[0] + + stack = traceback.format_list(python_stack)[:-1] + + # c_stack looks something like this (interesting parts indicated + # with inserted arrows not part of the data): + # + # /home/exarkun/Projects/pyOpenSSL/branches/use-opentls/__pycache__/_cffi__x89095113xb9185b9b.so(+0x12cf) [0x7fe2e20582cf] + # /home/exarkun/Projects/cpython/2.7/python(PyCFunction_Call+0x8b) [0x56265a] + # /home/exarkun/Projects/cpython/2.7/python() [0x4d5f52] + # /home/exarkun/Projects/cpython/2.7/python(PyEval_EvalFrameEx+0x753b) [0x4d0e1e] + # /home/exarkun/Projects/cpython/2.7/python() [0x4d6419] + # /home/exarkun/Projects/cpython/2.7/python() [0x4d6129] + # /home/exarkun/Projects/cpython/2.7/python(PyEval_EvalFrameEx+0x753b) [0x4d0e1e] + # /home/exarkun/Projects/cpython/2.7/python(PyEval_EvalCodeEx+0x1043) [0x4d3726] + # /home/exarkun/Projects/cpython/2.7/python() [0x55fd51] + # /home/exarkun/Projects/cpython/2.7/python(PyObject_Call+0x7e) [0x420ee6] + # /home/exarkun/Projects/cpython/2.7/python(PyEval_CallObjectWithKeywords+0x158) [0x4d56ec] + # /home/exarkun/.local/lib/python2.7/site-packages/cffi-0.5-py2.7-linux-x86_64.egg/_cffi_backend.so(+0xe96e) [0x7fe2e38be96e] + # /usr/lib/x86_64-linux-gnu/libffi.so.6(ffi_closure_unix64_inner+0x1b9) [0x7fe2e36ad819] + # /usr/lib/x86_64-linux-gnu/libffi.so.6(ffi_closure_unix64+0x46) [0x7fe2e36adb7c] + # /lib/x86_64-linux-gnu/libcrypto.so.1.0.0(CRYPTO_malloc+0x64) [0x7fe2e1cef784] <------ end interesting + # /lib/x86_64-linux-gnu/libcrypto.so.1.0.0(lh_insert+0x16b) [0x7fe2e1d6a24b] . + # /lib/x86_64-linux-gnu/libcrypto.so.1.0.0(+0x61c18) [0x7fe2e1cf0c18] . + # /lib/x86_64-linux-gnu/libcrypto.so.1.0.0(+0x625ec) [0x7fe2e1cf15ec] . + # /lib/x86_64-linux-gnu/libcrypto.so.1.0.0(DSA_new_method+0xe6) [0x7fe2e1d524d6] . 
+ # /lib/x86_64-linux-gnu/libcrypto.so.1.0.0(DSA_generate_parameters+0x3a) [0x7fe2e1d5364a] <------ begin interesting + # /home/exarkun/Projects/opentls/trunk/tls/c/__pycache__/_cffi__x305d4698xb539baaa.so(+0x1f397) [0x7fe2df84d397] + # /home/exarkun/Projects/cpython/2.7/python(PyCFunction_Call+0x8b) [0x56265a] + # /home/exarkun/Projects/cpython/2.7/python() [0x4d5f52] + # /home/exarkun/Projects/cpython/2.7/python(PyEval_EvalFrameEx+0x753b) [0x4d0e1e] + # /home/exarkun/Projects/cpython/2.7/python() [0x4d6419] + # ... + # + # Notice the stack is upside down compared to a Python traceback. + # Identify the start and end of interesting bits and stuff it into the stack we report. + + saved = list(c_stack) + + # Figure the first interesting frame will be after a the cffi-compiled module + while c_stack and '/__pycache__/_cffi__' not in c_stack[-1]: + c_stack.pop() + + # Figure the last interesting frame will always be CRYPTO_malloc, + # since that's where we hooked in to things. + while c_stack and 'CRYPTO_malloc' not in c_stack[0] and 'CRYPTO_realloc' not in c_stack[0]: + c_stack.pop(0) + + if c_stack: + c_stack.reverse() + else: + c_stack = saved[::-1] + stack.extend([frame + "\n" for frame in c_stack]) + + stack.insert(0, "Leaked (%s) at:\n") + return "".join(stack) + + if leaks: + unique_leaks = {} + for p in leaks: + size = memdbg.heap[p][-1][0] + new_leak = format_leak(p) + if new_leak not in unique_leaks: + unique_leaks[new_leak] = [(size, p)] + else: + unique_leaks[new_leak].append((size, p)) + memdbg.free(p) + + for (stack, allocs) in unique_leaks.iteritems(): + allocs_accum = [] + for (size, pointer) in allocs: + + addr = int(ffi.cast('uintptr_t', pointer)) + allocs_accum.append("%d@0x%x" % (size, addr)) + allocs_report = ", ".join(sorted(allocs_accum)) + + result.addError( + self, + (None, Exception(stack % (allocs_report,)), None)) + + + def tearDown(self): + """ + Clean up any files or directories created using :py:meth:`TestCase.mktemp`. 
+ Subclasses must invoke this method if they override it or the + cleanup will not occur. + """ + if False and self._temporaryFiles is not None: + for temp in self._temporaryFiles: + if os.path.isdir(temp): + shutil.rmtree(temp) + elif os.path.exists(temp): + os.unlink(temp) + try: + exception_from_error_queue(Error) + except Error: + e = sys.exc_info()[1] + if e.args != ([],): + self.fail("Left over errors in OpenSSL error queue: " + repr(e)) + + + def assertIsInstance(self, instance, classOrTuple, message=None): + """ + Fail if C{instance} is not an instance of the given class or of + one of the given classes. + + @param instance: the object to test the type (first argument of the + C{isinstance} call). + @type instance: any. + @param classOrTuple: the class or classes to test against (second + argument of the C{isinstance} call). + @type classOrTuple: class, type, or tuple. + + @param message: Custom text to include in the exception text if the + assertion fails. + """ + if not isinstance(instance, classOrTuple): + if message is None: + suffix = "" + else: + suffix = ": " + message + self.fail("%r is not an instance of %s%s" % ( + instance, classOrTuple, suffix)) + + + def failUnlessIn(self, containee, container, msg=None): + """ + Fail the test if :py:data:`containee` is not found in :py:data:`container`. + + :param containee: the value that should be in :py:class:`container` + :param container: a sequence type, or in the case of a mapping type, + will follow semantics of 'if key in dict.keys()' + :param msg: if msg is None, then the failure message will be + '%r not in %r' % (first, second) + """ + if containee not in container: + raise self.failureException(msg or "%r not in %r" + % (containee, container)) + return containee + assertIn = failUnlessIn + + def failUnlessIdentical(self, first, second, msg=None): + """ + Fail the test if :py:data:`first` is not :py:data:`second`. This is an + obect-identity-equality test, not an object equality + (i.e. 
:py:func:`__eq__`) test. + + :param msg: if msg is None, then the failure message will be + '%r is not %r' % (first, second) + """ + if first is not second: + raise self.failureException(msg or '%r is not %r' % (first, second)) + return first + assertIdentical = failUnlessIdentical + + + def failIfIdentical(self, first, second, msg=None): + """ + Fail the test if :py:data:`first` is :py:data:`second`. This is an + obect-identity-equality test, not an object equality + (i.e. :py:func:`__eq__`) test. + + :param msg: if msg is None, then the failure message will be + '%r is %r' % (first, second) + """ + if first is second: + raise self.failureException(msg or '%r is %r' % (first, second)) + return first + assertNotIdentical = failIfIdentical + + + def failUnlessRaises(self, exception, f, *args, **kwargs): + """ + Fail the test unless calling the function :py:data:`f` with the given + :py:data:`args` and :py:data:`kwargs` raises :py:data:`exception`. The + failure will report the traceback and call stack of the unexpected + exception. + + :param exception: exception type that is to be expected + :param f: the function to call + + :return: The raised exception instance, if it is of the given type. + :raise self.failureException: Raised if the function call does + not raise an exception or if it raises an exception of a + different type. + """ + try: + result = f(*args, **kwargs) + except exception: + inst = sys.exc_info()[1] + return inst + except: + raise self.failureException('%s raised instead of %s' + % (sys.exc_info()[0], + exception.__name__, + )) + else: + raise self.failureException('%s not raised (%r returned)' + % (exception.__name__, result)) + assertRaises = failUnlessRaises + + + _temporaryFiles = None + def mktemp(self): + """ + Pathetic substitute for twisted.trial.unittest.TestCase.mktemp. 
+ """ + if self._temporaryFiles is None: + self._temporaryFiles = [] + temp = b(mktemp(dir=".")) + self._temporaryFiles.append(temp) + return temp + + + # Other stuff + def assertConsistentType(self, theType, name, *constructionArgs): + """ + Perform various assertions about :py:data:`theType` to ensure that it is a + well-defined type. This is useful for extension types, where it's + pretty easy to do something wacky. If something about the type is + unusual, an exception will be raised. + + :param theType: The type object about which to make assertions. + :param name: A string giving the name of the type. + :param constructionArgs: Positional arguments to use with :py:data:`theType` to + create an instance of it. + """ + self.assertEqual(theType.__name__, name) + self.assertTrue(isinstance(theType, type)) + instance = theType(*constructionArgs) + self.assertIdentical(type(instance), theType) diff --git a/Linux_i686/lib/python2.7/site-packages/OpenSSL/tsafe.py b/Linux_i686/lib/python2.7/site-packages/OpenSSL/tsafe.py new file mode 100644 index 0000000..9d7ad2f --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/OpenSSL/tsafe.py @@ -0,0 +1,28 @@ +from OpenSSL import SSL +_ssl = SSL +del SSL + +import threading +_RLock = threading.RLock +del threading + +class Connection: + def __init__(self, *args): + self._ssl_conn = apply(_ssl.Connection, args) + self._lock = _RLock() + + for f in ('get_context', 'pending', 'send', 'write', 'recv', 'read', + 'renegotiate', 'bind', 'listen', 'connect', 'accept', + 'setblocking', 'fileno', 'shutdown', 'close', 'get_cipher_list', + 'getpeername', 'getsockname', 'getsockopt', 'setsockopt', + 'makefile', 'get_app_data', 'set_app_data', 'state_string', + 'sock_shutdown', 'get_peer_certificate', 'get_peer_cert_chain', 'want_read', + 'want_write', 'set_connect_state', 'set_accept_state', + 'connect_ex', 'sendall'): + exec("""def %s(self, *args): + self._lock.acquire() + try: + return self._ssl_conn.%s(*args) + finally: + 
self._lock.release()\n""" % (f, f)) + diff --git a/Linux_i686/lib/python2.7/site-packages/OpenSSL/version.py b/Linux_i686/lib/python2.7/site-packages/OpenSSL/version.py new file mode 100644 index 0000000..307dba0 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/OpenSSL/version.py @@ -0,0 +1,9 @@ +# Copyright (C) AB Strakt +# Copyright (C) Jean-Paul Calderone +# See LICENSE for details. + +""" +pyOpenSSL - A simple wrapper around the OpenSSL library +""" + +__version__ = '0.14' diff --git a/Linux_i686/lib/python2.7/site-packages/SQLAlchemy-0.9.4.egg-info/PKG-INFO b/Linux_i686/lib/python2.7/site-packages/SQLAlchemy-0.9.4.egg-info/PKG-INFO new file mode 100644 index 0000000..e1d882a --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/SQLAlchemy-0.9.4.egg-info/PKG-INFO @@ -0,0 +1,155 @@ +Metadata-Version: 1.1 +Name: SQLAlchemy +Version: 0.9.4 +Summary: Database Abstraction Library +Home-page: http://www.sqlalchemy.org +Author: Mike Bayer +Author-email: mike_mp@zzzcomputing.com +License: MIT License +Description: SQLAlchemy + ========== + + The Python SQL Toolkit and Object Relational Mapper + + Introduction + ------------- + + SQLAlchemy is the Python SQL toolkit and Object Relational Mapper + that gives application developers the full power and + flexibility of SQL. SQLAlchemy provides a full suite + of well known enterprise-level persistence patterns, + designed for efficient and high-performing database + access, adapted into a simple and Pythonic domain + language. + + Major SQLAlchemy features include: + + * An industrial strength ORM, built + from the core on the identity map, unit of work, + and data mapper patterns. These patterns + allow transparent persistence of objects + using a declarative configuration system. + Domain models + can be constructed and manipulated naturally, + and changes are synchronized with the + current transaction automatically. 
+ * A relationally-oriented query system, exposing + the full range of SQL's capabilities + explicitly, including joins, subqueries, + correlation, and most everything else, + in terms of the object model. + Writing queries with the ORM uses the same + techniques of relational composition you use + when writing SQL. While you can drop into + literal SQL at any time, it's virtually never + needed. + * A comprehensive and flexible system + of eager loading for related collections and objects. + Collections are cached within a session, + and can be loaded on individual access, all + at once using joins, or by query per collection + across the full result set. + * A Core SQL construction system and DBAPI + interaction layer. The SQLAlchemy Core is + separate from the ORM and is a full database + abstraction layer in its own right, and includes + an extensible Python-based SQL expression + language, schema metadata, connection pooling, + type coercion, and custom types. + * All primary and foreign key constraints are + assumed to be composite and natural. Surrogate + integer primary keys are of course still the + norm, but SQLAlchemy never assumes or hardcodes + to this model. + * Database introspection and generation. Database + schemas can be "reflected" in one step into + Python structures representing database metadata; + those same structures can then generate + CREATE statements right back out - all within + the Core, independent of the ORM. + + SQLAlchemy's philosophy: + + * SQL databases behave less and less like object + collections the more size and performance start to + matter; object collections behave less and less like + tables and rows the more abstraction starts to matter. + SQLAlchemy aims to accommodate both of these + principles. + * An ORM doesn't need to hide the "R". A relational + database provides rich, set-based functionality + that should be fully exposed. 
SQLAlchemy's + ORM provides an open-ended set of patterns + that allow a developer to construct a custom + mediation layer between a domain model and + a relational schema, turning the so-called + "object relational impedance" issue into + a distant memory. + * The developer, in all cases, makes all decisions + regarding the design, structure, and naming conventions + of both the object model as well as the relational + schema. SQLAlchemy only provides the means + to automate the execution of these decisions. + * With SQLAlchemy, there's no such thing as + "the ORM generated a bad query" - you + retain full control over the structure of + queries, including how joins are organized, + how subqueries and correlation is used, what + columns are requested. Everything SQLAlchemy + does is ultimately the result of a developer- + initiated decision. + * Don't use an ORM if the problem doesn't need one. + SQLAlchemy consists of a Core and separate ORM + component. The Core offers a full SQL expression + language that allows Pythonic construction + of SQL constructs that render directly to SQL + strings for a target database, returning + result sets that are essentially enhanced DBAPI + cursors. + * Transactions should be the norm. With SQLAlchemy's + ORM, nothing goes to permanent storage until + commit() is called. SQLAlchemy encourages applications + to create a consistent means of delineating + the start and end of a series of operations. + * Never render a literal value in a SQL statement. + Bound parameters are used to the greatest degree + possible, allowing query optimizers to cache + query plans effectively and making SQL injection + attacks a non-issue. + + Documentation + ------------- + + Latest documentation is at: + + http://www.sqlalchemy.org/docs/ + + Installation / Requirements + --------------------------- + + Full documentation for installation is at + `Installation `_. 
+ + Getting Help / Development / Bug reporting + ------------------------------------------ + + Please refer to the `SQLAlchemy Community Guide `_. + + License + ------- + + SQLAlchemy is distributed under the `MIT license + `_. + + +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: Jython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Database :: Front-Ends +Classifier: Operating System :: OS Independent diff --git a/Linux_i686/lib/python2.7/site-packages/SQLAlchemy-0.9.4.egg-info/SOURCES.txt b/Linux_i686/lib/python2.7/site-packages/SQLAlchemy-0.9.4.egg-info/SOURCES.txt new file mode 100644 index 0000000..e93aaa2 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/SQLAlchemy-0.9.4.egg-info/SOURCES.txt @@ -0,0 +1,754 @@ +AUTHORS +CHANGES +LICENSE +MANIFEST.in +README.dialects.rst +README.rst +README.unittests.rst +setup.cfg +setup.py +sqla_nose.py +doc/contents.html +doc/copyright.html +doc/faq.html +doc/genindex.html +doc/glossary.html +doc/index.html +doc/intro.html +doc/search.html +doc/searchindex.js +doc/_images/sqla_arch_small.png +doc/_images/sqla_engine_arch.png +doc/_modules/index.html +doc/_modules/examples/adjacency_list/adjacency_list.html +doc/_modules/examples/association/basic_association.html +doc/_modules/examples/association/dict_of_sets_with_default.html +doc/_modules/examples/association/proxied_association.html +doc/_modules/examples/custom_attributes/custom_management.html +doc/_modules/examples/custom_attributes/listen_for_events.html +doc/_modules/examples/dogpile_caching/advanced.html +doc/_modules/examples/dogpile_caching/caching_query.html 
+doc/_modules/examples/dogpile_caching/environment.html +doc/_modules/examples/dogpile_caching/fixture_data.html +doc/_modules/examples/dogpile_caching/helloworld.html +doc/_modules/examples/dogpile_caching/local_session_caching.html +doc/_modules/examples/dogpile_caching/model.html +doc/_modules/examples/dogpile_caching/relationship_caching.html +doc/_modules/examples/dynamic_dict/dynamic_dict.html +doc/_modules/examples/elementtree/adjacency_list.html +doc/_modules/examples/elementtree/optimized_al.html +doc/_modules/examples/elementtree/pickle.html +doc/_modules/examples/generic_associations/discriminator_on_association.html +doc/_modules/examples/generic_associations/generic_fk.html +doc/_modules/examples/generic_associations/table_per_association.html +doc/_modules/examples/generic_associations/table_per_related.html +doc/_modules/examples/graphs/directed_graph.html +doc/_modules/examples/inheritance/concrete.html +doc/_modules/examples/inheritance/joined.html +doc/_modules/examples/inheritance/single.html +doc/_modules/examples/join_conditions/cast.html +doc/_modules/examples/join_conditions/threeway.html +doc/_modules/examples/large_collection/large_collection.html +doc/_modules/examples/nested_sets/nested_sets.html +doc/_modules/examples/postgis/postgis.html +doc/_modules/examples/sharding/attribute_shard.html +doc/_modules/examples/versioned_history/history_meta.html +doc/_modules/examples/versioned_history/test_versioning.html +doc/_modules/examples/versioned_rows/versioned_map.html +doc/_modules/examples/versioned_rows/versioned_rows.html +doc/_modules/examples/vertical/dictlike-polymorphic.html +doc/_modules/examples/vertical/dictlike.html +doc/_sources/contents.txt +doc/_sources/copyright.txt +doc/_sources/faq.txt +doc/_sources/glossary.txt +doc/_sources/index.txt +doc/_sources/intro.txt +doc/_sources/changelog/changelog_01.txt +doc/_sources/changelog/changelog_02.txt +doc/_sources/changelog/changelog_03.txt +doc/_sources/changelog/changelog_04.txt 
+doc/_sources/changelog/changelog_05.txt +doc/_sources/changelog/changelog_06.txt +doc/_sources/changelog/changelog_07.txt +doc/_sources/changelog/changelog_08.txt +doc/_sources/changelog/changelog_09.txt +doc/_sources/changelog/index.txt +doc/_sources/changelog/migration_04.txt +doc/_sources/changelog/migration_05.txt +doc/_sources/changelog/migration_06.txt +doc/_sources/changelog/migration_07.txt +doc/_sources/changelog/migration_08.txt +doc/_sources/changelog/migration_09.txt +doc/_sources/core/compiler.txt +doc/_sources/core/connections.txt +doc/_sources/core/constraints.txt +doc/_sources/core/ddl.txt +doc/_sources/core/defaults.txt +doc/_sources/core/dml.txt +doc/_sources/core/engines.txt +doc/_sources/core/event.txt +doc/_sources/core/events.txt +doc/_sources/core/exceptions.txt +doc/_sources/core/expression_api.txt +doc/_sources/core/functions.txt +doc/_sources/core/index.txt +doc/_sources/core/inspection.txt +doc/_sources/core/interfaces.txt +doc/_sources/core/internals.txt +doc/_sources/core/metadata.txt +doc/_sources/core/pooling.txt +doc/_sources/core/reflection.txt +doc/_sources/core/schema.txt +doc/_sources/core/selectable.txt +doc/_sources/core/serializer.txt +doc/_sources/core/sqlelement.txt +doc/_sources/core/tutorial.txt +doc/_sources/core/types.txt +doc/_sources/dialects/drizzle.txt +doc/_sources/dialects/firebird.txt +doc/_sources/dialects/index.txt +doc/_sources/dialects/mssql.txt +doc/_sources/dialects/mysql.txt +doc/_sources/dialects/oracle.txt +doc/_sources/dialects/postgresql.txt +doc/_sources/dialects/sqlite.txt +doc/_sources/dialects/sybase.txt +doc/_sources/orm/collections.txt +doc/_sources/orm/deprecated.txt +doc/_sources/orm/events.txt +doc/_sources/orm/examples.txt +doc/_sources/orm/exceptions.txt +doc/_sources/orm/index.txt +doc/_sources/orm/inheritance.txt +doc/_sources/orm/internals.txt +doc/_sources/orm/loading.txt +doc/_sources/orm/mapper_config.txt +doc/_sources/orm/query.txt +doc/_sources/orm/relationships.txt 
+doc/_sources/orm/session.txt +doc/_sources/orm/tutorial.txt +doc/_sources/orm/extensions/associationproxy.txt +doc/_sources/orm/extensions/automap.txt +doc/_sources/orm/extensions/declarative.txt +doc/_sources/orm/extensions/horizontal_shard.txt +doc/_sources/orm/extensions/hybrid.txt +doc/_sources/orm/extensions/index.txt +doc/_sources/orm/extensions/instrumentation.txt +doc/_sources/orm/extensions/mutable.txt +doc/_sources/orm/extensions/orderinglist.txt +doc/_static/basic.css +doc/_static/changelog.css +doc/_static/comment-bright.png +doc/_static/comment-close.png +doc/_static/comment.png +doc/_static/default.css +doc/_static/docs.css +doc/_static/doctools.js +doc/_static/down-pressed.png +doc/_static/down.png +doc/_static/file.png +doc/_static/init.js +doc/_static/jquery.js +doc/_static/minus.png +doc/_static/plus.png +doc/_static/pygments.css +doc/_static/searchtools.js +doc/_static/sidebar.js +doc/_static/sphinx_paramlinks.css +doc/_static/underscore.js +doc/_static/up-pressed.png +doc/_static/up.png +doc/_static/websupport.js +doc/build/Makefile +doc/build/conf.py +doc/build/contents.rst +doc/build/copyright.rst +doc/build/faq.rst +doc/build/glossary.rst +doc/build/index.rst +doc/build/intro.rst +doc/build/requirements.txt +doc/build/sqla_arch_small.png +doc/build/testdocs.py +doc/build/builder/__init__.py +doc/build/builder/autodoc_mods.py +doc/build/builder/dialect_info.py +doc/build/builder/mako.py +doc/build/builder/sqlformatter.py +doc/build/builder/util.py +doc/build/builder/viewsource.py +doc/build/changelog/changelog_01.rst +doc/build/changelog/changelog_02.rst +doc/build/changelog/changelog_03.rst +doc/build/changelog/changelog_04.rst +doc/build/changelog/changelog_05.rst +doc/build/changelog/changelog_06.rst +doc/build/changelog/changelog_07.rst +doc/build/changelog/changelog_08.rst +doc/build/changelog/changelog_09.rst +doc/build/changelog/index.rst +doc/build/changelog/migration_04.rst +doc/build/changelog/migration_05.rst 
+doc/build/changelog/migration_06.rst +doc/build/changelog/migration_07.rst +doc/build/changelog/migration_08.rst +doc/build/changelog/migration_09.rst +doc/build/core/compiler.rst +doc/build/core/connections.rst +doc/build/core/constraints.rst +doc/build/core/ddl.rst +doc/build/core/defaults.rst +doc/build/core/dml.rst +doc/build/core/engines.rst +doc/build/core/event.rst +doc/build/core/events.rst +doc/build/core/exceptions.rst +doc/build/core/expression_api.rst +doc/build/core/functions.rst +doc/build/core/index.rst +doc/build/core/inspection.rst +doc/build/core/interfaces.rst +doc/build/core/internals.rst +doc/build/core/metadata.rst +doc/build/core/pooling.rst +doc/build/core/reflection.rst +doc/build/core/schema.rst +doc/build/core/selectable.rst +doc/build/core/serializer.rst +doc/build/core/sqla_engine_arch.png +doc/build/core/sqlelement.rst +doc/build/core/tutorial.rst +doc/build/core/types.rst +doc/build/dialects/drizzle.rst +doc/build/dialects/firebird.rst +doc/build/dialects/index.rst +doc/build/dialects/mssql.rst +doc/build/dialects/mysql.rst +doc/build/dialects/oracle.rst +doc/build/dialects/postgresql.rst +doc/build/dialects/sqlite.rst +doc/build/dialects/sybase.rst +doc/build/orm/collections.rst +doc/build/orm/deprecated.rst +doc/build/orm/events.rst +doc/build/orm/examples.rst +doc/build/orm/exceptions.rst +doc/build/orm/index.rst +doc/build/orm/inheritance.rst +doc/build/orm/internals.rst +doc/build/orm/loading.rst +doc/build/orm/mapper_config.rst +doc/build/orm/query.rst +doc/build/orm/relationships.rst +doc/build/orm/session.rst +doc/build/orm/tutorial.rst +doc/build/orm/extensions/associationproxy.rst +doc/build/orm/extensions/automap.rst +doc/build/orm/extensions/declarative.rst +doc/build/orm/extensions/horizontal_shard.rst +doc/build/orm/extensions/hybrid.rst +doc/build/orm/extensions/index.rst +doc/build/orm/extensions/instrumentation.rst +doc/build/orm/extensions/mutable.rst +doc/build/orm/extensions/orderinglist.rst 
+doc/build/static/docs.css +doc/build/static/init.js +doc/build/templates/genindex.mako +doc/build/templates/layout.mako +doc/build/templates/page.mako +doc/build/templates/search.mako +doc/build/templates/static_base.mako +doc/build/texinputs/Makefile +doc/build/texinputs/sphinx.sty +doc/changelog/changelog_01.html +doc/changelog/changelog_02.html +doc/changelog/changelog_03.html +doc/changelog/changelog_04.html +doc/changelog/changelog_05.html +doc/changelog/changelog_06.html +doc/changelog/changelog_07.html +doc/changelog/changelog_08.html +doc/changelog/changelog_09.html +doc/changelog/index.html +doc/changelog/migration_04.html +doc/changelog/migration_05.html +doc/changelog/migration_06.html +doc/changelog/migration_07.html +doc/changelog/migration_08.html +doc/changelog/migration_09.html +doc/core/compiler.html +doc/core/connections.html +doc/core/constraints.html +doc/core/ddl.html +doc/core/defaults.html +doc/core/dml.html +doc/core/engines.html +doc/core/event.html +doc/core/events.html +doc/core/exceptions.html +doc/core/expression_api.html +doc/core/functions.html +doc/core/index.html +doc/core/inspection.html +doc/core/interfaces.html +doc/core/internals.html +doc/core/metadata.html +doc/core/pooling.html +doc/core/reflection.html +doc/core/schema.html +doc/core/selectable.html +doc/core/serializer.html +doc/core/sqlelement.html +doc/core/tutorial.html +doc/core/types.html +doc/dialects/drizzle.html +doc/dialects/firebird.html +doc/dialects/index.html +doc/dialects/mssql.html +doc/dialects/mysql.html +doc/dialects/oracle.html +doc/dialects/postgresql.html +doc/dialects/sqlite.html +doc/dialects/sybase.html +doc/orm/collections.html +doc/orm/deprecated.html +doc/orm/events.html +doc/orm/examples.html +doc/orm/exceptions.html +doc/orm/index.html +doc/orm/inheritance.html +doc/orm/internals.html +doc/orm/loading.html +doc/orm/mapper_config.html +doc/orm/query.html +doc/orm/relationships.html +doc/orm/session.html +doc/orm/tutorial.html 
+doc/orm/extensions/associationproxy.html +doc/orm/extensions/automap.html +doc/orm/extensions/declarative.html +doc/orm/extensions/horizontal_shard.html +doc/orm/extensions/hybrid.html +doc/orm/extensions/index.html +doc/orm/extensions/instrumentation.html +doc/orm/extensions/mutable.html +doc/orm/extensions/orderinglist.html +examples/__init__.py +examples/adjacency_list/__init__.py +examples/adjacency_list/adjacency_list.py +examples/association/__init__.py +examples/association/basic_association.py +examples/association/dict_of_sets_with_default.py +examples/association/proxied_association.py +examples/custom_attributes/__init__.py +examples/custom_attributes/custom_management.py +examples/custom_attributes/listen_for_events.py +examples/dogpile_caching/__init__.py +examples/dogpile_caching/advanced.py +examples/dogpile_caching/caching_query.py +examples/dogpile_caching/environment.py +examples/dogpile_caching/fixture_data.py +examples/dogpile_caching/helloworld.py +examples/dogpile_caching/local_session_caching.py +examples/dogpile_caching/model.py +examples/dogpile_caching/relationship_caching.py +examples/dynamic_dict/__init__.py +examples/dynamic_dict/dynamic_dict.py +examples/elementtree/__init__.py +examples/elementtree/adjacency_list.py +examples/elementtree/optimized_al.py +examples/elementtree/pickle.py +examples/elementtree/test.xml +examples/elementtree/test2.xml +examples/elementtree/test3.xml +examples/generic_associations/__init__.py +examples/generic_associations/discriminator_on_association.py +examples/generic_associations/generic_fk.py +examples/generic_associations/table_per_association.py +examples/generic_associations/table_per_related.py +examples/graphs/__init__.py +examples/graphs/directed_graph.py +examples/inheritance/__init__.py +examples/inheritance/concrete.py +examples/inheritance/joined.py +examples/inheritance/single.py +examples/join_conditions/__init__.py +examples/join_conditions/cast.py +examples/join_conditions/threeway.py 
+examples/large_collection/__init__.py +examples/large_collection/large_collection.py +examples/nested_sets/__init__.py +examples/nested_sets/nested_sets.py +examples/postgis/__init__.py +examples/postgis/postgis.py +examples/sharding/__init__.py +examples/sharding/attribute_shard.py +examples/versioned_history/__init__.py +examples/versioned_history/history_meta.py +examples/versioned_history/test_versioning.py +examples/versioned_rows/__init__.py +examples/versioned_rows/versioned_map.py +examples/versioned_rows/versioned_rows.py +examples/vertical/__init__.py +examples/vertical/dictlike-polymorphic.py +examples/vertical/dictlike.py +lib/SQLAlchemy.egg-info/PKG-INFO +lib/SQLAlchemy.egg-info/SOURCES.txt +lib/SQLAlchemy.egg-info/dependency_links.txt +lib/SQLAlchemy.egg-info/top_level.txt +lib/sqlalchemy/__init__.py +lib/sqlalchemy/events.py +lib/sqlalchemy/exc.py +lib/sqlalchemy/inspection.py +lib/sqlalchemy/interfaces.py +lib/sqlalchemy/log.py +lib/sqlalchemy/pool.py +lib/sqlalchemy/processors.py +lib/sqlalchemy/schema.py +lib/sqlalchemy/types.py +lib/sqlalchemy/cextension/processors.c +lib/sqlalchemy/cextension/resultproxy.c +lib/sqlalchemy/cextension/utils.c +lib/sqlalchemy/connectors/__init__.py +lib/sqlalchemy/connectors/mxodbc.py +lib/sqlalchemy/connectors/mysqldb.py +lib/sqlalchemy/connectors/pyodbc.py +lib/sqlalchemy/connectors/zxJDBC.py +lib/sqlalchemy/databases/__init__.py +lib/sqlalchemy/dialects/__init__.py +lib/sqlalchemy/dialects/postgres.py +lib/sqlalchemy/dialects/type_migration_guidelines.txt +lib/sqlalchemy/dialects/drizzle/__init__.py +lib/sqlalchemy/dialects/drizzle/base.py +lib/sqlalchemy/dialects/drizzle/mysqldb.py +lib/sqlalchemy/dialects/firebird/__init__.py +lib/sqlalchemy/dialects/firebird/base.py +lib/sqlalchemy/dialects/firebird/fdb.py +lib/sqlalchemy/dialects/firebird/kinterbasdb.py +lib/sqlalchemy/dialects/mssql/__init__.py +lib/sqlalchemy/dialects/mssql/adodbapi.py +lib/sqlalchemy/dialects/mssql/base.py 
+lib/sqlalchemy/dialects/mssql/information_schema.py +lib/sqlalchemy/dialects/mssql/mxodbc.py +lib/sqlalchemy/dialects/mssql/pymssql.py +lib/sqlalchemy/dialects/mssql/pyodbc.py +lib/sqlalchemy/dialects/mssql/zxjdbc.py +lib/sqlalchemy/dialects/mysql/__init__.py +lib/sqlalchemy/dialects/mysql/base.py +lib/sqlalchemy/dialects/mysql/cymysql.py +lib/sqlalchemy/dialects/mysql/gaerdbms.py +lib/sqlalchemy/dialects/mysql/mysqlconnector.py +lib/sqlalchemy/dialects/mysql/mysqldb.py +lib/sqlalchemy/dialects/mysql/oursql.py +lib/sqlalchemy/dialects/mysql/pymysql.py +lib/sqlalchemy/dialects/mysql/pyodbc.py +lib/sqlalchemy/dialects/mysql/zxjdbc.py +lib/sqlalchemy/dialects/oracle/__init__.py +lib/sqlalchemy/dialects/oracle/base.py +lib/sqlalchemy/dialects/oracle/cx_oracle.py +lib/sqlalchemy/dialects/oracle/zxjdbc.py +lib/sqlalchemy/dialects/postgresql/__init__.py +lib/sqlalchemy/dialects/postgresql/base.py +lib/sqlalchemy/dialects/postgresql/constraints.py +lib/sqlalchemy/dialects/postgresql/hstore.py +lib/sqlalchemy/dialects/postgresql/json.py +lib/sqlalchemy/dialects/postgresql/pg8000.py +lib/sqlalchemy/dialects/postgresql/psycopg2.py +lib/sqlalchemy/dialects/postgresql/pypostgresql.py +lib/sqlalchemy/dialects/postgresql/ranges.py +lib/sqlalchemy/dialects/postgresql/zxjdbc.py +lib/sqlalchemy/dialects/sqlite/__init__.py +lib/sqlalchemy/dialects/sqlite/base.py +lib/sqlalchemy/dialects/sqlite/pysqlite.py +lib/sqlalchemy/dialects/sybase/__init__.py +lib/sqlalchemy/dialects/sybase/base.py +lib/sqlalchemy/dialects/sybase/mxodbc.py +lib/sqlalchemy/dialects/sybase/pyodbc.py +lib/sqlalchemy/dialects/sybase/pysybase.py +lib/sqlalchemy/engine/__init__.py +lib/sqlalchemy/engine/base.py +lib/sqlalchemy/engine/default.py +lib/sqlalchemy/engine/interfaces.py +lib/sqlalchemy/engine/reflection.py +lib/sqlalchemy/engine/result.py +lib/sqlalchemy/engine/strategies.py +lib/sqlalchemy/engine/threadlocal.py +lib/sqlalchemy/engine/url.py +lib/sqlalchemy/engine/util.py +lib/sqlalchemy/event/__init__.py 
+lib/sqlalchemy/event/api.py +lib/sqlalchemy/event/attr.py +lib/sqlalchemy/event/base.py +lib/sqlalchemy/event/legacy.py +lib/sqlalchemy/event/registry.py +lib/sqlalchemy/ext/__init__.py +lib/sqlalchemy/ext/associationproxy.py +lib/sqlalchemy/ext/automap.py +lib/sqlalchemy/ext/compiler.py +lib/sqlalchemy/ext/horizontal_shard.py +lib/sqlalchemy/ext/hybrid.py +lib/sqlalchemy/ext/instrumentation.py +lib/sqlalchemy/ext/mutable.py +lib/sqlalchemy/ext/orderinglist.py +lib/sqlalchemy/ext/serializer.py +lib/sqlalchemy/ext/declarative/__init__.py +lib/sqlalchemy/ext/declarative/api.py +lib/sqlalchemy/ext/declarative/base.py +lib/sqlalchemy/ext/declarative/clsregistry.py +lib/sqlalchemy/orm/__init__.py +lib/sqlalchemy/orm/attributes.py +lib/sqlalchemy/orm/base.py +lib/sqlalchemy/orm/collections.py +lib/sqlalchemy/orm/dependency.py +lib/sqlalchemy/orm/deprecated_interfaces.py +lib/sqlalchemy/orm/descriptor_props.py +lib/sqlalchemy/orm/dynamic.py +lib/sqlalchemy/orm/evaluator.py +lib/sqlalchemy/orm/events.py +lib/sqlalchemy/orm/exc.py +lib/sqlalchemy/orm/identity.py +lib/sqlalchemy/orm/instrumentation.py +lib/sqlalchemy/orm/interfaces.py +lib/sqlalchemy/orm/loading.py +lib/sqlalchemy/orm/mapper.py +lib/sqlalchemy/orm/path_registry.py +lib/sqlalchemy/orm/persistence.py +lib/sqlalchemy/orm/properties.py +lib/sqlalchemy/orm/query.py +lib/sqlalchemy/orm/relationships.py +lib/sqlalchemy/orm/scoping.py +lib/sqlalchemy/orm/session.py +lib/sqlalchemy/orm/state.py +lib/sqlalchemy/orm/strategies.py +lib/sqlalchemy/orm/strategy_options.py +lib/sqlalchemy/orm/sync.py +lib/sqlalchemy/orm/unitofwork.py +lib/sqlalchemy/orm/util.py +lib/sqlalchemy/sql/__init__.py +lib/sqlalchemy/sql/annotation.py +lib/sqlalchemy/sql/base.py +lib/sqlalchemy/sql/compiler.py +lib/sqlalchemy/sql/ddl.py +lib/sqlalchemy/sql/default_comparator.py +lib/sqlalchemy/sql/dml.py +lib/sqlalchemy/sql/elements.py +lib/sqlalchemy/sql/expression.py +lib/sqlalchemy/sql/functions.py +lib/sqlalchemy/sql/naming.py 
+lib/sqlalchemy/sql/operators.py +lib/sqlalchemy/sql/schema.py +lib/sqlalchemy/sql/selectable.py +lib/sqlalchemy/sql/sqltypes.py +lib/sqlalchemy/sql/type_api.py +lib/sqlalchemy/sql/util.py +lib/sqlalchemy/sql/visitors.py +lib/sqlalchemy/testing/__init__.py +lib/sqlalchemy/testing/assertions.py +lib/sqlalchemy/testing/assertsql.py +lib/sqlalchemy/testing/config.py +lib/sqlalchemy/testing/engines.py +lib/sqlalchemy/testing/entities.py +lib/sqlalchemy/testing/exclusions.py +lib/sqlalchemy/testing/fixtures.py +lib/sqlalchemy/testing/mock.py +lib/sqlalchemy/testing/pickleable.py +lib/sqlalchemy/testing/profiling.py +lib/sqlalchemy/testing/requirements.py +lib/sqlalchemy/testing/runner.py +lib/sqlalchemy/testing/schema.py +lib/sqlalchemy/testing/util.py +lib/sqlalchemy/testing/warnings.py +lib/sqlalchemy/testing/plugin/__init__.py +lib/sqlalchemy/testing/plugin/noseplugin.py +lib/sqlalchemy/testing/plugin/plugin_base.py +lib/sqlalchemy/testing/plugin/pytestplugin.py +lib/sqlalchemy/testing/suite/__init__.py +lib/sqlalchemy/testing/suite/test_ddl.py +lib/sqlalchemy/testing/suite/test_insert.py +lib/sqlalchemy/testing/suite/test_reflection.py +lib/sqlalchemy/testing/suite/test_results.py +lib/sqlalchemy/testing/suite/test_select.py +lib/sqlalchemy/testing/suite/test_sequence.py +lib/sqlalchemy/testing/suite/test_types.py +lib/sqlalchemy/testing/suite/test_update_delete.py +lib/sqlalchemy/util/__init__.py +lib/sqlalchemy/util/_collections.py +lib/sqlalchemy/util/compat.py +lib/sqlalchemy/util/deprecations.py +lib/sqlalchemy/util/langhelpers.py +lib/sqlalchemy/util/queue.py +lib/sqlalchemy/util/topological.py +test/__init__.py +test/binary_data_one.dat +test/binary_data_two.dat +test/conftest.py +test/requirements.py +test/aaa_profiling/__init__.py +test/aaa_profiling/test_compiler.py +test/aaa_profiling/test_memusage.py +test/aaa_profiling/test_orm.py +test/aaa_profiling/test_pool.py +test/aaa_profiling/test_resultset.py +test/aaa_profiling/test_zoomark.py 
+test/aaa_profiling/test_zoomark_orm.py +test/base/__init__.py +test/base/test_dependency.py +test/base/test_events.py +test/base/test_except.py +test/base/test_inspect.py +test/base/test_utils.py +test/dialect/__init__.py +test/dialect/test_firebird.py +test/dialect/test_mxodbc.py +test/dialect/test_oracle.py +test/dialect/test_pyodbc.py +test/dialect/test_sqlite.py +test/dialect/test_suite.py +test/dialect/test_sybase.py +test/dialect/mssql/__init__.py +test/dialect/mssql/test_compiler.py +test/dialect/mssql/test_engine.py +test/dialect/mssql/test_query.py +test/dialect/mssql/test_reflection.py +test/dialect/mssql/test_types.py +test/dialect/mysql/__init__.py +test/dialect/mysql/test_compiler.py +test/dialect/mysql/test_dialect.py +test/dialect/mysql/test_query.py +test/dialect/mysql/test_reflection.py +test/dialect/mysql/test_types.py +test/dialect/postgresql/__init__.py +test/dialect/postgresql/test_compiler.py +test/dialect/postgresql/test_dialect.py +test/dialect/postgresql/test_query.py +test/dialect/postgresql/test_reflection.py +test/dialect/postgresql/test_types.py +test/engine/__init__.py +test/engine/test_bind.py +test/engine/test_ddlevents.py +test/engine/test_execute.py +test/engine/test_logging.py +test/engine/test_parseconnect.py +test/engine/test_pool.py +test/engine/test_processors.py +test/engine/test_reconnect.py +test/engine/test_reflection.py +test/engine/test_transaction.py +test/ext/__init__.py +test/ext/test_associationproxy.py +test/ext/test_automap.py +test/ext/test_compiler.py +test/ext/test_extendedattr.py +test/ext/test_horizontal_shard.py +test/ext/test_hybrid.py +test/ext/test_mutable.py +test/ext/test_orderinglist.py +test/ext/test_serializer.py +test/ext/declarative/__init__.py +test/ext/declarative/test_basic.py +test/ext/declarative/test_clsregistry.py +test/ext/declarative/test_inheritance.py +test/ext/declarative/test_mixin.py +test/ext/declarative/test_reflection.py +test/orm/__init__.py +test/orm/_fixtures.py 
+test/orm/test_association.py +test/orm/test_assorted_eager.py +test/orm/test_attributes.py +test/orm/test_backref_mutations.py +test/orm/test_bind.py +test/orm/test_bundle.py +test/orm/test_cascade.py +test/orm/test_collection.py +test/orm/test_compile.py +test/orm/test_composites.py +test/orm/test_cycles.py +test/orm/test_default_strategies.py +test/orm/test_defaults.py +test/orm/test_deferred.py +test/orm/test_deprecations.py +test/orm/test_descriptor.py +test/orm/test_dynamic.py +test/orm/test_eager_relations.py +test/orm/test_evaluator.py +test/orm/test_events.py +test/orm/test_expire.py +test/orm/test_froms.py +test/orm/test_generative.py +test/orm/test_hasparent.py +test/orm/test_immediate_load.py +test/orm/test_inspect.py +test/orm/test_instrumentation.py +test/orm/test_joins.py +test/orm/test_lazy_relations.py +test/orm/test_load_on_fks.py +test/orm/test_loading.py +test/orm/test_lockmode.py +test/orm/test_manytomany.py +test/orm/test_mapper.py +test/orm/test_merge.py +test/orm/test_naturalpks.py +test/orm/test_of_type.py +test/orm/test_onetoone.py +test/orm/test_options.py +test/orm/test_pickled.py +test/orm/test_query.py +test/orm/test_rel_fn.py +test/orm/test_relationships.py +test/orm/test_scoping.py +test/orm/test_selectable.py +test/orm/test_session.py +test/orm/test_subquery_relations.py +test/orm/test_sync.py +test/orm/test_transaction.py +test/orm/test_unitofwork.py +test/orm/test_unitofworkv2.py +test/orm/test_update_delete.py +test/orm/test_utils.py +test/orm/test_validators.py +test/orm/test_versioning.py +test/orm/inheritance/__init__.py +test/orm/inheritance/_poly_fixtures.py +test/orm/inheritance/test_abc_inheritance.py +test/orm/inheritance/test_abc_polymorphic.py +test/orm/inheritance/test_assorted_poly.py +test/orm/inheritance/test_basic.py +test/orm/inheritance/test_concrete.py +test/orm/inheritance/test_magazine.py +test/orm/inheritance/test_manytomany.py +test/orm/inheritance/test_poly_linked_list.py 
+test/orm/inheritance/test_poly_persistence.py +test/orm/inheritance/test_polymorphic_rel.py +test/orm/inheritance/test_productspec.py +test/orm/inheritance/test_relationship.py +test/orm/inheritance/test_selects.py +test/orm/inheritance/test_single.py +test/orm/inheritance/test_with_poly.py +test/perf/orm2010.py +test/sql/__init__.py +test/sql/test_case_statement.py +test/sql/test_compiler.py +test/sql/test_constraints.py +test/sql/test_cte.py +test/sql/test_ddlemit.py +test/sql/test_defaults.py +test/sql/test_delete.py +test/sql/test_functions.py +test/sql/test_generative.py +test/sql/test_insert.py +test/sql/test_inspect.py +test/sql/test_join_rewriting.py +test/sql/test_labels.py +test/sql/test_metadata.py +test/sql/test_operators.py +test/sql/test_query.py +test/sql/test_quote.py +test/sql/test_returning.py +test/sql/test_rowcount.py +test/sql/test_selectable.py +test/sql/test_text.py +test/sql/test_type_expressions.py +test/sql/test_types.py +test/sql/test_unicode.py +test/sql/test_update.py \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/SQLAlchemy-0.9.4.egg-info/dependency_links.txt b/Linux_i686/lib/python2.7/site-packages/SQLAlchemy-0.9.4.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/SQLAlchemy-0.9.4.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/Linux_i686/lib/python2.7/site-packages/SQLAlchemy-0.9.4.egg-info/installed-files.txt b/Linux_i686/lib/python2.7/site-packages/SQLAlchemy-0.9.4.egg-info/installed-files.txt new file mode 100644 index 0000000..839dba7 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/SQLAlchemy-0.9.4.egg-info/installed-files.txt @@ -0,0 +1,364 @@ +../sqlalchemy/log.py +../sqlalchemy/inspection.py +../sqlalchemy/interfaces.py +../sqlalchemy/types.py +../sqlalchemy/pool.py +../sqlalchemy/__init__.py +../sqlalchemy/schema.py +../sqlalchemy/exc.py +../sqlalchemy/events.py 
+../sqlalchemy/processors.py +../sqlalchemy/util/compat.py +../sqlalchemy/util/deprecations.py +../sqlalchemy/util/_collections.py +../sqlalchemy/util/topological.py +../sqlalchemy/util/queue.py +../sqlalchemy/util/__init__.py +../sqlalchemy/util/langhelpers.py +../sqlalchemy/ext/horizontal_shard.py +../sqlalchemy/ext/serializer.py +../sqlalchemy/ext/automap.py +../sqlalchemy/ext/associationproxy.py +../sqlalchemy/ext/__init__.py +../sqlalchemy/ext/hybrid.py +../sqlalchemy/ext/mutable.py +../sqlalchemy/ext/orderinglist.py +../sqlalchemy/ext/compiler.py +../sqlalchemy/ext/instrumentation.py +../sqlalchemy/ext/declarative/api.py +../sqlalchemy/ext/declarative/__init__.py +../sqlalchemy/ext/declarative/base.py +../sqlalchemy/ext/declarative/clsregistry.py +../sqlalchemy/event/api.py +../sqlalchemy/event/registry.py +../sqlalchemy/event/__init__.py +../sqlalchemy/event/legacy.py +../sqlalchemy/event/base.py +../sqlalchemy/event/attr.py +../sqlalchemy/sql/type_api.py +../sqlalchemy/sql/ddl.py +../sqlalchemy/sql/selectable.py +../sqlalchemy/sql/util.py +../sqlalchemy/sql/sqltypes.py +../sqlalchemy/sql/functions.py +../sqlalchemy/sql/operators.py +../sqlalchemy/sql/dml.py +../sqlalchemy/sql/naming.py +../sqlalchemy/sql/expression.py +../sqlalchemy/sql/__init__.py +../sqlalchemy/sql/schema.py +../sqlalchemy/sql/base.py +../sqlalchemy/sql/default_comparator.py +../sqlalchemy/sql/visitors.py +../sqlalchemy/sql/compiler.py +../sqlalchemy/sql/elements.py +../sqlalchemy/sql/annotation.py +../sqlalchemy/engine/default.py +../sqlalchemy/engine/url.py +../sqlalchemy/engine/util.py +../sqlalchemy/engine/interfaces.py +../sqlalchemy/engine/result.py +../sqlalchemy/engine/__init__.py +../sqlalchemy/engine/base.py +../sqlalchemy/engine/threadlocal.py +../sqlalchemy/engine/reflection.py +../sqlalchemy/engine/strategies.py +../sqlalchemy/dialects/postgres.py +../sqlalchemy/dialects/__init__.py +../sqlalchemy/dialects/mysql/mysqlconnector.py +../sqlalchemy/dialects/mysql/pymysql.py 
+../sqlalchemy/dialects/mysql/cymysql.py +../sqlalchemy/dialects/mysql/oursql.py +../sqlalchemy/dialects/mysql/pyodbc.py +../sqlalchemy/dialects/mysql/__init__.py +../sqlalchemy/dialects/mysql/zxjdbc.py +../sqlalchemy/dialects/mysql/base.py +../sqlalchemy/dialects/mysql/gaerdbms.py +../sqlalchemy/dialects/mysql/mysqldb.py +../sqlalchemy/dialects/postgresql/pg8000.py +../sqlalchemy/dialects/postgresql/hstore.py +../sqlalchemy/dialects/postgresql/pypostgresql.py +../sqlalchemy/dialects/postgresql/psycopg2.py +../sqlalchemy/dialects/postgresql/ranges.py +../sqlalchemy/dialects/postgresql/json.py +../sqlalchemy/dialects/postgresql/__init__.py +../sqlalchemy/dialects/postgresql/zxjdbc.py +../sqlalchemy/dialects/postgresql/base.py +../sqlalchemy/dialects/postgresql/constraints.py +../sqlalchemy/dialects/sybase/pyodbc.py +../sqlalchemy/dialects/sybase/__init__.py +../sqlalchemy/dialects/sybase/base.py +../sqlalchemy/dialects/sybase/mxodbc.py +../sqlalchemy/dialects/sybase/pysybase.py +../sqlalchemy/dialects/firebird/fdb.py +../sqlalchemy/dialects/firebird/__init__.py +../sqlalchemy/dialects/firebird/kinterbasdb.py +../sqlalchemy/dialects/firebird/base.py +../sqlalchemy/dialects/sqlite/pysqlite.py +../sqlalchemy/dialects/sqlite/__init__.py +../sqlalchemy/dialects/sqlite/base.py +../sqlalchemy/dialects/mssql/information_schema.py +../sqlalchemy/dialects/mssql/pymssql.py +../sqlalchemy/dialects/mssql/adodbapi.py +../sqlalchemy/dialects/mssql/pyodbc.py +../sqlalchemy/dialects/mssql/__init__.py +../sqlalchemy/dialects/mssql/zxjdbc.py +../sqlalchemy/dialects/mssql/base.py +../sqlalchemy/dialects/mssql/mxodbc.py +../sqlalchemy/dialects/drizzle/__init__.py +../sqlalchemy/dialects/drizzle/base.py +../sqlalchemy/dialects/drizzle/mysqldb.py +../sqlalchemy/dialects/oracle/cx_oracle.py +../sqlalchemy/dialects/oracle/__init__.py +../sqlalchemy/dialects/oracle/zxjdbc.py +../sqlalchemy/dialects/oracle/base.py +../sqlalchemy/testing/profiling.py +../sqlalchemy/testing/warnings.py 
+../sqlalchemy/testing/engines.py +../sqlalchemy/testing/util.py +../sqlalchemy/testing/entities.py +../sqlalchemy/testing/mock.py +../sqlalchemy/testing/exclusions.py +../sqlalchemy/testing/assertions.py +../sqlalchemy/testing/fixtures.py +../sqlalchemy/testing/pickleable.py +../sqlalchemy/testing/__init__.py +../sqlalchemy/testing/schema.py +../sqlalchemy/testing/runner.py +../sqlalchemy/testing/config.py +../sqlalchemy/testing/requirements.py +../sqlalchemy/testing/assertsql.py +../sqlalchemy/testing/plugin/pytestplugin.py +../sqlalchemy/testing/plugin/plugin_base.py +../sqlalchemy/testing/plugin/noseplugin.py +../sqlalchemy/testing/plugin/__init__.py +../sqlalchemy/testing/suite/test_types.py +../sqlalchemy/testing/suite/test_select.py +../sqlalchemy/testing/suite/test_ddl.py +../sqlalchemy/testing/suite/test_reflection.py +../sqlalchemy/testing/suite/test_sequence.py +../sqlalchemy/testing/suite/__init__.py +../sqlalchemy/testing/suite/test_results.py +../sqlalchemy/testing/suite/test_insert.py +../sqlalchemy/testing/suite/test_update_delete.py +../sqlalchemy/orm/state.py +../sqlalchemy/orm/session.py +../sqlalchemy/orm/unitofwork.py +../sqlalchemy/orm/properties.py +../sqlalchemy/orm/collections.py +../sqlalchemy/orm/identity.py +../sqlalchemy/orm/util.py +../sqlalchemy/orm/sync.py +../sqlalchemy/orm/interfaces.py +../sqlalchemy/orm/path_registry.py +../sqlalchemy/orm/evaluator.py +../sqlalchemy/orm/deprecated_interfaces.py +../sqlalchemy/orm/mapper.py +../sqlalchemy/orm/persistence.py +../sqlalchemy/orm/dynamic.py +../sqlalchemy/orm/__init__.py +../sqlalchemy/orm/scoping.py +../sqlalchemy/orm/descriptor_props.py +../sqlalchemy/orm/loading.py +../sqlalchemy/orm/base.py +../sqlalchemy/orm/exc.py +../sqlalchemy/orm/events.py +../sqlalchemy/orm/strategies.py +../sqlalchemy/orm/query.py +../sqlalchemy/orm/dependency.py +../sqlalchemy/orm/attributes.py +../sqlalchemy/orm/relationships.py +../sqlalchemy/orm/strategy_options.py +../sqlalchemy/orm/instrumentation.py 
+../sqlalchemy/connectors/pyodbc.py +../sqlalchemy/connectors/__init__.py +../sqlalchemy/connectors/zxJDBC.py +../sqlalchemy/connectors/mysqldb.py +../sqlalchemy/connectors/mxodbc.py +../sqlalchemy/databases/__init__.py +../sqlalchemy/log.pyc +../sqlalchemy/inspection.pyc +../sqlalchemy/interfaces.pyc +../sqlalchemy/types.pyc +../sqlalchemy/pool.pyc +../sqlalchemy/__init__.pyc +../sqlalchemy/schema.pyc +../sqlalchemy/exc.pyc +../sqlalchemy/events.pyc +../sqlalchemy/processors.pyc +../sqlalchemy/util/compat.pyc +../sqlalchemy/util/deprecations.pyc +../sqlalchemy/util/_collections.pyc +../sqlalchemy/util/topological.pyc +../sqlalchemy/util/queue.pyc +../sqlalchemy/util/__init__.pyc +../sqlalchemy/util/langhelpers.pyc +../sqlalchemy/ext/horizontal_shard.pyc +../sqlalchemy/ext/serializer.pyc +../sqlalchemy/ext/automap.pyc +../sqlalchemy/ext/associationproxy.pyc +../sqlalchemy/ext/__init__.pyc +../sqlalchemy/ext/hybrid.pyc +../sqlalchemy/ext/mutable.pyc +../sqlalchemy/ext/orderinglist.pyc +../sqlalchemy/ext/compiler.pyc +../sqlalchemy/ext/instrumentation.pyc +../sqlalchemy/ext/declarative/api.pyc +../sqlalchemy/ext/declarative/__init__.pyc +../sqlalchemy/ext/declarative/base.pyc +../sqlalchemy/ext/declarative/clsregistry.pyc +../sqlalchemy/event/api.pyc +../sqlalchemy/event/registry.pyc +../sqlalchemy/event/__init__.pyc +../sqlalchemy/event/legacy.pyc +../sqlalchemy/event/base.pyc +../sqlalchemy/event/attr.pyc +../sqlalchemy/sql/type_api.pyc +../sqlalchemy/sql/ddl.pyc +../sqlalchemy/sql/selectable.pyc +../sqlalchemy/sql/util.pyc +../sqlalchemy/sql/sqltypes.pyc +../sqlalchemy/sql/functions.pyc +../sqlalchemy/sql/operators.pyc +../sqlalchemy/sql/dml.pyc +../sqlalchemy/sql/naming.pyc +../sqlalchemy/sql/expression.pyc +../sqlalchemy/sql/__init__.pyc +../sqlalchemy/sql/schema.pyc +../sqlalchemy/sql/base.pyc +../sqlalchemy/sql/default_comparator.pyc +../sqlalchemy/sql/visitors.pyc +../sqlalchemy/sql/compiler.pyc +../sqlalchemy/sql/elements.pyc 
+../sqlalchemy/sql/annotation.pyc +../sqlalchemy/engine/default.pyc +../sqlalchemy/engine/url.pyc +../sqlalchemy/engine/util.pyc +../sqlalchemy/engine/interfaces.pyc +../sqlalchemy/engine/result.pyc +../sqlalchemy/engine/__init__.pyc +../sqlalchemy/engine/base.pyc +../sqlalchemy/engine/threadlocal.pyc +../sqlalchemy/engine/reflection.pyc +../sqlalchemy/engine/strategies.pyc +../sqlalchemy/dialects/postgres.pyc +../sqlalchemy/dialects/__init__.pyc +../sqlalchemy/dialects/mysql/mysqlconnector.pyc +../sqlalchemy/dialects/mysql/pymysql.pyc +../sqlalchemy/dialects/mysql/cymysql.pyc +../sqlalchemy/dialects/mysql/oursql.pyc +../sqlalchemy/dialects/mysql/pyodbc.pyc +../sqlalchemy/dialects/mysql/__init__.pyc +../sqlalchemy/dialects/mysql/zxjdbc.pyc +../sqlalchemy/dialects/mysql/base.pyc +../sqlalchemy/dialects/mysql/gaerdbms.pyc +../sqlalchemy/dialects/mysql/mysqldb.pyc +../sqlalchemy/dialects/postgresql/pg8000.pyc +../sqlalchemy/dialects/postgresql/hstore.pyc +../sqlalchemy/dialects/postgresql/pypostgresql.pyc +../sqlalchemy/dialects/postgresql/psycopg2.pyc +../sqlalchemy/dialects/postgresql/ranges.pyc +../sqlalchemy/dialects/postgresql/json.pyc +../sqlalchemy/dialects/postgresql/__init__.pyc +../sqlalchemy/dialects/postgresql/zxjdbc.pyc +../sqlalchemy/dialects/postgresql/base.pyc +../sqlalchemy/dialects/postgresql/constraints.pyc +../sqlalchemy/dialects/sybase/pyodbc.pyc +../sqlalchemy/dialects/sybase/__init__.pyc +../sqlalchemy/dialects/sybase/base.pyc +../sqlalchemy/dialects/sybase/mxodbc.pyc +../sqlalchemy/dialects/sybase/pysybase.pyc +../sqlalchemy/dialects/firebird/fdb.pyc +../sqlalchemy/dialects/firebird/__init__.pyc +../sqlalchemy/dialects/firebird/kinterbasdb.pyc +../sqlalchemy/dialects/firebird/base.pyc +../sqlalchemy/dialects/sqlite/pysqlite.pyc +../sqlalchemy/dialects/sqlite/__init__.pyc +../sqlalchemy/dialects/sqlite/base.pyc +../sqlalchemy/dialects/mssql/information_schema.pyc +../sqlalchemy/dialects/mssql/pymssql.pyc 
+../sqlalchemy/dialects/mssql/adodbapi.pyc +../sqlalchemy/dialects/mssql/pyodbc.pyc +../sqlalchemy/dialects/mssql/__init__.pyc +../sqlalchemy/dialects/mssql/zxjdbc.pyc +../sqlalchemy/dialects/mssql/base.pyc +../sqlalchemy/dialects/mssql/mxodbc.pyc +../sqlalchemy/dialects/drizzle/__init__.pyc +../sqlalchemy/dialects/drizzle/base.pyc +../sqlalchemy/dialects/drizzle/mysqldb.pyc +../sqlalchemy/dialects/oracle/cx_oracle.pyc +../sqlalchemy/dialects/oracle/__init__.pyc +../sqlalchemy/dialects/oracle/zxjdbc.pyc +../sqlalchemy/dialects/oracle/base.pyc +../sqlalchemy/testing/profiling.pyc +../sqlalchemy/testing/warnings.pyc +../sqlalchemy/testing/engines.pyc +../sqlalchemy/testing/util.pyc +../sqlalchemy/testing/entities.pyc +../sqlalchemy/testing/mock.pyc +../sqlalchemy/testing/exclusions.pyc +../sqlalchemy/testing/assertions.pyc +../sqlalchemy/testing/fixtures.pyc +../sqlalchemy/testing/pickleable.pyc +../sqlalchemy/testing/__init__.pyc +../sqlalchemy/testing/schema.pyc +../sqlalchemy/testing/runner.pyc +../sqlalchemy/testing/config.pyc +../sqlalchemy/testing/requirements.pyc +../sqlalchemy/testing/assertsql.pyc +../sqlalchemy/testing/plugin/pytestplugin.pyc +../sqlalchemy/testing/plugin/plugin_base.pyc +../sqlalchemy/testing/plugin/noseplugin.pyc +../sqlalchemy/testing/plugin/__init__.pyc +../sqlalchemy/testing/suite/test_types.pyc +../sqlalchemy/testing/suite/test_select.pyc +../sqlalchemy/testing/suite/test_ddl.pyc +../sqlalchemy/testing/suite/test_reflection.pyc +../sqlalchemy/testing/suite/test_sequence.pyc +../sqlalchemy/testing/suite/__init__.pyc +../sqlalchemy/testing/suite/test_results.pyc +../sqlalchemy/testing/suite/test_insert.pyc +../sqlalchemy/testing/suite/test_update_delete.pyc +../sqlalchemy/orm/state.pyc +../sqlalchemy/orm/session.pyc +../sqlalchemy/orm/unitofwork.pyc +../sqlalchemy/orm/properties.pyc +../sqlalchemy/orm/collections.pyc +../sqlalchemy/orm/identity.pyc +../sqlalchemy/orm/util.pyc +../sqlalchemy/orm/sync.pyc +../sqlalchemy/orm/interfaces.pyc 
+../sqlalchemy/orm/path_registry.pyc +../sqlalchemy/orm/evaluator.pyc +../sqlalchemy/orm/deprecated_interfaces.pyc +../sqlalchemy/orm/mapper.pyc +../sqlalchemy/orm/persistence.pyc +../sqlalchemy/orm/dynamic.pyc +../sqlalchemy/orm/__init__.pyc +../sqlalchemy/orm/scoping.pyc +../sqlalchemy/orm/descriptor_props.pyc +../sqlalchemy/orm/loading.pyc +../sqlalchemy/orm/base.pyc +../sqlalchemy/orm/exc.pyc +../sqlalchemy/orm/events.pyc +../sqlalchemy/orm/strategies.pyc +../sqlalchemy/orm/query.pyc +../sqlalchemy/orm/dependency.pyc +../sqlalchemy/orm/attributes.pyc +../sqlalchemy/orm/relationships.pyc +../sqlalchemy/orm/strategy_options.pyc +../sqlalchemy/orm/instrumentation.pyc +../sqlalchemy/connectors/pyodbc.pyc +../sqlalchemy/connectors/__init__.pyc +../sqlalchemy/connectors/zxJDBC.pyc +../sqlalchemy/connectors/mysqldb.pyc +../sqlalchemy/connectors/mxodbc.pyc +../sqlalchemy/databases/__init__.pyc +../sqlalchemy/cprocessors.so +../sqlalchemy/cresultproxy.so +../sqlalchemy/cutils.so +./ +SOURCES.txt +dependency_links.txt +PKG-INFO +top_level.txt diff --git a/Linux_i686/lib/python2.7/site-packages/SQLAlchemy-0.9.4.egg-info/top_level.txt b/Linux_i686/lib/python2.7/site-packages/SQLAlchemy-0.9.4.egg-info/top_level.txt new file mode 100644 index 0000000..39fb2be --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/SQLAlchemy-0.9.4.egg-info/top_level.txt @@ -0,0 +1 @@ +sqlalchemy diff --git a/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/PKG-INFO b/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/PKG-INFO new file mode 100644 index 0000000..9414e47 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/PKG-INFO @@ -0,0 +1,16 @@ +Metadata-Version: 1.1 +Name: Twisted +Version: 14.0.0 +Summary: An asynchronous networking framework written in Python +Home-page: http://twistedmatrix.com/ +Author: Glyph Lefkowitz +Author-email: glyph@twistedmatrix.com +License: MIT +Description: An extensible framework for Python 
programming, with special focus + on event-based network programming and multiprotocol integration. + +Platform: UNKNOWN +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 diff --git a/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/SOURCES.txt b/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/SOURCES.txt new file mode 100644 index 0000000..073c9d5 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/SOURCES.txt @@ -0,0 +1,846 @@ +README +Twisted.egg-info/PKG-INFO +Twisted.egg-info/SOURCES.txt +Twisted.egg-info/dependency_links.txt +Twisted.egg-info/not-zip-safe +Twisted.egg-info/requires.txt +Twisted.egg-info/top_level.txt +bin/manhole +bin/pyhtmlizer +bin/tap2deb +bin/tap2rpm +bin/tapconvert +bin/trial +bin/twistd +bin/conch/cftp +bin/conch/ckeygen +bin/conch/conch +bin/conch/tkconch +bin/lore/lore +bin/mail/mailmail +twisted/__init__.py +twisted/_version.py +twisted/copyright.py +twisted/plugin.py +twisted/application/__init__.py +twisted/application/app.py +twisted/application/internet.py +twisted/application/reactors.py +twisted/application/service.py +twisted/application/strports.py +twisted/application/test/__init__.py +twisted/application/test/test_internet.py +twisted/conch/__init__.py +twisted/conch/_version.py +twisted/conch/avatar.py +twisted/conch/checkers.py +twisted/conch/endpoints.py +twisted/conch/error.py +twisted/conch/interfaces.py +twisted/conch/ls.py +twisted/conch/manhole.py +twisted/conch/manhole_ssh.py +twisted/conch/manhole_tap.py +twisted/conch/mixin.py +twisted/conch/recvline.py +twisted/conch/stdio.py +twisted/conch/tap.py +twisted/conch/telnet.py +twisted/conch/ttymodes.py +twisted/conch/unix.py +twisted/conch/client/__init__.py +twisted/conch/client/agent.py +twisted/conch/client/connect.py +twisted/conch/client/default.py 
+twisted/conch/client/direct.py +twisted/conch/client/knownhosts.py +twisted/conch/client/options.py +twisted/conch/insults/__init__.py +twisted/conch/insults/client.py +twisted/conch/insults/colors.py +twisted/conch/insults/helper.py +twisted/conch/insults/insults.py +twisted/conch/insults/text.py +twisted/conch/insults/window.py +twisted/conch/openssh_compat/__init__.py +twisted/conch/openssh_compat/factory.py +twisted/conch/openssh_compat/primes.py +twisted/conch/scripts/__init__.py +twisted/conch/scripts/cftp.py +twisted/conch/scripts/ckeygen.py +twisted/conch/scripts/conch.py +twisted/conch/scripts/tkconch.py +twisted/conch/ssh/__init__.py +twisted/conch/ssh/address.py +twisted/conch/ssh/agent.py +twisted/conch/ssh/channel.py +twisted/conch/ssh/common.py +twisted/conch/ssh/connection.py +twisted/conch/ssh/factory.py +twisted/conch/ssh/filetransfer.py +twisted/conch/ssh/forwarding.py +twisted/conch/ssh/keys.py +twisted/conch/ssh/service.py +twisted/conch/ssh/session.py +twisted/conch/ssh/sexpy.py +twisted/conch/ssh/transport.py +twisted/conch/ssh/userauth.py +twisted/conch/test/__init__.py +twisted/conch/test/keydata.py +twisted/conch/test/test_address.py +twisted/conch/test/test_agent.py +twisted/conch/test/test_cftp.py +twisted/conch/test/test_channel.py +twisted/conch/test/test_checkers.py +twisted/conch/test/test_ckeygen.py +twisted/conch/test/test_conch.py +twisted/conch/test/test_connection.py +twisted/conch/test/test_default.py +twisted/conch/test/test_endpoints.py +twisted/conch/test/test_filetransfer.py +twisted/conch/test/test_helper.py +twisted/conch/test/test_insults.py +twisted/conch/test/test_keys.py +twisted/conch/test/test_knownhosts.py +twisted/conch/test/test_manhole.py +twisted/conch/test/test_mixin.py +twisted/conch/test/test_openssh_compat.py +twisted/conch/test/test_recvline.py +twisted/conch/test/test_scripts.py +twisted/conch/test/test_session.py +twisted/conch/test/test_ssh.py +twisted/conch/test/test_tap.py 
+twisted/conch/test/test_telnet.py +twisted/conch/test/test_text.py +twisted/conch/test/test_transport.py +twisted/conch/test/test_userauth.py +twisted/conch/test/test_window.py +twisted/conch/ui/__init__.py +twisted/conch/ui/ansi.py +twisted/conch/ui/tkvt100.py +twisted/cred/__init__.py +twisted/cred/_digest.py +twisted/cred/checkers.py +twisted/cred/credentials.py +twisted/cred/error.py +twisted/cred/pamauth.py +twisted/cred/portal.py +twisted/cred/strcred.py +twisted/enterprise/__init__.py +twisted/enterprise/adbapi.py +twisted/internet/__init__.py +twisted/internet/_baseprocess.py +twisted/internet/_dumbwin32proc.py +twisted/internet/_glibbase.py +twisted/internet/_newtls.py +twisted/internet/_pollingfile.py +twisted/internet/_posixserialport.py +twisted/internet/_posixstdio.py +twisted/internet/_signals.py +twisted/internet/_ssl.py +twisted/internet/_sslverify.py +twisted/internet/_threadedselect.py +twisted/internet/_win32serialport.py +twisted/internet/_win32stdio.py +twisted/internet/abstract.py +twisted/internet/address.py +twisted/internet/base.py +twisted/internet/cfreactor.py +twisted/internet/default.py +twisted/internet/defer.py +twisted/internet/endpoints.py +twisted/internet/epollreactor.py +twisted/internet/error.py +twisted/internet/fdesc.py +twisted/internet/gireactor.py +twisted/internet/glib2reactor.py +twisted/internet/gtk2reactor.py +twisted/internet/gtk3reactor.py +twisted/internet/gtkreactor.py +twisted/internet/inotify.py +twisted/internet/interfaces.py +twisted/internet/kqreactor.py +twisted/internet/main.py +twisted/internet/pollreactor.py +twisted/internet/posixbase.py +twisted/internet/process.py +twisted/internet/protocol.py +twisted/internet/pyuisupport.py +twisted/internet/qtreactor.py +twisted/internet/reactor.py +twisted/internet/selectreactor.py +twisted/internet/serialport.py +twisted/internet/ssl.py +twisted/internet/stdio.py +twisted/internet/task.py +twisted/internet/tcp.py +twisted/internet/threads.py 
+twisted/internet/tksupport.py +twisted/internet/udp.py +twisted/internet/unix.py +twisted/internet/utils.py +twisted/internet/win32eventreactor.py +twisted/internet/wxreactor.py +twisted/internet/wxsupport.py +twisted/internet/iocpreactor/__init__.py +twisted/internet/iocpreactor/abstract.py +twisted/internet/iocpreactor/const.py +twisted/internet/iocpreactor/interfaces.py +twisted/internet/iocpreactor/reactor.py +twisted/internet/iocpreactor/setup.py +twisted/internet/iocpreactor/tcp.py +twisted/internet/iocpreactor/udp.py +twisted/internet/iocpreactor/iocpsupport/iocpsupport.c +twisted/internet/iocpreactor/iocpsupport/winsock_pointers.c +twisted/internet/test/__init__.py +twisted/internet/test/_posixifaces.py +twisted/internet/test/_win32ifaces.py +twisted/internet/test/connectionmixins.py +twisted/internet/test/fakeendpoint.py +twisted/internet/test/modulehelpers.py +twisted/internet/test/process_gireactornocompat.py +twisted/internet/test/process_helper.py +twisted/internet/test/reactormixins.py +twisted/internet/test/test_abstract.py +twisted/internet/test/test_address.py +twisted/internet/test/test_base.py +twisted/internet/test/test_baseprocess.py +twisted/internet/test/test_core.py +twisted/internet/test/test_default.py +twisted/internet/test/test_endpoints.py +twisted/internet/test/test_epollreactor.py +twisted/internet/test/test_fdset.py +twisted/internet/test/test_filedescriptor.py +twisted/internet/test/test_gireactor.py +twisted/internet/test/test_glibbase.py +twisted/internet/test/test_gtkreactor.py +twisted/internet/test/test_inlinecb.py +twisted/internet/test/test_inotify.py +twisted/internet/test/test_iocp.py +twisted/internet/test/test_main.py +twisted/internet/test/test_newtls.py +twisted/internet/test/test_pollingfile.py +twisted/internet/test/test_posixbase.py +twisted/internet/test/test_posixprocess.py +twisted/internet/test/test_process.py +twisted/internet/test/test_protocol.py +twisted/internet/test/test_qtreactor.py 
+twisted/internet/test/test_serialport.py +twisted/internet/test/test_sigchld.py +twisted/internet/test/test_socket.py +twisted/internet/test/test_stdio.py +twisted/internet/test/test_tcp.py +twisted/internet/test/test_threads.py +twisted/internet/test/test_time.py +twisted/internet/test/test_tls.py +twisted/internet/test/test_udp.py +twisted/internet/test/test_udp_internals.py +twisted/internet/test/test_unix.py +twisted/internet/test/test_win32events.py +twisted/lore/__init__.py +twisted/lore/_version.py +twisted/lore/default.py +twisted/lore/docbook.py +twisted/lore/htmlbook.py +twisted/lore/indexer.py +twisted/lore/latex.py +twisted/lore/lint.py +twisted/lore/lmath.py +twisted/lore/man2lore.py +twisted/lore/numberer.py +twisted/lore/process.py +twisted/lore/slides.py +twisted/lore/texi.py +twisted/lore/tree.py +twisted/lore/scripts/__init__.py +twisted/lore/scripts/lore.py +twisted/lore/test/__init__.py +twisted/lore/test/test_docbook.py +twisted/lore/test/test_latex.py +twisted/lore/test/test_lint.py +twisted/lore/test/test_lmath.py +twisted/lore/test/test_lore.py +twisted/lore/test/test_man2lore.py +twisted/lore/test/test_scripts.py +twisted/lore/test/test_slides.py +twisted/lore/test/test_texi.py +twisted/mail/__init__.py +twisted/mail/_version.py +twisted/mail/alias.py +twisted/mail/bounce.py +twisted/mail/imap4.py +twisted/mail/mail.py +twisted/mail/maildir.py +twisted/mail/pb.py +twisted/mail/pop3.py +twisted/mail/pop3client.py +twisted/mail/protocols.py +twisted/mail/relay.py +twisted/mail/relaymanager.py +twisted/mail/smtp.py +twisted/mail/tap.py +twisted/mail/scripts/__init__.py +twisted/mail/scripts/mailmail.py +twisted/mail/test/__init__.py +twisted/mail/test/pop3testserver.py +twisted/mail/test/test_bounce.py +twisted/mail/test/test_imap.py +twisted/mail/test/test_mail.py +twisted/mail/test/test_mailmail.py +twisted/mail/test/test_options.py +twisted/mail/test/test_pop3.py +twisted/mail/test/test_pop3client.py +twisted/mail/test/test_scripts.py 
+twisted/mail/test/test_smtp.py +twisted/manhole/__init__.py +twisted/manhole/_inspectro.py +twisted/manhole/explorer.py +twisted/manhole/gladereactor.py +twisted/manhole/service.py +twisted/manhole/telnet.py +twisted/manhole/test/__init__.py +twisted/manhole/test/test_explorer.py +twisted/manhole/ui/__init__.py +twisted/manhole/ui/gtk2manhole.py +twisted/manhole/ui/test/__init__.py +twisted/manhole/ui/test/test_gtk2manhole.py +twisted/names/__init__.py +twisted/names/_rfc1982.py +twisted/names/_version.py +twisted/names/authority.py +twisted/names/cache.py +twisted/names/client.py +twisted/names/common.py +twisted/names/dns.py +twisted/names/error.py +twisted/names/hosts.py +twisted/names/resolve.py +twisted/names/root.py +twisted/names/secondary.py +twisted/names/server.py +twisted/names/srvconnect.py +twisted/names/tap.py +twisted/names/test/__init__.py +twisted/names/test/test_cache.py +twisted/names/test/test_client.py +twisted/names/test/test_common.py +twisted/names/test/test_dns.py +twisted/names/test/test_examples.py +twisted/names/test/test_hosts.py +twisted/names/test/test_names.py +twisted/names/test/test_resolve.py +twisted/names/test/test_rfc1982.py +twisted/names/test/test_rootresolve.py +twisted/names/test/test_server.py +twisted/names/test/test_srvconnect.py +twisted/names/test/test_tap.py +twisted/news/__init__.py +twisted/news/_version.py +twisted/news/database.py +twisted/news/news.py +twisted/news/nntp.py +twisted/news/tap.py +twisted/news/test/__init__.py +twisted/news/test/test_database.py +twisted/news/test/test_news.py +twisted/news/test/test_nntp.py +twisted/pair/__init__.py +twisted/pair/_version.py +twisted/pair/ethernet.py +twisted/pair/ip.py +twisted/pair/raw.py +twisted/pair/rawudp.py +twisted/pair/testing.py +twisted/pair/tuntap.py +twisted/pair/test/__init__.py +twisted/pair/test/test_ethernet.py +twisted/pair/test/test_ip.py +twisted/pair/test/test_rawudp.py +twisted/pair/test/test_tuntap.py +twisted/persisted/__init__.py 
+twisted/persisted/aot.py +twisted/persisted/crefutil.py +twisted/persisted/dirdbm.py +twisted/persisted/sob.py +twisted/persisted/styles.py +twisted/persisted/test/__init__.py +twisted/persisted/test/test_styles.py +twisted/plugins/__init__.py +twisted/plugins/cred_anonymous.py +twisted/plugins/cred_file.py +twisted/plugins/cred_memory.py +twisted/plugins/cred_sshkeys.py +twisted/plugins/cred_unix.py +twisted/plugins/twisted_conch.py +twisted/plugins/twisted_core.py +twisted/plugins/twisted_ftp.py +twisted/plugins/twisted_inet.py +twisted/plugins/twisted_lore.py +twisted/plugins/twisted_mail.py +twisted/plugins/twisted_manhole.py +twisted/plugins/twisted_names.py +twisted/plugins/twisted_news.py +twisted/plugins/twisted_portforward.py +twisted/plugins/twisted_qtstub.py +twisted/plugins/twisted_reactors.py +twisted/plugins/twisted_runner.py +twisted/plugins/twisted_socks.py +twisted/plugins/twisted_telnet.py +twisted/plugins/twisted_trial.py +twisted/plugins/twisted_web.py +twisted/plugins/twisted_words.py +twisted/positioning/__init__.py +twisted/positioning/_sentence.py +twisted/positioning/base.py +twisted/positioning/ipositioning.py +twisted/positioning/nmea.py +twisted/positioning/test/__init__.py +twisted/positioning/test/receiver.py +twisted/positioning/test/test_base.py +twisted/positioning/test/test_nmea.py +twisted/positioning/test/test_sentence.py +twisted/protocols/__init__.py +twisted/protocols/amp.py +twisted/protocols/basic.py +twisted/protocols/dict.py +twisted/protocols/finger.py +twisted/protocols/ftp.py +twisted/protocols/htb.py +twisted/protocols/ident.py +twisted/protocols/loopback.py +twisted/protocols/memcache.py +twisted/protocols/pcp.py +twisted/protocols/policies.py +twisted/protocols/portforward.py +twisted/protocols/postfix.py +twisted/protocols/shoutcast.py +twisted/protocols/sip.py +twisted/protocols/socks.py +twisted/protocols/stateful.py +twisted/protocols/telnet.py +twisted/protocols/tls.py +twisted/protocols/wire.py 
+twisted/protocols/gps/__init__.py +twisted/protocols/gps/nmea.py +twisted/protocols/gps/rockwell.py +twisted/protocols/mice/__init__.py +twisted/protocols/mice/mouseman.py +twisted/protocols/test/__init__.py +twisted/protocols/test/test_basic.py +twisted/protocols/test/test_tls.py +twisted/python/__init__.py +twisted/python/_inotify.py +twisted/python/_release.py +twisted/python/_shellcomp.py +twisted/python/_textattributes.py +twisted/python/compat.py +twisted/python/components.py +twisted/python/constants.py +twisted/python/context.py +twisted/python/deprecate.py +twisted/python/dist.py +twisted/python/dist3.py +twisted/python/failure.py +twisted/python/fakepwd.py +twisted/python/filepath.py +twisted/python/finalize.py +twisted/python/formmethod.py +twisted/python/hashlib.py +twisted/python/hook.py +twisted/python/htmlizer.py +twisted/python/lockfile.py +twisted/python/log.py +twisted/python/logfile.py +twisted/python/modules.py +twisted/python/monkey.py +twisted/python/procutils.py +twisted/python/randbytes.py +twisted/python/rebuild.py +twisted/python/reflect.py +twisted/python/release.py +twisted/python/roots.py +twisted/python/runtime.py +twisted/python/sendmsg.c +twisted/python/shortcut.py +twisted/python/syslog.py +twisted/python/systemd.py +twisted/python/text.py +twisted/python/threadable.py +twisted/python/threadpool.py +twisted/python/urlpath.py +twisted/python/usage.py +twisted/python/util.py +twisted/python/versions.py +twisted/python/win32.py +twisted/python/zippath.py +twisted/python/zipstream.py +twisted/python/test/__init__.py +twisted/python/test/deprecatedattributes.py +twisted/python/test/modules_helpers.py +twisted/python/test/pullpipe.py +twisted/python/test/test_components.py +twisted/python/test/test_constants.py +twisted/python/test/test_deprecate.py +twisted/python/test/test_dist.py +twisted/python/test/test_dist3.py +twisted/python/test/test_fakepwd.py +twisted/python/test/test_hashlib.py +twisted/python/test/test_htmlizer.py 
+twisted/python/test/test_inotify.py +twisted/python/test/test_release.py +twisted/python/test/test_runtime.py +twisted/python/test/test_sendmsg.py +twisted/python/test/test_shellcomp.py +twisted/python/test/test_syslog.py +twisted/python/test/test_systemd.py +twisted/python/test/test_textattributes.py +twisted/python/test/test_urlpath.py +twisted/python/test/test_util.py +twisted/python/test/test_versions.py +twisted/python/test/test_win32.py +twisted/python/test/test_zippath.py +twisted/python/test/test_zipstream.py +twisted/runner/__init__.py +twisted/runner/_version.py +twisted/runner/inetd.py +twisted/runner/inetdconf.py +twisted/runner/inetdtap.py +twisted/runner/portmap.c +twisted/runner/procmon.py +twisted/runner/procmontap.py +twisted/runner/test/__init__.py +twisted/runner/test/test_procmon.py +twisted/runner/test/test_procmontap.py +twisted/scripts/__init__.py +twisted/scripts/_twistd_unix.py +twisted/scripts/_twistw.py +twisted/scripts/htmlizer.py +twisted/scripts/manhole.py +twisted/scripts/tap2deb.py +twisted/scripts/tap2rpm.py +twisted/scripts/tapconvert.py +twisted/scripts/tkunzip.py +twisted/scripts/trial.py +twisted/scripts/twistd.py +twisted/scripts/test/__init__.py +twisted/scripts/test/test_scripts.py +twisted/scripts/test/test_tap2deb.py +twisted/scripts/test/test_tap2rpm.py +twisted/spread/__init__.py +twisted/spread/banana.py +twisted/spread/flavors.py +twisted/spread/interfaces.py +twisted/spread/jelly.py +twisted/spread/pb.py +twisted/spread/publish.py +twisted/spread/util.py +twisted/spread/ui/__init__.py +twisted/spread/ui/gtk2util.py +twisted/spread/ui/tktree.py +twisted/spread/ui/tkutil.py +twisted/tap/__init__.py +twisted/tap/ftp.py +twisted/tap/manhole.py +twisted/tap/portforward.py +twisted/tap/socks.py +twisted/tap/telnet.py +twisted/test/__init__.py +twisted/test/_preamble.py +twisted/test/crash_test_dummy.py +twisted/test/iosim.py +twisted/test/mock_win32process.py +twisted/test/myrebuilder1.py +twisted/test/myrebuilder2.py 
+twisted/test/plugin_basic.py +twisted/test/plugin_extra1.py +twisted/test/plugin_extra2.py +twisted/test/process_cmdline.py +twisted/test/process_echoer.py +twisted/test/process_fds.py +twisted/test/process_linger.py +twisted/test/process_reader.py +twisted/test/process_signal.py +twisted/test/process_stdinreader.py +twisted/test/process_tester.py +twisted/test/process_tty.py +twisted/test/process_twisted.py +twisted/test/proto_helpers.py +twisted/test/raiser.c +twisted/test/reflect_helper_IE.py +twisted/test/reflect_helper_VE.py +twisted/test/reflect_helper_ZDE.py +twisted/test/ssl_helpers.py +twisted/test/stdio_test_consumer.py +twisted/test/stdio_test_halfclose.py +twisted/test/stdio_test_hostpeer.py +twisted/test/stdio_test_lastwrite.py +twisted/test/stdio_test_loseconn.py +twisted/test/stdio_test_producer.py +twisted/test/stdio_test_write.py +twisted/test/stdio_test_writeseq.py +twisted/test/test_abstract.py +twisted/test/test_adbapi.py +twisted/test/test_amp.py +twisted/test/test_application.py +twisted/test/test_banana.py +twisted/test/test_compat.py +twisted/test/test_context.py +twisted/test/test_cooperator.py +twisted/test/test_defer.py +twisted/test/test_defgen.py +twisted/test/test_dict.py +twisted/test/test_digestauth.py +twisted/test/test_dirdbm.py +twisted/test/test_doc.py +twisted/test/test_error.py +twisted/test/test_explorer.py +twisted/test/test_factories.py +twisted/test/test_failure.py +twisted/test/test_fdesc.py +twisted/test/test_finger.py +twisted/test/test_formmethod.py +twisted/test/test_ftp.py +twisted/test/test_ftp_options.py +twisted/test/test_hook.py +twisted/test/test_htb.py +twisted/test/test_ident.py +twisted/test/test_internet.py +twisted/test/test_iosim.py +twisted/test/test_iutils.py +twisted/test/test_jelly.py +twisted/test/test_lockfile.py +twisted/test/test_log.py +twisted/test/test_logfile.py +twisted/test/test_loopback.py +twisted/test/test_manhole.py +twisted/test/test_memcache.py +twisted/test/test_modules.py 
+twisted/test/test_monkey.py +twisted/test/test_newcred.py +twisted/test/test_nmea.py +twisted/test/test_paths.py +twisted/test/test_pb.py +twisted/test/test_pbfailure.py +twisted/test/test_pcp.py +twisted/test/test_persisted.py +twisted/test/test_plugin.py +twisted/test/test_policies.py +twisted/test/test_postfix.py +twisted/test/test_process.py +twisted/test/test_protocols.py +twisted/test/test_randbytes.py +twisted/test/test_rebuild.py +twisted/test/test_reflect.py +twisted/test/test_roots.py +twisted/test/test_setup.py +twisted/test/test_shortcut.py +twisted/test/test_sip.py +twisted/test/test_sob.py +twisted/test/test_socks.py +twisted/test/test_ssl.py +twisted/test/test_sslverify.py +twisted/test/test_stateful.py +twisted/test/test_stdio.py +twisted/test/test_strcred.py +twisted/test/test_strerror.py +twisted/test/test_stringtransport.py +twisted/test/test_strports.py +twisted/test/test_task.py +twisted/test/test_tcp.py +twisted/test/test_tcp_internals.py +twisted/test/test_text.py +twisted/test/test_threadable.py +twisted/test/test_threadpool.py +twisted/test/test_threads.py +twisted/test/test_tpfile.py +twisted/test/test_twistd.py +twisted/test/test_twisted.py +twisted/test/test_udp.py +twisted/test/test_unix.py +twisted/test/test_usage.py +twisted/test/testutils.py +twisted/trial/__init__.py +twisted/trial/_asyncrunner.py +twisted/trial/_asynctest.py +twisted/trial/_synctest.py +twisted/trial/itrial.py +twisted/trial/reporter.py +twisted/trial/runner.py +twisted/trial/unittest.py +twisted/trial/util.py +twisted/trial/_dist/__init__.py +twisted/trial/_dist/distreporter.py +twisted/trial/_dist/disttrial.py +twisted/trial/_dist/managercommands.py +twisted/trial/_dist/options.py +twisted/trial/_dist/worker.py +twisted/trial/_dist/workercommands.py +twisted/trial/_dist/workerreporter.py +twisted/trial/_dist/workertrial.py +twisted/trial/_dist/test/__init__.py +twisted/trial/_dist/test/test_distreporter.py +twisted/trial/_dist/test/test_disttrial.py 
+twisted/trial/_dist/test/test_options.py +twisted/trial/_dist/test/test_worker.py +twisted/trial/_dist/test/test_workerreporter.py +twisted/trial/_dist/test/test_workertrial.py +twisted/trial/test/__init__.py +twisted/trial/test/detests.py +twisted/trial/test/erroneous.py +twisted/trial/test/mockcustomsuite.py +twisted/trial/test/mockcustomsuite2.py +twisted/trial/test/mockcustomsuite3.py +twisted/trial/test/mockdoctest.py +twisted/trial/test/moduleself.py +twisted/trial/test/moduletest.py +twisted/trial/test/novars.py +twisted/trial/test/ordertests.py +twisted/trial/test/packages.py +twisted/trial/test/sample.py +twisted/trial/test/scripttest.py +twisted/trial/test/skipping.py +twisted/trial/test/suppression.py +twisted/trial/test/test_assertions.py +twisted/trial/test/test_asyncassertions.py +twisted/trial/test/test_deferred.py +twisted/trial/test/test_doctest.py +twisted/trial/test/test_keyboard.py +twisted/trial/test/test_loader.py +twisted/trial/test/test_log.py +twisted/trial/test/test_output.py +twisted/trial/test/test_plugins.py +twisted/trial/test/test_pyunitcompat.py +twisted/trial/test/test_reporter.py +twisted/trial/test/test_runner.py +twisted/trial/test/test_script.py +twisted/trial/test/test_suppression.py +twisted/trial/test/test_testcase.py +twisted/trial/test/test_tests.py +twisted/trial/test/test_util.py +twisted/trial/test/test_warning.py +twisted/trial/test/weird.py +twisted/web/__init__.py +twisted/web/_element.py +twisted/web/_flatten.py +twisted/web/_newclient.py +twisted/web/_responses.py +twisted/web/_stan.py +twisted/web/_version.py +twisted/web/client.py +twisted/web/demo.py +twisted/web/distrib.py +twisted/web/domhelpers.py +twisted/web/error.py +twisted/web/guard.py +twisted/web/html.py +twisted/web/http.py +twisted/web/http_headers.py +twisted/web/iweb.py +twisted/web/microdom.py +twisted/web/proxy.py +twisted/web/resource.py +twisted/web/rewrite.py +twisted/web/script.py +twisted/web/server.py +twisted/web/soap.py 
+twisted/web/static.py +twisted/web/sux.py +twisted/web/tap.py +twisted/web/template.py +twisted/web/twcgi.py +twisted/web/util.py +twisted/web/vhost.py +twisted/web/wsgi.py +twisted/web/xmlrpc.py +twisted/web/_auth/__init__.py +twisted/web/_auth/basic.py +twisted/web/_auth/digest.py +twisted/web/_auth/wrapper.py +twisted/web/test/__init__.py +twisted/web/test/_util.py +twisted/web/test/requesthelper.py +twisted/web/test/test_agent.py +twisted/web/test/test_cgi.py +twisted/web/test/test_distrib.py +twisted/web/test/test_domhelpers.py +twisted/web/test/test_error.py +twisted/web/test/test_flatten.py +twisted/web/test/test_http.py +twisted/web/test/test_http_headers.py +twisted/web/test/test_httpauth.py +twisted/web/test/test_newclient.py +twisted/web/test/test_proxy.py +twisted/web/test/test_resource.py +twisted/web/test/test_script.py +twisted/web/test/test_soap.py +twisted/web/test/test_stan.py +twisted/web/test/test_static.py +twisted/web/test/test_tap.py +twisted/web/test/test_template.py +twisted/web/test/test_util.py +twisted/web/test/test_vhost.py +twisted/web/test/test_web.py +twisted/web/test/test_webclient.py +twisted/web/test/test_wsgi.py +twisted/web/test/test_xml.py +twisted/web/test/test_xmlrpc.py +twisted/words/__init__.py +twisted/words/_version.py +twisted/words/ewords.py +twisted/words/iwords.py +twisted/words/service.py +twisted/words/tap.py +twisted/words/xmpproutertap.py +twisted/words/im/__init__.py +twisted/words/im/baseaccount.py +twisted/words/im/basechat.py +twisted/words/im/basesupport.py +twisted/words/im/interfaces.py +twisted/words/im/ircsupport.py +twisted/words/im/locals.py +twisted/words/im/pbsupport.py +twisted/words/protocols/__init__.py +twisted/words/protocols/irc.py +twisted/words/protocols/msn.py +twisted/words/protocols/oscar.py +twisted/words/protocols/jabber/__init__.py +twisted/words/protocols/jabber/client.py +twisted/words/protocols/jabber/component.py +twisted/words/protocols/jabber/error.py 
+twisted/words/protocols/jabber/ijabber.py +twisted/words/protocols/jabber/jid.py +twisted/words/protocols/jabber/jstrports.py +twisted/words/protocols/jabber/sasl.py +twisted/words/protocols/jabber/sasl_mechanisms.py +twisted/words/protocols/jabber/xmlstream.py +twisted/words/protocols/jabber/xmpp_stringprep.py +twisted/words/test/__init__.py +twisted/words/test/test_basechat.py +twisted/words/test/test_basesupport.py +twisted/words/test/test_domish.py +twisted/words/test/test_irc.py +twisted/words/test/test_irc_service.py +twisted/words/test/test_ircsupport.py +twisted/words/test/test_jabberclient.py +twisted/words/test/test_jabbercomponent.py +twisted/words/test/test_jabbererror.py +twisted/words/test/test_jabberjid.py +twisted/words/test/test_jabberjstrports.py +twisted/words/test/test_jabbersasl.py +twisted/words/test/test_jabbersaslmechanisms.py +twisted/words/test/test_jabberxmlstream.py +twisted/words/test/test_jabberxmppstringprep.py +twisted/words/test/test_msn.py +twisted/words/test/test_oscar.py +twisted/words/test/test_service.py +twisted/words/test/test_tap.py +twisted/words/test/test_xishutil.py +twisted/words/test/test_xmlstream.py +twisted/words/test/test_xmpproutertap.py +twisted/words/test/test_xpath.py +twisted/words/xish/__init__.py +twisted/words/xish/domish.py +twisted/words/xish/utility.py +twisted/words/xish/xmlstream.py +twisted/words/xish/xpath.py +twisted/words/xish/xpathparser.py \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/dependency_links.txt b/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/installed-files.txt b/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/installed-files.txt new file 
mode 100644 index 0000000..dd46462 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/installed-files.txt @@ -0,0 +1,1757 @@ +../twisted/copyright.py +../twisted/_version.py +../twisted/plugin.py +../twisted/__init__.py +../twisted/manhole/telnet.py +../twisted/manhole/explorer.py +../twisted/manhole/service.py +../twisted/manhole/_inspectro.py +../twisted/manhole/__init__.py +../twisted/manhole/gladereactor.py +../twisted/manhole/ui/gtk2manhole.py +../twisted/manhole/ui/__init__.py +../twisted/manhole/ui/test/__init__.py +../twisted/manhole/ui/test/test_gtk2manhole.py +../twisted/manhole/test/__init__.py +../twisted/manhole/test/test_explorer.py +../twisted/mail/pb.py +../twisted/mail/relaymanager.py +../twisted/mail/imap4.py +../twisted/mail/_version.py +../twisted/mail/relay.py +../twisted/mail/pop3client.py +../twisted/mail/bounce.py +../twisted/mail/pop3.py +../twisted/mail/mail.py +../twisted/mail/__init__.py +../twisted/mail/alias.py +../twisted/mail/smtp.py +../twisted/mail/protocols.py +../twisted/mail/maildir.py +../twisted/mail/tap.py +../twisted/mail/scripts/mailmail.py +../twisted/mail/scripts/__init__.py +../twisted/mail/test/test_pop3client.py +../twisted/mail/test/test_smtp.py +../twisted/mail/test/test_scripts.py +../twisted/mail/test/test_imap.py +../twisted/mail/test/test_bounce.py +../twisted/mail/test/pop3testserver.py +../twisted/mail/test/__init__.py +../twisted/mail/test/test_mail.py +../twisted/mail/test/test_pop3.py +../twisted/mail/test/test_options.py +../twisted/mail/test/test_mailmail.py +../twisted/names/error.py +../twisted/names/client.py +../twisted/names/common.py +../twisted/names/server.py +../twisted/names/_version.py +../twisted/names/root.py +../twisted/names/hosts.py +../twisted/names/_rfc1982.py +../twisted/names/dns.py +../twisted/names/resolve.py +../twisted/names/__init__.py +../twisted/names/secondary.py +../twisted/names/srvconnect.py +../twisted/names/cache.py 
+../twisted/names/authority.py +../twisted/names/tap.py +../twisted/names/test/test_rfc1982.py +../twisted/names/test/test_client.py +../twisted/names/test/test_hosts.py +../twisted/names/test/test_server.py +../twisted/names/test/test_names.py +../twisted/names/test/test_resolve.py +../twisted/names/test/test_common.py +../twisted/names/test/__init__.py +../twisted/names/test/test_examples.py +../twisted/names/test/test_cache.py +../twisted/names/test/test_tap.py +../twisted/names/test/test_rootresolve.py +../twisted/names/test/test_srvconnect.py +../twisted/names/test/test_dns.py +../twisted/trial/unittest.py +../twisted/trial/_synctest.py +../twisted/trial/util.py +../twisted/trial/__init__.py +../twisted/trial/reporter.py +../twisted/trial/itrial.py +../twisted/trial/_asynctest.py +../twisted/trial/runner.py +../twisted/trial/_asyncrunner.py +../twisted/trial/test/scripttest.py +../twisted/trial/test/weird.py +../twisted/trial/test/test_assertions.py +../twisted/trial/test/mockcustomsuite.py +../twisted/trial/test/skipping.py +../twisted/trial/test/test_loader.py +../twisted/trial/test/test_reporter.py +../twisted/trial/test/novars.py +../twisted/trial/test/test_keyboard.py +../twisted/trial/test/moduletest.py +../twisted/trial/test/test_deferred.py +../twisted/trial/test/test_script.py +../twisted/trial/test/mockdoctest.py +../twisted/trial/test/test_testcase.py +../twisted/trial/test/test_util.py +../twisted/trial/test/ordertests.py +../twisted/trial/test/suppression.py +../twisted/trial/test/test_tests.py +../twisted/trial/test/test_warning.py +../twisted/trial/test/test_doctest.py +../twisted/trial/test/__init__.py +../twisted/trial/test/test_log.py +../twisted/trial/test/erroneous.py +../twisted/trial/test/test_plugins.py +../twisted/trial/test/test_asyncassertions.py +../twisted/trial/test/test_suppression.py +../twisted/trial/test/sample.py +../twisted/trial/test/detests.py +../twisted/trial/test/mockcustomsuite2.py 
+../twisted/trial/test/test_pyunitcompat.py +../twisted/trial/test/test_runner.py +../twisted/trial/test/test_output.py +../twisted/trial/test/mockcustomsuite3.py +../twisted/trial/test/moduleself.py +../twisted/trial/test/packages.py +../twisted/trial/_dist/managercommands.py +../twisted/trial/_dist/disttrial.py +../twisted/trial/_dist/workerreporter.py +../twisted/trial/_dist/distreporter.py +../twisted/trial/_dist/workertrial.py +../twisted/trial/_dist/__init__.py +../twisted/trial/_dist/workercommands.py +../twisted/trial/_dist/worker.py +../twisted/trial/_dist/options.py +../twisted/trial/_dist/test/test_workerreporter.py +../twisted/trial/_dist/test/test_worker.py +../twisted/trial/_dist/test/test_distreporter.py +../twisted/trial/_dist/test/test_workertrial.py +../twisted/trial/_dist/test/__init__.py +../twisted/trial/_dist/test/test_options.py +../twisted/trial/_dist/test/test_disttrial.py +../twisted/cred/error.py +../twisted/cred/portal.py +../twisted/cred/strcred.py +../twisted/cred/__init__.py +../twisted/cred/_digest.py +../twisted/cred/credentials.py +../twisted/cred/checkers.py +../twisted/cred/pamauth.py +../twisted/conch/error.py +../twisted/conch/stdio.py +../twisted/conch/manhole_tap.py +../twisted/conch/telnet.py +../twisted/conch/_version.py +../twisted/conch/unix.py +../twisted/conch/interfaces.py +../twisted/conch/endpoints.py +../twisted/conch/recvline.py +../twisted/conch/__init__.py +../twisted/conch/mixin.py +../twisted/conch/ls.py +../twisted/conch/checkers.py +../twisted/conch/avatar.py +../twisted/conch/manhole_ssh.py +../twisted/conch/ttymodes.py +../twisted/conch/manhole.py +../twisted/conch/tap.py +../twisted/conch/ui/tkvt100.py +../twisted/conch/ui/__init__.py +../twisted/conch/ui/ansi.py +../twisted/conch/client/connect.py +../twisted/conch/client/default.py +../twisted/conch/client/agent.py +../twisted/conch/client/direct.py +../twisted/conch/client/__init__.py +../twisted/conch/client/knownhosts.py 
+../twisted/conch/client/options.py +../twisted/conch/openssh_compat/primes.py +../twisted/conch/openssh_compat/__init__.py +../twisted/conch/openssh_compat/factory.py +../twisted/conch/scripts/conch.py +../twisted/conch/scripts/tkconch.py +../twisted/conch/scripts/cftp.py +../twisted/conch/scripts/ckeygen.py +../twisted/conch/scripts/__init__.py +../twisted/conch/ssh/session.py +../twisted/conch/ssh/channel.py +../twisted/conch/ssh/common.py +../twisted/conch/ssh/keys.py +../twisted/conch/ssh/userauth.py +../twisted/conch/ssh/forwarding.py +../twisted/conch/ssh/connection.py +../twisted/conch/ssh/agent.py +../twisted/conch/ssh/service.py +../twisted/conch/ssh/transport.py +../twisted/conch/ssh/__init__.py +../twisted/conch/ssh/sexpy.py +../twisted/conch/ssh/address.py +../twisted/conch/ssh/factory.py +../twisted/conch/ssh/filetransfer.py +../twisted/conch/test/test_openssh_compat.py +../twisted/conch/test/test_helper.py +../twisted/conch/test/test_knownhosts.py +../twisted/conch/test/keydata.py +../twisted/conch/test/test_ckeygen.py +../twisted/conch/test/test_cftp.py +../twisted/conch/test/test_conch.py +../twisted/conch/test/test_keys.py +../twisted/conch/test/test_filetransfer.py +../twisted/conch/test/test_scripts.py +../twisted/conch/test/test_text.py +../twisted/conch/test/test_recvline.py +../twisted/conch/test/test_endpoints.py +../twisted/conch/test/test_userauth.py +../twisted/conch/test/test_manhole.py +../twisted/conch/test/__init__.py +../twisted/conch/test/test_telnet.py +../twisted/conch/test/test_channel.py +../twisted/conch/test/test_mixin.py +../twisted/conch/test/test_agent.py +../twisted/conch/test/test_transport.py +../twisted/conch/test/test_window.py +../twisted/conch/test/test_checkers.py +../twisted/conch/test/test_address.py +../twisted/conch/test/test_tap.py +../twisted/conch/test/test_connection.py +../twisted/conch/test/test_insults.py +../twisted/conch/test/test_ssh.py +../twisted/conch/test/test_default.py 
+../twisted/conch/test/test_session.py +../twisted/conch/insults/client.py +../twisted/conch/insults/text.py +../twisted/conch/insults/colors.py +../twisted/conch/insults/helper.py +../twisted/conch/insults/window.py +../twisted/conch/insults/__init__.py +../twisted/conch/insults/insults.py +../twisted/python/logfile.py +../twisted/python/formmethod.py +../twisted/python/compat.py +../twisted/python/threadpool.py +../twisted/python/monkey.py +../twisted/python/log.py +../twisted/python/hook.py +../twisted/python/fakepwd.py +../twisted/python/usage.py +../twisted/python/dist3.py +../twisted/python/util.py +../twisted/python/syslog.py +../twisted/python/threadable.py +../twisted/python/text.py +../twisted/python/runtime.py +../twisted/python/context.py +../twisted/python/_textattributes.py +../twisted/python/roots.py +../twisted/python/win32.py +../twisted/python/randbytes.py +../twisted/python/hashlib.py +../twisted/python/components.py +../twisted/python/_inotify.py +../twisted/python/modules.py +../twisted/python/zippath.py +../twisted/python/versions.py +../twisted/python/_release.py +../twisted/python/deprecate.py +../twisted/python/failure.py +../twisted/python/urlpath.py +../twisted/python/reflect.py +../twisted/python/__init__.py +../twisted/python/rebuild.py +../twisted/python/procutils.py +../twisted/python/finalize.py +../twisted/python/zipstream.py +../twisted/python/filepath.py +../twisted/python/dist.py +../twisted/python/systemd.py +../twisted/python/constants.py +../twisted/python/_shellcomp.py +../twisted/python/shortcut.py +../twisted/python/release.py +../twisted/python/htmlizer.py +../twisted/python/lockfile.py +../twisted/python/test/test_zippath.py +../twisted/python/test/test_fakepwd.py +../twisted/python/test/test_release.py +../twisted/python/test/deprecatedattributes.py +../twisted/python/test/test_constants.py +../twisted/python/test/test_sendmsg.py +../twisted/python/test/test_textattributes.py +../twisted/python/test/test_shellcomp.py 
+../twisted/python/test/test_htmlizer.py +../twisted/python/test/test_runtime.py +../twisted/python/test/pullpipe.py +../twisted/python/test/test_deprecate.py +../twisted/python/test/test_util.py +../twisted/python/test/test_hashlib.py +../twisted/python/test/test_inotify.py +../twisted/python/test/__init__.py +../twisted/python/test/test_dist.py +../twisted/python/test/test_syslog.py +../twisted/python/test/test_zipstream.py +../twisted/python/test/test_urlpath.py +../twisted/python/test/test_components.py +../twisted/python/test/test_win32.py +../twisted/python/test/test_systemd.py +../twisted/python/test/test_versions.py +../twisted/python/test/test_dist3.py +../twisted/python/test/modules_helpers.py +../twisted/runner/procmon.py +../twisted/runner/_version.py +../twisted/runner/inetdconf.py +../twisted/runner/inetdtap.py +../twisted/runner/__init__.py +../twisted/runner/procmontap.py +../twisted/runner/inetd.py +../twisted/runner/test/test_procmon.py +../twisted/runner/test/test_procmontap.py +../twisted/runner/test/__init__.py +../twisted/tap/telnet.py +../twisted/tap/ftp.py +../twisted/tap/socks.py +../twisted/tap/portforward.py +../twisted/tap/__init__.py +../twisted/tap/manhole.py +../twisted/positioning/ipositioning.py +../twisted/positioning/__init__.py +../twisted/positioning/_sentence.py +../twisted/positioning/base.py +../twisted/positioning/nmea.py +../twisted/positioning/test/test_sentence.py +../twisted/positioning/test/test_base.py +../twisted/positioning/test/receiver.py +../twisted/positioning/test/__init__.py +../twisted/positioning/test/test_nmea.py +../twisted/spread/pb.py +../twisted/spread/util.py +../twisted/spread/banana.py +../twisted/spread/interfaces.py +../twisted/spread/jelly.py +../twisted/spread/__init__.py +../twisted/spread/flavors.py +../twisted/spread/publish.py +../twisted/spread/ui/gtk2util.py +../twisted/spread/ui/__init__.py +../twisted/spread/ui/tkutil.py +../twisted/spread/ui/tktree.py +../twisted/internet/error.py 
+../twisted/internet/default.py +../twisted/internet/qtreactor.py +../twisted/internet/win32eventreactor.py +../twisted/internet/stdio.py +../twisted/internet/cfreactor.py +../twisted/internet/_posixserialport.py +../twisted/internet/ssl.py +../twisted/internet/threads.py +../twisted/internet/_pollingfile.py +../twisted/internet/gtk2reactor.py +../twisted/internet/tksupport.py +../twisted/internet/gireactor.py +../twisted/internet/glib2reactor.py +../twisted/internet/_newtls.py +../twisted/internet/gtk3reactor.py +../twisted/internet/_baseprocess.py +../twisted/internet/abstract.py +../twisted/internet/inotify.py +../twisted/internet/unix.py +../twisted/internet/interfaces.py +../twisted/internet/protocol.py +../twisted/internet/_dumbwin32proc.py +../twisted/internet/tcp.py +../twisted/internet/endpoints.py +../twisted/internet/main.py +../twisted/internet/udp.py +../twisted/internet/wxsupport.py +../twisted/internet/pyuisupport.py +../twisted/internet/process.py +../twisted/internet/_signals.py +../twisted/internet/__init__.py +../twisted/internet/posixbase.py +../twisted/internet/pollreactor.py +../twisted/internet/serialport.py +../twisted/internet/_sslverify.py +../twisted/internet/kqreactor.py +../twisted/internet/fdesc.py +../twisted/internet/reactor.py +../twisted/internet/base.py +../twisted/internet/address.py +../twisted/internet/_threadedselect.py +../twisted/internet/_ssl.py +../twisted/internet/selectreactor.py +../twisted/internet/_win32serialport.py +../twisted/internet/wxreactor.py +../twisted/internet/epollreactor.py +../twisted/internet/defer.py +../twisted/internet/utils.py +../twisted/internet/task.py +../twisted/internet/_win32stdio.py +../twisted/internet/gtkreactor.py +../twisted/internet/_posixstdio.py +../twisted/internet/_glibbase.py +../twisted/internet/iocpreactor/abstract.py +../twisted/internet/iocpreactor/interfaces.py +../twisted/internet/iocpreactor/tcp.py +../twisted/internet/iocpreactor/udp.py 
+../twisted/internet/iocpreactor/const.py +../twisted/internet/iocpreactor/__init__.py +../twisted/internet/iocpreactor/reactor.py +../twisted/internet/iocpreactor/setup.py +../twisted/internet/test/test_posixbase.py +../twisted/internet/test/modulehelpers.py +../twisted/internet/test/test_stdio.py +../twisted/internet/test/test_qtreactor.py +../twisted/internet/test/test_serialport.py +../twisted/internet/test/test_time.py +../twisted/internet/test/process_helper.py +../twisted/internet/test/test_threads.py +../twisted/internet/test/test_core.py +../twisted/internet/test/test_posixprocess.py +../twisted/internet/test/test_filedescriptor.py +../twisted/internet/test/_win32ifaces.py +../twisted/internet/test/test_main.py +../twisted/internet/test/test_sigchld.py +../twisted/internet/test/test_unix.py +../twisted/internet/test/test_base.py +../twisted/internet/test/test_epollreactor.py +../twisted/internet/test/test_socket.py +../twisted/internet/test/test_endpoints.py +../twisted/internet/test/test_newtls.py +../twisted/internet/test/test_abstract.py +../twisted/internet/test/test_inotify.py +../twisted/internet/test/test_protocol.py +../twisted/internet/test/test_baseprocess.py +../twisted/internet/test/__init__.py +../twisted/internet/test/test_fdset.py +../twisted/internet/test/test_glibbase.py +../twisted/internet/test/test_gireactor.py +../twisted/internet/test/process_gireactornocompat.py +../twisted/internet/test/fakeendpoint.py +../twisted/internet/test/test_tcp.py +../twisted/internet/test/test_address.py +../twisted/internet/test/test_iocp.py +../twisted/internet/test/reactormixins.py +../twisted/internet/test/test_tls.py +../twisted/internet/test/_posixifaces.py +../twisted/internet/test/test_pollingfile.py +../twisted/internet/test/test_gtkreactor.py +../twisted/internet/test/test_udp.py +../twisted/internet/test/test_default.py +../twisted/internet/test/test_inlinecb.py +../twisted/internet/test/test_process.py 
+../twisted/internet/test/test_udp_internals.py +../twisted/internet/test/connectionmixins.py +../twisted/internet/test/test_win32events.py +../twisted/news/nntp.py +../twisted/news/database.py +../twisted/news/_version.py +../twisted/news/news.py +../twisted/news/__init__.py +../twisted/news/tap.py +../twisted/news/test/test_database.py +../twisted/news/test/__init__.py +../twisted/news/test/test_nntp.py +../twisted/news/test/test_news.py +../twisted/words/_version.py +../twisted/words/service.py +../twisted/words/iwords.py +../twisted/words/__init__.py +../twisted/words/ewords.py +../twisted/words/xmpproutertap.py +../twisted/words/tap.py +../twisted/words/xish/xpathparser.py +../twisted/words/xish/utility.py +../twisted/words/xish/xmlstream.py +../twisted/words/xish/__init__.py +../twisted/words/xish/xpath.py +../twisted/words/xish/domish.py +../twisted/words/im/basesupport.py +../twisted/words/im/baseaccount.py +../twisted/words/im/basechat.py +../twisted/words/im/interfaces.py +../twisted/words/im/ircsupport.py +../twisted/words/im/__init__.py +../twisted/words/im/pbsupport.py +../twisted/words/im/locals.py +../twisted/words/test/test_msn.py +../twisted/words/test/test_jabberjid.py +../twisted/words/test/test_ircsupport.py +../twisted/words/test/test_service.py +../twisted/words/test/test_jabbersaslmechanisms.py +../twisted/words/test/test_xmlstream.py +../twisted/words/test/test_irc.py +../twisted/words/test/test_xmpproutertap.py +../twisted/words/test/test_jabberxmlstream.py +../twisted/words/test/test_basesupport.py +../twisted/words/test/test_irc_service.py +../twisted/words/test/test_domish.py +../twisted/words/test/__init__.py +../twisted/words/test/test_jabbercomponent.py +../twisted/words/test/test_jabberxmppstringprep.py +../twisted/words/test/test_tap.py +../twisted/words/test/test_basechat.py +../twisted/words/test/test_oscar.py +../twisted/words/test/test_jabberjstrports.py +../twisted/words/test/test_jabberclient.py 
+../twisted/words/test/test_jabbersasl.py +../twisted/words/test/test_xpath.py +../twisted/words/test/test_xishutil.py +../twisted/words/test/test_jabbererror.py +../twisted/words/protocols/irc.py +../twisted/words/protocols/oscar.py +../twisted/words/protocols/__init__.py +../twisted/words/protocols/msn.py +../twisted/words/protocols/jabber/error.py +../twisted/words/protocols/jabber/client.py +../twisted/words/protocols/jabber/ijabber.py +../twisted/words/protocols/jabber/jid.py +../twisted/words/protocols/jabber/sasl_mechanisms.py +../twisted/words/protocols/jabber/xmpp_stringprep.py +../twisted/words/protocols/jabber/component.py +../twisted/words/protocols/jabber/xmlstream.py +../twisted/words/protocols/jabber/__init__.py +../twisted/words/protocols/jabber/jstrports.py +../twisted/words/protocols/jabber/sasl.py +../twisted/scripts/tapconvert.py +../twisted/scripts/tap2rpm.py +../twisted/scripts/twistd.py +../twisted/scripts/trial.py +../twisted/scripts/_twistd_unix.py +../twisted/scripts/_twistw.py +../twisted/scripts/tap2deb.py +../twisted/scripts/__init__.py +../twisted/scripts/tkunzip.py +../twisted/scripts/manhole.py +../twisted/scripts/htmlizer.py +../twisted/scripts/test/test_scripts.py +../twisted/scripts/test/test_tap2deb.py +../twisted/scripts/test/__init__.py +../twisted/scripts/test/test_tap2rpm.py +../twisted/lore/slides.py +../twisted/lore/lint.py +../twisted/lore/default.py +../twisted/lore/_version.py +../twisted/lore/numberer.py +../twisted/lore/docbook.py +../twisted/lore/htmlbook.py +../twisted/lore/process.py +../twisted/lore/__init__.py +../twisted/lore/man2lore.py +../twisted/lore/lmath.py +../twisted/lore/latex.py +../twisted/lore/indexer.py +../twisted/lore/tree.py +../twisted/lore/texi.py +../twisted/lore/scripts/lore.py +../twisted/lore/scripts/__init__.py +../twisted/lore/test/test_man2lore.py +../twisted/lore/test/test_lint.py +../twisted/lore/test/test_scripts.py +../twisted/lore/test/test_lmath.py +../twisted/lore/test/test_lore.py 
+../twisted/lore/test/test_latex.py +../twisted/lore/test/__init__.py +../twisted/lore/test/test_texi.py +../twisted/lore/test/test_docbook.py +../twisted/lore/test/test_slides.py +../twisted/web/error.py +../twisted/web/client.py +../twisted/web/twcgi.py +../twisted/web/soap.py +../twisted/web/xmlrpc.py +../twisted/web/server.py +../twisted/web/util.py +../twisted/web/_stan.py +../twisted/web/distrib.py +../twisted/web/_version.py +../twisted/web/http.py +../twisted/web/wsgi.py +../twisted/web/sux.py +../twisted/web/static.py +../twisted/web/http_headers.py +../twisted/web/domhelpers.py +../twisted/web/_newclient.py +../twisted/web/script.py +../twisted/web/iweb.py +../twisted/web/vhost.py +../twisted/web/guard.py +../twisted/web/_flatten.py +../twisted/web/template.py +../twisted/web/demo.py +../twisted/web/_responses.py +../twisted/web/resource.py +../twisted/web/proxy.py +../twisted/web/__init__.py +../twisted/web/microdom.py +../twisted/web/_element.py +../twisted/web/html.py +../twisted/web/rewrite.py +../twisted/web/tap.py +../twisted/web/_auth/digest.py +../twisted/web/_auth/wrapper.py +../twisted/web/_auth/__init__.py +../twisted/web/_auth/basic.py +../twisted/web/test/test_http_headers.py +../twisted/web/test/test_xml.py +../twisted/web/test/requesthelper.py +../twisted/web/test/test_httpauth.py +../twisted/web/test/test_error.py +../twisted/web/test/test_newclient.py +../twisted/web/test/test_stan.py +../twisted/web/test/test_script.py +../twisted/web/test/test_wsgi.py +../twisted/web/test/test_util.py +../twisted/web/test/test_cgi.py +../twisted/web/test/test_http.py +../twisted/web/test/_util.py +../twisted/web/test/__init__.py +../twisted/web/test/test_flatten.py +../twisted/web/test/test_static.py +../twisted/web/test/test_proxy.py +../twisted/web/test/test_agent.py +../twisted/web/test/test_soap.py +../twisted/web/test/test_webclient.py +../twisted/web/test/test_web.py +../twisted/web/test/test_tap.py +../twisted/web/test/test_template.py 
+../twisted/web/test/test_domhelpers.py +../twisted/web/test/test_distrib.py +../twisted/web/test/test_xmlrpc.py +../twisted/web/test/test_resource.py +../twisted/web/test/test_vhost.py +../twisted/pair/_version.py +../twisted/pair/ip.py +../twisted/pair/ethernet.py +../twisted/pair/__init__.py +../twisted/pair/rawudp.py +../twisted/pair/testing.py +../twisted/pair/raw.py +../twisted/pair/tuntap.py +../twisted/pair/test/test_ip.py +../twisted/pair/test/test_ethernet.py +../twisted/pair/test/__init__.py +../twisted/pair/test/test_tuntap.py +../twisted/pair/test/test_rawudp.py +../twisted/persisted/aot.py +../twisted/persisted/crefutil.py +../twisted/persisted/sob.py +../twisted/persisted/__init__.py +../twisted/persisted/dirdbm.py +../twisted/persisted/styles.py +../twisted/persisted/test/test_styles.py +../twisted/persisted/test/__init__.py +../twisted/application/reactors.py +../twisted/application/service.py +../twisted/application/internet.py +../twisted/application/__init__.py +../twisted/application/strports.py +../twisted/application/app.py +../twisted/application/test/__init__.py +../twisted/application/test/test_internet.py +../twisted/plugins/twisted_reactors.py +../twisted/plugins/twisted_lore.py +../twisted/plugins/twisted_runner.py +../twisted/plugins/twisted_conch.py +../twisted/plugins/twisted_portforward.py +../twisted/plugins/twisted_qtstub.py +../twisted/plugins/twisted_names.py +../twisted/plugins/twisted_news.py +../twisted/plugins/cred_memory.py +../twisted/plugins/cred_anonymous.py +../twisted/plugins/twisted_web.py +../twisted/plugins/twisted_ftp.py +../twisted/plugins/__init__.py +../twisted/plugins/twisted_mail.py +../twisted/plugins/twisted_socks.py +../twisted/plugins/twisted_words.py +../twisted/plugins/cred_file.py +../twisted/plugins/twisted_inet.py +../twisted/plugins/cred_sshkeys.py +../twisted/plugins/twisted_telnet.py +../twisted/plugins/twisted_core.py +../twisted/plugins/cred_unix.py +../twisted/plugins/twisted_trial.py 
+../twisted/plugins/twisted_manhole.py +../twisted/test/plugin_extra1.py +../twisted/test/test_stdio.py +../twisted/test/test_compat.py +../twisted/test/test_sip.py +../twisted/test/test_shortcut.py +../twisted/test/stdio_test_writeseq.py +../twisted/test/stdio_test_consumer.py +../twisted/test/test_rebuild.py +../twisted/test/process_echoer.py +../twisted/test/iosim.py +../twisted/test/stdio_test_write.py +../twisted/test/test_usage.py +../twisted/test/stdio_test_hostpeer.py +../twisted/test/test_dirdbm.py +../twisted/test/test_threads.py +../twisted/test/test_htb.py +../twisted/test/test_logfile.py +../twisted/test/process_tester.py +../twisted/test/test_monkey.py +../twisted/test/test_adbapi.py +../twisted/test/test_task.py +../twisted/test/test_strerror.py +../twisted/test/test_amp.py +../twisted/test/test_stringtransport.py +../twisted/test/test_threadpool.py +../twisted/test/test_error.py +../twisted/test/test_formmethod.py +../twisted/test/process_cmdline.py +../twisted/test/test_socks.py +../twisted/test/myrebuilder2.py +../twisted/test/test_sslverify.py +../twisted/test/test_defgen.py +../twisted/test/test_unix.py +../twisted/test/test_banana.py +../twisted/test/stdio_test_halfclose.py +../twisted/test/test_text.py +../twisted/test/test_iutils.py +../twisted/test/process_fds.py +../twisted/test/process_stdinreader.py +../twisted/test/test_twisted.py +../twisted/test/test_ftp_options.py +../twisted/test/reflect_helper_IE.py +../twisted/test/test_roots.py +../twisted/test/test_dict.py +../twisted/test/test_postfix.py +../twisted/test/test_ssl.py +../twisted/test/stdio_test_producer.py +../twisted/test/test_persisted.py +../twisted/test/test_defer.py +../twisted/test/test_jelly.py +../twisted/test/proto_helpers.py +../twisted/test/test_setup.py +../twisted/test/test_strcred.py +../twisted/test/test_abstract.py +../twisted/test/mock_win32process.py +../twisted/test/process_signal.py +../twisted/test/test_tcp_internals.py +../twisted/test/test_threadable.py 
+../twisted/test/test_doc.py +../twisted/test/test_manhole.py +../twisted/test/process_tty.py +../twisted/test/test_hook.py +../twisted/test/test_loopback.py +../twisted/test/__init__.py +../twisted/test/test_failure.py +../twisted/test/test_log.py +../twisted/test/stdio_test_lastwrite.py +../twisted/test/test_strports.py +../twisted/test/test_cooperator.py +../twisted/test/test_tpfile.py +../twisted/test/stdio_test_loseconn.py +../twisted/test/crash_test_dummy.py +../twisted/test/myrebuilder1.py +../twisted/test/test_ftp.py +../twisted/test/test_pcp.py +../twisted/test/plugin_basic.py +../twisted/test/test_protocols.py +../twisted/test/test_policies.py +../twisted/test/test_sob.py +../twisted/test/test_explorer.py +../twisted/test/test_factories.py +../twisted/test/test_tcp.py +../twisted/test/test_digestauth.py +../twisted/test/test_pb.py +../twisted/test/test_application.py +../twisted/test/test_context.py +../twisted/test/test_iosim.py +../twisted/test/process_reader.py +../twisted/test/test_stateful.py +../twisted/test/test_reflect.py +../twisted/test/test_pbfailure.py +../twisted/test/test_nmea.py +../twisted/test/plugin_extra2.py +../twisted/test/test_finger.py +../twisted/test/test_randbytes.py +../twisted/test/process_twisted.py +../twisted/test/ssl_helpers.py +../twisted/test/test_udp.py +../twisted/test/test_ident.py +../twisted/test/test_paths.py +../twisted/test/reflect_helper_ZDE.py +../twisted/test/test_twistd.py +../twisted/test/test_memcache.py +../twisted/test/reflect_helper_VE.py +../twisted/test/test_process.py +../twisted/test/test_newcred.py +../twisted/test/testutils.py +../twisted/test/test_modules.py +../twisted/test/test_plugin.py +../twisted/test/_preamble.py +../twisted/test/test_internet.py +../twisted/test/test_fdesc.py +../twisted/test/process_linger.py +../twisted/test/test_lockfile.py +../twisted/protocols/sip.py +../twisted/protocols/stateful.py +../twisted/protocols/telnet.py +../twisted/protocols/wire.py 
+../twisted/protocols/shoutcast.py +../twisted/protocols/dict.py +../twisted/protocols/ident.py +../twisted/protocols/ftp.py +../twisted/protocols/finger.py +../twisted/protocols/amp.py +../twisted/protocols/postfix.py +../twisted/protocols/socks.py +../twisted/protocols/portforward.py +../twisted/protocols/__init__.py +../twisted/protocols/tls.py +../twisted/protocols/basic.py +../twisted/protocols/memcache.py +../twisted/protocols/htb.py +../twisted/protocols/loopback.py +../twisted/protocols/policies.py +../twisted/protocols/pcp.py +../twisted/protocols/gps/__init__.py +../twisted/protocols/gps/rockwell.py +../twisted/protocols/gps/nmea.py +../twisted/protocols/mice/__init__.py +../twisted/protocols/mice/mouseman.py +../twisted/protocols/test/test_basic.py +../twisted/protocols/test/__init__.py +../twisted/protocols/test/test_tls.py +../twisted/enterprise/__init__.py +../twisted/enterprise/adbapi.py +../twisted/python/sendmsg.c +../twisted/runner/portmap.c +../twisted/internet/iocpreactor/iocpsupport/iocpsupport.c +../twisted/internet/iocpreactor/iocpsupport/winsock_pointers.c +../twisted/test/raiser.c +../twisted/copyright.pyc +../twisted/_version.pyc +../twisted/plugin.pyc +../twisted/__init__.pyc +../twisted/manhole/telnet.pyc +../twisted/manhole/explorer.pyc +../twisted/manhole/service.pyc +../twisted/manhole/_inspectro.pyc +../twisted/manhole/__init__.pyc +../twisted/manhole/gladereactor.pyc +../twisted/manhole/ui/gtk2manhole.pyc +../twisted/manhole/ui/__init__.pyc +../twisted/manhole/ui/test/__init__.pyc +../twisted/manhole/ui/test/test_gtk2manhole.pyc +../twisted/manhole/test/__init__.pyc +../twisted/manhole/test/test_explorer.pyc +../twisted/mail/pb.pyc +../twisted/mail/relaymanager.pyc +../twisted/mail/imap4.pyc +../twisted/mail/_version.pyc +../twisted/mail/relay.pyc +../twisted/mail/pop3client.pyc +../twisted/mail/bounce.pyc +../twisted/mail/pop3.pyc +../twisted/mail/mail.pyc +../twisted/mail/__init__.pyc +../twisted/mail/alias.pyc 
+../twisted/mail/smtp.pyc +../twisted/mail/protocols.pyc +../twisted/mail/maildir.pyc +../twisted/mail/tap.pyc +../twisted/mail/scripts/mailmail.pyc +../twisted/mail/scripts/__init__.pyc +../twisted/mail/test/test_pop3client.pyc +../twisted/mail/test/test_smtp.pyc +../twisted/mail/test/test_scripts.pyc +../twisted/mail/test/test_imap.pyc +../twisted/mail/test/test_bounce.pyc +../twisted/mail/test/pop3testserver.pyc +../twisted/mail/test/__init__.pyc +../twisted/mail/test/test_mail.pyc +../twisted/mail/test/test_pop3.pyc +../twisted/mail/test/test_options.pyc +../twisted/mail/test/test_mailmail.pyc +../twisted/names/error.pyc +../twisted/names/client.pyc +../twisted/names/common.pyc +../twisted/names/server.pyc +../twisted/names/_version.pyc +../twisted/names/root.pyc +../twisted/names/hosts.pyc +../twisted/names/_rfc1982.pyc +../twisted/names/dns.pyc +../twisted/names/resolve.pyc +../twisted/names/__init__.pyc +../twisted/names/secondary.pyc +../twisted/names/srvconnect.pyc +../twisted/names/cache.pyc +../twisted/names/authority.pyc +../twisted/names/tap.pyc +../twisted/names/test/test_rfc1982.pyc +../twisted/names/test/test_client.pyc +../twisted/names/test/test_hosts.pyc +../twisted/names/test/test_server.pyc +../twisted/names/test/test_names.pyc +../twisted/names/test/test_resolve.pyc +../twisted/names/test/test_common.pyc +../twisted/names/test/__init__.pyc +../twisted/names/test/test_examples.pyc +../twisted/names/test/test_cache.pyc +../twisted/names/test/test_tap.pyc +../twisted/names/test/test_rootresolve.pyc +../twisted/names/test/test_srvconnect.pyc +../twisted/names/test/test_dns.pyc +../twisted/trial/unittest.pyc +../twisted/trial/_synctest.pyc +../twisted/trial/util.pyc +../twisted/trial/__init__.pyc +../twisted/trial/reporter.pyc +../twisted/trial/itrial.pyc +../twisted/trial/_asynctest.pyc +../twisted/trial/runner.pyc +../twisted/trial/_asyncrunner.pyc +../twisted/trial/test/scripttest.pyc +../twisted/trial/test/weird.pyc 
+../twisted/trial/test/test_assertions.pyc +../twisted/trial/test/mockcustomsuite.pyc +../twisted/trial/test/skipping.pyc +../twisted/trial/test/test_loader.pyc +../twisted/trial/test/test_reporter.pyc +../twisted/trial/test/novars.pyc +../twisted/trial/test/test_keyboard.pyc +../twisted/trial/test/moduletest.pyc +../twisted/trial/test/test_deferred.pyc +../twisted/trial/test/test_script.pyc +../twisted/trial/test/mockdoctest.pyc +../twisted/trial/test/test_testcase.pyc +../twisted/trial/test/test_util.pyc +../twisted/trial/test/ordertests.pyc +../twisted/trial/test/suppression.pyc +../twisted/trial/test/test_tests.pyc +../twisted/trial/test/test_warning.pyc +../twisted/trial/test/test_doctest.pyc +../twisted/trial/test/__init__.pyc +../twisted/trial/test/test_log.pyc +../twisted/trial/test/erroneous.pyc +../twisted/trial/test/test_plugins.pyc +../twisted/trial/test/test_asyncassertions.pyc +../twisted/trial/test/test_suppression.pyc +../twisted/trial/test/sample.pyc +../twisted/trial/test/detests.pyc +../twisted/trial/test/mockcustomsuite2.pyc +../twisted/trial/test/test_pyunitcompat.pyc +../twisted/trial/test/test_runner.pyc +../twisted/trial/test/test_output.pyc +../twisted/trial/test/mockcustomsuite3.pyc +../twisted/trial/test/moduleself.pyc +../twisted/trial/test/packages.pyc +../twisted/trial/_dist/managercommands.pyc +../twisted/trial/_dist/disttrial.pyc +../twisted/trial/_dist/workerreporter.pyc +../twisted/trial/_dist/distreporter.pyc +../twisted/trial/_dist/workertrial.pyc +../twisted/trial/_dist/__init__.pyc +../twisted/trial/_dist/workercommands.pyc +../twisted/trial/_dist/worker.pyc +../twisted/trial/_dist/options.pyc +../twisted/trial/_dist/test/test_workerreporter.pyc +../twisted/trial/_dist/test/test_worker.pyc +../twisted/trial/_dist/test/test_distreporter.pyc +../twisted/trial/_dist/test/test_workertrial.pyc +../twisted/trial/_dist/test/__init__.pyc +../twisted/trial/_dist/test/test_options.pyc +../twisted/trial/_dist/test/test_disttrial.pyc 
+../twisted/cred/error.pyc +../twisted/cred/portal.pyc +../twisted/cred/strcred.pyc +../twisted/cred/__init__.pyc +../twisted/cred/_digest.pyc +../twisted/cred/credentials.pyc +../twisted/cred/checkers.pyc +../twisted/cred/pamauth.pyc +../twisted/conch/error.pyc +../twisted/conch/stdio.pyc +../twisted/conch/manhole_tap.pyc +../twisted/conch/telnet.pyc +../twisted/conch/_version.pyc +../twisted/conch/unix.pyc +../twisted/conch/interfaces.pyc +../twisted/conch/endpoints.pyc +../twisted/conch/recvline.pyc +../twisted/conch/__init__.pyc +../twisted/conch/mixin.pyc +../twisted/conch/ls.pyc +../twisted/conch/checkers.pyc +../twisted/conch/avatar.pyc +../twisted/conch/manhole_ssh.pyc +../twisted/conch/ttymodes.pyc +../twisted/conch/manhole.pyc +../twisted/conch/tap.pyc +../twisted/conch/ui/tkvt100.pyc +../twisted/conch/ui/__init__.pyc +../twisted/conch/ui/ansi.pyc +../twisted/conch/client/connect.pyc +../twisted/conch/client/default.pyc +../twisted/conch/client/agent.pyc +../twisted/conch/client/direct.pyc +../twisted/conch/client/__init__.pyc +../twisted/conch/client/knownhosts.pyc +../twisted/conch/client/options.pyc +../twisted/conch/openssh_compat/primes.pyc +../twisted/conch/openssh_compat/__init__.pyc +../twisted/conch/openssh_compat/factory.pyc +../twisted/conch/scripts/conch.pyc +../twisted/conch/scripts/tkconch.pyc +../twisted/conch/scripts/cftp.pyc +../twisted/conch/scripts/ckeygen.pyc +../twisted/conch/scripts/__init__.pyc +../twisted/conch/ssh/session.pyc +../twisted/conch/ssh/channel.pyc +../twisted/conch/ssh/common.pyc +../twisted/conch/ssh/keys.pyc +../twisted/conch/ssh/userauth.pyc +../twisted/conch/ssh/forwarding.pyc +../twisted/conch/ssh/connection.pyc +../twisted/conch/ssh/agent.pyc +../twisted/conch/ssh/service.pyc +../twisted/conch/ssh/transport.pyc +../twisted/conch/ssh/__init__.pyc +../twisted/conch/ssh/sexpy.pyc +../twisted/conch/ssh/address.pyc +../twisted/conch/ssh/factory.pyc +../twisted/conch/ssh/filetransfer.pyc 
+../twisted/conch/test/test_openssh_compat.pyc +../twisted/conch/test/test_helper.pyc +../twisted/conch/test/test_knownhosts.pyc +../twisted/conch/test/keydata.pyc +../twisted/conch/test/test_ckeygen.pyc +../twisted/conch/test/test_cftp.pyc +../twisted/conch/test/test_conch.pyc +../twisted/conch/test/test_keys.pyc +../twisted/conch/test/test_filetransfer.pyc +../twisted/conch/test/test_scripts.pyc +../twisted/conch/test/test_text.pyc +../twisted/conch/test/test_recvline.pyc +../twisted/conch/test/test_endpoints.pyc +../twisted/conch/test/test_userauth.pyc +../twisted/conch/test/test_manhole.pyc +../twisted/conch/test/__init__.pyc +../twisted/conch/test/test_telnet.pyc +../twisted/conch/test/test_channel.pyc +../twisted/conch/test/test_mixin.pyc +../twisted/conch/test/test_agent.pyc +../twisted/conch/test/test_transport.pyc +../twisted/conch/test/test_window.pyc +../twisted/conch/test/test_checkers.pyc +../twisted/conch/test/test_address.pyc +../twisted/conch/test/test_tap.pyc +../twisted/conch/test/test_connection.pyc +../twisted/conch/test/test_insults.pyc +../twisted/conch/test/test_ssh.pyc +../twisted/conch/test/test_default.pyc +../twisted/conch/test/test_session.pyc +../twisted/conch/insults/client.pyc +../twisted/conch/insults/text.pyc +../twisted/conch/insults/colors.pyc +../twisted/conch/insults/helper.pyc +../twisted/conch/insults/window.pyc +../twisted/conch/insults/__init__.pyc +../twisted/conch/insults/insults.pyc +../twisted/python/logfile.pyc +../twisted/python/formmethod.pyc +../twisted/python/compat.pyc +../twisted/python/threadpool.pyc +../twisted/python/monkey.pyc +../twisted/python/log.pyc +../twisted/python/hook.pyc +../twisted/python/fakepwd.pyc +../twisted/python/usage.pyc +../twisted/python/dist3.pyc +../twisted/python/util.pyc +../twisted/python/syslog.pyc +../twisted/python/threadable.pyc +../twisted/python/text.pyc +../twisted/python/runtime.pyc +../twisted/python/context.pyc +../twisted/python/_textattributes.pyc 
+../twisted/python/roots.pyc +../twisted/python/win32.pyc +../twisted/python/randbytes.pyc +../twisted/python/hashlib.pyc +../twisted/python/components.pyc +../twisted/python/_inotify.pyc +../twisted/python/modules.pyc +../twisted/python/zippath.pyc +../twisted/python/versions.pyc +../twisted/python/_release.pyc +../twisted/python/deprecate.pyc +../twisted/python/failure.pyc +../twisted/python/urlpath.pyc +../twisted/python/reflect.pyc +../twisted/python/__init__.pyc +../twisted/python/rebuild.pyc +../twisted/python/procutils.pyc +../twisted/python/finalize.pyc +../twisted/python/zipstream.pyc +../twisted/python/filepath.pyc +../twisted/python/dist.pyc +../twisted/python/systemd.pyc +../twisted/python/constants.pyc +../twisted/python/_shellcomp.pyc +../twisted/python/shortcut.pyc +../twisted/python/release.pyc +../twisted/python/htmlizer.pyc +../twisted/python/lockfile.pyc +../twisted/python/test/test_zippath.pyc +../twisted/python/test/test_fakepwd.pyc +../twisted/python/test/test_release.pyc +../twisted/python/test/deprecatedattributes.pyc +../twisted/python/test/test_constants.pyc +../twisted/python/test/test_sendmsg.pyc +../twisted/python/test/test_textattributes.pyc +../twisted/python/test/test_shellcomp.pyc +../twisted/python/test/test_htmlizer.pyc +../twisted/python/test/test_runtime.pyc +../twisted/python/test/pullpipe.pyc +../twisted/python/test/test_deprecate.pyc +../twisted/python/test/test_util.pyc +../twisted/python/test/test_hashlib.pyc +../twisted/python/test/test_inotify.pyc +../twisted/python/test/__init__.pyc +../twisted/python/test/test_dist.pyc +../twisted/python/test/test_syslog.pyc +../twisted/python/test/test_zipstream.pyc +../twisted/python/test/test_urlpath.pyc +../twisted/python/test/test_components.pyc +../twisted/python/test/test_win32.pyc +../twisted/python/test/test_systemd.pyc +../twisted/python/test/test_versions.pyc +../twisted/python/test/test_dist3.pyc +../twisted/python/test/modules_helpers.pyc +../twisted/runner/procmon.pyc 
+../twisted/runner/_version.pyc +../twisted/runner/inetdconf.pyc +../twisted/runner/inetdtap.pyc +../twisted/runner/__init__.pyc +../twisted/runner/procmontap.pyc +../twisted/runner/inetd.pyc +../twisted/runner/test/test_procmon.pyc +../twisted/runner/test/test_procmontap.pyc +../twisted/runner/test/__init__.pyc +../twisted/tap/telnet.pyc +../twisted/tap/ftp.pyc +../twisted/tap/socks.pyc +../twisted/tap/portforward.pyc +../twisted/tap/__init__.pyc +../twisted/tap/manhole.pyc +../twisted/positioning/ipositioning.pyc +../twisted/positioning/__init__.pyc +../twisted/positioning/_sentence.pyc +../twisted/positioning/base.pyc +../twisted/positioning/nmea.pyc +../twisted/positioning/test/test_sentence.pyc +../twisted/positioning/test/test_base.pyc +../twisted/positioning/test/receiver.pyc +../twisted/positioning/test/__init__.pyc +../twisted/positioning/test/test_nmea.pyc +../twisted/spread/pb.pyc +../twisted/spread/util.pyc +../twisted/spread/banana.pyc +../twisted/spread/interfaces.pyc +../twisted/spread/jelly.pyc +../twisted/spread/__init__.pyc +../twisted/spread/flavors.pyc +../twisted/spread/publish.pyc +../twisted/spread/ui/gtk2util.pyc +../twisted/spread/ui/__init__.pyc +../twisted/spread/ui/tkutil.pyc +../twisted/spread/ui/tktree.pyc +../twisted/internet/error.pyc +../twisted/internet/default.pyc +../twisted/internet/qtreactor.pyc +../twisted/internet/win32eventreactor.pyc +../twisted/internet/stdio.pyc +../twisted/internet/cfreactor.pyc +../twisted/internet/_posixserialport.pyc +../twisted/internet/ssl.pyc +../twisted/internet/threads.pyc +../twisted/internet/_pollingfile.pyc +../twisted/internet/gtk2reactor.pyc +../twisted/internet/tksupport.pyc +../twisted/internet/gireactor.pyc +../twisted/internet/glib2reactor.pyc +../twisted/internet/_newtls.pyc +../twisted/internet/gtk3reactor.pyc +../twisted/internet/_baseprocess.pyc +../twisted/internet/abstract.pyc +../twisted/internet/inotify.pyc +../twisted/internet/unix.pyc +../twisted/internet/interfaces.pyc 
+../twisted/internet/protocol.pyc +../twisted/internet/_dumbwin32proc.pyc +../twisted/internet/tcp.pyc +../twisted/internet/endpoints.pyc +../twisted/internet/main.pyc +../twisted/internet/udp.pyc +../twisted/internet/wxsupport.pyc +../twisted/internet/pyuisupport.pyc +../twisted/internet/process.pyc +../twisted/internet/_signals.pyc +../twisted/internet/__init__.pyc +../twisted/internet/posixbase.pyc +../twisted/internet/pollreactor.pyc +../twisted/internet/serialport.pyc +../twisted/internet/_sslverify.pyc +../twisted/internet/kqreactor.pyc +../twisted/internet/fdesc.pyc +../twisted/internet/reactor.pyc +../twisted/internet/base.pyc +../twisted/internet/address.pyc +../twisted/internet/_threadedselect.pyc +../twisted/internet/_ssl.pyc +../twisted/internet/selectreactor.pyc +../twisted/internet/_win32serialport.pyc +../twisted/internet/wxreactor.pyc +../twisted/internet/epollreactor.pyc +../twisted/internet/defer.pyc +../twisted/internet/utils.pyc +../twisted/internet/task.pyc +../twisted/internet/_win32stdio.pyc +../twisted/internet/gtkreactor.pyc +../twisted/internet/_posixstdio.pyc +../twisted/internet/_glibbase.pyc +../twisted/internet/iocpreactor/abstract.pyc +../twisted/internet/iocpreactor/interfaces.pyc +../twisted/internet/iocpreactor/tcp.pyc +../twisted/internet/iocpreactor/udp.pyc +../twisted/internet/iocpreactor/const.pyc +../twisted/internet/iocpreactor/__init__.pyc +../twisted/internet/iocpreactor/reactor.pyc +../twisted/internet/iocpreactor/setup.pyc +../twisted/internet/test/test_posixbase.pyc +../twisted/internet/test/modulehelpers.pyc +../twisted/internet/test/test_stdio.pyc +../twisted/internet/test/test_qtreactor.pyc +../twisted/internet/test/test_serialport.pyc +../twisted/internet/test/test_time.pyc +../twisted/internet/test/process_helper.pyc +../twisted/internet/test/test_threads.pyc +../twisted/internet/test/test_core.pyc +../twisted/internet/test/test_posixprocess.pyc +../twisted/internet/test/test_filedescriptor.pyc 
+../twisted/internet/test/_win32ifaces.pyc +../twisted/internet/test/test_main.pyc +../twisted/internet/test/test_sigchld.pyc +../twisted/internet/test/test_unix.pyc +../twisted/internet/test/test_base.pyc +../twisted/internet/test/test_epollreactor.pyc +../twisted/internet/test/test_socket.pyc +../twisted/internet/test/test_endpoints.pyc +../twisted/internet/test/test_newtls.pyc +../twisted/internet/test/test_abstract.pyc +../twisted/internet/test/test_inotify.pyc +../twisted/internet/test/test_protocol.pyc +../twisted/internet/test/test_baseprocess.pyc +../twisted/internet/test/__init__.pyc +../twisted/internet/test/test_fdset.pyc +../twisted/internet/test/test_glibbase.pyc +../twisted/internet/test/test_gireactor.pyc +../twisted/internet/test/process_gireactornocompat.pyc +../twisted/internet/test/fakeendpoint.pyc +../twisted/internet/test/test_tcp.pyc +../twisted/internet/test/test_address.pyc +../twisted/internet/test/test_iocp.pyc +../twisted/internet/test/reactormixins.pyc +../twisted/internet/test/test_tls.pyc +../twisted/internet/test/_posixifaces.pyc +../twisted/internet/test/test_pollingfile.pyc +../twisted/internet/test/test_gtkreactor.pyc +../twisted/internet/test/test_udp.pyc +../twisted/internet/test/test_default.pyc +../twisted/internet/test/test_inlinecb.pyc +../twisted/internet/test/test_process.pyc +../twisted/internet/test/test_udp_internals.pyc +../twisted/internet/test/connectionmixins.pyc +../twisted/internet/test/test_win32events.pyc +../twisted/news/nntp.pyc +../twisted/news/database.pyc +../twisted/news/_version.pyc +../twisted/news/news.pyc +../twisted/news/__init__.pyc +../twisted/news/tap.pyc +../twisted/news/test/test_database.pyc +../twisted/news/test/__init__.pyc +../twisted/news/test/test_nntp.pyc +../twisted/news/test/test_news.pyc +../twisted/words/_version.pyc +../twisted/words/service.pyc +../twisted/words/iwords.pyc +../twisted/words/__init__.pyc +../twisted/words/ewords.pyc +../twisted/words/xmpproutertap.pyc 
+../twisted/words/tap.pyc +../twisted/words/xish/xpathparser.pyc +../twisted/words/xish/utility.pyc +../twisted/words/xish/xmlstream.pyc +../twisted/words/xish/__init__.pyc +../twisted/words/xish/xpath.pyc +../twisted/words/xish/domish.pyc +../twisted/words/im/basesupport.pyc +../twisted/words/im/baseaccount.pyc +../twisted/words/im/basechat.pyc +../twisted/words/im/interfaces.pyc +../twisted/words/im/ircsupport.pyc +../twisted/words/im/__init__.pyc +../twisted/words/im/pbsupport.pyc +../twisted/words/im/locals.pyc +../twisted/words/test/test_msn.pyc +../twisted/words/test/test_jabberjid.pyc +../twisted/words/test/test_ircsupport.pyc +../twisted/words/test/test_service.pyc +../twisted/words/test/test_jabbersaslmechanisms.pyc +../twisted/words/test/test_xmlstream.pyc +../twisted/words/test/test_irc.pyc +../twisted/words/test/test_xmpproutertap.pyc +../twisted/words/test/test_jabberxmlstream.pyc +../twisted/words/test/test_basesupport.pyc +../twisted/words/test/test_irc_service.pyc +../twisted/words/test/test_domish.pyc +../twisted/words/test/__init__.pyc +../twisted/words/test/test_jabbercomponent.pyc +../twisted/words/test/test_jabberxmppstringprep.pyc +../twisted/words/test/test_tap.pyc +../twisted/words/test/test_basechat.pyc +../twisted/words/test/test_oscar.pyc +../twisted/words/test/test_jabberjstrports.pyc +../twisted/words/test/test_jabberclient.pyc +../twisted/words/test/test_jabbersasl.pyc +../twisted/words/test/test_xpath.pyc +../twisted/words/test/test_xishutil.pyc +../twisted/words/test/test_jabbererror.pyc +../twisted/words/protocols/irc.pyc +../twisted/words/protocols/oscar.pyc +../twisted/words/protocols/__init__.pyc +../twisted/words/protocols/msn.pyc +../twisted/words/protocols/jabber/error.pyc +../twisted/words/protocols/jabber/client.pyc +../twisted/words/protocols/jabber/ijabber.pyc +../twisted/words/protocols/jabber/jid.pyc +../twisted/words/protocols/jabber/sasl_mechanisms.pyc +../twisted/words/protocols/jabber/xmpp_stringprep.pyc 
+../twisted/words/protocols/jabber/component.pyc +../twisted/words/protocols/jabber/xmlstream.pyc +../twisted/words/protocols/jabber/__init__.pyc +../twisted/words/protocols/jabber/jstrports.pyc +../twisted/words/protocols/jabber/sasl.pyc +../twisted/scripts/tapconvert.pyc +../twisted/scripts/tap2rpm.pyc +../twisted/scripts/twistd.pyc +../twisted/scripts/trial.pyc +../twisted/scripts/_twistd_unix.pyc +../twisted/scripts/_twistw.pyc +../twisted/scripts/tap2deb.pyc +../twisted/scripts/__init__.pyc +../twisted/scripts/tkunzip.pyc +../twisted/scripts/manhole.pyc +../twisted/scripts/htmlizer.pyc +../twisted/scripts/test/test_scripts.pyc +../twisted/scripts/test/test_tap2deb.pyc +../twisted/scripts/test/__init__.pyc +../twisted/scripts/test/test_tap2rpm.pyc +../twisted/lore/slides.pyc +../twisted/lore/lint.pyc +../twisted/lore/default.pyc +../twisted/lore/_version.pyc +../twisted/lore/numberer.pyc +../twisted/lore/docbook.pyc +../twisted/lore/htmlbook.pyc +../twisted/lore/process.pyc +../twisted/lore/__init__.pyc +../twisted/lore/man2lore.pyc +../twisted/lore/lmath.pyc +../twisted/lore/latex.pyc +../twisted/lore/indexer.pyc +../twisted/lore/tree.pyc +../twisted/lore/texi.pyc +../twisted/lore/scripts/lore.pyc +../twisted/lore/scripts/__init__.pyc +../twisted/lore/test/test_man2lore.pyc +../twisted/lore/test/test_lint.pyc +../twisted/lore/test/test_scripts.pyc +../twisted/lore/test/test_lmath.pyc +../twisted/lore/test/test_lore.pyc +../twisted/lore/test/test_latex.pyc +../twisted/lore/test/__init__.pyc +../twisted/lore/test/test_texi.pyc +../twisted/lore/test/test_docbook.pyc +../twisted/lore/test/test_slides.pyc +../twisted/web/error.pyc +../twisted/web/client.pyc +../twisted/web/twcgi.pyc +../twisted/web/soap.pyc +../twisted/web/xmlrpc.pyc +../twisted/web/server.pyc +../twisted/web/util.pyc +../twisted/web/_stan.pyc +../twisted/web/distrib.pyc +../twisted/web/_version.pyc +../twisted/web/http.pyc +../twisted/web/wsgi.pyc +../twisted/web/sux.pyc +../twisted/web/static.pyc 
+../twisted/web/http_headers.pyc +../twisted/web/domhelpers.pyc +../twisted/web/_newclient.pyc +../twisted/web/script.pyc +../twisted/web/iweb.pyc +../twisted/web/vhost.pyc +../twisted/web/guard.pyc +../twisted/web/_flatten.pyc +../twisted/web/template.pyc +../twisted/web/demo.pyc +../twisted/web/_responses.pyc +../twisted/web/resource.pyc +../twisted/web/proxy.pyc +../twisted/web/__init__.pyc +../twisted/web/microdom.pyc +../twisted/web/_element.pyc +../twisted/web/html.pyc +../twisted/web/rewrite.pyc +../twisted/web/tap.pyc +../twisted/web/_auth/digest.pyc +../twisted/web/_auth/wrapper.pyc +../twisted/web/_auth/__init__.pyc +../twisted/web/_auth/basic.pyc +../twisted/web/test/test_http_headers.pyc +../twisted/web/test/test_xml.pyc +../twisted/web/test/requesthelper.pyc +../twisted/web/test/test_httpauth.pyc +../twisted/web/test/test_error.pyc +../twisted/web/test/test_newclient.pyc +../twisted/web/test/test_stan.pyc +../twisted/web/test/test_script.pyc +../twisted/web/test/test_wsgi.pyc +../twisted/web/test/test_util.pyc +../twisted/web/test/test_cgi.pyc +../twisted/web/test/test_http.pyc +../twisted/web/test/_util.pyc +../twisted/web/test/__init__.pyc +../twisted/web/test/test_flatten.pyc +../twisted/web/test/test_static.pyc +../twisted/web/test/test_proxy.pyc +../twisted/web/test/test_agent.pyc +../twisted/web/test/test_soap.pyc +../twisted/web/test/test_webclient.pyc +../twisted/web/test/test_web.pyc +../twisted/web/test/test_tap.pyc +../twisted/web/test/test_template.pyc +../twisted/web/test/test_domhelpers.pyc +../twisted/web/test/test_distrib.pyc +../twisted/web/test/test_xmlrpc.pyc +../twisted/web/test/test_resource.pyc +../twisted/web/test/test_vhost.pyc +../twisted/pair/_version.pyc +../twisted/pair/ip.pyc +../twisted/pair/ethernet.pyc +../twisted/pair/__init__.pyc +../twisted/pair/rawudp.pyc +../twisted/pair/testing.pyc +../twisted/pair/raw.pyc +../twisted/pair/tuntap.pyc +../twisted/pair/test/test_ip.pyc +../twisted/pair/test/test_ethernet.pyc 
+../twisted/pair/test/__init__.pyc +../twisted/pair/test/test_tuntap.pyc +../twisted/pair/test/test_rawudp.pyc +../twisted/persisted/aot.pyc +../twisted/persisted/crefutil.pyc +../twisted/persisted/sob.pyc +../twisted/persisted/__init__.pyc +../twisted/persisted/dirdbm.pyc +../twisted/persisted/styles.pyc +../twisted/persisted/test/test_styles.pyc +../twisted/persisted/test/__init__.pyc +../twisted/application/reactors.pyc +../twisted/application/service.pyc +../twisted/application/internet.pyc +../twisted/application/__init__.pyc +../twisted/application/strports.pyc +../twisted/application/app.pyc +../twisted/application/test/__init__.pyc +../twisted/application/test/test_internet.pyc +../twisted/plugins/twisted_reactors.pyc +../twisted/plugins/twisted_lore.pyc +../twisted/plugins/twisted_runner.pyc +../twisted/plugins/twisted_conch.pyc +../twisted/plugins/twisted_portforward.pyc +../twisted/plugins/twisted_qtstub.pyc +../twisted/plugins/twisted_names.pyc +../twisted/plugins/twisted_news.pyc +../twisted/plugins/cred_memory.pyc +../twisted/plugins/cred_anonymous.pyc +../twisted/plugins/twisted_web.pyc +../twisted/plugins/twisted_ftp.pyc +../twisted/plugins/__init__.pyc +../twisted/plugins/twisted_mail.pyc +../twisted/plugins/twisted_socks.pyc +../twisted/plugins/twisted_words.pyc +../twisted/plugins/cred_file.pyc +../twisted/plugins/twisted_inet.pyc +../twisted/plugins/cred_sshkeys.pyc +../twisted/plugins/twisted_telnet.pyc +../twisted/plugins/twisted_core.pyc +../twisted/plugins/cred_unix.pyc +../twisted/plugins/twisted_trial.pyc +../twisted/plugins/twisted_manhole.pyc +../twisted/test/plugin_extra1.pyc +../twisted/test/test_stdio.pyc +../twisted/test/test_compat.pyc +../twisted/test/test_sip.pyc +../twisted/test/test_shortcut.pyc +../twisted/test/stdio_test_writeseq.pyc +../twisted/test/stdio_test_consumer.pyc +../twisted/test/test_rebuild.pyc +../twisted/test/process_echoer.pyc +../twisted/test/iosim.pyc +../twisted/test/stdio_test_write.pyc 
+../twisted/test/test_usage.pyc +../twisted/test/stdio_test_hostpeer.pyc +../twisted/test/test_dirdbm.pyc +../twisted/test/test_threads.pyc +../twisted/test/test_htb.pyc +../twisted/test/test_logfile.pyc +../twisted/test/process_tester.pyc +../twisted/test/test_monkey.pyc +../twisted/test/test_adbapi.pyc +../twisted/test/test_task.pyc +../twisted/test/test_strerror.pyc +../twisted/test/test_amp.pyc +../twisted/test/test_stringtransport.pyc +../twisted/test/test_threadpool.pyc +../twisted/test/test_error.pyc +../twisted/test/test_formmethod.pyc +../twisted/test/process_cmdline.pyc +../twisted/test/test_socks.pyc +../twisted/test/myrebuilder2.pyc +../twisted/test/test_sslverify.pyc +../twisted/test/test_defgen.pyc +../twisted/test/test_unix.pyc +../twisted/test/test_banana.pyc +../twisted/test/stdio_test_halfclose.pyc +../twisted/test/test_text.pyc +../twisted/test/test_iutils.pyc +../twisted/test/process_fds.pyc +../twisted/test/process_stdinreader.pyc +../twisted/test/test_twisted.pyc +../twisted/test/test_ftp_options.pyc +../twisted/test/reflect_helper_IE.pyc +../twisted/test/test_roots.pyc +../twisted/test/test_dict.pyc +../twisted/test/test_postfix.pyc +../twisted/test/test_ssl.pyc +../twisted/test/stdio_test_producer.pyc +../twisted/test/test_persisted.pyc +../twisted/test/test_defer.pyc +../twisted/test/test_jelly.pyc +../twisted/test/proto_helpers.pyc +../twisted/test/test_setup.pyc +../twisted/test/test_strcred.pyc +../twisted/test/test_abstract.pyc +../twisted/test/mock_win32process.pyc +../twisted/test/process_signal.pyc +../twisted/test/test_tcp_internals.pyc +../twisted/test/test_threadable.pyc +../twisted/test/test_doc.pyc +../twisted/test/test_manhole.pyc +../twisted/test/process_tty.pyc +../twisted/test/test_hook.pyc +../twisted/test/test_loopback.pyc +../twisted/test/__init__.pyc +../twisted/test/test_failure.pyc +../twisted/test/test_log.pyc +../twisted/test/stdio_test_lastwrite.pyc +../twisted/test/test_strports.pyc 
+../twisted/test/test_cooperator.pyc +../twisted/test/test_tpfile.pyc +../twisted/test/stdio_test_loseconn.pyc +../twisted/test/crash_test_dummy.pyc +../twisted/test/myrebuilder1.pyc +../twisted/test/test_ftp.pyc +../twisted/test/test_pcp.pyc +../twisted/test/plugin_basic.pyc +../twisted/test/test_protocols.pyc +../twisted/test/test_policies.pyc +../twisted/test/test_sob.pyc +../twisted/test/test_explorer.pyc +../twisted/test/test_factories.pyc +../twisted/test/test_tcp.pyc +../twisted/test/test_digestauth.pyc +../twisted/test/test_pb.pyc +../twisted/test/test_application.pyc +../twisted/test/test_context.pyc +../twisted/test/test_iosim.pyc +../twisted/test/process_reader.pyc +../twisted/test/test_stateful.pyc +../twisted/test/test_reflect.pyc +../twisted/test/test_pbfailure.pyc +../twisted/test/test_nmea.pyc +../twisted/test/plugin_extra2.pyc +../twisted/test/test_finger.pyc +../twisted/test/test_randbytes.pyc +../twisted/test/process_twisted.pyc +../twisted/test/ssl_helpers.pyc +../twisted/test/test_udp.pyc +../twisted/test/test_ident.pyc +../twisted/test/test_paths.pyc +../twisted/test/reflect_helper_ZDE.pyc +../twisted/test/test_twistd.pyc +../twisted/test/test_memcache.pyc +../twisted/test/reflect_helper_VE.pyc +../twisted/test/test_process.pyc +../twisted/test/test_newcred.pyc +../twisted/test/testutils.pyc +../twisted/test/test_modules.pyc +../twisted/test/test_plugin.pyc +../twisted/test/_preamble.pyc +../twisted/test/test_internet.pyc +../twisted/test/test_fdesc.pyc +../twisted/test/process_linger.pyc +../twisted/test/test_lockfile.pyc +../twisted/protocols/sip.pyc +../twisted/protocols/stateful.pyc +../twisted/protocols/telnet.pyc +../twisted/protocols/wire.pyc +../twisted/protocols/shoutcast.pyc +../twisted/protocols/dict.pyc +../twisted/protocols/ident.pyc +../twisted/protocols/ftp.pyc +../twisted/protocols/finger.pyc +../twisted/protocols/amp.pyc +../twisted/protocols/postfix.pyc +../twisted/protocols/socks.pyc +../twisted/protocols/portforward.pyc 
+../twisted/protocols/__init__.pyc +../twisted/protocols/tls.pyc +../twisted/protocols/basic.pyc +../twisted/protocols/memcache.pyc +../twisted/protocols/htb.pyc +../twisted/protocols/loopback.pyc +../twisted/protocols/policies.pyc +../twisted/protocols/pcp.pyc +../twisted/protocols/gps/__init__.pyc +../twisted/protocols/gps/rockwell.pyc +../twisted/protocols/gps/nmea.pyc +../twisted/protocols/mice/__init__.pyc +../twisted/protocols/mice/mouseman.pyc +../twisted/protocols/test/test_basic.pyc +../twisted/protocols/test/__init__.pyc +../twisted/protocols/test/test_tls.pyc +../twisted/enterprise/__init__.pyc +../twisted/enterprise/adbapi.pyc +../twisted/runner/portmap.so +../twisted/test/raiser.so +../twisted/python/sendmsg.so +../twisted/manhole/inspectro.glade +../twisted/manhole/logview.glade +../twisted/manhole/gladereactor.glade +../twisted/manhole/ui/gtk2manhole.glade +../twisted/mail/topfiles/README +../twisted/mail/topfiles/NEWS +../twisted/mail/test/rfc822.message +../twisted/mail/test/server.pem +../twisted/names/topfiles/README +../twisted/names/topfiles/NEWS +../twisted/conch/topfiles/README +../twisted/conch/topfiles/NEWS +../twisted/python/sendmsg.c +../twisted/python/twisted-completion.zsh +../twisted/python/_initgroups.c +../twisted/python/zsh/_tkmktap +../twisted/python/zsh/_twistd +../twisted/python/zsh/_mktap +../twisted/python/zsh/_lore +../twisted/python/zsh/_pyhtmlizer +../twisted/python/zsh/_tap2deb +../twisted/python/zsh/_websetroot +../twisted/python/zsh/README.txt +../twisted/python/zsh/_cftp +../twisted/python/zsh/_conch +../twisted/python/zsh/_tkconch +../twisted/python/zsh/_manhole +../twisted/python/zsh/_tapconvert +../twisted/python/zsh/_ckeygen +../twisted/python/zsh/_trial +../twisted/python/zsh/_tap2rpm +../twisted/runner/portmap.c +../twisted/runner/topfiles/README +../twisted/runner/topfiles/NEWS +../twisted/spread/ui/login2.glade +../twisted/internet/iocpreactor/notes.txt +../twisted/internet/iocpreactor/build.bat 
+../twisted/internet/iocpreactor/iocpsupport/winsock_pointers.c +../twisted/internet/iocpreactor/iocpsupport/wsasend.pxi +../twisted/internet/iocpreactor/iocpsupport/acceptex.pxi +../twisted/internet/iocpreactor/iocpsupport/winsock_pointers.h +../twisted/internet/iocpreactor/iocpsupport/wsarecv.pxi +../twisted/internet/iocpreactor/iocpsupport/connectex.pxi +../twisted/internet/iocpreactor/iocpsupport/iocpsupport.pyx +../twisted/internet/iocpreactor/iocpsupport/iocpsupport.c +../twisted/internet/test/fake_CAs/chain.pem +../twisted/internet/test/fake_CAs/thing2-duplicate.pem +../twisted/internet/test/fake_CAs/thing1.pem +../twisted/internet/test/fake_CAs/not-a-certificate +../twisted/internet/test/fake_CAs/thing2.pem +../twisted/news/topfiles/README +../twisted/news/topfiles/NEWS +../twisted/topfiles/README +../twisted/topfiles/NEWS +../twisted/topfiles/CREDITS +../twisted/topfiles/ChangeLog.Old +../twisted/words/xish/xpathparser.g +../twisted/words/topfiles/README +../twisted/words/topfiles/NEWS +../twisted/words/im/instancemessenger.glade +../twisted/lore/xhtml-symbol.ent +../twisted/lore/xhtml-lat1.ent +../twisted/lore/template.mgp +../twisted/lore/xhtml1-transitional.dtd +../twisted/lore/xhtml-special.ent +../twisted/lore/xhtml1-strict.dtd +../twisted/lore/topfiles/README +../twisted/lore/topfiles/NEWS +../twisted/lore/test/lore_index_file_unnumbered_out.html +../twisted/lore/test/template.tpl +../twisted/lore/test/lore_index_file_out_multiple.html +../twisted/lore/test/lore_numbering_test_out.html +../twisted/lore/test/simple.html +../twisted/lore/test/lore_numbering_test_out2.html +../twisted/lore/test/lore_index_file_out.html +../twisted/lore/test/lore_index_test.xhtml +../twisted/lore/test/simple3.html +../twisted/lore/test/simple4.html +../twisted/lore/test/lore_index_test2.xhtml +../twisted/web/failure.xhtml +../twisted/web/topfiles/README +../twisted/web/topfiles/NEWS +../twisted/pair/topfiles/README +../twisted/pair/topfiles/NEWS +../twisted/test/raiser.c 
+../twisted/test/server.pem +../twisted/test/raiser.pyx +./ +requires.txt +SOURCES.txt +dependency_links.txt +PKG-INFO +not-zip-safe +top_level.txt +../../../../bin/manhole +../../../../bin/twistd +../../../../bin/tap2deb +../../../../bin/mailmail +../../../../bin/tkconch +../../../../bin/trial +../../../../bin/ckeygen +../../../../bin/conch +../../../../bin/tap2rpm +../../../../bin/pyhtmlizer +../../../../bin/lore +../../../../bin/tapconvert +../../../../bin/cftp diff --git a/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/not-zip-safe b/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/not-zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/not-zip-safe @@ -0,0 +1 @@ + diff --git a/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/requires.txt b/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/requires.txt new file mode 100644 index 0000000..fcdfa93 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/requires.txt @@ -0,0 +1 @@ +zope.interface >= 3.6.0 \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/top_level.txt b/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/top_level.txt new file mode 100644 index 0000000..3eb29f0 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/top_level.txt @@ -0,0 +1 @@ +twisted diff --git a/Linux_i686/lib/python2.7/site-packages/Werkzeug-0.9.4.egg-info/PKG-INFO b/Linux_i686/lib/python2.7/site-packages/Werkzeug-0.9.4.egg-info/PKG-INFO new file mode 100644 index 0000000..a4d99cb --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Werkzeug-0.9.4.egg-info/PKG-INFO @@ -0,0 +1,72 @@ +Metadata-Version: 1.1 +Name: Werkzeug +Version: 0.9.4 +Summary: The Swiss Army knife of Python web development +Home-page: http://werkzeug.pocoo.org/ +Author: Armin Ronacher 
+Author-email: armin.ronacher@active-4.com +License: BSD +Description: + Werkzeug + ======== + + Werkzeug started as simple collection of various utilities for WSGI + applications and has become one of the most advanced WSGI utility + modules. It includes a powerful debugger, full featured request and + response objects, HTTP utilities to handle entity tags, cache control + headers, HTTP dates, cookie handling, file uploads, a powerful URL + routing system and a bunch of community contributed addon modules. + + Werkzeug is unicode aware and doesn't enforce a specific template + engine, database adapter or anything else. It doesn't even enforce + a specific way of handling requests and leaves all that up to the + developer. It's most useful for end user applications which should work + on as many server environments as possible (such as blogs, wikis, + bulletin boards, etc.). + + Details and example applications are available on the + `Werkzeug website `_. + + + Features + -------- + + - unicode awareness + + - request and response objects + + - various utility functions for dealing with HTTP headers such as + `Accept` and `Cache-Control` headers. + + - thread local objects with proper cleanup at request end + + - an interactive debugger + + - A simple WSGI server with support for threading and forking + with an automatic reloader. + + - a flexible URL routing system with REST support. + + - fully WSGI compatible + + + Development Version + ------------------- + + The Werkzeug development version can be installed by cloning the git + repository from `github`_:: + + git clone git@github.com:mitsuhiko/werkzeug.git + + .. 
_github: http://github.com/mitsuhiko/werkzeug + +Platform: any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Environment :: Web Environment +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: BSD License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content +Classifier: Topic :: Software Development :: Libraries :: Python Modules diff --git a/Linux_i686/lib/python2.7/site-packages/Werkzeug-0.9.4.egg-info/SOURCES.txt b/Linux_i686/lib/python2.7/site-packages/Werkzeug-0.9.4.egg-info/SOURCES.txt new file mode 100644 index 0000000..be54ff7 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Werkzeug-0.9.4.egg-info/SOURCES.txt @@ -0,0 +1,289 @@ +AUTHORS +CHANGES +LICENSE +MANIFEST.in +Makefile +setup.cfg +setup.py +Werkzeug.egg-info/PKG-INFO +Werkzeug.egg-info/SOURCES.txt +Werkzeug.egg-info/dependency_links.txt +Werkzeug.egg-info/not-zip-safe +Werkzeug.egg-info/top_level.txt +artwork/logo.png +artwork/logo.svg +docs/Makefile +docs/changes.rst +docs/conf.py +docs/contents.rst.inc +docs/datastructures.rst +docs/debug.rst +docs/exceptions.rst +docs/http.rst +docs/index.rst +docs/installation.rst +docs/latexindex.rst +docs/levels.rst +docs/local.rst +docs/logo.pdf +docs/make.bat +docs/makearchive.py +docs/middlewares.rst +docs/python3.rst +docs/quickstart.rst +docs/request_data.rst +docs/routing.rst +docs/serving.rst +docs/terms.rst +docs/test.rst +docs/transition.rst +docs/tutorial.rst +docs/unicode.rst +docs/utils.rst +docs/werkzeugext.py +docs/werkzeugstyle.sty +docs/wrappers.rst +docs/wsgi.rst +docs/_static/background.png +docs/_static/codebackground.png +docs/_static/contents.png +docs/_static/debug-screenshot.png +docs/_static/favicon.ico +docs/_static/header.png +docs/_static/navigation.png +docs/_static/navigation_active.png +docs/_static/shortly.png 
+docs/_static/shorty-screenshot.png +docs/_static/style.css +docs/_static/werkzeug.js +docs/_static/werkzeug.png +docs/_templates/sidebarintro.html +docs/_templates/sidebarlogo.html +docs/_themes/LICENSE +docs/_themes/README +docs/_themes/werkzeug_theme_support.py +docs/_themes/werkzeug/layout.html +docs/_themes/werkzeug/relations.html +docs/_themes/werkzeug/theme.conf +docs/_themes/werkzeug/static/werkzeug.css_t +docs/contrib/atom.rst +docs/contrib/cache.rst +docs/contrib/fixers.rst +docs/contrib/index.rst +docs/contrib/iterio.rst +docs/contrib/lint.rst +docs/contrib/profiler.rst +docs/contrib/securecookie.rst +docs/contrib/sessions.rst +docs/contrib/wrappers.rst +docs/deployment/cgi.rst +docs/deployment/fastcgi.rst +docs/deployment/index.rst +docs/deployment/mod_wsgi.rst +docs/deployment/proxying.rst +examples/README +examples/cookieauth.py +examples/httpbasicauth.py +examples/manage-coolmagic.py +examples/manage-couchy.py +examples/manage-cupoftee.py +examples/manage-i18nurls.py +examples/manage-plnt.py +examples/manage-shorty.py +examples/manage-simplewiki.py +examples/manage-webpylike.py +examples/upload.py +examples/contrib/README +examples/contrib/securecookie.py +examples/contrib/sessions.py +examples/coolmagic/__init__.py +examples/coolmagic/application.py +examples/coolmagic/helpers.py +examples/coolmagic/utils.py +examples/coolmagic/public/style.css +examples/coolmagic/templates/layout.html +examples/coolmagic/templates/static/about.html +examples/coolmagic/templates/static/index.html +examples/coolmagic/templates/static/not_found.html +examples/coolmagic/views/__init__.py +examples/coolmagic/views/static.py +examples/couchy/README +examples/couchy/__init__.py +examples/couchy/application.py +examples/couchy/models.py +examples/couchy/utils.py +examples/couchy/views.py +examples/couchy/static/style.css +examples/couchy/templates/display.html +examples/couchy/templates/layout.html +examples/couchy/templates/list.html +examples/couchy/templates/new.html 
+examples/couchy/templates/not_found.html +examples/cupoftee/__init__.py +examples/cupoftee/application.py +examples/cupoftee/db.py +examples/cupoftee/network.py +examples/cupoftee/pages.py +examples/cupoftee/utils.py +examples/cupoftee/shared/content.png +examples/cupoftee/shared/down.png +examples/cupoftee/shared/favicon.ico +examples/cupoftee/shared/header.png +examples/cupoftee/shared/logo.png +examples/cupoftee/shared/style.css +examples/cupoftee/shared/up.png +examples/cupoftee/templates/layout.html +examples/cupoftee/templates/missingpage.html +examples/cupoftee/templates/search.html +examples/cupoftee/templates/server.html +examples/cupoftee/templates/serverlist.html +examples/i18nurls/__init__.py +examples/i18nurls/application.py +examples/i18nurls/urls.py +examples/i18nurls/views.py +examples/i18nurls/templates/about.html +examples/i18nurls/templates/blog.html +examples/i18nurls/templates/index.html +examples/i18nurls/templates/layout.html +examples/partial/README +examples/partial/complex_routing.py +examples/plnt/__init__.py +examples/plnt/database.py +examples/plnt/sync.py +examples/plnt/utils.py +examples/plnt/views.py +examples/plnt/webapp.py +examples/plnt/shared/style.css +examples/plnt/templates/about.html +examples/plnt/templates/index.html +examples/plnt/templates/layout.html +examples/shortly/shortly.py +examples/shortly/static/style.css +examples/shortly/templates/404.html +examples/shortly/templates/layout.html +examples/shortly/templates/new_url.html +examples/shortly/templates/short_link_details.html +examples/shorty/__init__.py +examples/shorty/application.py +examples/shorty/models.py +examples/shorty/utils.py +examples/shorty/views.py +examples/shorty/static/style.css +examples/shorty/templates/display.html +examples/shorty/templates/layout.html +examples/shorty/templates/list.html +examples/shorty/templates/new.html +examples/shorty/templates/not_found.html +examples/simplewiki/__init__.py +examples/simplewiki/actions.py 
+examples/simplewiki/application.py +examples/simplewiki/database.py +examples/simplewiki/specialpages.py +examples/simplewiki/utils.py +examples/simplewiki/shared/style.css +examples/simplewiki/templates/action_diff.html +examples/simplewiki/templates/action_edit.html +examples/simplewiki/templates/action_log.html +examples/simplewiki/templates/action_revert.html +examples/simplewiki/templates/action_show.html +examples/simplewiki/templates/layout.html +examples/simplewiki/templates/macros.xml +examples/simplewiki/templates/missing_action.html +examples/simplewiki/templates/page_index.html +examples/simplewiki/templates/page_missing.html +examples/simplewiki/templates/recent_changes.html +examples/webpylike/example.py +examples/webpylike/webpylike.py +werkzeug/__init__.py +werkzeug/_compat.py +werkzeug/_internal.py +werkzeug/datastructures.py +werkzeug/exceptions.py +werkzeug/formparser.py +werkzeug/http.py +werkzeug/local.py +werkzeug/posixemulation.py +werkzeug/routing.py +werkzeug/script.py +werkzeug/security.py +werkzeug/serving.py +werkzeug/test.py +werkzeug/testapp.py +werkzeug/urls.py +werkzeug/useragents.py +werkzeug/utils.py +werkzeug/wrappers.py +werkzeug/wsgi.py +werkzeug/contrib/__init__.py +werkzeug/contrib/atom.py +werkzeug/contrib/cache.py +werkzeug/contrib/fixers.py +werkzeug/contrib/iterio.py +werkzeug/contrib/jsrouting.py +werkzeug/contrib/limiter.py +werkzeug/contrib/lint.py +werkzeug/contrib/profiler.py +werkzeug/contrib/securecookie.py +werkzeug/contrib/sessions.py +werkzeug/contrib/testtools.py +werkzeug/contrib/wrappers.py +werkzeug/debug/__init__.py +werkzeug/debug/console.py +werkzeug/debug/repr.py +werkzeug/debug/tbtools.py +werkzeug/debug/shared/FONT_LICENSE +werkzeug/debug/shared/console.png +werkzeug/debug/shared/debugger.js +werkzeug/debug/shared/jquery.js +werkzeug/debug/shared/less.png +werkzeug/debug/shared/more.png +werkzeug/debug/shared/source.png +werkzeug/debug/shared/style.css +werkzeug/debug/shared/ubuntu.ttf 
+werkzeug/testsuite/__init__.py +werkzeug/testsuite/compat.py +werkzeug/testsuite/datastructures.py +werkzeug/testsuite/debug.py +werkzeug/testsuite/exceptions.py +werkzeug/testsuite/formparser.py +werkzeug/testsuite/http.py +werkzeug/testsuite/internal.py +werkzeug/testsuite/local.py +werkzeug/testsuite/routing.py +werkzeug/testsuite/security.py +werkzeug/testsuite/serving.py +werkzeug/testsuite/test.py +werkzeug/testsuite/urls.py +werkzeug/testsuite/utils.py +werkzeug/testsuite/wrappers.py +werkzeug/testsuite/wsgi.py +werkzeug/testsuite/contrib/__init__.py +werkzeug/testsuite/contrib/cache.py +werkzeug/testsuite/contrib/fixers.py +werkzeug/testsuite/contrib/iterio.py +werkzeug/testsuite/contrib/securecookie.py +werkzeug/testsuite/contrib/sessions.py +werkzeug/testsuite/contrib/wrappers.py +werkzeug/testsuite/multipart/collect.py +werkzeug/testsuite/multipart/ie7_full_path_request.txt +werkzeug/testsuite/multipart/firefox3-2png1txt/file1.png +werkzeug/testsuite/multipart/firefox3-2png1txt/file2.png +werkzeug/testsuite/multipart/firefox3-2png1txt/request.txt +werkzeug/testsuite/multipart/firefox3-2png1txt/text.txt +werkzeug/testsuite/multipart/firefox3-2pnglongtext/file1.png +werkzeug/testsuite/multipart/firefox3-2pnglongtext/file2.png +werkzeug/testsuite/multipart/firefox3-2pnglongtext/request.txt +werkzeug/testsuite/multipart/firefox3-2pnglongtext/text.txt +werkzeug/testsuite/multipart/ie6-2png1txt/file1.png +werkzeug/testsuite/multipart/ie6-2png1txt/file2.png +werkzeug/testsuite/multipart/ie6-2png1txt/request.txt +werkzeug/testsuite/multipart/ie6-2png1txt/text.txt +werkzeug/testsuite/multipart/opera8-2png1txt/file1.png +werkzeug/testsuite/multipart/opera8-2png1txt/file2.png +werkzeug/testsuite/multipart/opera8-2png1txt/request.txt +werkzeug/testsuite/multipart/opera8-2png1txt/text.txt +werkzeug/testsuite/multipart/webkit3-2png1txt/file1.png +werkzeug/testsuite/multipart/webkit3-2png1txt/file2.png +werkzeug/testsuite/multipart/webkit3-2png1txt/request.txt 
+werkzeug/testsuite/multipart/webkit3-2png1txt/text.txt +werkzeug/testsuite/res/test.txt \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/Werkzeug-0.9.4.egg-info/dependency_links.txt b/Linux_i686/lib/python2.7/site-packages/Werkzeug-0.9.4.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Werkzeug-0.9.4.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/Linux_i686/lib/python2.7/site-packages/Werkzeug-0.9.4.egg-info/installed-files.txt b/Linux_i686/lib/python2.7/site-packages/Werkzeug-0.9.4.egg-info/installed-files.txt new file mode 100644 index 0000000..c213d9d --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Werkzeug-0.9.4.egg-info/installed-files.txt @@ -0,0 +1,161 @@ +../werkzeug/wrappers.py +../werkzeug/_compat.py +../werkzeug/security.py +../werkzeug/http.py +../werkzeug/wsgi.py +../werkzeug/useragents.py +../werkzeug/script.py +../werkzeug/exceptions.py +../werkzeug/datastructures.py +../werkzeug/posixemulation.py +../werkzeug/testapp.py +../werkzeug/urls.py +../werkzeug/local.py +../werkzeug/__init__.py +../werkzeug/serving.py +../werkzeug/formparser.py +../werkzeug/_internal.py +../werkzeug/test.py +../werkzeug/utils.py +../werkzeug/routing.py +../werkzeug/debug/repr.py +../werkzeug/debug/console.py +../werkzeug/debug/tbtools.py +../werkzeug/debug/__init__.py +../werkzeug/contrib/wrappers.py +../werkzeug/contrib/lint.py +../werkzeug/contrib/profiler.py +../werkzeug/contrib/iterio.py +../werkzeug/contrib/fixers.py +../werkzeug/contrib/sessions.py +../werkzeug/contrib/securecookie.py +../werkzeug/contrib/testtools.py +../werkzeug/contrib/__init__.py +../werkzeug/contrib/limiter.py +../werkzeug/contrib/jsrouting.py +../werkzeug/contrib/cache.py +../werkzeug/contrib/atom.py +../werkzeug/testsuite/wrappers.py +../werkzeug/testsuite/compat.py +../werkzeug/testsuite/internal.py +../werkzeug/testsuite/security.py 
+../werkzeug/testsuite/http.py +../werkzeug/testsuite/wsgi.py +../werkzeug/testsuite/exceptions.py +../werkzeug/testsuite/datastructures.py +../werkzeug/testsuite/urls.py +../werkzeug/testsuite/local.py +../werkzeug/testsuite/__init__.py +../werkzeug/testsuite/serving.py +../werkzeug/testsuite/formparser.py +../werkzeug/testsuite/test.py +../werkzeug/testsuite/debug.py +../werkzeug/testsuite/utils.py +../werkzeug/testsuite/routing.py +../werkzeug/testsuite/contrib/wrappers.py +../werkzeug/testsuite/contrib/iterio.py +../werkzeug/testsuite/contrib/fixers.py +../werkzeug/testsuite/contrib/sessions.py +../werkzeug/testsuite/contrib/securecookie.py +../werkzeug/testsuite/contrib/__init__.py +../werkzeug/testsuite/contrib/cache.py +../werkzeug/debug/shared/FONT_LICENSE +../werkzeug/debug/shared/console.png +../werkzeug/debug/shared/debugger.js +../werkzeug/debug/shared/jquery.js +../werkzeug/debug/shared/less.png +../werkzeug/debug/shared/more.png +../werkzeug/debug/shared/source.png +../werkzeug/debug/shared/style.css +../werkzeug/debug/shared/ubuntu.ttf +../werkzeug/testsuite/multipart/collect.py +../werkzeug/testsuite/multipart/ie7_full_path_request.txt +../werkzeug/testsuite/multipart/firefox3-2png1txt/file1.png +../werkzeug/testsuite/multipart/firefox3-2png1txt/file2.png +../werkzeug/testsuite/multipart/firefox3-2png1txt/request.txt +../werkzeug/testsuite/multipart/firefox3-2png1txt/text.txt +../werkzeug/testsuite/multipart/firefox3-2pnglongtext/file1.png +../werkzeug/testsuite/multipart/firefox3-2pnglongtext/file2.png +../werkzeug/testsuite/multipart/firefox3-2pnglongtext/request.txt +../werkzeug/testsuite/multipart/firefox3-2pnglongtext/text.txt +../werkzeug/testsuite/multipart/ie6-2png1txt/file1.png +../werkzeug/testsuite/multipart/ie6-2png1txt/file2.png +../werkzeug/testsuite/multipart/ie6-2png1txt/request.txt +../werkzeug/testsuite/multipart/ie6-2png1txt/text.txt +../werkzeug/testsuite/multipart/opera8-2png1txt/file1.png 
+../werkzeug/testsuite/multipart/opera8-2png1txt/file2.png +../werkzeug/testsuite/multipart/opera8-2png1txt/request.txt +../werkzeug/testsuite/multipart/opera8-2png1txt/text.txt +../werkzeug/testsuite/multipart/webkit3-2png1txt/file1.png +../werkzeug/testsuite/multipart/webkit3-2png1txt/file2.png +../werkzeug/testsuite/multipart/webkit3-2png1txt/request.txt +../werkzeug/testsuite/multipart/webkit3-2png1txt/text.txt +../werkzeug/testsuite/res/test.txt +../werkzeug/wrappers.pyc +../werkzeug/_compat.pyc +../werkzeug/security.pyc +../werkzeug/http.pyc +../werkzeug/wsgi.pyc +../werkzeug/useragents.pyc +../werkzeug/script.pyc +../werkzeug/exceptions.pyc +../werkzeug/datastructures.pyc +../werkzeug/posixemulation.pyc +../werkzeug/testapp.pyc +../werkzeug/urls.pyc +../werkzeug/local.pyc +../werkzeug/__init__.pyc +../werkzeug/serving.pyc +../werkzeug/formparser.pyc +../werkzeug/_internal.pyc +../werkzeug/test.pyc +../werkzeug/utils.pyc +../werkzeug/routing.pyc +../werkzeug/debug/repr.pyc +../werkzeug/debug/console.pyc +../werkzeug/debug/tbtools.pyc +../werkzeug/debug/__init__.pyc +../werkzeug/contrib/wrappers.pyc +../werkzeug/contrib/lint.pyc +../werkzeug/contrib/profiler.pyc +../werkzeug/contrib/iterio.pyc +../werkzeug/contrib/fixers.pyc +../werkzeug/contrib/sessions.pyc +../werkzeug/contrib/securecookie.pyc +../werkzeug/contrib/testtools.pyc +../werkzeug/contrib/__init__.pyc +../werkzeug/contrib/limiter.pyc +../werkzeug/contrib/jsrouting.pyc +../werkzeug/contrib/cache.pyc +../werkzeug/contrib/atom.pyc +../werkzeug/testsuite/wrappers.pyc +../werkzeug/testsuite/compat.pyc +../werkzeug/testsuite/internal.pyc +../werkzeug/testsuite/security.pyc +../werkzeug/testsuite/http.pyc +../werkzeug/testsuite/wsgi.pyc +../werkzeug/testsuite/exceptions.pyc +../werkzeug/testsuite/datastructures.pyc +../werkzeug/testsuite/urls.pyc +../werkzeug/testsuite/local.pyc +../werkzeug/testsuite/__init__.pyc +../werkzeug/testsuite/serving.pyc +../werkzeug/testsuite/formparser.pyc 
+../werkzeug/testsuite/test.pyc +../werkzeug/testsuite/debug.pyc +../werkzeug/testsuite/utils.pyc +../werkzeug/testsuite/routing.pyc +../werkzeug/testsuite/contrib/wrappers.pyc +../werkzeug/testsuite/contrib/iterio.pyc +../werkzeug/testsuite/contrib/fixers.pyc +../werkzeug/testsuite/contrib/sessions.pyc +../werkzeug/testsuite/contrib/securecookie.pyc +../werkzeug/testsuite/contrib/__init__.pyc +../werkzeug/testsuite/contrib/cache.pyc +../werkzeug/testsuite/multipart/collect.pyc +./ +SOURCES.txt +dependency_links.txt +PKG-INFO +not-zip-safe +top_level.txt diff --git a/Linux_i686/lib/python2.7/site-packages/Werkzeug-0.9.4.egg-info/not-zip-safe b/Linux_i686/lib/python2.7/site-packages/Werkzeug-0.9.4.egg-info/not-zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Werkzeug-0.9.4.egg-info/not-zip-safe @@ -0,0 +1 @@ + diff --git a/Linux_i686/lib/python2.7/site-packages/Werkzeug-0.9.4.egg-info/top_level.txt b/Linux_i686/lib/python2.7/site-packages/Werkzeug-0.9.4.egg-info/top_level.txt new file mode 100644 index 0000000..6fe8da8 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/Werkzeug-0.9.4.egg-info/top_level.txt @@ -0,0 +1 @@ +werkzeug diff --git a/Linux_i686/lib/python2.7/site-packages/_cffi_backend.so b/Linux_i686/lib/python2.7/site-packages/_cffi_backend.so new file mode 100755 index 0000000..a42fe86 Binary files /dev/null and b/Linux_i686/lib/python2.7/site-packages/_cffi_backend.so differ diff --git a/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/PKG-INFO b/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/PKG-INFO new file mode 100644 index 0000000..3e0ec60 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/PKG-INFO @@ -0,0 +1,96 @@ +Metadata-Version: 1.1 +Name: alembic +Version: 0.6.5 +Summary: A database migration tool for SQLAlchemy. 
+Home-page: http://bitbucket.org/zzzeek/alembic +Author: Mike Bayer +Author-email: mike@zzzcomputing.com +License: MIT +Description: Alembic is a new database migrations tool, written by the author + of `SQLAlchemy `_. A migrations tool + offers the following functionality: + + * Can emit ALTER statements to a database in order to change + the structure of tables and other constructs + * Provides a system whereby "migration scripts" may be constructed; + each script indicates a particular series of steps that can "upgrade" a + target database to a new version, and optionally a series of steps that can + "downgrade" similarly, doing the same steps in reverse. + * Allows the scripts to execute in some sequential manner. + + The goals of Alembic are: + + * Very open ended and transparent configuration and operation. A new + Alembic environment is generated from a set of templates which is selected + among a set of options when setup first occurs. The templates then deposit a + series of scripts that define fully how database connectivity is established + and how migration scripts are invoked; the migration scripts themselves are + generated from a template within that series of scripts. The scripts can + then be further customized to define exactly how databases will be + interacted with and what structure new migration files should take. + * Full support for transactional DDL. The default scripts ensure that all + migrations occur within a transaction - for those databases which support + this (Postgresql, Microsoft SQL Server), migrations can be tested with no + need to manually undo changes upon failure. + * Minimalist script construction. Basic operations like renaming + tables/columns, adding/removing columns, changing column attributes can be + performed through one line commands like alter_column(), rename_table(), + add_constraint(). 
There is no need to recreate full SQLAlchemy Table + structures for simple operations like these - the functions themselves + generate minimalist schema structures behind the scenes to achieve the given + DDL sequence. + * "auto generation" of migrations. While real world migrations are far more + complex than what can be automatically determined, Alembic can still + eliminate the initial grunt work in generating new migration directives + from an altered schema. The ``--autogenerate`` feature will inspect the + current status of a database using SQLAlchemy's schema inspection + capabilities, compare it to the current state of the database model as + specified in Python, and generate a series of "candidate" migrations, + rendering them into a new migration script as Python directives. The + developer then edits the new file, adding additional directives and data + migrations as needed, to produce a finished migration. Table and column + level changes can be detected, with constraints and indexes to follow as + well. + * Full support for migrations generated as SQL scripts. Those of us who + work in corporate environments know that direct access to DDL commands on a + production database is a rare privilege, and DBAs want textual SQL scripts. + Alembic's usage model and commands are oriented towards being able to run a + series of migrations into a textual output file as easily as it runs them + directly to a database. Care must be taken in this mode to not invoke other + operations that rely upon in-memory SELECTs of rows - Alembic tries to + provide helper constructs like bulk_insert() to help with data-oriented + operations that are compatible with script-based DDL. + * Non-linear versioning. Scripts are given UUID identifiers similarly + to a DVCS, and the linkage of one script to the next is achieved via markers + within the scripts themselves. 
Through this open-ended mechanism, branches + containing other migration scripts can be merged - the linkages can be + manually edited within the script files to create the new sequence. + * Provide a library of ALTER constructs that can be used by any SQLAlchemy + application. The DDL constructs build upon SQLAlchemy's own DDLElement base + and can be used standalone by any application or script. + * Don't break our necks over SQLite's inability to ALTER things. SQLite + has almost no support for table or column alteration, and this is likely + intentional. Alembic's design + is kept simple by not contorting its core API around these limitations, + understanding that SQLite is simply not intended to support schema + changes. While Alembic's architecture can support SQLite's workarounds, and + we will support these features provided someone takes the initiative + to implement and test, until the SQLite developers decide + to provide a fully working version of ALTER, it's still vastly preferable + to use Alembic, or any migrations tool, with databases that + are designed to work under the assumption of in-place schema migrations + taking place. + + Documentation and status of Alembic is at http://readthedocs.org/docs/alembic/. 
+ + +Keywords: SQLAlchemy migrations +Platform: UNKNOWN +Classifier: Development Status :: 4 - Beta +Classifier: Environment :: Console +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Database :: Front-Ends diff --git a/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/SOURCES.txt b/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/SOURCES.txt new file mode 100644 index 0000000..19bf79b --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/SOURCES.txt @@ -0,0 +1,123 @@ +CHANGES +LICENSE +MANIFEST.in +README.rst +README.unittests +setup.cfg +setup.py +test.cfg +alembic/__init__.py +alembic/command.py +alembic/compat.py +alembic/config.py +alembic/context.py +alembic/environment.py +alembic/migration.py +alembic/op.py +alembic/operations.py +alembic/script.py +alembic/util.py +alembic.egg-info/PKG-INFO +alembic.egg-info/SOURCES.txt +alembic.egg-info/dependency_links.txt +alembic.egg-info/entry_points.txt +alembic.egg-info/not-zip-safe +alembic.egg-info/requires.txt +alembic.egg-info/top_level.txt +alembic/autogenerate/__init__.py +alembic/autogenerate/api.py +alembic/autogenerate/compare.py +alembic/autogenerate/render.py +alembic/ddl/__init__.py +alembic/ddl/base.py +alembic/ddl/impl.py +alembic/ddl/mssql.py +alembic/ddl/mysql.py +alembic/ddl/oracle.py +alembic/ddl/postgresql.py +alembic/ddl/sqlite.py +alembic/templates/generic/README +alembic/templates/generic/alembic.ini.mako +alembic/templates/generic/env.py +alembic/templates/generic/script.py.mako +alembic/templates/multidb/README +alembic/templates/multidb/alembic.ini.mako +alembic/templates/multidb/env.py +alembic/templates/multidb/script.py.mako +alembic/templates/pylons/README 
+alembic/templates/pylons/alembic.ini.mako +alembic/templates/pylons/env.py +alembic/templates/pylons/script.py.mako +docs/api.html +docs/changelog.html +docs/cookbook.html +docs/front.html +docs/genindex.html +docs/index.html +docs/ops.html +docs/py-modindex.html +docs/search.html +docs/searchindex.js +docs/tutorial.html +docs/_images/api_overview.png +docs/_sources/api.txt +docs/_sources/changelog.txt +docs/_sources/cookbook.txt +docs/_sources/front.txt +docs/_sources/index.txt +docs/_sources/ops.txt +docs/_sources/tutorial.txt +docs/_static/basic.css +docs/_static/changelog.css +docs/_static/comment-bright.png +docs/_static/comment-close.png +docs/_static/comment.png +docs/_static/doctools.js +docs/_static/down-pressed.png +docs/_static/down.png +docs/_static/file.png +docs/_static/jquery.js +docs/_static/minus.png +docs/_static/nature.css +docs/_static/nature_override.css +docs/_static/plus.png +docs/_static/pygments.css +docs/_static/searchtools.js +docs/_static/sphinx_paramlinks.css +docs/_static/underscore.js +docs/_static/up-pressed.png +docs/_static/up.png +docs/_static/websupport.js +docs/build/Makefile +docs/build/api.rst +docs/build/api_overview.png +docs/build/changelog.rst +docs/build/conf.py +docs/build/cookbook.rst +docs/build/front.rst +docs/build/index.rst +docs/build/ops.rst +docs/build/requirements.txt +docs/build/tutorial.rst +docs/build/_static/nature_override.css +tests/__init__.py +tests/test_autogen_indexes.py +tests/test_autogen_render.py +tests/test_autogenerate.py +tests/test_bulk_insert.py +tests/test_command.py +tests/test_config.py +tests/test_environment.py +tests/test_mssql.py +tests/test_mysql.py +tests/test_offline_environment.py +tests/test_op.py +tests/test_op_naming_convention.py +tests/test_oracle.py +tests/test_postgresql.py +tests/test_revision_create.py +tests/test_revision_paths.py +tests/test_sql_script.py +tests/test_sqlite.py +tests/test_version_table.py +tests/test_versioning.py \ No newline at end of file diff --git 
a/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/dependency_links.txt b/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/entry_points.txt b/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/entry_points.txt new file mode 100644 index 0000000..27ac374 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/entry_points.txt @@ -0,0 +1,3 @@ +[console_scripts] +alembic = alembic.config:main + diff --git a/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/installed-files.txt b/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/installed-files.txt new file mode 100644 index 0000000..1c6e409 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/installed-files.txt @@ -0,0 +1,70 @@ +../alembic/compat.py +../alembic/util.py +../alembic/script.py +../alembic/op.py +../alembic/context.py +../alembic/command.py +../alembic/__init__.py +../alembic/operations.py +../alembic/config.py +../alembic/migration.py +../alembic/environment.py +../alembic/ddl/impl.py +../alembic/ddl/mysql.py +../alembic/ddl/mssql.py +../alembic/ddl/__init__.py +../alembic/ddl/oracle.py +../alembic/ddl/sqlite.py +../alembic/ddl/base.py +../alembic/ddl/postgresql.py +../alembic/autogenerate/api.py +../alembic/autogenerate/compare.py +../alembic/autogenerate/render.py +../alembic/autogenerate/__init__.py +../alembic/templates/generic/README +../alembic/templates/generic/alembic.ini.mako +../alembic/templates/generic/env.py +../alembic/templates/generic/script.py.mako +../alembic/templates/multidb/README +../alembic/templates/multidb/alembic.ini.mako +../alembic/templates/multidb/env.py +../alembic/templates/multidb/script.py.mako 
+../alembic/templates/pylons/README +../alembic/templates/pylons/alembic.ini.mako +../alembic/templates/pylons/env.py +../alembic/templates/pylons/script.py.mako +../alembic/compat.pyc +../alembic/util.pyc +../alembic/script.pyc +../alembic/op.pyc +../alembic/context.pyc +../alembic/command.pyc +../alembic/__init__.pyc +../alembic/operations.pyc +../alembic/config.pyc +../alembic/migration.pyc +../alembic/environment.pyc +../alembic/ddl/impl.pyc +../alembic/ddl/mysql.pyc +../alembic/ddl/mssql.pyc +../alembic/ddl/__init__.pyc +../alembic/ddl/oracle.pyc +../alembic/ddl/sqlite.pyc +../alembic/ddl/base.pyc +../alembic/ddl/postgresql.pyc +../alembic/autogenerate/api.pyc +../alembic/autogenerate/compare.pyc +../alembic/autogenerate/render.pyc +../alembic/autogenerate/__init__.pyc +../alembic/templates/generic/env.pyc +../alembic/templates/multidb/env.pyc +../alembic/templates/pylons/env.pyc +./ +requires.txt +SOURCES.txt +entry_points.txt +dependency_links.txt +PKG-INFO +not-zip-safe +top_level.txt +../../../../bin/alembic diff --git a/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/not-zip-safe b/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/not-zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/not-zip-safe @@ -0,0 +1 @@ + diff --git a/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/requires.txt b/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/requires.txt new file mode 100644 index 0000000..39a2c32 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/requires.txt @@ -0,0 +1,2 @@ +SQLAlchemy>=0.7.3 +Mako \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/top_level.txt b/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/top_level.txt new file mode 100644 index 0000000..b5bd98d --- /dev/null +++ 
b/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/top_level.txt @@ -0,0 +1 @@ +alembic diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/__init__.py b/Linux_i686/lib/python2.7/site-packages/alembic/__init__.py new file mode 100644 index 0000000..6680966 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic/__init__.py @@ -0,0 +1,11 @@ +from os import path + +__version__ = '0.6.5' + +package_dir = path.abspath(path.dirname(__file__)) + + +from . import op +from . import context + + diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/autogenerate/__init__.py b/Linux_i686/lib/python2.7/site-packages/alembic/autogenerate/__init__.py new file mode 100644 index 0000000..d0f54ba --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic/autogenerate/__init__.py @@ -0,0 +1 @@ +from .api import compare_metadata, _produce_migration_diffs, _produce_net_changes diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/autogenerate/api.py b/Linux_i686/lib/python2.7/site-packages/alembic/autogenerate/api.py new file mode 100644 index 0000000..148e352 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic/autogenerate/api.py @@ -0,0 +1,301 @@ +"""Provide the 'autogenerate' feature which can produce migration operations +automatically.""" + +import logging +import re + +from sqlalchemy.engine.reflection import Inspector +from sqlalchemy.util import OrderedSet +from .compare import _compare_tables +from .render import _drop_table, _drop_column, _drop_index, _drop_constraint, \ + _add_table, _add_column, _add_index, _add_constraint, _modify_col +from .. import util + +log = logging.getLogger(__name__) + +################################################### +# public +def compare_metadata(context, metadata): + """Compare a database schema to that given in a + :class:`~sqlalchemy.schema.MetaData` instance. 
+ + The database connection is presented in the context + of a :class:`.MigrationContext` object, which + provides database connectivity as well as optional + comparison functions to use for datatypes and + server defaults - see the "autogenerate" arguments + at :meth:`.EnvironmentContext.configure` + for details on these. + + The return format is a list of "diff" directives, + each representing individual differences:: + + from alembic.migration import MigrationContext + from alembic.autogenerate import compare_metadata + from sqlalchemy.schema import SchemaItem + from sqlalchemy.types import TypeEngine + from sqlalchemy import (create_engine, MetaData, Column, + Integer, String, Table) + import pprint + + engine = create_engine("sqlite://") + + engine.execute(''' + create table foo ( + id integer not null primary key, + old_data varchar, + x integer + )''') + + engine.execute(''' + create table bar ( + data varchar + )''') + + metadata = MetaData() + Table('foo', metadata, + Column('id', Integer, primary_key=True), + Column('data', Integer), + Column('x', Integer, nullable=False) + ) + Table('bat', metadata, + Column('info', String) + ) + + mc = MigrationContext.configure(engine.connect()) + + diff = compare_metadata(mc, metadata) + pprint.pprint(diff, indent=2, width=20) + + Output:: + + [ ( 'add_table', + Table('bat', MetaData(bind=None), + Column('info', String(), table=), schema=None)), + ( 'remove_table', + Table(u'bar', MetaData(bind=None), + Column(u'data', VARCHAR(), table=), schema=None)), + ( 'add_column', + None, + 'foo', + Column('data', Integer(), table=)), + ( 'remove_column', + None, + 'foo', + Column(u'old_data', VARCHAR(), table=None)), + [ ( 'modify_nullable', + None, + 'foo', + u'x', + { 'existing_server_default': None, + 'existing_type': INTEGER()}, + True, + False)]] + + + :param context: a :class:`.MigrationContext` + instance. + :param metadata: a :class:`~sqlalchemy.schema.MetaData` + instance. 
+ + """ + autogen_context, connection = _autogen_context(context, None) + diffs = [] + + object_filters = _get_object_filters(context.opts) + include_schemas = context.opts.get('include_schemas', False) + + _produce_net_changes(connection, metadata, diffs, autogen_context, + object_filters, include_schemas) + + return diffs + +################################################### +# top level + +def _produce_migration_diffs(context, template_args, + imports, include_symbol=None, + include_object=None, + include_schemas=False): + opts = context.opts + metadata = opts['target_metadata'] + include_schemas = opts.get('include_schemas', include_schemas) + + object_filters = _get_object_filters(opts, include_symbol, include_object) + + if metadata is None: + raise util.CommandError( + "Can't proceed with --autogenerate option; environment " + "script %s does not provide " + "a MetaData object to the context." % ( + context.script.env_py_location + )) + autogen_context, connection = _autogen_context(context, imports) + + diffs = [] + _produce_net_changes(connection, metadata, diffs, + autogen_context, object_filters, include_schemas) + template_args[opts['upgrade_token']] = \ + _indent(_produce_upgrade_commands(diffs, autogen_context)) + template_args[opts['downgrade_token']] = \ + _indent(_produce_downgrade_commands(diffs, autogen_context)) + template_args['imports'] = "\n".join(sorted(imports)) + + +def _get_object_filters(context_opts, include_symbol=None, include_object=None): + include_symbol = context_opts.get('include_symbol', include_symbol) + include_object = context_opts.get('include_object', include_object) + + object_filters = [] + if include_symbol: + def include_symbol_filter(object, name, type_, reflected, compare_to): + if type_ == "table": + return include_symbol(name, object.schema) + else: + return True + object_filters.append(include_symbol_filter) + if include_object: + object_filters.append(include_object) + + return object_filters + + +def 
_autogen_context(context, imports): + opts = context.opts + connection = context.bind + return { + 'imports': imports, + 'connection': connection, + 'dialect': connection.dialect, + 'context': context, + 'opts': opts + }, connection + +def _indent(text): + text = "### commands auto generated by Alembic - "\ + "please adjust! ###\n" + text + text += "\n### end Alembic commands ###" + text = re.compile(r'^', re.M).sub(" ", text).strip() + return text + +################################################### +# walk structures + + +def _produce_net_changes(connection, metadata, diffs, autogen_context, + object_filters=(), + include_schemas=False): + inspector = Inspector.from_engine(connection) + # TODO: not hardcode alembic_version here ? + conn_table_names = set() + + default_schema = connection.dialect.default_schema_name + if include_schemas: + schemas = set(inspector.get_schema_names()) + # replace default schema name with None + schemas.discard("information_schema") + # replace the "default" schema with None + schemas.add(None) + schemas.discard(default_schema) + else: + schemas = [None] + + for s in schemas: + tables = set(inspector.get_table_names(schema=s)).\ + difference(['alembic_version']) + conn_table_names.update(zip([s] * len(tables), tables)) + + metadata_table_names = OrderedSet([(table.schema, table.name) + for table in metadata.sorted_tables]) + + _compare_tables(conn_table_names, metadata_table_names, + object_filters, + inspector, metadata, diffs, autogen_context) + + +################################################### +# element comparison + + +################################################### +# render python + + +################################################### +# produce command structure + +def _produce_upgrade_commands(diffs, autogen_context): + buf = [] + for diff in diffs: + buf.append(_invoke_command("upgrade", diff, autogen_context)) + if not buf: + buf = ["pass"] + return "\n".join(buf) + +def _produce_downgrade_commands(diffs, 
autogen_context): + buf = [] + for diff in reversed(diffs): + buf.append(_invoke_command("downgrade", diff, autogen_context)) + if not buf: + buf = ["pass"] + return "\n".join(buf) + +def _invoke_command(updown, args, autogen_context): + if isinstance(args, tuple): + return _invoke_adddrop_command(updown, args, autogen_context) + else: + return _invoke_modify_command(updown, args, autogen_context) + +def _invoke_adddrop_command(updown, args, autogen_context): + cmd_type = args[0] + adddrop, cmd_type = cmd_type.split("_") + + cmd_args = args[1:] + (autogen_context,) + + _commands = { + "table": (_drop_table, _add_table), + "column": (_drop_column, _add_column), + "index": (_drop_index, _add_index), + "constraint": (_drop_constraint, _add_constraint), + } + + cmd_callables = _commands[cmd_type] + + if ( + updown == "upgrade" and adddrop == "add" + ) or ( + updown == "downgrade" and adddrop == "remove" + ): + return cmd_callables[1](*cmd_args) + else: + return cmd_callables[0](*cmd_args) + +def _invoke_modify_command(updown, args, autogen_context): + sname, tname, cname = args[0][1:4] + kw = {} + + _arg_struct = { + "modify_type": ("existing_type", "type_"), + "modify_nullable": ("existing_nullable", "nullable"), + "modify_default": ("existing_server_default", "server_default"), + } + for diff in args: + diff_kw = diff[4] + for arg in ("existing_type", \ + "existing_nullable", \ + "existing_server_default"): + if arg in diff_kw: + kw.setdefault(arg, diff_kw[arg]) + old_kw, new_kw = _arg_struct[diff[0]] + if updown == "upgrade": + kw[new_kw] = diff[-1] + kw[old_kw] = diff[-2] + else: + kw[new_kw] = diff[-2] + kw[old_kw] = diff[-1] + + if "nullable" in kw: + kw.pop("existing_nullable", None) + if "server_default" in kw: + kw.pop("existing_server_default", None) + return _modify_col(tname, cname, autogen_context, schema=sname, **kw) diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/autogenerate/compare.py 
b/Linux_i686/lib/python2.7/site-packages/alembic/autogenerate/compare.py new file mode 100644 index 0000000..ec077fd --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic/autogenerate/compare.py @@ -0,0 +1,490 @@ +from sqlalchemy.exc import NoSuchTableError +from sqlalchemy import schema as sa_schema, types as sqltypes +import logging +from .. import compat +from .render import _render_server_default +from sqlalchemy.util import OrderedSet + + +log = logging.getLogger(__name__) + +def _run_filters(object_, name, type_, reflected, compare_to, object_filters): + for fn in object_filters: + if not fn(object_, name, type_, reflected, compare_to): + return False + else: + return True + +def _compare_tables(conn_table_names, metadata_table_names, + object_filters, + inspector, metadata, diffs, autogen_context): + + default_schema = inspector.bind.dialect.default_schema_name + + # tables coming from the connection will not have "schema" + # set if it matches default_schema_name; so we need a list + # of table names from local metadata that also have "None" if schema + # == default_schema_name. Most setups will be like this anyway but + # some are not (see #170) + metadata_table_names_no_dflt_schema = OrderedSet([ + (schema if schema != default_schema else None, tname) + for schema, tname in metadata_table_names + ]) + + # to adjust for the MetaData collection storing the tables either + # as "schemaname.tablename" or just "tablename", create a new lookup + # which will match the "non-default-schema" keys to the Table object. 
+ tname_to_table = dict( + ( + no_dflt_schema, + metadata.tables[sa_schema._get_table_key(tname, schema)] + ) + for no_dflt_schema, (schema, tname) in zip( + metadata_table_names_no_dflt_schema, + metadata_table_names) + ) + metadata_table_names = metadata_table_names_no_dflt_schema + + for s, tname in metadata_table_names.difference(conn_table_names): + name = '%s.%s' % (s, tname) if s else tname + metadata_table = tname_to_table[(s, tname)] + if _run_filters(metadata_table, tname, "table", False, None, object_filters): + diffs.append(("add_table", metadata_table)) + log.info("Detected added table %r", name) + _compare_indexes_and_uniques(s, tname, object_filters, + None, + metadata_table, + diffs, autogen_context, inspector) + + removal_metadata = sa_schema.MetaData() + for s, tname in conn_table_names.difference(metadata_table_names): + name = sa_schema._get_table_key(tname, s) + exists = name in removal_metadata.tables + t = sa_schema.Table(tname, removal_metadata, schema=s) + if not exists: + inspector.reflecttable(t, None) + if _run_filters(t, tname, "table", True, None, object_filters): + diffs.append(("remove_table", t)) + log.info("Detected removed table %r", name) + + existing_tables = conn_table_names.intersection(metadata_table_names) + + existing_metadata = sa_schema.MetaData() + conn_column_info = {} + for s, tname in existing_tables: + name = sa_schema._get_table_key(tname, s) + exists = name in existing_metadata.tables + t = sa_schema.Table(tname, existing_metadata, schema=s) + if not exists: + inspector.reflecttable(t, None) + conn_column_info[(s, tname)] = t + + for s, tname in sorted(existing_tables): + name = '%s.%s' % (s, tname) if s else tname + metadata_table = tname_to_table[(s, tname)] + conn_table = existing_metadata.tables[name] + + if _run_filters(metadata_table, tname, "table", False, conn_table, object_filters): + _compare_columns(s, tname, object_filters, + conn_table, + metadata_table, + diffs, autogen_context, inspector) + 
_compare_indexes_and_uniques(s, tname, object_filters, + conn_table, + metadata_table, + diffs, autogen_context, inspector) + + # TODO: + # table constraints + # sequences + +def _make_index(params, conn_table): + return sa_schema.Index( + params['name'], + *[conn_table.c[cname] for cname in params['column_names']], + unique=params['unique'] + ) + +def _make_unique_constraint(params, conn_table): + return sa_schema.UniqueConstraint( + *[conn_table.c[cname] for cname in params['column_names']], + name=params['name'] + ) + +def _compare_columns(schema, tname, object_filters, conn_table, metadata_table, + diffs, autogen_context, inspector): + name = '%s.%s' % (schema, tname) if schema else tname + metadata_cols_by_name = dict((c.name, c) for c in metadata_table.c) + conn_col_names = dict((c.name, c) for c in conn_table.c) + metadata_col_names = OrderedSet(sorted(metadata_cols_by_name)) + + for cname in metadata_col_names.difference(conn_col_names): + if _run_filters(metadata_cols_by_name[cname], cname, + "column", False, None, object_filters): + diffs.append( + ("add_column", schema, tname, metadata_cols_by_name[cname]) + ) + log.info("Detected added column '%s.%s'", name, cname) + + for cname in set(conn_col_names).difference(metadata_col_names): + if _run_filters(conn_table.c[cname], cname, + "column", True, None, object_filters): + diffs.append( + ("remove_column", schema, tname, conn_table.c[cname]) + ) + log.info("Detected removed column '%s.%s'", name, cname) + + for colname in metadata_col_names.intersection(conn_col_names): + metadata_col = metadata_cols_by_name[colname] + conn_col = conn_table.c[colname] + if not _run_filters( + metadata_col, colname, "column", False, conn_col, object_filters): + continue + col_diff = [] + _compare_type(schema, tname, colname, + conn_col, + metadata_col, + col_diff, autogen_context + ) + _compare_nullable(schema, tname, colname, + conn_col, + metadata_col.nullable, + col_diff, autogen_context + ) + 
_compare_server_default(schema, tname, colname, + conn_col, + metadata_col, + col_diff, autogen_context + ) + if col_diff: + diffs.append(col_diff) + +class _constraint_sig(object): + def __eq__(self, other): + return self.const == other.const + + def __ne__(self, other): + return self.const != other.const + + def __hash__(self): + return hash(self.const) + +class _uq_constraint_sig(_constraint_sig): + is_index = False + is_unique = True + + def __init__(self, const): + self.const = const + self.name = const.name + self.sig = tuple(sorted([col.name for col in const.columns])) + + @property + def column_names(self): + return [col.name for col in self.const.columns] + +class _ix_constraint_sig(_constraint_sig): + is_index = True + + def __init__(self, const): + self.const = const + self.name = const.name + self.sig = tuple(sorted([col.name for col in const.columns])) + self.is_unique = bool(const.unique) + + @property + def column_names(self): + return _get_index_column_names(self.const) + +def _get_index_column_names(idx): + if compat.sqla_08: + return [getattr(exp, "name", None) for exp in idx.expressions] + else: + return [getattr(col, "name", None) for col in idx.columns] + +def _compare_indexes_and_uniques(schema, tname, object_filters, conn_table, + metadata_table, diffs, autogen_context, inspector): + + is_create_table = conn_table is None + + # 1a. get raw indexes and unique constraints from metadata ... + metadata_unique_constraints = set(uq for uq in metadata_table.constraints + if isinstance(uq, sa_schema.UniqueConstraint) + ) + metadata_indexes = set(metadata_table.indexes) + + conn_uniques = conn_indexes = frozenset() + + supports_unique_constraints = False + + if conn_table is not None: + # 1b. ... 
and from connection, if the table exists + if hasattr(inspector, "get_unique_constraints"): + try: + conn_uniques = inspector.get_unique_constraints( + tname, schema=schema) + supports_unique_constraints = True + except NotImplementedError: + pass + try: + conn_indexes = inspector.get_indexes(tname, schema=schema) + except NotImplementedError: + pass + + # 2. convert conn-level objects from raw inspector records + # into schema objects + conn_uniques = set(_make_unique_constraint(uq_def, conn_table) + for uq_def in conn_uniques) + conn_indexes = set(_make_index(ix, conn_table) for ix in conn_indexes) + + # 3. give the dialect a chance to omit indexes and constraints that + # we know are either added implicitly by the DB or that the DB + # can't accurately report on + autogen_context['context'].impl.\ + correct_for_autogen_constraints( + conn_uniques, conn_indexes, + metadata_unique_constraints, + metadata_indexes + ) + + # 4. organize the constraints into "signature" collections, the + # _constraint_sig() objects provide a consistent facade over both + # Index and UniqueConstraint so we can easily work with them + # interchangeably + metadata_unique_constraints = set(_uq_constraint_sig(uq) + for uq in metadata_unique_constraints + ) + + metadata_indexes = set(_ix_constraint_sig(ix) for ix in metadata_indexes) + + conn_unique_constraints = set(_uq_constraint_sig(uq) for uq in conn_uniques) + + conn_indexes = set(_ix_constraint_sig(ix) for ix in conn_indexes) + + # 5. 
index things by name, for those objects that have names + metadata_names = dict( + (c.name, c) for c in + metadata_unique_constraints.union(metadata_indexes) + if c.name is not None) + + conn_uniques_by_name = dict((c.name, c) for c in conn_unique_constraints) + conn_indexes_by_name = dict((c.name, c) for c in conn_indexes) + + conn_names = dict((c.name, c) for c in + conn_unique_constraints.union(conn_indexes) + if c.name is not None) + + doubled_constraints = dict( + (name, (conn_uniques_by_name[name], conn_indexes_by_name[name])) + for name in set(conn_uniques_by_name).intersection(conn_indexes_by_name) + ) + + # 6. index things by "column signature", to help with unnamed unique + # constraints. + conn_uniques_by_sig = dict((uq.sig, uq) for uq in conn_unique_constraints) + metadata_uniques_by_sig = dict( + (uq.sig, uq) for uq in metadata_unique_constraints) + metadata_indexes_by_sig = dict( + (ix.sig, ix) for ix in metadata_indexes) + unnamed_metadata_uniques = dict((uq.sig, uq) for uq in + metadata_unique_constraints if uq.name is None) + + # assumptions: + # 1. a unique constraint or an index from the connection *always* + # has a name. + # 2. an index on the metadata side *always* has a name. + # 3. a unique constraint on the metadata side *might* have a name. + # 4. The backend may double up indexes as unique constraints and + # vice versa (e.g. 
MySQL, Postgresql) + + def obj_added(obj): + if obj.is_index: + diffs.append(("add_index", obj.const)) + log.info("Detected added index '%s' on %s", + obj.name, ', '.join([ + "'%s'" % obj.column_names + ]) + ) + else: + if not supports_unique_constraints: + # can't report unique indexes as added if we don't + # detect them + return + if is_create_table: + # unique constraints are created inline with table defs + return + diffs.append(("add_constraint", obj.const)) + log.info("Detected added unique constraint '%s' on %s", + obj.name, ', '.join([ + "'%s'" % obj.column_names + ]) + ) + + def obj_removed(obj): + if obj.is_index: + if obj.is_unique and not supports_unique_constraints: + # many databases double up unique constraints + # as unique indexes. without that list we can't + # be sure what we're doing here + return + + diffs.append(("remove_index", obj.const)) + log.info("Detected removed index '%s' on '%s'", obj.name, tname) + else: + diffs.append(("remove_constraint", obj.const)) + log.info("Detected removed unique constraint '%s' on '%s'", + obj.name, tname + ) + + def obj_changed(old, new, msg): + if old.is_index: + log.info("Detected changed index '%s' on '%s':%s", + old.name, tname, ', '.join(msg) + ) + diffs.append(("remove_index", old.const)) + diffs.append(("add_index", new.const)) + else: + log.info("Detected changed unique constraint '%s' on '%s':%s", + old.name, tname, ', '.join(msg) + ) + diffs.append(("remove_constraint", old.const)) + diffs.append(("add_constraint", new.const)) + + for added_name in sorted(set(metadata_names).difference(conn_names)): + obj = metadata_names[added_name] + obj_added(obj) + + + for existing_name in sorted(set(metadata_names).intersection(conn_names)): + metadata_obj = metadata_names[existing_name] + + if existing_name in doubled_constraints: + conn_uq, conn_idx = doubled_constraints[existing_name] + if metadata_obj.is_index: + conn_obj = conn_idx + else: + conn_obj = conn_uq + else: + conn_obj = 
conn_names[existing_name] + + if conn_obj.is_index != metadata_obj.is_index: + obj_removed(conn_obj) + obj_added(metadata_obj) + else: + msg = [] + if conn_obj.is_unique != metadata_obj.is_unique: + msg.append(' unique=%r to unique=%r' % ( + conn_obj.is_unique, metadata_obj.is_unique + )) + if conn_obj.sig != metadata_obj.sig: + msg.append(' columns %r to %r' % ( + conn_obj.sig, metadata_obj.sig + )) + + if msg: + obj_changed(conn_obj, metadata_obj, msg) + + + for removed_name in sorted(set(conn_names).difference(metadata_names)): + conn_obj = conn_names[removed_name] + if not conn_obj.is_index and conn_obj.sig in unnamed_metadata_uniques: + continue + elif removed_name in doubled_constraints: + if conn_obj.sig not in metadata_indexes_by_sig and \ + conn_obj.sig not in metadata_uniques_by_sig: + conn_uq, conn_idx = doubled_constraints[removed_name] + obj_removed(conn_uq) + obj_removed(conn_idx) + else: + obj_removed(conn_obj) + + for uq_sig in unnamed_metadata_uniques: + if uq_sig not in conn_uniques_by_sig: + obj_added(unnamed_metadata_uniques[uq_sig]) + + +def _compare_nullable(schema, tname, cname, conn_col, + metadata_col_nullable, diffs, + autogen_context): + conn_col_nullable = conn_col.nullable + if conn_col_nullable is not metadata_col_nullable: + diffs.append( + ("modify_nullable", schema, tname, cname, + { + "existing_type": conn_col.type, + "existing_server_default": conn_col.server_default, + }, + conn_col_nullable, + metadata_col_nullable), + ) + log.info("Detected %s on column '%s.%s'", + "NULL" if metadata_col_nullable else "NOT NULL", + tname, + cname + ) + +def _compare_type(schema, tname, cname, conn_col, + metadata_col, diffs, + autogen_context): + + conn_type = conn_col.type + metadata_type = metadata_col.type + if conn_type._type_affinity is sqltypes.NullType: + log.info("Couldn't determine database type " + "for column '%s.%s'", tname, cname) + return + if metadata_type._type_affinity is sqltypes.NullType: + log.info("Column '%s.%s' has no 
type within " + "the model; can't compare", tname, cname) + return + + isdiff = autogen_context['context']._compare_type(conn_col, metadata_col) + + if isdiff: + + diffs.append( + ("modify_type", schema, tname, cname, + { + "existing_nullable": conn_col.nullable, + "existing_server_default": conn_col.server_default, + }, + conn_type, + metadata_type), + ) + log.info("Detected type change from %r to %r on '%s.%s'", + conn_type, metadata_type, tname, cname + ) + +def _compare_server_default(schema, tname, cname, conn_col, metadata_col, + diffs, autogen_context): + + metadata_default = metadata_col.server_default + conn_col_default = conn_col.server_default + if conn_col_default is None and metadata_default is None: + return False + rendered_metadata_default = _render_server_default( + metadata_default, autogen_context) + rendered_conn_default = conn_col.server_default.arg.text \ + if conn_col.server_default else None + isdiff = autogen_context['context']._compare_server_default( + conn_col, metadata_col, + rendered_metadata_default, + rendered_conn_default + ) + if isdiff: + conn_col_default = rendered_conn_default + diffs.append( + ("modify_default", schema, tname, cname, + { + "existing_nullable": conn_col.nullable, + "existing_type": conn_col.type, + }, + conn_col_default, + metadata_default), + ) + log.info("Detected server default on column '%s.%s'", + tname, + cname + ) + + + diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/autogenerate/render.py b/Linux_i686/lib/python2.7/site-packages/alembic/autogenerate/render.py new file mode 100644 index 0000000..ed9536c --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic/autogenerate/render.py @@ -0,0 +1,457 @@ +from sqlalchemy import schema as sa_schema, types as sqltypes, sql +import logging +from .. 
import compat +import re +from ..compat import string_types + +log = logging.getLogger(__name__) + +try: + from sqlalchemy.sql.naming import conv + def _render_gen_name(autogen_context, name): + if isinstance(name, conv): + return _f_name(_alembic_autogenerate_prefix(autogen_context), name) + else: + return name +except ImportError: + def _render_gen_name(autogen_context, name): + return name + +class _f_name(object): + def __init__(self, prefix, name): + self.prefix = prefix + self.name = name + + def __repr__(self): + return "%sf(%r)" % (self.prefix, self.name) + +def _render_potential_expr(value, autogen_context): + if isinstance(value, sql.ClauseElement): + if compat.sqla_08: + compile_kw = dict(compile_kwargs={'literal_binds': True}) + else: + compile_kw = {} + + return "%(prefix)stext(%(sql)r)" % { + "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), + "sql": str( + value.compile(dialect=autogen_context['dialect'], + **compile_kw) + ) + } + + else: + return repr(value) + +def _add_table(table, autogen_context): + text = "%(prefix)screate_table(%(tablename)r,\n%(args)s" % { + 'tablename': table.name, + 'prefix': _alembic_autogenerate_prefix(autogen_context), + 'args': ',\n'.join( + [col for col in + [_render_column(col, autogen_context) for col in table.c] + if col] + + sorted([rcons for rcons in + [_render_constraint(cons, autogen_context) for cons in + table.constraints] + if rcons is not None + ]) + ) + } + if table.schema: + text += ",\nschema=%r" % table.schema + for k in sorted(table.kwargs): + text += ",\n%s=%r" % (k.replace(" ", "_"), table.kwargs[k]) + text += "\n)" + return text + +def _drop_table(table, autogen_context): + text = "%(prefix)sdrop_table(%(tname)r" % { + "prefix": _alembic_autogenerate_prefix(autogen_context), + "tname": table.name + } + if table.schema: + text += ", schema=%r" % table.schema + text += ")" + return text + +def _add_index(index, autogen_context): + """ + Generate Alembic operations for the CREATE INDEX of an + 
:class:`~sqlalchemy.schema.Index` instance. + """ + from .compare import _get_index_column_names + + text = "%(prefix)screate_index(%(name)r, '%(table)s', %(columns)s, "\ + "unique=%(unique)r%(schema)s%(kwargs)s)" % { + 'prefix': _alembic_autogenerate_prefix(autogen_context), + 'name': _render_gen_name(autogen_context, index.name), + 'table': index.table.name, + 'columns': _get_index_column_names(index), + 'unique': index.unique or False, + 'schema': (", schema='%s'" % index.table.schema) if index.table.schema else '', + 'kwargs': (', '+', '.join( + ["%s=%s" % (key, _render_potential_expr(val, autogen_context)) + for key, val in index.kwargs.items()]))\ + if len(index.kwargs) else '' + } + return text + +def _drop_index(index, autogen_context): + """ + Generate Alembic operations for the DROP INDEX of an + :class:`~sqlalchemy.schema.Index` instance. + """ + text = "%(prefix)sdrop_index(%(name)r, "\ + "table_name='%(table_name)s'%(schema)s)" % { + 'prefix': _alembic_autogenerate_prefix(autogen_context), + 'name': _render_gen_name(autogen_context, index.name), + 'table_name': index.table.name, + 'schema': ((", schema='%s'" % index.table.schema) + if index.table.schema else '') + } + return text + + +def _render_unique_constraint(constraint, autogen_context): + rendered = _user_defined_render("unique", constraint, autogen_context) + if rendered is not False: + return rendered + + return _uq_constraint(constraint, autogen_context, False) + + +def _add_unique_constraint(constraint, autogen_context): + """ + Generate Alembic operations for the ALTER TABLE .. ADD CONSTRAINT ... + UNIQUE of a :class:`~sqlalchemy.schema.UniqueConstraint` instance. 
+ """ + return _uq_constraint(constraint, autogen_context, True) + +def _uq_constraint(constraint, autogen_context, alter): + opts = [] + if constraint.deferrable: + opts.append(("deferrable", str(constraint.deferrable))) + if constraint.initially: + opts.append(("initially", str(constraint.initially))) + if alter and constraint.table.schema: + opts.append(("schema", str(constraint.table.schema))) + if not alter and constraint.name: + opts.append(("name", _render_gen_name(autogen_context, constraint.name))) + + if alter: + args = [repr(_render_gen_name(autogen_context, constraint.name)), + repr(constraint.table.name)] + args.append(repr([col.name for col in constraint.columns])) + args.extend(["%s=%r" % (k, v) for k, v in opts]) + return "%(prefix)screate_unique_constraint(%(args)s)" % { + 'prefix': _alembic_autogenerate_prefix(autogen_context), + 'args': ", ".join(args) + } + else: + args = [repr(col.name) for col in constraint.columns] + args.extend(["%s=%r" % (k, v) for k, v in opts]) + return "%(prefix)sUniqueConstraint(%(args)s)" % { + "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), + "args": ", ".join(args) + } + + +def _add_fk_constraint(constraint, autogen_context): + raise NotImplementedError() + +def _add_pk_constraint(constraint, autogen_context): + raise NotImplementedError() + +def _add_check_constraint(constraint, autogen_context): + raise NotImplementedError() + +def _add_constraint(constraint, autogen_context): + """ + Dispatcher for the different types of constraints. + """ + funcs = { + "unique_constraint": _add_unique_constraint, + "foreign_key_constraint": _add_fk_constraint, + "primary_key_constraint": _add_pk_constraint, + "check_constraint": _add_check_constraint, + "column_check_constraint": _add_check_constraint, + } + return funcs[constraint.__visit_name__](constraint, autogen_context) + +def _drop_constraint(constraint, autogen_context): + """ + Generate Alembic operations for the ALTER TABLE ... 
DROP CONSTRAINT + of a :class:`~sqlalchemy.schema.UniqueConstraint` instance. + """ + text = "%(prefix)sdrop_constraint(%(name)r, '%(table_name)s'%(schema)s)" % { + 'prefix': _alembic_autogenerate_prefix(autogen_context), + 'name': _render_gen_name(autogen_context, constraint.name), + 'table_name': constraint.table.name, + 'schema': (", schema='%s'" % constraint.table.schema) + if constraint.table.schema else '', + } + return text + +def _add_column(schema, tname, column, autogen_context): + text = "%(prefix)sadd_column(%(tname)r, %(column)s" % { + "prefix": _alembic_autogenerate_prefix(autogen_context), + "tname": tname, + "column": _render_column(column, autogen_context) + } + if schema: + text += ", schema=%r" % schema + text += ")" + return text + +def _drop_column(schema, tname, column, autogen_context): + text = "%(prefix)sdrop_column(%(tname)r, %(cname)r" % { + "prefix": _alembic_autogenerate_prefix(autogen_context), + "tname": tname, + "cname": column.name + } + if schema: + text += ", schema=%r" % schema + text += ")" + return text + +def _modify_col(tname, cname, + autogen_context, + server_default=False, + type_=None, + nullable=None, + existing_type=None, + existing_nullable=None, + existing_server_default=False, + schema=None): + indent = " " * 11 + text = "%(prefix)salter_column(%(tname)r, %(cname)r" % { + 'prefix': _alembic_autogenerate_prefix( + autogen_context), + 'tname': tname, + 'cname': cname} + text += ",\n%sexisting_type=%s" % (indent, + _repr_type(existing_type, autogen_context)) + if server_default is not False: + rendered = _render_server_default( + server_default, autogen_context) + text += ",\n%sserver_default=%s" % (indent, rendered) + + if type_ is not None: + text += ",\n%stype_=%s" % (indent, + _repr_type(type_, autogen_context)) + if nullable is not None: + text += ",\n%snullable=%r" % ( + indent, nullable,) + if existing_nullable is not None: + text += ",\n%sexisting_nullable=%r" % ( + indent, existing_nullable) + if 
existing_server_default: + rendered = _render_server_default( + existing_server_default, + autogen_context) + text += ",\n%sexisting_server_default=%s" % ( + indent, rendered) + if schema: + text += ",\n%sschema=%r" % (indent, schema) + text += ")" + return text + +def _user_autogenerate_prefix(autogen_context): + prefix = autogen_context['opts']['user_module_prefix'] + if prefix is None: + return _sqlalchemy_autogenerate_prefix(autogen_context) + else: + return prefix + +def _sqlalchemy_autogenerate_prefix(autogen_context): + return autogen_context['opts']['sqlalchemy_module_prefix'] or '' + +def _alembic_autogenerate_prefix(autogen_context): + return autogen_context['opts']['alembic_module_prefix'] or '' + +def _user_defined_render(type_, object_, autogen_context): + if 'opts' in autogen_context and \ + 'render_item' in autogen_context['opts']: + render = autogen_context['opts']['render_item'] + if render: + rendered = render(type_, object_, autogen_context) + if rendered is not False: + return rendered + return False + +def _render_column(column, autogen_context): + rendered = _user_defined_render("column", column, autogen_context) + if rendered is not False: + return rendered + + opts = [] + if column.server_default: + rendered = _render_server_default( + column.server_default, autogen_context + ) + if rendered: + opts.append(("server_default", rendered)) + + if not column.autoincrement: + opts.append(("autoincrement", column.autoincrement)) + + if column.nullable is not None: + opts.append(("nullable", column.nullable)) + + # TODO: for non-ascii colname, assign a "key" + return "%(prefix)sColumn(%(name)r, %(type)s, %(kw)s)" % { + 'prefix': _sqlalchemy_autogenerate_prefix(autogen_context), + 'name': column.name, + 'type': _repr_type(column.type, autogen_context), + 'kw': ", ".join(["%s=%s" % (kwname, val) for kwname, val in opts]) + } + +def _render_server_default(default, autogen_context): + rendered = _user_defined_render("server_default", default, 
autogen_context) + if rendered is not False: + return rendered + + if isinstance(default, sa_schema.DefaultClause): + if isinstance(default.arg, string_types): + default = default.arg + else: + default = str(default.arg.compile( + dialect=autogen_context['dialect'])) + if isinstance(default, string_types): + # TODO: this is just a hack to get + # tests to pass until we figure out + # WTF sqlite is doing + default = re.sub(r"^'|'$", "", default) + return repr(default) + else: + return None + +def _repr_type(type_, autogen_context): + rendered = _user_defined_render("type", type_, autogen_context) + if rendered is not False: + return rendered + + mod = type(type_).__module__ + imports = autogen_context.get('imports', None) + if mod.startswith("sqlalchemy.dialects"): + dname = re.match(r"sqlalchemy\.dialects\.(\w+)", mod).group(1) + if imports is not None: + imports.add("from sqlalchemy.dialects import %s" % dname) + return "%s.%r" % (dname, type_) + elif mod.startswith("sqlalchemy"): + prefix = _sqlalchemy_autogenerate_prefix(autogen_context) + return "%s%r" % (prefix, type_) + else: + prefix = _user_autogenerate_prefix(autogen_context) + return "%s%r" % (prefix, type_) + +def _render_constraint(constraint, autogen_context): + renderer = _constraint_renderers.get(type(constraint), None) + if renderer: + return renderer(constraint, autogen_context) + else: + return None + +def _render_primary_key(constraint, autogen_context): + rendered = _user_defined_render("primary_key", constraint, autogen_context) + if rendered is not False: + return rendered + + if not constraint.columns: + return None + + opts = [] + if constraint.name: + opts.append(("name", repr(_render_gen_name(autogen_context, constraint.name)))) + return "%(prefix)sPrimaryKeyConstraint(%(args)s)" % { + "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), + "args": ", ".join( + [repr(c.key) for c in constraint.columns] + + ["%s=%s" % (kwname, val) for kwname, val in opts] + ), + } + +def 
_fk_colspec(fk, metadata_schema): + """Implement a 'safe' version of ForeignKey._get_colspec() that + never tries to resolve the remote table. + + """ + if metadata_schema is None: + return fk._get_colspec() + else: + # need to render schema breaking up tokens by hand, since the + # ForeignKeyConstraint here may not actually have a remote + # Table present + tokens = fk._colspec.split(".") + # no schema in the colspec, render it + if len(tokens) == 2: + return "%s.%s" % (metadata_schema, fk._colspec) + else: + return fk._colspec + +def _render_foreign_key(constraint, autogen_context): + rendered = _user_defined_render("foreign_key", constraint, autogen_context) + if rendered is not False: + return rendered + + opts = [] + if constraint.name: + opts.append(("name", repr(_render_gen_name(autogen_context, constraint.name)))) + if constraint.onupdate: + opts.append(("onupdate", repr(constraint.onupdate))) + if constraint.ondelete: + opts.append(("ondelete", repr(constraint.ondelete))) + if constraint.initially: + opts.append(("initially", repr(constraint.initially))) + if constraint.deferrable: + opts.append(("deferrable", repr(constraint.deferrable))) + if constraint.use_alter: + opts.append(("use_alter", repr(constraint.use_alter))) + + apply_metadata_schema = constraint.parent.metadata.schema + return "%(prefix)sForeignKeyConstraint([%(cols)s], "\ + "[%(refcols)s], %(args)s)" % { + "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), + "cols": ", ".join("'%s'" % f.parent.key for f in constraint.elements), + "refcols": ", ".join(repr(_fk_colspec(f, apply_metadata_schema)) + for f in constraint.elements), + "args": ", ".join( + ["%s=%s" % (kwname, val) for kwname, val in opts] + ), + } + +def _render_check_constraint(constraint, autogen_context): + rendered = _user_defined_render("check", constraint, autogen_context) + if rendered is not False: + return rendered + + # detect the constraint being part of + # a parent type which is probably in the Table already. 
+ # ideally SQLAlchemy would give us more of a first class + # way to detect this. + if constraint._create_rule and \ + hasattr(constraint._create_rule, 'target') and \ + isinstance(constraint._create_rule.target, + sqltypes.TypeEngine): + return None + opts = [] + if constraint.name: + opts.append(("name", repr(_render_gen_name(autogen_context, constraint.name)))) + return "%(prefix)sCheckConstraint(%(sqltext)r%(opts)s)" % { + "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), + "opts": ", " + (", ".join("%s=%s" % (k, v) + for k, v in opts)) if opts else "", + "sqltext": str( + constraint.sqltext.compile( + dialect=autogen_context['dialect'] + ) + ) + } + +_constraint_renderers = { + sa_schema.PrimaryKeyConstraint: _render_primary_key, + sa_schema.ForeignKeyConstraint: _render_foreign_key, + sa_schema.UniqueConstraint: _render_unique_constraint, + sa_schema.CheckConstraint: _render_check_constraint +} diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/command.py b/Linux_i686/lib/python2.7/site-packages/alembic/command.py new file mode 100644 index 0000000..f1c5962 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic/command.py @@ -0,0 +1,266 @@ +import os + +from .script import ScriptDirectory +from .environment import EnvironmentContext +from . 
import util, autogenerate as autogen + +def list_templates(config): + """List available templates""" + + config.print_stdout("Available templates:\n") + for tempname in os.listdir(config.get_template_directory()): + with open(os.path.join( + config.get_template_directory(), + tempname, + 'README')) as readme: + synopsis = next(readme) + config.print_stdout("%s - %s", tempname, synopsis) + + config.print_stdout("\nTemplates are used via the 'init' command, e.g.:") + config.print_stdout("\n alembic init --template pylons ./scripts") + +def init(config, directory, template='generic'): + """Initialize a new scripts directory.""" + + if os.access(directory, os.F_OK): + raise util.CommandError("Directory %s already exists" % directory) + + template_dir = os.path.join(config.get_template_directory(), + template) + if not os.access(template_dir, os.F_OK): + raise util.CommandError("No such template %r" % template) + + util.status("Creating directory %s" % os.path.abspath(directory), + os.makedirs, directory) + + versions = os.path.join(directory, 'versions') + util.status("Creating directory %s" % os.path.abspath(versions), + os.makedirs, versions) + + script = ScriptDirectory(directory) + + for file_ in os.listdir(template_dir): + file_path = os.path.join(template_dir, file_) + if file_ == 'alembic.ini.mako': + config_file = os.path.abspath(config.config_file_name) + if os.access(config_file, os.F_OK): + util.msg("File %s already exists, skipping" % config_file) + else: + script._generate_template( + file_path, + config_file, + script_location=directory + ) + elif os.path.isfile(file_path): + output_file = os.path.join(directory, file_) + script._copy_file( + file_path, + output_file + ) + + util.msg("Please edit configuration/connection/logging "\ + "settings in %r before proceeding." 
% config_file) + +def revision(config, message=None, autogenerate=False, sql=False): + """Create a new revision file.""" + + script = ScriptDirectory.from_config(config) + template_args = { + 'config': config # Let templates use config for + # e.g. multiple databases + } + imports = set() + + environment = util.asbool( + config.get_main_option("revision_environment") + ) + + if autogenerate: + environment = True + def retrieve_migrations(rev, context): + if script.get_revision(rev) is not script.get_revision("head"): + raise util.CommandError("Target database is not up to date.") + autogen._produce_migration_diffs(context, template_args, imports) + return [] + elif environment: + def retrieve_migrations(rev, context): + return [] + + if environment: + with EnvironmentContext( + config, + script, + fn=retrieve_migrations, + as_sql=sql, + template_args=template_args, + ): + script.run_env() + return script.generate_revision(util.rev_id(), message, refresh=True, + **template_args) + + +def upgrade(config, revision, sql=False, tag=None): + """Upgrade to a later version.""" + + script = ScriptDirectory.from_config(config) + + starting_rev = None + if ":" in revision: + if not sql: + raise util.CommandError("Range revision not allowed") + starting_rev, revision = revision.split(':', 2) + + def upgrade(rev, context): + return script._upgrade_revs(revision, rev) + + with EnvironmentContext( + config, + script, + fn=upgrade, + as_sql=sql, + starting_rev=starting_rev, + destination_rev=revision, + tag=tag + ): + script.run_env() + +def downgrade(config, revision, sql=False, tag=None): + """Revert to a previous version.""" + + script = ScriptDirectory.from_config(config) + starting_rev = None + if ":" in revision: + if not sql: + raise util.CommandError("Range revision not allowed") + starting_rev, revision = revision.split(':', 2) + elif sql: + raise util.CommandError("downgrade with --sql requires :") + + def downgrade(rev, context): + return 
script._downgrade_revs(revision, rev) + + with EnvironmentContext( + config, + script, + fn=downgrade, + as_sql=sql, + starting_rev=starting_rev, + destination_rev=revision, + tag=tag + ): + script.run_env() + +def history(config, rev_range=None): + """List changeset scripts in chronological order.""" + + script = ScriptDirectory.from_config(config) + if rev_range is not None: + if ":" not in rev_range: + raise util.CommandError( + "History range requires [start]:[end], " + "[start]:, or :[end]") + base, head = rev_range.strip().split(":") + else: + base = head = None + + def _display_history(config, script, base, head): + for sc in script.walk_revisions( + base=base or "base", + head=head or "head"): + if sc.is_head: + config.print_stdout("") + config.print_stdout(sc.log_entry) + + def _display_history_w_current(config, script, base=None, head=None): + def _display_current_history(rev, context): + if head is None: + _display_history(config, script, base, rev) + elif base is None: + _display_history(config, script, rev, head) + return [] + + with EnvironmentContext( + config, + script, + fn=_display_current_history + ): + script.run_env() + + if base == "current": + _display_history_w_current(config, script, head=head) + elif head == "current": + _display_history_w_current(config, script, base=base) + else: + _display_history(config, script, base, head) + + +def branches(config): + """Show current un-spliced branch points""" + script = ScriptDirectory.from_config(config) + for sc in script.walk_revisions(): + if sc.is_branch_point: + config.print_stdout(sc) + for rev in sc.nextrev: + config.print_stdout("%s -> %s", + " " * len(str(sc.down_revision)), + script.get_revision(rev) + ) + +def current(config, head_only=False): + """Display the current revision for each database.""" + + script = ScriptDirectory.from_config(config) + def display_version(rev, context): + rev = script.get_revision(rev) + + if head_only: + config.print_stdout("%s%s" % ( + rev.revision if rev 
else None, + " (head)" if rev and rev.is_head else "")) + + else: + config.print_stdout("Current revision for %s: %s", + util.obfuscate_url_pw( + context.connection.engine.url), + rev) + return [] + + with EnvironmentContext( + config, + script, + fn=display_version + ): + script.run_env() + +def stamp(config, revision, sql=False, tag=None): + """'stamp' the revision table with the given revision; don't + run any migrations.""" + + script = ScriptDirectory.from_config(config) + def do_stamp(rev, context): + if sql: + current = False + else: + current = context._current_rev() + dest = script.get_revision(revision) + if dest is not None: + dest = dest.revision + context._update_current_rev(current, dest) + return [] + with EnvironmentContext( + config, + script, + fn=do_stamp, + as_sql=sql, + destination_rev=revision, + tag=tag + ): + script.run_env() + +def splice(config, parent, child): + """'splice' two branches, creating a new revision file. + + this command isn't implemented right now. 
+ + """ + raise NotImplementedError() diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/compat.py b/Linux_i686/lib/python2.7/site-packages/alembic/compat.py new file mode 100644 index 0000000..aac0560 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic/compat.py @@ -0,0 +1,130 @@ +import io +import sys +from sqlalchemy import __version__ as sa_version + +if sys.version_info < (2, 6): + raise NotImplementedError("Python 2.6 or greater is required.") + +sqla_08 = sa_version >= '0.8.0' +sqla_09 = sa_version >= '0.9.0' + +py2k = sys.version_info < (3, 0) +py3k = sys.version_info >= (3, 0) +py33 = sys.version_info >= (3, 3) + +if py3k: + import builtins as compat_builtins + string_types = str, + binary_type = bytes + text_type = str + def callable(fn): + return hasattr(fn, '__call__') + + def u(s): + return s + +else: + import __builtin__ as compat_builtins + string_types = basestring, + binary_type = str + text_type = unicode + callable = callable + + def u(s): + return unicode(s, "utf-8") + +if py3k: + from configparser import ConfigParser as SafeConfigParser + import configparser +else: + from ConfigParser import SafeConfigParser + import ConfigParser as configparser + +if py2k: + from mako.util import parse_encoding + +if py33: + from importlib import machinery + def load_module_py(module_id, path): + return machinery.SourceFileLoader(module_id, path).load_module(module_id) + + def load_module_pyc(module_id, path): + return machinery.SourcelessFileLoader(module_id, path).load_module(module_id) + +else: + import imp + def load_module_py(module_id, path): + with open(path, 'rb') as fp: + mod = imp.load_source(module_id, path, fp) + if py2k: + source_encoding = parse_encoding(fp) + if source_encoding: + mod._alembic_source_encoding = source_encoding + return mod + + def load_module_pyc(module_id, path): + with open(path, 'rb') as fp: + mod = imp.load_compiled(module_id, path, fp) + # no source encoding here + return mod + +try: + exec_ = 
getattr(compat_builtins, 'exec') +except AttributeError: + # Python 2 + def exec_(func_text, globals_, lcl): + exec('exec func_text in globals_, lcl') + +################################################ +# cross-compatible metaclass implementation +# Copyright (c) 2010-2012 Benjamin Peterson +def with_metaclass(meta, base=object): + """Create a base class with a metaclass.""" + return meta("%sBase" % meta.__name__, (base,), {}) +################################################ + + +# produce a wrapper that allows encoded text to stream +# into a given buffer, but doesn't close it. +# not sure of a more idiomatic approach to this. +class EncodedIO(io.TextIOWrapper): + def close(self): + pass + +if py2k: + # in Py2K, the io.* package is awkward because it does not + # easily wrap the file type (e.g. sys.stdout) and I can't + # figure out at all how to wrap StringIO.StringIO (used by nosetests) + # and also might be user specified too. So create a full + # adapter. + + class ActLikePy3kIO(object): + """Produce an object capable of wrapping either + sys.stdout (e.g. file) *or* StringIO.StringIO(). + + """ + def _false(self): + return False + + def _true(self): + return True + + readable = seekable = _false + writable = _true + closed = False + + def __init__(self, file_): + self.file_ = file_ + + def write(self, text): + return self.file_.write(text) + + def flush(self): + return self.file_.flush() + + class EncodedIO(EncodedIO): + def __init__(self, file_, encoding): + super(EncodedIO, self).__init__( + ActLikePy3kIO(file_), encoding=encoding) + + diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/config.py b/Linux_i686/lib/python2.7/site-packages/alembic/config.py new file mode 100644 index 0000000..86ff1df --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic/config.py @@ -0,0 +1,301 @@ +from argparse import ArgumentParser +from .compat import SafeConfigParser +import inspect +import os +import sys + +from . 
import command, util, package_dir, compat + +class Config(object): + """Represent an Alembic configuration. + + Within an ``env.py`` script, this is available + via the :attr:`.EnvironmentContext.config` attribute, + which in turn is available at ``alembic.context``:: + + from alembic import context + + some_param = context.config.get_main_option("my option") + + When invoking Alembic programatically, a new + :class:`.Config` can be created by passing + the name of an .ini file to the constructor:: + + from alembic.config import Config + alembic_cfg = Config("/path/to/yourapp/alembic.ini") + + With a :class:`.Config` object, you can then + run Alembic commands programmatically using the directives + in :mod:`alembic.command`. + + The :class:`.Config` object can also be constructed without + a filename. Values can be set programmatically, and + new sections will be created as needed:: + + from alembic.config import Config + alembic_cfg = Config() + alembic_cfg.set_main_option("script_location", "myapp:migrations") + alembic_cfg.set_main_option("url", "postgresql://foo/bar") + alembic_cfg.set_section_option("mysection", "foo", "bar") + + :param file_: name of the .ini file to open. + :param ini_section: name of the main Alembic section within the + .ini file + :param output_buffer: optional file-like input buffer which + will be passed to the :class:`.MigrationContext` - used to redirect + the output of "offline generation" when using Alembic programmatically. + :param stdout: buffer where the "print" output of commands will be sent. + Defaults to ``sys.stdout``. 
+ + ..versionadded:: 0.4 + + """ + def __init__(self, file_=None, ini_section='alembic', output_buffer=None, + stdout=sys.stdout, cmd_opts=None): + """Construct a new :class:`.Config` + + """ + self.config_file_name = file_ + self.config_ini_section = ini_section + self.output_buffer = output_buffer + self.stdout = stdout + self.cmd_opts = cmd_opts + + cmd_opts = None + """The command-line options passed to the ``alembic`` script. + + Within an ``env.py`` script this can be accessed via the + :attr:`.EnvironmentContext.config` attribute. + + .. versionadded:: 0.6.0 + + .. seealso:: + + :meth:`.EnvironmentContext.get_x_argument` + + """ + + config_file_name = None + """Filesystem path to the .ini file in use.""" + + config_ini_section = None + """Name of the config file section to read basic configuration + from. Defaults to ``alembic``, that is the ``[alembic]`` section + of the .ini file. This value is modified using the ``-n/--name`` + option to the Alembic runnier. + + """ + + def print_stdout(self, text, *arg): + """Render a message to standard out.""" + + util.write_outstream( + self.stdout, + (compat.text_type(text) % arg), + "\n" + ) + + @util.memoized_property + def file_config(self): + """Return the underlying :class:`ConfigParser` object. + + Direct access to the .ini file is available here, + though the :meth:`.Config.get_section` and + :meth:`.Config.get_main_option` + methods provide a possibly simpler interface. + + """ + + if self.config_file_name: + here = os.path.abspath(os.path.dirname(self.config_file_name)) + else: + here = "" + file_config = SafeConfigParser({'here': here}) + if self.config_file_name: + file_config.read([self.config_file_name]) + else: + file_config.add_section(self.config_ini_section) + return file_config + + def get_template_directory(self): + """Return the directory where Alembic setup templates are found. + + This method is used by the alembic ``init`` and ``list_templates`` + commands. 
+ + """ + return os.path.join(package_dir, 'templates') + + def get_section(self, name): + """Return all the configuration options from a given .ini file section + as a dictionary. + + """ + return dict(self.file_config.items(name)) + + def set_main_option(self, name, value): + """Set an option programmatically within the 'main' section. + + This overrides whatever was in the .ini file. + + """ + self.file_config.set(self.config_ini_section, name, value) + + def remove_main_option(self, name): + self.file_config.remove_option(self.config_ini_section, name) + + def set_section_option(self, section, name, value): + """Set an option programmatically within the given section. + + The section is created if it doesn't exist already. + The value here will override whatever was in the .ini + file. + + """ + if not self.file_config.has_section(section): + self.file_config.add_section(section) + self.file_config.set(section, name, value) + + def get_section_option(self, section, name, default=None): + """Return an option from the given section of the .ini file. + + """ + if not self.file_config.has_section(section): + raise util.CommandError("No config file %r found, or file has no " + "'[%s]' section" % + (self.config_file_name, section)) + if self.file_config.has_option(section, name): + return self.file_config.get(section, name) + else: + return default + + def get_main_option(self, name, default=None): + """Return an option from the 'main' section of the .ini file. + + This defaults to being a key from the ``[alembic]`` + section, unless the ``-n/--name`` flag were used to + indicate a different section. 
+ + """ + return self.get_section_option(self.config_ini_section, name, default) + + +class CommandLine(object): + def __init__(self, prog=None): + self._generate_args(prog) + + + def _generate_args(self, prog): + def add_options(parser, positional, kwargs): + if 'template' in kwargs: + parser.add_argument("-t", "--template", + default='generic', + type=str, + help="Setup template for use with 'init'") + if 'message' in kwargs: + parser.add_argument("-m", "--message", + type=str, + help="Message string to use with 'revision'") + if 'sql' in kwargs: + parser.add_argument("--sql", + action="store_true", + help="Don't emit SQL to database - dump to " + "standard output/file instead") + if 'tag' in kwargs: + parser.add_argument("--tag", + type=str, + help="Arbitrary 'tag' name - can be used by " + "custom env.py scripts.") + if 'autogenerate' in kwargs: + parser.add_argument("--autogenerate", + action="store_true", + help="Populate revision script with candidate " + "migration operations, based on comparison " + "of database to model.") + # "current" command + if 'head_only' in kwargs: + parser.add_argument("--head-only", + action="store_true", + help="Only show current version and " + "whether or not this is the head revision.") + + if 'rev_range' in kwargs: + parser.add_argument("-r", "--rev-range", + action="store", + help="Specify a revision range; " + "format is [start]:[end]") + + + positional_help = { + 'directory': "location of scripts directory", + 'revision': "revision identifier" + } + for arg in positional: + subparser.add_argument(arg, help=positional_help.get(arg)) + + parser = ArgumentParser(prog=prog) + parser.add_argument("-c", "--config", + type=str, + default="alembic.ini", + help="Alternate config file") + parser.add_argument("-n", "--name", + type=str, + default="alembic", + help="Name of section in .ini file to " + "use for Alembic config") + parser.add_argument("-x", action="append", + help="Additional arguments consumed by " + "custom env.py 
scripts, e.g. -x " + "setting1=somesetting -x setting2=somesetting") + + subparsers = parser.add_subparsers() + + for fn in [getattr(command, n) for n in dir(command)]: + if inspect.isfunction(fn) and \ + fn.__name__[0] != '_' and \ + fn.__module__ == 'alembic.command': + + spec = inspect.getargspec(fn) + if spec[3]: + positional = spec[0][1:-len(spec[3])] + kwarg = spec[0][-len(spec[3]):] + else: + positional = spec[0][1:] + kwarg = [] + + subparser = subparsers.add_parser( + fn.__name__, + help=fn.__doc__) + add_options(subparser, positional, kwarg) + subparser.set_defaults(cmd=(fn, positional, kwarg)) + self.parser = parser + + def run_cmd(self, config, options): + fn, positional, kwarg = options.cmd + + try: + fn(config, + *[getattr(options, k) for k in positional], + **dict((k, getattr(options, k)) for k in kwarg) + ) + except util.CommandError as e: + util.err(str(e)) + + def main(self, argv=None): + options = self.parser.parse_args(argv) + if not hasattr(options, "cmd"): + # see http://bugs.python.org/issue9253, argparse + # behavior changed incompatibly in py3.3 + self.parser.error("too few arguments") + else: + cfg = Config(file_=options.config, + ini_section=options.name, cmd_opts=options) + self.run_cmd(cfg, options) + +def main(argv=None, prog=None, **kwargs): + """The console runner function for Alembic.""" + + CommandLine(prog=prog).main(argv=argv) + +if __name__ == '__main__': + main() \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/context.py b/Linux_i686/lib/python2.7/site-packages/alembic/context.py new file mode 100644 index 0000000..9c0f676 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic/context.py @@ -0,0 +1,6 @@ +from .environment import EnvironmentContext +from . import util + +# create proxy functions for +# each method on the EnvironmentContext class. 
+util.create_module_class_proxy(EnvironmentContext, globals(), locals()) diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/ddl/__init__.py b/Linux_i686/lib/python2.7/site-packages/alembic/ddl/__init__.py new file mode 100644 index 0000000..bfc8ab4 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic/ddl/__init__.py @@ -0,0 +1,2 @@ +from . import postgresql, mysql, sqlite, mssql, oracle +from .impl import DefaultImpl diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/ddl/base.py b/Linux_i686/lib/python2.7/site-packages/alembic/ddl/base.py new file mode 100644 index 0000000..5d703a5 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic/ddl/base.py @@ -0,0 +1,161 @@ +import functools + +from sqlalchemy.ext.compiler import compiles +from sqlalchemy.schema import DDLElement, Column +from sqlalchemy import Integer +from sqlalchemy import types as sqltypes + +class AlterTable(DDLElement): + """Represent an ALTER TABLE statement. + + Only the string name and optional schema name of the table + is required, not a full Table object. 
+ + """ + def __init__(self, table_name, schema=None): + self.table_name = table_name + self.schema = schema + +class RenameTable(AlterTable): + def __init__(self, old_table_name, new_table_name, schema=None): + super(RenameTable, self).__init__(old_table_name, schema=schema) + self.new_table_name = new_table_name + +class AlterColumn(AlterTable): + def __init__(self, name, column_name, schema=None, + existing_type=None, + existing_nullable=None, + existing_server_default=None): + super(AlterColumn, self).__init__(name, schema=schema) + self.column_name = column_name + self.existing_type=sqltypes.to_instance(existing_type) \ + if existing_type is not None else None + self.existing_nullable=existing_nullable + self.existing_server_default=existing_server_default + +class ColumnNullable(AlterColumn): + def __init__(self, name, column_name, nullable, **kw): + super(ColumnNullable, self).__init__(name, column_name, + **kw) + self.nullable = nullable + +class ColumnType(AlterColumn): + def __init__(self, name, column_name, type_, **kw): + super(ColumnType, self).__init__(name, column_name, + **kw) + self.type_ = sqltypes.to_instance(type_) + +class ColumnName(AlterColumn): + def __init__(self, name, column_name, newname, **kw): + super(ColumnName, self).__init__(name, column_name, **kw) + self.newname = newname + +class ColumnDefault(AlterColumn): + def __init__(self, name, column_name, default, **kw): + super(ColumnDefault, self).__init__(name, column_name, **kw) + self.default = default + +class AddColumn(AlterTable): + def __init__(self, name, column, schema=None): + super(AddColumn, self).__init__(name, schema=schema) + self.column = column + +class DropColumn(AlterTable): + def __init__(self, name, column, schema=None): + super(DropColumn, self).__init__(name, schema=schema) + self.column = column + + +@compiles(RenameTable) +def visit_rename_table(element, compiler, **kw): + return "%s RENAME TO %s" % ( + alter_table(compiler, element.table_name, element.schema), 
+ format_table_name(compiler, element.new_table_name, element.schema) + ) + +@compiles(AddColumn) +def visit_add_column(element, compiler, **kw): + return "%s %s" % ( + alter_table(compiler, element.table_name, element.schema), + add_column(compiler, element.column, **kw) + ) + +@compiles(DropColumn) +def visit_drop_column(element, compiler, **kw): + return "%s %s" % ( + alter_table(compiler, element.table_name, element.schema), + drop_column(compiler, element.column.name, **kw) + ) + +@compiles(ColumnNullable) +def visit_column_nullable(element, compiler, **kw): + return "%s %s %s" % ( + alter_table(compiler, element.table_name, element.schema), + alter_column(compiler, element.column_name), + "DROP NOT NULL" if element.nullable else "SET NOT NULL" + ) + +@compiles(ColumnType) +def visit_column_type(element, compiler, **kw): + return "%s %s %s" % ( + alter_table(compiler, element.table_name, element.schema), + alter_column(compiler, element.column_name), + "TYPE %s" % format_type(compiler, element.type_) + ) + +@compiles(ColumnName) +def visit_column_name(element, compiler, **kw): + return "%s RENAME %s TO %s" % ( + alter_table(compiler, element.table_name, element.schema), + format_column_name(compiler, element.column_name), + format_column_name(compiler, element.newname) + ) + +@compiles(ColumnDefault) +def visit_column_default(element, compiler, **kw): + return "%s %s %s" % ( + alter_table(compiler, element.table_name, element.schema), + alter_column(compiler, element.column_name), + "SET DEFAULT %s" % + format_server_default(compiler, element.default) + if element.default is not None + else "DROP DEFAULT" + ) + +def quote_dotted(name, quote): + """quote the elements of a dotted name""" + + result = '.'.join([quote(x) for x in name.split('.')]) + return result + +def format_table_name(compiler, name, schema): + quote = functools.partial(compiler.preparer.quote, force=None) + if schema: + return quote_dotted(schema, quote) + "." 
+ quote(name) + else: + return quote(name) + +def format_column_name(compiler, name): + return compiler.preparer.quote(name, None) + +def format_server_default(compiler, default): + return compiler.get_column_default_string( + Column("x", Integer, server_default=default) + ) + +def format_type(compiler, type_): + return compiler.dialect.type_compiler.process(type_) + +def alter_table(compiler, name, schema): + return "ALTER TABLE %s" % format_table_name(compiler, name, schema) + +def drop_column(compiler, name): + return 'DROP COLUMN %s' % format_column_name(compiler, name) + +def alter_column(compiler, name): + return 'ALTER COLUMN %s' % format_column_name(compiler, name) + +def add_column(compiler, column, **kw): + return "ADD COLUMN %s" % compiler.get_column_specification(column, **kw) + + diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/ddl/impl.py b/Linux_i686/lib/python2.7/site-packages/alembic/ddl/impl.py new file mode 100644 index 0000000..79cbd36 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic/ddl/impl.py @@ -0,0 +1,279 @@ +from sqlalchemy.sql.expression import _BindParamClause +from sqlalchemy.ext.compiler import compiles +from sqlalchemy import schema, text +from sqlalchemy import types as sqltypes + +from ..compat import string_types, text_type, with_metaclass +from .. import util +from . import base + +class ImplMeta(type): + def __init__(cls, classname, bases, dict_): + newtype = type.__init__(cls, classname, bases, dict_) + if '__dialect__' in dict_: + _impls[dict_['__dialect__']] = cls + return newtype + +_impls = {} + +class DefaultImpl(with_metaclass(ImplMeta)): + """Provide the entrypoint for major migration operations, + including database-specific behavioral variances. 
+ + While individual SQL/DDL constructs already provide + for database-specific implementations, variances here + allow for entirely different sequences of operations + to take place for a particular migration, such as + SQL Server's special 'IDENTITY INSERT' step for + bulk inserts. + + """ + __dialect__ = 'default' + + transactional_ddl = False + command_terminator = ";" + + def __init__(self, dialect, connection, as_sql, + transactional_ddl, output_buffer, + context_opts): + self.dialect = dialect + self.connection = connection + self.as_sql = as_sql + self.output_buffer = output_buffer + self.memo = {} + self.context_opts = context_opts + if transactional_ddl is not None: + self.transactional_ddl = transactional_ddl + + @classmethod + def get_by_dialect(cls, dialect): + return _impls[dialect.name] + + def static_output(self, text): + self.output_buffer.write(text_type(text + "\n\n")) + self.output_buffer.flush() + + @property + def bind(self): + return self.connection + + def _exec(self, construct, execution_options=None, + multiparams=(), + params=util.immutabledict()): + if isinstance(construct, string_types): + construct = text(construct) + if self.as_sql: + if multiparams or params: + # TODO: coverage + raise Exception("Execution arguments not allowed with as_sql") + self.static_output(text_type( + construct.compile(dialect=self.dialect) + ).replace("\t", " ").strip() + self.command_terminator) + else: + conn = self.connection + if execution_options: + conn = conn.execution_options(**execution_options) + conn.execute(construct, *multiparams, **params) + + def execute(self, sql, execution_options=None): + self._exec(sql, execution_options) + + def alter_column(self, table_name, column_name, + nullable=None, + server_default=False, + name=None, + type_=None, + schema=None, + autoincrement=None, + existing_type=None, + existing_server_default=None, + existing_nullable=None, + existing_autoincrement=None + ): + if autoincrement is not None or 
existing_autoincrement is not None: + util.warn("nautoincrement and existing_autoincrement only make sense for MySQL") + if nullable is not None: + self._exec(base.ColumnNullable(table_name, column_name, + nullable, schema=schema, + existing_type=existing_type, + existing_server_default=existing_server_default, + existing_nullable=existing_nullable, + )) + if server_default is not False: + self._exec(base.ColumnDefault( + table_name, column_name, server_default, + schema=schema, + existing_type=existing_type, + existing_server_default=existing_server_default, + existing_nullable=existing_nullable, + )) + if type_ is not None: + self._exec(base.ColumnType( + table_name, column_name, type_, schema=schema, + existing_type=existing_type, + existing_server_default=existing_server_default, + existing_nullable=existing_nullable, + )) + # do the new name last ;) + if name is not None: + self._exec(base.ColumnName( + table_name, column_name, name, schema=schema, + existing_type=existing_type, + existing_server_default=existing_server_default, + existing_nullable=existing_nullable, + )) + + def add_column(self, table_name, column, schema=None): + self._exec(base.AddColumn(table_name, column, schema=schema)) + + def drop_column(self, table_name, column, schema=None, **kw): + self._exec(base.DropColumn(table_name, column, schema=schema)) + + def add_constraint(self, const): + if const._create_rule is None or \ + const._create_rule(self): + self._exec(schema.AddConstraint(const)) + + def drop_constraint(self, const): + self._exec(schema.DropConstraint(const)) + + def rename_table(self, old_table_name, new_table_name, schema=None): + self._exec(base.RenameTable(old_table_name, + new_table_name, schema=schema)) + + def create_table(self, table): + if util.sqla_07: + table.dispatch.before_create(table, self.connection, + checkfirst=False, + _ddl_runner=self) + self._exec(schema.CreateTable(table)) + if util.sqla_07: + table.dispatch.after_create(table, self.connection, + 
checkfirst=False, + _ddl_runner=self) + for index in table.indexes: + self._exec(schema.CreateIndex(index)) + + def drop_table(self, table): + self._exec(schema.DropTable(table)) + + def create_index(self, index): + self._exec(schema.CreateIndex(index)) + + def drop_index(self, index): + self._exec(schema.DropIndex(index)) + + def bulk_insert(self, table, rows, multiinsert=True): + if not isinstance(rows, list): + raise TypeError("List expected") + elif rows and not isinstance(rows[0], dict): + raise TypeError("List of dictionaries expected") + if self.as_sql: + for row in rows: + self._exec(table.insert(inline=True).values(**dict( + (k, + _literal_bindparam(k, v, type_=table.c[k].type) + if not isinstance(v, _literal_bindparam) else v) + for k, v in row.items() + ))) + else: + # work around http://www.sqlalchemy.org/trac/ticket/2461 + if not hasattr(table, '_autoincrement_column'): + table._autoincrement_column = None + if rows: + if multiinsert: + self._exec(table.insert(inline=True), multiparams=rows) + else: + for row in rows: + self._exec(table.insert(inline=True).values(**row)) + + def compare_type(self, inspector_column, metadata_column): + + conn_type = inspector_column.type + metadata_type = metadata_column.type + + metadata_impl = metadata_type.dialect_impl(self.dialect) + + # work around SQLAlchemy bug "stale value for type affinity" + # fixed in 0.7.4 + metadata_impl.__dict__.pop('_type_affinity', None) + + if conn_type._compare_type_affinity( + metadata_impl + ): + comparator = _type_comparators.get(conn_type._type_affinity, None) + + return comparator and comparator(metadata_type, conn_type) + else: + return True + + def compare_server_default(self, inspector_column, + metadata_column, + rendered_metadata_default, + rendered_inspector_default): + return rendered_inspector_default != rendered_metadata_default + + def correct_for_autogen_constraints(self, conn_uniques, conn_indexes, + metadata_unique_constraints, + metadata_indexes): + pass + + def 
start_migrations(self): + """A hook called when :meth:`.EnvironmentContext.run_migrations` + is called. + + Implementations can set up per-migration-run state here. + + """ + + def emit_begin(self): + """Emit the string ``BEGIN``, or the backend-specific + equivalent, on the current connection context. + + This is used in offline mode and typically + via :meth:`.EnvironmentContext.begin_transaction`. + + """ + self.static_output("BEGIN" + self.command_terminator) + + def emit_commit(self): + """Emit the string ``COMMIT``, or the backend-specific + equivalent, on the current connection context. + + This is used in offline mode and typically + via :meth:`.EnvironmentContext.begin_transaction`. + + """ + self.static_output("COMMIT" + self.command_terminator) + +class _literal_bindparam(_BindParamClause): + pass + +@compiles(_literal_bindparam) +def _render_literal_bindparam(element, compiler, **kw): + return compiler.render_literal_bindparam(element, **kw) + + +def _string_compare(t1, t2): + return \ + t1.length is not None and \ + t1.length != t2.length + +def _numeric_compare(t1, t2): + return \ + ( + t1.precision is not None and \ + t1.precision != t2.precision + ) or \ + ( + t1.scale is not None and \ + t1.scale != t2.scale + ) +_type_comparators = { + sqltypes.String:_string_compare, + sqltypes.Numeric:_numeric_compare +} + + + + diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/ddl/mssql.py b/Linux_i686/lib/python2.7/site-packages/alembic/ddl/mssql.py new file mode 100644 index 0000000..fece08b --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic/ddl/mssql.py @@ -0,0 +1,217 @@ +from sqlalchemy.ext.compiler import compiles + +from .. 
import util +from .impl import DefaultImpl +from .base import alter_table, AddColumn, ColumnName, \ + format_table_name, format_column_name, ColumnNullable, alter_column,\ + format_server_default,ColumnDefault, format_type, ColumnType +from sqlalchemy.sql.expression import ClauseElement, Executable + +class MSSQLImpl(DefaultImpl): + __dialect__ = 'mssql' + transactional_ddl = True + batch_separator = "GO" + + def __init__(self, *arg, **kw): + super(MSSQLImpl, self).__init__(*arg, **kw) + self.batch_separator = self.context_opts.get( + "mssql_batch_separator", + self.batch_separator) + + def _exec(self, construct, *args, **kw): + super(MSSQLImpl, self)._exec(construct, *args, **kw) + if self.as_sql and self.batch_separator: + self.static_output(self.batch_separator) + + def emit_begin(self): + self.static_output("BEGIN TRANSACTION" + self.command_terminator) + + def emit_commit(self): + super(MSSQLImpl, self).emit_commit() + if self.as_sql and self.batch_separator: + self.static_output(self.batch_separator) + + def alter_column(self, table_name, column_name, + nullable=None, + server_default=False, + name=None, + type_=None, + schema=None, + autoincrement=None, + existing_type=None, + existing_server_default=None, + existing_nullable=None, + existing_autoincrement=None + ): + + if nullable is not None and existing_type is None: + if type_ is not None: + existing_type = type_ + # the NULL/NOT NULL alter will handle + # the type alteration + type_ = None + else: + raise util.CommandError( + "MS-SQL ALTER COLUMN operations " + "with NULL or NOT NULL require the " + "existing_type or a new type_ be passed.") + + super(MSSQLImpl, self).alter_column( + table_name, column_name, + nullable=nullable, + type_=type_, + schema=schema, + autoincrement=autoincrement, + existing_type=existing_type, + existing_nullable=existing_nullable, + existing_autoincrement=existing_autoincrement + ) + + if server_default is not False: + if existing_server_default is not False or \ + 
server_default is None: + self._exec( + _ExecDropConstraint( + table_name, column_name, + 'sys.default_constraints') + ) + if server_default is not None: + super(MSSQLImpl, self).alter_column( + table_name, column_name, + schema=schema, + server_default=server_default) + + if name is not None: + super(MSSQLImpl, self).alter_column( + table_name, column_name, + schema=schema, + name=name) + + def bulk_insert(self, table, rows, **kw): + if self.as_sql: + self._exec( + "SET IDENTITY_INSERT %s ON" % + self.dialect.identifier_preparer.format_table(table) + ) + super(MSSQLImpl, self).bulk_insert(table, rows, **kw) + self._exec( + "SET IDENTITY_INSERT %s OFF" % + self.dialect.identifier_preparer.format_table(table) + ) + else: + super(MSSQLImpl, self).bulk_insert(table, rows, **kw) + + + def drop_column(self, table_name, column, **kw): + drop_default = kw.pop('mssql_drop_default', False) + if drop_default: + self._exec( + _ExecDropConstraint( + table_name, column, + 'sys.default_constraints') + ) + drop_check = kw.pop('mssql_drop_check', False) + if drop_check: + self._exec( + _ExecDropConstraint( + table_name, column, + 'sys.check_constraints') + ) + drop_fks = kw.pop('mssql_drop_foreign_key', False) + if drop_fks: + self._exec( + _ExecDropFKConstraint(table_name, column) + ) + super(MSSQLImpl, self).drop_column(table_name, column) + +class _ExecDropConstraint(Executable, ClauseElement): + def __init__(self, tname, colname, type_): + self.tname = tname + self.colname = colname + self.type_ = type_ + +class _ExecDropFKConstraint(Executable, ClauseElement): + def __init__(self, tname, colname): + self.tname = tname + self.colname = colname + + +@compiles(_ExecDropConstraint, 'mssql') +def _exec_drop_col_constraint(element, compiler, **kw): + tname, colname, type_ = element.tname, element.colname, element.type_ + # from http://www.mssqltips.com/sqlservertip/1425/working-with-default-constraints-in-sql-server/ + # TODO: needs table formatting, etc. 
+ return """declare @const_name varchar(256) +select @const_name = [name] from %(type)s +where parent_object_id = object_id('%(tname)s') +and col_name(parent_object_id, parent_column_id) = '%(colname)s' +exec('alter table %(tname_quoted)s drop constraint ' + @const_name)""" % { + 'type': type_, + 'tname': tname, + 'colname': colname, + 'tname_quoted': format_table_name(compiler, tname, None), + } + +@compiles(_ExecDropFKConstraint, 'mssql') +def _exec_drop_col_fk_constraint(element, compiler, **kw): + tname, colname = element.tname, element.colname + + return """declare @const_name varchar(256) +select @const_name = [name] from + sys.foreign_keys fk join sys.foreign_key_columns fkc + on fk.object_id=fkc.constraint_object_id +where fkc.parent_object_id = object_id('%(tname)s') +and col_name(fkc.parent_object_id, fkc.parent_column_id) = '%(colname)s' +exec('alter table %(tname_quoted)s drop constraint ' + @const_name)""" % { + 'tname': tname, + 'colname': colname, + 'tname_quoted': format_table_name(compiler, tname, None), + } + + + +@compiles(AddColumn, 'mssql') +def visit_add_column(element, compiler, **kw): + return "%s %s" % ( + alter_table(compiler, element.table_name, element.schema), + mssql_add_column(compiler, element.column, **kw) + ) + +def mssql_add_column(compiler, column, **kw): + return "ADD %s" % compiler.get_column_specification(column, **kw) + +@compiles(ColumnNullable, 'mssql') +def visit_column_nullable(element, compiler, **kw): + return "%s %s %s %s" % ( + alter_table(compiler, element.table_name, element.schema), + alter_column(compiler, element.column_name), + format_type(compiler, element.existing_type), + "NULL" if element.nullable else "NOT NULL" + ) + +@compiles(ColumnDefault, 'mssql') +def visit_column_default(element, compiler, **kw): + # TODO: there can also be a named constraint + # with ADD CONSTRAINT here + return "%s ADD DEFAULT %s FOR %s" % ( + alter_table(compiler, element.table_name, element.schema), + 
format_server_default(compiler, element.default), + format_column_name(compiler, element.column_name) + ) + +@compiles(ColumnName, 'mssql') +def visit_rename_column(element, compiler, **kw): + return "EXEC sp_rename '%s.%s', %s, 'COLUMN'" % ( + format_table_name(compiler, element.table_name, element.schema), + format_column_name(compiler, element.column_name), + format_column_name(compiler, element.newname) + ) + +@compiles(ColumnType, 'mssql') +def visit_column_type(element, compiler, **kw): + return "%s %s %s" % ( + alter_table(compiler, element.table_name, element.schema), + alter_column(compiler, element.column_name), + format_type(compiler, element.type_) + ) + diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/ddl/mysql.py b/Linux_i686/lib/python2.7/site-packages/alembic/ddl/mysql.py new file mode 100644 index 0000000..96f42f3 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic/ddl/mysql.py @@ -0,0 +1,212 @@ +from sqlalchemy.ext.compiler import compiles +from sqlalchemy import types as sqltypes +from sqlalchemy import schema + +from ..compat import string_types +from .. 
import util +from .impl import DefaultImpl +from .base import ColumnNullable, ColumnName, ColumnDefault, \ + ColumnType, AlterColumn, format_column_name, \ + format_server_default +from .base import alter_table + +class MySQLImpl(DefaultImpl): + __dialect__ = 'mysql' + + transactional_ddl = False + + def alter_column(self, table_name, column_name, + nullable=None, + server_default=False, + name=None, + type_=None, + schema=None, + autoincrement=None, + existing_type=None, + existing_server_default=None, + existing_nullable=None, + existing_autoincrement=None + ): + if name is not None: + self._exec( + MySQLChangeColumn( + table_name, column_name, + schema=schema, + newname=name, + nullable=nullable if nullable is not None else + existing_nullable + if existing_nullable is not None + else True, + type_=type_ if type_ is not None else existing_type, + default=server_default if server_default is not False + else existing_server_default, + autoincrement=autoincrement if autoincrement is not None + else existing_autoincrement + ) + ) + elif nullable is not None or \ + type_ is not None or \ + autoincrement is not None: + self._exec( + MySQLModifyColumn( + table_name, column_name, + schema=schema, + newname=name if name is not None else column_name, + nullable=nullable if nullable is not None else + existing_nullable + if existing_nullable is not None + else True, + type_=type_ if type_ is not None else existing_type, + default=server_default if server_default is not False + else existing_server_default, + autoincrement=autoincrement if autoincrement is not None + else existing_autoincrement + ) + ) + elif server_default is not False: + self._exec( + MySQLAlterDefault( + table_name, column_name, server_default, + schema=schema, + ) + ) + + def correct_for_autogen_constraints(self, conn_unique_constraints, + conn_indexes, + metadata_unique_constraints, + metadata_indexes): + removed = set() + for idx in list(conn_indexes): + # MySQL puts implicit indexes on FK columns, 
even if + # composite and even if MyISAM, so can't check this too easily + if idx.name == idx.columns.keys()[0]: + conn_indexes.remove(idx) + removed.add(idx.name) + + # then remove indexes from the "metadata_indexes" + # that we've removed from reflected, otherwise they come out + # as adds (see #202) + for idx in list(metadata_indexes): + if idx.name in removed: + metadata_indexes.remove(idx) + +class MySQLAlterDefault(AlterColumn): + def __init__(self, name, column_name, default, schema=None): + super(AlterColumn, self).__init__(name, schema=schema) + self.column_name = column_name + self.default = default + + +class MySQLChangeColumn(AlterColumn): + def __init__(self, name, column_name, schema=None, + newname=None, + type_=None, + nullable=None, + default=False, + autoincrement=None): + super(AlterColumn, self).__init__(name, schema=schema) + self.column_name = column_name + self.nullable = nullable + self.newname = newname + self.default = default + self.autoincrement = autoincrement + if type_ is None: + raise util.CommandError( + "All MySQL CHANGE/MODIFY COLUMN operations " + "require the existing type." 
+ ) + + self.type_ = sqltypes.to_instance(type_) + +class MySQLModifyColumn(MySQLChangeColumn): + pass + + +@compiles(ColumnNullable, 'mysql') +@compiles(ColumnName, 'mysql') +@compiles(ColumnDefault, 'mysql') +@compiles(ColumnType, 'mysql') +def _mysql_doesnt_support_individual(element, compiler, **kw): + raise NotImplementedError( + "Individual alter column constructs not supported by MySQL" + ) + + +@compiles(MySQLAlterDefault, "mysql") +def _mysql_alter_default(element, compiler, **kw): + return "%s ALTER COLUMN %s %s" % ( + alter_table(compiler, element.table_name, element.schema), + format_column_name(compiler, element.column_name), + "SET DEFAULT %s" % format_server_default(compiler, element.default) + if element.default is not None + else "DROP DEFAULT" + ) + +@compiles(MySQLModifyColumn, "mysql") +def _mysql_modify_column(element, compiler, **kw): + return "%s MODIFY %s %s" % ( + alter_table(compiler, element.table_name, element.schema), + format_column_name(compiler, element.column_name), + _mysql_colspec( + compiler, + nullable=element.nullable, + server_default=element.default, + type_=element.type_, + autoincrement=element.autoincrement + ), + ) + + +@compiles(MySQLChangeColumn, "mysql") +def _mysql_change_column(element, compiler, **kw): + return "%s CHANGE %s %s %s" % ( + alter_table(compiler, element.table_name, element.schema), + format_column_name(compiler, element.column_name), + format_column_name(compiler, element.newname), + _mysql_colspec( + compiler, + nullable=element.nullable, + server_default=element.default, + type_=element.type_, + autoincrement=element.autoincrement + ), + ) + +def _render_value(compiler, expr): + if isinstance(expr, string_types): + return "'%s'" % expr + else: + return compiler.sql_compiler.process(expr) + +def _mysql_colspec(compiler, nullable, server_default, type_, + autoincrement): + spec = "%s %s" % ( + compiler.dialect.type_compiler.process(type_), + "NULL" if nullable else "NOT NULL" + ) + if autoincrement: + 
spec += " AUTO_INCREMENT" + if server_default is not False and server_default is not None: + spec += " DEFAULT %s" % _render_value(compiler, server_default) + + return spec + +@compiles(schema.DropConstraint, "mysql") +def _mysql_drop_constraint(element, compiler, **kw): + """Redefine SQLAlchemy's drop constraint to + raise errors for invalid constraint type.""" + + constraint = element.element + if isinstance(constraint, (schema.ForeignKeyConstraint, + schema.PrimaryKeyConstraint, + schema.UniqueConstraint) + ): + return compiler.visit_drop_constraint(element, **kw) + elif isinstance(constraint, schema.CheckConstraint): + raise NotImplementedError( + "MySQL does not support CHECK constraints.") + else: + raise NotImplementedError( + "No generic 'DROP CONSTRAINT' in MySQL - " + "please specify constraint type") + diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/ddl/oracle.py b/Linux_i686/lib/python2.7/site-packages/alembic/ddl/oracle.py new file mode 100644 index 0000000..28eb246 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic/ddl/oracle.py @@ -0,0 +1,77 @@ +from sqlalchemy.ext.compiler import compiles + +from .impl import DefaultImpl +from .base import alter_table, AddColumn, ColumnName, \ + format_column_name, ColumnNullable, \ + format_server_default,ColumnDefault, format_type, ColumnType + +class OracleImpl(DefaultImpl): + __dialect__ = 'oracle' + transactional_ddl = True + batch_separator = "/" + command_terminator = "" + + def __init__(self, *arg, **kw): + super(OracleImpl, self).__init__(*arg, **kw) + self.batch_separator = self.context_opts.get( + "oracle_batch_separator", + self.batch_separator) + + def _exec(self, construct, *args, **kw): + super(OracleImpl, self)._exec(construct, *args, **kw) + if self.as_sql and self.batch_separator: + self.static_output(self.batch_separator) + + def emit_begin(self): + self._exec("SET TRANSACTION READ WRITE") + + def emit_commit(self): + self._exec("COMMIT") + +@compiles(AddColumn, 
'oracle') +def visit_add_column(element, compiler, **kw): + return "%s %s" % ( + alter_table(compiler, element.table_name, element.schema), + add_column(compiler, element.column, **kw), + ) + +@compiles(ColumnNullable, 'oracle') +def visit_column_nullable(element, compiler, **kw): + return "%s %s %s" % ( + alter_table(compiler, element.table_name, element.schema), + alter_column(compiler, element.column_name), + "NULL" if element.nullable else "NOT NULL" + ) + +@compiles(ColumnType, 'oracle') +def visit_column_type(element, compiler, **kw): + return "%s %s %s" % ( + alter_table(compiler, element.table_name, element.schema), + alter_column(compiler, element.column_name), + "%s" % format_type(compiler, element.type_) + ) + +@compiles(ColumnName, 'oracle') +def visit_column_name(element, compiler, **kw): + return "%s RENAME COLUMN %s TO %s" % ( + alter_table(compiler, element.table_name, element.schema), + format_column_name(compiler, element.column_name), + format_column_name(compiler, element.newname) + ) + +@compiles(ColumnDefault, 'oracle') +def visit_column_default(element, compiler, **kw): + return "%s %s %s" % ( + alter_table(compiler, element.table_name, element.schema), + alter_column(compiler, element.column_name), + "DEFAULT %s" % + format_server_default(compiler, element.default) + if element.default is not None + else "DEFAULT NULL" + ) + +def alter_column(compiler, name): + return 'MODIFY %s' % format_column_name(compiler, name) + +def add_column(compiler, column, **kw): + return "ADD %s" % compiler.get_column_specification(column, **kw) diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/ddl/postgresql.py b/Linux_i686/lib/python2.7/site-packages/alembic/ddl/postgresql.py new file mode 100644 index 0000000..5ca0d1f --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic/ddl/postgresql.py @@ -0,0 +1,43 @@ +import re + +from sqlalchemy import types as sqltypes + +from .base import compiles, alter_table, format_table_name, RenameTable 
+from .impl import DefaultImpl + +class PostgresqlImpl(DefaultImpl): + __dialect__ = 'postgresql' + transactional_ddl = True + + def compare_server_default(self, inspector_column, + metadata_column, + rendered_metadata_default, + rendered_inspector_default): + + # don't do defaults for SERIAL columns + if metadata_column.primary_key and \ + metadata_column is metadata_column.table._autoincrement_column: + return False + + conn_col_default = rendered_inspector_default + + if None in (conn_col_default, rendered_metadata_default): + return conn_col_default != rendered_metadata_default + + if metadata_column.type._type_affinity is not sqltypes.String: + rendered_metadata_default = re.sub(r"^'|'$", "", rendered_metadata_default) + + return not self.connection.scalar( + "SELECT %s = %s" % ( + conn_col_default, + rendered_metadata_default + ) + ) + + +@compiles(RenameTable, "postgresql") +def visit_rename_table(element, compiler, **kw): + return "%s RENAME TO %s" % ( + alter_table(compiler, element.table_name, element.schema), + format_table_name(compiler, element.new_table_name, None) + ) diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/ddl/sqlite.py b/Linux_i686/lib/python2.7/site-packages/alembic/ddl/sqlite.py new file mode 100644 index 0000000..a3c73ce --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic/ddl/sqlite.py @@ -0,0 +1,73 @@ +from .. 
import util +from .impl import DefaultImpl + +#from sqlalchemy.ext.compiler import compiles +#from .base import AddColumn, alter_table +#from sqlalchemy.schema import AddConstraint + +class SQLiteImpl(DefaultImpl): + __dialect__ = 'sqlite' + + transactional_ddl = False + """SQLite supports transactional DDL, but pysqlite does not: + see: http://bugs.python.org/issue10740 + """ + + def add_constraint(self, const): + # attempt to distinguish between an + # auto-gen constraint and an explicit one + if const._create_rule is None: + raise NotImplementedError( + "No support for ALTER of constraints in SQLite dialect") + elif const._create_rule(self): + util.warn("Skipping unsupported ALTER for " + "creation of implicit constraint") + + + def drop_constraint(self, const): + if const._create_rule is None: + raise NotImplementedError( + "No support for ALTER of constraints in SQLite dialect") + + def correct_for_autogen_constraints(self, conn_unique_constraints, conn_indexes, + metadata_unique_constraints, + metadata_indexes): + + def uq_sig(uq): + return tuple(sorted(uq.columns.keys())) + + conn_unique_sigs = set( + uq_sig(uq) + for uq in conn_unique_constraints + ) + + for idx in list(metadata_unique_constraints): + # SQLite backend can't report on unnamed UNIQUE constraints, + # so remove these, unless we see an exact signature match + if idx.name is None and uq_sig(idx) not in conn_unique_sigs: + metadata_unique_constraints.remove(idx) + + for idx in list(conn_unique_constraints): + # just in case we fix the backend such that it does report + # on them, blow them out of the reflected collection too otherwise + # they will come up as removed. if the backend supports this now, + # add a version check here for the dialect. 
+ if idx.name is None: + conn_uniques.remove(idx) + +#@compiles(AddColumn, 'sqlite') +#def visit_add_column(element, compiler, **kw): +# return "%s %s" % ( +# alter_table(compiler, element.table_name, element.schema), +# add_column(compiler, element.column, **kw) +# ) + + +#def add_column(compiler, column, **kw): +# text = "ADD COLUMN %s" % compiler.get_column_specification(column, **kw) +# # need to modify SQLAlchemy so that the CHECK associated with a Boolean +# # or Enum gets placed as part of the column constraints, not the Table +# # see ticket 98 +# for const in column.constraints: +# text += compiler.process(AddConstraint(const)) +# return text diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/environment.py b/Linux_i686/lib/python2.7/site-packages/alembic/environment.py new file mode 100644 index 0000000..f8875a2 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic/environment.py @@ -0,0 +1,791 @@ +from .operations import Operations +from .migration import MigrationContext +from . import util + +class EnvironmentContext(object): + """Represent the state made available to an ``env.py`` script. + + :class:`.EnvironmentContext` is normally instantiated + by the commands present in the :mod:`alembic.command` + module. From within an ``env.py`` script, the current + :class:`.EnvironmentContext` is available via the + ``alembic.context`` datamember. + + :class:`.EnvironmentContext` is also a Python context + manager, that is, is intended to be used using the + ``with:`` statement. 
A typical use of :class:`.EnvironmentContext`:: + + from alembic.config import Config + from alembic.script import ScriptDirectory + + config = Config() + config.set_main_option("script_location", "myapp:migrations") + script = ScriptDirectory.from_config(config) + + def my_function(rev, context): + '''do something with revision "rev", which + will be the current database revision, + and "context", which is the MigrationContext + that the env.py will create''' + + with EnvironmentContext( + config, + script, + fn = my_function, + as_sql = False, + starting_rev = 'base', + destination_rev = 'head', + tag = "sometag" + ): + script.run_env() + + The above script will invoke the ``env.py`` script + within the migration environment. If and when ``env.py`` + calls :meth:`.MigrationContext.run_migrations`, the + ``my_function()`` function above will be called + by the :class:`.MigrationContext`, given the context + itself as well as the current revision in the database. + + .. note:: + + For most API usages other than full blown + invocation of migration scripts, the :class:`.MigrationContext` + and :class:`.ScriptDirectory` objects can be created and + used directly. The :class:`.EnvironmentContext` object + is *only* needed when you need to actually invoke the + ``env.py`` module present in the migration environment. + + """ + + _migration_context = None + + config = None + """An instance of :class:`.Config` representing the + configuration file contents as well as other variables + set programmatically within it.""" + + script = None + """An instance of :class:`.ScriptDirectory` which provides + programmatic access to version files within the ``versions/`` + directory. + + """ + + def __init__(self, config, script, **kw): + """Construct a new :class:`.EnvironmentContext`. + + :param config: a :class:`.Config` instance. + :param script: a :class:`.ScriptDirectory` instance. 
+ :param \**kw: keyword options that will be ultimately + passed along to the :class:`.MigrationContext` when + :meth:`.EnvironmentContext.configure` is called. + + """ + self.config = config + self.script = script + self.context_opts = kw + + def __enter__(self): + """Establish a context which provides a + :class:`.EnvironmentContext` object to + env.py scripts. + + The :class:`.EnvironmentContext` will + be made available as ``from alembic import context``. + + """ + from .context import _install_proxy + _install_proxy(self) + return self + + def __exit__(self, *arg, **kw): + from . import context, op + context._remove_proxy() + op._remove_proxy() + + def is_offline_mode(self): + """Return True if the current migrations environment + is running in "offline mode". + + This is ``True`` or ``False`` depending + on the the ``--sql`` flag passed. + + This function does not require that the :class:`.MigrationContext` + has been configured. + + """ + return self.context_opts.get('as_sql', False) + + def is_transactional_ddl(self): + """Return True if the context is configured to expect a + transactional DDL capable backend. + + This defaults to the type of database in use, and + can be overridden by the ``transactional_ddl`` argument + to :meth:`.configure` + + This function requires that a :class:`.MigrationContext` + has first been made available via :meth:`.configure`. + + """ + return self.get_context().impl.transactional_ddl + + def requires_connection(self): + return not self.is_offline_mode() + + def get_head_revision(self): + """Return the hex identifier of the 'head' revision. + + This function does not require that the :class:`.MigrationContext` + has been configured. + + """ + return self.script._as_rev_number("head") + + def get_starting_revision_argument(self): + """Return the 'starting revision' argument, + if the revision was passed using ``start:end``. + + This is only meaningful in "offline" mode. 
+ Returns ``None`` if no value is available + or was configured. + + This function does not require that the :class:`.MigrationContext` + has been configured. + + """ + if self._migration_context is not None: + return self.script._as_rev_number( + self.get_context()._start_from_rev) + elif 'starting_rev' in self.context_opts: + return self.script._as_rev_number( + self.context_opts['starting_rev']) + else: + raise util.CommandError( + "No starting revision argument is available.") + + def get_revision_argument(self): + """Get the 'destination' revision argument. + + This is typically the argument passed to the + ``upgrade`` or ``downgrade`` command. + + If it was specified as ``head``, the actual + version number is returned; if specified + as ``base``, ``None`` is returned. + + This function does not require that the :class:`.MigrationContext` + has been configured. + + """ + return self.script._as_rev_number( + self.context_opts['destination_rev']) + + def get_tag_argument(self): + """Return the value passed for the ``--tag`` argument, if any. + + The ``--tag`` argument is not used directly by Alembic, + but is available for custom ``env.py`` configurations that + wish to use it; particularly for offline generation scripts + that wish to generate tagged filenames. + + This function does not require that the :class:`.MigrationContext` + has been configured. + + .. seealso:: + + :meth:`.EnvironmentContext.get_x_argument` - a newer and more + open ended system of extending ``env.py`` scripts via the command + line. + + """ + return self.context_opts.get('tag', None) + + def get_x_argument(self, as_dictionary=False): + """Return the value(s) passed for the ``-x`` argument, if any. + + The ``-x`` argument is an open ended flag that allows any user-defined + value or values to be passed on the command line, then available + here for consumption by a custom ``env.py`` script. + + The return value is a list, returned directly from the ``argparse`` + structure. 
If ``as_dictionary=True`` is passed, the ``x`` arguments + are parsed using ``key=value`` format into a dictionary that is + then returned. + + For example, to support passing a database URL on the command line, + the standard ``env.py`` script can be modified like this:: + + cmd_line_url = context.get_x_argument(as_dictionary=True).get('dbname') + if cmd_line_url: + engine = create_engine(cmd_line_url) + else: + engine = engine_from_config( + config.get_section(config.config_ini_section), + prefix='sqlalchemy.', + poolclass=pool.NullPool) + + This then takes effect by running the ``alembic`` script as:: + + alembic -x dbname=postgresql://user:pass@host/dbname upgrade head + + This function does not require that the :class:`.MigrationContext` + has been configured. + + .. versionadded:: 0.6.0 + + .. seealso:: + + :meth:`.EnvironmentContext.get_tag_argument` + + :attr:`.Config.cmd_opts` + + """ + if self.config.cmd_opts is not None: + value = self.config.cmd_opts.x or [] + else: + value = [] + if as_dictionary: + value = dict( + arg.split('=', 1) for arg in value + ) + return value + + def configure(self, + connection=None, + url=None, + dialect_name=None, + transactional_ddl=None, + transaction_per_migration=False, + output_buffer=None, + starting_rev=None, + tag=None, + template_args=None, + target_metadata=None, + include_symbol=None, + include_object=None, + include_schemas=False, + compare_type=False, + compare_server_default=False, + render_item=None, + upgrade_token="upgrades", + downgrade_token="downgrades", + alembic_module_prefix="op.", + sqlalchemy_module_prefix="sa.", + user_module_prefix=None, + **kw + ): + """Configure a :class:`.MigrationContext` within this + :class:`.EnvironmentContext` which will provide database + connectivity and other configuration to a series of + migration scripts. 
+ + Many methods on :class:`.EnvironmentContext` require that + this method has been called in order to function, as they + ultimately need to have database access or at least access + to the dialect in use. Those which do are documented as such. + + The important thing needed by :meth:`.configure` is a + means to determine what kind of database dialect is in use. + An actual connection to that database is needed only if + the :class:`.MigrationContext` is to be used in + "online" mode. + + If the :meth:`.is_offline_mode` function returns ``True``, + then no connection is needed here. Otherwise, the + ``connection`` parameter should be present as an + instance of :class:`sqlalchemy.engine.Connection`. + + This function is typically called from the ``env.py`` + script within a migration environment. It can be called + multiple times for an invocation. The most recent + :class:`~sqlalchemy.engine.Connection` + for which it was called is the one that will be operated upon + by the next call to :meth:`.run_migrations`. + + General parameters: + + :param connection: a :class:`~sqlalchemy.engine.Connection` + to use + for SQL execution in "online" mode. When present, is also + used to determine the type of dialect in use. + :param url: a string database url, or a + :class:`sqlalchemy.engine.url.URL` object. + The type of dialect to be used will be derived from this if + ``connection`` is not passed. + :param dialect_name: string name of a dialect, such as + "postgresql", "mssql", etc. + The type of dialect to be used will be derived from this if + ``connection`` and ``url`` are not passed. + :param transactional_ddl: Force the usage of "transactional" + DDL on or off; + this otherwise defaults to whether or not the dialect in + use supports it. + :param transaction_per_migration: if True, nest each migration script + in a transaction rather than the full series of migrations to + run. + + .. 
versionadded:: 0.6.5 + + :param output_buffer: a file-like object that will be used + for textual output + when the ``--sql`` option is used to generate SQL scripts. + Defaults to + ``sys.stdout`` if not passed here and also not present on + the :class:`.Config` + object. The value here overrides that of the :class:`.Config` + object. + :param output_encoding: when using ``--sql`` to generate SQL + scripts, apply this encoding to the string output. + + .. versionadded:: 0.5.0 + + :param starting_rev: Override the "starting revision" argument + when using ``--sql`` mode. + :param tag: a string tag for usage by custom ``env.py`` scripts. + Set via the ``--tag`` option, can be overridden here. + :param template_args: dictionary of template arguments which + will be added to the template argument environment when + running the "revision" command. Note that the script environment + is only run within the "revision" command if the --autogenerate + option is used, or if the option "revision_environment=true" + is present in the alembic.ini file. + + .. versionadded:: 0.3.3 + + :param version_table: The name of the Alembic version table. + The default is ``'alembic_version'``. + :param version_table_schema: Optional schema to place version + table within. + + .. versionadded:: 0.5.0 + + Parameters specific to the autogenerate feature, when + ``alembic revision`` is run with the ``--autogenerate`` feature: + + :param target_metadata: a :class:`sqlalchemy.schema.MetaData` + object that + will be consulted during autogeneration. The tables present + will be compared against + what is locally available on the target + :class:`~sqlalchemy.engine.Connection` + to produce candidate upgrade/downgrade operations. + + :param compare_type: Indicates type comparison behavior during + an autogenerate + operation. Defaults to ``False`` which disables type + comparison. Set to + ``True`` to turn on default type comparison, which has varied + accuracy depending on backend. 
+
+ To customize type comparison behavior, a callable may be
+ specified which
+ can filter type comparisons during an autogenerate operation.
+ The format of this callable is::
+
+ def my_compare_type(context, inspected_column,
+ metadata_column, inspected_type, metadata_type):
+ # return True if the types are different,
+ # False if not, or None to allow the default implementation
+ # to compare these types
+ return None
+
+ context.configure(
+ # ...
+ compare_type = my_compare_type
+ )
+
+
+ ``inspected_column`` is a :class:`sqlalchemy.schema.Column` as returned by
+ :meth:`sqlalchemy.engine.reflection.Inspector.reflecttable`, whereas
+ ``metadata_column`` is a :class:`sqlalchemy.schema.Column` from
+ the local model environment.
+
+ A return value of ``None`` indicates to allow default type
+ comparison to proceed.
+
+ .. seealso::
+
+ :paramref:`.EnvironmentContext.configure.compare_server_default`
+
+ :param compare_server_default: Indicates server default comparison
+ behavior during
+ an autogenerate operation. Defaults to ``False`` which disables
+ server default
+ comparison. Set to ``True`` to turn on server default comparison,
+ which has
+ varied accuracy depending on backend.
+
+ To customize server default comparison behavior, a callable may
+ be specified
+ which can filter server default comparisons during an
+ autogenerate operation.
+ The format of this
+ callable is::
+
+ def my_compare_server_default(context, inspected_column,
+ metadata_column, inspected_default, metadata_default,
+ rendered_metadata_default):
+ # return True if the defaults are different,
+ # False if not, or None to allow the default implementation
+ # to compare these defaults
+ return None
+
+ context.configure(
+ # ... 
+ compare_server_default = my_compare_server_default + ) + + ``inspected_column`` is a dictionary structure as returned by + :meth:`sqlalchemy.engine.reflection.Inspector.get_columns`, whereas + ``metadata_column`` is a :class:`sqlalchemy.schema.Column` from + the local model environment. + + A return value of ``None`` indicates to allow default server default + comparison + to proceed. Note that some backends such as Postgresql actually + execute + the two defaults on the database side to compare for equivalence. + + .. seealso:: + + :paramref:`.EnvironmentContext.configure.compare_type` + + :param include_object: A callable function which is given + the chance to return ``True`` or ``False`` for any object, + indicating if the given object should be considered in the + autogenerate sweep. + + The function accepts the following positional arguments: + + * ``object``: a :class:`~sqlalchemy.schema.SchemaItem` object such as a + :class:`~sqlalchemy.schema.Table` or :class:`~sqlalchemy.schema.Column` + object + * ``name``: the name of the object. This is typically available + via ``object.name``. + * ``type``: a string describing the type of object; currently + ``"table"`` or ``"column"`` + * ``reflected``: ``True`` if the given object was produced based on + table reflection, ``False`` if it's from a local :class:`.MetaData` + object. + * ``compare_to``: the object being compared against, if available, + else ``None``. + + E.g.:: + + def include_object(object, name, type_, reflected, compare_to): + if (type_ == "column" and + not reflected and + object.info.get("skip_autogenerate", False)): + return False + else: + return True + + context.configure( + # ... + include_object = include_object + ) + + :paramref:`.EnvironmentContext.configure.include_object` can also + be used to filter on specific schemas to include or omit, when + the :paramref:`.EnvironmentContext.configure.include_schemas` + flag is set to ``True``. 
The :attr:`.Table.schema` attribute
+ on each :class:`.Table` object reflected will indicate the name of the
+ schema from which the :class:`.Table` originates.
+
+ .. versionadded:: 0.6.0
+
+ .. seealso::
+
+ :paramref:`.EnvironmentContext.configure.include_schemas`
+
+ :param include_symbol: A callable function which, given a table name
+ and schema name (may be ``None``), returns ``True`` or ``False``, indicating
+ if the given table should be considered in the autogenerate sweep.
+
+ .. deprecated:: 0.6.0 :paramref:`.EnvironmentContext.configure.include_symbol`
+ is superseded by the more generic
+ :paramref:`.EnvironmentContext.configure.include_object`
+ parameter.
+
+ E.g.::
+
+ def include_symbol(tablename, schema):
+ return tablename not in ("skip_table_one", "skip_table_two")
+
+ context.configure(
+ # ...
+ include_symbol = include_symbol
+ )
+
+ .. seealso::
+
+ :paramref:`.EnvironmentContext.configure.include_schemas`
+
+ :paramref:`.EnvironmentContext.configure.include_object`
+
+ :param include_schemas: If True, autogenerate will scan across
+ all schemas located by the SQLAlchemy
+ :meth:`~sqlalchemy.engine.reflection.Inspector.get_schema_names`
+ method, and include all differences in tables found across all
+ those schemas. When using this option, you may want to also
+ use the :paramref:`.EnvironmentContext.configure.include_object`
+ option to specify a callable which
+ can filter the tables/schemas that get included.
+
+ .. versionadded:: 0.4.0
+
+ .. seealso::
+
+ :paramref:`.EnvironmentContext.configure.include_object`
+
+ :param render_item: Callable that can be used to override how
+ any schema item, i.e. column, constraint, type,
+ etc., is rendered for autogenerate. The callable receives a
+ string describing the type of object, the object, and
+ the autogen context. If it returns False, the
+ default rendering method will be used. 
If it returns None, + the item will not be rendered in the context of a Table + construct, that is, can be used to skip columns or constraints + within op.create_table():: + + def my_render_column(type_, col, autogen_context): + if type_ == "column" and isinstance(col, MySpecialCol): + return repr(col) + else: + return False + + context.configure( + # ... + render_item = my_render_column + ) + + Available values for the type string include: ``"column"``, + ``"primary_key"``, ``"foreign_key"``, ``"unique"``, ``"check"``, + ``"type"``, ``"server_default"``. + + .. versionadded:: 0.5.0 + + .. seealso:: + + :ref:`autogen_render_types` + + :param upgrade_token: When autogenerate completes, the text of the + candidate upgrade operations will be present in this template + variable when ``script.py.mako`` is rendered. Defaults to + ``upgrades``. + :param downgrade_token: When autogenerate completes, the text of the + candidate downgrade operations will be present in this + template variable when ``script.py.mako`` is rendered. Defaults to + ``downgrades``. + + :param alembic_module_prefix: When autogenerate refers to Alembic + :mod:`alembic.operations` constructs, this prefix will be used + (i.e. ``op.create_table``) Defaults to "``op.``". + Can be ``None`` to indicate no prefix. + + :param sqlalchemy_module_prefix: When autogenerate refers to + SQLAlchemy + :class:`~sqlalchemy.schema.Column` or type classes, this prefix + will be used + (i.e. ``sa.Column("somename", sa.Integer)``) Defaults to "``sa.``". + Can be ``None`` to indicate no prefix. + Note that when dialect-specific types are rendered, autogenerate + will render them using the dialect module name, i.e. ``mssql.BIT()``, + ``postgresql.UUID()``. + + :param user_module_prefix: When autogenerate refers to a SQLAlchemy + type (e.g. 
:class:`.TypeEngine`) where the module name is not + under the ``sqlalchemy`` namespace, this prefix will be used + within autogenerate, if non-``None``; if left at its default of + ``None``, the + :paramref:`.EnvironmentContext.configure.sqlalchemy_module_prefix` + is used instead. + + .. versionadded:: 0.6.3 added + :paramref:`.EnvironmentContext.configure.user_module_prefix` + + .. seealso:: + + :ref:`autogen_module_prefix` + + Parameters specific to individual backends: + + :param mssql_batch_separator: The "batch separator" which will + be placed between each statement when generating offline SQL Server + migrations. Defaults to ``GO``. Note this is in addition to the + customary semicolon ``;`` at the end of each statement; SQL Server + considers the "batch separator" to denote the end of an + individual statement execution, and cannot group certain + dependent operations in one step. + :param oracle_batch_separator: The "batch separator" which will + be placed between each statement when generating offline + Oracle migrations. Defaults to ``/``. Oracle doesn't add a + semicolon between statements like most other backends. 
+ + """ + opts = self.context_opts + if transactional_ddl is not None: + opts["transactional_ddl"] = transactional_ddl + if output_buffer is not None: + opts["output_buffer"] = output_buffer + elif self.config.output_buffer is not None: + opts["output_buffer"] = self.config.output_buffer + if starting_rev: + opts['starting_rev'] = starting_rev + if tag: + opts['tag'] = tag + if template_args and 'template_args' in opts: + opts['template_args'].update(template_args) + opts["transaction_per_migration"] = transaction_per_migration + opts['target_metadata'] = target_metadata + opts['include_symbol'] = include_symbol + opts['include_object'] = include_object + opts['include_schemas'] = include_schemas + opts['upgrade_token'] = upgrade_token + opts['downgrade_token'] = downgrade_token + opts['sqlalchemy_module_prefix'] = sqlalchemy_module_prefix + opts['alembic_module_prefix'] = alembic_module_prefix + opts['user_module_prefix'] = user_module_prefix + if render_item is not None: + opts['render_item'] = render_item + if compare_type is not None: + opts['compare_type'] = compare_type + if compare_server_default is not None: + opts['compare_server_default'] = compare_server_default + opts['script'] = self.script + + opts.update(kw) + + self._migration_context = MigrationContext.configure( + connection=connection, + url=url, + dialect_name=dialect_name, + opts=opts + ) + + def run_migrations(self, **kw): + """Run migrations as determined by the current command line + configuration + as well as versioning information present (or not) in the current + database connection (if one is present). + + The function accepts optional ``**kw`` arguments. If these are + passed, they are sent directly to the ``upgrade()`` and + ``downgrade()`` + functions within each target revision file. 
By modifying the + ``script.py.mako`` file so that the ``upgrade()`` and ``downgrade()`` + functions accept arguments, parameters can be passed here so that + contextual information, usually information to identify a particular + database in use, can be passed from a custom ``env.py`` script + to the migration functions. + + This function requires that a :class:`.MigrationContext` has + first been made available via :meth:`.configure`. + + """ + with Operations.context(self._migration_context): + self.get_context().run_migrations(**kw) + + def execute(self, sql, execution_options=None): + """Execute the given SQL using the current change context. + + The behavior of :meth:`.execute` is the same + as that of :meth:`.Operations.execute`. Please see that + function's documentation for full detail including + caveats and limitations. + + This function requires that a :class:`.MigrationContext` has + first been made available via :meth:`.configure`. + + """ + self.get_context().execute(sql, + execution_options=execution_options) + + def static_output(self, text): + """Emit text directly to the "offline" SQL stream. + + Typically this is for emitting comments that + start with --. The statement is not treated + as a SQL execution, no ; or batch separator + is added, etc. + + """ + self.get_context().impl.static_output(text) + + + def begin_transaction(self): + """Return a context manager that will + enclose an operation within a "transaction", + as defined by the environment's offline + and transactional DDL settings. + + e.g.:: + + with context.begin_transaction(): + context.run_migrations() + + :meth:`.begin_transaction` is intended to + "do the right thing" regardless of + calling context: + + * If :meth:`.is_transactional_ddl` is ``False``, + returns a "do nothing" context manager + which otherwise produces no transactional + state or directives. 
+ * If :meth:`.is_offline_mode` is ``True``, + returns a context manager that will + invoke the :meth:`.DefaultImpl.emit_begin` + and :meth:`.DefaultImpl.emit_commit` + methods, which will produce the string + directives ``BEGIN`` and ``COMMIT`` on + the output stream, as rendered by the + target backend (e.g. SQL Server would + emit ``BEGIN TRANSACTION``). + * Otherwise, calls :meth:`sqlalchemy.engine.Connection.begin` + on the current online connection, which + returns a :class:`sqlalchemy.engine.Transaction` + object. This object demarcates a real + transaction and is itself a context manager, + which will roll back if an exception + is raised. + + Note that a custom ``env.py`` script which + has more specific transactional needs can of course + manipulate the :class:`~sqlalchemy.engine.Connection` + directly to produce transactional state in "online" + mode. + + """ + + return self.get_context().begin_transaction() + + + def get_context(self): + """Return the current :class:`.MigrationContext` object. + + If :meth:`.EnvironmentContext.configure` has not been + called yet, raises an exception. + + """ + + if self._migration_context is None: + raise Exception("No context has been configured yet.") + return self._migration_context + + def get_bind(self): + """Return the current 'bind'. + + In "online" mode, this is the + :class:`sqlalchemy.engine.Connection` currently being used + to emit SQL to the database. + + This function requires that a :class:`.MigrationContext` + has first been made available via :meth:`.configure`. 
+ + """ + return self.get_context().bind + + def get_impl(self): + return self.get_context().impl + diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/migration.py b/Linux_i686/lib/python2.7/site-packages/alembic/migration.py new file mode 100644 index 0000000..e554515 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic/migration.py @@ -0,0 +1,352 @@ +import io +import logging +import sys +from contextlib import contextmanager + + +from sqlalchemy import MetaData, Table, Column, String, literal_column +from sqlalchemy import create_engine +from sqlalchemy.engine import url as sqla_url + +from .compat import callable, EncodedIO +from . import ddl, util + +log = logging.getLogger(__name__) + +class MigrationContext(object): + """Represent the database state made available to a migration + script. + + :class:`.MigrationContext` is the front end to an actual + database connection, or alternatively a string output + stream given a particular database dialect, + from an Alembic perspective. + + When inside the ``env.py`` script, the :class:`.MigrationContext` + is available via the + :meth:`.EnvironmentContext.get_context` method, + which is available at ``alembic.context``:: + + # from within env.py script + from alembic import context + migration_context = context.get_context() + + For usage outside of an ``env.py`` script, such as for + utility routines that want to check the current version + in the database, the :meth:`.MigrationContext.configure` + method to create new :class:`.MigrationContext` objects. 
+ For example, to get at the current revision in the + database using :meth:`.MigrationContext.get_current_revision`:: + + # in any application, outside of an env.py script + from alembic.migration import MigrationContext + from sqlalchemy import create_engine + + engine = create_engine("postgresql://mydatabase") + conn = engine.connect() + + context = MigrationContext.configure(conn) + current_rev = context.get_current_revision() + + The above context can also be used to produce + Alembic migration operations with an :class:`.Operations` + instance:: + + # in any application, outside of the normal Alembic environment + from alembic.operations import Operations + op = Operations(context) + op.alter_column("mytable", "somecolumn", nullable=True) + + """ + def __init__(self, dialect, connection, opts): + self.opts = opts + self.dialect = dialect + self.script = opts.get('script') + + as_sql = opts.get('as_sql', False) + transactional_ddl = opts.get("transactional_ddl") + + self._transaction_per_migration = opts.get( + "transaction_per_migration", False) + + if as_sql: + self.connection = self._stdout_connection(connection) + assert self.connection is not None + else: + self.connection = connection + self._migrations_fn = opts.get('fn') + self.as_sql = as_sql + + if "output_encoding" in opts: + self.output_buffer = EncodedIO( + opts.get("output_buffer") or sys.stdout, + opts['output_encoding'] + ) + else: + self.output_buffer = opts.get("output_buffer", sys.stdout) + + self._user_compare_type = opts.get('compare_type', False) + self._user_compare_server_default = opts.get( + 'compare_server_default', + False) + version_table = opts.get('version_table', 'alembic_version') + version_table_schema = opts.get('version_table_schema', None) + self._version = Table( + version_table, MetaData(), + Column('version_num', String(32), nullable=False), + schema=version_table_schema) + + self._start_from_rev = opts.get("starting_rev") + self.impl = 
ddl.DefaultImpl.get_by_dialect(dialect)( + dialect, self.connection, self.as_sql, + transactional_ddl, + self.output_buffer, + opts + ) + log.info("Context impl %s.", self.impl.__class__.__name__) + if self.as_sql: + log.info("Generating static SQL") + log.info("Will assume %s DDL.", + "transactional" if self.impl.transactional_ddl + else "non-transactional") + + @classmethod + def configure(cls, + connection=None, + url=None, + dialect_name=None, + opts={}, + ): + """Create a new :class:`.MigrationContext`. + + This is a factory method usually called + by :meth:`.EnvironmentContext.configure`. + + :param connection: a :class:`~sqlalchemy.engine.Connection` + to use for SQL execution in "online" mode. When present, + is also used to determine the type of dialect in use. + :param url: a string database url, or a + :class:`sqlalchemy.engine.url.URL` object. + The type of dialect to be used will be derived from this if + ``connection`` is not passed. + :param dialect_name: string name of a dialect, such as + "postgresql", "mssql", etc. The type of dialect to be used will be + derived from this if ``connection`` and ``url`` are not passed. + :param opts: dictionary of options. Most other options + accepted by :meth:`.EnvironmentContext.configure` are passed via + this dictionary. 
+ + """ + if connection: + dialect = connection.dialect + elif url: + url = sqla_url.make_url(url) + dialect = url.get_dialect()() + elif dialect_name: + url = sqla_url.make_url("%s://" % dialect_name) + dialect = url.get_dialect()() + else: + raise Exception("Connection, url, or dialect_name is required.") + + return MigrationContext(dialect, connection, opts) + + + def begin_transaction(self, _per_migration=False): + transaction_now = _per_migration == self._transaction_per_migration + + if not transaction_now: + @contextmanager + def do_nothing(): + yield + return do_nothing() + + elif not self.impl.transactional_ddl: + @contextmanager + def do_nothing(): + yield + return do_nothing() + elif self.as_sql: + @contextmanager + def begin_commit(): + self.impl.emit_begin() + yield + self.impl.emit_commit() + return begin_commit() + else: + return self.bind.begin() + + def get_current_revision(self): + """Return the current revision, usually that which is present + in the ``alembic_version`` table in the database. + + If this :class:`.MigrationContext` was configured in "offline" + mode, that is with ``as_sql=True``, the ``starting_rev`` + parameter is returned instead, if any. + + """ + if self.as_sql: + return self._start_from_rev + else: + if self._start_from_rev: + raise util.CommandError( + "Can't specify current_rev to context " + "when using a database connection") + self._version.create(self.connection, checkfirst=True) + return self.connection.scalar(self._version.select()) + + _current_rev = get_current_revision + """The 0.2 method name, for backwards compat.""" + + def _update_current_rev(self, old, new): + if old == new: + return + if new is None: + self.impl._exec(self._version.delete()) + elif old is None: + self.impl._exec(self._version.insert(). + values(version_num=literal_column("'%s'" % new)) + ) + else: + self.impl._exec(self._version.update(). 
+ values(version_num=literal_column("'%s'" % new)) + ) + + def run_migrations(self, **kw): + """Run the migration scripts established for this :class:`.MigrationContext`, + if any. + + The commands in :mod:`alembic.command` will set up a function + that is ultimately passed to the :class:`.MigrationContext` + as the ``fn`` argument. This function represents the "work" + that will be done when :meth:`.MigrationContext.run_migrations` + is called, typically from within the ``env.py`` script of the + migration environment. The "work function" then provides an iterable + of version callables and other version information which + in the case of the ``upgrade`` or ``downgrade`` commands are the + list of version scripts to invoke. Other commands yield nothing, + in the case that a command wants to run some other operation + against the database such as the ``current`` or ``stamp`` commands. + + :param \**kw: keyword arguments here will be passed to each + migration callable, that is the ``upgrade()`` or ``downgrade()`` + method within revision scripts. 
+ + """ + current_rev = rev = False + stamp_per_migration = not self.impl.transactional_ddl or \ + self._transaction_per_migration + + self.impl.start_migrations() + for change, prev_rev, rev, doc in self._migrations_fn( + self.get_current_revision(), + self): + with self.begin_transaction(_per_migration=True): + if current_rev is False: + current_rev = prev_rev + if self.as_sql and not current_rev: + self._version.create(self.connection) + if doc: + log.info("Running %s %s -> %s, %s", change.__name__, prev_rev, + rev, doc) + else: + log.info("Running %s %s -> %s", change.__name__, prev_rev, rev) + if self.as_sql: + self.impl.static_output( + "-- Running %s %s -> %s" % + (change.__name__, prev_rev, rev) + ) + change(**kw) + if stamp_per_migration: + self._update_current_rev(prev_rev, rev) + prev_rev = rev + + if rev is not False: + if not stamp_per_migration: + self._update_current_rev(current_rev, rev) + + if self.as_sql and not rev: + self._version.drop(self.connection) + + def execute(self, sql, execution_options=None): + """Execute a SQL construct or string statement. + + The underlying execution mechanics are used, that is + if this is "offline mode" the SQL is written to the + output buffer, otherwise the SQL is emitted on + the current SQLAlchemy connection. + + """ + self.impl._exec(sql, execution_options) + + def _stdout_connection(self, connection): + def dump(construct, *multiparams, **params): + self.impl._exec(construct) + + return create_engine("%s://" % self.dialect.name, + strategy="mock", executor=dump) + + @property + def bind(self): + """Return the current "bind". 
+ + In online mode, this is an instance of + :class:`sqlalchemy.engine.Connection`, and is suitable + for ad-hoc execution of any kind of usage described + in :ref:`sqlexpression_toplevel` as well as + for usage with the :meth:`sqlalchemy.schema.Table.create` + and :meth:`sqlalchemy.schema.MetaData.create_all` methods + of :class:`~sqlalchemy.schema.Table`, :class:`~sqlalchemy.schema.MetaData`. + + Note that when "standard output" mode is enabled, + this bind will be a "mock" connection handler that cannot + return results and is only appropriate for a very limited + subset of commands. + + """ + return self.connection + + def _compare_type(self, inspector_column, metadata_column): + if self._user_compare_type is False: + return False + + if callable(self._user_compare_type): + user_value = self._user_compare_type( + self, + inspector_column, + metadata_column, + inspector_column.type, + metadata_column.type + ) + if user_value is not None: + return user_value + + return self.impl.compare_type( + inspector_column, + metadata_column) + + def _compare_server_default(self, inspector_column, + metadata_column, + rendered_metadata_default, + rendered_column_default): + + if self._user_compare_server_default is False: + return False + + if callable(self._user_compare_server_default): + user_value = self._user_compare_server_default( + self, + inspector_column, + metadata_column, + rendered_column_default, + metadata_column.server_default, + rendered_metadata_default + ) + if user_value is not None: + return user_value + + return self.impl.compare_server_default( + inspector_column, + metadata_column, + rendered_metadata_default, + rendered_column_default) + diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/op.py b/Linux_i686/lib/python2.7/site-packages/alembic/op.py new file mode 100644 index 0000000..8e5f777 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic/op.py @@ -0,0 +1,6 @@ +from .operations import Operations +from . 
import util + +# create proxy functions for +# each method on the Operations class. +util.create_module_class_proxy(Operations, globals(), locals()) diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/operations.py b/Linux_i686/lib/python2.7/site-packages/alembic/operations.py new file mode 100644 index 0000000..f1d06a5 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic/operations.py @@ -0,0 +1,1037 @@ +from contextlib import contextmanager + +from sqlalchemy.types import NULLTYPE, Integer +from sqlalchemy import schema as sa_schema + +from . import util +from .compat import string_types +from .ddl import impl + +__all__ = ('Operations',) + +try: + from sqlalchemy.sql.naming import conv +except: + conv = None + +class Operations(object): + """Define high level migration operations. + + Each operation corresponds to some schema migration operation, + executed against a particular :class:`.MigrationContext` + which in turn represents connectivity to a database, + or a file output stream. + + While :class:`.Operations` is normally configured as + part of the :meth:`.EnvironmentContext.run_migrations` + method called from an ``env.py`` script, a standalone + :class:`.Operations` instance can be + made for use cases external to regular Alembic + migrations by passing in a :class:`.MigrationContext`:: + + from alembic.migration import MigrationContext + from alembic.operations import Operations + + conn = myengine.connect() + ctx = MigrationContext.configure(conn) + op = Operations(ctx) + + op.alter_column("t", "c", nullable=True) + + """ + def __init__(self, migration_context): + """Construct a new :class:`.Operations` + + :param migration_context: a :class:`.MigrationContext` + instance. 
+ + """ + self.migration_context = migration_context + self.impl = migration_context.impl + + @classmethod + @contextmanager + def context(cls, migration_context): + from .op import _install_proxy, _remove_proxy + op = Operations(migration_context) + _install_proxy(op) + yield op + _remove_proxy() + + + def _primary_key_constraint(self, name, table_name, cols, schema=None): + m = self._metadata() + columns = [sa_schema.Column(n, NULLTYPE) for n in cols] + t1 = sa_schema.Table(table_name, m, + *columns, + schema=schema) + p = sa_schema.PrimaryKeyConstraint(*columns, name=name) + t1.append_constraint(p) + return p + + def _foreign_key_constraint(self, name, source, referent, + local_cols, remote_cols, + onupdate=None, ondelete=None, + deferrable=None, source_schema=None, + referent_schema=None, initially=None, + match=None, **dialect_kw): + m = self._metadata() + if source == referent: + t1_cols = local_cols + remote_cols + else: + t1_cols = local_cols + sa_schema.Table(referent, m, + *[sa_schema.Column(n, NULLTYPE) for n in remote_cols], + schema=referent_schema) + + t1 = sa_schema.Table(source, m, + *[sa_schema.Column(n, NULLTYPE) for n in t1_cols], + schema=source_schema) + + tname = "%s.%s" % (referent_schema, referent) if referent_schema \ + else referent + f = sa_schema.ForeignKeyConstraint(local_cols, + ["%s.%s" % (tname, n) + for n in remote_cols], + name=name, + onupdate=onupdate, + ondelete=ondelete, + deferrable=deferrable, + initially=initially, + match=match, + **dialect_kw + ) + t1.append_constraint(f) + + return f + + def _unique_constraint(self, name, source, local_cols, schema=None, **kw): + t = sa_schema.Table(source, self._metadata(), + *[sa_schema.Column(n, NULLTYPE) for n in local_cols], + schema=schema) + kw['name'] = name + uq = sa_schema.UniqueConstraint(*[t.c[n] for n in local_cols], **kw) + # TODO: need event tests to ensure the event + # is fired off here + t.append_constraint(uq) + return uq + + def _check_constraint(self, name, source, 
condition, schema=None, **kw): + t = sa_schema.Table(source, self._metadata(), + sa_schema.Column('x', Integer), schema=schema) + ck = sa_schema.CheckConstraint(condition, name=name, **kw) + t.append_constraint(ck) + return ck + + def _metadata(self): + kw = {} + if 'target_metadata' in self.migration_context.opts: + mt = self.migration_context.opts['target_metadata'] + if hasattr(mt, 'naming_convention'): + kw['naming_convention'] = mt.naming_convention + return sa_schema.MetaData(**kw) + + def _table(self, name, *columns, **kw): + m = self._metadata() + t = sa_schema.Table(name, m, *columns, **kw) + for f in t.foreign_keys: + self._ensure_table_for_fk(m, f) + return t + + def _column(self, name, type_, **kw): + return sa_schema.Column(name, type_, **kw) + + def _index(self, name, tablename, columns, schema=None, **kw): + t = sa_schema.Table(tablename or 'no_table', self._metadata(), + *[sa_schema.Column(n, NULLTYPE) for n in columns], + schema=schema + ) + return sa_schema.Index(name, *[t.c[n] for n in columns], **kw) + + def _parse_table_key(self, table_key): + if '.' in table_key: + tokens = table_key.split('.') + sname = ".".join(tokens[0:-1]) + tname = tokens[-1] + else: + tname = table_key + sname = None + return (sname, tname) + + def _ensure_table_for_fk(self, metadata, fk): + """create a placeholder Table object for the referent of a + ForeignKey. + + """ + if isinstance(fk._colspec, string_types): + table_key, cname = fk._colspec.rsplit('.', 1) + sname, tname = self._parse_table_key(table_key) + if table_key not in metadata.tables: + rel_t = sa_schema.Table(tname, metadata, schema=sname) + else: + rel_t = metadata.tables[table_key] + if cname not in rel_t.c: + rel_t.append_column(sa_schema.Column(cname, NULLTYPE)) + + def get_context(self): + """Return the :class:`.MigrationContext` object that's + currently in use. 
+ + """ + + return self.migration_context + + def rename_table(self, old_table_name, new_table_name, schema=None): + """Emit an ALTER TABLE to rename a table. + + :param old_table_name: old name. + :param new_table_name: new name. + :param schema: Optional schema name to operate within. + + """ + self.impl.rename_table( + old_table_name, + new_table_name, + schema=schema + ) + + @util._with_legacy_names([('name', 'new_column_name')]) + def alter_column(self, table_name, column_name, + nullable=None, + server_default=False, + new_column_name=None, + type_=None, + autoincrement=None, + existing_type=None, + existing_server_default=False, + existing_nullable=None, + existing_autoincrement=None, + schema=None + ): + """Issue an "alter column" instruction using the + current migration context. + + Generally, only that aspect of the column which + is being changed, i.e. name, type, nullability, + default, needs to be specified. Multiple changes + can also be specified at once and the backend should + "do the right thing", emitting each change either + separately or together as the backend allows. + + MySQL has special requirements here, since MySQL + cannot ALTER a column without a full specification. + When producing MySQL-compatible migration files, + it is recommended that the ``existing_type``, + ``existing_server_default``, and ``existing_nullable`` + parameters be present, if not being altered. + + Type changes which are against the SQLAlchemy + "schema" types :class:`~sqlalchemy.types.Boolean` + and :class:`~sqlalchemy.types.Enum` may also + add or drop constraints which accompany those + types on backends that don't support them natively. + The ``existing_server_default`` argument is + used in this case as well to remove a previous + constraint. + + :param table_name: string name of the target table. + :param column_name: string name of the target column, + as it exists before the operation begins. 
+ :param nullable: Optional; specify ``True`` or ``False`` + to alter the column's nullability. + :param server_default: Optional; specify a string + SQL expression, :func:`~sqlalchemy.sql.expression.text`, + or :class:`~sqlalchemy.schema.DefaultClause` to indicate + an alteration to the column's default value. + Set to ``None`` to have the default removed. + :param new_column_name: Optional; specify a string name here to + indicate the new name within a column rename operation. + + .. versionchanged:: 0.5.0 + The ``name`` parameter is now named ``new_column_name``. + The old name will continue to function for backwards + compatibility. + + :param ``type_``: Optional; a :class:`~sqlalchemy.types.TypeEngine` + type object to specify a change to the column's type. + For SQLAlchemy types that also indicate a constraint (i.e. + :class:`~sqlalchemy.types.Boolean`, :class:`~sqlalchemy.types.Enum`), + the constraint is also generated. + :param autoincrement: set the ``AUTO_INCREMENT`` flag of the column; + currently understood by the MySQL dialect. + :param existing_type: Optional; a + :class:`~sqlalchemy.types.TypeEngine` + type object to specify the previous type. This + is required for all MySQL column alter operations that + don't otherwise specify a new type, as well as for + when nullability is being changed on a SQL Server + column. It is also used if the type is a so-called + SQLlchemy "schema" type which may define a constraint (i.e. + :class:`~sqlalchemy.types.Boolean`, + :class:`~sqlalchemy.types.Enum`), + so that the constraint can be dropped. + :param existing_server_default: Optional; The existing + default value of the column. Required on MySQL if + an existing default is not being changed; else MySQL + removes the default. + :param existing_nullable: Optional; the existing nullability + of the column. Required on MySQL if the existing nullability + is not being changed; else MySQL sets this to NULL. 
+ :param existing_autoincrement: Optional; the existing autoincrement + of the column. Used for MySQL's system of altering a column + that specifies ``AUTO_INCREMENT``. + :param schema: Optional schema name to operate within. + + .. versionadded:: 0.4.0 + + """ + + compiler = self.impl.dialect.statement_compiler( + self.impl.dialect, + None + ) + def _count_constraint(constraint): + return not isinstance(constraint, sa_schema.PrimaryKeyConstraint) and \ + (not constraint._create_rule or + constraint._create_rule(compiler)) + + if existing_type and type_: + t = self._table(table_name, + sa_schema.Column(column_name, existing_type), + schema=schema + ) + for constraint in t.constraints: + if _count_constraint(constraint): + self.impl.drop_constraint(constraint) + + self.impl.alter_column(table_name, column_name, + nullable=nullable, + server_default=server_default, + name=new_column_name, + type_=type_, + schema=schema, + autoincrement=autoincrement, + existing_type=existing_type, + existing_server_default=existing_server_default, + existing_nullable=existing_nullable, + existing_autoincrement=existing_autoincrement + ) + + if type_: + t = self._table(table_name, + sa_schema.Column(column_name, type_), + schema=schema + ) + for constraint in t.constraints: + if _count_constraint(constraint): + self.impl.add_constraint(constraint) + + def f(self, name): + """Indicate a string name that has already had a naming convention + applied to it. + + This feature combines with the SQLAlchemy ``naming_convention`` feature + to disambiguate constraint names that have already had naming + conventions applied to them, versus those that have not. This is + necessary in the case that the ``"%(constraint_name)s"`` token + is used within a naming convention, so that it can be identified + that this particular name should remain fixed. 
+ + If the :meth:`.Operations.f` is used on a constraint, the naming + convention will not take effect:: + + op.add_column('t', 'x', Boolean(name=op.f('ck_bool_t_x'))) + + Above, the CHECK constraint generated will have the name ``ck_bool_t_x`` + regardless of whether or not a naming convention is in use. + + Alternatively, if a naming convention is in use, and 'f' is not used, + names will be converted along conventions. If the ``target_metadata`` + contains the naming convention + ``{"ck": "ck_bool_%(table_name)s_%(constraint_name)s"}``, then the + output of the following: + + op.add_column('t', 'x', Boolean(name='x')) + + will be:: + + CONSTRAINT ck_bool_t_x CHECK (x in (1, 0))) + + The function is rendered in the output of autogenerate when + a particular constraint name is already converted, for SQLAlchemy + version **0.9.4 and greater only**. Even though ``naming_convention`` + was introduced in 0.9.2, the string disambiguation service is new + as of 0.9.4. + + .. versionadded:: 0.6.4 + + """ + if conv: + return conv(name) + else: + raise NotImplementedError( + "op.f() feature requires SQLAlchemy 0.9.4 or greater.") + + def add_column(self, table_name, column, schema=None): + """Issue an "add column" instruction using the current + migration context. + + e.g.:: + + from alembic import op + from sqlalchemy import Column, String + + op.add_column('organization', + Column('name', String()) + ) + + The provided :class:`~sqlalchemy.schema.Column` object can also + specify a :class:`~sqlalchemy.schema.ForeignKey`, referencing + a remote table name. 
Alembic will automatically generate a stub + "referenced" table and emit a second ALTER statement in order + to add the constraint separately:: + + from alembic import op + from sqlalchemy import Column, INTEGER, ForeignKey + + op.add_column('organization', + Column('account_id', INTEGER, ForeignKey('accounts.id')) + ) + + Note that this statement uses the :class:`~sqlalchemy.schema.Column` + construct as is from the SQLAlchemy library. In particular, + default values to be created on the database side are + specified using the ``server_default`` parameter, and not + ``default`` which only specifies Python-side defaults:: + + from alembic import op + from sqlalchemy import Column, TIMESTAMP, func + + # specify "DEFAULT NOW" along with the column add + op.add_column('account', + Column('timestamp', TIMESTAMP, server_default=func.now()) + ) + + :param table_name: String name of the parent table. + :param column: a :class:`sqlalchemy.schema.Column` object + representing the new column. + :param schema: Optional schema name to operate within. + + .. versionadded:: 0.4.0 + + """ + + t = self._table(table_name, column, schema=schema) + self.impl.add_column( + table_name, + column, + schema=schema + ) + for constraint in t.constraints: + if not isinstance(constraint, sa_schema.PrimaryKeyConstraint): + self.impl.add_constraint(constraint) + + def drop_column(self, table_name, column_name, **kw): + """Issue a "drop column" instruction using the current + migration context. + + e.g.:: + + drop_column('organization', 'account_id') + + :param table_name: name of table + :param column_name: name of column + :param schema: Optional schema name to operate within. + + .. versionadded:: 0.4.0 + + :param mssql_drop_check: Optional boolean. 
When ``True``, on + Microsoft SQL Server only, first + drop the CHECK constraint on the column using a + SQL-script-compatible + block that selects into a @variable from sys.check_constraints, + then exec's a separate DROP CONSTRAINT for that constraint. + :param mssql_drop_default: Optional boolean. When ``True``, on + Microsoft SQL Server only, first + drop the DEFAULT constraint on the column using a + SQL-script-compatible + block that selects into a @variable from sys.default_constraints, + then exec's a separate DROP CONSTRAINT for that default. + :param mssql_drop_foreign_key: Optional boolean. When ``True``, on + Microsoft SQL Server only, first + drop a single FOREIGN KEY constraint on the column using a + SQL-script-compatible + block that selects into a @variable from + sys.foreign_keys/sys.foreign_key_columns, + then exec's a separate DROP CONSTRAINT for that default. Only + works if the column has exactly one FK constraint which refers to + it, at the moment. + + .. versionadded:: 0.6.2 + + """ + + self.impl.drop_column( + table_name, + self._column(column_name, NULLTYPE), + **kw + ) + + + def create_primary_key(self, name, table_name, cols, schema=None): + """Issue a "create primary key" instruction using the current + migration context. + + e.g.:: + + from alembic import op + op.create_primary_key( + "pk_my_table", "my_table", + ["id", "version"] + ) + + This internally generates a :class:`~sqlalchemy.schema.Table` object + containing the necessary columns, then generates a new + :class:`~sqlalchemy.schema.PrimaryKeyConstraint` + object which it then associates with the :class:`~sqlalchemy.schema.Table`. + Any event listeners associated with this action will be fired + off normally. The :class:`~sqlalchemy.schema.AddConstraint` + construct is ultimately used to generate the ALTER statement. + + .. versionadded:: 0.5.0 + + :param name: Name of the primary key constraint. The name is necessary + so that an ALTER statement can be emitted. 
For setups that + use an automated naming scheme such as that described at + `NamingConventions `_, + ``name`` here can be ``None``, as the event listener will + apply the name to the constraint object when it is associated + with the table. + :param table_name: String name of the target table. + :param cols: a list of string column names to be applied to the + primary key constraint. + :param schema: Optional schema name of the table. + + """ + self.impl.add_constraint( + self._primary_key_constraint(name, table_name, cols, + schema) + ) + + + def create_foreign_key(self, name, source, referent, local_cols, + remote_cols, onupdate=None, ondelete=None, + deferrable=None, initially=None, match=None, + source_schema=None, referent_schema=None, + **dialect_kw): + """Issue a "create foreign key" instruction using the + current migration context. + + e.g.:: + + from alembic import op + op.create_foreign_key( + "fk_user_address", "address", + "user", ["user_id"], ["id"]) + + This internally generates a :class:`~sqlalchemy.schema.Table` object + containing the necessary columns, then generates a new + :class:`~sqlalchemy.schema.ForeignKeyConstraint` + object which it then associates with the :class:`~sqlalchemy.schema.Table`. + Any event listeners associated with this action will be fired + off normally. The :class:`~sqlalchemy.schema.AddConstraint` + construct is ultimately used to generate the ALTER statement. + + :param name: Name of the foreign key constraint. The name is necessary + so that an ALTER statement can be emitted. For setups that + use an automated naming scheme such as that described at + `NamingConventions `_, + ``name`` here can be ``None``, as the event listener will + apply the name to the constraint object when it is associated + with the table. + :param source: String name of the source table. + :param referent: String name of the destination table. + :param local_cols: a list of string column names in the + source table. 
+ :param remote_cols: a list of string column names in the + remote table. + :param onupdate: Optional string. If set, emit ON UPDATE when + issuing DDL for this constraint. Typical values include CASCADE, + DELETE and RESTRICT. + :param ondelete: Optional string. If set, emit ON DELETE when + issuing DDL for this constraint. Typical values include CASCADE, + DELETE and RESTRICT. + :param deferrable: optional bool. If set, emit DEFERRABLE or NOT + DEFERRABLE when issuing DDL for this constraint. + :param source_schema: Optional schema name of the source table. + :param referent_schema: Optional schema name of the destination table. + + """ + + self.impl.add_constraint( + self._foreign_key_constraint(name, source, referent, + local_cols, remote_cols, + onupdate=onupdate, ondelete=ondelete, + deferrable=deferrable, source_schema=source_schema, + referent_schema=referent_schema, + initially=initially, match=match, **dialect_kw) + ) + + def create_unique_constraint(self, name, source, local_cols, + schema=None, **kw): + """Issue a "create unique constraint" instruction using the + current migration context. + + e.g.:: + + from alembic import op + op.create_unique_constraint("uq_user_name", "user", ["name"]) + + This internally generates a :class:`~sqlalchemy.schema.Table` object + containing the necessary columns, then generates a new + :class:`~sqlalchemy.schema.UniqueConstraint` + object which it then associates with the :class:`~sqlalchemy.schema.Table`. + Any event listeners associated with this action will be fired + off normally. The :class:`~sqlalchemy.schema.AddConstraint` + construct is ultimately used to generate the ALTER statement. + + :param name: Name of the unique constraint. The name is necessary + so that an ALTER statement can be emitted. 
For setups that + use an automated naming scheme such as that described at + `NamingConventions `_, + ``name`` here can be ``None``, as the event listener will + apply the name to the constraint object when it is associated + with the table. + :param source: String name of the source table. Dotted schema names are + supported. + :param local_cols: a list of string column names in the + source table. + :param deferrable: optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when + issuing DDL for this constraint. + :param initially: optional string. If set, emit INITIALLY when issuing DDL + for this constraint. + :param schema: Optional schema name to operate within. + + .. versionadded:: 0.4.0 + + """ + + self.impl.add_constraint( + self._unique_constraint(name, source, local_cols, + schema=schema, **kw) + ) + + def create_check_constraint(self, name, source, condition, + schema=None, **kw): + """Issue a "create check constraint" instruction using the + current migration context. + + e.g.:: + + from alembic import op + from sqlalchemy.sql import column, func + + op.create_check_constraint( + "ck_user_name_len", + "user", + func.len(column('name')) > 5 + ) + + CHECK constraints are usually against a SQL expression, so ad-hoc + table metadata is usually needed. The function will convert the given + arguments into a :class:`sqlalchemy.schema.CheckConstraint` bound + to an anonymous table in order to emit the CREATE statement. + + :param name: Name of the check constraint. The name is necessary + so that an ALTER statement can be emitted. For setups that + use an automated naming scheme such as that described at + `NamingConventions `_, + ``name`` here can be ``None``, as the event listener will + apply the name to the constraint object when it is associated + with the table. + :param source: String name of the source table. + :param condition: SQL expression that's the condition of the constraint. + Can be a string or SQLAlchemy expression language structure. 
+ :param deferrable: optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when + issuing DDL for this constraint. + :param initially: optional string. If set, emit INITIALLY when issuing DDL + for this constraint. + :param schema: Optional schema name to operate within. + + ..versionadded:: 0.4.0 + + """ + self.impl.add_constraint( + self._check_constraint(name, source, condition, schema=schema, **kw) + ) + + def create_table(self, name, *columns, **kw): + """Issue a "create table" instruction using the current migration context. + + This directive receives an argument list similar to that of the + traditional :class:`sqlalchemy.schema.Table` construct, but without the + metadata:: + + from sqlalchemy import INTEGER, VARCHAR, NVARCHAR, Column + from alembic import op + + op.create_table( + 'account', + Column('id', INTEGER, primary_key=True), + Column('name', VARCHAR(50), nullable=False), + Column('description', NVARCHAR(200)) + Column('timestamp', TIMESTAMP, server_default=func.now()) + ) + + Note that :meth:`.create_table` accepts :class:`~sqlalchemy.schema.Column` + constructs directly from the SQLAlchemy library. In particular, + default values to be created on the database side are + specified using the ``server_default`` parameter, and not + ``default`` which only specifies Python-side defaults:: + + from alembic import op + from sqlalchemy import Column, TIMESTAMP, func + + # specify "DEFAULT NOW" along with the "timestamp" column + op.create_table('account', + Column('id', INTEGER, primary_key=True), + Column('timestamp', TIMESTAMP, server_default=func.now()) + ) + + :param name: Name of the table + :param \*columns: collection of :class:`~sqlalchemy.schema.Column` + objects within + the table, as well as optional :class:`~sqlalchemy.schema.Constraint` + objects + and :class:`~.sqlalchemy.schema.Index` objects. + :param schema: Optional schema name to operate within. 
+ :param \**kw: Other keyword arguments are passed to the underlying + :class:`sqlalchemy.schema.Table` object created for the command. + + """ + self.impl.create_table( + self._table(name, *columns, **kw) + ) + + def drop_table(self, name, **kw): + """Issue a "drop table" instruction using the current + migration context. + + + e.g.:: + + drop_table("accounts") + + :param name: Name of the table + :param schema: Optional schema name to operate within. + + .. versionadded:: 0.4.0 + + :param \**kw: Other keyword arguments are passed to the underlying + :class:`sqlalchemy.schema.Table` object created for the command. + + """ + self.impl.drop_table( + self._table(name, **kw) + ) + + def create_index(self, name, table_name, columns, schema=None, **kw): + """Issue a "create index" instruction using the current + migration context. + + e.g.:: + + from alembic import op + op.create_index('ik_test', 't1', ['foo', 'bar']) + + :param name: name of the index. + :param table_name: name of the owning table. + + .. versionchanged:: 0.5.0 + The ``tablename`` parameter is now named ``table_name``. + As this is a positional argument, the old name is no + longer present. + + :param columns: a list of string column names in the + table. + :param schema: Optional schema name to operate within. + + .. versionadded:: 0.4.0 + + """ + + self.impl.create_index( + self._index(name, table_name, columns, schema=schema, **kw) + ) + + @util._with_legacy_names([('tablename', 'table_name')]) + def drop_index(self, name, table_name=None, schema=None): + """Issue a "drop index" instruction using the current + migration context. + + e.g.:: + + drop_index("accounts") + + :param name: name of the index. + :param table_name: name of the owning table. Some + backends such as Microsoft SQL Server require this. + + .. versionchanged:: 0.5.0 + The ``tablename`` parameter is now named ``table_name``. + The old name will continue to function for backwards + compatibility. 
+ + :param schema: Optional schema name to operate within. + + .. versionadded:: 0.4.0 + + """ + # need a dummy column name here since SQLAlchemy + # 0.7.6 and further raises on Index with no columns + self.impl.drop_index( + self._index(name, table_name, ['x'], schema=schema) + ) + + @util._with_legacy_names([("type", "type_")]) + def drop_constraint(self, name, table_name, type_=None, schema=None): + """Drop a constraint of the given name, typically via DROP CONSTRAINT. + + :param name: name of the constraint. + :param table_name: table name. + + .. versionchanged:: 0.5.0 + The ``tablename`` parameter is now named ``table_name``. + As this is a positional argument, the old name is no + longer present. + + :param ``type_``: optional, required on MySQL. can be + 'foreignkey', 'primary', 'unique', or 'check'. + + .. versionchanged:: 0.5.0 + The ``type`` parameter is now named ``type_``. The old name + ``type`` will remain for backwards compatibility. + + .. versionadded:: 0.3.6 'primary' qualfier to enable + dropping of MySQL primary key constraints. + + :param schema: Optional schema name to operate within. + + .. versionadded:: 0.4.0 + + """ + + t = self._table(table_name, schema=schema) + types = { + 'foreignkey': lambda name: sa_schema.ForeignKeyConstraint( + [], [], name=name), + 'primary': sa_schema.PrimaryKeyConstraint, + 'unique': sa_schema.UniqueConstraint, + 'check': lambda name: sa_schema.CheckConstraint("", name=name), + None: sa_schema.Constraint + } + try: + const = types[type_] + except KeyError: + raise TypeError("'type' can be one of %s" % + ", ".join(sorted(repr(x) for x in types))) + + const = const(name=name) + t.append_constraint(const) + self.impl.drop_constraint(const) + + def bulk_insert(self, table, rows, multiinsert=True): + """Issue a "bulk insert" operation using the current + migration context. 
+ + This provides a means of representing an INSERT of multiple rows + which works equally well in the context of executing on a live + connection as well as that of generating a SQL script. In the + case of a SQL script, the values are rendered inline into the + statement. + + e.g.:: + + from alembic import op + from datetime import date + from sqlalchemy.sql import table, column + from sqlalchemy import String, Integer, Date + + # Create an ad-hoc table to use for the insert statement. + accounts_table = table('account', + column('id', Integer), + column('name', String), + column('create_date', Date) + ) + + op.bulk_insert(accounts_table, + [ + {'id':1, 'name':'John Smith', + 'create_date':date(2010, 10, 5)}, + {'id':2, 'name':'Ed Williams', + 'create_date':date(2007, 5, 27)}, + {'id':3, 'name':'Wendy Jones', + 'create_date':date(2008, 8, 15)}, + ] + ) + + When using --sql mode, some datatypes may not render inline automatically, + such as dates and other special types. When this issue is present, + :meth:`.Operations.inline_literal` may be used:: + + op.bulk_insert(accounts_table, + [ + {'id':1, 'name':'John Smith', + 'create_date':op.inline_literal("2010-10-05")}, + {'id':2, 'name':'Ed Williams', + 'create_date':op.inline_literal("2007-05-27")}, + {'id':3, 'name':'Wendy Jones', + 'create_date':op.inline_literal("2008-08-15")}, + ], + multiinsert=False + ) + + When using :meth:`.Operations.inline_literal` in conjunction with + :meth:`.Operations.bulk_insert`, in order for the statement to work + in "online" (e.g. non --sql) mode, the + :paramref:`~.Operations.bulk_insert.multiinsert` + flag should be set to ``False``, which will have the effect of + individual INSERT statements being emitted to the database, each + with a distinct VALUES clause, so that the "inline" values can + still be rendered, rather than attempting to pass the values + as bound parameters. + + .. 
versionadded:: 0.6.4 :meth:`.Operations.inline_literal` can now + be used with :meth:`.Operations.bulk_insert`, and the + :paramref:`~.Operations.bulk_insert.multiinsert` flag has + been added to assist in this usage when running in "online" + mode. + + :param table: a table object which represents the target of the INSERT. + + :param rows: a list of dictionaries indicating rows. + + :param multiinsert: when at its default of True and --sql mode is not + enabled, the INSERT statement will be executed using + "executemany()" style, where all elements in the list of dictionaries + are passed as bound parameters in a single list. Setting this + to False results in individual INSERT statements being emitted + per parameter set, and is needed in those cases where non-literal + values are present in the parameter sets. + + .. versionadded:: 0.6.4 + + """ + self.impl.bulk_insert(table, rows, multiinsert=multiinsert) + + def inline_literal(self, value, type_=None): + """Produce an 'inline literal' expression, suitable for + using in an INSERT, UPDATE, or DELETE statement. + + When using Alembic in "offline" mode, CRUD operations + aren't compatible with SQLAlchemy's default behavior surrounding + literal values, + which is that they are converted into bound values and passed + separately into the ``execute()`` method of the DBAPI cursor. + An offline SQL + script needs to have these rendered inline. While it should + always be noted that inline literal values are an **enormous** + security hole in an application that handles untrusted input, + a schema migration is not run in this context, so + literals are safe to render inline, with the caveat that + advanced types like dates may not be supported directly + by SQLAlchemy. + + See :meth:`.execute` for an example usage of + :meth:`.inline_literal`. + + :param value: The value to render. Strings, integers, and simple + numerics should be supported. Other types like boolean, + dates, etc. 
may or may not be supported yet by various + backends. + :param ``type_``: optional - a :class:`sqlalchemy.types.TypeEngine` + subclass stating the type of this value. In SQLAlchemy + expressions, this is usually derived automatically + from the Python type of the value itself, as well as + based on the context in which the value is used. + + """ + return impl._literal_bindparam(None, value, type_=type_) + + def execute(self, sql, execution_options=None): + """Execute the given SQL using the current migration context. + + In a SQL script context, the statement is emitted directly to the + output stream. There is *no* return result, however, as this + function is oriented towards generating a change script + that can run in "offline" mode. For full interaction + with a connected database, use the "bind" available + from the context:: + + from alembic import op + connection = op.get_bind() + + Also note that any parameterized statement here *will not work* + in offline mode - INSERT, UPDATE and DELETE statements which refer + to literal values would need to render + inline expressions. For simple use cases, the + :meth:`.inline_literal` function can be used for **rudimentary** + quoting of string values. For "bulk" inserts, consider using + :meth:`.bulk_insert`. + + For example, to emit an UPDATE statement which is equally + compatible with both online and offline mode:: + + from sqlalchemy.sql import table, column + from sqlalchemy import String + from alembic import op + + account = table('account', + column('name', String) + ) + op.execute( + account.update().\\ + where(account.c.name==op.inline_literal('account 1')).\\ + values({'name':op.inline_literal('account 2')}) + ) + + Note above we also used the SQLAlchemy + :func:`sqlalchemy.sql.expression.table` + and :func:`sqlalchemy.sql.expression.column` constructs to make a brief, + ad-hoc table construct just for our UPDATE statement. 
A full + :class:`~sqlalchemy.schema.Table` construct of course works perfectly + fine as well, though note it's a recommended practice to at least ensure + the definition of a table is self-contained within the migration script, + rather than imported from a module that may break compatibility with + older migrations. + + :param sql: Any legal SQLAlchemy expression, including: + + * a string + * a :func:`sqlalchemy.sql.expression.text` construct. + * a :func:`sqlalchemy.sql.expression.insert` construct. + * a :func:`sqlalchemy.sql.expression.update`, + :func:`sqlalchemy.sql.expression.insert`, + or :func:`sqlalchemy.sql.expression.delete` construct. + * Pretty much anything that's "executable" as described + in :ref:`sqlexpression_toplevel`. + + :param execution_options: Optional dictionary of + execution options, will be passed to + :meth:`sqlalchemy.engine.Connection.execution_options`. + """ + self.migration_context.impl.execute(sql, + execution_options=execution_options) + + def get_bind(self): + """Return the current 'bind'. + + Under normal circumstances, this is the + :class:`~sqlalchemy.engine.Connection` currently being used + to emit SQL to the database. + + In a SQL script context, this value is ``None``. [TODO: verify this] + + """ + return self.migration_context.impl.bind + diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/script.py b/Linux_i686/lib/python2.7/site-packages/alembic/script.py new file mode 100644 index 0000000..3294366 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic/script.py @@ -0,0 +1,513 @@ +import datetime +import os +import re +import shutil +from . 
import util + +_sourceless_rev_file = re.compile(r'(.*\.py)(c|o)?$') +_only_source_rev_file = re.compile(r'(.*\.py)$') +_legacy_rev = re.compile(r'([a-f0-9]+)\.py$') +_mod_def_re = re.compile(r'(upgrade|downgrade)_([a-z0-9]+)') +_slug_re = re.compile(r'\w+') +_default_file_template = "%(rev)s_%(slug)s" +_relative_destination = re.compile(r'(?:\+|-)\d+') + +class ScriptDirectory(object): + """Provides operations upon an Alembic script directory. + + This object is useful to get information as to current revisions, + most notably being able to get at the "head" revision, for schemes + that want to test if the current revision in the database is the most + recent:: + + from alembic.script import ScriptDirectory + from alembic.config import Config + config = Config() + config.set_main_option("script_location", "myapp:migrations") + script = ScriptDirectory.from_config(config) + + head_revision = script.get_current_head() + + + + """ + def __init__(self, dir, file_template=_default_file_template, + truncate_slug_length=40, + sourceless=False): + self.dir = dir + self.versions = os.path.join(self.dir, 'versions') + self.file_template = file_template + self.truncate_slug_length = truncate_slug_length or 40 + self.sourceless = sourceless + + if not os.access(dir, os.F_OK): + raise util.CommandError("Path doesn't exist: %r. Please use " + "the 'init' command to create a new " + "scripts folder." % dir) + + @classmethod + def from_config(cls, config): + """Produce a new :class:`.ScriptDirectory` given a :class:`.Config` + instance. + + The :class:`.Config` need only have the ``script_location`` key + present. 
+ + """ + script_location = config.get_main_option('script_location') + if script_location is None: + raise util.CommandError("No 'script_location' key " + "found in configuration.") + truncate_slug_length = config.get_main_option("truncate_slug_length") + if truncate_slug_length is not None: + truncate_slug_length = int(truncate_slug_length) + return ScriptDirectory( + util.coerce_resource_to_filename(script_location), + file_template=config.get_main_option( + 'file_template', + _default_file_template), + truncate_slug_length=truncate_slug_length, + sourceless=config.get_main_option("sourceless") == "true" + ) + + def walk_revisions(self, base="base", head="head"): + """Iterate through all revisions. + + This is actually a breadth-first tree traversal, + with leaf nodes being heads. + + """ + if head == "head": + heads = set(self.get_heads()) + else: + heads = set([head]) + while heads: + todo = set(heads) + heads = set() + for head in todo: + if head in heads: + break + for sc in self.iterate_revisions(head, base): + if sc.is_branch_point and sc.revision not in todo: + heads.add(sc.revision) + break + else: + yield sc + + def get_revision(self, id_): + """Return the :class:`.Script` instance with the given rev id.""" + + id_ = self.as_revision_number(id_) + try: + return self._revision_map[id_] + except KeyError: + # do a partial lookup + revs = [x for x in self._revision_map + if x is not None and x.startswith(id_)] + if not revs: + raise util.CommandError("No such revision '%s'" % id_) + elif len(revs) > 1: + raise util.CommandError( + "Multiple revisions start " + "with '%s', %s..." % ( + id_, + ", ".join("'%s'" % r for r in revs[0:3]) + )) + else: + return self._revision_map[revs[0]] + + _get_rev = get_revision + + def as_revision_number(self, id_): + """Convert a symbolic revision, i.e. 
'head' or 'base', into + an actual revision number.""" + + if id_ == 'head': + id_ = self.get_current_head() + elif id_ == 'base': + id_ = None + return id_ + + _as_rev_number = as_revision_number + + def iterate_revisions(self, upper, lower): + """Iterate through script revisions, starting at the given + upper revision identifier and ending at the lower. + + The traversal uses strictly the `down_revision` + marker inside each migration script, so + it is a requirement that upper >= lower, + else you'll get nothing back. + + The iterator yields :class:`.Script` objects. + + """ + if upper is not None and _relative_destination.match(upper): + relative = int(upper) + revs = list(self._iterate_revisions("head", lower)) + revs = revs[-relative:] + if len(revs) != abs(relative): + raise util.CommandError("Relative revision %s didn't " + "produce %d migrations" % (upper, abs(relative))) + return iter(revs) + elif lower is not None and _relative_destination.match(lower): + relative = int(lower) + revs = list(self._iterate_revisions(upper, "base")) + revs = revs[0:-relative] + if len(revs) != abs(relative): + raise util.CommandError("Relative revision %s didn't " + "produce %d migrations" % (lower, abs(relative))) + return iter(revs) + else: + return self._iterate_revisions(upper, lower) + + def _iterate_revisions(self, upper, lower): + lower = self.get_revision(lower) + upper = self.get_revision(upper) + orig = lower.revision if lower else 'base', \ + upper.revision if upper else 'base' + script = upper + while script != lower: + if script is None and lower is not None: + raise util.CommandError( + "Revision %s is not an ancestor of %s" % orig) + yield script + downrev = script.down_revision + script = self._revision_map[downrev] + + def _upgrade_revs(self, destination, current_rev): + revs = self.iterate_revisions(destination, current_rev) + return [ + (script.module.upgrade, script.down_revision, script.revision, + script.doc) + for script in reversed(list(revs)) + ] + 
+ def _downgrade_revs(self, destination, current_rev): + revs = self.iterate_revisions(current_rev, destination) + return [ + (script.module.downgrade, script.revision, script.down_revision, + script.doc) + for script in revs + ] + + def run_env(self): + """Run the script environment. + + This basically runs the ``env.py`` script present + in the migration environment. It is called exclusively + by the command functions in :mod:`alembic.command`. + + + """ + util.load_python_file(self.dir, 'env.py') + + @property + def env_py_location(self): + return os.path.abspath(os.path.join(self.dir, "env.py")) + + @util.memoized_property + def _revision_map(self): + map_ = {} + for file_ in os.listdir(self.versions): + script = Script._from_filename(self, self.versions, file_) + if script is None: + continue + if script.revision in map_: + util.warn("Revision %s is present more than once" % + script.revision) + map_[script.revision] = script + for rev in map_.values(): + if rev.down_revision is None: + continue + if rev.down_revision not in map_: + util.warn("Revision %s referenced from %s is not present" + % (rev.down_revision, rev)) + rev.down_revision = None + else: + map_[rev.down_revision].add_nextrev(rev.revision) + map_[None] = None + return map_ + + def _rev_path(self, rev_id, message, create_date): + slug = "_".join(_slug_re.findall(message or "")).lower() + if len(slug) > self.truncate_slug_length: + slug = slug[:self.truncate_slug_length].rsplit('_', 1)[0] + '_' + filename = "%s.py" % ( + self.file_template % { + 'rev': rev_id, + 'slug': slug, + 'year': create_date.year, + 'month': create_date.month, + 'day': create_date.day, + 'hour': create_date.hour, + 'minute': create_date.minute, + 'second': create_date.second + } + ) + return os.path.join(self.versions, filename) + + def get_current_head(self): + """Return the current head revision. + + If the script directory has multiple heads + due to branching, an error is raised. + + Returns a string revision number. 
+ + """ + current_heads = self.get_heads() + if len(current_heads) > 1: + raise util.CommandError('Only a single head is supported. The ' + 'script directory has multiple heads (due to branching), which ' + 'must be resolved by manually editing the revision files to ' + 'form a linear sequence. Run `alembic branches` to see the ' + 'divergence(s).') + + if current_heads: + return current_heads[0] + else: + return None + + _current_head = get_current_head + """the 0.2 name, for backwards compat.""" + + def get_heads(self): + """Return all "head" revisions as strings. + + Returns a list of string revision numbers. + + This is normally a list of length one, + unless branches are present. The + :meth:`.ScriptDirectory.get_current_head()` method + can be used normally when a script directory + has only one head. + + """ + heads = [] + for script in self._revision_map.values(): + if script and script.is_head: + heads.append(script.revision) + return heads + + def get_base(self): + """Return the "base" revision as a string. + + This is the revision number of the script that + has a ``down_revision`` of None. + + Behavior is not defined if more than one script + has a ``down_revision`` of None. + + """ + for script in self._revision_map.values(): + if script and script.down_revision is None \ + and script.revision in self._revision_map: + return script.revision + else: + return None + + def _generate_template(self, src, dest, **kw): + util.status("Generating %s" % os.path.abspath(dest), + util.template_to_file, + src, + dest, + **kw + ) + + def _copy_file(self, src, dest): + util.status("Generating %s" % os.path.abspath(dest), + shutil.copy, + src, dest) + + def generate_revision(self, revid, message, refresh=False, **kw): + """Generate a new revision file. + + This runs the ``script.py.mako`` template, given + template arguments, and creates a new file. + + :param revid: String revision id. Typically this + comes from ``alembic.util.rev_id()``. 
+ :param message: the revision message, the one passed + by the -m argument to the ``revision`` command. + :param refresh: when True, the in-memory state of this + :class:`.ScriptDirectory` will be updated with a new + :class:`.Script` instance representing the new revision; + the :class:`.Script` instance is returned. + If False, the file is created but the state of the + :class:`.ScriptDirectory` is unmodified; ``None`` + is returned. + + """ + current_head = self.get_current_head() + create_date = datetime.datetime.now() + path = self._rev_path(revid, message, create_date) + self._generate_template( + os.path.join(self.dir, "script.py.mako"), + path, + up_revision=str(revid), + down_revision=current_head, + create_date=create_date, + message=message if message is not None else ("empty message"), + **kw + ) + if refresh: + script = Script._from_path(self, path) + self._revision_map[script.revision] = script + if script.down_revision: + self._revision_map[script.down_revision].\ + add_nextrev(script.revision) + return script + else: + return None + + +class Script(object): + """Represent a single revision file in a ``versions/`` directory. + + The :class:`.Script` instance is returned by methods + such as :meth:`.ScriptDirectory.iterate_revisions`. 
+ + """ + + nextrev = frozenset() + + def __init__(self, module, rev_id, path): + self.module = module + self.revision = rev_id + self.path = path + self.down_revision = getattr(module, 'down_revision', None) + + revision = None + """The string revision number for this :class:`.Script` instance.""" + + module = None + """The Python module representing the actual script itself.""" + + path = None + """Filesystem path of the script.""" + + down_revision = None + """The ``down_revision`` identifier within the migration script.""" + + @property + def doc(self): + """Return the docstring given in the script.""" + + return re.split("\n\n", self.longdoc)[0] + + @property + def longdoc(self): + """Return the docstring given in the script.""" + + doc = self.module.__doc__ + if doc: + if hasattr(self.module, "_alembic_source_encoding"): + doc = doc.decode(self.module._alembic_source_encoding) + return doc.strip() + else: + return "" + + def add_nextrev(self, rev): + self.nextrev = self.nextrev.union([rev]) + + @property + def is_head(self): + """Return True if this :class:`.Script` is a 'head' revision. + + This is determined based on whether any other :class:`.Script` + within the :class:`.ScriptDirectory` refers to this + :class:`.Script`. Multiple heads can be present. + + """ + return not bool(self.nextrev) + + @property + def is_branch_point(self): + """Return True if this :class:`.Script` is a branch point. + + A branchpoint is defined as a :class:`.Script` which is referred + to by more than one succeeding :class:`.Script`, that is more + than one :class:`.Script` has a `down_revision` identifier pointing + here. 
+ + """ + return len(self.nextrev) > 1 + + @property + def log_entry(self): + return \ + "Rev: %s%s%s\n" \ + "Parent: %s\n" \ + "Path: %s\n" \ + "\n%s\n" % ( + self.revision, + " (head)" if self.is_head else "", + " (branchpoint)" if self.is_branch_point else "", + self.down_revision, + self.path, + "\n".join( + " %s" % para + for para in self.longdoc.splitlines() + ) + ) + + def __str__(self): + return "%s -> %s%s%s, %s" % ( + self.down_revision, + self.revision, + " (head)" if self.is_head else "", + " (branchpoint)" if self.is_branch_point else "", + self.doc) + + @classmethod + def _from_path(cls, scriptdir, path): + dir_, filename = os.path.split(path) + return cls._from_filename(scriptdir, dir_, filename) + + @classmethod + def _from_filename(cls, scriptdir, dir_, filename): + if scriptdir.sourceless: + py_match = _sourceless_rev_file.match(filename) + else: + py_match = _only_source_rev_file.match(filename) + + if not py_match: + return None + + py_filename = py_match.group(1) + + if scriptdir.sourceless: + is_c = py_match.group(2) == 'c' + is_o = py_match.group(2) == 'o' + else: + is_c = is_o = False + + if is_o or is_c: + py_exists = os.path.exists(os.path.join(dir_, py_filename)) + pyc_exists = os.path.exists(os.path.join(dir_, py_filename + "c")) + + # prefer .py over .pyc because we'd like to get the + # source encoding; prefer .pyc over .pyo because we'd like to + # have the docstrings which a -OO file would not have + if py_exists or is_o and pyc_exists: + return None + + module = util.load_python_file(dir_, filename) + + if not hasattr(module, "revision"): + # attempt to get the revision id from the script name, + # this for legacy only + m = _legacy_rev.match(filename) + if not m: + raise util.CommandError( + "Could not determine revision id from filename %s. " + "Be sure the 'revision' variable is " + "declared inside the script (please see 'Upgrading " + "from Alembic 0.1 to 0.2' in the documentation)." 
+ % filename) + else: + revision = m.group(1) + else: + revision = module.revision + return Script(module, revision, os.path.join(dir_, filename)) diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/templates/generic/README b/Linux_i686/lib/python2.7/site-packages/alembic/templates/generic/README new file mode 100644 index 0000000..98e4f9c --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic/templates/generic/README @@ -0,0 +1 @@ +Generic single-database configuration. \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/templates/generic/alembic.ini.mako b/Linux_i686/lib/python2.7/site-packages/alembic/templates/generic/alembic.ini.mako new file mode 100644 index 0000000..a738a24 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic/templates/generic/alembic.ini.mako @@ -0,0 +1,59 @@ +# A generic, single database configuration. + +[alembic] +# path to migration scripts +script_location = ${script_location} + +# template used to generate migration files +# file_template = %%(rev)s_%%(slug)s + +# max length of characters to apply to the +# "slug" field +#truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +sqlalchemy.url = driver://user:pass@localhost/dbname + + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = 
%(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/templates/generic/env.py b/Linux_i686/lib/python2.7/site-packages/alembic/templates/generic/env.py new file mode 100644 index 0000000..712b616 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic/templates/generic/env.py @@ -0,0 +1,71 @@ +from __future__ import with_statement +from alembic import context +from sqlalchemy import engine_from_config, pool +from logging.config import fileConfig + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +fileConfig(config.config_file_name) + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +target_metadata = None + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + +def run_migrations_offline(): + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option("sqlalchemy.url") + context.configure(url=url, target_metadata=target_metadata) + + with context.begin_transaction(): + context.run_migrations() + +def run_migrations_online(): + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. 
+ + """ + engine = engine_from_config( + config.get_section(config.config_ini_section), + prefix='sqlalchemy.', + poolclass=pool.NullPool) + + connection = engine.connect() + context.configure( + connection=connection, + target_metadata=target_metadata + ) + + try: + with context.begin_transaction(): + context.run_migrations() + finally: + connection.close() + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() + diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/templates/generic/script.py.mako b/Linux_i686/lib/python2.7/site-packages/alembic/templates/generic/script.py.mako new file mode 100644 index 0000000..9570201 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic/templates/generic/script.py.mako @@ -0,0 +1,22 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision} +Create Date: ${create_date} + +""" + +# revision identifiers, used by Alembic. +revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +def upgrade(): + ${upgrades if upgrades else "pass"} + + +def downgrade(): + ${downgrades if downgrades else "pass"} diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/templates/multidb/README b/Linux_i686/lib/python2.7/site-packages/alembic/templates/multidb/README new file mode 100644 index 0000000..5db219f --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic/templates/multidb/README @@ -0,0 +1 @@ +Rudimentary multi-database configuration. \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/templates/multidb/alembic.ini.mako b/Linux_i686/lib/python2.7/site-packages/alembic/templates/multidb/alembic.ini.mako new file mode 100644 index 0000000..132b246 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic/templates/multidb/alembic.ini.mako @@ -0,0 +1,65 @@ +# a multi-database configuration. 
+ +[alembic] +# path to migration scripts +script_location = ${script_location} + +# template used to generate migration files +# file_template = %%(rev)s_%%(slug)s + +# max length of characters to apply to the +# "slug" field +#truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +databases = engine1, engine2 + +[engine1] +sqlalchemy.url = driver://user:pass@localhost/dbname + +[engine2] +sqlalchemy.url = driver://user:pass@localhost/dbname2 + + +# Logging configuration +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/templates/multidb/env.py b/Linux_i686/lib/python2.7/site-packages/alembic/templates/multidb/env.py new file mode 100644 index 0000000..e3511de --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic/templates/multidb/env.py @@ -0,0 +1,130 @@ +from __future__ import with_statement +from alembic import context +from sqlalchemy import engine_from_config, pool +from logging.config import fileConfig +import logging +import re + +USE_TWOPHASE = False + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Interpret the config file for Python logging. +# This line sets up loggers basically. 
+fileConfig(config.config_file_name) +logger = logging.getLogger('alembic.env') + +# gather section names referring to different +# databases. These are named "engine1", "engine2" +# in the sample .ini file. +db_names = config.get_main_option('databases') + +# add your model's MetaData objects here +# for 'autogenerate' support. These must be set +# up to hold just those tables targeting a +# particular database. table.tometadata() may be +# helpful here in case a "copy" of +# a MetaData is needed. +# from myapp import mymodel +# target_metadata = { +# 'engine1':mymodel.metadata1, +# 'engine2':mymodel.metadata2 +#} +target_metadata = {} + +# other values from the config, defined by the needs of env.py, +# can be acquired: +# my_important_option = config.get_main_option("my_important_option") +# ... etc. + +def run_migrations_offline(): + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + # for the --sql use case, run migrations for each URL into + # individual files. + + engines = {} + for name in re.split(r',\s*', db_names): + engines[name] = rec = {} + rec['url'] = context.config.get_section_option(name, + "sqlalchemy.url") + + for name, rec in engines.items(): + logger.info("Migrating database %s" % name) + file_ = "%s.sql" % name + logger.info("Writing output to %s" % file_) + with open(file_, 'w') as buffer: + context.configure(url=rec['url'], output_buffer=buffer, + target_metadata=target_metadata.get(name)) + with context.begin_transaction(): + context.run_migrations(engine_name=name) + +def run_migrations_online(): + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. 
+ + """ + + # for the direct-to-DB use case, start a transaction on all + # engines, then run all migrations, then commit all transactions. + + engines = {} + for name in re.split(r',\s*', db_names): + engines[name] = rec = {} + rec['engine'] = engine_from_config( + context.config.get_section(name), + prefix='sqlalchemy.', + poolclass=pool.NullPool) + + for name, rec in engines.items(): + engine = rec['engine'] + rec['connection'] = conn = engine.connect() + + if USE_TWOPHASE: + rec['transaction'] = conn.begin_twophase() + else: + rec['transaction'] = conn.begin() + + try: + for name, rec in engines.items(): + logger.info("Migrating database %s" % name) + context.configure( + connection=rec['connection'], + upgrade_token="%s_upgrades" % name, + downgrade_token="%s_downgrades" % name, + target_metadata=target_metadata.get(name) + ) + context.run_migrations(engine_name=name) + + if USE_TWOPHASE: + for rec in engines.values(): + rec['transaction'].prepare() + + for rec in engines.values(): + rec['transaction'].commit() + except: + for rec in engines.values(): + rec['transaction'].rollback() + raise + finally: + for rec in engines.values(): + rec['connection'].close() + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/templates/multidb/script.py.mako b/Linux_i686/lib/python2.7/site-packages/alembic/templates/multidb/script.py.mako new file mode 100644 index 0000000..1e7f79a --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic/templates/multidb/script.py.mako @@ -0,0 +1,43 @@ +<%! +import re + +%>"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision} +Create Date: ${create_date} + +""" + +# revision identifiers, used by Alembic. 
+revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +def upgrade(engine_name): + eval("upgrade_%s" % engine_name)() + + +def downgrade(engine_name): + eval("downgrade_%s" % engine_name)() + +<% + db_names = config.get_main_option("databases") +%> + +## generate an "upgrade_() / downgrade_()" function +## for each database name in the ini file. + +% for db_name in re.split(r',\s*', db_names): + +def upgrade_${db_name}(): + ${context.get("%s_upgrades" % db_name, "pass")} + + +def downgrade_${db_name}(): + ${context.get("%s_downgrades" % db_name, "pass")} + +% endfor diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/templates/pylons/README b/Linux_i686/lib/python2.7/site-packages/alembic/templates/pylons/README new file mode 100644 index 0000000..ed3c28e --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic/templates/pylons/README @@ -0,0 +1 @@ +Configuration that reads from a Pylons project environment. \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/templates/pylons/alembic.ini.mako b/Linux_i686/lib/python2.7/site-packages/alembic/templates/pylons/alembic.ini.mako new file mode 100644 index 0000000..771c027 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic/templates/pylons/alembic.ini.mako @@ -0,0 +1,25 @@ +# a Pylons configuration. 
+ +[alembic] +# path to migration scripts +script_location = ${script_location} + +# template used to generate migration files +# file_template = %%(rev)s_%%(slug)s + +# max length of characters to apply to the +# "slug" field +#truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .pyc and .pyo files without +# a source .py file to be detected as revisions in the +# versions/ directory +# sourceless = false + +pylons_config_file = ./development.ini + +# that's it ! \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/templates/pylons/env.py b/Linux_i686/lib/python2.7/site-packages/alembic/templates/pylons/env.py new file mode 100644 index 0000000..36c3fca --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic/templates/pylons/env.py @@ -0,0 +1,86 @@ +"""Pylons bootstrap environment. + +Place 'pylons_config_file' into alembic.ini, and the application will +be loaded from there. + +""" +from alembic import context +from paste.deploy import loadapp +from logging.config import fileConfig +from sqlalchemy.engine.base import Engine + + +try: + # if pylons app already in, don't create a new app + from pylons import config as pylons_config + pylons_config['__file__'] +except: + config = context.config + # can use config['__file__'] here, i.e. the Pylons + # ini file, instead of alembic.ini + config_file = config.get_main_option('pylons_config_file') + fileConfig(config_file) + wsgi_app = loadapp('config:%s' % config_file, relative_to='.') + + +# customize this section for non-standard engine configurations. 
+meta = __import__("%s.model.meta" % wsgi_app.config['pylons.package']).model.meta + +# add your model's MetaData object here +# for 'autogenerate' support +# from myapp import mymodel +# target_metadata = mymodel.Base.metadata +target_metadata = None + + +def run_migrations_offline(): + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + context.configure( + url=meta.engine.url, target_metadata=target_metadata) + with context.begin_transaction(): + context.run_migrations() + + +def run_migrations_online(): + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. + + """ + # specify here how the engine is acquired + # engine = meta.engine + raise NotImplementedError("Please specify engine connectivity here") + + if isinstance(engine, Engine): + connection = engine.connect() + else: + raise Exception( + 'Expected engine instance got %s instead' % type(engine) + ) + + context.configure( + connection=connection, + target_metadata=target_metadata + ) + + try: + with context.begin_transaction(): + context.run_migrations() + finally: + connection.close() + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/templates/pylons/script.py.mako b/Linux_i686/lib/python2.7/site-packages/alembic/templates/pylons/script.py.mako new file mode 100644 index 0000000..9570201 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic/templates/pylons/script.py.mako @@ -0,0 +1,22 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision} +Create Date: ${create_date} + +""" + +# revision identifiers, used by Alembic. 
+revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +def upgrade(): + ${upgrades if upgrades else "pass"} + + +def downgrade(): + ${downgrades if downgrades else "pass"} diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/util.py b/Linux_i686/lib/python2.7/site-packages/alembic/util.py new file mode 100644 index 0000000..63e9269 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/alembic/util.py @@ -0,0 +1,348 @@ +import sys +import os +import textwrap +import warnings +import re +import inspect +import uuid + +from mako.template import Template +from sqlalchemy.engine import url +from sqlalchemy import __version__ + +from .compat import callable, exec_, load_module_py, load_module_pyc, binary_type + +class CommandError(Exception): + pass + +def _safe_int(value): + try: + return int(value) + except: + return value +_vers = tuple([_safe_int(x) for x in re.findall(r'(\d+|[abc]\d)', __version__)]) +sqla_07 = _vers > (0, 7, 2) +sqla_08 = _vers >= (0, 8, 0, 'b2') +sqla_09 = _vers >= (0, 9, 0) +sqla_092 = _vers >= (0, 9, 2) +sqla_094 = _vers >= (0, 9, 4) +if not sqla_07: + raise CommandError( + "SQLAlchemy 0.7.3 or greater is required. 
") + +from sqlalchemy.util import format_argspec_plus, update_wrapper +from sqlalchemy.util.compat import inspect_getfullargspec + + +try: + import fcntl + import termios + import struct + ioctl = fcntl.ioctl(0, termios.TIOCGWINSZ, + struct.pack('HHHH', 0, 0, 0, 0)) + _h, TERMWIDTH, _hp, _wp = struct.unpack('HHHH', ioctl) + if TERMWIDTH <= 0: # can occur if running in emacs pseudo-tty + TERMWIDTH = None +except (ImportError, IOError): + TERMWIDTH = None + + +def template_to_file(template_file, dest, **kw): + with open(dest, 'w') as f: + f.write( + Template(filename=template_file).render(**kw) + ) + +def create_module_class_proxy(cls, globals_, locals_): + """Create module level proxy functions for the + methods on a given class. + + The functions will have a compatible signature + as the methods. A proxy is established + using the ``_install_proxy(obj)`` function, + and removed using ``_remove_proxy()``, both + installed by calling this function. + + """ + attr_names = set() + + def _install_proxy(obj): + globals_['_proxy'] = obj + for name in attr_names: + globals_[name] = getattr(obj, name) + + def _remove_proxy(): + globals_['_proxy'] = None + for name in attr_names: + del globals_[name] + + globals_['_install_proxy'] = _install_proxy + globals_['_remove_proxy'] = _remove_proxy + + def _create_op_proxy(name): + fn = getattr(cls, name) + spec = inspect.getargspec(fn) + if spec[0] and spec[0][0] == 'self': + spec[0].pop(0) + args = inspect.formatargspec(*spec) + num_defaults = 0 + if spec[3]: + num_defaults += len(spec[3]) + name_args = spec[0] + if num_defaults: + defaulted_vals = name_args[0 - num_defaults:] + else: + defaulted_vals = () + + apply_kw = inspect.formatargspec( + name_args, spec[1], spec[2], + defaulted_vals, + formatvalue=lambda x: '=' + x) + + def _name_error(name): + raise NameError( + "Can't invoke function '%s', as the proxy object has "\ + "not yet been " + "established for the Alembic '%s' class. 
" + "Try placing this code inside a callable." % ( + name, cls.__name__ + )) + globals_['_name_error'] = _name_error + + func_text = textwrap.dedent("""\ + def %(name)s(%(args)s): + %(doc)r + try: + p = _proxy + except NameError: + _name_error('%(name)s') + return _proxy.%(name)s(%(apply_kw)s) + e + """ % { + 'name': name, + 'args': args[1:-1], + 'apply_kw': apply_kw[1:-1], + 'doc': fn.__doc__, + }) + lcl = {} + exec_(func_text, globals_, lcl) + return lcl[name] + + for methname in dir(cls): + if not methname.startswith('_'): + if callable(getattr(cls, methname)): + locals_[methname] = _create_op_proxy(methname) + else: + attr_names.add(methname) + +def write_outstream(stream, *text): + encoding = getattr(stream, 'encoding', 'ascii') or 'ascii' + for t in text: + if not isinstance(t, binary_type): + t = t.encode(encoding, 'replace') + t = t.decode(encoding) + try: + stream.write(t) + except IOError: + # suppress "broken pipe" errors. + # no known way to handle this on Python 3 however + # as the exception is "ignored" (noisily) in TextIOWrapper. + break + +def coerce_resource_to_filename(fname): + """Interpret a filename as either a filesystem location or as a package resource. + + Names that are non absolute paths and contain a colon + are interpreted as resources and coerced to a file location. 
+ + """ + if not os.path.isabs(fname) and ":" in fname: + import pkg_resources + fname = pkg_resources.resource_filename(*fname.split(':')) + return fname + +def status(_statmsg, fn, *arg, **kw): + msg(_statmsg + " ...", False) + try: + ret = fn(*arg, **kw) + write_outstream(sys.stdout, " done\n") + return ret + except: + write_outstream(sys.stdout, " FAILED\n") + raise + +def err(message): + msg(message) + sys.exit(-1) + +def obfuscate_url_pw(u): + u = url.make_url(u) + if u.password: + u.password = 'XXXXX' + return str(u) + +def asbool(value): + return value is not None and \ + value.lower() == 'true' + +def warn(msg): + warnings.warn(msg) + +def msg(msg, newline=True): + if TERMWIDTH is None: + write_outstream(sys.stdout, msg) + if newline: + write_outstream(sys.stdout, "\n") + else: + # left indent output lines + lines = textwrap.wrap(msg, TERMWIDTH) + if len(lines) > 1: + for line in lines[0:-1]: + write_outstream(sys.stdout, " ", line, "\n") + write_outstream(sys.stdout, " ", lines[-1], ("\n" if newline else "")) + +def load_python_file(dir_, filename): + """Load a file from the given path as a Python module.""" + + module_id = re.sub(r'\W', "_", filename) + path = os.path.join(dir_, filename) + _, ext = os.path.splitext(filename) + if ext == ".py": + if os.path.exists(path): + module = load_module_py(module_id, path) + elif os.path.exists(simple_pyc_file_from_path(path)): + # look for sourceless load + module = load_module_pyc(module_id, simple_pyc_file_from_path(path)) + else: + raise ImportError("Can't find Python file %s" % path) + elif ext in (".pyc", ".pyo"): + module = load_module_pyc(module_id, path) + del sys.modules[module_id] + return module + +def simple_pyc_file_from_path(path): + """Given a python source path, return the so-called + "sourceless" .pyc or .pyo path. + + This just a .pyc or .pyo file where the .py file would be. 
+ + Even with PEP-3147, which normally puts .pyc/.pyo files in __pycache__, + this use case remains supported as a so-called "sourceless module import". + + """ + if sys.flags.optimize: + return path + "o" # e.g. .pyo + else: + return path + "c" # e.g. .pyc + +def pyc_file_from_path(path): + """Given a python source path, locate the .pyc. + + See http://www.python.org/dev/peps/pep-3147/ + #detecting-pep-3147-availability + http://www.python.org/dev/peps/pep-3147/#file-extension-checks + + """ + import imp + has3147 = hasattr(imp, 'get_tag') + if has3147: + return imp.cache_from_source(path) + else: + return simple_pyc_file_from_path(path) + +def rev_id(): + val = int(uuid.uuid4()) % 100000000000000 + return hex(val)[2:-1] + +class memoized_property(object): + """A read-only @property that is only evaluated once.""" + + def __init__(self, fget, doc=None): + self.fget = fget + self.__doc__ = doc or fget.__doc__ + self.__name__ = fget.__name__ + + def __get__(self, obj, cls): + if obj is None: + return None + obj.__dict__[self.__name__] = result = self.fget(obj) + return result + + +class immutabledict(dict): + + def _immutable(self, *arg, **kw): + raise TypeError("%s object is immutable" % self.__class__.__name__) + + __delitem__ = __setitem__ = __setattr__ = \ + clear = pop = popitem = setdefault = \ + update = _immutable + + def __new__(cls, *args): + new = dict.__new__(cls) + dict.__init__(new, *args) + return new + + def __init__(self, *args): + pass + + def __reduce__(self): + return immutabledict, (dict(self), ) + + def union(self, d): + if not self: + return immutabledict(d) + else: + d2 = immutabledict(self) + dict.update(d2, d) + return d2 + + def __repr__(self): + return "immutabledict(%s)" % dict.__repr__(self) + + +def _with_legacy_names(translations): + def decorate(fn): + + spec = inspect_getfullargspec(fn) + metadata = dict(target='target', fn='fn') + metadata.update(format_argspec_plus(spec, grouped=False)) + + has_keywords = bool(spec[2]) + + if not 
has_keywords: + metadata['args'] += ", **kw" + metadata['apply_kw'] += ", **kw" + + def go(*arg, **kw): + names = set(kw).difference(spec[0]) + for oldname, newname in translations: + if oldname in kw: + kw[newname] = kw.pop(oldname) + names.discard(oldname) + + warnings.warn( + "Argument '%s' is now named '%s' for function '%s'" % + (oldname, newname, fn.__name__)) + if not has_keywords and names: + raise TypeError("Unknown arguments: %s" % ", ".join(names)) + return fn(*arg, **kw) + + code = 'lambda %(args)s: %(target)s(%(apply_kw)s)' % ( + metadata) + decorated = eval(code, {"target": go}) + decorated.__defaults__ = getattr(fn, '__func__', fn).__defaults__ + update_wrapper(decorated, fn) + if hasattr(decorated, '__wrapped__'): + # update_wrapper in py3k applies __wrapped__, which causes + # inspect.getargspec() to ignore the extra arguments on our + # wrapper as of Python 3.4. We need this for the + # "module class proxy" thing though, so just del the __wrapped__ + # for now. See #175 as well as bugs.python.org/issue17482 + del decorated.__wrapped__ + return decorated + + return decorate + + + diff --git a/Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/PKG-INFO b/Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/PKG-INFO new file mode 100644 index 0000000..23f2982 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/PKG-INFO @@ -0,0 +1,28 @@ +Metadata-Version: 1.1 +Name: cffi +Version: 0.8.2 +Summary: Foreign Function Interface for Python calling C code. +Home-page: http://cffi.readthedocs.org +Author: Armin Rigo, Maciej Fijalkowski +Author-email: python-cffi@googlegroups.com +License: MIT +Description: + CFFI + ==== + + Foreign Function Interface for Python calling C code. + Please see the `Documentation `_. 
+ + Contact + ------- + + `Mailing list `_ + +Platform: UNKNOWN +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.2 +Classifier: Programming Language :: Python :: 3.3 diff --git a/Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/SOURCES.txt b/Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/SOURCES.txt new file mode 100644 index 0000000..0fe9fb2 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/SOURCES.txt @@ -0,0 +1,112 @@ +LICENSE +MANIFEST.in +setup.cfg +setup.py +setup_base.py +c/_cffi_backend.c +c/check__thread.c +c/file_emulator.h +c/malloc_closure.h +c/minibuffer.h +c/misc_thread.h +c/misc_win32.h +c/test_c.py +c/wchar_helper.h +c/x.py +c/libffi_msvc/ffi.c +c/libffi_msvc/ffi.h +c/libffi_msvc/ffi_common.h +c/libffi_msvc/fficonfig.h +c/libffi_msvc/ffitarget.h +c/libffi_msvc/prep_cif.c +c/libffi_msvc/types.c +c/libffi_msvc/win32.c +c/libffi_msvc/win64.asm +c/libffi_msvc/win64.obj +cffi/__init__.py +cffi/api.py +cffi/backend_ctypes.py +cffi/commontypes.py +cffi/cparser.py +cffi/ffiplatform.py +cffi/gc_weakref.py +cffi/lock.py +cffi/model.py +cffi/vengine_cpy.py +cffi/vengine_gen.py +cffi/verifier.py +cffi.egg-info/PKG-INFO +cffi.egg-info/SOURCES.txt +cffi.egg-info/dependency_links.txt +cffi.egg-info/not-zip-safe +cffi.egg-info/requires.txt +cffi.egg-info/top_level.txt +demo/_csvmodule.py +demo/_curses.py +demo/api.py +demo/bsdopendirtype.py +demo/btrfs-snap.py +demo/cffi-cocoa.py +demo/fastcsv.py +demo/gmp.py +demo/image.py +demo/pwuid.py +demo/py.cleanup +demo/pyobj.py +demo/readdir.py +demo/readdir2.py +demo/readdir_ctypes.py +demo/sarvi.py +demo/setup.py +demo/syslog.py +demo/ui.py +demo/winclipboard.py +demo/xclient.py +demo/y.py +doc/Makefile +doc/design.rst 
+doc/make.bat +doc/source/conf.py +doc/source/index.rst +testing/__init__.py +testing/backend_tests.py +testing/callback_in_thread.py +testing/support.py +testing/test_cdata.py +testing/test_ctypes.py +testing/test_ffi_backend.py +testing/test_function.py +testing/test_model.py +testing/test_ownlib.py +testing/test_parsing.py +testing/test_platform.py +testing/test_unicode_literals.py +testing/test_verify.py +testing/test_verify2.py +testing/test_version.py +testing/test_vgen.py +testing/test_vgen2.py +testing/test_zdistutils.py +testing/test_zintegration.py +testing/udir.py +testing/snippets/distutils_module/setup.py +testing/snippets/distutils_module/snip_basic_verify.py +testing/snippets/distutils_module/build/lib.linux-x86_64-3.2/snip_basic_verify.py +testing/snippets/distutils_package_1/setup.py +testing/snippets/distutils_package_1/build/lib.linux-x86_64-3.2/snip_basic_verify1/__init__.py +testing/snippets/distutils_package_1/snip_basic_verify1/__init__.py +testing/snippets/distutils_package_2/setup.py +testing/snippets/distutils_package_2/build/lib.linux-x86_64-3.2/snip_basic_verify2/__init__.py +testing/snippets/distutils_package_2/snip_basic_verify2/__init__.py +testing/snippets/infrastructure/setup.py +testing/snippets/infrastructure/build/lib/snip_infrastructure/__init__.py +testing/snippets/infrastructure/snip_infrastructure/__init__.py +testing/snippets/setuptools_module/setup.py +testing/snippets/setuptools_module/snip_setuptools_verify.py +testing/snippets/setuptools_module/build/lib.linux-x86_64-3.2/snip_setuptools_verify.py +testing/snippets/setuptools_package_1/setup.py +testing/snippets/setuptools_package_1/build/lib.linux-x86_64-3.2/snip_setuptools_verify1/__init__.py +testing/snippets/setuptools_package_1/snip_setuptools_verify1/__init__.py +testing/snippets/setuptools_package_2/setup.py +testing/snippets/setuptools_package_2/build/lib.linux-x86_64-3.2/snip_setuptools_verify2/__init__.py 
+testing/snippets/setuptools_package_2/snip_setuptools_verify2/__init__.py \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/dependency_links.txt b/Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/installed-files.txt b/Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/installed-files.txt new file mode 100644 index 0000000..7fce8e4 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/installed-files.txt @@ -0,0 +1,32 @@ +../cffi/api.py +../cffi/vengine_gen.py +../cffi/verifier.py +../cffi/commontypes.py +../cffi/cparser.py +../cffi/__init__.py +../cffi/backend_ctypes.py +../cffi/gc_weakref.py +../cffi/vengine_cpy.py +../cffi/ffiplatform.py +../cffi/model.py +../cffi/lock.py +../cffi/api.pyc +../cffi/vengine_gen.pyc +../cffi/verifier.pyc +../cffi/commontypes.pyc +../cffi/cparser.pyc +../cffi/__init__.pyc +../cffi/backend_ctypes.pyc +../cffi/gc_weakref.pyc +../cffi/vengine_cpy.pyc +../cffi/ffiplatform.pyc +../cffi/model.pyc +../cffi/lock.pyc +../_cffi_backend.so +./ +requires.txt +SOURCES.txt +dependency_links.txt +PKG-INFO +not-zip-safe +top_level.txt diff --git a/Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/not-zip-safe b/Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/not-zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/not-zip-safe @@ -0,0 +1 @@ + diff --git a/Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/requires.txt b/Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/requires.txt new file mode 100644 index 0000000..203143c --- /dev/null +++ 
b/Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/requires.txt @@ -0,0 +1 @@ +pycparser \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/top_level.txt b/Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/top_level.txt new file mode 100644 index 0000000..f645779 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/top_level.txt @@ -0,0 +1,2 @@ +_cffi_backend +cffi diff --git a/Linux_i686/lib/python2.7/site-packages/cffi/__init__.py b/Linux_i686/lib/python2.7/site-packages/cffi/__init__.py new file mode 100644 index 0000000..fa9e86f --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cffi/__init__.py @@ -0,0 +1,8 @@ +__all__ = ['FFI', 'VerificationError', 'VerificationMissing', 'CDefError', + 'FFIError'] + +from .api import FFI, CDefError, FFIError +from .ffiplatform import VerificationError, VerificationMissing + +__version__ = "0.8.2" +__version_info__ = (0, 8, 2) diff --git a/Linux_i686/lib/python2.7/site-packages/cffi/api.py b/Linux_i686/lib/python2.7/site-packages/cffi/api.py new file mode 100644 index 0000000..f44f086 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cffi/api.py @@ -0,0 +1,494 @@ +import sys, types +from .lock import allocate_lock + +try: + callable +except NameError: + # Python 3.1 + from collections import Callable + callable = lambda x: isinstance(x, Callable) + +try: + basestring +except NameError: + # Python 3.x + basestring = str + + +class FFIError(Exception): + pass + +class CDefError(Exception): + def __str__(self): + try: + line = 'line %d: ' % (self.args[1].coord.line,) + except (AttributeError, TypeError, IndexError): + line = '' + return '%s%s' % (line, self.args[0]) + + +class FFI(object): + r''' + The main top-level class that you instantiate once, or once per module. 
+ + Example usage: + + ffi = FFI() + ffi.cdef(""" + int printf(const char *, ...); + """) + + C = ffi.dlopen(None) # standard library + -or- + C = ffi.verify() # use a C compiler: verify the decl above is right + + C.printf("hello, %s!\n", ffi.new("char[]", "world")) + ''' + + def __init__(self, backend=None): + """Create an FFI instance. The 'backend' argument is used to + select a non-default backend, mostly for tests. + """ + from . import cparser, model + if backend is None: + # You need PyPy (>= 2.0 beta), or a CPython (>= 2.6) with + # _cffi_backend.so compiled. + import _cffi_backend as backend + from . import __version__ + assert (backend.__version__ == __version__ or + backend.__version__ == __version__[:3]) + # (If you insist you can also try to pass the option + # 'backend=backend_ctypes.CTypesBackend()', but don't + # rely on it! It's probably not going to work well.) + + self._backend = backend + self._lock = allocate_lock() + self._parser = cparser.Parser() + self._cached_btypes = {} + self._parsed_types = types.ModuleType('parsed_types').__dict__ + self._new_types = types.ModuleType('new_types').__dict__ + self._function_caches = [] + self._libraries = [] + self._cdefsources = [] + if hasattr(backend, 'set_ffi'): + backend.set_ffi(self) + for name in backend.__dict__: + if name.startswith('RTLD_'): + setattr(self, name, getattr(backend, name)) + # + with self._lock: + self.BVoidP = self._get_cached_btype(model.voidp_type) + if isinstance(backend, types.ModuleType): + # _cffi_backend: attach these constants to the class + if not hasattr(FFI, 'NULL'): + FFI.NULL = self.cast(self.BVoidP, 0) + FFI.CData, FFI.CType = backend._get_types() + else: + # ctypes backend: attach these constants to the instance + self.NULL = self.cast(self.BVoidP, 0) + self.CData, self.CType = backend._get_types() + + def cdef(self, csource, override=False, packed=False): + """Parse the given C source. This registers all declared functions, + types, and global variables. 
The functions and global variables can + then be accessed via either 'ffi.dlopen()' or 'ffi.verify()'. + The types can be used in 'ffi.new()' and other functions. + If 'packed' is specified as True, all structs declared inside this + cdef are packed, i.e. laid out without any field alignment at all. + """ + if not isinstance(csource, str): # unicode, on Python 2 + if not isinstance(csource, basestring): + raise TypeError("cdef() argument must be a string") + csource = csource.encode('ascii') + with self._lock: + self._parser.parse(csource, override=override, packed=packed) + self._cdefsources.append(csource) + if override: + for cache in self._function_caches: + cache.clear() + + def dlopen(self, name, flags=0): + """Load and return a dynamic library identified by 'name'. + The standard C library can be loaded by passing None. + Note that functions and types declared by 'ffi.cdef()' are not + linked to a particular library, just like C headers; in the + library we only look for the actual (untyped) symbols. + """ + assert isinstance(name, basestring) or name is None + with self._lock: + lib, function_cache = _make_ffi_library(self, name, flags) + self._function_caches.append(function_cache) + self._libraries.append(lib) + return lib + + def _typeof_locked(self, cdecl): + # call me with the lock! 
+ key = cdecl + if key in self._parsed_types: + return self._parsed_types[key] + # + if not isinstance(cdecl, str): # unicode, on Python 2 + cdecl = cdecl.encode('ascii') + # + type = self._parser.parse_type(cdecl) + really_a_function_type = type.is_raw_function + if really_a_function_type: + type = type.as_function_pointer() + btype = self._get_cached_btype(type) + result = btype, really_a_function_type + self._parsed_types[key] = result + return result + + def _typeof(self, cdecl, consider_function_as_funcptr=False): + # string -> ctype object + try: + result = self._parsed_types[cdecl] + except KeyError: + with self._lock: + result = self._typeof_locked(cdecl) + # + btype, really_a_function_type = result + if really_a_function_type and not consider_function_as_funcptr: + raise CDefError("the type %r is a function type, not a " + "pointer-to-function type" % (cdecl,)) + return btype + + def typeof(self, cdecl): + """Parse the C type given as a string and return the + corresponding object. + It can also be used on 'cdata' instance to get its C type. + """ + if isinstance(cdecl, basestring): + return self._typeof(cdecl) + if isinstance(cdecl, self.CData): + return self._backend.typeof(cdecl) + if isinstance(cdecl, types.BuiltinFunctionType): + res = _builtin_function_type(cdecl) + if res is not None: + return res + if (isinstance(cdecl, types.FunctionType) + and hasattr(cdecl, '_cffi_base_type')): + with self._lock: + return self._get_cached_btype(cdecl._cffi_base_type) + raise TypeError(type(cdecl)) + + def sizeof(self, cdecl): + """Return the size in bytes of the argument. It can be a + string naming a C type, or a 'cdata' instance. + """ + if isinstance(cdecl, basestring): + BType = self._typeof(cdecl) + return self._backend.sizeof(BType) + else: + return self._backend.sizeof(cdecl) + + def alignof(self, cdecl): + """Return the natural alignment size in bytes of the C type + given as a string. 
+ """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.alignof(cdecl) + + def offsetof(self, cdecl, fieldname): + """Return the offset of the named field inside the given + structure, which must be given as a C type name. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.typeoffsetof(cdecl, fieldname)[1] + + def new(self, cdecl, init=None): + """Allocate an instance according to the specified C type and + return a pointer to it. The specified C type must be either a + pointer or an array: ``new('X *')`` allocates an X and returns + a pointer to it, whereas ``new('X[n]')`` allocates an array of + n X'es and returns an array referencing it (which works + mostly like a pointer, like in C). You can also use + ``new('X[]', n)`` to allocate an array of a non-constant + length n. + + The memory is initialized following the rules of declaring a + global variable in C: by default it is zero-initialized, but + an explicit initializer can be given which can be used to + fill all or part of the memory. + + When the returned object goes out of scope, the memory + is freed. In other words the returned object has + ownership of the value of type 'cdecl' that it points to. This + means that the raw data can be used as long as this object is + kept alive, but must not be used for a longer time. Be careful + about that when copying the pointer to the memory somewhere + else, e.g. into another structure. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.newp(cdecl, init) + + def cast(self, cdecl, source): + """Similar to a C cast: returns an instance of the named C + type initialized with the given 'source'. The source is + casted between integers or pointers of any type. 
+ """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + return self._backend.cast(cdecl, source) + + def string(self, cdata, maxlen=-1): + """Return a Python string (or unicode string) from the 'cdata'. + If 'cdata' is a pointer or array of characters or bytes, returns + the null-terminated string. The returned string extends until + the first null character, or at most 'maxlen' characters. If + 'cdata' is an array then 'maxlen' defaults to its length. + + If 'cdata' is a pointer or array of wchar_t, returns a unicode + string following the same rules. + + If 'cdata' is a single character or byte or a wchar_t, returns + it as a string or unicode string. + + If 'cdata' is an enum, returns the value of the enumerator as a + string, or 'NUMBER' if the value is out of range. + """ + return self._backend.string(cdata, maxlen) + + def buffer(self, cdata, size=-1): + """Return a read-write buffer object that references the raw C data + pointed to by the given 'cdata'. The 'cdata' must be a pointer or + an array. Can be passed to functions expecting a buffer, or directly + manipulated with: + + buf[:] get a copy of it in a regular string, or + buf[idx] as a single character + buf[:] = ... + buf[idx] = ... change the content + """ + return self._backend.buffer(cdata, size) + + def callback(self, cdecl, python_callable=None, error=None): + """Return a callback object or a decorator making such a + callback object. 'cdecl' must name a C function pointer type. + The callback invokes the specified 'python_callable' (which may + be provided either directly or via a decorator). Important: the + callback object must be manually kept alive for as long as the + callback may be invoked from the C level. 
+ """ + def callback_decorator_wrap(python_callable): + if not callable(python_callable): + raise TypeError("the 'python_callable' argument " + "is not callable") + return self._backend.callback(cdecl, python_callable, error) + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl, consider_function_as_funcptr=True) + if python_callable is None: + return callback_decorator_wrap # decorator mode + else: + return callback_decorator_wrap(python_callable) # direct mode + + def getctype(self, cdecl, replace_with=''): + """Return a string giving the C type 'cdecl', which may be itself + a string or a object. If 'replace_with' is given, it gives + extra text to append (or insert for more complicated C types), like + a variable name, or '*' to get actually the C type 'pointer-to-cdecl'. + """ + if isinstance(cdecl, basestring): + cdecl = self._typeof(cdecl) + replace_with = replace_with.strip() + if (replace_with.startswith('*') + and '&[' in self._backend.getcname(cdecl, '&')): + replace_with = '(%s)' % replace_with + elif replace_with and not replace_with[0] in '[(': + replace_with = ' ' + replace_with + return self._backend.getcname(cdecl, replace_with) + + def gc(self, cdata, destructor): + """Return a new cdata object that points to the same + data. Later, when this new cdata object is garbage-collected, + 'destructor(old_cdata_object)' will be called. + """ + with self._lock: + try: + gc_weakrefs = self.gc_weakrefs + except AttributeError: + from .gc_weakref import GcWeakrefs + gc_weakrefs = self.gc_weakrefs = GcWeakrefs(self) + return gc_weakrefs.build(cdata, destructor) + + def _get_cached_btype(self, type): + assert self._lock.acquire(False) is False + # call me with the lock! 
+ try: + BType = self._cached_btypes[type] + except KeyError: + finishlist = [] + BType = type.get_cached_btype(self, finishlist) + for type in finishlist: + type.finish_backend_type(self, finishlist) + return BType + + def verify(self, source='', tmpdir=None, **kwargs): + """Verify that the current ffi signatures compile on this + machine, and return a dynamic library object. The dynamic + library can be used to call functions and access global + variables declared in this 'ffi'. The library is compiled + by the C compiler: it gives you C-level API compatibility + (including calling macros). This is unlike 'ffi.dlopen()', + which requires binary compatibility in the signatures. + """ + from .verifier import Verifier, _caller_dir_pycache + tmpdir = tmpdir or _caller_dir_pycache() + self.verifier = Verifier(self, source, tmpdir, **kwargs) + lib = self.verifier.load_library() + self._libraries.append(lib) + return lib + + def _get_errno(self): + return self._backend.get_errno() + def _set_errno(self, errno): + self._backend.set_errno(errno) + errno = property(_get_errno, _set_errno, None, + "the value of 'errno' from/to the C calls") + + def getwinerror(self, code=-1): + return self._backend.getwinerror(code) + + def _pointer_to(self, ctype): + from . import model + with self._lock: + return model.pointer_cache(self, ctype) + + def addressof(self, cdata, field=None): + """Return the address of a . + If 'field' is specified, return the address of this field. + """ + ctype = self._backend.typeof(cdata) + ctype, offset = self._backend.typeoffsetof(ctype, field) + ctypeptr = self._pointer_to(ctype) + return self._backend.rawaddressof(ctypeptr, cdata, offset) + + def include(self, ffi_to_include): + """Includes the typedefs, structs, unions and enums defined + in another FFI instance. Usage is similar to a #include in C, + where a part of the program might include types defined in + another part for its own usage. 
Note that the include() + method has no effect on functions, constants and global + variables, which must anyway be accessed directly from the + lib object returned by the original FFI instance. + """ + with ffi_to_include._lock: + with self._lock: + self._parser.include(ffi_to_include._parser) + self._cdefsources.append('[') + self._cdefsources.extend(ffi_to_include._cdefsources) + self._cdefsources.append(']') + + def new_handle(self, x): + return self._backend.newp_handle(self.BVoidP, x) + + def from_handle(self, x): + return self._backend.from_handle(x) + + +def _load_backend_lib(backend, name, flags): + if name is None: + if sys.platform != "win32": + return backend.load_library(None, flags) + name = "c" # Windows: load_library(None) fails, but this works + # (backward compatibility hack only) + try: + if '.' not in name and '/' not in name: + raise OSError("library not found: %r" % (name,)) + return backend.load_library(name, flags) + except OSError: + import ctypes.util + path = ctypes.util.find_library(name) + if path is None: + raise # propagate the original OSError + return backend.load_library(path, flags) + +def _make_ffi_library(ffi, libname, flags): + import os + backend = ffi._backend + backendlib = _load_backend_lib(backend, libname, flags) + copied_enums = [] + # + def make_accessor_locked(name): + key = 'function ' + name + if key in ffi._parser._declarations: + tp = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + try: + value = backendlib.load_function(BType, name) + except KeyError as e: + raise AttributeError('%s: %s' % (name, e)) + library.__dict__[name] = value + return + # + key = 'variable ' + name + if key in ffi._parser._declarations: + tp = ffi._parser._declarations[key] + BType = ffi._get_cached_btype(tp) + read_variable = backendlib.read_variable + write_variable = backendlib.write_variable + setattr(FFILibrary, name, property( + lambda self: read_variable(BType, name), + lambda self, value: write_variable(BType, 
name, value))) + return + # + if not copied_enums: + from . import model + for key, tp in ffi._parser._declarations.items(): + if not isinstance(tp, model.EnumType): + continue + for enumname, enumval in zip(tp.enumerators, tp.enumvalues): + if enumname not in library.__dict__: + library.__dict__[enumname] = enumval + copied_enums.append(True) + if name in library.__dict__: + return + # + raise AttributeError(name) + # + def make_accessor(name): + with ffi._lock: + if name in library.__dict__ or name in FFILibrary.__dict__: + return # added by another thread while waiting for the lock + make_accessor_locked(name) + # + class FFILibrary(object): + def __getattr__(self, name): + make_accessor(name) + return getattr(self, name) + def __setattr__(self, name, value): + try: + property = getattr(self.__class__, name) + except AttributeError: + make_accessor(name) + setattr(self, name, value) + else: + property.__set__(self, value) + # + if libname is not None: + try: + if not isinstance(libname, str): # unicode, on Python 2 + libname = libname.encode('utf-8') + FFILibrary.__name__ = 'FFILibrary_%s' % libname + except UnicodeError: + pass + library = FFILibrary() + return library, library.__dict__ + +def _builtin_function_type(func): + # a hack to make at least ffi.typeof(builtin_function) work, + # if the builtin function was obtained by 'vengine_cpy'. 
+ import sys + try: + module = sys.modules[func.__module__] + ffi = module._cffi_original_ffi + types_of_builtin_funcs = module._cffi_types_of_builtin_funcs + tp = types_of_builtin_funcs[func] + except (KeyError, AttributeError, TypeError): + return None + else: + with ffi._lock: + return ffi._get_cached_btype(tp) diff --git a/Linux_i686/lib/python2.7/site-packages/cffi/backend_ctypes.py b/Linux_i686/lib/python2.7/site-packages/cffi/backend_ctypes.py new file mode 100644 index 0000000..2b2b481 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cffi/backend_ctypes.py @@ -0,0 +1,1049 @@ +import ctypes, ctypes.util, operator, sys +from . import model + +if sys.version_info < (3,): + integer_types = (int, long) + bytechr = chr +else: + unicode = str + integer_types = int + xrange = range + bytechr = lambda num: bytes([num]) + +class CTypesType(type): + pass + +class CTypesData(object): + __metaclass__ = CTypesType + __slots__ = ['__weakref__'] + __name__ = '' + + def __init__(self, *args): + raise TypeError("cannot instantiate %r" % (self.__class__,)) + + @classmethod + def _newp(cls, init): + raise TypeError("expected a pointer or array ctype, got '%s'" + % (cls._get_c_name(),)) + + @staticmethod + def _to_ctypes(value): + raise TypeError + + @classmethod + def _arg_to_ctypes(cls, *value): + try: + ctype = cls._ctype + except AttributeError: + raise TypeError("cannot create an instance of %r" % (cls,)) + if value: + res = cls._to_ctypes(*value) + if not isinstance(res, ctype): + res = cls._ctype(res) + else: + res = cls._ctype() + return res + + @classmethod + def _create_ctype_obj(cls, init): + if init is None: + return cls._arg_to_ctypes() + else: + return cls._arg_to_ctypes(init) + + @staticmethod + def _from_ctypes(ctypes_value): + raise TypeError + + @classmethod + def _get_c_name(cls, replace_with=''): + return cls._reftypename.replace(' &', replace_with) + + @classmethod + def _fix_class(cls): + cls.__name__ = 'CData<%s>' % (cls._get_c_name(),) + 
cls.__qualname__ = 'CData<%s>' % (cls._get_c_name(),) + cls.__module__ = 'ffi' + + def _get_own_repr(self): + raise NotImplementedError + + def _addr_repr(self, address): + if address == 0: + return 'NULL' + else: + if address < 0: + address += 1 << (8*ctypes.sizeof(ctypes.c_void_p)) + return '0x%x' % address + + def __repr__(self, c_name=None): + own = self._get_own_repr() + return '' % (c_name or self._get_c_name(), own) + + def _convert_to_address(self, BClass): + if BClass is None: + raise TypeError("cannot convert %r to an address" % ( + self._get_c_name(),)) + else: + raise TypeError("cannot convert %r to %r" % ( + self._get_c_name(), BClass._get_c_name())) + + @classmethod + def _get_size(cls): + return ctypes.sizeof(cls._ctype) + + def _get_size_of_instance(self): + return ctypes.sizeof(self._ctype) + + @classmethod + def _cast_from(cls, source): + raise TypeError("cannot cast to %r" % (cls._get_c_name(),)) + + def _cast_to_integer(self): + return self._convert_to_address(None) + + @classmethod + def _alignment(cls): + return ctypes.alignment(cls._ctype) + + def __iter__(self): + raise TypeError("cdata %r does not support iteration" % ( + self._get_c_name()),) + + def _make_cmp(name): + cmpfunc = getattr(operator, name) + def cmp(self, other): + if isinstance(other, CTypesData): + return cmpfunc(self._convert_to_address(None), + other._convert_to_address(None)) + else: + return NotImplemented + cmp.func_name = name + return cmp + + __eq__ = _make_cmp('__eq__') + __ne__ = _make_cmp('__ne__') + __lt__ = _make_cmp('__lt__') + __le__ = _make_cmp('__le__') + __gt__ = _make_cmp('__gt__') + __ge__ = _make_cmp('__ge__') + + def __hash__(self): + return hash(type(self)) ^ hash(self._convert_to_address(None)) + + def _to_string(self, maxlen): + raise TypeError("string(): %r" % (self,)) + + +class CTypesGenericPrimitive(CTypesData): + __slots__ = [] + + def __eq__(self, other): + return self is other + + def __ne__(self, other): + return self is not other + + def 
class CTypesGenericArray(CTypesData):
    """Behaviour shared by every ctypes-backed array type."""
    __slots__ = []

    @classmethod
    def _newp(cls, init):
        # ffi.new() on an array type is plain instantiation
        return cls(init)

    def _get_own_repr(self):
        return self._addr_repr(ctypes.addressof(self._blob))

    def __iter__(self):
        # lazily yield the items one index at a time
        for idx in xrange(len(self)):
            yield self[idx]
class CTypesBaseStructOrUnion(CTypesData):
    """Base class of the ctypes-backed struct and union cdata classes."""
    __slots__ = ['_blob']

    @classmethod
    def _create_ctype_obj(cls, init):
        # may be overridden
        raise TypeError("cannot instantiate opaque type %s" % (cls,))

    @classmethod
    def _offsetof(cls, fieldname):
        # byte offset of a field, straight from the ctypes descriptor
        return getattr(cls._ctype, fieldname).offset

    @classmethod
    def _from_ctypes(cls, ctypes_struct_or_union):
        obj = cls.__new__(cls)
        obj._blob = ctypes_struct_or_union
        return obj

    @classmethod
    def _to_ctypes(cls, value):
        return value._blob

    def _get_own_repr(self):
        return self._addr_repr(ctypes.addressof(self._blob))

    def _convert_to_address(self, BClass):
        # only a pointer-to-this-struct class may take our address
        if getattr(BClass, '_BItem', None) is self.__class__:
            return ctypes.addressof(self._blob)
        return CTypesData._convert_to_address(self, BClass)

    def __repr__(self, c_name=None):
        return CTypesData.__repr__(self, c_name or self._get_c_name(' &'))
ctypes.c_longlong, + 'signed char': ctypes.c_byte, + 'unsigned char': ctypes.c_ubyte, + 'unsigned short': ctypes.c_ushort, + 'unsigned int': ctypes.c_uint, + 'unsigned long': ctypes.c_ulong, + 'unsigned long long': ctypes.c_ulonglong, + 'float': ctypes.c_float, + 'double': ctypes.c_double, + '_Bool': ctypes.c_bool, + } + + for _name in ['unsigned long long', 'unsigned long', + 'unsigned int', 'unsigned short', 'unsigned char']: + _size = ctypes.sizeof(PRIMITIVE_TYPES[_name]) + PRIMITIVE_TYPES['uint%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_void_p): + PRIMITIVE_TYPES['uintptr_t'] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_size_t): + PRIMITIVE_TYPES['size_t'] = PRIMITIVE_TYPES[_name] + + for _name in ['long long', 'long', 'int', 'short', 'signed char']: + _size = ctypes.sizeof(PRIMITIVE_TYPES[_name]) + PRIMITIVE_TYPES['int%d_t' % (8*_size)] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_void_p): + PRIMITIVE_TYPES['intptr_t'] = PRIMITIVE_TYPES[_name] + PRIMITIVE_TYPES['ptrdiff_t'] = PRIMITIVE_TYPES[_name] + if _size == ctypes.sizeof(ctypes.c_size_t): + PRIMITIVE_TYPES['ssize_t'] = PRIMITIVE_TYPES[_name] + + + def __init__(self): + self.RTLD_LAZY = 0 # not supported anyway by ctypes + self.RTLD_NOW = 0 + self.RTLD_GLOBAL = ctypes.RTLD_GLOBAL + self.RTLD_LOCAL = ctypes.RTLD_LOCAL + + def set_ffi(self, ffi): + self.ffi = ffi + + def _get_types(self): + return CTypesData, CTypesType + + def load_library(self, path, flags=0): + cdll = ctypes.CDLL(path, flags) + return CTypesLibrary(self, cdll) + + def new_void_type(self): + class CTypesVoid(CTypesData): + __slots__ = [] + _reftypename = 'void &' + @staticmethod + def _from_ctypes(novalue): + return None + @staticmethod + def _to_ctypes(novalue): + if novalue is not None: + raise TypeError("None expected, got %s object" % + (type(novalue).__name__,)) + return None + CTypesVoid._fix_class() + return CTypesVoid + + def new_primitive_type(self, name): + if 
name == 'wchar_t': + raise NotImplementedError(name) + ctype = self.PRIMITIVE_TYPES[name] + if name == 'char': + kind = 'char' + elif name in ('float', 'double'): + kind = 'float' + else: + if name in ('signed char', 'unsigned char'): + kind = 'byte' + elif name == '_Bool': + kind = 'bool' + else: + kind = 'int' + is_signed = (ctype(-1).value == -1) + # + def _cast_source_to_int(source): + if isinstance(source, (integer_types, float)): + source = int(source) + elif isinstance(source, CTypesData): + source = source._cast_to_integer() + elif isinstance(source, bytes): + source = ord(source) + elif source is None: + source = 0 + else: + raise TypeError("bad type for cast to %r: %r" % + (CTypesPrimitive, type(source).__name__)) + return source + # + class CTypesPrimitive(CTypesGenericPrimitive): + __slots__ = ['_value'] + _ctype = ctype + _reftypename = '%s &' % name + + def __init__(self, value): + self._value = value + + @staticmethod + def _create_ctype_obj(init): + if init is None: + return ctype() + return ctype(CTypesPrimitive._to_ctypes(init)) + + if kind == 'int' or kind == 'byte': + @classmethod + def _cast_from(cls, source): + source = _cast_source_to_int(source) + source = ctype(source).value # cast within range + return cls(source) + def __int__(self): + return self._value + + if kind == 'bool': + @classmethod + def _cast_from(cls, source): + if not isinstance(source, (integer_types, float)): + source = _cast_source_to_int(source) + return cls(bool(source)) + def __int__(self): + return self._value + + if kind == 'char': + @classmethod + def _cast_from(cls, source): + source = _cast_source_to_int(source) + source = bytechr(source & 0xFF) + return cls(source) + def __int__(self): + return ord(self._value) + + if kind == 'float': + @classmethod + def _cast_from(cls, source): + if isinstance(source, float): + pass + elif isinstance(source, CTypesGenericPrimitive): + if hasattr(source, '__float__'): + source = float(source) + else: + source = int(source) + 
else: + source = _cast_source_to_int(source) + source = ctype(source).value # fix precision + return cls(source) + def __int__(self): + return int(self._value) + def __float__(self): + return self._value + + _cast_to_integer = __int__ + + if kind == 'int' or kind == 'byte' or kind == 'bool': + @staticmethod + def _to_ctypes(x): + if not isinstance(x, integer_types): + if isinstance(x, CTypesData): + x = int(x) + else: + raise TypeError("integer expected, got %s" % + type(x).__name__) + if ctype(x).value != x: + if not is_signed and x < 0: + raise OverflowError("%s: negative integer" % name) + else: + raise OverflowError("%s: integer out of bounds" + % name) + return x + + if kind == 'char': + @staticmethod + def _to_ctypes(x): + if isinstance(x, bytes) and len(x) == 1: + return x + if isinstance(x, CTypesPrimitive): # > + return x._value + raise TypeError("character expected, got %s" % + type(x).__name__) + + if kind == 'float': + @staticmethod + def _to_ctypes(x): + if not isinstance(x, (integer_types, float, CTypesData)): + raise TypeError("float expected, got %s" % + type(x).__name__) + return ctype(x).value + + @staticmethod + def _from_ctypes(value): + return getattr(value, 'value', value) + + @staticmethod + def _initialize(blob, init): + blob.value = CTypesPrimitive._to_ctypes(init) + + if kind == 'char': + def _to_string(self, maxlen): + return self._value + if kind == 'byte': + def _to_string(self, maxlen): + return chr(self._value & 0xff) + # + CTypesPrimitive._fix_class() + return CTypesPrimitive + + def new_pointer_type(self, BItem): + getbtype = self.ffi._get_cached_btype + if BItem is getbtype(model.PrimitiveType('char')): + kind = 'charp' + elif BItem in (getbtype(model.PrimitiveType('signed char')), + getbtype(model.PrimitiveType('unsigned char'))): + kind = 'bytep' + elif BItem is getbtype(model.void_type): + kind = 'voidp' + else: + kind = 'generic' + # + class CTypesPtr(CTypesGenericPtr): + __slots__ = ['_own'] + if kind == 'charp': + __slots__ 
+= ['__as_strbuf'] + _BItem = BItem + if hasattr(BItem, '_ctype'): + _ctype = ctypes.POINTER(BItem._ctype) + _bitem_size = ctypes.sizeof(BItem._ctype) + else: + _ctype = ctypes.c_void_p + if issubclass(BItem, CTypesGenericArray): + _reftypename = BItem._get_c_name('(* &)') + else: + _reftypename = BItem._get_c_name(' * &') + + def __init__(self, init): + ctypeobj = BItem._create_ctype_obj(init) + if kind == 'charp': + self.__as_strbuf = ctypes.create_string_buffer( + ctypeobj.value + b'\x00') + self._as_ctype_ptr = ctypes.cast( + self.__as_strbuf, self._ctype) + else: + self._as_ctype_ptr = ctypes.pointer(ctypeobj) + self._address = ctypes.cast(self._as_ctype_ptr, + ctypes.c_void_p).value + self._own = True + + def __add__(self, other): + if isinstance(other, integer_types): + return self._new_pointer_at(self._address + + other * self._bitem_size) + else: + return NotImplemented + + def __sub__(self, other): + if isinstance(other, integer_types): + return self._new_pointer_at(self._address - + other * self._bitem_size) + elif type(self) is type(other): + return (self._address - other._address) // self._bitem_size + else: + return NotImplemented + + def __getitem__(self, index): + if getattr(self, '_own', False) and index != 0: + raise IndexError + return BItem._from_ctypes(self._as_ctype_ptr[index]) + + def __setitem__(self, index, value): + self._as_ctype_ptr[index] = BItem._to_ctypes(value) + + if kind == 'charp' or kind == 'voidp': + @classmethod + def _arg_to_ctypes(cls, *value): + if value and isinstance(value[0], bytes): + return ctypes.c_char_p(value[0]) + else: + return super(CTypesPtr, cls)._arg_to_ctypes(*value) + + if kind == 'charp' or kind == 'bytep': + def _to_string(self, maxlen): + if maxlen < 0: + maxlen = sys.maxsize + p = ctypes.cast(self._as_ctype_ptr, + ctypes.POINTER(ctypes.c_char)) + n = 0 + while n < maxlen and p[n] != b'\x00': + n += 1 + return b''.join([p[i] for i in range(n)]) + + def _get_own_repr(self): + if getattr(self, '_own', 
False): + return 'owning %d bytes' % ( + ctypes.sizeof(self._as_ctype_ptr.contents),) + return super(CTypesPtr, self)._get_own_repr() + # + if (BItem is self.ffi._get_cached_btype(model.void_type) or + BItem is self.ffi._get_cached_btype(model.PrimitiveType('char'))): + CTypesPtr._automatic_casts = True + # + CTypesPtr._fix_class() + return CTypesPtr + + def new_array_type(self, CTypesPtr, length): + if length is None: + brackets = ' &[]' + else: + brackets = ' &[%d]' % length + BItem = CTypesPtr._BItem + getbtype = self.ffi._get_cached_btype + if BItem is getbtype(model.PrimitiveType('char')): + kind = 'char' + elif BItem in (getbtype(model.PrimitiveType('signed char')), + getbtype(model.PrimitiveType('unsigned char'))): + kind = 'byte' + else: + kind = 'generic' + # + class CTypesArray(CTypesGenericArray): + __slots__ = ['_blob', '_own'] + if length is not None: + _ctype = BItem._ctype * length + else: + __slots__.append('_ctype') + _reftypename = BItem._get_c_name(brackets) + _declared_length = length + _CTPtr = CTypesPtr + + def __init__(self, init): + if length is None: + if isinstance(init, integer_types): + len1 = init + init = None + elif kind == 'char' and isinstance(init, bytes): + len1 = len(init) + 1 # extra null + else: + init = tuple(init) + len1 = len(init) + self._ctype = BItem._ctype * len1 + self._blob = self._ctype() + self._own = True + if init is not None: + self._initialize(self._blob, init) + + @staticmethod + def _initialize(blob, init): + if isinstance(init, bytes): + init = [init[i:i+1] for i in range(len(init))] + else: + init = tuple(init) + if len(init) > len(blob): + raise IndexError("too many initializers") + addr = ctypes.cast(blob, ctypes.c_void_p).value + PTR = ctypes.POINTER(BItem._ctype) + itemsize = ctypes.sizeof(BItem._ctype) + for i, value in enumerate(init): + p = ctypes.cast(addr + i * itemsize, PTR) + BItem._initialize(p.contents, value) + + def __len__(self): + return len(self._blob) + + def __getitem__(self, index): + if 
not (0 <= index < len(self._blob)): + raise IndexError + return BItem._from_ctypes(self._blob[index]) + + def __setitem__(self, index, value): + if not (0 <= index < len(self._blob)): + raise IndexError + self._blob[index] = BItem._to_ctypes(value) + + if kind == 'char' or kind == 'byte': + def _to_string(self, maxlen): + if maxlen < 0: + maxlen = len(self._blob) + p = ctypes.cast(self._blob, + ctypes.POINTER(ctypes.c_char)) + n = 0 + while n < maxlen and p[n] != b'\x00': + n += 1 + return b''.join([p[i] for i in range(n)]) + + def _get_own_repr(self): + if getattr(self, '_own', False): + return 'owning %d bytes' % (ctypes.sizeof(self._blob),) + return super(CTypesArray, self)._get_own_repr() + + def _convert_to_address(self, BClass): + if BClass in (CTypesPtr, None) or BClass._automatic_casts: + return ctypes.addressof(self._blob) + else: + return CTypesData._convert_to_address(self, BClass) + + @staticmethod + def _from_ctypes(ctypes_array): + self = CTypesArray.__new__(CTypesArray) + self._blob = ctypes_array + return self + + @staticmethod + def _arg_to_ctypes(value): + return CTypesPtr._arg_to_ctypes(value) + + def __add__(self, other): + if isinstance(other, integer_types): + return CTypesPtr._new_pointer_at( + ctypes.addressof(self._blob) + + other * ctypes.sizeof(BItem._ctype)) + else: + return NotImplemented + + @classmethod + def _cast_from(cls, source): + raise NotImplementedError("casting to %r" % ( + cls._get_c_name(),)) + # + CTypesArray._fix_class() + return CTypesArray + + def _new_struct_or_union(self, kind, name, base_ctypes_class): + # + class struct_or_union(base_ctypes_class): + pass + struct_or_union.__name__ = '%s_%s' % (kind, name) + # + class CTypesStructOrUnion(CTypesBaseStructOrUnion): + __slots__ = ['_blob'] + _ctype = struct_or_union + _reftypename = '%s &' % (name,) + _kind = kind + # + CTypesStructOrUnion._fix_class() + return CTypesStructOrUnion + + def new_struct_type(self, name): + return self._new_struct_or_union('struct', name, 
ctypes.Structure) + + def new_union_type(self, name): + return self._new_struct_or_union('union', name, ctypes.Union) + + def complete_struct_or_union(self, CTypesStructOrUnion, fields, tp, + totalsize=-1, totalalignment=-1, sflags=0): + if totalsize >= 0 or totalalignment >= 0: + raise NotImplementedError("the ctypes backend of CFFI does not support " + "structures completed by verify(); please " + "compile and install the _cffi_backend module.") + struct_or_union = CTypesStructOrUnion._ctype + fnames = [fname for (fname, BField, bitsize) in fields] + btypes = [BField for (fname, BField, bitsize) in fields] + bitfields = [bitsize for (fname, BField, bitsize) in fields] + # + bfield_types = {} + cfields = [] + for (fname, BField, bitsize) in fields: + if bitsize < 0: + cfields.append((fname, BField._ctype)) + bfield_types[fname] = BField + else: + cfields.append((fname, BField._ctype, bitsize)) + bfield_types[fname] = Ellipsis + if sflags & 8: + struct_or_union._pack_ = 1 + struct_or_union._fields_ = cfields + CTypesStructOrUnion._bfield_types = bfield_types + # + @staticmethod + def _create_ctype_obj(init): + result = struct_or_union() + if init is not None: + initialize(result, init) + return result + CTypesStructOrUnion._create_ctype_obj = _create_ctype_obj + # + def initialize(blob, init): + if is_union: + if len(init) > 1: + raise ValueError("union initializer: %d items given, but " + "only one supported (use a dict if needed)" + % (len(init),)) + if not isinstance(init, dict): + if isinstance(init, (bytes, unicode)): + raise TypeError("union initializer: got a str") + init = tuple(init) + if len(init) > len(fnames): + raise ValueError("too many values for %s initializer" % + CTypesStructOrUnion._get_c_name()) + init = dict(zip(fnames, init)) + addr = ctypes.addressof(blob) + for fname, value in init.items(): + BField, bitsize = name2fieldtype[fname] + assert bitsize < 0, \ + "not implemented: initializer with bit fields" + offset = 
CTypesStructOrUnion._offsetof(fname) + PTR = ctypes.POINTER(BField._ctype) + p = ctypes.cast(addr + offset, PTR) + BField._initialize(p.contents, value) + is_union = CTypesStructOrUnion._kind == 'union' + name2fieldtype = dict(zip(fnames, zip(btypes, bitfields))) + # + for fname, BField, bitsize in fields: + if fname == '': + raise NotImplementedError("nested anonymous structs/unions") + if hasattr(CTypesStructOrUnion, fname): + raise ValueError("the field name %r conflicts in " + "the ctypes backend" % fname) + if bitsize < 0: + def getter(self, fname=fname, BField=BField, + offset=CTypesStructOrUnion._offsetof(fname), + PTR=ctypes.POINTER(BField._ctype)): + addr = ctypes.addressof(self._blob) + p = ctypes.cast(addr + offset, PTR) + return BField._from_ctypes(p.contents) + def setter(self, value, fname=fname, BField=BField): + setattr(self._blob, fname, BField._to_ctypes(value)) + # + if issubclass(BField, CTypesGenericArray): + setter = None + if BField._declared_length == 0: + def getter(self, fname=fname, BFieldPtr=BField._CTPtr, + offset=CTypesStructOrUnion._offsetof(fname), + PTR=ctypes.POINTER(BField._ctype)): + addr = ctypes.addressof(self._blob) + p = ctypes.cast(addr + offset, PTR) + return BFieldPtr._from_ctypes(p) + # + else: + def getter(self, fname=fname, BField=BField): + return BField._from_ctypes(getattr(self._blob, fname)) + def setter(self, value, fname=fname, BField=BField): + # xxx obscure workaround + value = BField._to_ctypes(value) + oldvalue = getattr(self._blob, fname) + setattr(self._blob, fname, value) + if value != getattr(self._blob, fname): + setattr(self._blob, fname, oldvalue) + raise OverflowError("value too large for bitfield") + setattr(CTypesStructOrUnion, fname, property(getter, setter)) + # + CTypesPtr = self.ffi._get_cached_btype(model.PointerType(tp)) + for fname in fnames: + if hasattr(CTypesPtr, fname): + raise ValueError("the field name %r conflicts in " + "the ctypes backend" % fname) + def getter(self, fname=fname): + 
return getattr(self[0], fname) + def setter(self, value, fname=fname): + setattr(self[0], fname, value) + setattr(CTypesPtr, fname, property(getter, setter)) + + def new_function_type(self, BArgs, BResult, has_varargs): + nameargs = [BArg._get_c_name() for BArg in BArgs] + if has_varargs: + nameargs.append('...') + nameargs = ', '.join(nameargs) + # + class CTypesFunctionPtr(CTypesGenericPtr): + __slots__ = ['_own_callback', '_name'] + _ctype = ctypes.CFUNCTYPE(getattr(BResult, '_ctype', None), + *[BArg._ctype for BArg in BArgs], + use_errno=True) + _reftypename = BResult._get_c_name('(* &)(%s)' % (nameargs,)) + + def __init__(self, init, error=None): + # create a callback to the Python callable init() + import traceback + assert not has_varargs, "varargs not supported for callbacks" + if getattr(BResult, '_ctype', None) is not None: + error = BResult._from_ctypes( + BResult._create_ctype_obj(error)) + else: + error = None + def callback(*args): + args2 = [] + for arg, BArg in zip(args, BArgs): + args2.append(BArg._from_ctypes(arg)) + try: + res2 = init(*args2) + res2 = BResult._to_ctypes(res2) + except: + traceback.print_exc() + res2 = error + if issubclass(BResult, CTypesGenericPtr): + if res2: + res2 = ctypes.cast(res2, ctypes.c_void_p).value + # .value: http://bugs.python.org/issue1574593 + else: + res2 = None + #print repr(res2) + return res2 + if issubclass(BResult, CTypesGenericPtr): + # The only pointers callbacks can return are void*s: + # http://bugs.python.org/issue5710 + callback_ctype = ctypes.CFUNCTYPE( + ctypes.c_void_p, + *[BArg._ctype for BArg in BArgs], + use_errno=True) + else: + callback_ctype = CTypesFunctionPtr._ctype + self._as_ctype_ptr = callback_ctype(callback) + self._address = ctypes.cast(self._as_ctype_ptr, + ctypes.c_void_p).value + self._own_callback = init + + @staticmethod + def _initialize(ctypes_ptr, value): + if value: + raise NotImplementedError("ctypes backend: not supported: " + "initializers for function pointers") + + def 
__repr__(self): + c_name = getattr(self, '_name', None) + if c_name: + i = self._reftypename.index('(* &)') + if self._reftypename[i-1] not in ' )*': + c_name = ' ' + c_name + c_name = self._reftypename.replace('(* &)', c_name) + return CTypesData.__repr__(self, c_name) + + def _get_own_repr(self): + if getattr(self, '_own_callback', None) is not None: + return 'calling %r' % (self._own_callback,) + return super(CTypesFunctionPtr, self)._get_own_repr() + + def __call__(self, *args): + if has_varargs: + assert len(args) >= len(BArgs) + extraargs = args[len(BArgs):] + args = args[:len(BArgs)] + else: + assert len(args) == len(BArgs) + ctypes_args = [] + for arg, BArg in zip(args, BArgs): + ctypes_args.append(BArg._arg_to_ctypes(arg)) + if has_varargs: + for i, arg in enumerate(extraargs): + if arg is None: + ctypes_args.append(ctypes.c_void_p(0)) # NULL + continue + if not isinstance(arg, CTypesData): + raise TypeError( + "argument %d passed in the variadic part " + "needs to be a cdata object (got %s)" % + (1 + len(BArgs) + i, type(arg).__name__)) + ctypes_args.append(arg._arg_to_ctypes(arg)) + result = self._as_ctype_ptr(*ctypes_args) + return BResult._from_ctypes(result) + # + CTypesFunctionPtr._fix_class() + return CTypesFunctionPtr + + def new_enum_type(self, name, enumerators, enumvalues, CTypesInt): + assert isinstance(name, str) + reverse_mapping = dict(zip(reversed(enumvalues), + reversed(enumerators))) + # + class CTypesEnum(CTypesInt): + __slots__ = [] + _reftypename = '%s &' % name + + def _get_own_repr(self): + value = self._value + try: + return '%d: %s' % (value, reverse_mapping[value]) + except KeyError: + return str(value) + + def _to_string(self, maxlen): + value = self._value + try: + return reverse_mapping[value] + except KeyError: + return str(value) + # + CTypesEnum._fix_class() + return CTypesEnum + + def get_errno(self): + return ctypes.get_errno() + + def set_errno(self, value): + ctypes.set_errno(value) + + def string(self, b, maxlen=-1): + 
return b._to_string(maxlen) + + def buffer(self, bptr, size=-1): + raise NotImplementedError("buffer() with ctypes backend") + + def sizeof(self, cdata_or_BType): + if isinstance(cdata_or_BType, CTypesData): + return cdata_or_BType._get_size_of_instance() + else: + assert issubclass(cdata_or_BType, CTypesData) + return cdata_or_BType._get_size() + + def alignof(self, BType): + assert issubclass(BType, CTypesData) + return BType._alignment() + + def newp(self, BType, source): + if not issubclass(BType, CTypesData): + raise TypeError + return BType._newp(source) + + def cast(self, BType, source): + return BType._cast_from(source) + + def callback(self, BType, source, error): + return BType(source, error) + + typeof = type + + def getcname(self, BType, replace_with): + return BType._get_c_name(replace_with) + + def typeoffsetof(self, BType, fieldname): + if fieldname is not None and issubclass(BType, CTypesGenericPtr): + BType = BType._BItem + if not issubclass(BType, CTypesBaseStructOrUnion): + raise TypeError("expected a struct or union ctype") + if fieldname is None: + return (BType, 0) + else: + BField = BType._bfield_types[fieldname] + if BField is Ellipsis: + raise TypeError("not supported for bitfields") + return (BField, BType._offsetof(fieldname)) + + def rawaddressof(self, BTypePtr, cdata, offset): + if isinstance(cdata, CTypesBaseStructOrUnion): + ptr = ctypes.pointer(type(cdata)._to_ctypes(cdata)) + elif isinstance(cdata, CTypesGenericPtr): + ptr = type(cdata)._to_ctypes(cdata) + else: + raise TypeError("expected a ") + if offset != 0: + ptr = ctypes.cast( + ctypes.c_void_p( + ctypes.cast(ptr, ctypes.c_void_p).value + offset), + type(ptr)) + return BTypePtr._from_ctypes(ptr) + + +class CTypesLibrary(object): + + def __init__(self, backend, cdll): + self.backend = backend + self.cdll = cdll + + def load_function(self, BType, name): + c_func = getattr(self.cdll, name) + funcobj = BType._from_ctypes(c_func) + funcobj._name = name + return funcobj + + def 
def resolve_common_type(commontype):
    """Translate a common C type name into a model type, memoized in _CACHE.

    Raises api.FFIError for names that are neither primitive, aliases in
    COMMON_TYPES, nor pointer forms of either.
    """
    cached = _CACHE.get(commontype)
    if cached is not None:
        return cached
    result = COMMON_TYPES.get(commontype, commontype)
    if isinstance(result, str):
        if result.endswith(' *'):
            # pointer alias, possibly const-qualified
            if result.startswith('const '):
                result = model.ConstPointerType(
                    resolve_common_type(result[6:-2]))
            else:
                result = model.PointerType(resolve_common_type(result[:-2]))
        elif result in model.PrimitiveType.ALL_PRIMITIVE_TYPES:
            result = model.PrimitiveType(result)
        elif commontype == result:
            # the alias chain went nowhere: unknown name
            raise api.FFIError("Unsupported type: %r. Please file a bug "
                               "if you think it should be." % (commontype,))
        else:
            result = resolve_common_type(result)    # recursively
    # else: result is already a BaseType
    assert isinstance(result, model.BaseTypeByIdentity)
    _CACHE[commontype] = result
    return result
"HDDEDATA": "HANDLE", + "HDESK": "HANDLE", + "HDROP": "HANDLE", + "HDWP": "HANDLE", + "HENHMETAFILE": "HANDLE", + "HFILE": "int", + "HFONT": "HANDLE", + "HGDIOBJ": "HANDLE", + "HGLOBAL": "HANDLE", + "HHOOK": "HANDLE", + "HICON": "HANDLE", + "HCURSOR": "HICON", + "HINSTANCE": "HANDLE", + "HKEY": "HANDLE", + "HKL": "HANDLE", + "HLOCAL": "HANDLE", + "HMENU": "HANDLE", + "HMETAFILE": "HANDLE", + "HMODULE": "HINSTANCE", + "HMONITOR": "HANDLE", + "HPALETTE": "HANDLE", + "HPEN": "HANDLE", + "HRESULT": "LONG", + "HRGN": "HANDLE", + "HRSRC": "HANDLE", + "HSZ": "HANDLE", + "WINSTA": "HANDLE", + "HWND": "HANDLE", + + "LANGID": "WORD", + "LCID": "DWORD", + "LCTYPE": "DWORD", + "LGRPID": "DWORD", + "LPARAM": "LONG_PTR", + "LPBOOL": "BOOL *", + "LPBYTE": "BYTE *", + "LPCOLORREF": "DWORD *", + "LPCSTR": "const char *", + + "LPCVOID": model.const_voidp_type, + "LPCWSTR": "const WCHAR *", + "LPCTSTR": "LPCWSTR", + "LPDWORD": "DWORD *", + "LPHANDLE": "HANDLE *", + "LPINT": "int *", + "LPLONG": "long *", + "LPSTR": "CHAR *", + "LPWSTR": "WCHAR *", + "LPTSTR": "LPWSTR", + "LPVOID": model.voidp_type, + "LPWORD": "WORD *", + "LRESULT": "LONG_PTR", + "PBOOL": "BOOL *", + "PBOOLEAN": "BOOLEAN *", + "PBYTE": "BYTE *", + "PCHAR": "CHAR *", + "PCSTR": "const CHAR *", + "PCTSTR": "LPCWSTR", + "PCWSTR": "const WCHAR *", + "PDWORD": "DWORD *", + "PDWORDLONG": "DWORDLONG *", + "PDWORD_PTR": "DWORD_PTR *", + "PDWORD32": "DWORD32 *", + "PDWORD64": "DWORD64 *", + "PFLOAT": "FLOAT *", + "PHALF_PTR": "HALF_PTR *", + "PHANDLE": "HANDLE *", + "PHKEY": "HKEY *", + "PINT": "int *", + "PINT_PTR": "INT_PTR *", + "PINT8": "INT8 *", + "PINT16": "INT16 *", + "PINT32": "INT32 *", + "PINT64": "INT64 *", + "PLCID": "PDWORD", + "PLONG": "LONG *", + "PLONGLONG": "LONGLONG *", + "PLONG_PTR": "LONG_PTR *", + "PLONG32": "LONG32 *", + "PLONG64": "LONG64 *", + "PSHORT": "SHORT *", + "PSIZE_T": "SIZE_T *", + "PSSIZE_T": "SSIZE_T *", + "PSTR": "CHAR *", + "PTBYTE": "TBYTE *", + "PTCHAR": "TCHAR *", + "PTSTR": "LPWSTR", + 
"PUCHAR": "UCHAR *", + "PUHALF_PTR": "UHALF_PTR *", + "PUINT": "UINT *", + "PUINT_PTR": "UINT_PTR *", + "PUINT8": "UINT8 *", + "PUINT16": "UINT16 *", + "PUINT32": "UINT32 *", + "PUINT64": "UINT64 *", + "PULONG": "ULONG *", + "PULONGLONG": "ULONGLONG *", + "PULONG_PTR": "ULONG_PTR *", + "PULONG32": "ULONG32 *", + "PULONG64": "ULONG64 *", + "PUSHORT": "USHORT *", + "PWCHAR": "WCHAR *", + "PWORD": "WORD *", + "PWSTR": "WCHAR *", + "QWORD": "unsigned long long", + "SC_HANDLE": "HANDLE", + "SC_LOCK": "LPVOID", + "SERVICE_STATUS_HANDLE": "HANDLE", + + "UNICODE_STRING": model.StructType( + "_UNICODE_STRING", + ["Length", + "MaximumLength", + "Buffer"], + [model.PrimitiveType("unsigned short"), + model.PrimitiveType("unsigned short"), + model.PointerType(model.PrimitiveType("wchar_t"))], + [-1, -1, -1]), + "PUNICODE_STRING": "UNICODE_STRING *", + "PCUNICODE_STRING": "const UNICODE_STRING *", + + "USN": "LONGLONG", + "VOID": model.void_type, + "WPARAM": "UINT_PTR", + }) + return result + + +if sys.platform == 'win32': + COMMON_TYPES.update(win_common_types(sys.maxsize)) diff --git a/Linux_i686/lib/python2.7/site-packages/cffi/cparser.py b/Linux_i686/lib/python2.7/site-packages/cffi/cparser.py new file mode 100644 index 0000000..99998ac --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cffi/cparser.py @@ -0,0 +1,537 @@ + +from . import api, model +from .commontypes import COMMON_TYPES, resolve_common_type +try: + from . 
import _pycparser as pycparser +except ImportError: + import pycparser +import weakref, re, sys + +try: + if sys.version_info < (3,): + import thread as _thread + else: + import _thread + lock = _thread.allocate_lock() +except ImportError: + lock = None + +_r_comment = re.compile(r"/\*.*?\*/|//.*?$", re.DOTALL | re.MULTILINE) +_r_define = re.compile(r"^\s*#\s*define\s+([A-Za-z_][A-Za-z_0-9]*)\s+(.*?)$", + re.MULTILINE) +_r_partial_enum = re.compile(r"=\s*\.\.\.\s*[,}]|\.\.\.\s*\}") +_r_enum_dotdotdot = re.compile(r"__dotdotdot\d+__$") +_r_partial_array = re.compile(r"\[\s*\.\.\.\s*\]") +_r_words = re.compile(r"\w+|\S") +_parser_cache = None + +def _get_parser(): + global _parser_cache + if _parser_cache is None: + _parser_cache = pycparser.CParser() + return _parser_cache + +def _preprocess(csource): + # Remove comments. NOTE: this only work because the cdef() section + # should not contain any string literal! + csource = _r_comment.sub(' ', csource) + # Remove the "#define FOO x" lines + macros = {} + for match in _r_define.finditer(csource): + macroname, macrovalue = match.groups() + macros[macroname] = macrovalue + csource = _r_define.sub('', csource) + # Replace "[...]" with "[__dotdotdotarray__]" + csource = _r_partial_array.sub('[__dotdotdotarray__]', csource) + # Replace "...}" with "__dotdotdotNUM__}". This construction should + # occur only at the end of enums; at the end of structs we have "...;}" + # and at the end of vararg functions "...);". Also replace "=...[,}]" + # with ",__dotdotdotNUM__[,}]": this occurs in the enums too, when + # giving an unknown value. + matches = list(_r_partial_enum.finditer(csource)) + for number, match in enumerate(reversed(matches)): + p = match.start() + if csource[p] == '=': + p2 = csource.find('...', p, match.end()) + assert p2 > p + csource = '%s,__dotdotdot%d__ %s' % (csource[:p], number, + csource[p2+3:]) + else: + assert csource[p:p+3] == '...' 
+ csource = '%s __dotdotdot%d__ %s' % (csource[:p], number, + csource[p+3:]) + # Replace all remaining "..." with the same name, "__dotdotdot__", + # which is declared with a typedef for the purpose of C parsing. + return csource.replace('...', ' __dotdotdot__ '), macros + +def _common_type_names(csource): + # Look in the source for what looks like usages of types from the + # list of common types. A "usage" is approximated here as the + # appearance of the word, minus a "definition" of the type, which + # is the last word in a "typedef" statement. Approximative only + # but should be fine for all the common types. + look_for_words = set(COMMON_TYPES) + look_for_words.add(';') + look_for_words.add('typedef') + words_used = set() + is_typedef = False + previous_word = '' + for word in _r_words.findall(csource): + if word in look_for_words: + if word == ';': + if is_typedef: + words_used.discard(previous_word) + look_for_words.discard(previous_word) + is_typedef = False + elif word == 'typedef': + is_typedef = True + else: # word in COMMON_TYPES + words_used.add(word) + previous_word = word + return words_used + + +class Parser(object): + + def __init__(self): + self._declarations = {} + self._anonymous_counter = 0 + self._structnode2type = weakref.WeakKeyDictionary() + self._override = False + self._packed = False + + def _parse(self, csource): + csource, macros = _preprocess(csource) + # XXX: for more efficiency we would need to poke into the + # internals of CParser... 
the following registers the + # typedefs, because their presence or absence influences the + # parsing itself (but what they are typedef'ed to plays no role) + ctn = _common_type_names(csource) + typenames = [] + for name in sorted(self._declarations): + if name.startswith('typedef '): + name = name[8:] + typenames.append(name) + ctn.discard(name) + typenames += sorted(ctn) + # + csourcelines = ['typedef int %s;' % typename for typename in typenames] + csourcelines.append('typedef int __dotdotdot__;') + csourcelines.append(csource) + csource = '\n'.join(csourcelines) + if lock is not None: + lock.acquire() # pycparser is not thread-safe... + try: + ast = _get_parser().parse(csource) + except pycparser.c_parser.ParseError as e: + self.convert_pycparser_error(e, csource) + finally: + if lock is not None: + lock.release() + return ast, macros + + def convert_pycparser_error(self, e, csource): + # xxx look for ":NUM:" at the start of str(e) and try to interpret + # it as a line number + line = None + msg = str(e) + if msg.startswith(':') and ':' in msg[1:]: + linenum = msg[1:msg.find(':',1)] + if linenum.isdigit(): + linenum = int(linenum, 10) + csourcelines = csource.splitlines() + if 1 <= linenum <= len(csourcelines): + line = csourcelines[linenum-1] + if line: + msg = 'cannot parse "%s"\n%s' % (line.strip(), msg) + else: + msg = 'parse error\n%s' % (msg,) + raise api.CDefError(msg) + + def parse(self, csource, override=False, packed=False): + prev_override = self._override + prev_packed = self._packed + try: + self._override = override + self._packed = packed + self._internal_parse(csource) + finally: + self._override = prev_override + self._packed = prev_packed + + def _internal_parse(self, csource): + ast, macros = self._parse(csource) + # add the macros + for key, value in macros.items(): + value = value.strip() + if value != '...': + raise api.CDefError('only supports the syntax "#define ' + '%s ..." 
for now (literally)' % key) + self._declare('macro ' + key, value) + # find the first "__dotdotdot__" and use that as a separator + # between the repeated typedefs and the real csource + iterator = iter(ast.ext) + for decl in iterator: + if decl.name == '__dotdotdot__': + break + # + for decl in iterator: + if isinstance(decl, pycparser.c_ast.Decl): + self._parse_decl(decl) + elif isinstance(decl, pycparser.c_ast.Typedef): + if not decl.name: + raise api.CDefError("typedef does not declare any name", + decl) + if (isinstance(decl.type.type, pycparser.c_ast.IdentifierType) + and decl.type.type.names == ['__dotdotdot__']): + realtype = model.unknown_type(decl.name) + elif (isinstance(decl.type, pycparser.c_ast.PtrDecl) and + isinstance(decl.type.type, pycparser.c_ast.TypeDecl) and + isinstance(decl.type.type.type, + pycparser.c_ast.IdentifierType) and + decl.type.type.type.names == ['__dotdotdot__']): + realtype = model.unknown_ptr_type(decl.name) + else: + realtype = self._get_type(decl.type, name=decl.name) + self._declare('typedef ' + decl.name, realtype) + else: + raise api.CDefError("unrecognized construct", decl) + + def _parse_decl(self, decl): + node = decl.type + if isinstance(node, pycparser.c_ast.FuncDecl): + tp = self._get_type(node, name=decl.name) + assert isinstance(tp, model.RawFunctionType) + tp = self._get_type_pointer(tp) + self._declare('function ' + decl.name, tp) + else: + if isinstance(node, pycparser.c_ast.Struct): + # XXX do we need self._declare in any of those? 
+ if node.decls is not None: + self._get_struct_union_enum_type('struct', node) + elif isinstance(node, pycparser.c_ast.Union): + if node.decls is not None: + self._get_struct_union_enum_type('union', node) + elif isinstance(node, pycparser.c_ast.Enum): + if node.values is not None: + self._get_struct_union_enum_type('enum', node) + elif not decl.name: + raise api.CDefError("construct does not declare any variable", + decl) + # + if decl.name: + tp = self._get_type(node, partial_length_ok=True) + if self._is_constant_globalvar(node): + self._declare('constant ' + decl.name, tp) + else: + self._declare('variable ' + decl.name, tp) + + def parse_type(self, cdecl): + ast, macros = self._parse('void __dummy(\n%s\n);' % cdecl) + assert not macros + exprnode = ast.ext[-1].type.args.params[0] + if isinstance(exprnode, pycparser.c_ast.ID): + raise api.CDefError("unknown identifier '%s'" % (exprnode.name,)) + return self._get_type(exprnode.type) + + def _declare(self, name, obj): + if name in self._declarations: + if self._declarations[name] is obj: + return + if not self._override: + raise api.FFIError( + "multiple declarations of %s (for interactive usage, " + "try cdef(xx, override=True))" % (name,)) + assert '__dotdotdot__' not in name.split() + self._declarations[name] = obj + + def _get_type_pointer(self, type, const=False): + if isinstance(type, model.RawFunctionType): + return type.as_function_pointer() + if const: + return model.ConstPointerType(type) + return model.PointerType(type) + + def _get_type(self, typenode, name=None, partial_length_ok=False): + # first, dereference typedefs, if we have it already parsed, we're good + if (isinstance(typenode, pycparser.c_ast.TypeDecl) and + isinstance(typenode.type, pycparser.c_ast.IdentifierType) and + len(typenode.type.names) == 1 and + ('typedef ' + typenode.type.names[0]) in self._declarations): + type = self._declarations['typedef ' + typenode.type.names[0]] + return type + # + if isinstance(typenode, 
pycparser.c_ast.ArrayDecl): + # array type + if typenode.dim is None: + length = None + else: + length = self._parse_constant( + typenode.dim, partial_length_ok=partial_length_ok) + return model.ArrayType(self._get_type(typenode.type), length) + # + if isinstance(typenode, pycparser.c_ast.PtrDecl): + # pointer type + const = (isinstance(typenode.type, pycparser.c_ast.TypeDecl) + and 'const' in typenode.type.quals) + return self._get_type_pointer(self._get_type(typenode.type), const) + # + if isinstance(typenode, pycparser.c_ast.TypeDecl): + type = typenode.type + if isinstance(type, pycparser.c_ast.IdentifierType): + # assume a primitive type. get it from .names, but reduce + # synonyms to a single chosen combination + names = list(type.names) + if names != ['signed', 'char']: # keep this unmodified + prefixes = {} + while names: + name = names[0] + if name in ('short', 'long', 'signed', 'unsigned'): + prefixes[name] = prefixes.get(name, 0) + 1 + del names[0] + else: + break + # ignore the 'signed' prefix below, and reorder the others + newnames = [] + for prefix in ('unsigned', 'short', 'long'): + for i in range(prefixes.get(prefix, 0)): + newnames.append(prefix) + if not names: + names = ['int'] # implicitly + if names == ['int']: # but kill it if 'short' or 'long' + if 'short' in prefixes or 'long' in prefixes: + names = [] + names = newnames + names + ident = ' '.join(names) + if ident == 'void': + return model.void_type + if ident == '__dotdotdot__': + raise api.FFIError('bad usage of "..."') + return resolve_common_type(ident) + # + if isinstance(type, pycparser.c_ast.Struct): + # 'struct foobar' + return self._get_struct_union_enum_type('struct', type, name) + # + if isinstance(type, pycparser.c_ast.Union): + # 'union foobar' + return self._get_struct_union_enum_type('union', type, name) + # + if isinstance(type, pycparser.c_ast.Enum): + # 'enum foobar' + return self._get_struct_union_enum_type('enum', type, name) + # + if isinstance(typenode, 
pycparser.c_ast.FuncDecl): + # a function type + return self._parse_function_type(typenode, name) + # + # nested anonymous structs or unions end up here + if isinstance(typenode, pycparser.c_ast.Struct): + return self._get_struct_union_enum_type('struct', typenode, name, + nested=True) + if isinstance(typenode, pycparser.c_ast.Union): + return self._get_struct_union_enum_type('union', typenode, name, + nested=True) + # + raise api.FFIError("bad or unsupported type declaration") + + def _parse_function_type(self, typenode, funcname=None): + params = list(getattr(typenode.args, 'params', [])) + ellipsis = ( + len(params) > 0 and + isinstance(params[-1].type, pycparser.c_ast.TypeDecl) and + isinstance(params[-1].type.type, + pycparser.c_ast.IdentifierType) and + params[-1].type.type.names == ['__dotdotdot__']) + if ellipsis: + params.pop() + if not params: + raise api.CDefError( + "%s: a function with only '(...)' as argument" + " is not correct C" % (funcname or 'in expression')) + elif (len(params) == 1 and + isinstance(params[0].type, pycparser.c_ast.TypeDecl) and + isinstance(params[0].type.type, pycparser.c_ast.IdentifierType) + and list(params[0].type.type.names) == ['void']): + del params[0] + args = [self._as_func_arg(self._get_type(argdeclnode.type)) + for argdeclnode in params] + result = self._get_type(typenode.type) + return model.RawFunctionType(tuple(args), result, ellipsis) + + def _as_func_arg(self, type): + if isinstance(type, model.ArrayType): + return model.PointerType(type.item) + elif isinstance(type, model.RawFunctionType): + return type.as_function_pointer() + else: + return type + + def _is_constant_globalvar(self, typenode): + if isinstance(typenode, pycparser.c_ast.PtrDecl): + return 'const' in typenode.quals + if isinstance(typenode, pycparser.c_ast.TypeDecl): + return 'const' in typenode.quals + return False + + def _get_struct_union_enum_type(self, kind, type, name=None, nested=False): + # First, a level of caching on the exact 'type' node 
of the AST. + # This is obscure, but needed because pycparser "unrolls" declarations + # such as "typedef struct { } foo_t, *foo_p" and we end up with + # an AST that is not a tree, but a DAG, with the "type" node of the + # two branches foo_t and foo_p of the trees being the same node. + # It's a bit silly but detecting "DAG-ness" in the AST tree seems + # to be the only way to distinguish this case from two independent + # structs. See test_struct_with_two_usages. + try: + return self._structnode2type[type] + except KeyError: + pass + # + # Note that this must handle parsing "struct foo" any number of + # times and always return the same StructType object. Additionally, + # one of these times (not necessarily the first), the fields of + # the struct can be specified with "struct foo { ...fields... }". + # If no name is given, then we have to create a new anonymous struct + # with no caching; in this case, the fields are either specified + # right now or never. + # + force_name = name + name = type.name + # + # get the type or create it if needed + if name is None: + # 'force_name' is used to guess a more readable name for + # anonymous structs, for the common case "typedef struct { } foo". 
+ if force_name is not None: + explicit_name = '$%s' % force_name + else: + self._anonymous_counter += 1 + explicit_name = '$%d' % self._anonymous_counter + tp = None + else: + explicit_name = name + key = '%s %s' % (kind, name) + tp = self._declarations.get(key, None) + # + if tp is None: + if kind == 'struct': + tp = model.StructType(explicit_name, None, None, None) + elif kind == 'union': + tp = model.UnionType(explicit_name, None, None, None) + elif kind == 'enum': + tp = self._build_enum_type(explicit_name, type.values) + else: + raise AssertionError("kind = %r" % (kind,)) + if name is not None: + self._declare(key, tp) + else: + if kind == 'enum' and type.values is not None: + raise NotImplementedError( + "enum %s: the '{}' declaration should appear on the first " + "time the enum is mentioned, not later" % explicit_name) + if not tp.forcename: + tp.force_the_name(force_name) + if tp.forcename and '$' in tp.name: + self._declare('anonymous %s' % tp.forcename, tp) + # + self._structnode2type[type] = tp + # + # enums: done here + if kind == 'enum': + return tp + # + # is there a 'type.decls'? If yes, then this is the place in the + # C sources that declare the fields. If no, then just return the + # existing type, possibly still incomplete. + if type.decls is None: + return tp + # + if tp.fldnames is not None: + raise api.CDefError("duplicate declaration of struct %s" % name) + fldnames = [] + fldtypes = [] + fldbitsize = [] + for decl in type.decls: + if (isinstance(decl.type, pycparser.c_ast.IdentifierType) and + ''.join(decl.type.names) == '__dotdotdot__'): + # XXX pycparser is inconsistent: 'names' should be a list + # of strings, but is sometimes just one string. Use + # str.join() as a way to cope with both. 
+ self._make_partial(tp, nested) + continue + if decl.bitsize is None: + bitsize = -1 + else: + bitsize = self._parse_constant(decl.bitsize) + self._partial_length = False + type = self._get_type(decl.type, partial_length_ok=True) + if self._partial_length: + self._make_partial(tp, nested) + if isinstance(type, model.StructType) and type.partial: + self._make_partial(tp, nested) + fldnames.append(decl.name or '') + fldtypes.append(type) + fldbitsize.append(bitsize) + tp.fldnames = tuple(fldnames) + tp.fldtypes = tuple(fldtypes) + tp.fldbitsize = tuple(fldbitsize) + if fldbitsize != [-1] * len(fldbitsize): + if isinstance(tp, model.StructType) and tp.partial: + raise NotImplementedError("%s: using both bitfields and '...;'" + % (tp,)) + tp.packed = self._packed + return tp + + def _make_partial(self, tp, nested): + if not isinstance(tp, model.StructOrUnion): + raise api.CDefError("%s cannot be partial" % (tp,)) + if not tp.has_c_name() and not nested: + raise NotImplementedError("%s is partial but has no C name" %(tp,)) + tp.partial = True + + def _parse_constant(self, exprnode, partial_length_ok=False): + # for now, limited to expressions that are an immediate number + # or negative number + if isinstance(exprnode, pycparser.c_ast.Constant): + return int(exprnode.value, 0) + # + if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and + exprnode.op == '-'): + return -self._parse_constant(exprnode.expr) + # + if partial_length_ok: + if (isinstance(exprnode, pycparser.c_ast.ID) and + exprnode.name == '__dotdotdotarray__'): + self._partial_length = True + return '...' 
+ # + raise api.FFIError("unsupported expression: expected a " + "simple numeric constant") + + def _build_enum_type(self, explicit_name, decls): + if decls is not None: + enumerators1 = [enum.name for enum in decls.enumerators] + enumerators = [s for s in enumerators1 + if not _r_enum_dotdotdot.match(s)] + partial = len(enumerators) < len(enumerators1) + enumerators = tuple(enumerators) + enumvalues = [] + nextenumvalue = 0 + for enum in decls.enumerators[:len(enumerators)]: + if enum.value is not None: + nextenumvalue = self._parse_constant(enum.value) + enumvalues.append(nextenumvalue) + nextenumvalue += 1 + enumvalues = tuple(enumvalues) + tp = model.EnumType(explicit_name, enumerators, enumvalues) + tp.partial = partial + else: # opaque enum + tp = model.EnumType(explicit_name, (), ()) + return tp + + def include(self, other): + for name, tp in other._declarations.items(): + kind = name.split(' ', 1)[0] + if kind in ('typedef', 'struct', 'union', 'enum'): + self._declare(name, tp) diff --git a/Linux_i686/lib/python2.7/site-packages/cffi/ffiplatform.py b/Linux_i686/lib/python2.7/site-packages/cffi/ffiplatform.py new file mode 100644 index 0000000..460ba90 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cffi/ffiplatform.py @@ -0,0 +1,111 @@ +import os + + +class VerificationError(Exception): + """ An error raised when verification fails + """ + +class VerificationMissing(Exception): + """ An error raised when incomplete structures are passed into + cdef, but no verification has been done + """ + + +def get_extension(srcfilename, modname, sources=(), **kwds): + from distutils.core import Extension + allsources = [srcfilename] + allsources.extend(sources) + return Extension(name=modname, sources=allsources, **kwds) + +def compile(tmpdir, ext): + """Compile a C extension module using distutils.""" + + saved_environ = os.environ.copy() + try: + outputfilename = _build(tmpdir, ext) + outputfilename = os.path.abspath(outputfilename) + finally: + # 
workaround for a distutils bugs where some env vars can + # become longer and longer every time it is used + for key, value in saved_environ.items(): + if os.environ.get(key) != value: + os.environ[key] = value + return outputfilename + +def _build(tmpdir, ext): + # XXX compact but horrible :-( + from distutils.core import Distribution + import distutils.errors + # + dist = Distribution({'ext_modules': [ext]}) + options = dist.get_option_dict('build_ext') + options['force'] = ('ffiplatform', True) + options['build_lib'] = ('ffiplatform', tmpdir) + options['build_temp'] = ('ffiplatform', tmpdir) + # + try: + dist.run_command('build_ext') + except (distutils.errors.CompileError, + distutils.errors.LinkError) as e: + raise VerificationError('%s: %s' % (e.__class__.__name__, e)) + # + cmd_obj = dist.get_command_obj('build_ext') + [soname] = cmd_obj.get_outputs() + return soname + +try: + from os.path import samefile +except ImportError: + def samefile(f1, f2): + return os.path.abspath(f1) == os.path.abspath(f2) + +def maybe_relative_path(path): + if not os.path.isabs(path): + return path # already relative + dir = path + names = [] + while True: + prevdir = dir + dir, name = os.path.split(prevdir) + if dir == prevdir or not dir: + return path # failed to make it relative + names.append(name) + try: + if samefile(dir, os.curdir): + names.reverse() + return os.path.join(*names) + except OSError: + pass + +# ____________________________________________________________ + +try: + int_or_long = (int, long) + import cStringIO +except NameError: + int_or_long = int # Python 3 + import io as cStringIO + +def _flatten(x, f): + if isinstance(x, str): + f.write('%ds%s' % (len(x), x)) + elif isinstance(x, dict): + keys = sorted(x.keys()) + f.write('%dd' % len(keys)) + for key in keys: + _flatten(key, f) + _flatten(x[key], f) + elif isinstance(x, (list, tuple)): + f.write('%dl' % len(x)) + for value in x: + _flatten(value, f) + elif isinstance(x, int_or_long): + f.write('%di' % 
(x,)) + else: + raise TypeError( + "the keywords to verify() contains unsupported object %r" % (x,)) + +def flatten(x): + f = cStringIO.StringIO() + _flatten(x, f) + return f.getvalue() diff --git a/Linux_i686/lib/python2.7/site-packages/cffi/gc_weakref.py b/Linux_i686/lib/python2.7/site-packages/cffi/gc_weakref.py new file mode 100644 index 0000000..a2c0967 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cffi/gc_weakref.py @@ -0,0 +1,19 @@ +from weakref import ref + + +class GcWeakrefs(object): + # code copied and adapted from WeakKeyDictionary. + + def __init__(self, ffi): + self.ffi = ffi + self.data = data = {} + def remove(k): + destructor, cdata = data.pop(k) + destructor(cdata) + self.remove = remove + + def build(self, cdata, destructor): + # make a new cdata of the same type as the original one + new_cdata = self.ffi.cast(self.ffi._backend.typeof(cdata), cdata) + self.data[ref(new_cdata, self.remove)] = destructor, cdata + return new_cdata diff --git a/Linux_i686/lib/python2.7/site-packages/cffi/lock.py b/Linux_i686/lib/python2.7/site-packages/cffi/lock.py new file mode 100644 index 0000000..db91b71 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cffi/lock.py @@ -0,0 +1,30 @@ +import sys + +if sys.version_info < (3,): + try: + from thread import allocate_lock + except ImportError: + from dummy_thread import allocate_lock +else: + try: + from _thread import allocate_lock + except ImportError: + from _dummy_thread import allocate_lock + + +##import sys +##l1 = allocate_lock + +##class allocate_lock(object): +## def __init__(self): +## self._real = l1() +## def __enter__(self): +## for i in range(4, 0, -1): +## print sys._getframe(i).f_code +## print +## return self._real.__enter__() +## def __exit__(self, *args): +## return self._real.__exit__(*args) +## def acquire(self, f): +## assert f is False +## return self._real.acquire(f) diff --git a/Linux_i686/lib/python2.7/site-packages/cffi/model.py 
b/Linux_i686/lib/python2.7/site-packages/cffi/model.py new file mode 100644 index 0000000..371153f --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cffi/model.py @@ -0,0 +1,499 @@ +import types +import weakref + +from .lock import allocate_lock + + +class BaseTypeByIdentity(object): + is_array_type = False + is_raw_function = False + + def get_c_name(self, replace_with='', context='a C file'): + result = self.c_name_with_marker + assert result.count('&') == 1 + # some logic duplication with ffi.getctype()... :-( + replace_with = replace_with.strip() + if replace_with: + if replace_with.startswith('*') and '&[' in result: + replace_with = '(%s)' % replace_with + elif not replace_with[0] in '[(': + replace_with = ' ' + replace_with + result = result.replace('&', replace_with) + if '$' in result: + from .ffiplatform import VerificationError + raise VerificationError( + "cannot generate '%s' in %s: unknown type name" + % (self._get_c_name(), context)) + return result + + def _get_c_name(self): + return self.c_name_with_marker.replace('&', '') + + def has_c_name(self): + return '$' not in self._get_c_name() + + def get_cached_btype(self, ffi, finishlist, can_delay=False): + try: + BType = ffi._cached_btypes[self] + except KeyError: + BType = self.build_backend_type(ffi, finishlist) + BType2 = ffi._cached_btypes.setdefault(self, BType) + assert BType2 is BType + return BType + + def __repr__(self): + return '<%s>' % (self._get_c_name(),) + + def _get_items(self): + return [(name, getattr(self, name)) for name in self._attrs_] + + +class BaseType(BaseTypeByIdentity): + + def __eq__(self, other): + return (self.__class__ == other.__class__ and + self._get_items() == other._get_items()) + + def __ne__(self, other): + return not self == other + + def __hash__(self): + return hash((self.__class__, tuple(self._get_items()))) + + +class VoidType(BaseType): + _attrs_ = () + + def __init__(self): + self.c_name_with_marker = 'void&' + + def build_backend_type(self, ffi, 
finishlist): + return global_cache(self, ffi, 'new_void_type') + +void_type = VoidType() + + +class PrimitiveType(BaseType): + _attrs_ = ('name',) + + ALL_PRIMITIVE_TYPES = { + 'char': 'c', + 'short': 'i', + 'int': 'i', + 'long': 'i', + 'long long': 'i', + 'signed char': 'i', + 'unsigned char': 'i', + 'unsigned short': 'i', + 'unsigned int': 'i', + 'unsigned long': 'i', + 'unsigned long long': 'i', + 'float': 'f', + 'double': 'f', + 'long double': 'f', + '_Bool': 'i', + # the following types are not primitive in the C sense + 'wchar_t': 'c', + 'int8_t': 'i', + 'uint8_t': 'i', + 'int16_t': 'i', + 'uint16_t': 'i', + 'int32_t': 'i', + 'uint32_t': 'i', + 'int64_t': 'i', + 'uint64_t': 'i', + 'intptr_t': 'i', + 'uintptr_t': 'i', + 'ptrdiff_t': 'i', + 'size_t': 'i', + 'ssize_t': 'i', + } + + def __init__(self, name): + assert name in self.ALL_PRIMITIVE_TYPES + self.name = name + self.c_name_with_marker = name + '&' + + def is_char_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'c' + def is_integer_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'i' + def is_float_type(self): + return self.ALL_PRIMITIVE_TYPES[self.name] == 'f' + + def build_backend_type(self, ffi, finishlist): + return global_cache(self, ffi, 'new_primitive_type', self.name) + + +class BaseFunctionType(BaseType): + _attrs_ = ('args', 'result', 'ellipsis') + + def __init__(self, args, result, ellipsis): + self.args = args + self.result = result + self.ellipsis = ellipsis + # + reprargs = [arg._get_c_name() for arg in self.args] + if self.ellipsis: + reprargs.append('...') + reprargs = reprargs or ['void'] + replace_with = self._base_pattern % (', '.join(reprargs),) + self.c_name_with_marker = ( + self.result.c_name_with_marker.replace('&', replace_with)) + + +class RawFunctionType(BaseFunctionType): + # Corresponds to a C type like 'int(int)', which is the C type of + # a function, but not a pointer-to-function. 
The backend has no + # notion of such a type; it's used temporarily by parsing. + _base_pattern = '(&)(%s)' + is_raw_function = True + + def build_backend_type(self, ffi, finishlist): + from . import api + raise api.CDefError("cannot render the type %r: it is a function " + "type, not a pointer-to-function type" % (self,)) + + def as_function_pointer(self): + return FunctionPtrType(self.args, self.result, self.ellipsis) + + +class FunctionPtrType(BaseFunctionType): + _base_pattern = '(*&)(%s)' + + def build_backend_type(self, ffi, finishlist): + result = self.result.get_cached_btype(ffi, finishlist) + args = [] + for tp in self.args: + args.append(tp.get_cached_btype(ffi, finishlist)) + return global_cache(self, ffi, 'new_function_type', + tuple(args), result, self.ellipsis) + + +class PointerType(BaseType): + _attrs_ = ('totype',) + _base_pattern = " *&" + _base_pattern_array = "(*&)" + + def __init__(self, totype): + self.totype = totype + if totype.is_array_type: + extra = self._base_pattern_array + else: + extra = self._base_pattern + self.c_name_with_marker = totype.c_name_with_marker.replace('&', extra) + + def build_backend_type(self, ffi, finishlist): + BItem = self.totype.get_cached_btype(ffi, finishlist, can_delay=True) + return global_cache(self, ffi, 'new_pointer_type', BItem) + +voidp_type = PointerType(void_type) + + +class ConstPointerType(PointerType): + _base_pattern = " const *&" + _base_pattern_array = "(const *&)" + +const_voidp_type = ConstPointerType(void_type) + + +class NamedPointerType(PointerType): + _attrs_ = ('totype', 'name') + + def __init__(self, totype, name): + PointerType.__init__(self, totype) + self.name = name + self.c_name_with_marker = name + '&' + + +class ArrayType(BaseType): + _attrs_ = ('item', 'length') + is_array_type = True + + def __init__(self, item, length): + self.item = item + self.length = length + # + if length is None: + brackets = '&[]' + elif length == '...': + brackets = '&[/*...*/]' + else: + brackets = 
'&[%d]' % length + self.c_name_with_marker = ( + self.item.c_name_with_marker.replace('&', brackets)) + + def resolve_length(self, newlength): + return ArrayType(self.item, newlength) + + def build_backend_type(self, ffi, finishlist): + if self.length == '...': + from . import api + raise api.CDefError("cannot render the type %r: unknown length" % + (self,)) + self.item.get_cached_btype(ffi, finishlist) # force the item BType + BPtrItem = PointerType(self.item).get_cached_btype(ffi, finishlist) + return global_cache(self, ffi, 'new_array_type', BPtrItem, self.length) + + +class StructOrUnionOrEnum(BaseTypeByIdentity): + _attrs_ = ('name',) + forcename = None + + def build_c_name_with_marker(self): + name = self.forcename or '%s %s' % (self.kind, self.name) + self.c_name_with_marker = name + '&' + + def force_the_name(self, forcename): + self.forcename = forcename + self.build_c_name_with_marker() + + def get_official_name(self): + assert self.c_name_with_marker.endswith('&') + return self.c_name_with_marker[:-1] + + +class StructOrUnion(StructOrUnionOrEnum): + fixedlayout = None + completed = False + partial = False + packed = False + + def __init__(self, name, fldnames, fldtypes, fldbitsize): + self.name = name + self.fldnames = fldnames + self.fldtypes = fldtypes + self.fldbitsize = fldbitsize + self.build_c_name_with_marker() + + def enumfields(self): + for name, type, bitsize in zip(self.fldnames, self.fldtypes, + self.fldbitsize): + if name == '' and isinstance(type, StructOrUnion): + # nested anonymous struct/union + for result in type.enumfields(): + yield result + else: + yield (name, type, bitsize) + + def force_flatten(self): + # force the struct or union to have a declaration that lists + # directly all fields returned by enumfields(), flattening + # nested anonymous structs/unions. 
+ names = [] + types = [] + bitsizes = [] + for name, type, bitsize in self.enumfields(): + names.append(name) + types.append(type) + bitsizes.append(bitsize) + self.fldnames = tuple(names) + self.fldtypes = tuple(types) + self.fldbitsize = tuple(bitsizes) + + def get_cached_btype(self, ffi, finishlist, can_delay=False): + BType = StructOrUnionOrEnum.get_cached_btype(self, ffi, finishlist, + can_delay) + if not can_delay: + self.finish_backend_type(ffi, finishlist) + return BType + + def finish_backend_type(self, ffi, finishlist): + if self.completed: + if self.completed != 2: + raise NotImplementedError("recursive structure declaration " + "for '%s'" % (self.name,)) + return + BType = ffi._cached_btypes[self] + if self.fldtypes is None: + return # not completing it: it's an opaque struct + # + self.completed = 1 + # + if self.fixedlayout is None: + fldtypes = [tp.get_cached_btype(ffi, finishlist) + for tp in self.fldtypes] + lst = list(zip(self.fldnames, fldtypes, self.fldbitsize)) + sflags = 0 + if self.packed: + sflags = 8 # SF_PACKED + ffi._backend.complete_struct_or_union(BType, lst, self, + -1, -1, sflags) + # + else: + fldtypes = [] + fieldofs, fieldsize, totalsize, totalalignment = self.fixedlayout + for i in range(len(self.fldnames)): + fsize = fieldsize[i] + ftype = self.fldtypes[i] + # + if isinstance(ftype, ArrayType) and ftype.length == '...': + # fix the length to match the total size + BItemType = ftype.item.get_cached_btype(ffi, finishlist) + nlen, nrest = divmod(fsize, ffi.sizeof(BItemType)) + if nrest != 0: + self._verification_error( + "field '%s.%s' has a bogus size?" 
% ( + self.name, self.fldnames[i] or '{}')) + ftype = ftype.resolve_length(nlen) + self.fldtypes = (self.fldtypes[:i] + (ftype,) + + self.fldtypes[i+1:]) + # + BFieldType = ftype.get_cached_btype(ffi, finishlist) + if isinstance(ftype, ArrayType) and ftype.length is None: + assert fsize == 0 + else: + bitemsize = ffi.sizeof(BFieldType) + if bitemsize != fsize: + self._verification_error( + "field '%s.%s' is declared as %d bytes, but is " + "really %d bytes" % (self.name, + self.fldnames[i] or '{}', + bitemsize, fsize)) + fldtypes.append(BFieldType) + # + lst = list(zip(self.fldnames, fldtypes, self.fldbitsize, fieldofs)) + ffi._backend.complete_struct_or_union(BType, lst, self, + totalsize, totalalignment) + self.completed = 2 + + def _verification_error(self, msg): + from .ffiplatform import VerificationError + raise VerificationError(msg) + + def check_not_partial(self): + if self.partial and self.fixedlayout is None: + from . import ffiplatform + raise ffiplatform.VerificationMissing(self._get_c_name()) + + def build_backend_type(self, ffi, finishlist): + self.check_not_partial() + finishlist.append(self) + # + return global_cache(self, ffi, 'new_%s_type' % self.kind, + self.get_official_name(), key=self) + + +class StructType(StructOrUnion): + kind = 'struct' + + +class UnionType(StructOrUnion): + kind = 'union' + + +class EnumType(StructOrUnionOrEnum): + kind = 'enum' + partial = False + partial_resolved = False + + def __init__(self, name, enumerators, enumvalues, baseinttype=None): + self.name = name + self.enumerators = enumerators + self.enumvalues = enumvalues + self.baseinttype = baseinttype + self.build_c_name_with_marker() + + def force_the_name(self, forcename): + StructOrUnionOrEnum.force_the_name(self, forcename) + if self.forcename is None: + name = self.get_official_name() + self.forcename = '$' + name.replace(' ', '_') + + def check_not_partial(self): + if self.partial and not self.partial_resolved: + from . 
import ffiplatform + raise ffiplatform.VerificationMissing(self._get_c_name()) + + def build_backend_type(self, ffi, finishlist): + self.check_not_partial() + base_btype = self.build_baseinttype(ffi, finishlist) + return global_cache(self, ffi, 'new_enum_type', + self.get_official_name(), + self.enumerators, self.enumvalues, + base_btype, key=self) + + def build_baseinttype(self, ffi, finishlist): + if self.baseinttype is not None: + return self.baseinttype.get_cached_btype(ffi, finishlist) + # + if self.enumvalues: + smallest_value = min(self.enumvalues) + largest_value = max(self.enumvalues) + else: + smallest_value = 0 + largest_value = 0 + if smallest_value < 0: # needs a signed type + sign = 1 + candidate1 = PrimitiveType("int") + candidate2 = PrimitiveType("long") + else: + sign = 0 + candidate1 = PrimitiveType("unsigned int") + candidate2 = PrimitiveType("unsigned long") + btype1 = candidate1.get_cached_btype(ffi, finishlist) + btype2 = candidate2.get_cached_btype(ffi, finishlist) + size1 = ffi.sizeof(btype1) + size2 = ffi.sizeof(btype2) + if (smallest_value >= ((-1) << (8*size1-1)) and + largest_value < (1 << (8*size1-sign))): + return btype1 + if (smallest_value >= ((-1) << (8*size2-1)) and + largest_value < (1 << (8*size2-sign))): + return btype2 + raise api.CDefError("%s values don't all fit into either 'long' " + "or 'unsigned long'" % self._get_c_name()) + +def unknown_type(name, structname=None): + if structname is None: + structname = '$%s' % name + tp = StructType(structname, None, None, None) + tp.force_the_name(name) + return tp + +def unknown_ptr_type(name, structname=None): + if structname is None: + structname = '*$%s' % name + tp = StructType(structname, None, None, None) + return NamedPointerType(tp, name) + + +global_lock = allocate_lock() + +def global_cache(srctype, ffi, funcname, *args, **kwds): + key = kwds.pop('key', (funcname, args)) + assert not kwds + try: + return ffi._backend.__typecache[key] + except KeyError: + pass + except 
AttributeError: + # initialize the __typecache attribute, either at the module level + # if ffi._backend is a module, or at the class level if ffi._backend + # is some instance. + if isinstance(ffi._backend, types.ModuleType): + ffi._backend.__typecache = weakref.WeakValueDictionary() + else: + type(ffi._backend).__typecache = weakref.WeakValueDictionary() + try: + res = getattr(ffi._backend, funcname)(*args) + except NotImplementedError as e: + raise NotImplementedError("%r: %s" % (srctype, e)) + # note that setdefault() on WeakValueDictionary is not atomic + # and contains a rare bug (http://bugs.python.org/issue19542); + # we have to use a lock and do it ourselves + cache = ffi._backend.__typecache + with global_lock: + res1 = cache.get(key) + if res1 is None: + cache[key] = res + return res + else: + return res1 + +def pointer_cache(ffi, BType): + return global_cache('?', ffi, 'new_pointer_type', BType) + +def attach_exception_info(e, name): + if e.args and type(e.args[0]) is str: + e.args = ('%s: %s' % (name, e.args[0]),) + e.args[1:] diff --git a/Linux_i686/lib/python2.7/site-packages/cffi/vengine_cpy.py b/Linux_i686/lib/python2.7/site-packages/cffi/vengine_cpy.py new file mode 100644 index 0000000..d9af334 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cffi/vengine_cpy.py @@ -0,0 +1,912 @@ +import sys, imp +from . import model, ffiplatform + + +class VCPythonEngine(object): + _class_key = 'x' + _gen_python_module = True + + def __init__(self, verifier): + self.verifier = verifier + self.ffi = verifier.ffi + self._struct_pending_verification = {} + self._types_of_builtin_functions = {} + + def patch_extension_kwds(self, kwds): + pass + + def find_module(self, module_name, path, so_suffixes): + try: + f, filename, descr = imp.find_module(module_name, path) + except ImportError: + return None + if f is not None: + f.close() + # Note that after a setuptools installation, there are both .py + # and .so files with the same basename. 
The code here relies on + # imp.find_module() locating the .so in priority. + if descr[0] not in so_suffixes: + return None + return filename + + def collect_types(self): + self._typesdict = {} + self._generate("collecttype") + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def _gettypenum(self, type): + # a KeyError here is a bug. please report it! :-) + return self._typesdict[type] + + def _do_collect_type(self, tp): + if ((not isinstance(tp, model.PrimitiveType) + or tp.name == 'long double') + and tp not in self._typesdict): + num = len(self._typesdict) + self._typesdict[tp] = num + + def write_source_to_f(self): + self.collect_types() + # + # The new module will have a _cffi_setup() function that receives + # objects from the ffi world, and that calls some setup code in + # the module. This setup code is split in several independent + # functions, e.g. one per constant. The functions are "chained" + # by ending in a tail call to each other. + # + # This is further split in two chained lists, depending on if we + # can do it at import-time or if we must wait for _cffi_setup() to + # provide us with the objects. This is needed because we + # need the values of the enum constants in order to build the + # that we may have to pass to _cffi_setup(). + # + # The following two 'chained_list_constants' items contains + # the head of these two chained lists, as a string that gives the + # call to do, if any. + self._chained_list_constants = ['0', '0'] + # + prnt = self._prnt + # first paste some standard set of lines that are mostly '#define' + prnt(cffimod_header) + prnt() + # then paste the C source given by the user, verbatim. + prnt(self.verifier.preamble) + prnt() + # + # call generate_cpy_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. + self._generate("decl") + # + # implement the function _cffi_setup_custom() as calling the + # head of the chained list. 
+ self._generate_setup_custom() + prnt() + # + # produce the method table, including the entries for the + # generated Python->C function wrappers, which are done + # by generate_cpy_function_method(). + prnt('static PyMethodDef _cffi_methods[] = {') + self._generate("method") + prnt(' {"_cffi_setup", _cffi_setup, METH_VARARGS},') + prnt(' {NULL, NULL} /* Sentinel */') + prnt('};') + prnt() + # + # standard init. + modname = self.verifier.get_module_name() + if sys.version_info >= (3,): + prnt('static struct PyModuleDef _cffi_module_def = {') + prnt(' PyModuleDef_HEAD_INIT,') + prnt(' "%s",' % modname) + prnt(' NULL,') + prnt(' -1,') + prnt(' _cffi_methods,') + prnt(' NULL, NULL, NULL, NULL') + prnt('};') + prnt() + initname = 'PyInit_%s' % modname + createmod = 'PyModule_Create(&_cffi_module_def)' + errorcase = 'return NULL' + finalreturn = 'return lib' + else: + initname = 'init%s' % modname + createmod = 'Py_InitModule("%s", _cffi_methods)' % modname + errorcase = 'return' + finalreturn = 'return' + prnt('PyMODINIT_FUNC') + prnt('%s(void)' % initname) + prnt('{') + prnt(' PyObject *lib;') + prnt(' lib = %s;' % createmod) + prnt(' if (lib == NULL || %s < 0)' % ( + self._chained_list_constants[False],)) + prnt(' %s;' % errorcase) + prnt(' _cffi_init();') + prnt(' %s;' % finalreturn) + prnt('}') + + def load_library(self): + # XXX review all usages of 'self' here! + # import it as a new extension module + try: + module = imp.load_dynamic(self.verifier.get_module_name(), + self.verifier.modulefilename) + except ImportError as e: + error = "importing %r: %s" % (self.verifier.modulefilename, e) + raise ffiplatform.VerificationError(error) + # + # call loading_cpy_struct() to get the struct layout inferred by + # the C compiler + self._load(module, 'loading') + # + # the C code will need the objects. Collect them in + # order in a list. 
+ revmapping = dict([(value, key) + for (key, value) in self._typesdict.items()]) + lst = [revmapping[i] for i in range(len(revmapping))] + lst = list(map(self.ffi._get_cached_btype, lst)) + # + # build the FFILibrary class and instance and call _cffi_setup(). + # this will set up some fields like '_cffi_types', and only then + # it will invoke the chained list of functions that will really + # build (notably) the constant objects, as if they are + # pointers, and store them as attributes on the 'library' object. + class FFILibrary(object): + _cffi_python_module = module + _cffi_ffi = self.ffi + _cffi_dir = [] + def __dir__(self): + return FFILibrary._cffi_dir + list(self.__dict__) + library = FFILibrary() + if module._cffi_setup(lst, ffiplatform.VerificationError, library): + import warnings + warnings.warn("reimporting %r might overwrite older definitions" + % (self.verifier.get_module_name())) + # + # finally, call the loaded_cpy_xxx() functions. This will perform + # the final adjustments, like copying the Python->C wrapper + # functions from the module to the 'library' object, and setting + # up the FFILibrary class with properties for the global C variables. 
+ self._load(module, 'loaded', library=library) + module._cffi_original_ffi = self.ffi + module._cffi_types_of_builtin_funcs = self._types_of_builtin_functions + return library + + def _get_declarations(self): + return sorted(self.ffi._parser._declarations.items()) + + def _generate(self, step_name): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_cpy_%s_%s' % (kind, + step_name)) + except AttributeError: + raise ffiplatform.VerificationError( + "not implemented in verify(): %r" % name) + try: + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _load(self, module, step_name, **kwds): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + method = getattr(self, '_%s_cpy_%s' % (step_name, kind)) + try: + method(tp, realname, module, **kwds) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _generate_nothing(self, tp, name): + pass + + def _loaded_noop(self, tp, name, module, **kwds): + pass + + # ---------- + + def _convert_funcarg_to_c(self, tp, fromvar, tovar, errcode): + extraarg = '' + if isinstance(tp, model.PrimitiveType): + if tp.is_integer_type() and tp.name != '_Bool': + converter = '_cffi_to_c_int' + extraarg = ', %s' % tp.name + else: + converter = '_cffi_to_c_%s' % (tp.name.replace(' ', '_'),) + errvalue = '-1' + # + elif isinstance(tp, model.PointerType): + self._convert_funcarg_to_c_ptr_or_array(tp, fromvar, + tovar, errcode) + return + # + elif isinstance(tp, (model.StructOrUnion, model.EnumType)): + # a struct (not a struct pointer) as a function argument + self._prnt(' if (_cffi_to_c((char *)&%s, _cffi_type(%d), %s) < 0)' + % (tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + return + # + elif isinstance(tp, model.FunctionPtrType): + converter = '(%s)_cffi_to_c_pointer' % tp.get_c_name('') + extraarg = ', _cffi_type(%d)' % self._gettypenum(tp) + 
errvalue = 'NULL' + # + else: + raise NotImplementedError(tp) + # + self._prnt(' %s = %s(%s%s);' % (tovar, converter, fromvar, extraarg)) + self._prnt(' if (%s == (%s)%s && PyErr_Occurred())' % ( + tovar, tp.get_c_name(''), errvalue)) + self._prnt(' %s;' % errcode) + + def _extra_local_variables(self, tp, localvars): + if isinstance(tp, model.PointerType): + localvars.add('Py_ssize_t datasize') + + def _convert_funcarg_to_c_ptr_or_array(self, tp, fromvar, tovar, errcode): + self._prnt(' datasize = _cffi_prepare_pointer_call_argument(') + self._prnt(' _cffi_type(%d), %s, (char **)&%s);' % ( + self._gettypenum(tp), fromvar, tovar)) + self._prnt(' if (datasize != 0) {') + self._prnt(' if (datasize < 0)') + self._prnt(' %s;' % errcode) + self._prnt(' %s = alloca(datasize);' % (tovar,)) + self._prnt(' memset((void *)%s, 0, datasize);' % (tovar,)) + self._prnt(' if (_cffi_convert_array_from_object(' + '(char *)%s, _cffi_type(%d), %s) < 0)' % ( + tovar, self._gettypenum(tp), fromvar)) + self._prnt(' %s;' % errcode) + self._prnt(' }') + + def _convert_expr_from_c(self, tp, var, context): + if isinstance(tp, model.PrimitiveType): + if tp.is_integer_type(): + return '_cffi_from_c_int(%s, %s)' % (var, tp.name) + elif tp.name != 'long double': + return '_cffi_from_c_%s(%s)' % (tp.name.replace(' ', '_'), var) + else: + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, (model.PointerType, model.FunctionPtrType)): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif isinstance(tp, model.ArrayType): + return '_cffi_from_c_pointer((char *)%s, _cffi_type(%d))' % ( + var, self._gettypenum(model.PointerType(tp.item))) + elif isinstance(tp, model.StructType): + if tp.fldnames is None: + raise TypeError("'%s' is used as %s, but is opaque" % ( + tp._get_c_name(), context)) + return '_cffi_from_c_struct((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + elif 
isinstance(tp, model.EnumType): + return '_cffi_from_c_deref((char *)&%s, _cffi_type(%d))' % ( + var, self._gettypenum(tp)) + else: + raise NotImplementedError(tp) + + # ---------- + # typedefs: generates no code so far + + _generate_cpy_typedef_collecttype = _generate_nothing + _generate_cpy_typedef_decl = _generate_nothing + _generate_cpy_typedef_method = _generate_nothing + _loading_cpy_typedef = _loaded_noop + _loaded_cpy_typedef = _loaded_noop + + # ---------- + # function declarations + + def _generate_cpy_function_collecttype(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + self._do_collect_type(tp) + else: + # don't call _do_collect_type(tp) in this common case, + # otherwise test_autofilled_struct_as_argument fails + for type in tp.args: + self._do_collect_type(type) + self._do_collect_type(tp.result) + + def _generate_cpy_function_decl(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no CPython wrapper) + self._generate_cpy_const(False, name, tp) + return + prnt = self._prnt + numargs = len(tp.args) + if numargs == 0: + argname = 'no_arg' + elif numargs == 1: + argname = 'arg0' + else: + argname = 'args' + prnt('static PyObject *') + prnt('_cffi_f_%s(PyObject *self, PyObject *%s)' % (name, argname)) + prnt('{') + # + context = 'argument of %s' % name + for i, type in enumerate(tp.args): + prnt(' %s;' % type.get_c_name(' x%d' % i, context)) + # + localvars = set() + for type in tp.args: + self._extra_local_variables(type, localvars) + for decl in localvars: + prnt(' %s;' % (decl,)) + # + if not isinstance(tp.result, model.VoidType): + result_code = 'result = ' + context = 'result of %s' % name + prnt(' %s;' % tp.result.get_c_name(' result', context)) + else: + result_code = '' + # + if len(tp.args) > 1: + rng = 
range(len(tp.args)) + for i in rng: + prnt(' PyObject *arg%d;' % i) + prnt() + prnt(' if (!PyArg_ParseTuple(args, "%s:%s", %s))' % ( + 'O' * numargs, name, ', '.join(['&arg%d' % i for i in rng]))) + prnt(' return NULL;') + prnt() + # + for i, type in enumerate(tp.args): + self._convert_funcarg_to_c(type, 'arg%d' % i, 'x%d' % i, + 'return NULL') + prnt() + # + prnt(' Py_BEGIN_ALLOW_THREADS') + prnt(' _cffi_restore_errno();') + prnt(' { %s%s(%s); }' % ( + result_code, name, + ', '.join(['x%d' % i for i in range(len(tp.args))]))) + prnt(' _cffi_save_errno();') + prnt(' Py_END_ALLOW_THREADS') + prnt() + # + if result_code: + prnt(' return %s;' % + self._convert_expr_from_c(tp.result, 'result', 'result type')) + else: + prnt(' Py_INCREF(Py_None);') + prnt(' return Py_None;') + prnt('}') + prnt() + + def _generate_cpy_function_method(self, tp, name): + if tp.ellipsis: + return + numargs = len(tp.args) + if numargs == 0: + meth = 'METH_NOARGS' + elif numargs == 1: + meth = 'METH_O' + else: + meth = 'METH_VARARGS' + self._prnt(' {"%s", _cffi_f_%s, %s},' % (name, name, meth)) + + _loading_cpy_function = _loaded_noop + + def _loaded_cpy_function(self, tp, name, module, library): + if tp.ellipsis: + return + func = getattr(module, name) + setattr(library, name, func) + self._types_of_builtin_functions[func] = tp + + # ---------- + # named structs + + _generate_cpy_struct_collecttype = _generate_nothing + def _generate_cpy_struct_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'struct', name) + def _generate_cpy_struct_method(self, tp, name): + self._generate_struct_or_union_method(tp, 'struct', name) + def _loading_cpy_struct(self, tp, name, module): + self._loading_struct_or_union(tp, 'struct', name, module) + def _loaded_cpy_struct(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + _generate_cpy_union_collecttype = _generate_nothing + def _generate_cpy_union_decl(self, tp, name): + assert name == tp.name + 
self._generate_struct_or_union_decl(tp, 'union', name) + def _generate_cpy_union_method(self, tp, name): + self._generate_struct_or_union_method(tp, 'union', name) + def _loading_cpy_union(self, tp, name, module): + self._loading_struct_or_union(tp, 'union', name, module) + def _loaded_cpy_union(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def _generate_struct_or_union_decl(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + checkfuncname = '_cffi_check_%s_%s' % (prefix, name) + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + cname = ('%s %s' % (prefix, name)).strip() + # + prnt = self._prnt + prnt('static void %s(%s *p)' % (checkfuncname, cname)) + prnt('{') + prnt(' /* only to generate compile-time warnings or errors */') + for fname, ftype, fbitsize in tp.enumfields(): + if (isinstance(ftype, model.PrimitiveType) + and ftype.is_integer_type()) or fbitsize >= 0: + # accept all integers, but complain on float or double + prnt(' (void)((p->%s) << 1);' % fname) + else: + # only accept exactly the type declared. 
+ try: + prnt(' { %s = &p->%s; (void)tmp; }' % ( + ftype.get_c_name('*tmp', 'field %r'%fname), fname)) + except ffiplatform.VerificationError as e: + prnt(' /* %s */' % str(e)) # cannot verify it, ignore + prnt('}') + prnt('static PyObject *') + prnt('%s(PyObject *self, PyObject *noarg)' % (layoutfuncname,)) + prnt('{') + prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname) + prnt(' static Py_ssize_t nums[] = {') + prnt(' sizeof(%s),' % cname) + prnt(' offsetof(struct _cffi_aligncheck, y),') + for fname, ftype, fbitsize in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + prnt(' offsetof(%s, %s),' % (cname, fname)) + if isinstance(ftype, model.ArrayType) and ftype.length is None: + prnt(' 0, /* %s */' % ftype._get_c_name()) + else: + prnt(' sizeof(((%s *)0)->%s),' % (cname, fname)) + prnt(' -1') + prnt(' };') + prnt(' return _cffi_get_struct_layout(nums);') + prnt(' /* the next line is not executed, but compiled */') + prnt(' %s(0);' % (checkfuncname,)) + prnt('}') + prnt() + + def _generate_struct_or_union_method(self, tp, prefix, name): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + self._prnt(' {"%s", %s, METH_NOARGS},' % (layoutfuncname, + layoutfuncname)) + + def _loading_struct_or_union(self, tp, prefix, name, module): + if tp.fldnames is None: + return # nothing to do with opaque structs + layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) + # + function = getattr(module, layoutfuncname) + layout = function() + if isinstance(tp, model.StructOrUnion) and tp.partial: + # use the function()'s sizes and offsets to guide the + # layout of the struct + totalsize = layout[0] + totalalignment = layout[1] + fieldofs = layout[2::2] + fieldsize = layout[3::2] + tp.force_flatten() + assert len(fieldofs) == len(fieldsize) == len(tp.fldnames) + tp.fixedlayout = fieldofs, fieldsize, totalsize, totalalignment + else: + cname = ('%s %s' % (prefix, 
name)).strip() + self._struct_pending_verification[tp] = layout, cname + + def _loaded_struct_or_union(self, tp): + if tp.fldnames is None: + return # nothing to do with opaque structs + self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered + + if tp in self._struct_pending_verification: + # check that the layout sizes and offsets match the real ones + def check(realvalue, expectedvalue, msg): + if realvalue != expectedvalue: + raise ffiplatform.VerificationError( + "%s (we have %d, but C compiler says %d)" + % (msg, expectedvalue, realvalue)) + ffi = self.ffi + BStruct = ffi._get_cached_btype(tp) + layout, cname = self._struct_pending_verification.pop(tp) + check(layout[0], ffi.sizeof(BStruct), "wrong total size") + check(layout[1], ffi.alignof(BStruct), "wrong total alignment") + i = 2 + for fname, ftype, fbitsize in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + check(layout[i], ffi.offsetof(BStruct, fname), + "wrong offset for field %r" % (fname,)) + if layout[i+1] != 0: + BField = ffi._get_cached_btype(ftype) + check(layout[i+1], ffi.sizeof(BField), + "wrong size for field %r" % (fname,)) + i += 2 + assert i == len(layout) + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. 
+ + _generate_cpy_anonymous_collecttype = _generate_nothing + + def _generate_cpy_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_cpy_enum_decl(tp, name, '') + else: + self._generate_struct_or_union_decl(tp, '', name) + + def _generate_cpy_anonymous_method(self, tp, name): + if not isinstance(tp, model.EnumType): + self._generate_struct_or_union_method(tp, '', name) + + def _loading_cpy_anonymous(self, tp, name, module): + if isinstance(tp, model.EnumType): + self._loading_cpy_enum(tp, name, module) + else: + self._loading_struct_or_union(tp, '', name, module) + + def _loaded_cpy_anonymous(self, tp, name, module, **kwds): + if isinstance(tp, model.EnumType): + self._loaded_cpy_enum(tp, name, module, **kwds) + else: + self._loaded_struct_or_union(tp) + + # ---------- + # constants, likely declared with '#define' + + def _generate_cpy_const(self, is_int, name, tp=None, category='const', + vartp=None, delayed=True, size_too=False): + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + prnt('static int %s(PyObject *lib)' % funcname) + prnt('{') + prnt(' PyObject *o;') + prnt(' int res;') + if not is_int: + prnt(' %s;' % (vartp or tp).get_c_name(' i', name)) + else: + assert category == 'const' + # + if not is_int: + if category == 'var': + realexpr = '&' + name + else: + realexpr = name + prnt(' i = (%s);' % (realexpr,)) + prnt(' o = %s;' % (self._convert_expr_from_c(tp, 'i', + 'variable type'),)) + assert delayed + else: + prnt(' if (LONG_MIN <= (%s) && (%s) <= LONG_MAX)' % (name, name)) + prnt(' o = PyInt_FromLong((long)(%s));' % (name,)) + prnt(' else if ((%s) <= 0)' % (name,)) + prnt(' o = PyLong_FromLongLong((long long)(%s));' % (name,)) + prnt(' else') + prnt(' o = PyLong_FromUnsignedLongLong(' + '(unsigned long long)(%s));' % (name,)) + prnt(' if (o == NULL)') + prnt(' return -1;') + if size_too: + prnt(' {') + prnt(' PyObject *o1 = o;') + prnt(' o = Py_BuildValue("On", o1, (Py_ssize_t)sizeof(%s));' + % 
(name,)) + prnt(' Py_DECREF(o1);') + prnt(' if (o == NULL)') + prnt(' return -1;') + prnt(' }') + prnt(' res = PyObject_SetAttrString(lib, "%s", o);' % name) + prnt(' Py_DECREF(o);') + prnt(' if (res < 0)') + prnt(' return -1;') + prnt(' return %s;' % self._chained_list_constants[delayed]) + self._chained_list_constants[delayed] = funcname + '(lib)' + prnt('}') + prnt() + + def _generate_cpy_constant_collecttype(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + if not is_int: + self._do_collect_type(tp) + + def _generate_cpy_constant_decl(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + self._generate_cpy_const(is_int, name, tp) + + _generate_cpy_constant_method = _generate_nothing + _loading_cpy_constant = _loaded_noop + _loaded_cpy_constant = _loaded_noop + + # ---------- + # enums + + def _generate_cpy_enum_decl(self, tp, name, prefix='enum'): + if tp.partial: + for enumerator in tp.enumerators: + self._generate_cpy_const(True, enumerator, delayed=False) + return + # + funcname = '_cffi_e_%s_%s' % (prefix, name) + prnt = self._prnt + prnt('static int %s(PyObject *lib)' % funcname) + prnt('{') + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + if enumvalue < 0: + prnt(' if ((%s) >= 0 || (long)(%s) != %dL) {' % ( + enumerator, enumerator, enumvalue)) + else: + prnt(' if ((%s) < 0 || (unsigned long)(%s) != %dUL) {' % ( + enumerator, enumerator, enumvalue)) + prnt(' char buf[64];') + prnt(' if ((%s) < 0)' % enumerator) + prnt(' snprintf(buf, 63, "%%ld", (long)(%s));' % enumerator) + prnt(' else') + prnt(' snprintf(buf, 63, "%%lu", (unsigned long)(%s));' % + enumerator) + prnt(' PyErr_Format(_cffi_VerificationError,') + prnt(' "enum %s: %s has the real value %s, ' + 'not %s",') + prnt(' "%s", "%s", buf, "%d");' % ( + name, enumerator, enumvalue)) + prnt(' return -1;') + prnt(' }') + prnt(' return %s;' % self._chained_list_constants[True]) + 
    def _loaded_cpy_variable(self, tp, name, module, library):
        """Fix up the global variable 'name' on the freshly-built library.

        Arrays stay as plain attributes (possibly re-cast to their real
        length); scalars are turned into a class-level property that
        reads/writes through the C pointer, so 'lib.name = x' updates the
        actual C global.
        """
        value = getattr(library, name)
        if isinstance(tp, model.ArrayType):   # int a[5] is "constant" in the
                                              # sense that "a=..." is forbidden
            if tp.length == '...':
                # open-ended array: the C side also reported its byte size;
                # derive the element count from it
                assert isinstance(value, tuple)
                (value, size) = value
                BItemType = self.ffi._get_cached_btype(tp.item)
                length, rest = divmod(size, self.ffi.sizeof(BItemType))
                if rest != 0:
                    raise ffiplatform.VerificationError(
                        "bad size: %r does not seem to be an array of %s" %
                        (name, tp.item))
                tp = tp.resolve_length(length)
            # 'value' is a <cdata 'type *'> which we have to replace with
            # a <cdata 'type[N]'> if the N is actually known
            if tp.length is not None:
                BArray = self.ffi._get_cached_btype(tp)
                value = self.ffi.cast(BArray, value)
                setattr(library, name, value)
            return
        # remove ptr=<cdata 'int *'> from the library instance, and replace
        # it by a property on the class, which reads/writes into ptr[0].
        ptr = value
        delattr(library, name)
        def getter(library):
            return ptr[0]
        def setter(library, value):
            ptr[0] = value
        setattr(type(library), name, property(getter, setter))
        # keep dir(lib) accurate even though the attribute moved to the class
        type(library)._cffi_dir.append(name)
_cffi_from_c_ulong PyLong_FromUnsignedLong +#define _cffi_from_c_longlong PyLong_FromLongLong +#define _cffi_from_c_ulonglong PyLong_FromUnsignedLongLong + +#define _cffi_to_c_double PyFloat_AsDouble +#define _cffi_to_c_float PyFloat_AsDouble + +#define _cffi_from_c_int(x, type) \ + (((type)-1) > 0 ? /* unsigned */ \ + (sizeof(type) < sizeof(long) ? PyInt_FromLong(x) : \ + sizeof(type) == sizeof(long) ? PyLong_FromUnsignedLong(x) : \ + PyLong_FromUnsignedLongLong(x)) \ + : (sizeof(type) <= sizeof(long) ? PyInt_FromLong(x) : \ + PyLong_FromLongLong(x))) + +#define _cffi_to_c_int(o, type) \ + (sizeof(type) == 1 ? (((type)-1) > 0 ? _cffi_to_c_u8(o) \ + : _cffi_to_c_i8(o)) : \ + sizeof(type) == 2 ? (((type)-1) > 0 ? _cffi_to_c_u16(o) \ + : _cffi_to_c_i16(o)) : \ + sizeof(type) == 4 ? (((type)-1) > 0 ? _cffi_to_c_u32(o) \ + : _cffi_to_c_i32(o)) : \ + sizeof(type) == 8 ? (((type)-1) > 0 ? _cffi_to_c_u64(o) \ + : _cffi_to_c_i64(o)) : \ + (Py_FatalError("unsupported size for type " #type), 0)) + +#define _cffi_to_c_i8 \ + ((int(*)(PyObject *))_cffi_exports[1]) +#define _cffi_to_c_u8 \ + ((int(*)(PyObject *))_cffi_exports[2]) +#define _cffi_to_c_i16 \ + ((int(*)(PyObject *))_cffi_exports[3]) +#define _cffi_to_c_u16 \ + ((int(*)(PyObject *))_cffi_exports[4]) +#define _cffi_to_c_i32 \ + ((int(*)(PyObject *))_cffi_exports[5]) +#define _cffi_to_c_u32 \ + ((unsigned int(*)(PyObject *))_cffi_exports[6]) +#define _cffi_to_c_i64 \ + ((long long(*)(PyObject *))_cffi_exports[7]) +#define _cffi_to_c_u64 \ + ((unsigned long long(*)(PyObject *))_cffi_exports[8]) +#define _cffi_to_c_char \ + ((int(*)(PyObject *))_cffi_exports[9]) +#define _cffi_from_c_pointer \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[10]) +#define _cffi_to_c_pointer \ + ((char *(*)(PyObject *, CTypeDescrObject *))_cffi_exports[11]) +#define _cffi_get_struct_layout \ + ((PyObject *(*)(Py_ssize_t[]))_cffi_exports[12]) +#define _cffi_restore_errno \ + ((void(*)(void))_cffi_exports[13]) +#define 
_cffi_save_errno \ + ((void(*)(void))_cffi_exports[14]) +#define _cffi_from_c_char \ + ((PyObject *(*)(char))_cffi_exports[15]) +#define _cffi_from_c_deref \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[16]) +#define _cffi_to_c \ + ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[17]) +#define _cffi_from_c_struct \ + ((PyObject *(*)(char *, CTypeDescrObject *))_cffi_exports[18]) +#define _cffi_to_c_wchar_t \ + ((wchar_t(*)(PyObject *))_cffi_exports[19]) +#define _cffi_from_c_wchar_t \ + ((PyObject *(*)(wchar_t))_cffi_exports[20]) +#define _cffi_to_c_long_double \ + ((long double(*)(PyObject *))_cffi_exports[21]) +#define _cffi_to_c__Bool \ + ((_Bool(*)(PyObject *))_cffi_exports[22]) +#define _cffi_prepare_pointer_call_argument \ + ((Py_ssize_t(*)(CTypeDescrObject *, PyObject *, char **))_cffi_exports[23]) +#define _cffi_convert_array_from_object \ + ((int(*)(char *, CTypeDescrObject *, PyObject *))_cffi_exports[24]) +#define _CFFI_NUM_EXPORTS 25 + +typedef struct _ctypedescr CTypeDescrObject; + +static void *_cffi_exports[_CFFI_NUM_EXPORTS]; +static PyObject *_cffi_types, *_cffi_VerificationError; + +static int _cffi_setup_custom(PyObject *lib); /* forward */ + +static PyObject *_cffi_setup(PyObject *self, PyObject *args) +{ + PyObject *library; + int was_alive = (_cffi_types != NULL); + if (!PyArg_ParseTuple(args, "OOO", &_cffi_types, &_cffi_VerificationError, + &library)) + return NULL; + Py_INCREF(_cffi_types); + Py_INCREF(_cffi_VerificationError); + if (_cffi_setup_custom(library) < 0) + return NULL; + return PyBool_FromLong(was_alive); +} + +static void _cffi_init(void) +{ + PyObject *module = PyImport_ImportModule("_cffi_backend"); + PyObject *c_api_object; + + if (module == NULL) + return; + + c_api_object = PyObject_GetAttrString(module, "_C_API"); + if (c_api_object == NULL) + return; + if (!PyCapsule_CheckExact(c_api_object)) { + Py_DECREF(c_api_object); + PyErr_SetNone(PyExc_ImportError); + return; + } + memcpy(_cffi_exports, 
PyCapsule_GetPointer(c_api_object, "cffi"), + _CFFI_NUM_EXPORTS * sizeof(void *)); + Py_DECREF(c_api_object); +} + +#define _cffi_type(num) ((CTypeDescrObject *)PyList_GET_ITEM(_cffi_types, num)) + +/**********/ +''' diff --git a/Linux_i686/lib/python2.7/site-packages/cffi/vengine_gen.py b/Linux_i686/lib/python2.7/site-packages/cffi/vengine_gen.py new file mode 100644 index 0000000..f8715c7 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cffi/vengine_gen.py @@ -0,0 +1,566 @@ +import sys, os +import types + +from . import model, ffiplatform + + +class VGenericEngine(object): + _class_key = 'g' + _gen_python_module = False + + def __init__(self, verifier): + self.verifier = verifier + self.ffi = verifier.ffi + self.export_symbols = [] + self._struct_pending_verification = {} + + def patch_extension_kwds(self, kwds): + # add 'export_symbols' to the dictionary. Note that we add the + # list before filling it. When we fill it, it will thus also show + # up in kwds['export_symbols']. + kwds.setdefault('export_symbols', self.export_symbols) + + def find_module(self, module_name, path, so_suffixes): + for so_suffix in so_suffixes: + basename = module_name + so_suffix + if path is None: + path = sys.path + for dirname in path: + filename = os.path.join(dirname, basename) + if os.path.isfile(filename): + return filename + + def collect_types(self): + pass # not needed in the generic engine + + def _prnt(self, what=''): + self._f.write(what + '\n') + + def write_source_to_f(self): + prnt = self._prnt + # first paste some standard set of lines that are mostly '#include' + prnt(cffimod_header) + # then paste the C source given by the user, verbatim. + prnt(self.verifier.preamble) + # + # call generate_gen_xxx_decl(), for every xxx found from + # ffi._parser._declarations. This generates all the functions. 
+ self._generate('decl') + # + # on Windows, distutils insists on putting init_cffi_xyz in + # 'export_symbols', so instead of fighting it, just give up and + # give it one + if sys.platform == 'win32': + if sys.version_info >= (3,): + prefix = 'PyInit_' + else: + prefix = 'init' + modname = self.verifier.get_module_name() + prnt("void %s%s(void) { }\n" % (prefix, modname)) + + def load_library(self): + # import it with the CFFI backend + backend = self.ffi._backend + # needs to make a path that contains '/', on Posix + filename = os.path.join(os.curdir, self.verifier.modulefilename) + module = backend.load_library(filename) + # + # call loading_gen_struct() to get the struct layout inferred by + # the C compiler + self._load(module, 'loading') + + # build the FFILibrary class and instance, this is a module subclass + # because modules are expected to have usually-constant-attributes and + # in PyPy this means the JIT is able to treat attributes as constant, + # which we want. + class FFILibrary(types.ModuleType): + _cffi_generic_module = module + _cffi_ffi = self.ffi + _cffi_dir = [] + def __dir__(self): + return FFILibrary._cffi_dir + library = FFILibrary("") + # + # finally, call the loaded_gen_xxx() functions. This will set + # up the 'library' object. 
+ self._load(module, 'loaded', library=library) + return library + + def _get_declarations(self): + return sorted(self.ffi._parser._declarations.items()) + + def _generate(self, step_name): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + try: + method = getattr(self, '_generate_gen_%s_%s' % (kind, + step_name)) + except AttributeError: + raise ffiplatform.VerificationError( + "not implemented in verify(): %r" % name) + try: + method(tp, realname) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _load(self, module, step_name, **kwds): + for name, tp in self._get_declarations(): + kind, realname = name.split(' ', 1) + method = getattr(self, '_%s_gen_%s' % (step_name, kind)) + try: + method(tp, realname, module, **kwds) + except Exception as e: + model.attach_exception_info(e, name) + raise + + def _generate_nothing(self, tp, name): + pass + + def _loaded_noop(self, tp, name, module, **kwds): + pass + + # ---------- + # typedefs: generates no code so far + + _generate_gen_typedef_decl = _generate_nothing + _loading_gen_typedef = _loaded_noop + _loaded_gen_typedef = _loaded_noop + + # ---------- + # function declarations + + def _generate_gen_function_decl(self, tp, name): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + # cannot support vararg functions better than this: check for its + # exact type (including the fixed arguments), and build it as a + # constant function pointer (no _cffi_f_%s wrapper) + self._generate_gen_const(False, name, tp) + return + prnt = self._prnt + numargs = len(tp.args) + argnames = [] + for i, type in enumerate(tp.args): + indirection = '' + if isinstance(type, model.StructOrUnion): + indirection = '*' + argnames.append('%sx%d' % (indirection, i)) + context = 'argument of %s' % name + arglist = [type.get_c_name(' %s' % arg, context) + for type, arg in zip(tp.args, argnames)] + arglist = ', '.join(arglist) or 'void' + wrappername = '_cffi_f_%s' % name + 
self.export_symbols.append(wrappername) + funcdecl = ' %s(%s)' % (wrappername, arglist) + context = 'result of %s' % name + prnt(tp.result.get_c_name(funcdecl, context)) + prnt('{') + # + if not isinstance(tp.result, model.VoidType): + result_code = 'return ' + else: + result_code = '' + prnt(' %s%s(%s);' % (result_code, name, ', '.join(argnames))) + prnt('}') + prnt() + + _loading_gen_function = _loaded_noop + + def _loaded_gen_function(self, tp, name, module, library): + assert isinstance(tp, model.FunctionPtrType) + if tp.ellipsis: + newfunction = self._load_constant(False, tp, name, module) + else: + indirections = [] + base_tp = tp + if any(isinstance(typ, model.StructOrUnion) for typ in tp.args): + indirect_args = [] + for i, typ in enumerate(tp.args): + if isinstance(typ, model.StructOrUnion): + typ = model.PointerType(typ) + indirections.append((i, typ)) + indirect_args.append(typ) + tp = model.FunctionPtrType(tuple(indirect_args), + tp.result, tp.ellipsis) + BFunc = self.ffi._get_cached_btype(tp) + wrappername = '_cffi_f_%s' % name + newfunction = module.load_function(BFunc, wrappername) + for i, typ in indirections: + newfunction = self._make_struct_wrapper(newfunction, i, typ, + base_tp) + setattr(library, name, newfunction) + type(library)._cffi_dir.append(name) + + def _make_struct_wrapper(self, oldfunc, i, tp, base_tp): + backend = self.ffi._backend + BType = self.ffi._get_cached_btype(tp) + def newfunc(*args): + args = args[:i] + (backend.newp(BType, args[i]),) + args[i+1:] + return oldfunc(*args) + newfunc._cffi_base_type = base_tp + return newfunc + + # ---------- + # named structs + + def _generate_gen_struct_decl(self, tp, name): + assert name == tp.name + self._generate_struct_or_union_decl(tp, 'struct', name) + + def _loading_gen_struct(self, tp, name, module): + self._loading_struct_or_union(tp, 'struct', name, module) + + def _loaded_gen_struct(self, tp, name, module, **kwds): + self._loaded_struct_or_union(tp) + + def 
    def _generate_struct_or_union_decl(self, tp, prefix, name):
        """Emit the C check/layout functions for one struct or union.

        Two functions are generated: a never-called checker whose only
        purpose is to make the C compiler warn/error when the cdef()
        field types disagree with the real ones, and an exported layout
        function returning, by index, the size/alignment of the type and
        the offset/size of every field (terminated by -1).
        """
        if tp.fldnames is None:
            return # nothing to do with opaque structs
        checkfuncname = '_cffi_check_%s_%s' % (prefix, name)
        layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name)
        cname = ('%s %s' % (prefix, name)).strip()
        #
        prnt = self._prnt
        prnt('static void %s(%s *p)' % (checkfuncname, cname))
        prnt('{')
        prnt('  /* only to generate compile-time warnings or errors */')
        for fname, ftype, fbitsize in tp.enumfields():
            if (isinstance(ftype, model.PrimitiveType)
                and ftype.is_integer_type()) or fbitsize >= 0:
                # accept all integers, but complain on float or double
                # (shifting a float is a compile error)
                prnt('  (void)((p->%s) << 1);' % fname)
            else:
                # only accept exactly the type declared:  assigning the
                # field's address to a pointer of the declared type fails
                # to compile if the types differ
                try:
                    prnt('  { %s = &p->%s; (void)tmp; }' % (
                        ftype.get_c_name('*tmp', 'field %r'%fname), fname))
                except ffiplatform.VerificationError as e:
                    prnt('  /* %s */' % str(e))   # cannot verify it, ignore
        prnt('}')
        self.export_symbols.append(layoutfuncname)
        prnt('ssize_t %s(ssize_t i)' % (layoutfuncname,))
        prnt('{')
        # char-plus-member trick: offsetof(y) gives the type's alignment
        prnt('  struct _cffi_aligncheck { char x; %s y; };' % cname)
        prnt('  static ssize_t nums[] = {')
        prnt('    sizeof(%s),' % cname)
        prnt('    offsetof(struct _cffi_aligncheck, y),')
        for fname, ftype, fbitsize in tp.enumfields():
            if fbitsize >= 0:
                continue      # xxx ignore fbitsize for now
            prnt('    offsetof(%s, %s),' % (cname, fname))
            if isinstance(ftype, model.ArrayType) and ftype.length is None:
                # open-ended array: size unknown, record 0 as a marker
                prnt('    0,  /* %s */' % ftype._get_c_name())
            else:
                prnt('    sizeof(((%s *)0)->%s),' % (cname, fname))
        prnt('    -1')
        prnt('  };')
        prnt('  return nums[i];')
        # reference the checker so the compiler actually analyzes it
        prnt('  /* the next line is not executed, but compiled */')
        prnt('  %s(0);' % (checkfuncname,))
        prnt('}')
        prnt()
return # nothing to do with opaque structs + self.ffi._get_cached_btype(tp) # force 'fixedlayout' to be considered + + if tp in self._struct_pending_verification: + # check that the layout sizes and offsets match the real ones + def check(realvalue, expectedvalue, msg): + if realvalue != expectedvalue: + raise ffiplatform.VerificationError( + "%s (we have %d, but C compiler says %d)" + % (msg, expectedvalue, realvalue)) + ffi = self.ffi + BStruct = ffi._get_cached_btype(tp) + layout, cname = self._struct_pending_verification.pop(tp) + check(layout[0], ffi.sizeof(BStruct), "wrong total size") + check(layout[1], ffi.alignof(BStruct), "wrong total alignment") + i = 2 + for fname, ftype, fbitsize in tp.enumfields(): + if fbitsize >= 0: + continue # xxx ignore fbitsize for now + check(layout[i], ffi.offsetof(BStruct, fname), + "wrong offset for field %r" % (fname,)) + if layout[i+1] != 0: + BField = ffi._get_cached_btype(ftype) + check(layout[i+1], ffi.sizeof(BField), + "wrong size for field %r" % (fname,)) + i += 2 + assert i == len(layout) + + # ---------- + # 'anonymous' declarations. These are produced for anonymous structs + # or unions; the 'name' is obtained by a typedef. 
+ + def _generate_gen_anonymous_decl(self, tp, name): + if isinstance(tp, model.EnumType): + self._generate_gen_enum_decl(tp, name, '') + else: + self._generate_struct_or_union_decl(tp, '', name) + + def _loading_gen_anonymous(self, tp, name, module): + if isinstance(tp, model.EnumType): + self._loading_gen_enum(tp, name, module, '') + else: + self._loading_struct_or_union(tp, '', name, module) + + def _loaded_gen_anonymous(self, tp, name, module, **kwds): + if isinstance(tp, model.EnumType): + self._loaded_gen_enum(tp, name, module, **kwds) + else: + self._loaded_struct_or_union(tp) + + # ---------- + # constants, likely declared with '#define' + + def _generate_gen_const(self, is_int, name, tp=None, category='const'): + prnt = self._prnt + funcname = '_cffi_%s_%s' % (category, name) + self.export_symbols.append(funcname) + if is_int: + assert category == 'const' + prnt('int %s(long long *out_value)' % funcname) + prnt('{') + prnt(' *out_value = (long long)(%s);' % (name,)) + prnt(' return (%s) <= 0;' % (name,)) + prnt('}') + else: + assert tp is not None + prnt(tp.get_c_name(' %s(void)' % funcname, name),) + prnt('{') + if category == 'var': + ampersand = '&' + else: + ampersand = '' + prnt(' return (%s%s);' % (ampersand, name)) + prnt('}') + prnt() + + def _generate_gen_constant_decl(self, tp, name): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + self._generate_gen_const(is_int, name, tp) + + _loading_gen_constant = _loaded_noop + + def _load_constant(self, is_int, tp, name, module): + funcname = '_cffi_const_%s' % name + if is_int: + BType = self.ffi._typeof_locked("long long*")[0] + BFunc = self.ffi._typeof_locked("int(*)(long long*)")[0] + function = module.load_function(BFunc, funcname) + p = self.ffi.new(BType) + negative = function(p) + value = int(p[0]) + if value < 0 and not negative: + BLongLong = self.ffi._typeof_locked("long long")[0] + value += (1 << (8*self.ffi.sizeof(BLongLong))) + else: + BFunc = 
self.ffi._typeof_locked(tp.get_c_name('(*)(void)', name))[0] + function = module.load_function(BFunc, funcname) + value = function() + return value + + def _loaded_gen_constant(self, tp, name, module, library): + is_int = isinstance(tp, model.PrimitiveType) and tp.is_integer_type() + value = self._load_constant(is_int, tp, name, module) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + + # ---------- + # enums + + def _generate_gen_enum_decl(self, tp, name, prefix='enum'): + if tp.partial: + for enumerator in tp.enumerators: + self._generate_gen_const(True, enumerator) + return + # + funcname = '_cffi_e_%s_%s' % (prefix, name) + self.export_symbols.append(funcname) + prnt = self._prnt + prnt('int %s(char *out_error)' % funcname) + prnt('{') + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + if enumvalue < 0: + prnt(' if ((%s) >= 0 || (long)(%s) != %dL) {' % ( + enumerator, enumerator, enumvalue)) + else: + prnt(' if ((%s) < 0 || (unsigned long)(%s) != %dUL) {' % ( + enumerator, enumerator, enumvalue)) + prnt(' char buf[64];') + prnt(' if ((%s) < 0)' % enumerator) + prnt(' snprintf(buf, 63, "%%ld", (long)(%s));' % enumerator) + prnt(' else') + prnt(' snprintf(buf, 63, "%%lu", (unsigned long)(%s));' % + enumerator) + prnt(' snprintf(out_error, 255,' + ' "%s has the real value %s, not %s",') + prnt(' "%s", buf, "%d");' % ( + enumerator, enumvalue)) + prnt(' return -1;') + prnt(' }') + prnt(' return 0;') + prnt('}') + prnt() + + def _loading_gen_enum(self, tp, name, module, prefix='enum'): + if tp.partial: + enumvalues = [self._load_constant(True, tp, enumerator, module) + for enumerator in tp.enumerators] + tp.enumvalues = tuple(enumvalues) + tp.partial_resolved = True + else: + BType = self.ffi._typeof_locked("char[]")[0] + BFunc = self.ffi._typeof_locked("int(*)(char*)")[0] + funcname = '_cffi_e_%s_%s' % (prefix, name) + function = module.load_function(BFunc, funcname) + p = self.ffi.new(BType, 256) + if function(p) < 0: + 
error = self.ffi.string(p) + if sys.version_info >= (3,): + error = str(error, 'utf-8') + raise ffiplatform.VerificationError(error) + + def _loaded_gen_enum(self, tp, name, module, library): + for enumerator, enumvalue in zip(tp.enumerators, tp.enumvalues): + setattr(library, enumerator, enumvalue) + type(library)._cffi_dir.append(enumerator) + + # ---------- + # macros: for now only for integers + + def _generate_gen_macro_decl(self, tp, name): + assert tp == '...' + self._generate_gen_const(True, name) + + _loading_gen_macro = _loaded_noop + + def _loaded_gen_macro(self, tp, name, module, library): + value = self._load_constant(True, tp, name, module) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + + # ---------- + # global variables + + def _generate_gen_variable_decl(self, tp, name): + if isinstance(tp, model.ArrayType): + if tp.length == '...': + prnt = self._prnt + funcname = '_cffi_sizeof_%s' % (name,) + self.export_symbols.append(funcname) + prnt("size_t %s(void)" % funcname) + prnt("{") + prnt(" return sizeof(%s);" % (name,)) + prnt("}") + tp_ptr = model.PointerType(tp.item) + self._generate_gen_const(False, name, tp_ptr) + else: + tp_ptr = model.PointerType(tp) + self._generate_gen_const(False, name, tp_ptr, category='var') + + _loading_gen_variable = _loaded_noop + + def _loaded_gen_variable(self, tp, name, module, library): + if isinstance(tp, model.ArrayType): # int a[5] is "constant" in the + # sense that "a=..." 
is forbidden + if tp.length == '...': + funcname = '_cffi_sizeof_%s' % (name,) + BFunc = self.ffi._typeof_locked('size_t(*)(void)')[0] + function = module.load_function(BFunc, funcname) + size = function() + BItemType = self.ffi._get_cached_btype(tp.item) + length, rest = divmod(size, self.ffi.sizeof(BItemType)) + if rest != 0: + raise ffiplatform.VerificationError( + "bad size: %r does not seem to be an array of %s" % + (name, tp.item)) + tp = tp.resolve_length(length) + tp_ptr = model.PointerType(tp.item) + value = self._load_constant(False, tp_ptr, name, module) + # 'value' is a which we have to replace with + # a if the N is actually known + if tp.length is not None: + BArray = self.ffi._get_cached_btype(tp) + value = self.ffi.cast(BArray, value) + setattr(library, name, value) + type(library)._cffi_dir.append(name) + return + # remove ptr= from the library instance, and replace + # it by a property on the class, which reads/writes into ptr[0]. + funcname = '_cffi_var_%s' % name + BFunc = self.ffi._typeof_locked(tp.get_c_name('*(*)(void)', name))[0] + function = module.load_function(BFunc, funcname) + ptr = function() + def getter(library): + return ptr[0] + def setter(library, value): + ptr[0] = value + setattr(type(library), name, property(getter, setter)) + type(library)._cffi_dir.append(name) + +cffimod_header = r''' +#include +#include +#include +#include +#include /* XXX for ssize_t on some platforms */ + +#ifdef _WIN32 +# include +# define snprintf _snprintf +typedef __int8 int8_t; +typedef __int16 int16_t; +typedef __int32 int32_t; +typedef __int64 int64_t; +typedef unsigned __int8 uint8_t; +typedef unsigned __int16 uint16_t; +typedef unsigned __int32 uint32_t; +typedef unsigned __int64 uint64_t; +typedef SSIZE_T ssize_t; +typedef unsigned char _Bool; +#else +# include +#endif +''' diff --git a/Linux_i686/lib/python2.7/site-packages/cffi/verifier.py b/Linux_i686/lib/python2.7/site-packages/cffi/verifier.py new file mode 100644 index 0000000..9603a7e --- 
/dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cffi/verifier.py @@ -0,0 +1,243 @@ +import sys, os, binascii, imp, shutil +from . import __version__ +from . import ffiplatform + + +class Verifier(object): + + def __init__(self, ffi, preamble, tmpdir=None, modulename=None, + ext_package=None, tag='', force_generic_engine=False, **kwds): + self.ffi = ffi + self.preamble = preamble + if not modulename: + flattened_kwds = ffiplatform.flatten(kwds) + vengine_class = _locate_engine_class(ffi, force_generic_engine) + self._vengine = vengine_class(self) + self._vengine.patch_extension_kwds(kwds) + self.kwds = kwds + # + if modulename: + if tag: + raise TypeError("can't specify both 'modulename' and 'tag'") + else: + key = '\x00'.join([sys.version[:3], __version__, preamble, + flattened_kwds] + + ffi._cdefsources) + if sys.version_info >= (3,): + key = key.encode('utf-8') + k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff) + k1 = k1.lstrip('0x').rstrip('L') + k2 = hex(binascii.crc32(key[1::2]) & 0xffffffff) + k2 = k2.lstrip('0').rstrip('L') + modulename = '_cffi_%s_%s%s%s' % (tag, self._vengine._class_key, + k1, k2) + suffix = _get_so_suffixes()[0] + self.tmpdir = tmpdir or _caller_dir_pycache() + self.sourcefilename = os.path.join(self.tmpdir, modulename + '.c') + self.modulefilename = os.path.join(self.tmpdir, modulename + suffix) + self.ext_package = ext_package + self._has_source = False + self._has_module = False + + def write_source(self, file=None): + """Write the C source code. It is produced in 'self.sourcefilename', + which can be tweaked beforehand.""" + with self.ffi._lock: + if self._has_source and file is None: + raise ffiplatform.VerificationError( + "source code already written") + self._write_source(file) + + def compile_module(self): + """Write the C source code (if not done already) and compile it. 
+ This produces a dynamic link library in 'self.modulefilename'.""" + with self.ffi._lock: + if self._has_module: + raise ffiplatform.VerificationError("module already compiled") + if not self._has_source: + self._write_source() + self._compile_module() + + def load_library(self): + """Get a C module from this Verifier instance. + Returns an instance of a FFILibrary class that behaves like the + objects returned by ffi.dlopen(), but that delegates all + operations to the C module. If necessary, the C code is written + and compiled first. + """ + with self.ffi._lock: + if not self._has_module: + self._locate_module() + if not self._has_module: + if not self._has_source: + self._write_source() + self._compile_module() + return self._load_library() + + def get_module_name(self): + basename = os.path.basename(self.modulefilename) + # kill both the .so extension and the other .'s, as introduced + # by Python 3: 'basename.cpython-33m.so' + basename = basename.split('.', 1)[0] + # and the _d added in Python 2 debug builds --- but try to be + # conservative and not kill a legitimate _d + if basename.endswith('_d') and hasattr(sys, 'gettotalrefcount'): + basename = basename[:-2] + return basename + + def get_extension(self): + if not self._has_source: + with self.ffi._lock: + if not self._has_source: + self._write_source() + sourcename = ffiplatform.maybe_relative_path(self.sourcefilename) + modname = self.get_module_name() + return ffiplatform.get_extension(sourcename, modname, **self.kwds) + + def generates_python_module(self): + return self._vengine._gen_python_module + + # ---------- + + def _locate_module(self): + if not os.path.isfile(self.modulefilename): + if self.ext_package: + try: + pkg = __import__(self.ext_package, None, None, ['__doc__']) + except ImportError: + return # cannot import the package itself, give up + # (e.g. 
it might be called differently before installation) + path = pkg.__path__ + else: + path = None + filename = self._vengine.find_module(self.get_module_name(), path, + _get_so_suffixes()) + if filename is None: + return + self.modulefilename = filename + self._vengine.collect_types() + self._has_module = True + + def _write_source(self, file=None): + must_close = (file is None) + if must_close: + _ensure_dir(self.sourcefilename) + file = open(self.sourcefilename, 'w') + self._vengine._f = file + try: + self._vengine.write_source_to_f() + finally: + del self._vengine._f + if must_close: + file.close() + if must_close: + self._has_source = True + + def _compile_module(self): + # compile this C source + tmpdir = os.path.dirname(self.sourcefilename) + outputfilename = ffiplatform.compile(tmpdir, self.get_extension()) + try: + same = ffiplatform.samefile(outputfilename, self.modulefilename) + except OSError: + same = False + if not same: + _ensure_dir(self.modulefilename) + shutil.move(outputfilename, self.modulefilename) + self._has_module = True + + def _load_library(self): + assert self._has_module + return self._vengine.load_library() + +# ____________________________________________________________ + +_FORCE_GENERIC_ENGINE = False # for tests + +def _locate_engine_class(ffi, force_generic_engine): + if _FORCE_GENERIC_ENGINE: + force_generic_engine = True + if not force_generic_engine: + if '__pypy__' in sys.builtin_module_names: + force_generic_engine = True + else: + try: + import _cffi_backend + except ImportError: + _cffi_backend = '?' + if ffi._backend is not _cffi_backend: + force_generic_engine = True + if force_generic_engine: + from . import vengine_gen + return vengine_gen.VGenericEngine + else: + from . 
import vengine_cpy + return vengine_cpy.VCPythonEngine + +# ____________________________________________________________ + +_TMPDIR = None + +def _caller_dir_pycache(): + if _TMPDIR: + return _TMPDIR + filename = sys._getframe(2).f_code.co_filename + return os.path.abspath(os.path.join(os.path.dirname(filename), + '__pycache__')) + +def set_tmpdir(dirname): + """Set the temporary directory to use instead of __pycache__.""" + global _TMPDIR + _TMPDIR = dirname + +def cleanup_tmpdir(tmpdir=None, keep_so=False): + """Clean up the temporary directory by removing all files in it + called `_cffi_*.{c,so}` as well as the `build` subdirectory.""" + tmpdir = tmpdir or _caller_dir_pycache() + try: + filelist = os.listdir(tmpdir) + except OSError: + return + if keep_so: + suffix = '.c' # only remove .c files + else: + suffix = _get_so_suffixes()[0].lower() + for fn in filelist: + if fn.lower().startswith('_cffi_') and ( + fn.lower().endswith(suffix) or fn.lower().endswith('.c')): + try: + os.unlink(os.path.join(tmpdir, fn)) + except OSError: + pass + clean_dir = [os.path.join(tmpdir, 'build')] + for dir in clean_dir: + try: + for fn in os.listdir(dir): + fn = os.path.join(dir, fn) + if os.path.isdir(fn): + clean_dir.append(fn) + else: + os.unlink(fn) + except OSError: + pass + +def _get_so_suffixes(): + suffixes = [] + for suffix, mode, type in imp.get_suffixes(): + if type == imp.C_EXTENSION: + suffixes.append(suffix) + + if not suffixes: + # bah, no C_EXTENSION available. 
Occurs on pypy without cpyext + if sys.platform == 'win32': + suffixes = [".pyd"] + else: + suffixes = [".so"] + + return suffixes + +def _ensure_dir(filename): + try: + os.makedirs(os.path.dirname(filename)) + except OSError: + pass diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/PKG-INFO b/Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/PKG-INFO new file mode 100644 index 0000000..a3eb033 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/PKG-INFO @@ -0,0 +1,76 @@ +Metadata-Version: 1.1 +Name: cryptography +Version: 0.4 +Summary: cryptography is a package which provides cryptographic recipes and primitives to Python developers. +Home-page: https://github.com/pyca/cryptography +Author: The cryptography developers +Author-email: cryptography-dev@python.org +License: Apache License, Version 2.0 +Description: Cryptography + ============ + + .. image:: https://travis-ci.org/pyca/cryptography.svg?branch=master + :target: https://travis-ci.org/pyca/cryptography + + .. image:: https://coveralls.io/repos/pyca/cryptography/badge.png?branch=master + :target: https://coveralls.io/r/pyca/cryptography?branch=master + + + ``cryptography`` is a package which provides cryptographic recipes and + primitives to Python developers. Our goal is for it to be your "cryptographic + standard library". It supports Python 2.6-2.7, Python 3.2+, and PyPy. + + ``cryptography`` includes both high level recipes, and low level interfaces to + common cryptographic algorithms such as symmetric ciphers, message digests and + key derivation functions. For example, to encrypt something with + ``cryptography``'s high level symmetric encryption recipe: + + .. code-block:: pycon + + >>> from cryptography.fernet import Fernet + >>> # Put this somewhere safe! + >>> key = Fernet.generate_key() + >>> f = Fernet(key) + >>> token = f.encrypt(b"A really secret message. Not for prying eyes.") + >>> token + '...' 
+ >>> f.decrypt(token) + 'A really secret message. Not for prying eyes.' + + You can find more information in the `documentation`_. + + Discussion + ~~~~~~~~~~ + + If you run into bugs, you can file them in our `issue tracker`_. + + We maintain a `cryptography-dev`_ mailing list for development discussion. + + You can also join ``#cryptography-dev`` on Freenode to ask questions or get + involved. + + + .. _`documentation`: https://cryptography.io/ + .. _`issue tracker`: https://github.com/pyca/cryptography/issues + .. _`cryptography-dev`: https://mail.python.org/mailman/listinfo/cryptography-dev + +Platform: UNKNOWN +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Natural Language :: English +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: POSIX +Classifier: Operating System :: POSIX :: BSD +Classifier: Operating System :: POSIX :: Linux +Classifier: Operating System :: Microsoft :: Windows +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.2 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Security :: Cryptography diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/SOURCES.txt b/Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/SOURCES.txt new file mode 100644 index 0000000..d15cf6e --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/SOURCES.txt @@ -0,0 +1,189 @@ +AUTHORS.rst +CHANGELOG.rst +CONTRIBUTING.rst +LICENSE +MANIFEST.in 
+README.rst +setup.cfg +setup.py +cryptography/__about__.py +cryptography/__init__.py +cryptography/exceptions.py +cryptography/fernet.py +cryptography/utils.py +cryptography.egg-info/PKG-INFO +cryptography.egg-info/SOURCES.txt +cryptography.egg-info/dependency_links.txt +cryptography.egg-info/not-zip-safe +cryptography.egg-info/requires.txt +cryptography.egg-info/top_level.txt +cryptography/hazmat/__init__.py +cryptography/hazmat/backends/__init__.py +cryptography/hazmat/backends/interfaces.py +cryptography/hazmat/backends/multibackend.py +cryptography/hazmat/backends/commoncrypto/__init__.py +cryptography/hazmat/backends/commoncrypto/backend.py +cryptography/hazmat/backends/openssl/__init__.py +cryptography/hazmat/backends/openssl/backend.py +cryptography/hazmat/bindings/__init__.py +cryptography/hazmat/bindings/utils.py +cryptography/hazmat/bindings/__pycache__/_Cryptography_cffi_444d7397xa22f8491.c +cryptography/hazmat/bindings/__pycache__/_Cryptography_cffi_be05eb56x6daa9a79.c +cryptography/hazmat/bindings/commoncrypto/__init__.py +cryptography/hazmat/bindings/commoncrypto/binding.py +cryptography/hazmat/bindings/commoncrypto/common_cryptor.py +cryptography/hazmat/bindings/commoncrypto/common_digest.py +cryptography/hazmat/bindings/commoncrypto/common_hmac.py +cryptography/hazmat/bindings/commoncrypto/common_key_derivation.py +cryptography/hazmat/bindings/openssl/__init__.py +cryptography/hazmat/bindings/openssl/aes.py +cryptography/hazmat/bindings/openssl/asn1.py +cryptography/hazmat/bindings/openssl/bignum.py +cryptography/hazmat/bindings/openssl/binding.py +cryptography/hazmat/bindings/openssl/bio.py +cryptography/hazmat/bindings/openssl/cmac.py +cryptography/hazmat/bindings/openssl/cms.py +cryptography/hazmat/bindings/openssl/conf.py +cryptography/hazmat/bindings/openssl/crypto.py +cryptography/hazmat/bindings/openssl/dh.py +cryptography/hazmat/bindings/openssl/dsa.py +cryptography/hazmat/bindings/openssl/ec.py +cryptography/hazmat/bindings/openssl/ecdh.py 
+cryptography/hazmat/bindings/openssl/ecdsa.py +cryptography/hazmat/bindings/openssl/engine.py +cryptography/hazmat/bindings/openssl/err.py +cryptography/hazmat/bindings/openssl/evp.py +cryptography/hazmat/bindings/openssl/hmac.py +cryptography/hazmat/bindings/openssl/nid.py +cryptography/hazmat/bindings/openssl/objects.py +cryptography/hazmat/bindings/openssl/opensslv.py +cryptography/hazmat/bindings/openssl/osrandom_engine.py +cryptography/hazmat/bindings/openssl/pem.py +cryptography/hazmat/bindings/openssl/pkcs12.py +cryptography/hazmat/bindings/openssl/pkcs7.py +cryptography/hazmat/bindings/openssl/rand.py +cryptography/hazmat/bindings/openssl/rsa.py +cryptography/hazmat/bindings/openssl/ssl.py +cryptography/hazmat/bindings/openssl/x509.py +cryptography/hazmat/bindings/openssl/x509name.py +cryptography/hazmat/bindings/openssl/x509v3.py +cryptography/hazmat/primitives/__init__.py +cryptography/hazmat/primitives/cmac.py +cryptography/hazmat/primitives/constant_time.py +cryptography/hazmat/primitives/hashes.py +cryptography/hazmat/primitives/hmac.py +cryptography/hazmat/primitives/interfaces.py +cryptography/hazmat/primitives/padding.py +cryptography/hazmat/primitives/__pycache__/_Cryptography_cffi_684bb40axf342507b.c +cryptography/hazmat/primitives/__pycache__/_Cryptography_cffi_8f86901cxc1767c5a.c +cryptography/hazmat/primitives/asymmetric/__init__.py +cryptography/hazmat/primitives/asymmetric/dsa.py +cryptography/hazmat/primitives/asymmetric/padding.py +cryptography/hazmat/primitives/asymmetric/rsa.py +cryptography/hazmat/primitives/ciphers/__init__.py +cryptography/hazmat/primitives/ciphers/algorithms.py +cryptography/hazmat/primitives/ciphers/base.py +cryptography/hazmat/primitives/ciphers/modes.py +cryptography/hazmat/primitives/kdf/__init__.py +cryptography/hazmat/primitives/kdf/hkdf.py +cryptography/hazmat/primitives/kdf/pbkdf2.py +cryptography/hazmat/primitives/twofactor/__init__.py +cryptography/hazmat/primitives/twofactor/hotp.py 
+cryptography/hazmat/primitives/twofactor/totp.py +docs/Makefile +docs/api-stability.rst +docs/changelog.rst +docs/community.rst +docs/conf.py +docs/cryptography-docs.py +docs/doing-a-release.rst +docs/exceptions.rst +docs/faq.rst +docs/fernet.rst +docs/glossary.rst +docs/index.rst +docs/installation.rst +docs/limitations.rst +docs/make.bat +docs/random-numbers.rst +docs/security.rst +docs/spelling_wordlist.txt +docs/_static/.keep +docs/development/getting-started.rst +docs/development/index.rst +docs/development/reviewing-patches.rst +docs/development/submitting-patches.rst +docs/development/test-vectors.rst +docs/development/custom-vectors/cast5.rst +docs/development/custom-vectors/idea.rst +docs/development/custom-vectors/seed.rst +docs/development/custom-vectors/cast5/generate_cast5.py +docs/development/custom-vectors/cast5/verify_cast5.go +docs/development/custom-vectors/idea/generate_idea.py +docs/development/custom-vectors/idea/verify_idea.py +docs/development/custom-vectors/seed/generate_seed.py +docs/development/custom-vectors/seed/verify_seed.py +docs/hazmat/backends/commoncrypto.rst +docs/hazmat/backends/index.rst +docs/hazmat/backends/interfaces.rst +docs/hazmat/backends/multibackend.rst +docs/hazmat/backends/openssl.rst +docs/hazmat/bindings/commoncrypto.rst +docs/hazmat/bindings/index.rst +docs/hazmat/bindings/openssl.rst +docs/hazmat/primitives/constant-time.rst +docs/hazmat/primitives/cryptographic-hashes.rst +docs/hazmat/primitives/index.rst +docs/hazmat/primitives/interfaces.rst +docs/hazmat/primitives/key-derivation-functions.rst +docs/hazmat/primitives/padding.rst +docs/hazmat/primitives/symmetric-encryption.rst +docs/hazmat/primitives/twofactor.rst +docs/hazmat/primitives/asymmetric/dsa.rst +docs/hazmat/primitives/asymmetric/index.rst +docs/hazmat/primitives/asymmetric/padding.rst +docs/hazmat/primitives/asymmetric/rsa.rst +docs/hazmat/primitives/mac/cmac.rst +docs/hazmat/primitives/mac/hmac.rst +docs/hazmat/primitives/mac/index.rst 
+tests/__init__.py +tests/conftest.py +tests/test_fernet.py +tests/test_utils.py +tests/utils.py +tests/hazmat/__init__.py +tests/hazmat/backends/__init__.py +tests/hazmat/backends/test_commoncrypto.py +tests/hazmat/backends/test_multibackend.py +tests/hazmat/backends/test_openssl.py +tests/hazmat/bindings/test_commoncrypto.py +tests/hazmat/bindings/test_openssl.py +tests/hazmat/bindings/test_utils.py +tests/hazmat/primitives/__init__.py +tests/hazmat/primitives/test_3des.py +tests/hazmat/primitives/test_aes.py +tests/hazmat/primitives/test_arc4.py +tests/hazmat/primitives/test_block.py +tests/hazmat/primitives/test_blowfish.py +tests/hazmat/primitives/test_camellia.py +tests/hazmat/primitives/test_cast5.py +tests/hazmat/primitives/test_ciphers.py +tests/hazmat/primitives/test_cmac.py +tests/hazmat/primitives/test_constant_time.py +tests/hazmat/primitives/test_dsa.py +tests/hazmat/primitives/test_hash_vectors.py +tests/hazmat/primitives/test_hashes.py +tests/hazmat/primitives/test_hkdf.py +tests/hazmat/primitives/test_hkdf_vectors.py +tests/hazmat/primitives/test_hmac.py +tests/hazmat/primitives/test_hmac_vectors.py +tests/hazmat/primitives/test_idea.py +tests/hazmat/primitives/test_padding.py +tests/hazmat/primitives/test_pbkdf2hmac.py +tests/hazmat/primitives/test_pbkdf2hmac_vectors.py +tests/hazmat/primitives/test_rsa.py +tests/hazmat/primitives/test_seed.py +tests/hazmat/primitives/utils.py +tests/hazmat/primitives/twofactor/__init__.py +tests/hazmat/primitives/twofactor/test_hotp.py +tests/hazmat/primitives/twofactor/test_totp.py \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/dependency_links.txt b/Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git 
a/Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/installed-files.txt b/Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/installed-files.txt new file mode 100644 index 0000000..2f17187 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/installed-files.txt @@ -0,0 +1,158 @@ +../cryptography/exceptions.py +../cryptography/__init__.py +../cryptography/fernet.py +../cryptography/utils.py +../cryptography/__about__.py +../cryptography/hazmat/__init__.py +../cryptography/hazmat/backends/multibackend.py +../cryptography/hazmat/backends/interfaces.py +../cryptography/hazmat/backends/__init__.py +../cryptography/hazmat/primitives/hmac.py +../cryptography/hazmat/primitives/hashes.py +../cryptography/hazmat/primitives/cmac.py +../cryptography/hazmat/primitives/interfaces.py +../cryptography/hazmat/primitives/constant_time.py +../cryptography/hazmat/primitives/__init__.py +../cryptography/hazmat/primitives/padding.py +../cryptography/hazmat/bindings/__init__.py +../cryptography/hazmat/bindings/utils.py +../cryptography/hazmat/backends/openssl/__init__.py +../cryptography/hazmat/backends/openssl/backend.py +../cryptography/hazmat/backends/commoncrypto/__init__.py +../cryptography/hazmat/backends/commoncrypto/backend.py +../cryptography/hazmat/primitives/twofactor/totp.py +../cryptography/hazmat/primitives/twofactor/__init__.py +../cryptography/hazmat/primitives/twofactor/hotp.py +../cryptography/hazmat/primitives/kdf/__init__.py +../cryptography/hazmat/primitives/kdf/pbkdf2.py +../cryptography/hazmat/primitives/kdf/hkdf.py +../cryptography/hazmat/primitives/asymmetric/rsa.py +../cryptography/hazmat/primitives/asymmetric/dsa.py +../cryptography/hazmat/primitives/asymmetric/__init__.py +../cryptography/hazmat/primitives/asymmetric/padding.py +../cryptography/hazmat/primitives/ciphers/modes.py +../cryptography/hazmat/primitives/ciphers/__init__.py +../cryptography/hazmat/primitives/ciphers/base.py 
+../cryptography/hazmat/primitives/ciphers/algorithms.py +../cryptography/hazmat/bindings/openssl/conf.py +../cryptography/hazmat/bindings/openssl/ssl.py +../cryptography/hazmat/bindings/openssl/crypto.py +../cryptography/hazmat/bindings/openssl/hmac.py +../cryptography/hazmat/bindings/openssl/rsa.py +../cryptography/hazmat/bindings/openssl/bio.py +../cryptography/hazmat/bindings/openssl/binding.py +../cryptography/hazmat/bindings/openssl/cmac.py +../cryptography/hazmat/bindings/openssl/pem.py +../cryptography/hazmat/bindings/openssl/rand.py +../cryptography/hazmat/bindings/openssl/pkcs7.py +../cryptography/hazmat/bindings/openssl/osrandom_engine.py +../cryptography/hazmat/bindings/openssl/objects.py +../cryptography/hazmat/bindings/openssl/x509.py +../cryptography/hazmat/bindings/openssl/err.py +../cryptography/hazmat/bindings/openssl/dsa.py +../cryptography/hazmat/bindings/openssl/dh.py +../cryptography/hazmat/bindings/openssl/pkcs12.py +../cryptography/hazmat/bindings/openssl/__init__.py +../cryptography/hazmat/bindings/openssl/aes.py +../cryptography/hazmat/bindings/openssl/bignum.py +../cryptography/hazmat/bindings/openssl/x509name.py +../cryptography/hazmat/bindings/openssl/asn1.py +../cryptography/hazmat/bindings/openssl/cms.py +../cryptography/hazmat/bindings/openssl/nid.py +../cryptography/hazmat/bindings/openssl/ecdh.py +../cryptography/hazmat/bindings/openssl/ecdsa.py +../cryptography/hazmat/bindings/openssl/x509v3.py +../cryptography/hazmat/bindings/openssl/evp.py +../cryptography/hazmat/bindings/openssl/opensslv.py +../cryptography/hazmat/bindings/openssl/engine.py +../cryptography/hazmat/bindings/openssl/ec.py +../cryptography/hazmat/bindings/commoncrypto/common_cryptor.py +../cryptography/hazmat/bindings/commoncrypto/binding.py +../cryptography/hazmat/bindings/commoncrypto/common_key_derivation.py +../cryptography/hazmat/bindings/commoncrypto/common_hmac.py +../cryptography/hazmat/bindings/commoncrypto/common_digest.py 
+../cryptography/hazmat/bindings/commoncrypto/__init__.py +../cryptography/exceptions.pyc +../cryptography/__init__.pyc +../cryptography/fernet.pyc +../cryptography/utils.pyc +../cryptography/__about__.pyc +../cryptography/hazmat/__init__.pyc +../cryptography/hazmat/backends/multibackend.pyc +../cryptography/hazmat/backends/interfaces.pyc +../cryptography/hazmat/backends/__init__.pyc +../cryptography/hazmat/primitives/hmac.pyc +../cryptography/hazmat/primitives/hashes.pyc +../cryptography/hazmat/primitives/cmac.pyc +../cryptography/hazmat/primitives/interfaces.pyc +../cryptography/hazmat/primitives/constant_time.pyc +../cryptography/hazmat/primitives/__init__.pyc +../cryptography/hazmat/primitives/padding.pyc +../cryptography/hazmat/bindings/__init__.pyc +../cryptography/hazmat/bindings/utils.pyc +../cryptography/hazmat/backends/openssl/__init__.pyc +../cryptography/hazmat/backends/openssl/backend.pyc +../cryptography/hazmat/backends/commoncrypto/__init__.pyc +../cryptography/hazmat/backends/commoncrypto/backend.pyc +../cryptography/hazmat/primitives/twofactor/totp.pyc +../cryptography/hazmat/primitives/twofactor/__init__.pyc +../cryptography/hazmat/primitives/twofactor/hotp.pyc +../cryptography/hazmat/primitives/kdf/__init__.pyc +../cryptography/hazmat/primitives/kdf/pbkdf2.pyc +../cryptography/hazmat/primitives/kdf/hkdf.pyc +../cryptography/hazmat/primitives/asymmetric/rsa.pyc +../cryptography/hazmat/primitives/asymmetric/dsa.pyc +../cryptography/hazmat/primitives/asymmetric/__init__.pyc +../cryptography/hazmat/primitives/asymmetric/padding.pyc +../cryptography/hazmat/primitives/ciphers/modes.pyc +../cryptography/hazmat/primitives/ciphers/__init__.pyc +../cryptography/hazmat/primitives/ciphers/base.pyc +../cryptography/hazmat/primitives/ciphers/algorithms.pyc +../cryptography/hazmat/bindings/openssl/conf.pyc +../cryptography/hazmat/bindings/openssl/ssl.pyc +../cryptography/hazmat/bindings/openssl/crypto.pyc +../cryptography/hazmat/bindings/openssl/hmac.pyc 
+../cryptography/hazmat/bindings/openssl/rsa.pyc +../cryptography/hazmat/bindings/openssl/bio.pyc +../cryptography/hazmat/bindings/openssl/binding.pyc +../cryptography/hazmat/bindings/openssl/cmac.pyc +../cryptography/hazmat/bindings/openssl/pem.pyc +../cryptography/hazmat/bindings/openssl/rand.pyc +../cryptography/hazmat/bindings/openssl/pkcs7.pyc +../cryptography/hazmat/bindings/openssl/osrandom_engine.pyc +../cryptography/hazmat/bindings/openssl/objects.pyc +../cryptography/hazmat/bindings/openssl/x509.pyc +../cryptography/hazmat/bindings/openssl/err.pyc +../cryptography/hazmat/bindings/openssl/dsa.pyc +../cryptography/hazmat/bindings/openssl/dh.pyc +../cryptography/hazmat/bindings/openssl/pkcs12.pyc +../cryptography/hazmat/bindings/openssl/__init__.pyc +../cryptography/hazmat/bindings/openssl/aes.pyc +../cryptography/hazmat/bindings/openssl/bignum.pyc +../cryptography/hazmat/bindings/openssl/x509name.pyc +../cryptography/hazmat/bindings/openssl/asn1.pyc +../cryptography/hazmat/bindings/openssl/cms.pyc +../cryptography/hazmat/bindings/openssl/nid.pyc +../cryptography/hazmat/bindings/openssl/ecdh.pyc +../cryptography/hazmat/bindings/openssl/ecdsa.pyc +../cryptography/hazmat/bindings/openssl/x509v3.pyc +../cryptography/hazmat/bindings/openssl/evp.pyc +../cryptography/hazmat/bindings/openssl/opensslv.pyc +../cryptography/hazmat/bindings/openssl/engine.pyc +../cryptography/hazmat/bindings/openssl/ec.pyc +../cryptography/hazmat/bindings/commoncrypto/common_cryptor.pyc +../cryptography/hazmat/bindings/commoncrypto/binding.pyc +../cryptography/hazmat/bindings/commoncrypto/common_key_derivation.pyc +../cryptography/hazmat/bindings/commoncrypto/common_hmac.pyc +../cryptography/hazmat/bindings/commoncrypto/common_digest.pyc +../cryptography/hazmat/bindings/commoncrypto/__init__.pyc +../cryptography/_Cryptography_cffi_444d7397xa22f8491.so +../cryptography/_Cryptography_cffi_684bb40axf342507b.so +../cryptography/_Cryptography_cffi_8f86901cxc1767c5a.so +./ +requires.txt 
+SOURCES.txt +dependency_links.txt +PKG-INFO +not-zip-safe +top_level.txt diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/not-zip-safe b/Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/not-zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/not-zip-safe @@ -0,0 +1 @@ + diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/requires.txt b/Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/requires.txt new file mode 100644 index 0000000..aeb5cd7 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/requires.txt @@ -0,0 +1,2 @@ +cffi>=0.8 +six>=1.4.1 \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/top_level.txt b/Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/top_level.txt new file mode 100644 index 0000000..2cead95 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/top_level.txt @@ -0,0 +1,4 @@ +_Cryptography_cffi_444d7397xa22f8491 +_Cryptography_cffi_684bb40axf342507b +cryptography +_Cryptography_cffi_8f86901cxc1767c5a diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/_Cryptography_cffi_444d7397xa22f8491.so b/Linux_i686/lib/python2.7/site-packages/cryptography/_Cryptography_cffi_444d7397xa22f8491.so new file mode 100755 index 0000000..6bbdaa7 Binary files /dev/null and b/Linux_i686/lib/python2.7/site-packages/cryptography/_Cryptography_cffi_444d7397xa22f8491.so differ diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/_Cryptography_cffi_684bb40axf342507b.so b/Linux_i686/lib/python2.7/site-packages/cryptography/_Cryptography_cffi_684bb40axf342507b.so new file mode 100755 index 0000000..7faef53 Binary files /dev/null and b/Linux_i686/lib/python2.7/site-packages/cryptography/_Cryptography_cffi_684bb40axf342507b.so differ diff --git 
a/Linux_i686/lib/python2.7/site-packages/cryptography/_Cryptography_cffi_8f86901cxc1767c5a.so b/Linux_i686/lib/python2.7/site-packages/cryptography/_Cryptography_cffi_8f86901cxc1767c5a.so new file mode 100755 index 0000000..17a33c1 Binary files /dev/null and b/Linux_i686/lib/python2.7/site-packages/cryptography/_Cryptography_cffi_8f86901cxc1767c5a.so differ diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/__about__.py b/Linux_i686/lib/python2.7/site-packages/cryptography/__about__.py new file mode 100644 index 0000000..d1151dc --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/__about__.py @@ -0,0 +1,31 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from __future__ import absolute_import, division, print_function + +__all__ = [ + "__title__", "__summary__", "__uri__", "__version__", "__author__", + "__email__", "__license__", "__copyright__", +] + +__title__ = "cryptography" +__summary__ = ("cryptography is a package which provides cryptographic recipes" + " and primitives to Python developers.") +__uri__ = "https://github.com/pyca/cryptography" + +__version__ = "0.4" + +__author__ = "The cryptography developers" +__email__ = "cryptography-dev@python.org" + +__license__ = "Apache License, Version 2.0" +__copyright__ = "Copyright 2013-2014 %s" % __author__ diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/__init__.py b/Linux_i686/lib/python2.7/site-packages/cryptography/__init__.py new file mode 100644 index 0000000..f27ba85 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/__init__.py @@ -0,0 +1,25 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function + +from cryptography.__about__ import ( + __author__, __copyright__, __email__, __license__, __summary__, __title__, + __uri__, __version__ +) + + +__all__ = [ + "__title__", "__summary__", "__uri__", "__version__", "__author__", + "__email__", "__license__", "__copyright__", +] diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/exceptions.py b/Linux_i686/lib/python2.7/site-packages/cryptography/exceptions.py new file mode 100644 index 0000000..b4ee8fe --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/exceptions.py @@ -0,0 +1,61 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function + + +class _Reasons(object): + BACKEND_MISSING_INTERFACE = object() + UNSUPPORTED_HASH = object() + UNSUPPORTED_CIPHER = object() + UNSUPPORTED_PADDING = object() + UNSUPPORTED_MGF = object() + UNSUPPORTED_PUBLIC_KEY_ALGORITHM = object() + + +class UnsupportedAlgorithm(Exception): + def __init__(self, message, reason=None): + super(UnsupportedAlgorithm, self).__init__(message) + self._reason = reason + + +class AlreadyFinalized(Exception): + pass + + +class AlreadyUpdated(Exception): + pass + + +class NotYetFinalized(Exception): + pass + + +class InvalidTag(Exception): + pass + + +class InvalidSignature(Exception): + pass + + +class InternalError(Exception): + pass + + +class InvalidKey(Exception): + pass + + +class InvalidToken(Exception): + pass diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/fernet.py b/Linux_i686/lib/python2.7/site-packages/cryptography/fernet.py new file mode 100644 index 0000000..674ce8a --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/fernet.py @@ -0,0 +1,133 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function + +import base64 +import binascii +import os +import struct +import time + +import six + +from cryptography.exceptions import InvalidSignature +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import hashes, padding +from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes +from cryptography.hazmat.primitives.hmac import HMAC + + +class InvalidToken(Exception): + pass + + +_MAX_CLOCK_SKEW = 60 + + +class Fernet(object): + def __init__(self, key, backend=None): + if backend is None: + backend = default_backend() + + key = base64.urlsafe_b64decode(key) + if len(key) != 32: + raise ValueError( + "Fernet key must be 32 url-safe base64-encoded bytes" + ) + + self._signing_key = key[:16] + self._encryption_key = key[16:] + self._backend = backend + + @classmethod + def generate_key(cls): + return base64.urlsafe_b64encode(os.urandom(32)) + + def encrypt(self, data): + current_time = int(time.time()) + iv = os.urandom(16) + return self._encrypt_from_parts(data, current_time, iv) + + def _encrypt_from_parts(self, data, current_time, iv): + if isinstance(data, six.text_type): + raise TypeError( + "Unicode-objects must be encoded before encryption" + ) + + padder = padding.PKCS7(algorithms.AES.block_size).padder() + padded_data = padder.update(data) + padder.finalize() + encryptor = Cipher( + algorithms.AES(self._encryption_key), modes.CBC(iv), self._backend + ).encryptor() + ciphertext = encryptor.update(padded_data) + encryptor.finalize() + + basic_parts = ( + b"\x80" + struct.pack(">Q", current_time) + iv + ciphertext + ) + + h = HMAC(self._signing_key, hashes.SHA256(), backend=self._backend) + h.update(basic_parts) + hmac = h.finalize() + return base64.urlsafe_b64encode(basic_parts + hmac) + + def decrypt(self, token, ttl=None): + if isinstance(token, six.text_type): + raise TypeError( + "Unicode-objects must be encoded before decryption" + 
) + + current_time = int(time.time()) + + try: + data = base64.urlsafe_b64decode(token) + except (TypeError, binascii.Error): + raise InvalidToken + + if six.indexbytes(data, 0) != 0x80: + raise InvalidToken + + try: + timestamp, = struct.unpack(">Q", data[1:9]) + except struct.error: + raise InvalidToken + if ttl is not None: + if timestamp + ttl < current_time: + raise InvalidToken + if current_time + _MAX_CLOCK_SKEW < timestamp: + raise InvalidToken + h = HMAC(self._signing_key, hashes.SHA256(), backend=self._backend) + h.update(data[:-32]) + try: + h.verify(data[-32:]) + except InvalidSignature: + raise InvalidToken + + iv = data[9:25] + ciphertext = data[25:-32] + decryptor = Cipher( + algorithms.AES(self._encryption_key), modes.CBC(iv), self._backend + ).decryptor() + plaintext_padded = decryptor.update(ciphertext) + try: + plaintext_padded += decryptor.finalize() + except ValueError: + raise InvalidToken + unpadder = padding.PKCS7(algorithms.AES.block_size).unpadder() + + unpadded = unpadder.update(plaintext_padded) + try: + unpadded += unpadder.finalize() + except ValueError: + raise InvalidToken + return unpadded diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/__init__.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/__init__.py new file mode 100644 index 0000000..2f42057 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/__init__.py @@ -0,0 +1,14 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/__init__.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/__init__.py new file mode 100644 index 0000000..ae78822 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/__init__.py @@ -0,0 +1,54 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function + +from cryptography.hazmat.backends.multibackend import MultiBackend +from cryptography.hazmat.bindings.commoncrypto.binding import ( + Binding as CommonCryptoBinding +) +from cryptography.hazmat.bindings.openssl.binding import ( + Binding as OpenSSLBinding +) + + +_available_backends_list = None + + +def _available_backends(): + global _available_backends_list + + if _available_backends_list is None: + _available_backends_list = [] + + if CommonCryptoBinding.is_available(): + from cryptography.hazmat.backends import commoncrypto + _available_backends_list.append(commoncrypto.backend) + + if OpenSSLBinding.is_available(): + from cryptography.hazmat.backends import openssl + _available_backends_list.append(openssl.backend) + + return _available_backends_list + + +_default_backend = None + + +def default_backend(): + global _default_backend + + if _default_backend is None: + _default_backend = MultiBackend(_available_backends()) + + return _default_backend diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/commoncrypto/__init__.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/commoncrypto/__init__.py new file mode 100644 index 0000000..f080394 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/commoncrypto/__init__.py @@ -0,0 +1,19 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function + +from cryptography.hazmat.backends.commoncrypto.backend import backend + + +__all__ = ["backend"] diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/commoncrypto/backend.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/commoncrypto/backend.py new file mode 100644 index 0000000..4faca73 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/commoncrypto/backend.py @@ -0,0 +1,499 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function + +from collections import namedtuple + +from cryptography import utils +from cryptography.exceptions import ( + InternalError, InvalidTag, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.interfaces import ( + CipherBackend, HMACBackend, HashBackend, PBKDF2HMACBackend +) +from cryptography.hazmat.bindings.commoncrypto.binding import Binding +from cryptography.hazmat.primitives import constant_time, interfaces +from cryptography.hazmat.primitives.ciphers.algorithms import ( + AES, ARC4, Blowfish, CAST5, TripleDES +) +from cryptography.hazmat.primitives.ciphers.modes import ( + CBC, CFB, CTR, ECB, GCM, OFB +) + + +HashMethods = namedtuple( + "HashMethods", ["ctx", "hash_init", "hash_update", "hash_final"] +) + + +@utils.register_interface(CipherBackend) +@utils.register_interface(HashBackend) +@utils.register_interface(HMACBackend) +@utils.register_interface(PBKDF2HMACBackend) +class Backend(object): + """ + CommonCrypto API wrapper. 
+ """ + name = "commoncrypto" + + def __init__(self): + self._binding = Binding() + self._ffi = self._binding.ffi + self._lib = self._binding.lib + + self._cipher_registry = {} + self._register_default_ciphers() + self._hash_mapping = { + "md5": HashMethods( + "CC_MD5_CTX *", self._lib.CC_MD5_Init, + self._lib.CC_MD5_Update, self._lib.CC_MD5_Final + ), + "sha1": HashMethods( + "CC_SHA1_CTX *", self._lib.CC_SHA1_Init, + self._lib.CC_SHA1_Update, self._lib.CC_SHA1_Final + ), + "sha224": HashMethods( + "CC_SHA256_CTX *", self._lib.CC_SHA224_Init, + self._lib.CC_SHA224_Update, self._lib.CC_SHA224_Final + ), + "sha256": HashMethods( + "CC_SHA256_CTX *", self._lib.CC_SHA256_Init, + self._lib.CC_SHA256_Update, self._lib.CC_SHA256_Final + ), + "sha384": HashMethods( + "CC_SHA512_CTX *", self._lib.CC_SHA384_Init, + self._lib.CC_SHA384_Update, self._lib.CC_SHA384_Final + ), + "sha512": HashMethods( + "CC_SHA512_CTX *", self._lib.CC_SHA512_Init, + self._lib.CC_SHA512_Update, self._lib.CC_SHA512_Final + ), + } + + self._supported_hmac_algorithms = { + "md5": self._lib.kCCHmacAlgMD5, + "sha1": self._lib.kCCHmacAlgSHA1, + "sha224": self._lib.kCCHmacAlgSHA224, + "sha256": self._lib.kCCHmacAlgSHA256, + "sha384": self._lib.kCCHmacAlgSHA384, + "sha512": self._lib.kCCHmacAlgSHA512, + } + + self._supported_pbkdf2_hmac_algorithms = { + "sha1": self._lib.kCCPRFHmacAlgSHA1, + "sha224": self._lib.kCCPRFHmacAlgSHA224, + "sha256": self._lib.kCCPRFHmacAlgSHA256, + "sha384": self._lib.kCCPRFHmacAlgSHA384, + "sha512": self._lib.kCCPRFHmacAlgSHA512, + } + + def hash_supported(self, algorithm): + return algorithm.name in self._hash_mapping + + def hmac_supported(self, algorithm): + return algorithm.name in self._supported_hmac_algorithms + + def create_hash_ctx(self, algorithm): + return _HashContext(self, algorithm) + + def create_hmac_ctx(self, key, algorithm): + return _HMACContext(self, key, algorithm) + + def cipher_supported(self, cipher, mode): + return (type(cipher), type(mode)) in 
self._cipher_registry + + def create_symmetric_encryption_ctx(self, cipher, mode): + if isinstance(mode, GCM): + return _GCMCipherContext( + self, cipher, mode, self._lib.kCCEncrypt + ) + else: + return _CipherContext(self, cipher, mode, self._lib.kCCEncrypt) + + def create_symmetric_decryption_ctx(self, cipher, mode): + if isinstance(mode, GCM): + return _GCMCipherContext( + self, cipher, mode, self._lib.kCCDecrypt + ) + else: + return _CipherContext(self, cipher, mode, self._lib.kCCDecrypt) + + def pbkdf2_hmac_supported(self, algorithm): + return algorithm.name in self._supported_pbkdf2_hmac_algorithms + + def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations, + key_material): + alg_enum = self._supported_pbkdf2_hmac_algorithms[algorithm.name] + buf = self._ffi.new("char[]", length) + res = self._lib.CCKeyDerivationPBKDF( + self._lib.kCCPBKDF2, + key_material, + len(key_material), + salt, + len(salt), + alg_enum, + iterations, + buf, + length + ) + self._check_response(res) + + return self._ffi.buffer(buf)[:] + + def _register_cipher_adapter(self, cipher_cls, cipher_const, mode_cls, + mode_const): + if (cipher_cls, mode_cls) in self._cipher_registry: + raise ValueError("Duplicate registration for: {0} {1}".format( + cipher_cls, mode_cls) + ) + self._cipher_registry[cipher_cls, mode_cls] = (cipher_const, + mode_const) + + def _register_default_ciphers(self): + for mode_cls, mode_const in [ + (CBC, self._lib.kCCModeCBC), + (ECB, self._lib.kCCModeECB), + (CFB, self._lib.kCCModeCFB), + (OFB, self._lib.kCCModeOFB), + (CTR, self._lib.kCCModeCTR), + (GCM, self._lib.kCCModeGCM), + ]: + self._register_cipher_adapter( + AES, + self._lib.kCCAlgorithmAES128, + mode_cls, + mode_const + ) + for mode_cls, mode_const in [ + (CBC, self._lib.kCCModeCBC), + (CFB, self._lib.kCCModeCFB), + (OFB, self._lib.kCCModeOFB), + ]: + self._register_cipher_adapter( + TripleDES, + self._lib.kCCAlgorithm3DES, + mode_cls, + mode_const + ) + for mode_cls, mode_const in [ + (CBC, 
self._lib.kCCModeCBC), + (ECB, self._lib.kCCModeECB), + (CFB, self._lib.kCCModeCFB), + (OFB, self._lib.kCCModeOFB) + ]: + self._register_cipher_adapter( + Blowfish, + self._lib.kCCAlgorithmBlowfish, + mode_cls, + mode_const + ) + for mode_cls, mode_const in [ + (CBC, self._lib.kCCModeCBC), + (ECB, self._lib.kCCModeECB), + (CFB, self._lib.kCCModeCFB), + (OFB, self._lib.kCCModeOFB), + (CTR, self._lib.kCCModeCTR) + ]: + self._register_cipher_adapter( + CAST5, + self._lib.kCCAlgorithmCAST, + mode_cls, + mode_const + ) + self._register_cipher_adapter( + ARC4, + self._lib.kCCAlgorithmRC4, + type(None), + self._lib.kCCModeRC4 + ) + + def _check_response(self, response): + if response == self._lib.kCCSuccess: + return + elif response == self._lib.kCCAlignmentError: + # This error is not currently triggered due to a bug filed as + # rdar://15589470 + raise ValueError( + "The length of the provided data is not a multiple of " + "the block length" + ) + else: + raise InternalError( + "The backend returned an unknown error, consider filing a bug." + " Code: {0}.".format(response) + ) + + +def _release_cipher_ctx(ctx): + """ + Called by the garbage collector and used to safely dereference and + release the context. + """ + if ctx[0] != backend._ffi.NULL: + res = backend._lib.CCCryptorRelease(ctx[0]) + backend._check_response(res) + ctx[0] = backend._ffi.NULL + + +@utils.register_interface(interfaces.CipherContext) +class _CipherContext(object): + def __init__(self, backend, cipher, mode, operation): + self._backend = backend + self._cipher = cipher + self._mode = mode + self._operation = operation + # There is a bug in CommonCrypto where block ciphers do not raise + # kCCAlignmentError when finalizing if you supply non-block aligned + # data. To work around this we need to keep track of the block + # alignment ourselves, but only for alg+mode combos that require + # block alignment. 
OFB, CFB, and CTR make a block cipher algorithm + # into a stream cipher so we don't need to track them (and thus their + # block size is effectively 1 byte just like OpenSSL/CommonCrypto + # treat RC4 and other stream cipher block sizes). + # This bug has been filed as rdar://15589470 + self._bytes_processed = 0 + if (isinstance(cipher, interfaces.BlockCipherAlgorithm) and not + isinstance(mode, (OFB, CFB, CTR))): + self._byte_block_size = cipher.block_size // 8 + else: + self._byte_block_size = 1 + + registry = self._backend._cipher_registry + try: + cipher_enum, mode_enum = registry[type(cipher), type(mode)] + except KeyError: + raise UnsupportedAlgorithm( + "cipher {0} in {1} mode is not supported " + "by this backend".format( + cipher.name, mode.name if mode else mode), + _Reasons.UNSUPPORTED_CIPHER + ) + + ctx = self._backend._ffi.new("CCCryptorRef *") + ctx = self._backend._ffi.gc(ctx, _release_cipher_ctx) + + if isinstance(mode, interfaces.ModeWithInitializationVector): + iv_nonce = mode.initialization_vector + elif isinstance(mode, interfaces.ModeWithNonce): + iv_nonce = mode.nonce + else: + iv_nonce = self._backend._ffi.NULL + + if isinstance(mode, CTR): + mode_option = self._backend._lib.kCCModeOptionCTR_BE + else: + mode_option = 0 + + res = self._backend._lib.CCCryptorCreateWithMode( + operation, + mode_enum, cipher_enum, + self._backend._lib.ccNoPadding, iv_nonce, + cipher.key, len(cipher.key), + self._backend._ffi.NULL, 0, 0, mode_option, ctx) + self._backend._check_response(res) + + self._ctx = ctx + + def update(self, data): + # Count bytes processed to handle block alignment. 
+ self._bytes_processed += len(data) + buf = self._backend._ffi.new( + "unsigned char[]", len(data) + self._byte_block_size - 1) + outlen = self._backend._ffi.new("size_t *") + res = self._backend._lib.CCCryptorUpdate( + self._ctx[0], data, len(data), buf, + len(data) + self._byte_block_size - 1, outlen) + self._backend._check_response(res) + return self._backend._ffi.buffer(buf)[:outlen[0]] + + def finalize(self): + # Raise error if block alignment is wrong. + if self._bytes_processed % self._byte_block_size: + raise ValueError( + "The length of the provided data is not a multiple of " + "the block length" + ) + buf = self._backend._ffi.new("unsigned char[]", self._byte_block_size) + outlen = self._backend._ffi.new("size_t *") + res = self._backend._lib.CCCryptorFinal( + self._ctx[0], buf, len(buf), outlen) + self._backend._check_response(res) + _release_cipher_ctx(self._ctx) + return self._backend._ffi.buffer(buf)[:outlen[0]] + + +@utils.register_interface(interfaces.AEADCipherContext) +@utils.register_interface(interfaces.AEADEncryptionContext) +class _GCMCipherContext(object): + def __init__(self, backend, cipher, mode, operation): + self._backend = backend + self._cipher = cipher + self._mode = mode + self._operation = operation + self._tag = None + + registry = self._backend._cipher_registry + try: + cipher_enum, mode_enum = registry[type(cipher), type(mode)] + except KeyError: + raise UnsupportedAlgorithm( + "cipher {0} in {1} mode is not supported " + "by this backend".format( + cipher.name, mode.name if mode else mode), + _Reasons.UNSUPPORTED_CIPHER + ) + + ctx = self._backend._ffi.new("CCCryptorRef *") + ctx = self._backend._ffi.gc(ctx, _release_cipher_ctx) + + self._ctx = ctx + + res = self._backend._lib.CCCryptorCreateWithMode( + operation, + mode_enum, cipher_enum, + self._backend._lib.ccNoPadding, + self._backend._ffi.NULL, + cipher.key, len(cipher.key), + self._backend._ffi.NULL, 0, 0, 0, self._ctx) + self._backend._check_response(res) + + res = 
self._backend._lib.CCCryptorGCMAddIV( + self._ctx[0], + mode.initialization_vector, + len(mode.initialization_vector) + ) + self._backend._check_response(res) + + def update(self, data): + buf = self._backend._ffi.new("unsigned char[]", len(data)) + args = (self._ctx[0], data, len(data), buf) + if self._operation == self._backend._lib.kCCEncrypt: + res = self._backend._lib.CCCryptorGCMEncrypt(*args) + else: + res = self._backend._lib.CCCryptorGCMDecrypt(*args) + + self._backend._check_response(res) + return self._backend._ffi.buffer(buf)[:] + + def finalize(self): + tag_size = self._cipher.block_size // 8 + tag_buf = self._backend._ffi.new("unsigned char[]", tag_size) + tag_len = self._backend._ffi.new("size_t *", tag_size) + res = backend._lib.CCCryptorGCMFinal(self._ctx[0], tag_buf, tag_len) + self._backend._check_response(res) + _release_cipher_ctx(self._ctx) + self._tag = self._backend._ffi.buffer(tag_buf)[:] + if (self._operation == self._backend._lib.kCCDecrypt and + not constant_time.bytes_eq( + self._tag[:len(self._mode.tag)], self._mode.tag + )): + raise InvalidTag + return b"" + + def authenticate_additional_data(self, data): + res = self._backend._lib.CCCryptorGCMAddAAD( + self._ctx[0], data, len(data) + ) + self._backend._check_response(res) + + @property + def tag(self): + return self._tag + + +@utils.register_interface(interfaces.HashContext) +class _HashContext(object): + def __init__(self, backend, algorithm, ctx=None): + self.algorithm = algorithm + self._backend = backend + + if ctx is None: + try: + methods = self._backend._hash_mapping[self.algorithm.name] + except KeyError: + raise UnsupportedAlgorithm( + "{0} is not a supported hash on this backend".format( + algorithm.name), + _Reasons.UNSUPPORTED_HASH + ) + ctx = self._backend._ffi.new(methods.ctx) + res = methods.hash_init(ctx) + assert res == 1 + + self._ctx = ctx + + def copy(self): + methods = self._backend._hash_mapping[self.algorithm.name] + new_ctx = 
self._backend._ffi.new(methods.ctx) + # CommonCrypto has no APIs for copying hashes, so we have to copy the + # underlying struct. + new_ctx[0] = self._ctx[0] + + return _HashContext(self._backend, self.algorithm, ctx=new_ctx) + + def update(self, data): + methods = self._backend._hash_mapping[self.algorithm.name] + res = methods.hash_update(self._ctx, data, len(data)) + assert res == 1 + + def finalize(self): + methods = self._backend._hash_mapping[self.algorithm.name] + buf = self._backend._ffi.new("unsigned char[]", + self.algorithm.digest_size) + res = methods.hash_final(buf, self._ctx) + assert res == 1 + return self._backend._ffi.buffer(buf)[:] + + +@utils.register_interface(interfaces.HashContext) +class _HMACContext(object): + def __init__(self, backend, key, algorithm, ctx=None): + self.algorithm = algorithm + self._backend = backend + if ctx is None: + ctx = self._backend._ffi.new("CCHmacContext *") + try: + alg = self._backend._supported_hmac_algorithms[algorithm.name] + except KeyError: + raise UnsupportedAlgorithm( + "{0} is not a supported HMAC hash on this backend".format( + algorithm.name), + _Reasons.UNSUPPORTED_HASH + ) + + self._backend._lib.CCHmacInit(ctx, alg, key, len(key)) + + self._ctx = ctx + self._key = key + + def copy(self): + copied_ctx = self._backend._ffi.new("CCHmacContext *") + # CommonCrypto has no APIs for copying HMACs, so we have to copy the + # underlying struct. 
+ copied_ctx[0] = self._ctx[0] + return _HMACContext( + self._backend, self._key, self.algorithm, ctx=copied_ctx + ) + + def update(self, data): + self._backend._lib.CCHmacUpdate(self._ctx, data, len(data)) + + def finalize(self): + buf = self._backend._ffi.new("unsigned char[]", + self.algorithm.digest_size) + self._backend._lib.CCHmacFinal(self._ctx, buf) + return self._backend._ffi.buffer(buf)[:] + + +backend = Backend() diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/interfaces.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/interfaces.py new file mode 100644 index 0000000..264c5af --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/interfaces.py @@ -0,0 +1,197 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +import abc + +import six + + +@six.add_metaclass(abc.ABCMeta) +class CipherBackend(object): + @abc.abstractmethod + def cipher_supported(self, cipher, mode): + """ + Return True if the given cipher and mode are supported. + """ + + @abc.abstractmethod + def create_symmetric_encryption_ctx(self, cipher, mode): + """ + Get a CipherContext that can be used for encryption. + """ + + @abc.abstractmethod + def create_symmetric_decryption_ctx(self, cipher, mode): + """ + Get a CipherContext that can be used for decryption. 
+ """ + + +@six.add_metaclass(abc.ABCMeta) +class HashBackend(object): + @abc.abstractmethod + def hash_supported(self, algorithm): + """ + Return True if the hash algorithm is supported by this backend. + """ + + @abc.abstractmethod + def create_hash_ctx(self, algorithm): + """ + Create a HashContext for calculating a message digest. + """ + + +@six.add_metaclass(abc.ABCMeta) +class HMACBackend(object): + @abc.abstractmethod + def hmac_supported(self, algorithm): + """ + Return True if the hash algorithm is supported for HMAC by this + backend. + """ + + @abc.abstractmethod + def create_hmac_ctx(self, key, algorithm): + """ + Create a HashContext for calculating a message authentication code. + """ + + +@six.add_metaclass(abc.ABCMeta) +class PBKDF2HMACBackend(object): + @abc.abstractmethod + def pbkdf2_hmac_supported(self, algorithm): + """ + Return True if the hash algorithm is supported for PBKDF2 by this + backend. + """ + + @abc.abstractmethod + def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations, + key_material): + """ + Return length bytes derived from provided PBKDF2 parameters. + """ + + +@six.add_metaclass(abc.ABCMeta) +class RSABackend(object): + @abc.abstractmethod + def generate_rsa_private_key(self, public_exponent, key_size): + """ + Generate an RSAPrivateKey instance with public_exponent and a modulus + of key_size bits. + """ + + @abc.abstractmethod + def create_rsa_signature_ctx(self, private_key, padding, algorithm): + """ + Returns an object conforming to the AsymmetricSignatureContext + interface. + """ + + @abc.abstractmethod + def create_rsa_verification_ctx(self, public_key, signature, padding, + algorithm): + """ + Returns an object conforming to the AsymmetricVerificationContext + interface. + """ + + @abc.abstractmethod + def mgf1_hash_supported(self, algorithm): + """ + Return True if the hash algorithm is supported for MGF1 in PSS. 
+ """ + + @abc.abstractmethod + def decrypt_rsa(self, private_key, ciphertext, padding): + """ + Returns decrypted bytes. + """ + + @abc.abstractmethod + def encrypt_rsa(self, public_key, plaintext, padding): + """ + Returns encrypted bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DSABackend(object): + @abc.abstractmethod + def generate_dsa_parameters(self, key_size): + """ + Generate a DSAParameters instance with a modulus of key_size bits. + """ + + @abc.abstractmethod + def generate_dsa_private_key(self, parameters): + """ + Generate an DSAPrivateKey instance with parameters as + a DSAParameters object. + """ + + @abc.abstractmethod + def create_dsa_signature_ctx(self, private_key, algorithm): + """ + Returns an object conforming to the AsymmetricSignatureContext + interface. + """ + + @abc.abstractmethod + def create_dsa_verification_ctx(self, public_key, signature, algorithm): + """ + Returns an object conforming to the AsymmetricVerificationContext + interface. + """ + + @abc.abstractmethod + def dsa_hash_supported(self, algorithm): + """ + Return True if the hash algorithm is supported by the backend for DSA. + """ + + @abc.abstractmethod + def dsa_parameters_supported(self, p, q, g): + """ + Return True if the parameters are supported by the backend for DSA. + """ + + +@six.add_metaclass(abc.ABCMeta) +class TraditionalOpenSSLSerializationBackend(object): + @abc.abstractmethod + def load_traditional_openssl_pem_private_key(self, data, password): + """ + Load a private key from PEM encoded data, using password if the data + is encrypted. + """ + + +@six.add_metaclass(abc.ABCMeta) +class CMACBackend(object): + @abc.abstractmethod + def cmac_algorithm_supported(self, algorithm): + """ + Returns True if the block cipher is supported for CMAC by this backend + """ + + @abc.abstractmethod + def create_cmac_ctx(self, algorithm): + """ + Create a CMACContext for calculating a message authentication code. 
+ """ diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/multibackend.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/multibackend.py new file mode 100644 index 0000000..753f4fc --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/multibackend.py @@ -0,0 +1,199 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.backends.interfaces import ( + CMACBackend, CipherBackend, DSABackend, HMACBackend, HashBackend, + PBKDF2HMACBackend, RSABackend +) + + +@utils.register_interface(CMACBackend) +@utils.register_interface(CipherBackend) +@utils.register_interface(HashBackend) +@utils.register_interface(HMACBackend) +@utils.register_interface(PBKDF2HMACBackend) +@utils.register_interface(RSABackend) +@utils.register_interface(DSABackend) +class MultiBackend(object): + name = "multibackend" + + def __init__(self, backends): + self._backends = backends + + def _filtered_backends(self, interface): + for b in self._backends: + if isinstance(b, interface): + yield b + + def cipher_supported(self, algorithm, mode): + return any( + b.cipher_supported(algorithm, mode) + for b in self._filtered_backends(CipherBackend) + ) + + def create_symmetric_encryption_ctx(self, algorithm, mode): + for b in 
self._filtered_backends(CipherBackend): + try: + return b.create_symmetric_encryption_ctx(algorithm, mode) + except UnsupportedAlgorithm: + pass + raise UnsupportedAlgorithm( + "cipher {0} in {1} mode is not supported by this backend".format( + algorithm.name, mode.name if mode else mode), + _Reasons.UNSUPPORTED_CIPHER + ) + + def create_symmetric_decryption_ctx(self, algorithm, mode): + for b in self._filtered_backends(CipherBackend): + try: + return b.create_symmetric_decryption_ctx(algorithm, mode) + except UnsupportedAlgorithm: + pass + raise UnsupportedAlgorithm( + "cipher {0} in {1} mode is not supported by this backend".format( + algorithm.name, mode.name if mode else mode), + _Reasons.UNSUPPORTED_CIPHER + ) + + def hash_supported(self, algorithm): + return any( + b.hash_supported(algorithm) + for b in self._filtered_backends(HashBackend) + ) + + def create_hash_ctx(self, algorithm): + for b in self._filtered_backends(HashBackend): + try: + return b.create_hash_ctx(algorithm) + except UnsupportedAlgorithm: + pass + raise UnsupportedAlgorithm( + "{0} is not a supported hash on this backend".format( + algorithm.name), + _Reasons.UNSUPPORTED_HASH + ) + + def hmac_supported(self, algorithm): + return any( + b.hmac_supported(algorithm) + for b in self._filtered_backends(HMACBackend) + ) + + def create_hmac_ctx(self, key, algorithm): + for b in self._filtered_backends(HMACBackend): + try: + return b.create_hmac_ctx(key, algorithm) + except UnsupportedAlgorithm: + pass + raise UnsupportedAlgorithm( + "{0} is not a supported hash on this backend".format( + algorithm.name), + _Reasons.UNSUPPORTED_HASH + ) + + def pbkdf2_hmac_supported(self, algorithm): + return any( + b.pbkdf2_hmac_supported(algorithm) + for b in self._filtered_backends(PBKDF2HMACBackend) + ) + + def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations, + key_material): + for b in self._filtered_backends(PBKDF2HMACBackend): + try: + return b.derive_pbkdf2_hmac( + algorithm, length, salt, 
iterations, key_material + ) + except UnsupportedAlgorithm: + pass + raise UnsupportedAlgorithm( + "{0} is not a supported hash on this backend".format( + algorithm.name), + _Reasons.UNSUPPORTED_HASH + ) + + def generate_rsa_private_key(self, public_exponent, key_size): + for b in self._filtered_backends(RSABackend): + return b.generate_rsa_private_key(public_exponent, key_size) + raise UnsupportedAlgorithm("RSA is not supported by the backend", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def create_rsa_signature_ctx(self, private_key, padding, algorithm): + for b in self._filtered_backends(RSABackend): + return b.create_rsa_signature_ctx(private_key, padding, algorithm) + raise UnsupportedAlgorithm("RSA is not supported by the backend", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def create_rsa_verification_ctx(self, public_key, signature, padding, + algorithm): + for b in self._filtered_backends(RSABackend): + return b.create_rsa_verification_ctx(public_key, signature, + padding, algorithm) + raise UnsupportedAlgorithm("RSA is not supported by the backend", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def generate_dsa_parameters(self, key_size): + for b in self._filtered_backends(DSABackend): + return b.generate_dsa_parameters(key_size) + raise UnsupportedAlgorithm("DSA is not supported by the backend", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def generate_dsa_private_key(self, parameters): + for b in self._filtered_backends(DSABackend): + return b.generate_dsa_private_key(parameters) + raise UnsupportedAlgorithm("DSA is not supported by the backend", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def create_dsa_verification_ctx(self, public_key, signature, algorithm): + for b in self._filtered_backends(DSABackend): + return b.create_dsa_verification_ctx(public_key, signature, + algorithm) + raise UnsupportedAlgorithm("DSA is not supported by the backend", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def create_dsa_signature_ctx(self, 
private_key, algorithm): + for b in self._filtered_backends(DSABackend): + return b.create_dsa_signature_ctx(private_key, algorithm) + raise UnsupportedAlgorithm("DSA is not supported by the backend", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def dsa_hash_supported(self, algorithm): + for b in self._filtered_backends(DSABackend): + return b.dsa_hash_supported(algorithm) + raise UnsupportedAlgorithm("DSA is not supported by the backend", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def dsa_parameters_supported(self, p, q, g): + for b in self._filtered_backends(DSABackend): + return b.dsa_parameters_supported(p, q, g) + raise UnsupportedAlgorithm("DSA is not supported by the backend", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def cmac_algorithm_supported(self, algorithm): + return any( + b.cmac_algorithm_supported(algorithm) + for b in self._filtered_backends(CMACBackend) + ) + + def create_cmac_ctx(self, algorithm): + for b in self._filtered_backends(CMACBackend): + try: + return b.create_cmac_ctx(algorithm) + except UnsupportedAlgorithm: + pass + raise UnsupportedAlgorithm("This backend does not support CMAC", + _Reasons.UNSUPPORTED_CIPHER) diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/openssl/__init__.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/openssl/__init__.py new file mode 100644 index 0000000..25885e1 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/openssl/__init__.py @@ -0,0 +1,19 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +from cryptography.hazmat.backends.openssl.backend import backend + + +__all__ = ["backend"] diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/openssl/backend.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/openssl/backend.py new file mode 100644 index 0000000..e00be92 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/openssl/backend.py @@ -0,0 +1,1487 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function + +import collections +import itertools +import math + +import six + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, InternalError, InvalidSignature, InvalidTag, + UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.interfaces import ( + CMACBackend, CipherBackend, DSABackend, HMACBackend, HashBackend, + PBKDF2HMACBackend, RSABackend +) +from cryptography.hazmat.bindings.openssl.binding import Binding +from cryptography.hazmat.primitives import hashes, interfaces +from cryptography.hazmat.primitives.asymmetric import dsa, rsa +from cryptography.hazmat.primitives.asymmetric.padding import ( + MGF1, OAEP, PKCS1v15, PSS +) +from cryptography.hazmat.primitives.ciphers.algorithms import ( + AES, ARC4, Blowfish, CAST5, Camellia, IDEA, SEED, TripleDES +) +from cryptography.hazmat.primitives.ciphers.modes import ( + CBC, CFB, CTR, ECB, GCM, OFB +) + + +_OpenSSLError = collections.namedtuple("_OpenSSLError", + ["code", "lib", "func", "reason"]) + + +@utils.register_interface(CipherBackend) +@utils.register_interface(CMACBackend) +@utils.register_interface(DSABackend) +@utils.register_interface(HashBackend) +@utils.register_interface(HMACBackend) +@utils.register_interface(PBKDF2HMACBackend) +@utils.register_interface(RSABackend) +class Backend(object): + """ + OpenSSL API binding interfaces. 
+ """ + name = "openssl" + + def __init__(self): + self._binding = Binding() + self._ffi = self._binding.ffi + self._lib = self._binding.lib + + self._binding.init_static_locks() + + # adds all ciphers/digests for EVP + self._lib.OpenSSL_add_all_algorithms() + # registers available SSL/TLS ciphers and digests + self._lib.SSL_library_init() + # loads error strings for libcrypto and libssl functions + self._lib.SSL_load_error_strings() + + self._cipher_registry = {} + self._register_default_ciphers() + self.activate_osrandom_engine() + + def activate_builtin_random(self): + # Obtain a new structural reference. + e = self._lib.ENGINE_get_default_RAND() + if e != self._ffi.NULL: + self._lib.ENGINE_unregister_RAND(e) + # Reset the RNG to use the new engine. + self._lib.RAND_cleanup() + # decrement the structural reference from get_default_RAND + res = self._lib.ENGINE_finish(e) + assert res == 1 + + def activate_osrandom_engine(self): + # Unregister and free the current engine. + self.activate_builtin_random() + # Fetches an engine by id and returns it. This creates a structural + # reference. + e = self._lib.ENGINE_by_id(self._lib.Cryptography_osrandom_engine_id) + assert e != self._ffi.NULL + # Initialize the engine for use. This adds a functional reference. + res = self._lib.ENGINE_init(e) + assert res == 1 + # Set the engine as the default RAND provider. + res = self._lib.ENGINE_set_default_RAND(e) + assert res == 1 + # Decrement the structural ref incremented by ENGINE_by_id. + res = self._lib.ENGINE_free(e) + assert res == 1 + # Decrement the functional ref incremented by ENGINE_init. + res = self._lib.ENGINE_finish(e) + assert res == 1 + # Reset the RNG to use the new engine. + self._lib.RAND_cleanup() + + def openssl_version_text(self): + """ + Friendly string name of linked OpenSSL. 
+ + Example: OpenSSL 1.0.1e 11 Feb 2013 + """ + return self._ffi.string(self._lib.OPENSSL_VERSION_TEXT).decode("ascii") + + def create_hmac_ctx(self, key, algorithm): + return _HMACContext(self, key, algorithm) + + def hash_supported(self, algorithm): + digest = self._lib.EVP_get_digestbyname(algorithm.name.encode("ascii")) + return digest != self._ffi.NULL + + def hmac_supported(self, algorithm): + return self.hash_supported(algorithm) + + def create_hash_ctx(self, algorithm): + return _HashContext(self, algorithm) + + def cipher_supported(self, cipher, mode): + try: + adapter = self._cipher_registry[type(cipher), type(mode)] + except KeyError: + return False + evp_cipher = adapter(self, cipher, mode) + return self._ffi.NULL != evp_cipher + + def register_cipher_adapter(self, cipher_cls, mode_cls, adapter): + if (cipher_cls, mode_cls) in self._cipher_registry: + raise ValueError("Duplicate registration for: {0} {1}".format( + cipher_cls, mode_cls) + ) + self._cipher_registry[cipher_cls, mode_cls] = adapter + + def _register_default_ciphers(self): + for cipher_cls, mode_cls in itertools.product( + [AES, Camellia], + [CBC, CTR, ECB, OFB, CFB], + ): + self.register_cipher_adapter( + cipher_cls, + mode_cls, + GetCipherByName("{cipher.name}-{cipher.key_size}-{mode.name}") + ) + for mode_cls in [CBC, CFB, OFB]: + self.register_cipher_adapter( + TripleDES, + mode_cls, + GetCipherByName("des-ede3-{mode.name}") + ) + for mode_cls in [CBC, CFB, OFB, ECB]: + self.register_cipher_adapter( + Blowfish, + mode_cls, + GetCipherByName("bf-{mode.name}") + ) + for mode_cls in [CBC, CFB, OFB, ECB]: + self.register_cipher_adapter( + SEED, + mode_cls, + GetCipherByName("seed-{mode.name}") + ) + for cipher_cls, mode_cls in itertools.product( + [CAST5, IDEA], + [CBC, OFB, CFB, ECB], + ): + self.register_cipher_adapter( + cipher_cls, + mode_cls, + GetCipherByName("{cipher.name}-{mode.name}") + ) + self.register_cipher_adapter( + ARC4, + type(None), + GetCipherByName("rc4") + ) + 
self.register_cipher_adapter( + AES, + GCM, + GetCipherByName("{cipher.name}-{cipher.key_size}-{mode.name}") + ) + + def create_symmetric_encryption_ctx(self, cipher, mode): + return _CipherContext(self, cipher, mode, _CipherContext._ENCRYPT) + + def create_symmetric_decryption_ctx(self, cipher, mode): + return _CipherContext(self, cipher, mode, _CipherContext._DECRYPT) + + def pbkdf2_hmac_supported(self, algorithm): + if self._lib.Cryptography_HAS_PBKDF2_HMAC: + return self.hmac_supported(algorithm) + else: + # OpenSSL < 1.0.0 has an explicit PBKDF2-HMAC-SHA1 function, + # so if the PBKDF2_HMAC function is missing we only support + # SHA1 via PBKDF2_HMAC_SHA1. + return isinstance(algorithm, hashes.SHA1) + + def derive_pbkdf2_hmac(self, algorithm, length, salt, iterations, + key_material): + buf = self._ffi.new("char[]", length) + if self._lib.Cryptography_HAS_PBKDF2_HMAC: + evp_md = self._lib.EVP_get_digestbyname( + algorithm.name.encode("ascii")) + assert evp_md != self._ffi.NULL + res = self._lib.PKCS5_PBKDF2_HMAC( + key_material, + len(key_material), + salt, + len(salt), + iterations, + evp_md, + length, + buf + ) + assert res == 1 + else: + if not isinstance(algorithm, hashes.SHA1): + raise UnsupportedAlgorithm( + "This version of OpenSSL only supports PBKDF2HMAC with " + "SHA1", + _Reasons.UNSUPPORTED_HASH + ) + res = self._lib.PKCS5_PBKDF2_HMAC_SHA1( + key_material, + len(key_material), + salt, + len(salt), + iterations, + length, + buf + ) + assert res == 1 + + return self._ffi.buffer(buf)[:] + + def _err_string(self, code): + err_buf = self._ffi.new("char[]", 256) + self._lib.ERR_error_string_n(code, err_buf, 256) + return self._ffi.string(err_buf, 256)[:] + + def _consume_errors(self): + errors = [] + while True: + code = self._lib.ERR_get_error() + if code == 0: + break + + lib = self._lib.ERR_GET_LIB(code) + func = self._lib.ERR_GET_FUNC(code) + reason = self._lib.ERR_GET_REASON(code) + + errors.append(_OpenSSLError(code, lib, func, reason)) + return 
errors + + def _unknown_error(self, error): + return InternalError( + "Unknown error code {0} from OpenSSL, " + "you should probably file a bug. {1}".format( + error.code, self._err_string(error.code) + ) + ) + + def _bn_to_int(self, bn): + if six.PY3: + # Python 3 has constant time from_bytes, so use that. + + bn_num_bytes = (self._lib.BN_num_bits(bn) + 7) // 8 + bin_ptr = self._ffi.new("unsigned char[]", bn_num_bytes) + bin_len = self._lib.BN_bn2bin(bn, bin_ptr) + assert bin_len > 0 + assert bin_ptr != self._ffi.NULL + return int.from_bytes(self._ffi.buffer(bin_ptr)[:bin_len], "big") + + else: + # Under Python 2 the best we can do is hex() + + hex_cdata = self._lib.BN_bn2hex(bn) + assert hex_cdata != self._ffi.NULL + hex_str = self._ffi.string(hex_cdata) + self._lib.OPENSSL_free(hex_cdata) + return int(hex_str, 16) + + def _int_to_bn(self, num, bn=None): + """ + Converts a python integer to a BIGNUM. The returned BIGNUM will not + be garbage collected (to support adding them to structs that take + ownership of the object). Be sure to register it for GC if it will + be discarded after use. + """ + + if bn is None: + bn = self._ffi.NULL + + if six.PY3: + # Python 3 has constant time to_bytes, so use that. 
+ + binary = num.to_bytes(int(num.bit_length() / 8.0 + 1), "big") + bn_ptr = self._lib.BN_bin2bn(binary, len(binary), bn) + assert bn_ptr != self._ffi.NULL + return bn_ptr + + else: + # Under Python 2 the best we can do is hex() + + hex_num = hex(num).rstrip("L").lstrip("0x").encode("ascii") or b"0" + bn_ptr = self._ffi.new("BIGNUM **") + bn_ptr[0] = bn + res = self._lib.BN_hex2bn(bn_ptr, hex_num) + assert res != 0 + assert bn_ptr[0] != self._ffi.NULL + return bn_ptr[0] + + def generate_rsa_private_key(self, public_exponent, key_size): + if public_exponent < 3: + raise ValueError("public_exponent must be >= 3") + + if public_exponent & 1 == 0: + raise ValueError("public_exponent must be odd") + + if key_size < 512: + raise ValueError("key_size must be at least 512-bits") + + ctx = self._lib.RSA_new() + assert ctx != self._ffi.NULL + ctx = self._ffi.gc(ctx, self._lib.RSA_free) + + bn = self._int_to_bn(public_exponent) + bn = self._ffi.gc(bn, self._lib.BN_free) + + res = self._lib.RSA_generate_key_ex( + ctx, key_size, bn, self._ffi.NULL + ) + assert res == 1 + + return self._rsa_cdata_to_private_key(ctx) + + def _new_evp_pkey(self): + evp_pkey = self._lib.EVP_PKEY_new() + assert evp_pkey != self._ffi.NULL + return self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + + def _rsa_private_key_to_evp_pkey(self, private_key): + evp_pkey = self._new_evp_pkey() + rsa_cdata = self._rsa_cdata_from_private_key(private_key) + + res = self._lib.EVP_PKEY_assign_RSA(evp_pkey, rsa_cdata) + assert res == 1 + + return evp_pkey + + def _rsa_public_key_to_evp_pkey(self, public_key): + evp_pkey = self._new_evp_pkey() + rsa_cdata = self._rsa_cdata_from_public_key(public_key) + + res = self._lib.EVP_PKEY_assign_RSA(evp_pkey, rsa_cdata) + assert res == 1 + + return evp_pkey + + def _rsa_cdata_to_private_key(self, cdata): + return rsa.RSAPrivateKey( + p=self._bn_to_int(cdata.p), + q=self._bn_to_int(cdata.q), + dmp1=self._bn_to_int(cdata.dmp1), + dmq1=self._bn_to_int(cdata.dmq1), + 
iqmp=self._bn_to_int(cdata.iqmp), + private_exponent=self._bn_to_int(cdata.d), + public_exponent=self._bn_to_int(cdata.e), + modulus=self._bn_to_int(cdata.n), + ) + + def _rsa_cdata_from_private_key(self, private_key): + # Does not GC the RSA cdata. You *must* make sure it's freed + # correctly yourself! + ctx = self._lib.RSA_new() + assert ctx != self._ffi.NULL + ctx.p = self._int_to_bn(private_key.p) + ctx.q = self._int_to_bn(private_key.q) + ctx.d = self._int_to_bn(private_key.d) + ctx.e = self._int_to_bn(private_key.e) + ctx.n = self._int_to_bn(private_key.n) + ctx.dmp1 = self._int_to_bn(private_key.dmp1) + ctx.dmq1 = self._int_to_bn(private_key.dmq1) + ctx.iqmp = self._int_to_bn(private_key.iqmp) + res = self._lib.RSA_blinding_on(ctx, self._ffi.NULL) + assert res == 1 + + return ctx + + def _rsa_cdata_from_public_key(self, public_key): + # Does not GC the RSA cdata. You *must* make sure it's freed + # correctly yourself! + + ctx = self._lib.RSA_new() + assert ctx != self._ffi.NULL + ctx.e = self._int_to_bn(public_key.e) + ctx.n = self._int_to_bn(public_key.n) + res = self._lib.RSA_blinding_on(ctx, self._ffi.NULL) + assert res == 1 + + return ctx + + def create_rsa_signature_ctx(self, private_key, padding, algorithm): + return _RSASignatureContext(self, private_key, padding, algorithm) + + def create_rsa_verification_ctx(self, public_key, signature, padding, + algorithm): + return _RSAVerificationContext(self, public_key, signature, padding, + algorithm) + + def mgf1_hash_supported(self, algorithm): + if self._lib.Cryptography_HAS_MGF1_MD: + return self.hash_supported(algorithm) + else: + return isinstance(algorithm, hashes.SHA1) + + def generate_dsa_parameters(self, key_size): + if key_size not in (1024, 2048, 3072): + raise ValueError( + "Key size must be 1024 or 2048 or 3072 bits") + + if (self._lib.OPENSSL_VERSION_NUMBER < 0x1000000f and + key_size > 1024): + raise ValueError( + "Key size must be 1024 because OpenSSL < 1.0.0 doesn't " + "support larger key 
sizes") + + ctx = self._lib.DSA_new() + assert ctx != self._ffi.NULL + ctx = self._ffi.gc(ctx, self._lib.DSA_free) + + res = self._lib.DSA_generate_parameters_ex( + ctx, key_size, self._ffi.NULL, 0, + self._ffi.NULL, self._ffi.NULL, self._ffi.NULL + ) + + assert res == 1 + + return dsa.DSAParameters( + modulus=self._bn_to_int(ctx.p), + subgroup_order=self._bn_to_int(ctx.q), + generator=self._bn_to_int(ctx.g) + ) + + def generate_dsa_private_key(self, parameters): + ctx = self._lib.DSA_new() + assert ctx != self._ffi.NULL + ctx = self._ffi.gc(ctx, self._lib.DSA_free) + ctx.p = self._int_to_bn(parameters.p) + ctx.q = self._int_to_bn(parameters.q) + ctx.g = self._int_to_bn(parameters.g) + + self._lib.DSA_generate_key(ctx) + + return dsa.DSAPrivateKey( + modulus=self._bn_to_int(ctx.p), + subgroup_order=self._bn_to_int(ctx.q), + generator=self._bn_to_int(ctx.g), + x=self._bn_to_int(ctx.priv_key), + y=self._bn_to_int(ctx.pub_key) + ) + + def create_dsa_signature_ctx(self, private_key, algorithm): + return _DSASignatureContext(self, private_key, algorithm) + + def create_dsa_verification_ctx(self, public_key, signature, + algorithm): + return _DSAVerificationContext(self, public_key, signature, + algorithm) + + def _dsa_cdata_from_public_key(self, public_key): + # Does not GC the DSA cdata. You *must* make sure it's freed + # correctly yourself! + ctx = self._lib.DSA_new() + assert ctx != self._ffi.NULL + parameters = public_key.parameters() + ctx.p = self._int_to_bn(parameters.p) + ctx.q = self._int_to_bn(parameters.q) + ctx.g = self._int_to_bn(parameters.g) + ctx.pub_key = self._int_to_bn(public_key.y) + return ctx + + def _dsa_cdata_from_private_key(self, private_key): + # Does not GC the DSA cdata. You *must* make sure it's freed + # correctly yourself! 
+ ctx = self._lib.DSA_new() + assert ctx != self._ffi.NULL + parameters = private_key.parameters() + ctx.p = self._int_to_bn(parameters.p) + ctx.q = self._int_to_bn(parameters.q) + ctx.g = self._int_to_bn(parameters.g) + ctx.priv_key = self._int_to_bn(private_key.x) + ctx.pub_key = self._int_to_bn(private_key.y) + return ctx + + def dsa_hash_supported(self, algorithm): + if self._lib.OPENSSL_VERSION_NUMBER < 0x1000000f: + return isinstance(algorithm, hashes.SHA1) + else: + return self.hash_supported(algorithm) + + def dsa_parameters_supported(self, p, q, g): + if self._lib.OPENSSL_VERSION_NUMBER < 0x1000000f: + return (utils.bit_length(p) <= 1024 and utils.bit_length(q) <= 160) + else: + return True + + def decrypt_rsa(self, private_key, ciphertext, padding): + key_size_bytes = int(math.ceil(private_key.key_size / 8.0)) + if key_size_bytes != len(ciphertext): + raise ValueError("Ciphertext length must be equal to key size.") + + return self._enc_dec_rsa(private_key, ciphertext, padding) + + def encrypt_rsa(self, public_key, plaintext, padding): + return self._enc_dec_rsa(public_key, plaintext, padding) + + def _enc_dec_rsa(self, key, data, padding): + if isinstance(padding, PKCS1v15): + padding_enum = self._lib.RSA_PKCS1_PADDING + elif isinstance(padding, OAEP): + padding_enum = self._lib.RSA_PKCS1_OAEP_PADDING + if not isinstance(padding._mgf, MGF1): + raise UnsupportedAlgorithm( + "Only MGF1 is supported by this backend", + _Reasons.UNSUPPORTED_MGF + ) + + if not isinstance(padding._mgf._algorithm, hashes.SHA1): + raise UnsupportedAlgorithm( + "This backend supports only SHA1 inside MGF1 when " + "using OAEP", + _Reasons.UNSUPPORTED_HASH + ) + + if padding._label is not None and padding._label != b"": + raise ValueError("This backend does not support OAEP labels") + + if not isinstance(padding._algorithm, hashes.SHA1): + raise UnsupportedAlgorithm( + "This backend only supports SHA1 when using OAEP", + _Reasons.UNSUPPORTED_HASH + ) + else: + raise 
UnsupportedAlgorithm( + "{0} is not supported by this backend".format( + padding.name + ), + _Reasons.UNSUPPORTED_PADDING + ) + + if self._lib.Cryptography_HAS_PKEY_CTX: + return self._enc_dec_rsa_pkey_ctx(key, data, padding_enum) + else: + return self._enc_dec_rsa_098(key, data, padding_enum) + + def _enc_dec_rsa_pkey_ctx(self, key, data, padding_enum): + if isinstance(key, rsa.RSAPublicKey): + init = self._lib.EVP_PKEY_encrypt_init + crypt = self._lib.Cryptography_EVP_PKEY_encrypt + evp_pkey = self._rsa_public_key_to_evp_pkey(key) + else: + init = self._lib.EVP_PKEY_decrypt_init + crypt = self._lib.Cryptography_EVP_PKEY_decrypt + evp_pkey = self._rsa_private_key_to_evp_pkey(key) + + pkey_ctx = self._lib.EVP_PKEY_CTX_new( + evp_pkey, self._ffi.NULL + ) + assert pkey_ctx != self._ffi.NULL + pkey_ctx = self._ffi.gc(pkey_ctx, self._lib.EVP_PKEY_CTX_free) + res = init(pkey_ctx) + assert res == 1 + res = self._lib.EVP_PKEY_CTX_set_rsa_padding( + pkey_ctx, padding_enum) + assert res > 0 + buf_size = self._lib.EVP_PKEY_size(evp_pkey) + assert buf_size > 0 + outlen = self._ffi.new("size_t *", buf_size) + buf = self._ffi.new("char[]", buf_size) + res = crypt( + pkey_ctx, + buf, + outlen, + data, + len(data) + ) + if res <= 0: + self._handle_rsa_enc_dec_error(key) + + return self._ffi.buffer(buf)[:outlen[0]] + + def _enc_dec_rsa_098(self, key, data, padding_enum): + if isinstance(key, rsa.RSAPublicKey): + crypt = self._lib.RSA_public_encrypt + rsa_cdata = self._rsa_cdata_from_public_key(key) + else: + crypt = self._lib.RSA_private_decrypt + rsa_cdata = self._rsa_cdata_from_private_key(key) + + rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) + key_size = self._lib.RSA_size(rsa_cdata) + assert key_size > 0 + buf = self._ffi.new("unsigned char[]", key_size) + res = crypt( + len(data), + data, + buf, + rsa_cdata, + padding_enum + ) + if res < 0: + self._handle_rsa_enc_dec_error(key) + + return self._ffi.buffer(buf)[:res] + + def _handle_rsa_enc_dec_error(self, key): + 
errors = self._consume_errors() + assert errors + assert errors[0].lib == self._lib.ERR_LIB_RSA + if isinstance(key, rsa.RSAPublicKey): + assert (errors[0].reason == + self._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE) + raise ValueError( + "Data too long for key size. Encrypt less data or use a " + "larger key size" + ) + else: + assert ( + errors[0].reason == self._lib.RSA_R_BLOCK_TYPE_IS_NOT_01 or + errors[0].reason == self._lib.RSA_R_BLOCK_TYPE_IS_NOT_02 + ) + raise ValueError("Decryption failed") + + def cmac_algorithm_supported(self, algorithm): + return ( + self._lib.Cryptography_HAS_CMAC == 1 + and self.cipher_supported(algorithm, CBC( + b"\x00" * algorithm.block_size)) + ) + + def create_cmac_ctx(self, algorithm): + return _CMACContext(self, algorithm) + + +class GetCipherByName(object): + def __init__(self, fmt): + self._fmt = fmt + + def __call__(self, backend, cipher, mode): + cipher_name = self._fmt.format(cipher=cipher, mode=mode).lower() + return backend._lib.EVP_get_cipherbyname(cipher_name.encode("ascii")) + + +@utils.register_interface(interfaces.CipherContext) +@utils.register_interface(interfaces.AEADCipherContext) +@utils.register_interface(interfaces.AEADEncryptionContext) +class _CipherContext(object): + _ENCRYPT = 1 + _DECRYPT = 0 + + def __init__(self, backend, cipher, mode, operation): + self._backend = backend + self._cipher = cipher + self._mode = mode + self._operation = operation + self._tag = None + + if isinstance(self._cipher, interfaces.BlockCipherAlgorithm): + self._block_size = self._cipher.block_size + else: + self._block_size = 1 + + ctx = self._backend._lib.EVP_CIPHER_CTX_new() + ctx = self._backend._ffi.gc( + ctx, self._backend._lib.EVP_CIPHER_CTX_free + ) + + registry = self._backend._cipher_registry + try: + adapter = registry[type(cipher), type(mode)] + except KeyError: + raise UnsupportedAlgorithm( + "cipher {0} in {1} mode is not supported " + "by this backend".format( + cipher.name, mode.name if mode else mode), + 
_Reasons.UNSUPPORTED_CIPHER + ) + + evp_cipher = adapter(self._backend, cipher, mode) + if evp_cipher == self._backend._ffi.NULL: + raise UnsupportedAlgorithm( + "cipher {0} in {1} mode is not supported " + "by this backend".format( + cipher.name, mode.name if mode else mode), + _Reasons.UNSUPPORTED_CIPHER + ) + + if isinstance(mode, interfaces.ModeWithInitializationVector): + iv_nonce = mode.initialization_vector + elif isinstance(mode, interfaces.ModeWithNonce): + iv_nonce = mode.nonce + else: + iv_nonce = self._backend._ffi.NULL + # begin init with cipher and operation type + res = self._backend._lib.EVP_CipherInit_ex(ctx, evp_cipher, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + operation) + assert res != 0 + # set the key length to handle variable key ciphers + res = self._backend._lib.EVP_CIPHER_CTX_set_key_length( + ctx, len(cipher.key) + ) + assert res != 0 + if isinstance(mode, GCM): + res = self._backend._lib.EVP_CIPHER_CTX_ctrl( + ctx, self._backend._lib.EVP_CTRL_GCM_SET_IVLEN, + len(iv_nonce), self._backend._ffi.NULL + ) + assert res != 0 + if operation == self._DECRYPT: + res = self._backend._lib.EVP_CIPHER_CTX_ctrl( + ctx, self._backend._lib.EVP_CTRL_GCM_SET_TAG, + len(mode.tag), mode.tag + ) + assert res != 0 + + # pass key/iv + res = self._backend._lib.EVP_CipherInit_ex( + ctx, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + cipher.key, + iv_nonce, + operation + ) + assert res != 0 + # We purposely disable padding here as it's handled higher up in the + # API. + self._backend._lib.EVP_CIPHER_CTX_set_padding(ctx, 0) + self._ctx = ctx + + def update(self, data): + # OpenSSL 0.9.8e has an assertion in its EVP code that causes it + # to SIGABRT if you call update with an empty byte string. This can be + # removed when we drop support for 0.9.8e (CentOS/RHEL 5). 
This branch + # should be taken only when length is zero and mode is not GCM because + # AES GCM can return improper tag values if you don't call update + # with empty plaintext when authenticating AAD for ...reasons. + if len(data) == 0 and not isinstance(self._mode, GCM): + return b"" + + buf = self._backend._ffi.new("unsigned char[]", + len(data) + self._block_size - 1) + outlen = self._backend._ffi.new("int *") + res = self._backend._lib.EVP_CipherUpdate(self._ctx, buf, outlen, data, + len(data)) + assert res != 0 + return self._backend._ffi.buffer(buf)[:outlen[0]] + + def finalize(self): + buf = self._backend._ffi.new("unsigned char[]", self._block_size) + outlen = self._backend._ffi.new("int *") + res = self._backend._lib.EVP_CipherFinal_ex(self._ctx, buf, outlen) + if res == 0: + errors = self._backend._consume_errors() + + if not errors and isinstance(self._mode, GCM): + raise InvalidTag + + assert errors + + if errors[0][1:] == ( + self._backend._lib.ERR_LIB_EVP, + self._backend._lib.EVP_F_EVP_ENCRYPTFINAL_EX, + self._backend._lib.EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH + ) or errors[0][1:] == ( + self._backend._lib.ERR_LIB_EVP, + self._backend._lib.EVP_F_EVP_DECRYPTFINAL_EX, + self._backend._lib.EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH + ): + raise ValueError( + "The length of the provided data is not a multiple of " + "the block length." 
+ ) + else: + raise self._backend._unknown_error(errors[0]) + + if (isinstance(self._mode, GCM) and + self._operation == self._ENCRYPT): + block_byte_size = self._block_size // 8 + tag_buf = self._backend._ffi.new( + "unsigned char[]", block_byte_size + ) + res = self._backend._lib.EVP_CIPHER_CTX_ctrl( + self._ctx, self._backend._lib.EVP_CTRL_GCM_GET_TAG, + block_byte_size, tag_buf + ) + assert res != 0 + self._tag = self._backend._ffi.buffer(tag_buf)[:] + + res = self._backend._lib.EVP_CIPHER_CTX_cleanup(self._ctx) + assert res == 1 + return self._backend._ffi.buffer(buf)[:outlen[0]] + + def authenticate_additional_data(self, data): + outlen = self._backend._ffi.new("int *") + res = self._backend._lib.EVP_CipherUpdate( + self._ctx, self._backend._ffi.NULL, outlen, data, len(data) + ) + assert res != 0 + + @property + def tag(self): + return self._tag + + +@utils.register_interface(interfaces.HashContext) +class _HashContext(object): + def __init__(self, backend, algorithm, ctx=None): + self.algorithm = algorithm + + self._backend = backend + + if ctx is None: + ctx = self._backend._lib.EVP_MD_CTX_create() + ctx = self._backend._ffi.gc(ctx, + self._backend._lib.EVP_MD_CTX_destroy) + evp_md = self._backend._lib.EVP_get_digestbyname( + algorithm.name.encode("ascii")) + if evp_md == self._backend._ffi.NULL: + raise UnsupportedAlgorithm( + "{0} is not a supported hash on this backend".format( + algorithm.name), + _Reasons.UNSUPPORTED_HASH + ) + res = self._backend._lib.EVP_DigestInit_ex(ctx, evp_md, + self._backend._ffi.NULL) + assert res != 0 + + self._ctx = ctx + + def copy(self): + copied_ctx = self._backend._lib.EVP_MD_CTX_create() + copied_ctx = self._backend._ffi.gc( + copied_ctx, self._backend._lib.EVP_MD_CTX_destroy + ) + res = self._backend._lib.EVP_MD_CTX_copy_ex(copied_ctx, self._ctx) + assert res != 0 + return _HashContext(self._backend, self.algorithm, ctx=copied_ctx) + + def update(self, data): + res = self._backend._lib.EVP_DigestUpdate(self._ctx, data, 
len(data)) + assert res != 0 + + def finalize(self): + buf = self._backend._ffi.new("unsigned char[]", + self._backend._lib.EVP_MAX_MD_SIZE) + outlen = self._backend._ffi.new("unsigned int *") + res = self._backend._lib.EVP_DigestFinal_ex(self._ctx, buf, outlen) + assert res != 0 + assert outlen[0] == self.algorithm.digest_size + res = self._backend._lib.EVP_MD_CTX_cleanup(self._ctx) + assert res == 1 + return self._backend._ffi.buffer(buf)[:outlen[0]] + + +@utils.register_interface(interfaces.HashContext) +class _HMACContext(object): + def __init__(self, backend, key, algorithm, ctx=None): + self.algorithm = algorithm + self._backend = backend + + if ctx is None: + ctx = self._backend._ffi.new("HMAC_CTX *") + self._backend._lib.HMAC_CTX_init(ctx) + ctx = self._backend._ffi.gc( + ctx, self._backend._lib.HMAC_CTX_cleanup + ) + evp_md = self._backend._lib.EVP_get_digestbyname( + algorithm.name.encode('ascii')) + if evp_md == self._backend._ffi.NULL: + raise UnsupportedAlgorithm( + "{0} is not a supported hash on this backend".format( + algorithm.name), + _Reasons.UNSUPPORTED_HASH + ) + res = self._backend._lib.Cryptography_HMAC_Init_ex( + ctx, key, len(key), evp_md, self._backend._ffi.NULL + ) + assert res != 0 + + self._ctx = ctx + self._key = key + + def copy(self): + copied_ctx = self._backend._ffi.new("HMAC_CTX *") + self._backend._lib.HMAC_CTX_init(copied_ctx) + copied_ctx = self._backend._ffi.gc( + copied_ctx, self._backend._lib.HMAC_CTX_cleanup + ) + res = self._backend._lib.Cryptography_HMAC_CTX_copy( + copied_ctx, self._ctx + ) + assert res != 0 + return _HMACContext( + self._backend, self._key, self.algorithm, ctx=copied_ctx + ) + + def update(self, data): + res = self._backend._lib.Cryptography_HMAC_Update( + self._ctx, data, len(data) + ) + assert res != 0 + + def finalize(self): + buf = self._backend._ffi.new("unsigned char[]", + self._backend._lib.EVP_MAX_MD_SIZE) + outlen = self._backend._ffi.new("unsigned int *") + res = 
self._backend._lib.Cryptography_HMAC_Final( + self._ctx, buf, outlen + ) + assert res != 0 + assert outlen[0] == self.algorithm.digest_size + self._backend._lib.HMAC_CTX_cleanup(self._ctx) + return self._backend._ffi.buffer(buf)[:outlen[0]] + + +def _get_rsa_pss_salt_length(pss, key_size, digest_size): + if pss._mgf._salt_length is not None: + salt = pss._mgf._salt_length + else: + salt = pss._salt_length + + if salt is MGF1.MAX_LENGTH or salt is PSS.MAX_LENGTH: + # bit length - 1 per RFC 3447 + emlen = int(math.ceil((key_size - 1) / 8.0)) + salt_length = emlen - digest_size - 2 + assert salt_length >= 0 + return salt_length + else: + return salt + + +@utils.register_interface(interfaces.AsymmetricSignatureContext) +class _RSASignatureContext(object): + def __init__(self, backend, private_key, padding, algorithm): + self._backend = backend + self._private_key = private_key + + if not isinstance(padding, interfaces.AsymmetricPadding): + raise TypeError( + "Expected provider of interfaces.AsymmetricPadding") + + if isinstance(padding, PKCS1v15): + if self._backend._lib.Cryptography_HAS_PKEY_CTX: + self._finalize_method = self._finalize_pkey_ctx + self._padding_enum = self._backend._lib.RSA_PKCS1_PADDING + else: + self._finalize_method = self._finalize_pkcs1 + elif isinstance(padding, PSS): + if not isinstance(padding._mgf, MGF1): + raise UnsupportedAlgorithm( + "Only MGF1 is supported by this backend", + _Reasons.UNSUPPORTED_MGF + ) + + # Size of key in bytes - 2 is the maximum + # PSS signature length (salt length is checked later) + key_size_bytes = int(math.ceil(private_key.key_size / 8.0)) + if key_size_bytes - algorithm.digest_size - 2 < 0: + raise ValueError("Digest too large for key size. 
Use a larger " + "key.") + + if not self._backend.mgf1_hash_supported(padding._mgf._algorithm): + raise UnsupportedAlgorithm( + "When OpenSSL is older than 1.0.1 then only SHA1 is " + "supported with MGF1.", + _Reasons.UNSUPPORTED_HASH + ) + + if self._backend._lib.Cryptography_HAS_PKEY_CTX: + self._finalize_method = self._finalize_pkey_ctx + self._padding_enum = self._backend._lib.RSA_PKCS1_PSS_PADDING + else: + self._finalize_method = self._finalize_pss + else: + raise UnsupportedAlgorithm( + "{0} is not supported by this backend".format(padding.name), + _Reasons.UNSUPPORTED_PADDING + ) + + self._padding = padding + self._algorithm = algorithm + self._hash_ctx = _HashContext(backend, self._algorithm) + + def update(self, data): + if self._hash_ctx is None: + raise AlreadyFinalized("Context has already been finalized") + + self._hash_ctx.update(data) + + def finalize(self): + if self._hash_ctx is None: + raise AlreadyFinalized("Context has already been finalized") + + evp_pkey = self._backend._rsa_private_key_to_evp_pkey( + self._private_key) + + evp_md = self._backend._lib.EVP_get_digestbyname( + self._algorithm.name.encode("ascii")) + assert evp_md != self._backend._ffi.NULL + pkey_size = self._backend._lib.EVP_PKEY_size(evp_pkey) + assert pkey_size > 0 + + return self._finalize_method(evp_pkey, pkey_size, evp_md) + + def _finalize_pkey_ctx(self, evp_pkey, pkey_size, evp_md): + pkey_ctx = self._backend._lib.EVP_PKEY_CTX_new( + evp_pkey, self._backend._ffi.NULL + ) + assert pkey_ctx != self._backend._ffi.NULL + pkey_ctx = self._backend._ffi.gc(pkey_ctx, + self._backend._lib.EVP_PKEY_CTX_free) + res = self._backend._lib.EVP_PKEY_sign_init(pkey_ctx) + assert res == 1 + res = self._backend._lib.EVP_PKEY_CTX_set_signature_md( + pkey_ctx, evp_md) + assert res > 0 + + res = self._backend._lib.EVP_PKEY_CTX_set_rsa_padding( + pkey_ctx, self._padding_enum) + assert res > 0 + if isinstance(self._padding, PSS): + res = self._backend._lib.EVP_PKEY_CTX_set_rsa_pss_saltlen( + 
pkey_ctx, + _get_rsa_pss_salt_length( + self._padding, + self._private_key.key_size, + self._hash_ctx.algorithm.digest_size + ) + ) + assert res > 0 + + if self._backend._lib.Cryptography_HAS_MGF1_MD: + # MGF1 MD is configurable in OpenSSL 1.0.1+ + mgf1_md = self._backend._lib.EVP_get_digestbyname( + self._padding._mgf._algorithm.name.encode("ascii")) + assert mgf1_md != self._backend._ffi.NULL + res = self._backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md( + pkey_ctx, mgf1_md + ) + assert res > 0 + data_to_sign = self._hash_ctx.finalize() + self._hash_ctx = None + buflen = self._backend._ffi.new("size_t *") + res = self._backend._lib.EVP_PKEY_sign( + pkey_ctx, + self._backend._ffi.NULL, + buflen, + data_to_sign, + len(data_to_sign) + ) + assert res == 1 + buf = self._backend._ffi.new("unsigned char[]", buflen[0]) + res = self._backend._lib.EVP_PKEY_sign( + pkey_ctx, buf, buflen, data_to_sign, len(data_to_sign)) + if res != 1: + errors = self._backend._consume_errors() + assert errors[0].lib == self._backend._lib.ERR_LIB_RSA + reason = None + if (errors[0].reason == + self._backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE): + reason = ("Salt length too long for key size. Try using " + "MAX_LENGTH instead.") + elif (errors[0].reason == + self._backend._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY): + reason = "Digest too large for key size. Use a larger key." 
+ assert reason is not None + raise ValueError(reason) + + return self._backend._ffi.buffer(buf)[:] + + def _finalize_pkcs1(self, evp_pkey, pkey_size, evp_md): + sig_buf = self._backend._ffi.new("char[]", pkey_size) + sig_len = self._backend._ffi.new("unsigned int *") + res = self._backend._lib.EVP_SignFinal( + self._hash_ctx._ctx, + sig_buf, + sig_len, + evp_pkey + ) + self._hash_ctx.finalize() + self._hash_ctx = None + if res == 0: + errors = self._backend._consume_errors() + assert errors[0].lib == self._backend._lib.ERR_LIB_RSA + assert (errors[0].reason == + self._backend._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY) + raise ValueError("Digest too large for key size. Use a larger " + "key.") + + return self._backend._ffi.buffer(sig_buf)[:sig_len[0]] + + def _finalize_pss(self, evp_pkey, pkey_size, evp_md): + data_to_sign = self._hash_ctx.finalize() + self._hash_ctx = None + padded = self._backend._ffi.new("unsigned char[]", pkey_size) + rsa_cdata = self._backend._lib.EVP_PKEY_get1_RSA(evp_pkey) + assert rsa_cdata != self._backend._ffi.NULL + rsa_cdata = self._backend._ffi.gc(rsa_cdata, + self._backend._lib.RSA_free) + res = self._backend._lib.RSA_padding_add_PKCS1_PSS( + rsa_cdata, + padded, + data_to_sign, + evp_md, + _get_rsa_pss_salt_length( + self._padding, + self._private_key.key_size, + len(data_to_sign) + ) + ) + if res != 1: + errors = self._backend._consume_errors() + assert errors[0].lib == self._backend._lib.ERR_LIB_RSA + assert (errors[0].reason == + self._backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE) + raise ValueError("Salt length too long for key size. 
Try using " + "MAX_LENGTH instead.") + + sig_buf = self._backend._ffi.new("char[]", pkey_size) + sig_len = self._backend._lib.RSA_private_encrypt( + pkey_size, + padded, + sig_buf, + rsa_cdata, + self._backend._lib.RSA_NO_PADDING + ) + assert sig_len != -1 + return self._backend._ffi.buffer(sig_buf)[:sig_len] + + +@utils.register_interface(interfaces.AsymmetricVerificationContext) +class _RSAVerificationContext(object): + def __init__(self, backend, public_key, signature, padding, algorithm): + self._backend = backend + self._public_key = public_key + self._signature = signature + + if not isinstance(padding, interfaces.AsymmetricPadding): + raise TypeError( + "Expected provider of interfaces.AsymmetricPadding") + + if isinstance(padding, PKCS1v15): + if self._backend._lib.Cryptography_HAS_PKEY_CTX: + self._verify_method = self._verify_pkey_ctx + self._padding_enum = self._backend._lib.RSA_PKCS1_PADDING + else: + self._verify_method = self._verify_pkcs1 + elif isinstance(padding, PSS): + if not isinstance(padding._mgf, MGF1): + raise UnsupportedAlgorithm( + "Only MGF1 is supported by this backend", + _Reasons.UNSUPPORTED_MGF + ) + + # Size of key in bytes - 2 is the maximum + # PSS signature length (salt length is checked later) + key_size_bytes = int(math.ceil(public_key.key_size / 8.0)) + if key_size_bytes - algorithm.digest_size - 2 < 0: + raise ValueError( + "Digest too large for key size. Check that you have the " + "correct key and digest algorithm." 
+ ) + + if not self._backend.mgf1_hash_supported(padding._mgf._algorithm): + raise UnsupportedAlgorithm( + "When OpenSSL is older than 1.0.1 then only SHA1 is " + "supported with MGF1.", + _Reasons.UNSUPPORTED_HASH + ) + + if self._backend._lib.Cryptography_HAS_PKEY_CTX: + self._verify_method = self._verify_pkey_ctx + self._padding_enum = self._backend._lib.RSA_PKCS1_PSS_PADDING + else: + self._verify_method = self._verify_pss + else: + raise UnsupportedAlgorithm( + "{0} is not supported by this backend".format(padding.name), + _Reasons.UNSUPPORTED_PADDING + ) + + self._padding = padding + self._algorithm = algorithm + self._hash_ctx = _HashContext(backend, self._algorithm) + + def update(self, data): + if self._hash_ctx is None: + raise AlreadyFinalized("Context has already been finalized") + + self._hash_ctx.update(data) + + def verify(self): + if self._hash_ctx is None: + raise AlreadyFinalized("Context has already been finalized") + + evp_pkey = self._backend._rsa_public_key_to_evp_pkey( + self._public_key) + + evp_md = self._backend._lib.EVP_get_digestbyname( + self._algorithm.name.encode("ascii")) + assert evp_md != self._backend._ffi.NULL + + self._verify_method(evp_pkey, evp_md) + + def _verify_pkey_ctx(self, evp_pkey, evp_md): + pkey_ctx = self._backend._lib.EVP_PKEY_CTX_new( + evp_pkey, self._backend._ffi.NULL + ) + assert pkey_ctx != self._backend._ffi.NULL + pkey_ctx = self._backend._ffi.gc(pkey_ctx, + self._backend._lib.EVP_PKEY_CTX_free) + res = self._backend._lib.EVP_PKEY_verify_init(pkey_ctx) + assert res == 1 + res = self._backend._lib.EVP_PKEY_CTX_set_signature_md( + pkey_ctx, evp_md) + assert res > 0 + + res = self._backend._lib.EVP_PKEY_CTX_set_rsa_padding( + pkey_ctx, self._padding_enum) + assert res > 0 + if isinstance(self._padding, PSS): + res = self._backend._lib.EVP_PKEY_CTX_set_rsa_pss_saltlen( + pkey_ctx, + _get_rsa_pss_salt_length( + self._padding, + self._public_key.key_size, + self._hash_ctx.algorithm.digest_size + ) + ) + assert res 
> 0 + if self._backend._lib.Cryptography_HAS_MGF1_MD: + # MGF1 MD is configurable in OpenSSL 1.0.1+ + mgf1_md = self._backend._lib.EVP_get_digestbyname( + self._padding._mgf._algorithm.name.encode("ascii")) + assert mgf1_md != self._backend._ffi.NULL + res = self._backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md( + pkey_ctx, mgf1_md + ) + assert res > 0 + + data_to_verify = self._hash_ctx.finalize() + self._hash_ctx = None + res = self._backend._lib.EVP_PKEY_verify( + pkey_ctx, + self._signature, + len(self._signature), + data_to_verify, + len(data_to_verify) + ) + # The previous call can return negative numbers in the event of an + # error. This is not a signature failure but we need to fail if it + # occurs. + assert res >= 0 + if res == 0: + errors = self._backend._consume_errors() + assert errors + raise InvalidSignature + + def _verify_pkcs1(self, evp_pkey, evp_md): + res = self._backend._lib.EVP_VerifyFinal( + self._hash_ctx._ctx, + self._signature, + len(self._signature), + evp_pkey + ) + self._hash_ctx.finalize() + self._hash_ctx = None + # The previous call can return negative numbers in the event of an + # error. This is not a signature failure but we need to fail if it + # occurs. 
+ assert res >= 0 + if res == 0: + errors = self._backend._consume_errors() + assert errors + raise InvalidSignature + + def _verify_pss(self, evp_pkey, evp_md): + pkey_size = self._backend._lib.EVP_PKEY_size(evp_pkey) + assert pkey_size > 0 + rsa_cdata = self._backend._lib.EVP_PKEY_get1_RSA(evp_pkey) + assert rsa_cdata != self._backend._ffi.NULL + rsa_cdata = self._backend._ffi.gc(rsa_cdata, + self._backend._lib.RSA_free) + buf = self._backend._ffi.new("unsigned char[]", pkey_size) + res = self._backend._lib.RSA_public_decrypt( + len(self._signature), + self._signature, + buf, + rsa_cdata, + self._backend._lib.RSA_NO_PADDING + ) + if res != pkey_size: + errors = self._backend._consume_errors() + assert errors + raise InvalidSignature + + data_to_verify = self._hash_ctx.finalize() + self._hash_ctx = None + res = self._backend._lib.RSA_verify_PKCS1_PSS( + rsa_cdata, + data_to_verify, + evp_md, + buf, + _get_rsa_pss_salt_length( + self._padding, + self._public_key.key_size, + len(data_to_verify) + ) + ) + if res != 1: + errors = self._backend._consume_errors() + assert errors + raise InvalidSignature + + +@utils.register_interface(interfaces.AsymmetricVerificationContext) +class _DSAVerificationContext(object): + def __init__(self, backend, public_key, signature, algorithm): + self._backend = backend + self._public_key = public_key + self._signature = signature + self._algorithm = algorithm + + self._hash_ctx = _HashContext(backend, self._algorithm) + + def update(self, data): + if self._hash_ctx is None: + raise AlreadyFinalized("Context has already been finalized") + + self._hash_ctx.update(data) + + def verify(self): + if self._hash_ctx is None: + raise AlreadyFinalized("Context has already been finalized") + + self._dsa_cdata = self._backend._dsa_cdata_from_public_key( + self._public_key) + self._dsa_cdata = self._backend._ffi.gc(self._dsa_cdata, + self._backend._lib.DSA_free) + + data_to_verify = self._hash_ctx.finalize() + self._hash_ctx = None + + # The first 
parameter passed to DSA_verify is unused by OpenSSL but + # must be an integer. + res = self._backend._lib.DSA_verify( + 0, data_to_verify, len(data_to_verify), self._signature, + len(self._signature), self._dsa_cdata) + + if res != 1: + errors = self._backend._consume_errors() + assert errors + if res == -1: + assert errors[0].lib == self._backend._lib.ERR_LIB_ASN1 + + raise InvalidSignature + + +@utils.register_interface(interfaces.AsymmetricSignatureContext) +class _DSASignatureContext(object): + def __init__(self, backend, private_key, algorithm): + self._backend = backend + self._private_key = private_key + self._algorithm = algorithm + self._hash_ctx = _HashContext(backend, self._algorithm) + self._dsa_cdata = self._backend._dsa_cdata_from_private_key( + self._private_key) + self._dsa_cdata = self._backend._ffi.gc(self._dsa_cdata, + self._backend._lib.DSA_free) + + def update(self, data): + if self._hash_ctx is None: + raise AlreadyFinalized("Context has already been finalized") + + self._hash_ctx.update(data) + + def finalize(self): + if self._hash_ctx is None: + raise AlreadyFinalized("Context has already been finalized") + + data_to_sign = self._hash_ctx.finalize() + self._hash_ctx = None + sig_buf_len = self._backend._lib.DSA_size(self._dsa_cdata) + sig_buf = self._backend._ffi.new("unsigned char[]", sig_buf_len) + buflen = self._backend._ffi.new("unsigned int *") + + # The first parameter passed to DSA_sign is unused by OpenSSL but + # must be an integer. 
+ res = self._backend._lib.DSA_sign( + 0, data_to_sign, len(data_to_sign), sig_buf, + buflen, self._dsa_cdata) + assert res == 1 + assert buflen[0] + + return self._backend._ffi.buffer(sig_buf)[:buflen[0]] + + +@utils.register_interface(interfaces.CMACContext) +class _CMACContext(object): + def __init__(self, backend, algorithm, ctx=None): + if not backend.cmac_algorithm_supported(algorithm): + raise UnsupportedAlgorithm("This backend does not support CMAC", + _Reasons.UNSUPPORTED_CIPHER) + + self._backend = backend + self._key = algorithm.key + self._algorithm = algorithm + self._output_length = algorithm.block_size // 8 + + if ctx is None: + registry = self._backend._cipher_registry + adapter = registry[type(algorithm), CBC] + + evp_cipher = adapter(self._backend, algorithm, CBC) + + ctx = self._backend._lib.CMAC_CTX_new() + + assert ctx != self._backend._ffi.NULL + ctx = self._backend._ffi.gc(ctx, self._backend._lib.CMAC_CTX_free) + + self._backend._lib.CMAC_Init( + ctx, self._key, len(self._key), + evp_cipher, self._backend._ffi.NULL + ) + + self._ctx = ctx + + def update(self, data): + res = self._backend._lib.CMAC_Update(self._ctx, data, len(data)) + assert res == 1 + + def finalize(self): + buf = self._backend._ffi.new("unsigned char[]", self._output_length) + length = self._backend._ffi.new("size_t *", self._output_length) + res = self._backend._lib.CMAC_Final( + self._ctx, buf, length + ) + assert res == 1 + + self._ctx = None + + return self._backend._ffi.buffer(buf)[:] + + def copy(self): + copied_ctx = self._backend._lib.CMAC_CTX_new() + copied_ctx = self._backend._ffi.gc( + copied_ctx, self._backend._lib.CMAC_CTX_free + ) + res = self._backend._lib.CMAC_CTX_copy( + copied_ctx, self._ctx + ) + assert res == 1 + return _CMACContext( + self._backend, self._algorithm, ctx=copied_ctx + ) + + +backend = Backend() diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/__init__.py 
b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/__init__.py new file mode 100644 index 0000000..2f42057 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/__init__.py @@ -0,0 +1,14 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/__init__.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/__init__.py new file mode 100644 index 0000000..2f42057 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/__init__.py @@ -0,0 +1,14 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/binding.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/binding.py new file mode 100644 index 0000000..144bb09 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/binding.py @@ -0,0 +1,53 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +import platform +import sys + +from cryptography.hazmat.bindings.utils import build_ffi + + +class Binding(object): + """ + CommonCrypto API wrapper. + """ + _module_prefix = "cryptography.hazmat.bindings.commoncrypto." 
+ _modules = [ + "common_digest", + "common_hmac", + "common_key_derivation", + "common_cryptor", + ] + + ffi = None + lib = None + + def __init__(self): + self._ensure_ffi_initialized() + + @classmethod + def _ensure_ffi_initialized(cls): + if cls.ffi is not None and cls.lib is not None: + return + + cls.ffi, cls.lib = build_ffi( + module_prefix=cls._module_prefix, + modules=cls._modules, + ) + + @classmethod + def is_available(cls): + return sys.platform == "darwin" and list(map( + int, platform.mac_ver()[0].split("."))) >= [10, 8, 0] diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/common_cryptor.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/common_cryptor.py new file mode 100644 index 0000000..9bd03a7 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/common_cryptor.py @@ -0,0 +1,110 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#include +""" + +TYPES = """ +enum { + kCCAlgorithmAES128 = 0, + kCCAlgorithmDES, + kCCAlgorithm3DES, + kCCAlgorithmCAST, + kCCAlgorithmRC4, + kCCAlgorithmRC2, + kCCAlgorithmBlowfish +}; +typedef uint32_t CCAlgorithm; +enum { + kCCSuccess = 0, + kCCParamError = -4300, + kCCBufferTooSmall = -4301, + kCCMemoryFailure = -4302, + kCCAlignmentError = -4303, + kCCDecodeError = -4304, + kCCUnimplemented = -4305 +}; +typedef int32_t CCCryptorStatus; +typedef uint32_t CCOptions; +enum { + kCCEncrypt = 0, + kCCDecrypt, +}; +typedef uint32_t CCOperation; +typedef ... *CCCryptorRef; + +enum { + kCCModeOptionCTR_LE = 0x0001, + kCCModeOptionCTR_BE = 0x0002 +}; + +typedef uint32_t CCModeOptions; + +enum { + kCCModeECB = 1, + kCCModeCBC = 2, + kCCModeCFB = 3, + kCCModeCTR = 4, + kCCModeF8 = 5, + kCCModeLRW = 6, + kCCModeOFB = 7, + kCCModeXTS = 8, + kCCModeRC4 = 9, + kCCModeCFB8 = 10, + kCCModeGCM = 11 +}; +typedef uint32_t CCMode; +enum { + ccNoPadding = 0, + ccPKCS7Padding = 1, +}; +typedef uint32_t CCPadding; +""" + +FUNCTIONS = """ +CCCryptorStatus CCCryptorCreateWithMode(CCOperation, CCMode, CCAlgorithm, + CCPadding, const void *, const void *, + size_t, const void *, size_t, int, + CCModeOptions, CCCryptorRef *); +CCCryptorStatus CCCryptorCreate(CCOperation, CCAlgorithm, CCOptions, + const void *, size_t, const void *, + CCCryptorRef *); +CCCryptorStatus CCCryptorUpdate(CCCryptorRef, const void *, size_t, void *, + size_t, size_t *); +CCCryptorStatus CCCryptorFinal(CCCryptorRef, void *, size_t, size_t *); +CCCryptorStatus CCCryptorRelease(CCCryptorRef); + +CCCryptorStatus CCCryptorGCMAddIV(CCCryptorRef, const void *, size_t); +CCCryptorStatus CCCryptorGCMAddAAD(CCCryptorRef, const void *, size_t); +CCCryptorStatus CCCryptorGCMEncrypt(CCCryptorRef, const void *, size_t, + void *); +CCCryptorStatus CCCryptorGCMDecrypt(CCCryptorRef, const void *, size_t, + void *); +CCCryptorStatus 
CCCryptorGCMFinal(CCCryptorRef, const void *, size_t *); +CCCryptorStatus CCCryptorGCMReset(CCCryptorRef); +""" + +MACROS = """ +""" + +CUSTOMIZATIONS = """ +// Not defined in the public header +enum { + kCCModeGCM = 11 +}; +""" + +CONDITIONAL_NAMES = {} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/common_digest.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/common_digest.py new file mode 100644 index 0000000..c59200c --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/common_digest.py @@ -0,0 +1,69 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#include +""" + +TYPES = """ +typedef uint32_t CC_LONG; +typedef uint64_t CC_LONG64; +typedef struct CC_MD5state_st { + ...; +} CC_MD5_CTX; +typedef struct CC_SHA1state_st { + ...; +} CC_SHA1_CTX; +typedef struct CC_SHA256state_st { + ...; +} CC_SHA256_CTX; +typedef struct CC_SHA512state_st { + ...; +} CC_SHA512_CTX; +""" + +FUNCTIONS = """ +int CC_MD5_Init(CC_MD5_CTX *); +int CC_MD5_Update(CC_MD5_CTX *, const void *, CC_LONG); +int CC_MD5_Final(unsigned char *, CC_MD5_CTX *); + +int CC_SHA1_Init(CC_SHA1_CTX *); +int CC_SHA1_Update(CC_SHA1_CTX *, const void *, CC_LONG); +int CC_SHA1_Final(unsigned char *, CC_SHA1_CTX *); + +int CC_SHA224_Init(CC_SHA256_CTX *); +int CC_SHA224_Update(CC_SHA256_CTX *, const void *, CC_LONG); +int CC_SHA224_Final(unsigned char *, CC_SHA256_CTX *); + +int CC_SHA256_Init(CC_SHA256_CTX *); +int CC_SHA256_Update(CC_SHA256_CTX *, const void *, CC_LONG); +int CC_SHA256_Final(unsigned char *, CC_SHA256_CTX *); + +int CC_SHA384_Init(CC_SHA512_CTX *); +int CC_SHA384_Update(CC_SHA512_CTX *, const void *, CC_LONG); +int CC_SHA384_Final(unsigned char *, CC_SHA512_CTX *); + +int CC_SHA512_Init(CC_SHA512_CTX *); +int CC_SHA512_Update(CC_SHA512_CTX *, const void *, CC_LONG); +int CC_SHA512_Final(unsigned char *, CC_SHA512_CTX *); +""" + +MACROS = """ +""" + +CUSTOMIZATIONS = """ +""" + +CONDITIONAL_NAMES = {} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/common_hmac.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/common_hmac.py new file mode 100644 index 0000000..4f54b62 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/common_hmac.py @@ -0,0 +1,48 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#include +""" + +TYPES = """ +typedef struct { + ...; +} CCHmacContext; +enum { + kCCHmacAlgSHA1, + kCCHmacAlgMD5, + kCCHmacAlgSHA256, + kCCHmacAlgSHA384, + kCCHmacAlgSHA512, + kCCHmacAlgSHA224 +}; +typedef uint32_t CCHmacAlgorithm; +""" + +FUNCTIONS = """ +void CCHmacInit(CCHmacContext *, CCHmacAlgorithm, const void *, size_t); +void CCHmacUpdate(CCHmacContext *, const void *, size_t); +void CCHmacFinal(CCHmacContext *, void *); + +""" + +MACROS = """ +""" + +CUSTOMIZATIONS = """ +""" + +CONDITIONAL_NAMES = {} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/common_key_derivation.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/common_key_derivation.py new file mode 100644 index 0000000..e8cc03e --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/common_key_derivation.py @@ -0,0 +1,50 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#include +""" + +TYPES = """ +enum { + kCCPBKDF2 = 2, +}; +typedef uint32_t CCPBKDFAlgorithm; +enum { + kCCPRFHmacAlgSHA1 = 1, + kCCPRFHmacAlgSHA224 = 2, + kCCPRFHmacAlgSHA256 = 3, + kCCPRFHmacAlgSHA384 = 4, + kCCPRFHmacAlgSHA512 = 5, +}; +typedef uint32_t CCPseudoRandomAlgorithm; +typedef unsigned int uint; +""" + +FUNCTIONS = """ +int CCKeyDerivationPBKDF(CCPBKDFAlgorithm, const char *, size_t, + const uint8_t *, size_t, CCPseudoRandomAlgorithm, + uint, uint8_t *, size_t); +uint CCCalibratePBKDF(CCPBKDFAlgorithm, size_t, size_t, + CCPseudoRandomAlgorithm, size_t, uint32_t); +""" + +MACROS = """ +""" + +CUSTOMIZATIONS = """ +""" + +CONDITIONAL_NAMES = {} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/__init__.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/__init__.py new file mode 100644 index 0000000..2f42057 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/__init__.py @@ -0,0 +1,14 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/aes.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/aes.py new file mode 100644 index 0000000..17c154c --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/aes.py @@ -0,0 +1,62 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#include +""" + +TYPES = """ +static const int Cryptography_HAS_AES_WRAP; + +struct aes_key_st { + ...; +}; +typedef struct aes_key_st AES_KEY; +""" + +FUNCTIONS = """ +int AES_set_encrypt_key(const unsigned char *, const int, AES_KEY *); +int AES_set_decrypt_key(const unsigned char *, const int, AES_KEY *); +""" + +MACROS = """ +/* these can be moved back to FUNCTIONS once we drop support for 0.9.8h. + This should be when we drop RHEL/CentOS 5, which is on 0.9.8e. 
*/ +int AES_wrap_key(AES_KEY *, const unsigned char *, unsigned char *, + const unsigned char *, unsigned int); +int AES_unwrap_key(AES_KEY *, const unsigned char *, unsigned char *, + const unsigned char *, unsigned int); +""" + +CUSTOMIZATIONS = """ +// OpenSSL 0.9.8h+ +#if OPENSSL_VERSION_NUMBER >= 0x0090808fL +static const long Cryptography_HAS_AES_WRAP = 1; +#else +static const long Cryptography_HAS_AES_WRAP = 0; +int (*AES_wrap_key)(AES_KEY *, const unsigned char *, unsigned char *, + const unsigned char *, unsigned int) = NULL; +int (*AES_unwrap_key)(AES_KEY *, const unsigned char *, unsigned char *, + const unsigned char *, unsigned int) = NULL; +#endif + +""" + +CONDITIONAL_NAMES = { + "Cryptography_HAS_AES_WRAP": [ + "AES_wrap_key", + "AES_unwrap_key", + ], +} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/asn1.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/asn1.py new file mode 100644 index 0000000..dfdf1bf --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/asn1.py @@ -0,0 +1,149 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#include +""" + +TYPES = """ +/* + * TODO: This typedef is wrong. + * + * This is due to limitations of cffi. 
+ * See https://bitbucket.org/cffi/cffi/issue/69 + * + * For another possible work-around (not used here because it involves more + * complicated use of the cffi API which falls outside the general pattern used + * by this package), see + * http://paste.pound-python.org/show/iJcTUMkKeBeS6yXpZWUU/ + * + * The work-around used here is to just be sure to declare a type that is at + * least as large as the real type. Maciej explains: + * + * I think you want to declare your value too large (e.g. long) + * that way you'll never pass garbage + */ +typedef intptr_t time_t; + +typedef int ASN1_BOOLEAN; +typedef ... ASN1_INTEGER; + +struct asn1_string_st { + int length; + int type; + unsigned char *data; + long flags; +}; + +typedef struct asn1_string_st ASN1_OCTET_STRING; +typedef struct asn1_string_st ASN1_IA5STRING; +typedef ... ASN1_OBJECT; +typedef ... ASN1_STRING; +typedef ... ASN1_TYPE; +typedef ... ASN1_GENERALIZEDTIME; +typedef ... ASN1_ENUMERATED; +typedef ... ASN1_ITEM; +typedef ... ASN1_VALUE; + +typedef struct { + ...; +} ASN1_TIME; +typedef ... ASN1_ITEM_EXP; + +typedef ... 
ASN1_UTCTIME; + +static const int V_ASN1_GENERALIZEDTIME; + +static const int MBSTRING_UTF8; +""" + +FUNCTIONS = """ +ASN1_OBJECT *ASN1_OBJECT_new(void); +void ASN1_OBJECT_free(ASN1_OBJECT *); + +/* ASN1 OBJECT IDENTIFIER */ +ASN1_OBJECT *d2i_ASN1_OBJECT(ASN1_OBJECT **, const unsigned char **, long); +int i2d_ASN1_OBJECT(ASN1_OBJECT *, unsigned char **); + +/* ASN1 STRING */ +ASN1_STRING *ASN1_STRING_new(void); +ASN1_STRING *ASN1_STRING_type_new(int); +void ASN1_STRING_free(ASN1_STRING *); +unsigned char *ASN1_STRING_data(ASN1_STRING *); +int ASN1_STRING_set(ASN1_STRING *, const void *, int); +int ASN1_STRING_type(ASN1_STRING *); +int ASN1_STRING_to_UTF8(unsigned char **, ASN1_STRING *); + +/* ASN1 OCTET STRING */ +ASN1_OCTET_STRING *ASN1_OCTET_STRING_new(void); +void ASN1_OCTET_STRING_free(ASN1_OCTET_STRING *); +int ASN1_OCTET_STRING_set(ASN1_OCTET_STRING *, const unsigned char *, int); + +/* ASN1 INTEGER */ +ASN1_INTEGER *ASN1_INTEGER_new(void); +void ASN1_INTEGER_free(ASN1_INTEGER *); +int ASN1_INTEGER_set(ASN1_INTEGER *, long); +int i2a_ASN1_INTEGER(BIO *, ASN1_INTEGER *); + +/* ASN1 TIME */ +ASN1_TIME *ASN1_TIME_new(void); +void ASN1_TIME_free(ASN1_TIME *); +ASN1_GENERALIZEDTIME *ASN1_TIME_to_generalizedtime(ASN1_TIME *, + ASN1_GENERALIZEDTIME **); + +/* ASN1 UTCTIME */ +int ASN1_UTCTIME_cmp_time_t(const ASN1_UTCTIME *, time_t); + +/* ASN1 GENERALIZEDTIME */ +int ASN1_GENERALIZEDTIME_set_string(ASN1_GENERALIZEDTIME *, const char *); +void ASN1_GENERALIZEDTIME_free(ASN1_GENERALIZEDTIME *); + +/* ASN1 ENUMERATED */ +ASN1_ENUMERATED *ASN1_ENUMERATED_new(void); +void ASN1_ENUMERATED_free(ASN1_ENUMERATED *); +int ASN1_ENUMERATED_set(ASN1_ENUMERATED *, long); + +ASN1_VALUE *ASN1_item_d2i(ASN1_VALUE **, const unsigned char **, long, + const ASN1_ITEM *); +""" + +MACROS = """ +ASN1_TIME *M_ASN1_TIME_dup(void *); +const ASN1_ITEM *ASN1_ITEM_ptr(ASN1_ITEM_EXP *); + +/* These aren't macros these arguments are all const X on openssl > 1.0.x */ + +int 
ASN1_STRING_length(ASN1_STRING *); +ASN1_STRING *ASN1_STRING_dup(ASN1_STRING *); +int ASN1_STRING_cmp(ASN1_STRING *, ASN1_STRING *); + +ASN1_OCTET_STRING *ASN1_OCTET_STRING_dup(ASN1_OCTET_STRING *); +int ASN1_OCTET_STRING_cmp(ASN1_OCTET_STRING *, ASN1_OCTET_STRING *); + +ASN1_INTEGER *ASN1_INTEGER_dup(ASN1_INTEGER *); +int ASN1_INTEGER_cmp(ASN1_INTEGER *, ASN1_INTEGER *); +long ASN1_INTEGER_get(ASN1_INTEGER *); + +BIGNUM *ASN1_INTEGER_to_BN(ASN1_INTEGER *, BIGNUM *); +ASN1_INTEGER *BN_to_ASN1_INTEGER(BIGNUM *, ASN1_INTEGER *); + +/* These isn't a macro the arg is const on openssl 1.0.2+ */ +int ASN1_GENERALIZEDTIME_check(ASN1_GENERALIZEDTIME *); +""" + +CUSTOMIZATIONS = """ +""" + +CONDITIONAL_NAMES = {} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/bignum.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/bignum.py new file mode 100644 index 0000000..1d944ee --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/bignum.py @@ -0,0 +1,114 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#include +""" + +TYPES = """ +typedef ... BN_CTX; +typedef ... BIGNUM; +/* + * TODO: This typedef is wrong. + * + * This is due to limitations of cffi. 
+ * See https://bitbucket.org/cffi/cffi/issue/69 + * + * For another possible work-around (not used here because it involves more + * complicated use of the cffi API which falls outside the general pattern used + * by this package), see + * http://paste.pound-python.org/show/iJcTUMkKeBeS6yXpZWUU/ + * + * The work-around used here is to just be sure to declare a type that is at + * least as large as the real type. Maciej explains: + * + * I think you want to declare your value too large (e.g. long) + * that way you'll never pass garbage + */ +typedef uintptr_t BN_ULONG; +""" + +FUNCTIONS = """ +BIGNUM *BN_new(void); +void BN_free(BIGNUM *); + +BN_CTX *BN_CTX_new(void); +void BN_CTX_free(BN_CTX *); + +void BN_CTX_start(BN_CTX *); +BIGNUM *BN_CTX_get(BN_CTX *); +void BN_CTX_end(BN_CTX *); + +BIGNUM *BN_copy(BIGNUM *, const BIGNUM *); +BIGNUM *BN_dup(const BIGNUM *); + +int BN_set_word(BIGNUM *, BN_ULONG); +BN_ULONG BN_get_word(const BIGNUM *); + +const BIGNUM *BN_value_one(void); + +char *BN_bn2hex(const BIGNUM *); +int BN_hex2bn(BIGNUM **, const char *); +int BN_dec2bn(BIGNUM **, const char *); + +int BN_bn2bin(const BIGNUM *, unsigned char *); +BIGNUM *BN_bin2bn(const unsigned char *, int, BIGNUM *); + +int BN_num_bits(const BIGNUM *); + +int BN_cmp(const BIGNUM *, const BIGNUM *); +int BN_add(BIGNUM *, const BIGNUM *, const BIGNUM *); +int BN_sub(BIGNUM *, const BIGNUM *, const BIGNUM *); +int BN_mul(BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); +int BN_sqr(BIGNUM *, const BIGNUM *, BN_CTX *); +int BN_div(BIGNUM *, BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); +int BN_nnmod(BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); +int BN_mod_add(BIGNUM *, const BIGNUM *, const BIGNUM *, const BIGNUM *, + BN_CTX *); +int BN_mod_sub(BIGNUM *, const BIGNUM *, const BIGNUM *, const BIGNUM *, + BN_CTX *); +int BN_mod_mul(BIGNUM *, const BIGNUM *, const BIGNUM *, const BIGNUM *, + BN_CTX *); +int BN_mod_sqr(BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); 
+int BN_exp(BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); +int BN_mod_exp(BIGNUM *, const BIGNUM *, const BIGNUM *, const BIGNUM *, + BN_CTX *); +int BN_gcd(BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); +BIGNUM *BN_mod_inverse(BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); + +int BN_set_bit(BIGNUM *, int); +int BN_clear_bit(BIGNUM *, int); + +int BN_is_bit_set(const BIGNUM *, int); + +int BN_mask_bits(BIGNUM *, int); +""" + +MACROS = """ +int BN_zero(BIGNUM *); +int BN_one(BIGNUM *); +int BN_mod(BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); + +int BN_lshift(BIGNUM *, const BIGNUM *, int); +int BN_lshift1(BIGNUM *, BIGNUM *); + +int BN_rshift(BIGNUM *, BIGNUM *, int); +int BN_rshift1(BIGNUM *, BIGNUM *); +""" + +CUSTOMIZATIONS = """ +""" + +CONDITIONAL_NAMES = {} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/binding.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/binding.py new file mode 100644 index 0000000..aa0525f --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/binding.py @@ -0,0 +1,155 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function + +import sys +import threading + +from cryptography.hazmat.bindings.utils import build_ffi + + +_OSX_PRE_INCLUDE = """ +#ifdef __APPLE__ +#include +#define __ORIG_DEPRECATED_IN_MAC_OS_X_VERSION_10_7_AND_LATER \ + DEPRECATED_IN_MAC_OS_X_VERSION_10_7_AND_LATER +#undef DEPRECATED_IN_MAC_OS_X_VERSION_10_7_AND_LATER +#define DEPRECATED_IN_MAC_OS_X_VERSION_10_7_AND_LATER +#endif +""" + +_OSX_POST_INCLUDE = """ +#ifdef __APPLE__ +#undef DEPRECATED_IN_MAC_OS_X_VERSION_10_7_AND_LATER +#define DEPRECATED_IN_MAC_OS_X_VERSION_10_7_AND_LATER \ + __ORIG_DEPRECATED_IN_MAC_OS_X_VERSION_10_7_AND_LATER +#endif +""" + + +class Binding(object): + """ + OpenSSL API wrapper. + """ + _module_prefix = "cryptography.hazmat.bindings.openssl." + _modules = [ + "aes", + "asn1", + "bignum", + "bio", + "cmac", + "cms", + "conf", + "crypto", + "dh", + "dsa", + "ec", + "ecdh", + "ecdsa", + "engine", + "err", + "evp", + "hmac", + "nid", + "objects", + "opensslv", + "osrandom_engine", + "pem", + "pkcs7", + "pkcs12", + "rand", + "rsa", + "ssl", + "x509", + "x509name", + "x509v3", + ] + + _locks = None + _lock_cb_handle = None + _lock_init_lock = threading.Lock() + + ffi = None + lib = None + + def __init__(self): + self._ensure_ffi_initialized() + + @classmethod + def _ensure_ffi_initialized(cls): + if cls.ffi is not None and cls.lib is not None: + return + + # OpenSSL goes by a different library name on different operating + # systems. 
+ if sys.platform != "win32": + libraries = ["crypto", "ssl"] + else: # pragma: no cover + libraries = ["libeay32", "ssleay32", "advapi32"] + + cls.ffi, cls.lib = build_ffi( + module_prefix=cls._module_prefix, + modules=cls._modules, + pre_include=_OSX_PRE_INCLUDE, + post_include=_OSX_POST_INCLUDE, + libraries=libraries, + ) + res = cls.lib.Cryptography_add_osrandom_engine() + assert res != 0 + + @classmethod + def is_available(cls): + # For now, OpenSSL is considered our "default" binding, so we treat it + # as always available. + return True + + @classmethod + def init_static_locks(cls): + with cls._lock_init_lock: + cls._ensure_ffi_initialized() + + if not cls._lock_cb_handle: + cls._lock_cb_handle = cls.ffi.callback( + "void(int, int, const char *, int)", + cls._lock_cb + ) + + # Use Python's implementation if available, importing _ssl triggers + # the setup for this. + __import__("_ssl") + + if cls.lib.CRYPTO_get_locking_callback() != cls.ffi.NULL: + return + + # If nothing else has setup a locking callback already, we set up + # our own + num_locks = cls.lib.CRYPTO_num_locks() + cls._locks = [threading.Lock() for n in range(num_locks)] + + cls.lib.CRYPTO_set_locking_callback(cls._lock_cb_handle) + + @classmethod + def _lock_cb(cls, mode, n, file, line): + lock = cls._locks[n] + + if mode & cls.lib.CRYPTO_LOCK: + lock.acquire() + elif mode & cls.lib.CRYPTO_UNLOCK: + lock.release() + else: + raise RuntimeError( + "Unknown lock mode {0}: lock={1}, file={2}, line={3}".format( + mode, n, file, line + ) + ) diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/bio.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/bio.py new file mode 100644 index 0000000..cfe6034 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/bio.py @@ -0,0 +1,181 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance 
with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#include +""" + +TYPES = """ +typedef struct bio_st BIO; +typedef void bio_info_cb(BIO *, int, const char *, int, long, long); +struct bio_method_st { + int type; + const char *name; + int (*bwrite)(BIO *, const char *, int); + int (*bread)(BIO *, char *, int); + int (*bputs)(BIO *, const char *); + int (*bgets)(BIO *, char*, int); + long (*ctrl)(BIO *, int, long, void *); + int (*create)(BIO *); + int (*destroy)(BIO *); + long (*callback_ctrl)(BIO *, int, bio_info_cb *); + ...; +}; +typedef struct bio_method_st BIO_METHOD; +struct bio_st { + BIO_METHOD *method; + long (*callback)(struct bio_st*, int, const char*, int, long, long); + char *cb_arg; + int init; + int shutdown; + int flags; + int retry_reason; + int num; + void *ptr; + struct bio_st *next_bio; + struct bio_st *prev_bio; + int references; + unsigned long num_read; + unsigned long num_write; + ...; +}; +typedef ... 
BUF_MEM; + +static const int BIO_TYPE_MEM; +static const int BIO_TYPE_FILE; +static const int BIO_TYPE_FD; +static const int BIO_TYPE_SOCKET; +static const int BIO_TYPE_CONNECT; +static const int BIO_TYPE_ACCEPT; +static const int BIO_TYPE_NULL; +static const int BIO_CLOSE; +static const int BIO_NOCLOSE; +static const int BIO_TYPE_SOURCE_SINK; +static const int BIO_CTRL_RESET; +static const int BIO_CTRL_EOF; +static const int BIO_CTRL_SET; +static const int BIO_CTRL_SET_CLOSE; +static const int BIO_CTRL_FLUSH; +static const int BIO_CTRL_DUP; +static const int BIO_CTRL_GET_CLOSE; +static const int BIO_CTRL_INFO; +static const int BIO_CTRL_GET; +static const int BIO_CTRL_PENDING; +static const int BIO_CTRL_WPENDING; +static const int BIO_C_FILE_SEEK; +static const int BIO_C_FILE_TELL; +static const int BIO_TYPE_NONE; +static const int BIO_TYPE_PROXY_CLIENT; +static const int BIO_TYPE_PROXY_SERVER; +static const int BIO_TYPE_NBIO_TEST; +static const int BIO_TYPE_BER; +static const int BIO_TYPE_BIO; +static const int BIO_TYPE_DESCRIPTOR; +static const int BIO_FLAGS_READ; +static const int BIO_FLAGS_WRITE; +static const int BIO_FLAGS_IO_SPECIAL; +static const int BIO_FLAGS_RWS; +static const int BIO_FLAGS_SHOULD_RETRY; +static const int BIO_TYPE_NULL_FILTER; +static const int BIO_TYPE_SSL; +static const int BIO_TYPE_MD; +static const int BIO_TYPE_BUFFER; +static const int BIO_TYPE_CIPHER; +static const int BIO_TYPE_BASE64; +static const int BIO_TYPE_FILTER; +""" + +FUNCTIONS = """ +BIO* BIO_new(BIO_METHOD *); +int BIO_set(BIO *, BIO_METHOD *); +int BIO_free(BIO *); +void BIO_vfree(BIO *); +void BIO_free_all(BIO *); +BIO *BIO_push(BIO *, BIO *); +BIO *BIO_pop(BIO *); +BIO *BIO_next(BIO *); +BIO *BIO_find_type(BIO *, int); +BIO_METHOD *BIO_s_mem(void); +BIO *BIO_new_mem_buf(void *, int); +BIO_METHOD *BIO_s_file(void); +BIO *BIO_new_file(const char *, const char *); +BIO *BIO_new_fp(FILE *, int); +BIO_METHOD *BIO_s_fd(void); +BIO *BIO_new_fd(int, int); +BIO_METHOD 
*BIO_s_socket(void); +BIO *BIO_new_socket(int, int); +BIO_METHOD *BIO_s_null(void); +long BIO_ctrl(BIO *, int, long, void *); +long BIO_callback_ctrl( + BIO *, + int, + void (*)(struct bio_st *, int, const char *, int, long, long) +); +char *BIO_ptr_ctrl(BIO *, int, long); +long BIO_int_ctrl(BIO *, int, long, int); +size_t BIO_ctrl_pending(BIO *); +size_t BIO_ctrl_wpending(BIO *); +int BIO_read(BIO *, void *, int); +int BIO_gets(BIO *, char *, int); +int BIO_write(BIO *, const void *, int); +int BIO_puts(BIO *, const char *); +BIO_METHOD *BIO_f_null(void); +BIO_METHOD *BIO_f_buffer(void); +""" + +MACROS = """ +long BIO_set_fd(BIO *, long, int); +long BIO_get_fd(BIO *, char *); +long BIO_set_mem_eof_return(BIO *, int); +long BIO_get_mem_data(BIO *, char **); +long BIO_set_mem_buf(BIO *, BUF_MEM *, int); +long BIO_get_mem_ptr(BIO *, BUF_MEM **); +long BIO_set_fp(BIO *, FILE *, int); +long BIO_get_fp(BIO *, FILE **); +long BIO_read_filename(BIO *, char *); +long BIO_write_filename(BIO *, char *); +long BIO_append_filename(BIO *, char *); +long BIO_rw_filename(BIO *, char *); +int BIO_should_read(BIO *); +int BIO_should_write(BIO *); +int BIO_should_io_special(BIO *); +int BIO_retry_type(BIO *); +int BIO_should_retry(BIO *); +int BIO_reset(BIO *); +int BIO_seek(BIO *, int); +int BIO_tell(BIO *); +int BIO_flush(BIO *); +int BIO_eof(BIO *); +int BIO_set_close(BIO *,long); +int BIO_get_close(BIO *); +int BIO_pending(BIO *); +int BIO_wpending(BIO *); +int BIO_get_info_callback(BIO *, bio_info_cb **); +int BIO_set_info_callback(BIO *, bio_info_cb *); +long BIO_get_buffer_num_lines(BIO *); +long BIO_set_read_buffer_size(BIO *, long); +long BIO_set_write_buffer_size(BIO *, long); +long BIO_set_buffer_size(BIO *, long); +long BIO_set_buffer_read_data(BIO *, void *, long); + +/* The following was a macro in 0.9.8e. Once we drop support for RHEL/CentOS 5 + we should move this back to FUNCTIONS. 
*/ +int BIO_method_type(const BIO *); +""" + +CUSTOMIZATIONS = """ +""" + +CONDITIONAL_NAMES = {} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/cmac.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/cmac.py new file mode 100644 index 0000000..c8bcc82 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/cmac.py @@ -0,0 +1,65 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#if OPENSSL_VERSION_NUMBER >= 0x10001000L +#include +#endif +""" + +TYPES = """ +static const int Cryptography_HAS_CMAC; +typedef ... 
CMAC_CTX; +""" + +FUNCTIONS = """ +""" + +MACROS = """ +CMAC_CTX *CMAC_CTX_new(void); +int CMAC_Init(CMAC_CTX *, const void *, size_t, const EVP_CIPHER *, ENGINE *); +int CMAC_Update(CMAC_CTX *, const void *, size_t); +int CMAC_Final(CMAC_CTX *, unsigned char *, size_t *); +int CMAC_CTX_copy(CMAC_CTX *, const CMAC_CTX *); +void CMAC_CTX_free(CMAC_CTX *); +""" + +CUSTOMIZATIONS = """ +#if OPENSSL_VERSION_NUMBER < 0x10001000L + +static const long Cryptography_HAS_CMAC = 0; +typedef void CMAC_CTX; +CMAC_CTX *(*CMAC_CTX_new)(void) = NULL; +int (*CMAC_Init)(CMAC_CTX *, const void *, size_t, const EVP_CIPHER *, + ENGINE *) = NULL; +int (*CMAC_Update)(CMAC_CTX *, const void *, size_t) = NULL; +int (*CMAC_Final)(CMAC_CTX *, unsigned char *, size_t *) = NULL; +int (*CMAC_CTX_copy)(CMAC_CTX *, const CMAC_CTX *) = NULL; +void (*CMAC_CTX_free)(CMAC_CTX *) = NULL; +#else +static const long Cryptography_HAS_CMAC = 1; +#endif +""" + +CONDITIONAL_NAMES = { + "Cryptography_HAS_CMAC": [ + "CMAC_CTX_new", + "CMAC_Init", + "CMAC_Update", + "CMAC_Final", + "CMAC_CTX_copy", + "CMAC_CTX_free", + ], +} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/cms.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/cms.py new file mode 100644 index 0000000..a3760f2 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/cms.py @@ -0,0 +1,100 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#if !defined(OPENSSL_NO_CMS) && OPENSSL_VERSION_NUMBER >= 0x0090808fL +// The next define should really be in the OpenSSL header, but it is missing. +// Failing to include this on Windows causes compilation failures. +#if defined(OPENSSL_SYS_WINDOWS) +#include +#endif +#include +#endif +""" + +TYPES = """ +static const long Cryptography_HAS_CMS; + +typedef ... CMS_ContentInfo; +typedef ... CMS_SignerInfo; +typedef ... CMS_CertificateChoices; +typedef ... CMS_RevocationInfoChoice; +typedef ... CMS_RecipientInfo; +typedef ... CMS_ReceiptRequest; +typedef ... CMS_Receipt; +""" + +FUNCTIONS = """ +""" + +MACROS = """ +BIO *BIO_new_CMS(BIO *, CMS_ContentInfo *); +int i2d_CMS_bio_stream(BIO *, CMS_ContentInfo *, BIO *, int); +int PEM_write_bio_CMS_stream(BIO *, CMS_ContentInfo *, BIO *, int); +int CMS_final(CMS_ContentInfo *, BIO *, BIO *, unsigned int); +CMS_ContentInfo *CMS_sign(X509 *, EVP_PKEY *, Cryptography_STACK_OF_X509 *, + BIO *, unsigned int); +int CMS_verify(CMS_ContentInfo *, Cryptography_STACK_OF_X509 *, X509_STORE *, + BIO *, BIO *, unsigned int); +CMS_ContentInfo *CMS_encrypt(Cryptography_STACK_OF_X509 *, BIO *, + const EVP_CIPHER *, unsigned int); +int CMS_decrypt(CMS_ContentInfo *, EVP_PKEY *, X509 *, BIO *, BIO *, + unsigned int); +CMS_SignerInfo *CMS_add1_signer(CMS_ContentInfo *, X509 *, EVP_PKEY *, + const EVP_MD *, unsigned int); +""" + +CUSTOMIZATIONS = """ +#if !defined(OPENSSL_NO_CMS) && OPENSSL_VERSION_NUMBER >= 0x0090808fL +static const long Cryptography_HAS_CMS = 1; +#else +static const long Cryptography_HAS_CMS = 0; +typedef void CMS_ContentInfo; +typedef void CMS_SignerInfo; +typedef void CMS_CertificateChoices; +typedef void CMS_RevocationInfoChoice; +typedef void CMS_RecipientInfo; +typedef void CMS_ReceiptRequest; +typedef void CMS_Receipt; +BIO *(*BIO_new_CMS)(BIO *, CMS_ContentInfo *) = NULL; +int (*i2d_CMS_bio_stream)(BIO *, CMS_ContentInfo *, BIO *, 
int) = NULL; +int (*PEM_write_bio_CMS_stream)(BIO *, CMS_ContentInfo *, BIO *, int) = NULL; +int (*CMS_final)(CMS_ContentInfo *, BIO *, BIO *, unsigned int) = NULL; +CMS_ContentInfo *(*CMS_sign)(X509 *, EVP_PKEY *, Cryptography_STACK_OF_X509 *, + BIO *, unsigned int) = NULL; +int (*CMS_verify)(CMS_ContentInfo *, Cryptography_STACK_OF_X509 *, + X509_STORE *, BIO *, BIO *, unsigned int) = NULL; +CMS_ContentInfo *(*CMS_encrypt)(Cryptography_STACK_OF_X509 *, BIO *, + const EVP_CIPHER *, unsigned int) = NULL; +int (*CMS_decrypt)(CMS_ContentInfo *, EVP_PKEY *, X509 *, BIO *, BIO *, + unsigned int) = NULL; +CMS_SignerInfo *(*CMS_add1_signer)(CMS_ContentInfo *, X509 *, EVP_PKEY *, + const EVP_MD *, unsigned int) = NULL; +#endif +""" + +CONDITIONAL_NAMES = { + "Cryptography_HAS_CMS": [ + "BIO_new_CMS", + "i2d_CMS_bio_stream", + "PEM_write_bio_CMS_stream", + "CMS_final", + "CMS_sign", + "CMS_verify", + "CMS_encrypt", + "CMS_decrypt", + "CMS_add1_signer", + ] +} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/conf.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/conf.py new file mode 100644 index 0000000..dda35e8 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/conf.py @@ -0,0 +1,33 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#include +""" + +TYPES = """ +typedef ... 
CONF; +""" + +FUNCTIONS = """ +""" + +MACROS = """ +""" + +CUSTOMIZATIONS = """ +""" + +CONDITIONAL_NAMES = {} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/crypto.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/crypto.py new file mode 100644 index 0000000..99e1a61 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/crypto.py @@ -0,0 +1,67 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#include +""" + +TYPES = """ +typedef ... 
CRYPTO_THREADID; + +static const int SSLEAY_VERSION; +static const int SSLEAY_CFLAGS; +static const int SSLEAY_PLATFORM; +static const int SSLEAY_DIR; +static const int SSLEAY_BUILT_ON; +static const int CRYPTO_MEM_CHECK_ON; +static const int CRYPTO_MEM_CHECK_OFF; +static const int CRYPTO_MEM_CHECK_ENABLE; +static const int CRYPTO_MEM_CHECK_DISABLE; +static const int CRYPTO_LOCK; +static const int CRYPTO_UNLOCK; +static const int CRYPTO_READ; +static const int CRYPTO_WRITE; +static const int CRYPTO_LOCK_SSL; +""" + +FUNCTIONS = """ +unsigned long SSLeay(void); +const char *SSLeay_version(int); + +void CRYPTO_free(void *); +int CRYPTO_mem_ctrl(int); +int CRYPTO_is_mem_check_on(void); +void CRYPTO_mem_leaks(struct bio_st *); +void CRYPTO_cleanup_all_ex_data(void); +int CRYPTO_num_locks(void); +void CRYPTO_set_locking_callback(void(*)(int, int, const char *, int)); +void CRYPTO_set_id_callback(unsigned long (*)(void)); +unsigned long (*CRYPTO_get_id_callback(void))(void); +void (*CRYPTO_get_locking_callback(void))(int, int, const char *, int); +void CRYPTO_lock(int, int, const char *, int); + +void OPENSSL_free(void *); +""" + +MACROS = """ +void CRYPTO_add(int *, int, int); +void CRYPTO_malloc_init(void); +void CRYPTO_malloc_debug_init(void); +""" + +CUSTOMIZATIONS = """ +""" + +CONDITIONAL_NAMES = {} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/dh.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/dh.py new file mode 100644 index 0000000..a0f9947 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/dh.py @@ -0,0 +1,57 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#include +""" + +TYPES = """ +typedef struct dh_st { + // prime number (shared) + BIGNUM *p; + // generator of Z_p (shared) + BIGNUM *g; + // private DH value x + BIGNUM *priv_key; + // public DH value g^x + BIGNUM *pub_key; + ...; +} DH; +""" + +FUNCTIONS = """ +DH *DH_new(void); +void DH_free(DH *); +int DH_size(const DH *); +DH *DH_generate_parameters(int, int, void (*)(int, int, void *), void *); +int DH_check(const DH *, int *); +int DH_generate_key(DH *); +int DH_compute_key(unsigned char *, const BIGNUM *, DH *); +int DH_set_ex_data(DH *, int, void *); +void *DH_get_ex_data(DH *, int); +DH *d2i_DHparams(DH **, const unsigned char **, long); +int i2d_DHparams(const DH *, unsigned char **); +int DHparams_print_fp(FILE *, const DH *); +int DHparams_print(BIO *, const DH *); +""" + +MACROS = """ +int DH_generate_parameters_ex(DH *, int, int, BN_GENCB *); +""" + +CUSTOMIZATIONS = """ +""" + +CONDITIONAL_NAMES = {} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/dsa.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/dsa.py new file mode 100644 index 0000000..7db0332 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/dsa.py @@ -0,0 +1,65 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#include +""" + +TYPES = """ +typedef struct dsa_st { + // prime number (public) + BIGNUM *p; + // 160-bit subprime, q | p-1 (public) + BIGNUM *q; + // generator of subgroup (public) + BIGNUM *g; + // private key x + BIGNUM *priv_key; + // public key y = g^x + BIGNUM *pub_key; + ...; +} DSA; +typedef struct { + BIGNUM *r; + BIGNUM *s; +} DSA_SIG; +""" + +FUNCTIONS = """ +DSA *DSA_generate_parameters(int, unsigned char *, int, int *, unsigned long *, + void (*)(int, int, void *), void *); +int DSA_generate_key(DSA *); +DSA *DSA_new(void); +void DSA_free(DSA *); +DSA_SIG *DSA_SIG_new(void); +void DSA_SIG_free(DSA_SIG *); +int i2d_DSA_SIG(const DSA_SIG *, unsigned char **); +DSA_SIG *d2i_DSA_SIG(DSA_SIG **, const unsigned char **, long); +int DSA_size(const DSA *); +int DSA_sign(int, const unsigned char *, int, unsigned char *, unsigned int *, + DSA *); +int DSA_verify(int, const unsigned char *, int, const unsigned char *, int, + DSA *); +""" + +MACROS = """ +int DSA_generate_parameters_ex(DSA *, int, unsigned char *, int, + int *, unsigned long *, BN_GENCB *); +""" + +CUSTOMIZATIONS = """ +""" + +CONDITIONAL_NAMES = {} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/ec.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/ec.py new file mode 100644 index 0000000..45c17c2 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/ec.py @@ -0,0 +1,479 @@ +# Licensed 
under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#ifndef OPENSSL_NO_EC +#include +#endif + +#include +""" + +TYPES = """ +static const int Cryptography_HAS_EC; +static const int Cryptography_HAS_EC_1_0_1; +static const int Cryptography_HAS_EC_NISTP_64_GCC_128; +static const int Cryptography_HAS_EC2M; + +typedef ... EC_KEY; +typedef ... EC_GROUP; +typedef ... EC_POINT; +typedef ... EC_METHOD; +typedef struct { + int nid; + const char *comment; +} EC_builtin_curve; +typedef enum { ... 
} point_conversion_form_t; +""" + +FUNCTIONS = """ +""" + +MACROS = """ +EC_GROUP *EC_GROUP_new(const EC_METHOD *); +void EC_GROUP_free(EC_GROUP *); +void EC_GROUP_clear_free(EC_GROUP *); + +EC_GROUP *EC_GROUP_new_curve_GFp( + const BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); +EC_GROUP *EC_GROUP_new_curve_GF2m( + const BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); +EC_GROUP *EC_GROUP_new_by_curve_name(int); + +int EC_GROUP_set_curve_GFp( + EC_GROUP *, const BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); +int EC_GROUP_get_curve_GFp( + const EC_GROUP *, BIGNUM *, BIGNUM *, BIGNUM *, BN_CTX *); +int EC_GROUP_set_curve_GF2m( + EC_GROUP *, const BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); +int EC_GROUP_get_curve_GF2m( + const EC_GROUP *, BIGNUM *, BIGNUM *, BIGNUM *, BN_CTX *); + +const EC_METHOD *EC_GROUP_method_of(const EC_GROUP *); +const EC_POINT *EC_GROUP_get0_generator(const EC_GROUP *); +int EC_GROUP_get_curve_name(const EC_GROUP *); + +size_t EC_get_builtin_curves(EC_builtin_curve *, size_t); + +void EC_KEY_free(EC_KEY *); + +int EC_KEY_get_flags(const EC_KEY *); +void EC_KEY_set_flags(EC_KEY *, int); +void EC_KEY_clear_flags(EC_KEY *, int); +EC_KEY *EC_KEY_new_by_curve_name(int); +EC_KEY *EC_KEY_copy(EC_KEY *, const EC_KEY *); +EC_KEY *EC_KEY_dup(const EC_KEY *); +int EC_KEY_up_ref(EC_KEY *); +const EC_GROUP *EC_KEY_get0_group(const EC_KEY *); +int EC_GROUP_get_order(const EC_GROUP *, BIGNUM *, BN_CTX *); +int EC_KEY_set_group(EC_KEY *, const EC_GROUP *); +const BIGNUM *EC_KEY_get0_private_key(const EC_KEY *); +int EC_KEY_set_private_key(EC_KEY *, const BIGNUM *); +const EC_POINT *EC_KEY_get0_public_key(const EC_KEY *); +int EC_KEY_set_public_key(EC_KEY *, const EC_POINT *); +unsigned int EC_KEY_get_enc_flags(const EC_KEY *); +void EC_KEY_set_enc_flags(EC_KEY *eckey, unsigned int); +point_conversion_form_t EC_KEY_get_conv_form(const EC_KEY *); +void EC_KEY_set_conv_form(EC_KEY *, point_conversion_form_t); +void 
*EC_KEY_get_key_method_data( + EC_KEY *, + void *(*)(void *), + void (*)(void *), + void (*)(void *) +); +void EC_KEY_insert_key_method_data( + EC_KEY *, + void *, + void *(*)(void *), + void (*)(void *), + void (*)(void *) +); +void EC_KEY_set_asn1_flag(EC_KEY *, int); +int EC_KEY_precompute_mult(EC_KEY *, BN_CTX *); +int EC_KEY_generate_key(EC_KEY *); +int EC_KEY_check_key(const EC_KEY *); +int EC_KEY_set_public_key_affine_coordinates(EC_KEY *, BIGNUM *, BIGNUM *); + +EC_POINT *EC_POINT_new(const EC_GROUP *); +void EC_POINT_free(EC_POINT *); +void EC_POINT_clear_free(EC_POINT *); +int EC_POINT_copy(EC_POINT *, const EC_POINT *); +EC_POINT *EC_POINT_dup(const EC_POINT *, const EC_GROUP *); +const EC_METHOD *EC_POINT_method_of(const EC_POINT *); + +int EC_POINT_set_to_infinity(const EC_GROUP *, EC_POINT *); + +int EC_POINT_set_Jprojective_coordinates_GFp(const EC_GROUP *, EC_POINT *, + const BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); + +int EC_POINT_get_Jprojective_coordinates_GFp(const EC_GROUP *, + const EC_POINT *, BIGNUM *, BIGNUM *, BIGNUM *, BN_CTX *); + +int EC_POINT_set_affine_coordinates_GFp(const EC_GROUP *, EC_POINT *, + const BIGNUM *, const BIGNUM *, BN_CTX *); + +int EC_POINT_get_affine_coordinates_GFp(const EC_GROUP *, + const EC_POINT *, BIGNUM *, BIGNUM *, BN_CTX *); + +int EC_POINT_set_compressed_coordinates_GFp(const EC_GROUP *, EC_POINT *, + const BIGNUM *, int, BN_CTX *); + +int EC_POINT_set_affine_coordinates_GF2m(const EC_GROUP *, EC_POINT *, + const BIGNUM *, const BIGNUM *, BN_CTX *); + +int EC_POINT_get_affine_coordinates_GF2m(const EC_GROUP *, + const EC_POINT *, BIGNUM *, BIGNUM *, BN_CTX *); + +int EC_POINT_set_compressed_coordinates_GF2m(const EC_GROUP *, EC_POINT *, + const BIGNUM *, int, BN_CTX *); + +size_t EC_POINT_point2oct(const EC_GROUP *, const EC_POINT *, + point_conversion_form_t, + unsigned char *, size_t, BN_CTX *); + +int EC_POINT_oct2point(const EC_GROUP *, EC_POINT *, + const unsigned char *, size_t, BN_CTX *); 
+ +BIGNUM *EC_POINT_point2bn(const EC_GROUP *, const EC_POINT *, + point_conversion_form_t form, BIGNUM *, BN_CTX *); + +EC_POINT *EC_POINT_bn2point(const EC_GROUP *, const BIGNUM *, + EC_POINT *, BN_CTX *); + +char *EC_POINT_point2hex(const EC_GROUP *, const EC_POINT *, + point_conversion_form_t form, BN_CTX *); + +EC_POINT *EC_POINT_hex2point(const EC_GROUP *, const char *, + EC_POINT *, BN_CTX *); + +int EC_POINT_add(const EC_GROUP *, EC_POINT *, const EC_POINT *, + const EC_POINT *, BN_CTX *); + +int EC_POINT_dbl(const EC_GROUP *, EC_POINT *, const EC_POINT *, BN_CTX *); +int EC_POINT_invert(const EC_GROUP *, EC_POINT *, BN_CTX *); +int EC_POINT_is_at_infinity(const EC_GROUP *, const EC_POINT *); +int EC_POINT_is_on_curve(const EC_GROUP *, const EC_POINT *, BN_CTX *); + +int EC_POINT_cmp( + const EC_GROUP *, const EC_POINT *, const EC_POINT *, BN_CTX *); + +int EC_POINT_make_affine(const EC_GROUP *, EC_POINT *, BN_CTX *); +int EC_POINTs_make_affine(const EC_GROUP *, size_t, EC_POINT *[], BN_CTX *); + +int EC_POINTs_mul( + const EC_GROUP *, EC_POINT *, const BIGNUM *, + size_t, const EC_POINT *[], const BIGNUM *[], BN_CTX *); + +int EC_POINT_mul(const EC_GROUP *, EC_POINT *, const BIGNUM *, + const EC_POINT *, const BIGNUM *, BN_CTX *); + +int EC_GROUP_precompute_mult(EC_GROUP *, BN_CTX *); +int EC_GROUP_have_precompute_mult(const EC_GROUP *); + +const EC_METHOD *EC_GFp_simple_method(); +const EC_METHOD *EC_GFp_mont_method(); +const EC_METHOD *EC_GFp_nist_method(); + +const EC_METHOD *EC_GFp_nistp224_method(); +const EC_METHOD *EC_GFp_nistp256_method(); +const EC_METHOD *EC_GFp_nistp521_method(); + +const EC_METHOD *EC_GF2m_simple_method(); + +int EC_METHOD_get_field_type(const EC_METHOD *); +""" + +CUSTOMIZATIONS = """ +#ifdef OPENSSL_NO_EC +static const long Cryptography_HAS_EC = 0; +typedef void EC_KEY; +typedef void EC_GROUP; +typedef void EC_POINT; +typedef void EC_METHOD; +typedef struct { + int nid; + const char *comment; +} EC_builtin_curve; +typedef 
long point_conversion_form_t; + +void (*EC_KEY_free)(EC_KEY *) = NULL; +size_t (*EC_get_builtin_curves)(EC_builtin_curve *, size_t) = NULL; +EC_KEY *(*EC_KEY_new_by_curve_name)(int) = NULL; +EC_KEY *(*EC_KEY_copy)(EC_KEY *, const EC_KEY *) = NULL; +EC_KEY *(*EC_KEY_dup)(const EC_KEY *) = NULL; +int (*EC_KEY_up_ref)(EC_KEY *) = NULL; +const EC_GROUP *(*EC_KEY_get0_group)(const EC_KEY *) = NULL; +int (*EC_GROUP_get_order)(const EC_GROUP *, BIGNUM *, BN_CTX *) = NULL; +int (*EC_KEY_set_group)(EC_KEY *, const EC_GROUP *) = NULL; +const BIGNUM *(*EC_KEY_get0_private_key)(const EC_KEY *) = NULL; +int (*EC_KEY_set_private_key)(EC_KEY *, const BIGNUM *) = NULL; +const EC_POINT *(*EC_KEY_get0_public_key)(const EC_KEY *) = NULL; +int (*EC_KEY_set_public_key)(EC_KEY *, const EC_POINT *) = NULL; +unsigned int (*EC_KEY_get_enc_flags)(const EC_KEY *) = NULL; +void (*EC_KEY_set_enc_flags)(EC_KEY *eckey, unsigned int) = NULL; +point_conversion_form_t (*EC_KEY_get_conv_form)(const EC_KEY *) = NULL; +void (*EC_KEY_set_conv_form)(EC_KEY *, point_conversion_form_t) = NULL; +void *(*EC_KEY_get_key_method_data)( + EC_KEY *, void *(*)(void *), void (*)(void *), void (*)(void *)) = NULL; +void (*EC_KEY_insert_key_method_data)( + EC_KEY *, void *, + void *(*)(void *), void (*)(void *), void (*)(void *)) = NULL; +void (*EC_KEY_set_asn1_flag)(EC_KEY *, int) = NULL; +int (*EC_KEY_precompute_mult)(EC_KEY *, BN_CTX *) = NULL; +int (*EC_KEY_generate_key)(EC_KEY *) = NULL; +int (*EC_KEY_check_key)(const EC_KEY *) = NULL; + +EC_GROUP *(*EC_GROUP_new)(const EC_METHOD *); +void (*EC_GROUP_free)(EC_GROUP *); +void (*EC_GROUP_clear_free)(EC_GROUP *); + +EC_GROUP *(*EC_GROUP_new_curve_GFp)( + const BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); + +EC_GROUP *(*EC_GROUP_new_by_curve_name)(int); + +int (*EC_GROUP_set_curve_GFp)( + EC_GROUP *, const BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); + +int (*EC_GROUP_get_curve_GFp)( + const EC_GROUP *, BIGNUM *, BIGNUM *, BIGNUM *, BN_CTX *); + 
+const EC_METHOD *(*EC_GROUP_method_of)(const EC_GROUP *) = NULL; +const EC_POINT *(*EC_GROUP_get0_generator)(const EC_GROUP *) = NULL; +int (*EC_GROUP_get_curve_name)(const EC_GROUP *) = NULL; + +EC_POINT *(*EC_POINT_new)(const EC_GROUP *) = NULL; +void (*EC_POINT_free)(EC_POINT *) = NULL; +void (*EC_POINT_clear_free)(EC_POINT *) = NULL; +int (*EC_POINT_copy)(EC_POINT *, const EC_POINT *) = NULL; +EC_POINT *(*EC_POINT_dup)(const EC_POINT *, const EC_GROUP *) = NULL; +const EC_METHOD *(*EC_POINT_method_of)(const EC_POINT *) = NULL; +int (*EC_POINT_set_to_infinity)(const EC_GROUP *, EC_POINT *) = NULL; +int (*EC_POINT_set_Jprojective_coordinates_GFp)(const EC_GROUP *, EC_POINT *, + const BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *) = NULL; + +int (*EC_POINT_get_Jprojective_coordinates_GFp)(const EC_GROUP *, + const EC_POINT *, BIGNUM *, BIGNUM *, BIGNUM *, BN_CTX *) = NULL; + +int (*EC_POINT_set_affine_coordinates_GFp)(const EC_GROUP *, EC_POINT *, + const BIGNUM *, const BIGNUM *, BN_CTX *) = NULL; + +int (*EC_POINT_get_affine_coordinates_GFp)(const EC_GROUP *, + const EC_POINT *, BIGNUM *, BIGNUM *, BN_CTX *) = NULL; + +int (*EC_POINT_set_compressed_coordinates_GFp)(const EC_GROUP *, EC_POINT *, + const BIGNUM *, int, BN_CTX *) = NULL; + +size_t (*EC_POINT_point2oct)(const EC_GROUP *, const EC_POINT *, + point_conversion_form_t, + unsigned char *, size_t, BN_CTX *) = NULL; + +int (*EC_POINT_oct2point)(const EC_GROUP *, EC_POINT *, + const unsigned char *, size_t, BN_CTX *) = NULL; + +BIGNUM *(*EC_POINT_point2bn)(const EC_GROUP *, const EC_POINT *, + point_conversion_form_t form, BIGNUM *, BN_CTX *) = NULL; + +EC_POINT *(*EC_POINT_bn2point)(const EC_GROUP *, const BIGNUM *, + EC_POINT *, BN_CTX *) = NULL; + +char *(*EC_POINT_point2hex)(const EC_GROUP *, const EC_POINT *, + point_conversion_form_t form, BN_CTX *) = NULL; + +EC_POINT *(*EC_POINT_hex2point)(const EC_GROUP *, const char *, + EC_POINT *, BN_CTX *) = NULL; + +int (*EC_POINT_add)(const EC_GROUP *, 
EC_POINT *, const EC_POINT *, + const EC_POINT *, BN_CTX *) = NULL; + +int (*EC_POINT_dbl)(const EC_GROUP *, EC_POINT *, const EC_POINT *, + BN_CTX *) = NULL; + +int (*EC_POINT_invert)(const EC_GROUP *, EC_POINT *, BN_CTX *) = NULL; +int (*EC_POINT_is_at_infinity)(const EC_GROUP *, const EC_POINT *) = NULL; + +int (*EC_POINT_is_on_curve)(const EC_GROUP *, const EC_POINT *, + BN_CTX *) = NULL; + +int (*EC_POINT_cmp)( + const EC_GROUP *, const EC_POINT *, const EC_POINT *, BN_CTX *) = NULL; + +int (*EC_POINT_make_affine)(const EC_GROUP *, EC_POINT *, BN_CTX *) = NULL; + +int (*EC_POINTs_make_affine)(const EC_GROUP *, size_t, EC_POINT *[], + BN_CTX *) = NULL; + +int (*EC_POINTs_mul)( + const EC_GROUP *, EC_POINT *, const BIGNUM *, + size_t, const EC_POINT *[], const BIGNUM *[], BN_CTX *) = NULL; + +int (*EC_POINT_mul)(const EC_GROUP *, EC_POINT *, const BIGNUM *, + const EC_POINT *, const BIGNUM *, BN_CTX *) = NULL; + +int (*EC_GROUP_precompute_mult)(EC_GROUP *, BN_CTX *) = NULL; +int (*EC_GROUP_have_precompute_mult)(const EC_GROUP *) = NULL; + +const EC_METHOD *(*EC_GFp_simple_method)() = NULL; +const EC_METHOD *(*EC_GFp_mont_method)() = NULL; +const EC_METHOD *(*EC_GFp_nist_method)() = NULL; + +int (*EC_METHOD_get_field_type)(const EC_METHOD *) = NULL; + +#else +static const long Cryptography_HAS_EC = 1; +#endif + +#if defined(OPENSSL_NO_EC) || OPENSSL_VERSION_NUMBER < 0x1000100f +static const long Cryptography_HAS_EC_1_0_1 = 0; + +int (*EC_KEY_get_flags)(const EC_KEY *) = NULL; +void (*EC_KEY_set_flags)(EC_KEY *, int) = NULL; +void (*EC_KEY_clear_flags)(EC_KEY *, int) = NULL; + +int (*EC_KEY_set_public_key_affine_coordinates)( + EC_KEY *, BIGNUM *, BIGNUM *) = NULL; +#else +static const long Cryptography_HAS_EC_1_0_1 = 1; +#endif + + +#if defined(OPENSSL_NO_EC) || OPENSSL_VERSION_NUMBER < 0x1000100f || \ + defined(OPENSSL_NO_EC_NISTP_64_GCC_128) +static const long Cryptography_HAS_EC_NISTP_64_GCC_128 = 0; + +const EC_METHOD *(*EC_GFp_nistp224_method)(void) = NULL; 
+const EC_METHOD *(*EC_GFp_nistp256_method)(void) = NULL; +const EC_METHOD *(*EC_GFp_nistp521_method)(void) = NULL; +#else +static const long Cryptography_HAS_EC_NISTP_64_GCC_128 = 1; +#endif + +#if defined(OPENSSL_NO_EC) || defined(OPENSSL_NO_EC2M) +static const long Cryptography_HAS_EC2M = 0; + +const EC_METHOD *(*EC_GF2m_simple_method)() = NULL; + +int (*EC_POINT_set_affine_coordinates_GF2m)(const EC_GROUP *, EC_POINT *, + const BIGNUM *, const BIGNUM *, BN_CTX *) = NULL; + +int (*EC_POINT_get_affine_coordinates_GF2m)(const EC_GROUP *, + const EC_POINT *, BIGNUM *, BIGNUM *, BN_CTX *) = NULL; + +int (*EC_POINT_set_compressed_coordinates_GF2m)(const EC_GROUP *, EC_POINT *, + const BIGNUM *, int, BN_CTX *) = NULL; + +int (*EC_GROUP_set_curve_GF2m)( + EC_GROUP *, const BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); + +int (*EC_GROUP_get_curve_GF2m)( + const EC_GROUP *, BIGNUM *, BIGNUM *, BIGNUM *, BN_CTX *); + +EC_GROUP *(*EC_GROUP_new_curve_GF2m)( + const BIGNUM *, const BIGNUM *, const BIGNUM *, BN_CTX *); +#else +static const long Cryptography_HAS_EC2M = 1; +#endif +""" + +CONDITIONAL_NAMES = { + "Cryptography_HAS_EC": [ + "EC_GROUP_new", + "EC_GROUP_free", + "EC_GROUP_clear_free", + "EC_GROUP_new_curve_GFp", + "EC_GROUP_new_by_curve_name", + "EC_GROUP_set_curve_GFp", + "EC_GROUP_get_curve_GFp", + "EC_GROUP_method_of", + "EC_GROUP_get0_generator", + "EC_GROUP_get_curve_name", + "EC_KEY_free", + "EC_get_builtin_curves", + "EC_KEY_new_by_curve_name", + "EC_KEY_copy", + "EC_KEY_dup", + "EC_KEY_up_ref", + "EC_KEY_set_group", + "EC_KEY_get0_private_key", + "EC_KEY_set_private_key", + "EC_KEY_set_public_key", + "EC_KEY_get_enc_flags", + "EC_KEY_set_enc_flags", + "EC_KEY_set_conv_form", + "EC_KEY_get_key_method_data", + "EC_KEY_insert_key_method_data", + "EC_KEY_set_asn1_flag", + "EC_KEY_precompute_mult", + "EC_KEY_generate_key", + "EC_KEY_check_key", + "EC_POINT_new", + "EC_POINT_free", + "EC_POINT_clear_free", + "EC_POINT_copy", + "EC_POINT_dup", + 
"EC_POINT_method_of", + "EC_POINT_set_to_infinity", + "EC_POINT_set_Jprojective_coordinates_GFp", + "EC_POINT_get_Jprojective_coordinates_GFp", + "EC_POINT_set_affine_coordinates_GFp", + "EC_POINT_get_affine_coordinates_GFp", + "EC_POINT_set_compressed_coordinates_GFp", + "EC_POINT_point2oct", + "EC_POINT_oct2point", + "EC_POINT_point2bn", + "EC_POINT_bn2point", + "EC_POINT_point2hex", + "EC_POINT_hex2point", + "EC_POINT_add", + "EC_POINT_dbl", + "EC_POINT_invert", + "EC_POINT_is_at_infinity", + "EC_POINT_is_on_curve", + "EC_POINT_cmp", + "EC_POINT_make_affine", + "EC_POINTs_make_affine", + "EC_POINTs_mul", + "EC_POINT_mul", + "EC_GROUP_precompute_mult", + "EC_GROUP_have_precompute_mult", + "EC_GFp_simple_method", + "EC_GFp_mont_method", + "EC_GFp_nist_method", + "EC_METHOD_get_field_type", + ], + + "Cryptography_HAS_EC_1_0_1": [ + "EC_KEY_get_flags", + "EC_KEY_set_flags", + "EC_KEY_clear_flags", + "EC_KEY_set_public_key_affine_coordinates", + ], + + "Cryptography_HAS_EC_NISTP_64_GCC_128": [ + "EC_GFp_nistp224_method", + "EC_GFp_nistp256_method", + "EC_GFp_nistp521_method", + ], + + "Cryptography_HAS_EC2M": [ + "EC_GF2m_simple_method", + "EC_POINT_set_affine_coordinates_GF2m", + "EC_POINT_get_affine_coordinates_GF2m", + "EC_POINT_set_compressed_coordinates_GF2m", + "EC_GROUP_set_curve_GF2m", + "EC_GROUP_get_curve_GF2m", + "EC_GROUP_new_curve_GF2m", + ], +} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/ecdh.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/ecdh.py new file mode 100644 index 0000000..960d46f --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/ecdh.py @@ -0,0 +1,68 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#ifndef OPENSSL_NO_ECDH +#include <openssl/ecdh.h> +#endif +""" + +TYPES = """ +static const int Cryptography_HAS_ECDH; +""" + +FUNCTIONS = """ +""" + +MACROS = """ +int ECDH_compute_key(void *, size_t, const EC_POINT *, EC_KEY *, + void *(*)(const void *, size_t, void *, size_t *)); + +int ECDH_get_ex_new_index(long, void *, CRYPTO_EX_new *, CRYPTO_EX_dup *, + CRYPTO_EX_free *); + +int ECDH_set_ex_data(EC_KEY *, int, void *); + +void *ECDH_get_ex_data(EC_KEY *, int); +""" + +CUSTOMIZATIONS = """ +#ifdef OPENSSL_NO_ECDH +static const long Cryptography_HAS_ECDH = 0; + +int (*ECDH_compute_key)(void *, size_t, const EC_POINT *, EC_KEY *, + void *(*)(const void *, size_t, void *, + size_t *)) = NULL; + +int (*ECDH_get_ex_new_index)(long, void *, CRYPTO_EX_new *, CRYPTO_EX_dup *, + CRYPTO_EX_free *) = NULL; + +int (*ECDH_set_ex_data)(EC_KEY *, int, void *) = NULL; + +void *(*ECDH_get_ex_data)(EC_KEY *, int) = NULL; + +#else +static const long Cryptography_HAS_ECDH = 1; +#endif +""" + +CONDITIONAL_NAMES = { + "Cryptography_HAS_ECDH": [ + "ECDH_compute_key", + "ECDH_get_ex_new_index", + "ECDH_set_ex_data", + "ECDH_get_ex_data", + ], +} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/ecdsa.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/ecdsa.py new file mode 100644 index 0000000..bfa6720 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/ecdsa.py @@ -0,0 +1,130 @@ +# Licensed
under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#ifndef OPENSSL_NO_ECDSA +#include <openssl/ecdsa.h> +#endif +""" + +TYPES = """ +static const int Cryptography_HAS_ECDSA; + +typedef struct { + BIGNUM *r; + BIGNUM *s; +} ECDSA_SIG; + +typedef ... CRYPTO_EX_new; +typedef ... CRYPTO_EX_dup; +typedef ... CRYPTO_EX_free; +""" + +FUNCTIONS = """ +""" + +MACROS = """ +ECDSA_SIG *ECDSA_SIG_new(); +void ECDSA_SIG_free(ECDSA_SIG *); +int i2d_ECDSA_SIG(const ECDSA_SIG *, unsigned char **); +ECDSA_SIG *d2i_ECDSA_SIG(ECDSA_SIG **s, const unsigned char **, long); +ECDSA_SIG *ECDSA_do_sign(const unsigned char *, int, EC_KEY *); +ECDSA_SIG *ECDSA_do_sign_ex(const unsigned char *, int, const BIGNUM *, + const BIGNUM *, EC_KEY *); +int ECDSA_do_verify(const unsigned char *, int, const ECDSA_SIG *, EC_KEY*); +int ECDSA_sign_setup(EC_KEY *, BN_CTX *, BIGNUM **, BIGNUM **); +int ECDSA_sign(int, const unsigned char *, int, unsigned char *, + unsigned int *, EC_KEY *); +int ECDSA_sign_ex(int, const unsigned char *, int dgstlen, unsigned char *, + unsigned int *, const BIGNUM *, const BIGNUM *, EC_KEY *); +int ECDSA_verify(int, const unsigned char *, int, const unsigned char *, int, + EC_KEY *); +int ECDSA_size(const EC_KEY *); + +const ECDSA_METHOD* ECDSA_OpenSSL(); +void ECDSA_set_default_method(const ECDSA_METHOD *); +const ECDSA_METHOD* ECDSA_get_default_method(); +int ECDSA_get_ex_new_index(long, void *, CRYPTO_EX_new *, + 
CRYPTO_EX_dup *, CRYPTO_EX_free *); +int ECDSA_set_method(EC_KEY *, const ECDSA_METHOD *); +int ECDSA_set_ex_data(EC_KEY *, int, void *); +void *ECDSA_get_ex_data(EC_KEY *, int); +""" + +CUSTOMIZATIONS = """ +#ifdef OPENSSL_NO_ECDSA +static const long Cryptography_HAS_ECDSA = 0; + +typedef struct { + BIGNUM *r; + BIGNUM *s; +} ECDSA_SIG; + +ECDSA_SIG* (*ECDSA_SIG_new)() = NULL; +void (*ECDSA_SIG_free)(ECDSA_SIG *) = NULL; +int (*i2d_ECDSA_SIG)(const ECDSA_SIG *, unsigned char **) = NULL; +ECDSA_SIG* (*d2i_ECDSA_SIG)(ECDSA_SIG **s, const unsigned char **, + long) = NULL; +ECDSA_SIG* (*ECDSA_do_sign)(const unsigned char *, int, EC_KEY *eckey) = NULL; +ECDSA_SIG* (*ECDSA_do_sign_ex)(const unsigned char *, int, const BIGNUM *, + const BIGNUM *, EC_KEY *) = NULL; +int (*ECDSA_do_verify)(const unsigned char *, int, const ECDSA_SIG *, + EC_KEY*) = NULL; +int (*ECDSA_sign_setup)(EC_KEY *, BN_CTX *, BIGNUM **, BIGNUM **) = NULL; +int (*ECDSA_sign)(int, const unsigned char *, int, unsigned char *, + unsigned int *, EC_KEY *) = NULL; +int (*ECDSA_sign_ex)(int, const unsigned char *, int dgstlen, unsigned char *, + unsigned int *, const BIGNUM *, const BIGNUM *, + EC_KEY *) = NULL; +int (*ECDSA_verify)(int, const unsigned char *, int, const unsigned char *, + int, EC_KEY *) = NULL; +int (*ECDSA_size)(const EC_KEY *) = NULL; + +const ECDSA_METHOD* (*ECDSA_OpenSSL)() = NULL; +void (*ECDSA_set_default_method)(const ECDSA_METHOD *) = NULL; +const ECDSA_METHOD* (*ECDSA_get_default_method)() = NULL; +int (*ECDSA_set_method)(EC_KEY *, const ECDSA_METHOD *) = NULL; +int (*ECDSA_get_ex_new_index)(long, void *, CRYPTO_EX_new *, + CRYPTO_EX_dup *, CRYPTO_EX_free *) = NULL; +int (*ECDSA_set_ex_data)(EC_KEY *, int, void *) = NULL; +void* (*ECDSA_get_ex_data)(EC_KEY *, int) = NULL; +#else +static const long Cryptography_HAS_ECDSA = 1; +#endif +""" + +CONDITIONAL_NAMES = { + "Cryptography_HAS_ECDSA": [ + "ECDSA_SIG_new", + "ECDSA_SIG_free", + "i2d_ECDSA_SIG", + "d2i_ECDSA_SIG", + 
"ECDSA_do_sign", + "ECDSA_do_sign_ex", + "ECDSA_do_verify", + "ECDSA_sign_setup", + "ECDSA_sign", + "ECDSA_sign_ex", + "ECDSA_verify", + "ECDSA_size", + "ECDSA_OpenSSL", + "ECDSA_set_default_method", + "ECDSA_get_default_method", + "ECDSA_set_method", + "ECDSA_get_ex_new_index", + "ECDSA_set_ex_data", + "ECDSA_get_ex_data", + ], +} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/engine.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/engine.py new file mode 100644 index 0000000..364232e --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/engine.py @@ -0,0 +1,165 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#include <openssl/engine.h> +""" + +TYPES = """ +typedef ... ENGINE; +typedef ... RSA_METHOD; +typedef ... DSA_METHOD; +typedef ... ECDH_METHOD; +typedef ... ECDSA_METHOD; +typedef ... DH_METHOD; +typedef ... RAND_METHOD; +typedef ... STORE_METHOD; +typedef ... *ENGINE_GEN_INT_FUNC_PTR; +typedef ... *ENGINE_CTRL_FUNC_PTR; +typedef ... *ENGINE_LOAD_KEY_PTR; +typedef ... *ENGINE_CIPHERS_PTR; +typedef ... *ENGINE_DIGESTS_PTR; +typedef ... ENGINE_CMD_DEFN; +typedef ... 
UI_METHOD; + +static const unsigned int ENGINE_METHOD_RSA; +static const unsigned int ENGINE_METHOD_DSA; +static const unsigned int ENGINE_METHOD_RAND; +static const unsigned int ENGINE_METHOD_ECDH; +static const unsigned int ENGINE_METHOD_ECDSA; +static const unsigned int ENGINE_METHOD_CIPHERS; +static const unsigned int ENGINE_METHOD_DIGESTS; +static const unsigned int ENGINE_METHOD_STORE; +static const unsigned int ENGINE_METHOD_ALL; +static const unsigned int ENGINE_METHOD_NONE; +""" + +FUNCTIONS = """ +ENGINE *ENGINE_get_first(void); +ENGINE *ENGINE_get_last(void); +ENGINE *ENGINE_get_next(ENGINE *); +ENGINE *ENGINE_get_prev(ENGINE *); +int ENGINE_add(ENGINE *); +int ENGINE_remove(ENGINE *); +ENGINE *ENGINE_by_id(const char *); +int ENGINE_init(ENGINE *); +int ENGINE_finish(ENGINE *); +void ENGINE_load_openssl(void); +void ENGINE_load_dynamic(void); +void ENGINE_load_cryptodev(void); +void ENGINE_load_builtin_engines(void); +void ENGINE_cleanup(void); +ENGINE *ENGINE_get_default_RSA(void); +ENGINE *ENGINE_get_default_DSA(void); +ENGINE *ENGINE_get_default_ECDH(void); +ENGINE *ENGINE_get_default_ECDSA(void); +ENGINE *ENGINE_get_default_DH(void); +ENGINE *ENGINE_get_default_RAND(void); +ENGINE *ENGINE_get_cipher_engine(int); +ENGINE *ENGINE_get_digest_engine(int); +int ENGINE_set_default_RSA(ENGINE *); +int ENGINE_set_default_DSA(ENGINE *); +int ENGINE_set_default_ECDH(ENGINE *); +int ENGINE_set_default_ECDSA(ENGINE *); +int ENGINE_set_default_DH(ENGINE *); +int ENGINE_set_default_RAND(ENGINE *); +int ENGINE_set_default_ciphers(ENGINE *); +int ENGINE_set_default_digests(ENGINE *); +int ENGINE_set_default_string(ENGINE *, const char *); +int ENGINE_set_default(ENGINE *, unsigned int); +unsigned int ENGINE_get_table_flags(void); +void ENGINE_set_table_flags(unsigned int); +int ENGINE_register_RSA(ENGINE *); +void ENGINE_unregister_RSA(ENGINE *); +void ENGINE_register_all_RSA(void); +int ENGINE_register_DSA(ENGINE *); +void ENGINE_unregister_DSA(ENGINE *); +void 
ENGINE_register_all_DSA(void); +int ENGINE_register_ECDH(ENGINE *); +void ENGINE_unregister_ECDH(ENGINE *); +void ENGINE_register_all_ECDH(void); +int ENGINE_register_ECDSA(ENGINE *); +void ENGINE_unregister_ECDSA(ENGINE *); +void ENGINE_register_all_ECDSA(void); +int ENGINE_register_DH(ENGINE *); +void ENGINE_unregister_DH(ENGINE *); +void ENGINE_register_all_DH(void); +int ENGINE_register_RAND(ENGINE *); +void ENGINE_unregister_RAND(ENGINE *); +void ENGINE_register_all_RAND(void); +int ENGINE_register_STORE(ENGINE *); +void ENGINE_unregister_STORE(ENGINE *); +void ENGINE_register_all_STORE(void); +int ENGINE_register_ciphers(ENGINE *); +void ENGINE_unregister_ciphers(ENGINE *); +void ENGINE_register_all_ciphers(void); +int ENGINE_register_digests(ENGINE *); +void ENGINE_unregister_digests(ENGINE *); +void ENGINE_register_all_digests(void); +int ENGINE_register_complete(ENGINE *); +int ENGINE_register_all_complete(void); +int ENGINE_ctrl(ENGINE *, int, long, void *, void (*)(void)); +int ENGINE_cmd_is_executable(ENGINE *, int); +int ENGINE_ctrl_cmd(ENGINE *, const char *, long, void *, void (*)(void), int); +int ENGINE_ctrl_cmd_string(ENGINE *, const char *, const char *, int); + +ENGINE *ENGINE_new(void); +int ENGINE_free(ENGINE *); +int ENGINE_up_ref(ENGINE *); +int ENGINE_set_id(ENGINE *, const char *); +int ENGINE_set_name(ENGINE *, const char *); +int ENGINE_set_RSA(ENGINE *, const RSA_METHOD *); +int ENGINE_set_DSA(ENGINE *, const DSA_METHOD *); +int ENGINE_set_ECDH(ENGINE *, const ECDH_METHOD *); +int ENGINE_set_ECDSA(ENGINE *, const ECDSA_METHOD *); +int ENGINE_set_DH(ENGINE *, const DH_METHOD *); +int ENGINE_set_RAND(ENGINE *, const RAND_METHOD *); +int ENGINE_set_STORE(ENGINE *, const STORE_METHOD *); +int ENGINE_set_destroy_function(ENGINE *, ENGINE_GEN_INT_FUNC_PTR); +int ENGINE_set_init_function(ENGINE *, ENGINE_GEN_INT_FUNC_PTR); +int ENGINE_set_finish_function(ENGINE *, ENGINE_GEN_INT_FUNC_PTR); +int ENGINE_set_ctrl_function(ENGINE *, 
ENGINE_CTRL_FUNC_PTR); +int ENGINE_set_load_privkey_function(ENGINE *, ENGINE_LOAD_KEY_PTR); +int ENGINE_set_load_pubkey_function(ENGINE *, ENGINE_LOAD_KEY_PTR); +int ENGINE_set_ciphers(ENGINE *, ENGINE_CIPHERS_PTR); +int ENGINE_set_digests(ENGINE *, ENGINE_DIGESTS_PTR); +int ENGINE_set_flags(ENGINE *, int); +int ENGINE_set_cmd_defns(ENGINE *, const ENGINE_CMD_DEFN *); +const char *ENGINE_get_id(const ENGINE *); +const char *ENGINE_get_name(const ENGINE *); +const RSA_METHOD *ENGINE_get_RSA(const ENGINE *); +const DSA_METHOD *ENGINE_get_DSA(const ENGINE *); +const ECDH_METHOD *ENGINE_get_ECDH(const ENGINE *); +const ECDSA_METHOD *ENGINE_get_ECDSA(const ENGINE *); +const DH_METHOD *ENGINE_get_DH(const ENGINE *); +const RAND_METHOD *ENGINE_get_RAND(const ENGINE *); +const STORE_METHOD *ENGINE_get_STORE(const ENGINE *); + +const EVP_CIPHER *ENGINE_get_cipher(ENGINE *, int); +const EVP_MD *ENGINE_get_digest(ENGINE *, int); +int ENGINE_get_flags(const ENGINE *); +const ENGINE_CMD_DEFN *ENGINE_get_cmd_defns(const ENGINE *); +EVP_PKEY *ENGINE_load_private_key(ENGINE *, const char *, UI_METHOD *, void *); +EVP_PKEY *ENGINE_load_public_key(ENGINE *, const char *, UI_METHOD *, void *); +void ENGINE_add_conf_module(void); +""" + +MACROS = """ +""" + +CUSTOMIZATIONS = """ +""" + +CONDITIONAL_NAMES = {} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/err.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/err.py new file mode 100644 index 0000000..f6456d6 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/err.py @@ -0,0 +1,328 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#include <openssl/err.h> +""" + +TYPES = """ +static const int Cryptography_HAS_REMOVE_THREAD_STATE; +static const int Cryptography_HAS_098H_ERROR_CODES; +static const int Cryptography_HAS_098C_CAMELLIA_CODES; + +struct ERR_string_data_st { + unsigned long error; + const char *string; +}; +typedef struct ERR_string_data_st ERR_STRING_DATA; + + +static const int ERR_LIB_EVP; +static const int ERR_LIB_PEM; +static const int ERR_LIB_ASN1; +static const int ERR_LIB_RSA; + +static const int ASN1_F_ASN1_ENUMERATED_TO_BN; +static const int ASN1_F_ASN1_EX_C2I; +static const int ASN1_F_ASN1_FIND_END; +static const int ASN1_F_ASN1_GENERALIZEDTIME_SET; +static const int ASN1_F_ASN1_GENERATE_V3; +static const int ASN1_F_ASN1_GET_OBJECT; +static const int ASN1_F_ASN1_ITEM_I2D_FP; +static const int ASN1_F_ASN1_ITEM_PACK; +static const int ASN1_F_ASN1_ITEM_SIGN; +static const int ASN1_F_ASN1_ITEM_UNPACK; +static const int ASN1_F_ASN1_ITEM_VERIFY; +static const int ASN1_F_ASN1_MBSTRING_NCOPY; +static const int ASN1_F_ASN1_TEMPLATE_EX_D2I; +static const int ASN1_F_ASN1_TEMPLATE_NEW; +static const int ASN1_F_ASN1_TEMPLATE_NOEXP_D2I; +static const int ASN1_F_ASN1_TIME_SET; +static const int ASN1_F_ASN1_TYPE_GET_INT_OCTETSTRING; +static const int ASN1_F_ASN1_TYPE_GET_OCTETSTRING; +static const int ASN1_F_ASN1_UNPACK_STRING; +static const int ASN1_F_ASN1_UTCTIME_SET; +static const int ASN1_F_ASN1_VERIFY; +static const int ASN1_F_BITSTR_CB; +static const int ASN1_F_BN_TO_ASN1_ENUMERATED; +static const int 
ASN1_F_BN_TO_ASN1_INTEGER; +static const int ASN1_F_D2I_ASN1_TYPE_BYTES; +static const int ASN1_F_D2I_ASN1_UINTEGER; +static const int ASN1_F_D2I_ASN1_UTCTIME; +static const int ASN1_F_D2I_NETSCAPE_RSA; +static const int ASN1_F_D2I_NETSCAPE_RSA_2; +static const int ASN1_F_D2I_PRIVATEKEY; +static const int ASN1_F_D2I_X509; +static const int ASN1_F_D2I_X509_CINF; +static const int ASN1_F_D2I_X509_PKEY; +static const int ASN1_F_I2D_ASN1_SET; +static const int ASN1_F_I2D_ASN1_TIME; +static const int ASN1_F_I2D_DSA_PUBKEY; +static const int ASN1_F_LONG_C2I; +static const int ASN1_F_OID_MODULE_INIT; +static const int ASN1_F_PARSE_TAGGING; +static const int ASN1_F_PKCS5_PBE_SET; +static const int ASN1_F_X509_CINF_NEW; +static const int ASN1_R_BOOLEAN_IS_WRONG_LENGTH; +static const int ASN1_R_BUFFER_TOO_SMALL; +static const int ASN1_R_CIPHER_HAS_NO_OBJECT_IDENTIFIER; +static const int ASN1_R_DATA_IS_WRONG; +static const int ASN1_R_DECODE_ERROR; +static const int ASN1_R_DECODING_ERROR; +static const int ASN1_R_DEPTH_EXCEEDED; +static const int ASN1_R_ENCODE_ERROR; +static const int ASN1_R_ERROR_GETTING_TIME; +static const int ASN1_R_ERROR_LOADING_SECTION; +static const int ASN1_R_MSTRING_WRONG_TAG; +static const int ASN1_R_NESTED_ASN1_STRING; +static const int ASN1_R_NO_MATCHING_CHOICE_TYPE; +static const int ASN1_R_UNKNOWN_MESSAGE_DIGEST_ALGORITHM; +static const int ASN1_R_UNKNOWN_OBJECT_TYPE; +static const int ASN1_R_UNKNOWN_PUBLIC_KEY_TYPE; +static const int ASN1_R_UNKNOWN_TAG; +static const int ASN1_R_UNKOWN_FORMAT; +static const int ASN1_R_UNSUPPORTED_ANY_DEFINED_BY_TYPE; +static const int ASN1_R_UNSUPPORTED_ENCRYPTION_ALGORITHM; +static const int ASN1_R_UNSUPPORTED_PUBLIC_KEY_TYPE; +static const int ASN1_R_UNSUPPORTED_TYPE; +static const int ASN1_R_WRONG_TAG; +static const int ASN1_R_WRONG_TYPE; + +static const int EVP_F_AES_INIT_KEY; +static const int EVP_F_D2I_PKEY; +static const int EVP_F_DSA_PKEY2PKCS8; +static const int EVP_F_DSAPKEY2PKCS8; +static const int 
EVP_F_ECDSA_PKEY2PKCS8; +static const int EVP_F_ECKEY_PKEY2PKCS8; +static const int EVP_F_EVP_CIPHER_CTX_CTRL; +static const int EVP_F_EVP_CIPHER_CTX_SET_KEY_LENGTH; +static const int EVP_F_EVP_CIPHERINIT_EX; +static const int EVP_F_EVP_DECRYPTFINAL_EX; +static const int EVP_F_EVP_DIGESTINIT_EX; +static const int EVP_F_EVP_ENCRYPTFINAL_EX; +static const int EVP_F_EVP_MD_CTX_COPY_EX; +static const int EVP_F_EVP_OPENINIT; +static const int EVP_F_EVP_PBE_ALG_ADD; +static const int EVP_F_EVP_PBE_CIPHERINIT; +static const int EVP_F_EVP_PKCS82PKEY; +static const int EVP_F_EVP_PKEY2PKCS8_BROKEN; +static const int EVP_F_EVP_PKEY_COPY_PARAMETERS; +static const int EVP_F_EVP_PKEY_DECRYPT; +static const int EVP_F_EVP_PKEY_ENCRYPT; +static const int EVP_F_EVP_PKEY_GET1_DH; +static const int EVP_F_EVP_PKEY_GET1_DSA; +static const int EVP_F_EVP_PKEY_GET1_ECDSA; +static const int EVP_F_EVP_PKEY_GET1_EC_KEY; +static const int EVP_F_EVP_PKEY_GET1_RSA; +static const int EVP_F_EVP_PKEY_NEW; +static const int EVP_F_EVP_RIJNDAEL; +static const int EVP_F_EVP_SIGNFINAL; +static const int EVP_F_EVP_VERIFYFINAL; +static const int EVP_F_PKCS5_PBE_KEYIVGEN; +static const int EVP_F_PKCS5_V2_PBE_KEYIVGEN; +static const int EVP_F_PKCS8_SET_BROKEN; +static const int EVP_F_RC2_MAGIC_TO_METH; +static const int EVP_F_RC5_CTRL; +static const int EVP_R_AES_KEY_SETUP_FAILED; +static const int EVP_R_ASN1_LIB; +static const int EVP_R_BAD_BLOCK_LENGTH; +static const int EVP_R_BAD_DECRYPT; +static const int EVP_R_BAD_KEY_LENGTH; +static const int EVP_R_BN_DECODE_ERROR; +static const int EVP_R_BN_PUBKEY_ERROR; +static const int EVP_R_CIPHER_PARAMETER_ERROR; +static const int EVP_R_CTRL_NOT_IMPLEMENTED; +static const int EVP_R_CTRL_OPERATION_NOT_IMPLEMENTED; +static const int EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH; +static const int EVP_R_DECODE_ERROR; +static const int EVP_R_DIFFERENT_KEY_TYPES; +static const int EVP_R_ENCODE_ERROR; +static const int EVP_R_INITIALIZATION_ERROR; +static const int 
EVP_R_INPUT_NOT_INITIALIZED; +static const int EVP_R_INVALID_KEY_LENGTH; +static const int EVP_R_IV_TOO_LARGE; +static const int EVP_R_KEYGEN_FAILURE; +static const int EVP_R_MISSING_PARAMETERS; +static const int EVP_R_NO_CIPHER_SET; +static const int EVP_R_NO_DIGEST_SET; +static const int EVP_R_NO_DSA_PARAMETERS; +static const int EVP_R_NO_SIGN_FUNCTION_CONFIGURED; +static const int EVP_R_NO_VERIFY_FUNCTION_CONFIGURED; +static const int EVP_R_PKCS8_UNKNOWN_BROKEN_TYPE; +static const int EVP_R_PUBLIC_KEY_NOT_RSA; +static const int EVP_R_UNKNOWN_PBE_ALGORITHM; +static const int EVP_R_UNSUPORTED_NUMBER_OF_ROUNDS; +static const int EVP_R_UNSUPPORTED_CIPHER; +static const int EVP_R_UNSUPPORTED_KEY_DERIVATION_FUNCTION; +static const int EVP_R_UNSUPPORTED_KEYLENGTH; +static const int EVP_R_UNSUPPORTED_SALT_TYPE; +static const int EVP_R_WRONG_FINAL_BLOCK_LENGTH; +static const int EVP_R_WRONG_PUBLIC_KEY_TYPE; + +static const int PEM_F_D2I_PKCS8PRIVATEKEY_BIO; +static const int PEM_F_D2I_PKCS8PRIVATEKEY_FP; +static const int PEM_F_DO_PK8PKEY; +static const int PEM_F_DO_PK8PKEY_FP; +static const int PEM_F_LOAD_IV; +static const int PEM_F_PEM_ASN1_READ; +static const int PEM_F_PEM_ASN1_READ_BIO; +static const int PEM_F_PEM_ASN1_WRITE; +static const int PEM_F_PEM_ASN1_WRITE_BIO; +static const int PEM_F_PEM_DEF_CALLBACK; +static const int PEM_F_PEM_DO_HEADER; +static const int PEM_F_PEM_F_PEM_WRITE_PKCS8PRIVATEKEY; +static const int PEM_F_PEM_GET_EVP_CIPHER_INFO; +static const int PEM_F_PEM_PK8PKEY; +static const int PEM_F_PEM_READ; +static const int PEM_F_PEM_READ_BIO; +static const int PEM_F_PEM_READ_BIO_PRIVATEKEY; +static const int PEM_F_PEM_READ_PRIVATEKEY; +static const int PEM_F_PEM_SEALFINAL; +static const int PEM_F_PEM_SEALINIT; +static const int PEM_F_PEM_SIGNFINAL; +static const int PEM_F_PEM_WRITE; +static const int PEM_F_PEM_WRITE_BIO; +static const int PEM_F_PEM_X509_INFO_READ; +static const int PEM_F_PEM_X509_INFO_READ_BIO; +static const int 
PEM_F_PEM_X509_INFO_WRITE_BIO; + +static const int PEM_R_BAD_BASE64_DECODE; +static const int PEM_R_BAD_DECRYPT; +static const int PEM_R_BAD_END_LINE; +static const int PEM_R_BAD_IV_CHARS; +static const int PEM_R_BAD_PASSWORD_READ; +static const int PEM_R_ERROR_CONVERTING_PRIVATE_KEY; +static const int PEM_R_NO_START_LINE; +static const int PEM_R_NOT_DEK_INFO; +static const int PEM_R_NOT_ENCRYPTED; +static const int PEM_R_NOT_PROC_TYPE; +static const int PEM_R_PROBLEMS_GETTING_PASSWORD; +static const int PEM_R_PUBLIC_KEY_NO_RSA; +static const int PEM_R_READ_KEY; +static const int PEM_R_SHORT_HEADER; +static const int PEM_R_UNSUPPORTED_CIPHER; +static const int PEM_R_UNSUPPORTED_ENCRYPTION; + +static const int RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE; +static const int RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY; +static const int RSA_R_BLOCK_TYPE_IS_NOT_01; +static const int RSA_R_BLOCK_TYPE_IS_NOT_02; +""" + +FUNCTIONS = """ +void ERR_load_crypto_strings(void); +void ERR_load_SSL_strings(void); +void ERR_free_strings(void); +char* ERR_error_string(unsigned long, char *); +void ERR_error_string_n(unsigned long, char *, size_t); +const char* ERR_lib_error_string(unsigned long); +const char* ERR_func_error_string(unsigned long); +const char* ERR_reason_error_string(unsigned long); +void ERR_print_errors(BIO *); +void ERR_print_errors_fp(FILE *); +unsigned long ERR_get_error(void); +unsigned long ERR_peek_error(void); +unsigned long ERR_peek_last_error(void); +unsigned long ERR_get_error_line(const char **, int *); +unsigned long ERR_peek_error_line(const char **, int *); +unsigned long ERR_peek_last_error_line(const char **, int *); +unsigned long ERR_get_error_line_data(const char **, int *, + const char **, int *); +unsigned long ERR_peek_error_line_data(const char **, + int *, const char **, int *); +unsigned long ERR_peek_last_error_line_data(const char **, + int *, const char **, int *); +void ERR_put_error(int, int, int, const char *, int); +void ERR_add_error_data(int, ...); 
+int ERR_get_next_error_library(void); +""" + +MACROS = """ +unsigned long ERR_PACK(int, int, int); +int ERR_GET_LIB(unsigned long); +int ERR_GET_FUNC(unsigned long); +int ERR_GET_REASON(unsigned long); +int ERR_FATAL_ERROR(unsigned long); +/* introduced in 1.0.0 so we have to handle this specially to continue + * supporting 0.9.8 + */ +void ERR_remove_thread_state(const CRYPTO_THREADID *); + +/* These were added in OpenSSL 0.9.8h. When we drop support for RHEL/CentOS 5 + we should be able to move these back to TYPES. */ +static const int ASN1_F_B64_READ_ASN1; +static const int ASN1_F_B64_WRITE_ASN1; +static const int ASN1_F_SMIME_READ_ASN1; +static const int ASN1_F_SMIME_TEXT; +static const int ASN1_R_NO_CONTENT_TYPE; +static const int ASN1_R_NO_MULTIPART_BODY_FAILURE; +static const int ASN1_R_NO_MULTIPART_BOUNDARY; +/* These were added in OpenSSL 0.9.8c. */ +static const int EVP_F_CAMELLIA_INIT_KEY; +static const int EVP_R_CAMELLIA_KEY_SETUP_FAILED; +""" + +CUSTOMIZATIONS = """ +#if OPENSSL_VERSION_NUMBER >= 0x10000000L +static const long Cryptography_HAS_REMOVE_THREAD_STATE = 1; +#else +static const long Cryptography_HAS_REMOVE_THREAD_STATE = 0; +typedef uint32_t CRYPTO_THREADID; +void (*ERR_remove_thread_state)(const CRYPTO_THREADID *) = NULL; +#endif + +// OpenSSL 0.9.8h+ +#if OPENSSL_VERSION_NUMBER >= 0x0090808fL +static const long Cryptography_HAS_098H_ERROR_CODES = 1; +#else +static const long Cryptography_HAS_098H_ERROR_CODES = 0; +static const int ASN1_F_B64_READ_ASN1 = 0; +static const int ASN1_F_B64_WRITE_ASN1 = 0; +static const int ASN1_F_SMIME_READ_ASN1 = 0; +static const int ASN1_F_SMIME_TEXT = 0; +static const int ASN1_R_NO_CONTENT_TYPE = 0; +static const int ASN1_R_NO_MULTIPART_BODY_FAILURE = 0; +static const int ASN1_R_NO_MULTIPART_BOUNDARY = 0; +#endif + +// OpenSSL 0.9.8c+ +#ifdef EVP_F_CAMELLIA_INIT_KEY +static const long Cryptography_HAS_098C_CAMELLIA_CODES = 1; +#else +static const long Cryptography_HAS_098C_CAMELLIA_CODES = 0; +static const 
int EVP_F_CAMELLIA_INIT_KEY = 0; +static const int EVP_R_CAMELLIA_KEY_SETUP_FAILED = 0; +#endif + +""" + +CONDITIONAL_NAMES = { + "Cryptography_HAS_REMOVE_THREAD_STATE": [ + "ERR_remove_thread_state" + ], + "Cryptography_HAS_098H_ERROR_CODES": [ + "ASN1_F_B64_READ_ASN1", + "ASN1_F_B64_WRITE_ASN1", + "ASN1_F_SMIME_READ_ASN1", + "ASN1_F_SMIME_TEXT", + "ASN1_R_NO_CONTENT_TYPE", + "ASN1_R_NO_MULTIPART_BODY_FAILURE", + "ASN1_R_NO_MULTIPART_BOUNDARY", + ], + "Cryptography_HAS_098C_CAMELLIA_CODES": [ + "EVP_F_CAMELLIA_INIT_KEY", + "EVP_R_CAMELLIA_KEY_SETUP_FAILED" + ] +} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/evp.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/evp.py new file mode 100644 index 0000000..b3d958e --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/evp.py @@ -0,0 +1,260 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#include +""" + +TYPES = """ +typedef ... EVP_CIPHER; +typedef struct { + const EVP_CIPHER *cipher; + ENGINE *engine; + int encrypt; + ...; +} EVP_CIPHER_CTX; +typedef ... EVP_MD; +typedef struct env_md_ctx_st { + ...; +} EVP_MD_CTX; + +typedef struct evp_pkey_st { + int type; + ...; +} EVP_PKEY; +typedef ... 
EVP_PKEY_CTX; +static const int EVP_PKEY_RSA; +static const int EVP_PKEY_DSA; +static const int EVP_PKEY_EC; +static const int EVP_MAX_MD_SIZE; +static const int EVP_CTRL_GCM_SET_IVLEN; +static const int EVP_CTRL_GCM_GET_TAG; +static const int EVP_CTRL_GCM_SET_TAG; + +static const int Cryptography_HAS_GCM; +static const int Cryptography_HAS_PBKDF2_HMAC; +static const int Cryptography_HAS_PKEY_CTX; +""" + +FUNCTIONS = """ +const EVP_CIPHER *EVP_get_cipherbyname(const char *); +int EVP_EncryptInit_ex(EVP_CIPHER_CTX *, const EVP_CIPHER *, ENGINE *, + const unsigned char *, const unsigned char *); +int EVP_CIPHER_CTX_set_padding(EVP_CIPHER_CTX *, int); +int EVP_EncryptUpdate(EVP_CIPHER_CTX *, unsigned char *, int *, + const unsigned char *, int); +int EVP_EncryptFinal_ex(EVP_CIPHER_CTX *, unsigned char *, int *); +int EVP_DecryptInit_ex(EVP_CIPHER_CTX *, const EVP_CIPHER *, ENGINE *, + const unsigned char *, const unsigned char *); +int EVP_DecryptUpdate(EVP_CIPHER_CTX *, unsigned char *, int *, + const unsigned char *, int); +int EVP_DecryptFinal_ex(EVP_CIPHER_CTX *, unsigned char *, int *); +int EVP_CipherInit_ex(EVP_CIPHER_CTX *, const EVP_CIPHER *, ENGINE *, + const unsigned char *, const unsigned char *, int); +int EVP_CipherUpdate(EVP_CIPHER_CTX *, unsigned char *, int *, + const unsigned char *, int); +int EVP_CipherFinal_ex(EVP_CIPHER_CTX *, unsigned char *, int *); +int EVP_CIPHER_CTX_cleanup(EVP_CIPHER_CTX *); +void EVP_CIPHER_CTX_init(EVP_CIPHER_CTX *); +EVP_CIPHER_CTX *EVP_CIPHER_CTX_new(void); +void EVP_CIPHER_CTX_free(EVP_CIPHER_CTX *); +int EVP_CIPHER_CTX_set_key_length(EVP_CIPHER_CTX *, int); + +EVP_MD_CTX *EVP_MD_CTX_create(void); +int EVP_MD_CTX_copy_ex(EVP_MD_CTX *, const EVP_MD_CTX *); +int EVP_DigestInit_ex(EVP_MD_CTX *, const EVP_MD *, ENGINE *); +int EVP_DigestUpdate(EVP_MD_CTX *, const void *, size_t); +int EVP_DigestFinal_ex(EVP_MD_CTX *, unsigned char *, unsigned int *); +int EVP_MD_CTX_cleanup(EVP_MD_CTX *); +void 
EVP_MD_CTX_destroy(EVP_MD_CTX *); +const EVP_MD *EVP_get_digestbyname(const char *); + +EVP_PKEY *EVP_PKEY_new(void); +void EVP_PKEY_free(EVP_PKEY *); +int EVP_PKEY_type(int); +int EVP_PKEY_bits(EVP_PKEY *); +int EVP_PKEY_size(EVP_PKEY *); +RSA *EVP_PKEY_get1_RSA(EVP_PKEY *); +DSA *EVP_PKEY_get1_DSA(EVP_PKEY *); +DH *EVP_PKEY_get1_DH(EVP_PKEY *); + +int EVP_SignInit(EVP_MD_CTX *, const EVP_MD *); +int EVP_SignUpdate(EVP_MD_CTX *, const void *, size_t); +int EVP_SignFinal(EVP_MD_CTX *, unsigned char *, unsigned int *, EVP_PKEY *); + +int EVP_VerifyInit(EVP_MD_CTX *, const EVP_MD *); +int EVP_VerifyUpdate(EVP_MD_CTX *, const void *, size_t); +int EVP_VerifyFinal(EVP_MD_CTX *, const unsigned char *, unsigned int, + EVP_PKEY *); + +const EVP_MD *EVP_md5(void); + +int PKCS5_PBKDF2_HMAC_SHA1(const char *, int, const unsigned char *, int, int, + int, unsigned char *); + +int EVP_PKEY_set1_RSA(EVP_PKEY *, struct rsa_st *); +int EVP_PKEY_set1_DSA(EVP_PKEY *, struct dsa_st *); +int EVP_PKEY_set1_DH(EVP_PKEY *, DH *); + +int EVP_PKEY_get_attr_count(const EVP_PKEY *); +int EVP_PKEY_get_attr_by_NID(const EVP_PKEY *, int, int); +int EVP_PKEY_get_attr_by_OBJ(const EVP_PKEY *, ASN1_OBJECT *, int); +X509_ATTRIBUTE *EVP_PKEY_get_attr(const EVP_PKEY *, int); +X509_ATTRIBUTE *EVP_PKEY_delete_attr(EVP_PKEY *, int); +int EVP_PKEY_add1_attr(EVP_PKEY *, X509_ATTRIBUTE *); +int EVP_PKEY_add1_attr_by_OBJ(EVP_PKEY *, const ASN1_OBJECT *, int, + const unsigned char *, int); +int EVP_PKEY_add1_attr_by_NID(EVP_PKEY *, int, int, + const unsigned char *, int); +int EVP_PKEY_add1_attr_by_txt(EVP_PKEY *, const char *, int, + const unsigned char *, int); +""" + +MACROS = """ +void OpenSSL_add_all_algorithms(void); +int EVP_PKEY_assign_RSA(EVP_PKEY *, RSA *); +int EVP_PKEY_assign_DSA(EVP_PKEY *, DSA *); + +int EVP_PKEY_assign_EC_KEY(EVP_PKEY *, EC_KEY *); +EC_KEY *EVP_PKEY_get1_EC_KEY(EVP_PKEY *); +int EVP_PKEY_set1_EC_KEY(EVP_PKEY *, EC_KEY *); + +int EVP_CIPHER_CTX_block_size(const EVP_CIPHER_CTX 
*); +int EVP_CIPHER_CTX_ctrl(EVP_CIPHER_CTX *, int, int, void *); + +int PKCS5_PBKDF2_HMAC(const char *, int, const unsigned char *, int, int, + const EVP_MD *, int, unsigned char *); + +int EVP_PKEY_CTX_set_signature_md(EVP_PKEY_CTX *, const EVP_MD *); + +// not macros but must be in this section since they're not available in 0.9.8 +EVP_PKEY_CTX *EVP_PKEY_CTX_new(EVP_PKEY *, ENGINE *); +EVP_PKEY_CTX *EVP_PKEY_CTX_new_id(int, ENGINE *); +EVP_PKEY_CTX *EVP_PKEY_CTX_dup(EVP_PKEY_CTX *); +void EVP_PKEY_CTX_free(EVP_PKEY_CTX *); +int EVP_PKEY_sign_init(EVP_PKEY_CTX *); +int EVP_PKEY_sign(EVP_PKEY_CTX *, unsigned char *, size_t *, + const unsigned char *, size_t); +int EVP_PKEY_verify_init(EVP_PKEY_CTX *); +int EVP_PKEY_verify(EVP_PKEY_CTX *, const unsigned char *, size_t, + const unsigned char *, size_t); +int EVP_PKEY_encrypt_init(EVP_PKEY_CTX *); +int EVP_PKEY_decrypt_init(EVP_PKEY_CTX *); + +/* The following were macros in 0.9.8e. Once we drop support for RHEL/CentOS 5 + we should move these back to FUNCTIONS. 
*/ +const EVP_CIPHER *EVP_CIPHER_CTX_cipher(const EVP_CIPHER_CTX *); +int EVP_CIPHER_block_size(const EVP_CIPHER *); +const EVP_MD *EVP_MD_CTX_md(const EVP_MD_CTX *); +int EVP_MD_size(const EVP_MD *); + +/* Must be in macros because EVP_PKEY_CTX is undefined in 0.9.8 */ +int Cryptography_EVP_PKEY_encrypt(EVP_PKEY_CTX *ctx, unsigned char *out, + size_t *outlen, const unsigned char *in, + size_t inlen); +int Cryptography_EVP_PKEY_decrypt(EVP_PKEY_CTX *ctx, unsigned char *out, + size_t *outlen, const unsigned char *in, + size_t inlen); +""" + +CUSTOMIZATIONS = """ +#ifdef EVP_CTRL_GCM_SET_TAG +const long Cryptography_HAS_GCM = 1; +#else +const long Cryptography_HAS_GCM = 0; +const long EVP_CTRL_GCM_GET_TAG = -1; +const long EVP_CTRL_GCM_SET_TAG = -1; +const long EVP_CTRL_GCM_SET_IVLEN = -1; +#endif +#if OPENSSL_VERSION_NUMBER >= 0x10000000L +const long Cryptography_HAS_PBKDF2_HMAC = 1; +const long Cryptography_HAS_PKEY_CTX = 1; + +/* OpenSSL 0.9.8 defines EVP_PKEY_encrypt and EVP_PKEY_decrypt functions, + but they are a completely different signature from the ones in 1.0.0+. + These wrapper functions allows us to safely declare them on any version and + conditionally remove them on 0.9.8. 
*/ +int Cryptography_EVP_PKEY_encrypt(EVP_PKEY_CTX *ctx, unsigned char *out, + size_t *outlen, const unsigned char *in, + size_t inlen) { + return EVP_PKEY_encrypt(ctx, out, outlen, in, inlen); +} +int Cryptography_EVP_PKEY_decrypt(EVP_PKEY_CTX *ctx, unsigned char *out, + size_t *outlen, const unsigned char *in, + size_t inlen) { + return EVP_PKEY_decrypt(ctx, out, outlen, in, inlen); +} +#else +const long Cryptography_HAS_PBKDF2_HMAC = 0; +int (*PKCS5_PBKDF2_HMAC)(const char *, int, const unsigned char *, int, int, + const EVP_MD *, int, unsigned char *) = NULL; +const long Cryptography_HAS_PKEY_CTX = 0; +typedef void EVP_PKEY_CTX; +int (*EVP_PKEY_CTX_set_signature_md)(EVP_PKEY_CTX *, const EVP_MD *) = NULL; +int (*EVP_PKEY_sign_init)(EVP_PKEY_CTX *) = NULL; +int (*EVP_PKEY_sign)(EVP_PKEY_CTX *, unsigned char *, size_t *, + const unsigned char *, size_t) = NULL; +int (*EVP_PKEY_verify_init)(EVP_PKEY_CTX *) = NULL; +int (*EVP_PKEY_verify)(EVP_PKEY_CTX *, const unsigned char *, size_t, + const unsigned char *, size_t) = NULL; +EVP_PKEY_CTX *(*EVP_PKEY_CTX_new)(EVP_PKEY *, ENGINE *) = NULL; +EVP_PKEY_CTX *(*EVP_PKEY_CTX_new_id)(int, ENGINE *) = NULL; +EVP_PKEY_CTX *(*EVP_PKEY_CTX_dup)(EVP_PKEY_CTX *) = NULL; +void (*EVP_PKEY_CTX_free)(EVP_PKEY_CTX *) = NULL; +int (*EVP_PKEY_encrypt_init)(EVP_PKEY_CTX *) = NULL; +int (*EVP_PKEY_decrypt_init)(EVP_PKEY_CTX *) = NULL; +int (*Cryptography_EVP_PKEY_encrypt)(EVP_PKEY_CTX *, unsigned char *, size_t *, + const unsigned char *, size_t) = NULL; +int (*Cryptography_EVP_PKEY_decrypt)(EVP_PKEY_CTX *, unsigned char *, size_t *, + const unsigned char *, size_t) = NULL; +#endif +#ifdef OPENSSL_NO_EC +int (*EVP_PKEY_assign_EC_KEY)(EVP_PKEY *, EC_KEY *) = NULL; +EC_KEY *(*EVP_PKEY_get1_EC_KEY)(EVP_PKEY *) = NULL; +int (*EVP_PKEY_set1_EC_KEY)(EVP_PKEY *, EC_KEY *) = NULL; +#endif + +""" + +CONDITIONAL_NAMES = { + "Cryptography_HAS_GCM": [ + "EVP_CTRL_GCM_GET_TAG", + "EVP_CTRL_GCM_SET_TAG", + "EVP_CTRL_GCM_SET_IVLEN", + ], + 
"Cryptography_HAS_PBKDF2_HMAC": [ + "PKCS5_PBKDF2_HMAC" + ], + "Cryptography_HAS_PKEY_CTX": [ + "EVP_PKEY_CTX_new", + "EVP_PKEY_CTX_new_id", + "EVP_PKEY_CTX_dup", + "EVP_PKEY_CTX_free", + "EVP_PKEY_sign", + "EVP_PKEY_sign_init", + "EVP_PKEY_verify", + "EVP_PKEY_verify_init", + "Cryptography_EVP_PKEY_encrypt", + "EVP_PKEY_encrypt_init", + "Cryptography_EVP_PKEY_decrypt", + "EVP_PKEY_decrypt_init", + "EVP_PKEY_CTX_set_signature_md", + ], + "Cryptography_HAS_EC": [ + "EVP_PKEY_assign_EC_KEY", + "EVP_PKEY_get1_EC_KEY", + "EVP_PKEY_set1_EC_KEY", + ] +} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/hmac.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/hmac.py new file mode 100644 index 0000000..6a64b92 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/hmac.py @@ -0,0 +1,94 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#include +""" + +TYPES = """ +typedef struct { ...; } HMAC_CTX; +""" + +FUNCTIONS = """ +void HMAC_CTX_init(HMAC_CTX *); +void HMAC_CTX_cleanup(HMAC_CTX *); + +int Cryptography_HMAC_Init_ex(HMAC_CTX *, const void *, int, const EVP_MD *, + ENGINE *); +int Cryptography_HMAC_Update(HMAC_CTX *, const unsigned char *, size_t); +int Cryptography_HMAC_Final(HMAC_CTX *, unsigned char *, unsigned int *); +int Cryptography_HMAC_CTX_copy(HMAC_CTX *, HMAC_CTX *); +""" + +MACROS = """ +""" + +CUSTOMIZATIONS = """ +int Cryptography_HMAC_Init_ex(HMAC_CTX *ctx, const void *key, int key_len, + const EVP_MD *md, ENGINE *impl) { +#if OPENSSL_VERSION_NUMBER >= 0x010000000 + return HMAC_Init_ex(ctx, key, key_len, md, impl); +#else + HMAC_Init_ex(ctx, key, key_len, md, impl); + return 1; +#endif +} + +int Cryptography_HMAC_Update(HMAC_CTX *ctx, const unsigned char *data, + size_t data_len) { +#if OPENSSL_VERSION_NUMBER >= 0x010000000 + return HMAC_Update(ctx, data, data_len); +#else + HMAC_Update(ctx, data, data_len); + return 1; +#endif +} + +int Cryptography_HMAC_Final(HMAC_CTX *ctx, unsigned char *digest, + unsigned int *outlen) { +#if OPENSSL_VERSION_NUMBER >= 0x010000000 + return HMAC_Final(ctx, digest, outlen); +#else + HMAC_Final(ctx, digest, outlen); + return 1; +#endif +} + +int Cryptography_HMAC_CTX_copy(HMAC_CTX *dst_ctx, HMAC_CTX *src_ctx) { +#if OPENSSL_VERSION_NUMBER >= 0x010000000 + return HMAC_CTX_copy(dst_ctx, src_ctx); +#else + HMAC_CTX_init(dst_ctx); + if (!EVP_MD_CTX_copy_ex(&dst_ctx->i_ctx, &src_ctx->i_ctx)) { + goto err; + } + if (!EVP_MD_CTX_copy_ex(&dst_ctx->o_ctx, &src_ctx->o_ctx)) { + goto err; + } + if (!EVP_MD_CTX_copy_ex(&dst_ctx->md_ctx, &src_ctx->md_ctx)) { + goto err; + } + memcpy(dst_ctx->key, src_ctx->key, HMAC_MAX_MD_CBLOCK); + dst_ctx->key_length = src_ctx->key_length; + dst_ctx->md = src_ctx->md; + return 1; + + err: + return 0; +#endif +} +""" + +CONDITIONAL_NAMES 
= {} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/nid.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/nid.py new file mode 100644 index 0000000..ea6fd4d --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/nid.py @@ -0,0 +1,215 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +INCLUDES = "" + +TYPES = """ +static const int Cryptography_HAS_ECDSA_SHA2_NIDS; + +static const int NID_undef; +static const int NID_dsa; +static const int NID_dsaWithSHA; +static const int NID_dsaWithSHA1; +static const int NID_md2; +static const int NID_md4; +static const int NID_md5; +static const int NID_mdc2; +static const int NID_ripemd160; +static const int NID_sha; +static const int NID_sha1; +static const int NID_sha256; +static const int NID_sha384; +static const int NID_sha512; +static const int NID_sha224; +static const int NID_sha; +static const int NID_ecdsa_with_SHA1; +static const int NID_ecdsa_with_SHA224; +static const int NID_ecdsa_with_SHA256; +static const int NID_ecdsa_with_SHA384; +static const int NID_ecdsa_with_SHA512; +static const int NID_crl_reason; +static const int NID_pbe_WithSHA1And3_Key_TripleDES_CBC; +static const int NID_subject_alt_name; +static const int NID_X9_62_c2pnb163v1; +static const int NID_X9_62_c2pnb163v2; +static const int NID_X9_62_c2pnb163v3; +static const 
int NID_X9_62_c2pnb176v1; +static const int NID_X9_62_c2tnb191v1; +static const int NID_X9_62_c2tnb191v2; +static const int NID_X9_62_c2tnb191v3; +static const int NID_X9_62_c2onb191v4; +static const int NID_X9_62_c2onb191v5; +static const int NID_X9_62_c2pnb208w1; +static const int NID_X9_62_c2tnb239v1; +static const int NID_X9_62_c2tnb239v2; +static const int NID_X9_62_c2tnb239v3; +static const int NID_X9_62_c2onb239v4; +static const int NID_X9_62_c2onb239v5; +static const int NID_X9_62_c2pnb272w1; +static const int NID_X9_62_c2pnb304w1; +static const int NID_X9_62_c2tnb359v1; +static const int NID_X9_62_c2pnb368w1; +static const int NID_X9_62_c2tnb431r1; +static const int NID_X9_62_prime192v1; +static const int NID_X9_62_prime192v2; +static const int NID_X9_62_prime192v3; +static const int NID_X9_62_prime239v1; +static const int NID_X9_62_prime239v2; +static const int NID_X9_62_prime239v3; +static const int NID_X9_62_prime256v1; +static const int NID_secp112r1; +static const int NID_secp112r2; +static const int NID_secp128r1; +static const int NID_secp128r2; +static const int NID_secp160k1; +static const int NID_secp160r1; +static const int NID_secp160r2; +static const int NID_sect163k1; +static const int NID_sect163r1; +static const int NID_sect163r2; +static const int NID_secp192k1; +static const int NID_secp224k1; +static const int NID_secp224r1; +static const int NID_secp256k1; +static const int NID_secp384r1; +static const int NID_secp521r1; +static const int NID_sect113r1; +static const int NID_sect113r2; +static const int NID_sect131r1; +static const int NID_sect131r2; +static const int NID_sect193r1; +static const int NID_sect193r2; +static const int NID_sect233k1; +static const int NID_sect233r1; +static const int NID_sect239k1; +static const int NID_sect283k1; +static const int NID_sect283r1; +static const int NID_sect409k1; +static const int NID_sect409r1; +static const int NID_sect571k1; +static const int NID_sect571r1; +static const int 
NID_wap_wsg_idm_ecid_wtls1; +static const int NID_wap_wsg_idm_ecid_wtls3; +static const int NID_wap_wsg_idm_ecid_wtls4; +static const int NID_wap_wsg_idm_ecid_wtls5; +static const int NID_wap_wsg_idm_ecid_wtls6; +static const int NID_wap_wsg_idm_ecid_wtls7; +static const int NID_wap_wsg_idm_ecid_wtls8; +static const int NID_wap_wsg_idm_ecid_wtls9; +static const int NID_wap_wsg_idm_ecid_wtls10; +static const int NID_wap_wsg_idm_ecid_wtls11; +static const int NID_wap_wsg_idm_ecid_wtls12; +static const int NID_ipsec3; +static const int NID_ipsec4; +static const char *const SN_X9_62_c2pnb163v1; +static const char *const SN_X9_62_c2pnb163v2; +static const char *const SN_X9_62_c2pnb163v3; +static const char *const SN_X9_62_c2pnb176v1; +static const char *const SN_X9_62_c2tnb191v1; +static const char *const SN_X9_62_c2tnb191v2; +static const char *const SN_X9_62_c2tnb191v3; +static const char *const SN_X9_62_c2onb191v4; +static const char *const SN_X9_62_c2onb191v5; +static const char *const SN_X9_62_c2pnb208w1; +static const char *const SN_X9_62_c2tnb239v1; +static const char *const SN_X9_62_c2tnb239v2; +static const char *const SN_X9_62_c2tnb239v3; +static const char *const SN_X9_62_c2onb239v4; +static const char *const SN_X9_62_c2onb239v5; +static const char *const SN_X9_62_c2pnb272w1; +static const char *const SN_X9_62_c2pnb304w1; +static const char *const SN_X9_62_c2tnb359v1; +static const char *const SN_X9_62_c2pnb368w1; +static const char *const SN_X9_62_c2tnb431r1; +static const char *const SN_X9_62_prime192v1; +static const char *const SN_X9_62_prime192v2; +static const char *const SN_X9_62_prime192v3; +static const char *const SN_X9_62_prime239v1; +static const char *const SN_X9_62_prime239v2; +static const char *const SN_X9_62_prime239v3; +static const char *const SN_X9_62_prime256v1; +static const char *const SN_secp112r1; +static const char *const SN_secp112r2; +static const char *const SN_secp128r1; +static const char *const SN_secp128r2; +static const char 
*const SN_secp160k1; +static const char *const SN_secp160r1; +static const char *const SN_secp160r2; +static const char *const SN_sect163k1; +static const char *const SN_sect163r1; +static const char *const SN_sect163r2; +static const char *const SN_secp192k1; +static const char *const SN_secp224k1; +static const char *const SN_secp224r1; +static const char *const SN_secp256k1; +static const char *const SN_secp384r1; +static const char *const SN_secp521r1; +static const char *const SN_sect113r1; +static const char *const SN_sect113r2; +static const char *const SN_sect131r1; +static const char *const SN_sect131r2; +static const char *const SN_sect193r1; +static const char *const SN_sect193r2; +static const char *const SN_sect233k1; +static const char *const SN_sect233r1; +static const char *const SN_sect239k1; +static const char *const SN_sect283k1; +static const char *const SN_sect283r1; +static const char *const SN_sect409k1; +static const char *const SN_sect409r1; +static const char *const SN_sect571k1; +static const char *const SN_sect571r1; +static const char *const SN_wap_wsg_idm_ecid_wtls1; +static const char *const SN_wap_wsg_idm_ecid_wtls3; +static const char *const SN_wap_wsg_idm_ecid_wtls4; +static const char *const SN_wap_wsg_idm_ecid_wtls5; +static const char *const SN_wap_wsg_idm_ecid_wtls6; +static const char *const SN_wap_wsg_idm_ecid_wtls7; +static const char *const SN_wap_wsg_idm_ecid_wtls8; +static const char *const SN_wap_wsg_idm_ecid_wtls9; +static const char *const SN_wap_wsg_idm_ecid_wtls10; +static const char *const SN_wap_wsg_idm_ecid_wtls11; +static const char *const SN_wap_wsg_idm_ecid_wtls12; +static const char *const SN_ipsec3; +static const char *const SN_ipsec4; +""" + +FUNCTIONS = """ +""" + +MACROS = """ +""" + +CUSTOMIZATIONS = """ +// OpenSSL 0.9.8g+ +#if OPENSSL_VERSION_NUMBER >= 0x0090807fL +static const long Cryptography_HAS_ECDSA_SHA2_NIDS = 1; +#else +static const long Cryptography_HAS_ECDSA_SHA2_NIDS = 0; +static const int 
NID_ecdsa_with_SHA224 = 0; +static const int NID_ecdsa_with_SHA256 = 0; +static const int NID_ecdsa_with_SHA384 = 0; +static const int NID_ecdsa_with_SHA512 = 0; +#endif +""" + +CONDITIONAL_NAMES = { + "Cryptography_HAS_ECDSA_SHA2_NIDS": [ + "NID_ecdsa_with_SHA224", + "NID_ecdsa_with_SHA256", + "NID_ecdsa_with_SHA384", + "NID_ecdsa_with_SHA512", + ], +} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/objects.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/objects.py new file mode 100644 index 0000000..557c015 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/objects.py @@ -0,0 +1,45 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#include +""" + +TYPES = """ +""" + +FUNCTIONS = """ +ASN1_OBJECT *OBJ_nid2obj(int); +const char *OBJ_nid2ln(int); +const char *OBJ_nid2sn(int); +int OBJ_obj2nid(const ASN1_OBJECT *); +int OBJ_ln2nid(const char *); +int OBJ_sn2nid(const char *); +int OBJ_txt2nid(const char *); +ASN1_OBJECT *OBJ_txt2obj(const char *, int); +int OBJ_obj2txt(char *, int, const ASN1_OBJECT *, int); +int OBJ_cmp(const ASN1_OBJECT *, const ASN1_OBJECT *); +ASN1_OBJECT *OBJ_dup(const ASN1_OBJECT *); +int OBJ_create(const char *, const char *, const char *); +void OBJ_cleanup(void); +""" + +MACROS = """ +""" + +CUSTOMIZATIONS = """ +""" + +CONDITIONAL_NAMES = {} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/opensslv.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/opensslv.py new file mode 100644 index 0000000..e4aa621 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/opensslv.py @@ -0,0 +1,34 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#include +""" + +TYPES = """ +static const int OPENSSL_VERSION_NUMBER; +static const char *const OPENSSL_VERSION_TEXT; +""" + +FUNCTIONS = """ +""" + +MACROS = """ +""" + +CUSTOMIZATIONS = """ +""" + +CONDITIONAL_NAMES = {} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/osrandom_engine.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/osrandom_engine.py new file mode 100644 index 0000000..462997c --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/osrandom_engine.py @@ -0,0 +1,218 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#ifdef _WIN32 +#include +#else +#include +#include +#endif +""" + +TYPES = """ +static const char *const Cryptography_osrandom_engine_name; +static const char *const Cryptography_osrandom_engine_id; +""" + +FUNCTIONS = """ +int Cryptography_add_osrandom_engine(void); +""" + +MACROS = """ +""" + +WIN32_CUSTOMIZATIONS = """ +static HCRYPTPROV hCryptProv = 0; + +static int osrandom_init(ENGINE *e) { + if (hCryptProv > 0) { + return 1; + } + if (CryptAcquireContext(&hCryptProv, NULL, NULL, + PROV_RSA_FULL, CRYPT_VERIFYCONTEXT)) { + return 1; + } else { + return 0; + } +} + +static int osrandom_rand_bytes(unsigned char *buffer, int size) { + if (hCryptProv == 0) { + return 0; + } + + if (!CryptGenRandom(hCryptProv, (DWORD)size, buffer)) { + ERR_put_error( + ERR_LIB_RAND, 0, ERR_R_RAND_LIB, "osrandom_engine.py", 0 + ); + return 0; + } + return 1; +} + +static int osrandom_finish(ENGINE *e) { + if (CryptReleaseContext(hCryptProv, 0)) { + hCryptProv = 0; + return 1; + } else { + return 0; + } +} + +static int osrandom_rand_status(void) { + if (hCryptProv == 0) { + return 0; + } else { + return 1; + } +} +""" + +POSIX_CUSTOMIZATIONS = """ +static int urandom_fd = -1; + +static int osrandom_finish(ENGINE *e); + +static int osrandom_init(ENGINE *e) { + if (urandom_fd > -1) { + return 1; + } + urandom_fd = open("/dev/urandom", O_RDONLY); + if (urandom_fd > -1) { + int flags = fcntl(urandom_fd, F_GETFD); + if (flags == -1) { + osrandom_finish(e); + return 0; + } else if (fcntl(urandom_fd, F_SETFD, flags | FD_CLOEXEC) == -1) { + osrandom_finish(e); + return 0; + } + return 1; + } else { + return 0; + } +} + +static int osrandom_rand_bytes(unsigned char *buffer, int size) { + ssize_t n; + while (size > 0) { + do { + n = read(urandom_fd, buffer, (size_t)size); + } while (n < 0 && errno == EINTR); + if (n <= 0) { + ERR_put_error( + ERR_LIB_RAND, 0, ERR_R_RAND_LIB, "osrandom_engine.py", 0 + ); + 
return 0; + } + buffer += n; + size -= n; + } + return 1; +} + +static int osrandom_finish(ENGINE *e) { + int n; + do { + n = close(urandom_fd); + } while (n < 0 && errno == EINTR); + urandom_fd = -1; + if (n < 0) { + return 0; + } else { + return 1; + } +} + +static int osrandom_rand_status(void) { + if (urandom_fd == -1) { + return 0; + } else { + return 1; + } +} +""" + +CUSTOMIZATIONS = """ +static const char *Cryptography_osrandom_engine_id = "osrandom"; +static const char *Cryptography_osrandom_engine_name = "osrandom_engine"; + +#if defined(_WIN32) +%(WIN32_CUSTOMIZATIONS)s +#else +%(POSIX_CUSTOMIZATIONS)s +#endif + +/* This replicates the behavior of the OpenSSL FIPS RNG, which returns a + -1 in the event that there is an error when calling RAND_pseudo_bytes. */ +static int osrandom_pseudo_rand_bytes(unsigned char *buffer, int size) { + int res = osrandom_rand_bytes(buffer, size); + if (res == 0) { + return -1; + } else { + return res; + } +} + +static RAND_METHOD osrandom_rand = { + NULL, + osrandom_rand_bytes, + NULL, + NULL, + osrandom_pseudo_rand_bytes, + osrandom_rand_status, +}; + +/* Returns 1 if successfully added, 2 if engine has previously been added, + and 0 for error. 
*/ +int Cryptography_add_osrandom_engine(void) { + ENGINE *e; + e = ENGINE_by_id(Cryptography_osrandom_engine_id); + if (e != NULL) { + ENGINE_free(e); + return 2; + } else { + ERR_clear_error(); + } + + e = ENGINE_new(); + if (e == NULL) { + return 0; + } + if(!ENGINE_set_id(e, Cryptography_osrandom_engine_id) || + !ENGINE_set_name(e, Cryptography_osrandom_engine_name) || + !ENGINE_set_RAND(e, &osrandom_rand) || + !ENGINE_set_init_function(e, osrandom_init) || + !ENGINE_set_finish_function(e, osrandom_finish)) { + ENGINE_free(e); + return 0; + } + if (!ENGINE_add(e)) { + ENGINE_free(e); + return 0; + } + if (!ENGINE_free(e)) { + return 0; + } + + return 1; +} +""" % { + "WIN32_CUSTOMIZATIONS": WIN32_CUSTOMIZATIONS, + "POSIX_CUSTOMIZATIONS": POSIX_CUSTOMIZATIONS, +} + +CONDITIONAL_NAMES = {} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/pem.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/pem.py new file mode 100644 index 0000000..e42fc6f --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/pem.py @@ -0,0 +1,88 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#include +""" + +TYPES = """ +typedef int pem_password_cb(char *buf, int size, int rwflag, void *userdata); +""" + +FUNCTIONS = """ +X509 *PEM_read_bio_X509(BIO *, X509 **, pem_password_cb *, void *); +int PEM_write_bio_X509(BIO *, X509 *); + +int PEM_write_bio_PrivateKey(BIO *, EVP_PKEY *, const EVP_CIPHER *, + unsigned char *, int, pem_password_cb *, void *); + +EVP_PKEY *PEM_read_bio_PrivateKey(BIO *, EVP_PKEY **, pem_password_cb *, + void *); + +int PEM_write_bio_PKCS8PrivateKey(BIO *, EVP_PKEY *, const EVP_CIPHER *, + char *, int, pem_password_cb *, void *); +int PEM_write_bio_PKCS8PrivateKey_nid(BIO *, EVP_PKEY *, int, char *, int, + pem_password_cb *, void *); + +int i2d_PKCS8PrivateKey_bio(BIO *, EVP_PKEY *, const EVP_CIPHER *, + char *, int, pem_password_cb *, void *); +int i2d_PKCS8PrivateKey_nid_bio(BIO *, EVP_PKEY *, int, + char *, int, pem_password_cb *, void *); + +EVP_PKEY *d2i_PKCS8PrivateKey_bio(BIO *, EVP_PKEY **, pem_password_cb *, + void *); + +int PEM_write_bio_X509_REQ(BIO *, X509_REQ *); + +X509_REQ *PEM_read_bio_X509_REQ(BIO *, X509_REQ **, pem_password_cb *, void *); + +X509_CRL *PEM_read_bio_X509_CRL(BIO *, X509_CRL **, pem_password_cb *, void *); + +int PEM_write_bio_X509_CRL(BIO *, X509_CRL *); + +PKCS7 *PEM_read_bio_PKCS7(BIO *, PKCS7 **, pem_password_cb *, void *); +DH *PEM_read_bio_DHparams(BIO *, DH **, pem_password_cb *, void *); + +DSA *PEM_read_bio_DSAPrivateKey(BIO *, DSA **, pem_password_cb *, void *); + +RSA *PEM_read_bio_RSAPrivateKey(BIO *, RSA **, pem_password_cb *, void *); + +int PEM_write_bio_DSAPrivateKey(BIO *, DSA *, const EVP_CIPHER *, + unsigned char *, int, + pem_password_cb *, void *); + +int PEM_write_bio_RSAPrivateKey(BIO *, RSA *, const EVP_CIPHER *, + unsigned char *, int, + pem_password_cb *, void *); + +DSA *PEM_read_bio_DSA_PUBKEY(BIO *, DSA **, pem_password_cb *, void *); + +RSA *PEM_read_bio_RSAPublicKey(BIO *, RSA **, 
pem_password_cb *, void *); + +int PEM_write_bio_DSA_PUBKEY(BIO *, DSA *); + +int PEM_write_bio_RSAPublicKey(BIO *, const RSA *); + +EVP_PKEY *PEM_read_bio_PUBKEY(BIO *, EVP_PKEY **, pem_password_cb *, void *); +int PEM_write_bio_PUBKEY(BIO *, EVP_PKEY *); +""" + +MACROS = """ +""" + +CUSTOMIZATIONS = """ +""" + +CONDITIONAL_NAMES = {} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/pkcs12.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/pkcs12.py new file mode 100644 index 0000000..a8f106f --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/pkcs12.py @@ -0,0 +1,41 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#include +""" + +TYPES = """ +typedef ... 
PKCS12; +""" + +FUNCTIONS = """ +void PKCS12_free(PKCS12 *); + +PKCS12 *d2i_PKCS12_bio(BIO *, PKCS12 **); +int i2d_PKCS12_bio(BIO *, PKCS12 *); +""" + +MACROS = """ +int PKCS12_parse(PKCS12 *, const char *, EVP_PKEY **, X509 **, + Cryptography_STACK_OF_X509 **); +PKCS12 *PKCS12_create(char *, char *, EVP_PKEY *, X509 *, + Cryptography_STACK_OF_X509 *, int, int, int, int, int); +""" + +CUSTOMIZATIONS = """ +""" + +CONDITIONAL_NAMES = {} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/pkcs7.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/pkcs7.py new file mode 100644 index 0000000..1343e56 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/pkcs7.py @@ -0,0 +1,41 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#include +""" + +TYPES = """ +typedef struct { + ASN1_OBJECT *type; + ...; +} PKCS7; +""" + +FUNCTIONS = """ +void PKCS7_free(PKCS7 *); +""" + +MACROS = """ +int PKCS7_type_is_signed(PKCS7 *); +int PKCS7_type_is_enveloped(PKCS7 *); +int PKCS7_type_is_signedAndEnveloped(PKCS7 *); +int PKCS7_type_is_data(PKCS7 *); +""" + +CUSTOMIZATIONS = """ +""" + +CONDITIONAL_NAMES = {} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/rand.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/rand.py new file mode 100644 index 0000000..7b1be9d --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/rand.py @@ -0,0 +1,45 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#include +""" + +TYPES = """ +""" + +FUNCTIONS = """ +void ERR_load_RAND_strings(void); +void RAND_seed(const void *, int); +void RAND_add(const void *, int, double); +int RAND_status(void); +int RAND_egd(const char *); +int RAND_egd_bytes(const char *, int); +int RAND_query_egd_bytes(const char *, unsigned char *, int); +const char *RAND_file_name(char *, size_t); +int RAND_load_file(const char *, long); +int RAND_write_file(const char *); +void RAND_cleanup(void); +int RAND_bytes(unsigned char *, int); +int RAND_pseudo_bytes(unsigned char *, int); +""" + +MACROS = """ +""" + +CUSTOMIZATIONS = """ +""" + +CONDITIONAL_NAMES = {} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/rsa.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/rsa.py new file mode 100644 index 0000000..c635610 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/rsa.py @@ -0,0 +1,108 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#include +""" + +TYPES = """ +typedef struct rsa_st { + BIGNUM *n; + BIGNUM *e; + BIGNUM *d; + BIGNUM *p; + BIGNUM *q; + BIGNUM *dmp1; + BIGNUM *dmq1; + BIGNUM *iqmp; + ...; +} RSA; +typedef ... 
BN_GENCB; +static const int RSA_PKCS1_PADDING; +static const int RSA_SSLV23_PADDING; +static const int RSA_NO_PADDING; +static const int RSA_PKCS1_OAEP_PADDING; +static const int RSA_X931_PADDING; +static const int RSA_PKCS1_PSS_PADDING; +static const int RSA_F4; + +static const int Cryptography_HAS_PSS_PADDING; +static const int Cryptography_HAS_MGF1_MD; +""" + +FUNCTIONS = """ +RSA *RSA_new(void); +void RSA_free(RSA *); +int RSA_size(const RSA *); +int RSA_generate_key_ex(RSA *, int, BIGNUM *, BN_GENCB *); +int RSA_check_key(const RSA *); +RSA *RSAPublicKey_dup(RSA *); +int RSA_blinding_on(RSA *, BN_CTX *); +void RSA_blinding_off(RSA *); +int RSA_public_encrypt(int, const unsigned char *, unsigned char *, + RSA *, int); +int RSA_private_encrypt(int, const unsigned char *, unsigned char *, + RSA *, int); +int RSA_public_decrypt(int, const unsigned char *, unsigned char *, + RSA *, int); +int RSA_private_decrypt(int, const unsigned char *, unsigned char *, + RSA *, int); +int RSA_print(BIO *, const RSA *, int); +int RSA_verify_PKCS1_PSS(RSA *, const unsigned char *, const EVP_MD *, + const unsigned char *, int); +int RSA_padding_add_PKCS1_PSS(RSA *, unsigned char *, const unsigned char *, + const EVP_MD *, int); +int RSA_padding_add_PKCS1_OAEP(unsigned char *, int, const unsigned char *, + int, const unsigned char *, int); +int RSA_padding_check_PKCS1_OAEP(unsigned char *, int, const unsigned char *, + int, int, const unsigned char *, int); +""" + +MACROS = """ +int EVP_PKEY_CTX_set_rsa_padding(EVP_PKEY_CTX *, int); +int EVP_PKEY_CTX_set_rsa_pss_saltlen(EVP_PKEY_CTX *, int); +int EVP_PKEY_CTX_set_rsa_mgf1_md(EVP_PKEY_CTX *, EVP_MD *); +""" + +CUSTOMIZATIONS = """ +#if OPENSSL_VERSION_NUMBER >= 0x10000000 +static const long Cryptography_HAS_PSS_PADDING = 1; +#else +// see evp.py for the definition of Cryptography_HAS_PKEY_CTX +static const long Cryptography_HAS_PSS_PADDING = 0; +int (*EVP_PKEY_CTX_set_rsa_padding)(EVP_PKEY_CTX *, int) = NULL; +int 
(*EVP_PKEY_CTX_set_rsa_pss_saltlen)(EVP_PKEY_CTX *, int) = NULL; +static const long RSA_PKCS1_PSS_PADDING = 0; +#endif +#if OPENSSL_VERSION_NUMBER >= 0x1000100f +static const long Cryptography_HAS_MGF1_MD = 1; +#else +static const long Cryptography_HAS_MGF1_MD = 0; +int (*EVP_PKEY_CTX_set_rsa_mgf1_md)(EVP_PKEY_CTX *, EVP_MD *) = NULL; +#endif +""" + +CONDITIONAL_NAMES = { + "Cryptography_HAS_PKEY_CTX": [ + "EVP_PKEY_CTX_set_rsa_padding", + "EVP_PKEY_CTX_set_rsa_pss_saltlen", + ], + "Cryptography_HAS_PSS_PADDING": [ + "RSA_PKCS1_PSS_PADDING", + ], + "Cryptography_HAS_MGF1_MD": [ + "EVP_PKEY_CTX_set_rsa_mgf1_md", + ], +} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/ssl.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/ssl.py new file mode 100644 index 0000000..0b15411 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/ssl.py @@ -0,0 +1,555 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#include +""" + +TYPES = """ +/* + * Internally invented symbols to tell which versions of SSL/TLS are supported. 
+*/ +static const long Cryptography_HAS_SSL2; +static const long Cryptography_HAS_TLSv1_1; +static const long Cryptography_HAS_TLSv1_2; + +/* Internally invented symbol to tell us if SNI is supported */ +static const long Cryptography_HAS_TLSEXT_HOSTNAME; + +/* Internally invented symbol to tell us if SSL_MODE_RELEASE_BUFFERS is + * supported + */ +static const long Cryptography_HAS_RELEASE_BUFFERS; + +/* Internally invented symbol to tell us if SSL_OP_NO_COMPRESSION is + * supported + */ +static const long Cryptography_HAS_OP_NO_COMPRESSION; + +static const long Cryptography_HAS_SSL_OP_MSIE_SSLV2_RSA_PADDING; +static const long Cryptography_HAS_SSL_SET_SSL_CTX; +static const long Cryptography_HAS_SSL_OP_NO_TICKET; +static const long Cryptography_HAS_NETBSD_D1_METH; +static const long Cryptography_HAS_NEXTPROTONEG; + +static const long SSL_FILETYPE_PEM; +static const long SSL_FILETYPE_ASN1; +static const long SSL_ERROR_NONE; +static const long SSL_ERROR_ZERO_RETURN; +static const long SSL_ERROR_WANT_READ; +static const long SSL_ERROR_WANT_WRITE; +static const long SSL_ERROR_WANT_X509_LOOKUP; +static const long SSL_ERROR_SYSCALL; +static const long SSL_ERROR_SSL; +static const long SSL_SENT_SHUTDOWN; +static const long SSL_RECEIVED_SHUTDOWN; +static const long SSL_OP_NO_SSLv2; +static const long SSL_OP_NO_SSLv3; +static const long SSL_OP_NO_TLSv1; +static const long SSL_OP_NO_TLSv1_1; +static const long SSL_OP_NO_TLSv1_2; +static const long SSL_OP_NO_COMPRESSION; +static const long SSL_OP_SINGLE_DH_USE; +static const long SSL_OP_EPHEMERAL_RSA; +static const long SSL_OP_MICROSOFT_SESS_ID_BUG; +static const long SSL_OP_NETSCAPE_CHALLENGE_BUG; +static const long SSL_OP_NETSCAPE_REUSE_CIPHER_CHANGE_BUG; +static const long SSL_OP_SSLREF2_REUSE_CERT_TYPE_BUG; +static const long SSL_OP_MICROSOFT_BIG_SSLV3_BUFFER; +static const long SSL_OP_MSIE_SSLV2_RSA_PADDING; +static const long SSL_OP_SSLEAY_080_CLIENT_DH_BUG; +static const long SSL_OP_TLS_D5_BUG; +static const long 
SSL_OP_TLS_BLOCK_PADDING_BUG; +static const long SSL_OP_DONT_INSERT_EMPTY_FRAGMENTS; +static const long SSL_OP_CIPHER_SERVER_PREFERENCE; +static const long SSL_OP_TLS_ROLLBACK_BUG; +static const long SSL_OP_PKCS1_CHECK_1; +static const long SSL_OP_PKCS1_CHECK_2; +static const long SSL_OP_NETSCAPE_CA_DN_BUG; +static const long SSL_OP_NETSCAPE_DEMO_CIPHER_CHANGE_BUG; +static const long SSL_OP_NO_QUERY_MTU; +static const long SSL_OP_COOKIE_EXCHANGE; +static const long SSL_OP_NO_TICKET; +static const long SSL_OP_ALL; +static const long SSL_OP_SINGLE_ECDH_USE; +static const long SSL_VERIFY_PEER; +static const long SSL_VERIFY_FAIL_IF_NO_PEER_CERT; +static const long SSL_VERIFY_CLIENT_ONCE; +static const long SSL_VERIFY_NONE; +static const long SSL_SESS_CACHE_OFF; +static const long SSL_SESS_CACHE_CLIENT; +static const long SSL_SESS_CACHE_SERVER; +static const long SSL_SESS_CACHE_BOTH; +static const long SSL_SESS_CACHE_NO_AUTO_CLEAR; +static const long SSL_SESS_CACHE_NO_INTERNAL_LOOKUP; +static const long SSL_SESS_CACHE_NO_INTERNAL_STORE; +static const long SSL_SESS_CACHE_NO_INTERNAL; +static const long SSL_ST_CONNECT; +static const long SSL_ST_ACCEPT; +static const long SSL_ST_MASK; +static const long SSL_ST_INIT; +static const long SSL_ST_BEFORE; +static const long SSL_ST_OK; +static const long SSL_ST_RENEGOTIATE; +static const long SSL_CB_LOOP; +static const long SSL_CB_EXIT; +static const long SSL_CB_READ; +static const long SSL_CB_WRITE; +static const long SSL_CB_ALERT; +static const long SSL_CB_READ_ALERT; +static const long SSL_CB_WRITE_ALERT; +static const long SSL_CB_ACCEPT_LOOP; +static const long SSL_CB_ACCEPT_EXIT; +static const long SSL_CB_CONNECT_LOOP; +static const long SSL_CB_CONNECT_EXIT; +static const long SSL_CB_HANDSHAKE_START; +static const long SSL_CB_HANDSHAKE_DONE; +static const long SSL_MODE_RELEASE_BUFFERS; +static const long SSL_MODE_ENABLE_PARTIAL_WRITE; +static const long SSL_MODE_ACCEPT_MOVING_WRITE_BUFFER; +static const long 
SSL_MODE_AUTO_RETRY; +static const long SSL3_RANDOM_SIZE; +typedef ... X509_STORE_CTX; +static const long X509_V_OK; +static const long X509_V_ERR_APPLICATION_VERIFICATION; +typedef ... SSL_METHOD; +typedef struct ssl_st { + int version; + int type; + ...; +} SSL_CTX; + +typedef struct { + int master_key_length; + unsigned char master_key[...]; + ...; +} SSL_SESSION; + +typedef struct { + unsigned char server_random[...]; + unsigned char client_random[...]; + ...; +} SSL3_STATE; + +typedef struct { + SSL3_STATE *s3; + SSL_SESSION *session; + int type; + ...; +} SSL; + +static const long TLSEXT_NAMETYPE_host_name; + +typedef ... SSL_CIPHER; +""" + +FUNCTIONS = """ +void SSL_load_error_strings(void); +int SSL_library_init(void); + +/* SSL */ +const char *SSL_state_string_long(const SSL *); +SSL_SESSION *SSL_get1_session(SSL *); +int SSL_set_session(SSL *, SSL_SESSION *); +int SSL_get_verify_mode(const SSL *); +void SSL_set_verify_depth(SSL *, int); +int SSL_get_verify_depth(const SSL *); +int (*SSL_get_verify_callback(const SSL *))(int, X509_STORE_CTX *); +void SSL_set_info_callback(SSL *ssl, void (*)(const SSL *, int, int)); +void (*SSL_get_info_callback(const SSL *))(const SSL *, int, int); +SSL *SSL_new(SSL_CTX *); +void SSL_free(SSL *); +int SSL_set_fd(SSL *, int); +void SSL_set_bio(SSL *, BIO *, BIO *); +void SSL_set_connect_state(SSL *); +void SSL_set_accept_state(SSL *); +void SSL_set_shutdown(SSL *, int); +int SSL_get_shutdown(const SSL *); +int SSL_pending(const SSL *); +int SSL_write(SSL *, const void *, int); +int SSL_read(SSL *, void *, int); +X509 *SSL_get_peer_certificate(const SSL *); +int SSL_get_ex_data_X509_STORE_CTX_idx(void); + +Cryptography_STACK_OF_X509 *SSL_get_peer_cert_chain(const SSL *); +Cryptography_STACK_OF_X509_NAME *SSL_get_client_CA_list(const SSL *); + +int SSL_get_error(const SSL *, int); +int SSL_do_handshake(SSL *); +int SSL_shutdown(SSL *); +const char *SSL_get_cipher_list(const SSL *, int); + +/* context */ +void 
SSL_CTX_free(SSL_CTX *); +long SSL_CTX_set_timeout(SSL_CTX *, long); +int SSL_CTX_set_default_verify_paths(SSL_CTX *); +void SSL_CTX_set_verify(SSL_CTX *, int, int (*)(int, X509_STORE_CTX *)); +void SSL_CTX_set_verify_depth(SSL_CTX *, int); +int (*SSL_CTX_get_verify_callback(const SSL_CTX *))(int, X509_STORE_CTX *); +int SSL_CTX_get_verify_mode(const SSL_CTX *); +int SSL_CTX_get_verify_depth(const SSL_CTX *); +int SSL_CTX_set_cipher_list(SSL_CTX *, const char *); +int SSL_CTX_load_verify_locations(SSL_CTX *, const char *, const char *); +void SSL_CTX_set_default_passwd_cb(SSL_CTX *, pem_password_cb *); +void SSL_CTX_set_default_passwd_cb_userdata(SSL_CTX *, void *); +int SSL_CTX_use_certificate(SSL_CTX *, X509 *); +int SSL_CTX_use_certificate_file(SSL_CTX *, const char *, int); +int SSL_CTX_use_certificate_chain_file(SSL_CTX *, const char *); +int SSL_CTX_use_PrivateKey(SSL_CTX *, EVP_PKEY *); +int SSL_CTX_use_PrivateKey_file(SSL_CTX *, const char *, int); +void SSL_CTX_set_cert_store(SSL_CTX *, X509_STORE *); +X509_STORE *SSL_CTX_get_cert_store(const SSL_CTX *); +int SSL_CTX_add_client_CA(SSL_CTX *, X509 *); + +void SSL_CTX_set_client_CA_list(SSL_CTX *, Cryptography_STACK_OF_X509_NAME *); + + +/* X509_STORE_CTX */ +int X509_STORE_CTX_get_error(X509_STORE_CTX *); +void X509_STORE_CTX_set_error(X509_STORE_CTX *, int); +int X509_STORE_CTX_get_error_depth(X509_STORE_CTX *); +X509 *X509_STORE_CTX_get_current_cert(X509_STORE_CTX *); +int X509_STORE_CTX_set_ex_data(X509_STORE_CTX *, int, void *); +void *X509_STORE_CTX_get_ex_data(X509_STORE_CTX *, int); + + +/* SSL_SESSION */ +void SSL_SESSION_free(SSL_SESSION *); + +/* Information about actually used cipher */ +const char *SSL_CIPHER_get_name(const SSL_CIPHER *); +int SSL_CIPHER_get_bits(const SSL_CIPHER *, int *); +char *SSL_CIPHER_get_version(const SSL_CIPHER *); + +size_t SSL_get_finished(const SSL *, void *, size_t); +size_t SSL_get_peer_finished(const SSL *, void *, size_t); +""" + +MACROS = """ +unsigned long 
SSL_set_mode(SSL *, unsigned long); +unsigned long SSL_get_mode(SSL *); + +unsigned long SSL_set_options(SSL *, unsigned long); +unsigned long SSL_get_options(SSL *); + +int SSL_want_read(const SSL *); +int SSL_want_write(const SSL *); + +long SSL_total_renegotiations(SSL *); + +/* Defined as unsigned long because SSL_OP_ALL is greater than signed 32-bit + and Windows defines long as 32-bit. */ +unsigned long SSL_CTX_set_options(SSL_CTX *, unsigned long); +unsigned long SSL_CTX_get_options(SSL_CTX *); +unsigned long SSL_CTX_set_mode(SSL_CTX *, unsigned long); +unsigned long SSL_CTX_get_mode(SSL_CTX *); +unsigned long SSL_CTX_set_session_cache_mode(SSL_CTX *, unsigned long); +unsigned long SSL_CTX_get_session_cache_mode(SSL_CTX *); +unsigned long SSL_CTX_set_tmp_dh(SSL_CTX *, DH *); +unsigned long SSL_CTX_set_tmp_ecdh(SSL_CTX *, EC_KEY *); +unsigned long SSL_CTX_add_extra_chain_cert(SSL_CTX *, X509 *); + +/*- These aren't macros these functions are all const X on openssl > 1.0.x -*/ + +/* methods */ + +/* SSLv2 support is compiled out of some versions of OpenSSL. These will + * get special support when we generate the bindings so that if they are + * available they will be wrapped, but if they are not they won't cause + * problems (like link errors). + */ +const SSL_METHOD *SSLv2_method(void); +const SSL_METHOD *SSLv2_server_method(void); +const SSL_METHOD *SSLv2_client_method(void); + +/* + * TLSv1_1 and TLSv1_2 are recent additions. Only sufficiently new versions of + * OpenSSL support them. 
+ */ +const SSL_METHOD *TLSv1_1_method(void); +const SSL_METHOD *TLSv1_1_server_method(void); +const SSL_METHOD *TLSv1_1_client_method(void); + +const SSL_METHOD *TLSv1_2_method(void); +const SSL_METHOD *TLSv1_2_server_method(void); +const SSL_METHOD *TLSv1_2_client_method(void); + +const SSL_METHOD *SSLv3_method(void); +const SSL_METHOD *SSLv3_server_method(void); +const SSL_METHOD *SSLv3_client_method(void); + +const SSL_METHOD *TLSv1_method(void); +const SSL_METHOD *TLSv1_server_method(void); +const SSL_METHOD *TLSv1_client_method(void); + +const SSL_METHOD *DTLSv1_method(void); +const SSL_METHOD *DTLSv1_server_method(void); +const SSL_METHOD *DTLSv1_client_method(void); + +const SSL_METHOD *SSLv23_method(void); +const SSL_METHOD *SSLv23_server_method(void); +const SSL_METHOD *SSLv23_client_method(void); + +/*- These aren't macros these arguments are all const X on openssl > 1.0.x -*/ +SSL_CTX *SSL_CTX_new(SSL_METHOD *); +long SSL_CTX_get_timeout(const SSL_CTX *); + +const SSL_CIPHER *SSL_get_current_cipher(const SSL *); + +/* SNI APIs were introduced in OpenSSL 1.0.0. To continue to support + * earlier versions some special handling of these is necessary. + */ +const char *SSL_get_servername(const SSL *, const int); +void SSL_set_tlsext_host_name(SSL *, char *); +void SSL_CTX_set_tlsext_servername_callback( + SSL_CTX *, + int (*)(const SSL *, int *, void *)); + +long SSL_session_reused(SSL *); + +/* The following were macros in 0.9.8e. Once we drop support for RHEL/CentOS 5 + we should move these back to FUNCTIONS. */ +void SSL_CTX_set_info_callback(SSL_CTX *, void (*)(const SSL *, int, int)); +void (*SSL_CTX_get_info_callback(SSL_CTX *))(const SSL *, int, int); +/* This function does not exist in 0.9.8e. Once we drop support for + RHEL/CentOS 5 this can be moved back to FUNCTIONS. */ +SSL_CTX *SSL_set_SSL_CTX(SSL *, SSL_CTX *); + +const SSL_METHOD* Cryptography_SSL_CTX_get_method(const SSL_CTX*); + +/* NPN APIs were introduced in OpenSSL 1.0.1. 
To continue to support earlier + * versions some special handling of these is necessary. + */ +void SSL_CTX_set_next_protos_advertised_cb(SSL_CTX *, + int (*)(SSL *, + const unsigned char **, + unsigned int *, + void *), + void *); +void SSL_CTX_set_next_proto_select_cb(SSL_CTX *, + int (*)(SSL *, + unsigned char **, + unsigned char *, + const unsigned char *, + unsigned int, + void *), + void *); +int SSL_select_next_proto(unsigned char **, unsigned char *, + const unsigned char *, unsigned int, + const unsigned char *, unsigned int); +void SSL_get0_next_proto_negotiated(const SSL *, + const unsigned char **, unsigned *); +""" + +CUSTOMIZATIONS = """ +#ifdef OPENSSL_NO_SSL2 +static const long Cryptography_HAS_SSL2 = 0; +SSL_METHOD* (*SSLv2_method)(void) = NULL; +SSL_METHOD* (*SSLv2_client_method)(void) = NULL; +SSL_METHOD* (*SSLv2_server_method)(void) = NULL; +#else +static const long Cryptography_HAS_SSL2 = 1; +#endif + +#ifdef SSL_CTRL_SET_TLSEXT_HOSTNAME +static const long Cryptography_HAS_TLSEXT_HOSTNAME = 1; +#else +static const long Cryptography_HAS_TLSEXT_HOSTNAME = 0; +void (*SSL_set_tlsext_host_name)(SSL *, char *) = NULL; +const char* (*SSL_get_servername)(const SSL *, const int) = NULL; +void (*SSL_CTX_set_tlsext_servername_callback)( + SSL_CTX *, + int (*)(const SSL *, int *, void *)) = NULL; +#endif + +#ifdef SSL_MODE_RELEASE_BUFFERS +static const long Cryptography_HAS_RELEASE_BUFFERS = 1; +#else +static const long Cryptography_HAS_RELEASE_BUFFERS = 0; +const long SSL_MODE_RELEASE_BUFFERS = 0; +#endif + +#ifdef SSL_OP_NO_COMPRESSION +static const long Cryptography_HAS_OP_NO_COMPRESSION = 1; +#else +static const long Cryptography_HAS_OP_NO_COMPRESSION = 0; +const long SSL_OP_NO_COMPRESSION = 0; +#endif + +#ifdef SSL_OP_NO_TLSv1_1 +static const long Cryptography_HAS_TLSv1_1 = 1; +#else +static const long Cryptography_HAS_TLSv1_1 = 0; +static const long SSL_OP_NO_TLSv1_1 = 0; +SSL_METHOD* (*TLSv1_1_method)(void) = NULL; +SSL_METHOD* 
(*TLSv1_1_client_method)(void) = NULL; +SSL_METHOD* (*TLSv1_1_server_method)(void) = NULL; +#endif + +#ifdef SSL_OP_NO_TLSv1_2 +static const long Cryptography_HAS_TLSv1_2 = 1; +#else +static const long Cryptography_HAS_TLSv1_2 = 0; +static const long SSL_OP_NO_TLSv1_2 = 0; +SSL_METHOD* (*TLSv1_2_method)(void) = NULL; +SSL_METHOD* (*TLSv1_2_client_method)(void) = NULL; +SSL_METHOD* (*TLSv1_2_server_method)(void) = NULL; +#endif + +#ifdef SSL_OP_MSIE_SSLV2_RSA_PADDING +static const long Cryptography_HAS_SSL_OP_MSIE_SSLV2_RSA_PADDING = 1; +#else +static const long Cryptography_HAS_SSL_OP_MSIE_SSLV2_RSA_PADDING = 0; +const long SSL_OP_MSIE_SSLV2_RSA_PADDING = 0; +#endif + +#ifdef OPENSSL_NO_EC +long (*SSL_CTX_set_tmp_ecdh)(SSL_CTX *, EC_KEY *) = NULL; +#endif + +#ifdef SSL_OP_NO_TICKET +static const long Cryptography_HAS_SSL_OP_NO_TICKET = 1; +#else +static const long Cryptography_HAS_SSL_OP_NO_TICKET = 0; +const long SSL_OP_NO_TICKET = 0; +#endif + +// OpenSSL 0.9.8f+ +#if OPENSSL_VERSION_NUMBER >= 0x00908070L +static const long Cryptography_HAS_SSL_SET_SSL_CTX = 1; +#else +static const long Cryptography_HAS_SSL_SET_SSL_CTX = 0; +static const long TLSEXT_NAMETYPE_host_name = 0; +SSL_CTX *(*SSL_set_SSL_CTX)(SSL *, SSL_CTX *) = NULL; +#endif + +/* NetBSD shipped without including d1_meth.c. This workaround checks to see + if the version of NetBSD we're currently running on is old enough to + have the bug and provides an empty implementation so we can link and + then remove the function from the ffi object. */ +#ifdef __NetBSD__ +# include +# if (__NetBSD_Version__ < 699003800) +static const long Cryptography_HAS_NETBSD_D1_METH = 0; +const SSL_METHOD *DTLSv1_method(void) { + return NULL; +} +# else +static const long Cryptography_HAS_NETBSD_D1_METH = 1; +# endif +#else +static const long Cryptography_HAS_NETBSD_D1_METH = 1; +#endif + +// Workaround for #794 caused by cffi const** bug. 
+const SSL_METHOD* Cryptography_SSL_CTX_get_method(const SSL_CTX* ctx) { + return ctx->method; +} + +/* Because OPENSSL defines macros that claim lack of support for things, rather + * than macros that claim support for things, we need to do a version check in + * addition to a definition check. NPN was added in 1.0.1: for any version + * before that, there is no compatibility. + */ +#if defined(OPENSSL_NO_NEXTPROTONEG) || OPENSSL_VERSION_NUMBER < 0x1000100fL +static const long Cryptography_HAS_NEXTPROTONEG = 0; +void (*SSL_CTX_set_next_protos_advertised_cb)(SSL_CTX *, + int (*)(SSL *, + const unsigned char **, + unsigned int *, + void *), + void *) = NULL; +void (*SSL_CTX_set_next_proto_select_cb)(SSL_CTX *, + int (*)(SSL *, + unsigned char **, + unsigned char *, + const unsigned char *, + unsigned int, + void *), + void *) = NULL; +int (*SSL_select_next_proto)(unsigned char **, unsigned char *, + const unsigned char *, unsigned int, + const unsigned char *, unsigned int) = NULL; +void (*SSL_get0_next_proto_negotiated)(const SSL *, + const unsigned char **, + unsigned *) = NULL; +#else +static const long Cryptography_HAS_NEXTPROTONEG = 1; +#endif +""" + +CONDITIONAL_NAMES = { + "Cryptography_HAS_TLSv1_1": [ + "SSL_OP_NO_TLSv1_1", + "TLSv1_1_method", + "TLSv1_1_server_method", + "TLSv1_1_client_method", + ], + + "Cryptography_HAS_TLSv1_2": [ + "SSL_OP_NO_TLSv1_2", + "TLSv1_2_method", + "TLSv1_2_server_method", + "TLSv1_2_client_method", + ], + + "Cryptography_HAS_SSL2": [ + "SSLv2_method", + "SSLv2_client_method", + "SSLv2_server_method", + ], + + "Cryptography_HAS_TLSEXT_HOSTNAME": [ + "SSL_set_tlsext_host_name", + "SSL_get_servername", + "SSL_CTX_set_tlsext_servername_callback", + ], + + "Cryptography_HAS_RELEASE_BUFFERS": [ + "SSL_MODE_RELEASE_BUFFERS", + ], + + "Cryptography_HAS_OP_NO_COMPRESSION": [ + "SSL_OP_NO_COMPRESSION", + ], + + "Cryptography_HAS_SSL_OP_MSIE_SSLV2_RSA_PADDING": [ + "SSL_OP_MSIE_SSLV2_RSA_PADDING", + ], + + "Cryptography_HAS_EC": [ + 
"SSL_CTX_set_tmp_ecdh", + ], + + "Cryptography_HAS_SSL_OP_NO_TICKET": [ + "SSL_OP_NO_TICKET", + ], + + "Cryptography_HAS_SSL_SET_SSL_CTX": [ + "SSL_set_SSL_CTX", + "TLSEXT_NAMETYPE_host_name", + ], + + "Cryptography_HAS_NETBSD_D1_METH": [ + "DTLSv1_method", + ], + + "Cryptography_HAS_NEXTPROTONEG": [ + "SSL_CTX_set_next_protos_advertised_cb", + "SSL_CTX_set_next_proto_select_cb", + "SSL_select_next_proto", + "SSL_get0_next_proto_negotiated", + ] +} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/x509.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/x509.py new file mode 100644 index 0000000..36a15e4 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/x509.py @@ -0,0 +1,277 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#include + +/* + * This is part of a work-around for the difficulty cffi has in dealing with + * `STACK_OF(foo)` as the name of a type. We invent a new, simpler name that + * will be an alias for this type and use the alias throughout. This works + * together with another opaque typedef for the same name in the TYPES section. + * Note that the result is an opaque type. + */ +typedef STACK_OF(X509) Cryptography_STACK_OF_X509; +typedef STACK_OF(X509_REVOKED) Cryptography_STACK_OF_X509_REVOKED; +""" + +TYPES = """ +typedef ... 
Cryptography_STACK_OF_X509; +typedef ... Cryptography_STACK_OF_X509_REVOKED; + +typedef struct { + ASN1_OBJECT *algorithm; + ...; +} X509_ALGOR; + +typedef ... X509_ATTRIBUTE; + +typedef struct { + X509_ALGOR *signature; + ...; +} X509_CINF; + +typedef struct { + ASN1_OBJECT *object; + ASN1_BOOLEAN critical; + ASN1_OCTET_STRING *value; +} X509_EXTENSION; + +typedef ... X509_EXTENSIONS; + +typedef ... X509_REQ; + +typedef struct { + ASN1_INTEGER *serialNumber; + ASN1_TIME *revocationDate; + X509_EXTENSIONS *extensions; + int sequence; + ...; +} X509_REVOKED; + +typedef struct { + Cryptography_STACK_OF_X509_REVOKED *revoked; + ...; +} X509_CRL_INFO; + +typedef struct { + X509_CRL_INFO *crl; + ...; +} X509_CRL; + +typedef struct { + X509_CINF *cert_info; + ...; +} X509; + +typedef ... X509_STORE; +typedef ... NETSCAPE_SPKI; +""" + +FUNCTIONS = """ +X509 *X509_new(void); +void X509_free(X509 *); +X509 *X509_dup(X509 *); + +int X509_print_ex(BIO *, X509 *, unsigned long, unsigned long); + +int X509_set_version(X509 *, long); + +EVP_PKEY *X509_get_pubkey(X509 *); +int X509_set_pubkey(X509 *, EVP_PKEY *); + +unsigned char *X509_alias_get0(X509 *, int *); +int X509_sign(X509 *, EVP_PKEY *, const EVP_MD *); + +int X509_digest(const X509 *, const EVP_MD *, unsigned char *, unsigned int *); + +ASN1_TIME *X509_gmtime_adj(ASN1_TIME *, long); + +unsigned long X509_subject_name_hash(X509 *); + +X509_NAME *X509_get_subject_name(X509 *); +int X509_set_subject_name(X509 *, X509_NAME *); + +X509_NAME *X509_get_issuer_name(X509 *); +int X509_set_issuer_name(X509 *, X509_NAME *); + +int X509_get_ext_count(X509 *); +int X509_add_ext(X509 *, X509_EXTENSION *, int); +X509_EXTENSION *X509_EXTENSION_dup(X509_EXTENSION *); +X509_EXTENSION *X509_get_ext(X509 *, int); +int X509_EXTENSION_get_critical(X509_EXTENSION *); +ASN1_OBJECT *X509_EXTENSION_get_object(X509_EXTENSION *); +void X509_EXTENSION_free(X509_EXTENSION *); + +int X509_REQ_set_version(X509_REQ *, long); +X509_REQ 
*X509_REQ_new(void); +void X509_REQ_free(X509_REQ *); +int X509_REQ_set_pubkey(X509_REQ *, EVP_PKEY *); +int X509_REQ_sign(X509_REQ *, EVP_PKEY *, const EVP_MD *); +int X509_REQ_verify(X509_REQ *, EVP_PKEY *); +EVP_PKEY *X509_REQ_get_pubkey(X509_REQ *); +int X509_REQ_print_ex(BIO *, X509_REQ *, unsigned long, unsigned long); + +int X509V3_EXT_print(BIO *, X509_EXTENSION *, unsigned long, int); +ASN1_OCTET_STRING *X509_EXTENSION_get_data(X509_EXTENSION *); + +X509_REVOKED *X509_REVOKED_new(void); +void X509_REVOKED_free(X509_REVOKED *); + +int X509_REVOKED_set_serialNumber(X509_REVOKED *, ASN1_INTEGER *); + +int X509_REVOKED_add1_ext_i2d(X509_REVOKED *, int, void *, int, unsigned long); + +X509_CRL *d2i_X509_CRL_bio(BIO *, X509_CRL **); +X509_CRL *X509_CRL_new(void); +void X509_CRL_free(X509_CRL *); +int X509_CRL_add0_revoked(X509_CRL *, X509_REVOKED *); +int i2d_X509_CRL_bio(BIO *, X509_CRL *); +int X509_CRL_print(BIO *, X509_CRL *); +int X509_CRL_set_issuer_name(X509_CRL *, X509_NAME *); +int X509_CRL_sign(X509_CRL *, EVP_PKEY *, const EVP_MD *); + +int NETSCAPE_SPKI_verify(NETSCAPE_SPKI *, EVP_PKEY *); +int NETSCAPE_SPKI_sign(NETSCAPE_SPKI *, EVP_PKEY *, const EVP_MD *); +char *NETSCAPE_SPKI_b64_encode(NETSCAPE_SPKI *); +EVP_PKEY *NETSCAPE_SPKI_get_pubkey(NETSCAPE_SPKI *); +int NETSCAPE_SPKI_set_pubkey(NETSCAPE_SPKI *, EVP_PKEY *); +NETSCAPE_SPKI *NETSCAPE_SPKI_new(void); +void NETSCAPE_SPKI_free(NETSCAPE_SPKI *); + +/* ASN1 serialization */ +int i2d_X509_bio(BIO *, X509 *); +X509 *d2i_X509_bio(BIO *, X509 **); + +int i2d_X509_REQ_bio(BIO *, X509_REQ *); +X509_REQ *d2i_X509_REQ_bio(BIO *, X509_REQ **); + +int i2d_PrivateKey_bio(BIO *, EVP_PKEY *); +EVP_PKEY *d2i_PrivateKey_bio(BIO *, EVP_PKEY **); +int i2d_PUBKEY_bio(BIO *, EVP_PKEY *); +EVP_PKEY *d2i_PUBKEY_bio(BIO *, EVP_PKEY **); + +ASN1_INTEGER *X509_get_serialNumber(X509 *); +int X509_set_serialNumber(X509 *, ASN1_INTEGER *); + +/* X509_STORE */ +X509_STORE *X509_STORE_new(void); +void 
X509_STORE_free(X509_STORE *); +int X509_STORE_add_cert(X509_STORE *, X509 *); +int X509_verify_cert(X509_STORE_CTX *); + +const char *X509_verify_cert_error_string(long); + +const char *X509_get_default_cert_area(void); +const char *X509_get_default_cert_dir(void); +const char *X509_get_default_cert_file(void); +const char *X509_get_default_cert_dir_env(void); +const char *X509_get_default_cert_file_env(void); +const char *X509_get_default_private_dir(void); + +int i2d_RSA_PUBKEY(RSA *, unsigned char **); +RSA *d2i_RSA_PUBKEY(RSA **, const unsigned char **, long); +RSA *d2i_RSAPublicKey(RSA **, const unsigned char **, long); +RSA *d2i_RSAPrivateKey(RSA **, const unsigned char **, long); +int i2d_DSA_PUBKEY(DSA *, unsigned char **); +DSA *d2i_DSA_PUBKEY(DSA **, const unsigned char **, long); +DSA *d2i_DSAPublicKey(DSA **, const unsigned char **, long); +DSA *d2i_DSAPrivateKey(DSA **, const unsigned char **, long); + + +RSA *d2i_RSAPrivateKey_bio(BIO *, RSA **); +int i2d_RSAPrivateKey_bio(BIO *, RSA *); +RSA *d2i_RSAPublicKey_bio(BIO *, RSA **); +int i2d_RSAPublicKey_bio(BIO *, RSA *); +RSA *d2i_RSA_PUBKEY_bio(BIO *, RSA **); +int i2d_RSA_PUBKEY_bio(BIO *, RSA *); +DSA *d2i_DSA_PUBKEY_bio(BIO *, DSA **); +int i2d_DSA_PUBKEY_bio(BIO *, DSA *); +DSA *d2i_DSAPrivateKey_bio(BIO *, DSA **); +int i2d_DSAPrivateKey_bio(BIO *, DSA *); +""" + +MACROS = """ +long X509_get_version(X509 *); + +ASN1_TIME *X509_get_notBefore(X509 *); +ASN1_TIME *X509_get_notAfter(X509 *); + +long X509_REQ_get_version(X509_REQ *); +X509_NAME *X509_REQ_get_subject_name(X509_REQ *); + +Cryptography_STACK_OF_X509 *sk_X509_new_null(void); +void sk_X509_free(Cryptography_STACK_OF_X509 *); +int sk_X509_num(Cryptography_STACK_OF_X509 *); +int sk_X509_push(Cryptography_STACK_OF_X509 *, X509 *); +X509 *sk_X509_value(Cryptography_STACK_OF_X509 *, int); + +X509_EXTENSIONS *sk_X509_EXTENSION_new_null(void); +int sk_X509_EXTENSION_num(X509_EXTENSIONS *); +X509_EXTENSION *sk_X509_EXTENSION_value(X509_EXTENSIONS 
*, int); +int sk_X509_EXTENSION_push(X509_EXTENSIONS *, X509_EXTENSION *); +X509_EXTENSION *sk_X509_EXTENSION_delete(X509_EXTENSIONS *, int); +void sk_X509_EXTENSION_free(X509_EXTENSIONS *); + +int sk_X509_REVOKED_num(Cryptography_STACK_OF_X509_REVOKED *); +X509_REVOKED *sk_X509_REVOKED_value(Cryptography_STACK_OF_X509_REVOKED *, int); + +int i2d_RSAPublicKey(RSA *, unsigned char **); +int i2d_RSAPrivateKey(RSA *, unsigned char **); +int i2d_DSAPublicKey(DSA *, unsigned char **); +int i2d_DSAPrivateKey(DSA *, unsigned char **); + +/* These aren't macros these arguments are all const X on openssl > 1.0.x */ +int X509_CRL_set_lastUpdate(X509_CRL *, ASN1_TIME *); +int X509_CRL_set_nextUpdate(X509_CRL *, ASN1_TIME *); + +/* these use STACK_OF(X509_EXTENSION) in 0.9.8e. Once we drop support for + RHEL/CentOS 5 we should move these back to FUNCTIONS. */ +int X509_REQ_add_extensions(X509_REQ *, X509_EXTENSIONS *); +X509_EXTENSIONS *X509_REQ_get_extensions(X509_REQ *); + +int i2d_EC_PUBKEY(EC_KEY *, unsigned char **); +EC_KEY *d2i_EC_PUBKEY(EC_KEY **, const unsigned char **, long); +EC_KEY *d2i_EC_PUBKEY_bio(BIO *, EC_KEY **); +int i2d_EC_PUBKEY_bio(BIO *, EC_KEY *); +EC_KEY *d2i_ECPrivateKey_bio(BIO *, EC_KEY **); +int i2d_ECPrivateKey_bio(BIO *, EC_KEY *); +""" + +CUSTOMIZATIONS = """ +// OpenSSL 0.9.8e does not have this definition +#if OPENSSL_VERSION_NUMBER <= 0x0090805fL +typedef STACK_OF(X509_EXTENSION) X509_EXTENSIONS; +#endif +#ifdef OPENSSL_NO_EC +int (*i2d_EC_PUBKEY)(EC_KEY *, unsigned char **) = NULL; +EC_KEY *(*d2i_EC_PUBKEY)(EC_KEY **, const unsigned char **, long) = NULL; +EC_KEY *(*d2i_EC_PUBKEY_bio)(BIO *, EC_KEY **) = NULL; +int (*i2d_EC_PUBKEY_bio)(BIO *, EC_KEY *) = NULL; +EC_KEY *(*d2i_ECPrivateKey_bio)(BIO *, EC_KEY **) = NULL; +int (*i2d_ECPrivateKey_bio)(BIO *, EC_KEY *) = NULL; +#endif +""" + +CONDITIONAL_NAMES = { + "Cryptography_HAS_EC": [ + "i2d_EC_PUBKEY", + "d2i_EC_PUBKEY", + "d2i_EC_PUBKEY_bio", + "i2d_EC_PUBKEY_bio", + 
"d2i_ECPrivateKey_bio", + "i2d_ECPrivateKey_bio", + ] +} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/x509name.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/x509name.py new file mode 100644 index 0000000..50abee2 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/x509name.py @@ -0,0 +1,61 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#include + +/* + * See the comment above Cryptography_STACK_OF_X509 in x509.py + */ +typedef STACK_OF(X509_NAME) Cryptography_STACK_OF_X509_NAME; +""" + +TYPES = """ +typedef ... X509_NAME; +typedef ... X509_NAME_ENTRY; +typedef ... 
Cryptography_STACK_OF_X509_NAME; +""" + +FUNCTIONS = """ +int X509_NAME_entry_count(X509_NAME *); +X509_NAME_ENTRY *X509_NAME_get_entry(X509_NAME *, int); +ASN1_OBJECT *X509_NAME_ENTRY_get_object(X509_NAME_ENTRY *); +ASN1_STRING *X509_NAME_ENTRY_get_data(X509_NAME_ENTRY *); +unsigned long X509_NAME_hash(X509_NAME *); + +int i2d_X509_NAME(X509_NAME *, unsigned char **); +int X509_NAME_add_entry_by_NID(X509_NAME *, int, int, unsigned char *, + int, int, int); +X509_NAME_ENTRY *X509_NAME_delete_entry(X509_NAME *, int); +void X509_NAME_ENTRY_free(X509_NAME_ENTRY *); +int X509_NAME_get_index_by_NID(X509_NAME *, int, int); +int X509_NAME_cmp(const X509_NAME *, const X509_NAME *); +char *X509_NAME_oneline(X509_NAME *, char *, int); +X509_NAME *X509_NAME_dup(X509_NAME *); +void X509_NAME_free(X509_NAME *); +""" + +MACROS = """ +Cryptography_STACK_OF_X509_NAME *sk_X509_NAME_new_null(void); +int sk_X509_NAME_num(Cryptography_STACK_OF_X509_NAME *); +int sk_X509_NAME_push(Cryptography_STACK_OF_X509_NAME *, X509_NAME *); +X509_NAME *sk_X509_NAME_value(Cryptography_STACK_OF_X509_NAME *, int); +void sk_X509_NAME_free(Cryptography_STACK_OF_X509_NAME *); +""" + +CUSTOMIZATIONS = """ +""" + +CONDITIONAL_NAMES = {} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/x509v3.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/x509v3.py new file mode 100644 index 0000000..02ec250 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/x509v3.py @@ -0,0 +1,101 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#include +""" + +TYPES = """ +typedef struct { + X509 *issuer_cert; + X509 *subject_cert; + ...; +} X509V3_CTX; + +typedef void * (*X509V3_EXT_D2I)(void *, const unsigned char **, long); + +typedef struct { + ASN1_ITEM_EXP *it; + X509V3_EXT_D2I d2i; + ...; +} X509V3_EXT_METHOD; + +static const int GEN_OTHERNAME; +static const int GEN_EMAIL; +static const int GEN_X400; +static const int GEN_DNS; +static const int GEN_URI; +static const int GEN_DIRNAME; +static const int GEN_EDIPARTY; +static const int GEN_IPADD; +static const int GEN_RID; + +typedef struct { + ...; +} OTHERNAME; + +typedef struct { + ...; +} EDIPARTYNAME; + +typedef struct { + int type; + union { + char *ptr; + OTHERNAME *otherName; /* otherName */ + ASN1_IA5STRING *rfc822Name; + ASN1_IA5STRING *dNSName; + ASN1_TYPE *x400Address; + X509_NAME *directoryName; + EDIPARTYNAME *ediPartyName; + ASN1_IA5STRING *uniformResourceIdentifier; + ASN1_OCTET_STRING *iPAddress; + ASN1_OBJECT *registeredID; + + /* Old names */ + ASN1_OCTET_STRING *ip; /* iPAddress */ + X509_NAME *dirn; /* dirn */ + ASN1_IA5STRING *ia5; /* rfc822Name, dNSName, */ + /* uniformResourceIdentifier */ + ASN1_OBJECT *rid; /* registeredID */ + ASN1_TYPE *other; /* x400Address */ + } d; + ...; +} GENERAL_NAME; + +typedef struct stack_st_GENERAL_NAME GENERAL_NAMES; +""" + +FUNCTIONS = """ +void X509V3_set_ctx(X509V3_CTX *, X509 *, X509 *, X509_REQ *, X509_CRL *, int); +X509_EXTENSION *X509V3_EXT_nconf(CONF *, X509V3_CTX *, char *, char *); +int 
GENERAL_NAME_print(BIO *, GENERAL_NAME *); +""" + +MACROS = """ +void *X509V3_set_ctx_nodb(X509V3_CTX *); +int sk_GENERAL_NAME_num(struct stack_st_GENERAL_NAME *); +int sk_GENERAL_NAME_push(struct stack_st_GENERAL_NAME *, GENERAL_NAME *); +GENERAL_NAME *sk_GENERAL_NAME_value(struct stack_st_GENERAL_NAME *, int); + +/* These aren't macros these functions are all const X on openssl > 1.0.x */ +const X509V3_EXT_METHOD *X509V3_EXT_get(X509_EXTENSION *); +const X509V3_EXT_METHOD *X509V3_EXT_get_nid(int); +""" + +CUSTOMIZATIONS = """ +""" + +CONDITIONAL_NAMES = {} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/utils.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/utils.py new file mode 100644 index 0000000..1c48116 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/utils.py @@ -0,0 +1,108 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +import binascii + +import sys + +import cffi + + +def build_ffi(module_prefix, modules, pre_include="", post_include="", + libraries=[], extra_compile_args=[], extra_link_args=[]): + """ + Modules listed in ``modules`` should have the following attributes: + + * ``INCLUDES``: A string containing C includes. + * ``TYPES``: A string containing C declarations for types. + * ``FUNCTIONS``: A string containing C declarations for functions. 
+ * ``MACROS``: A string containing C declarations for any macros. + * ``CUSTOMIZATIONS``: A string containing arbitrary top-level C code, this + can be used to do things like test for a define and provide an + alternate implementation based on that. + * ``CONDITIONAL_NAMES``: A dict mapping strings of condition names from the + library to a list of names which will not be present without the + condition. + """ + ffi = cffi.FFI() + types = [] + includes = [] + functions = [] + macros = [] + customizations = [] + for name in modules: + module_name = module_prefix + name + __import__(module_name) + module = sys.modules[module_name] + + types.append(module.TYPES) + macros.append(module.MACROS) + functions.append(module.FUNCTIONS) + includes.append(module.INCLUDES) + customizations.append(module.CUSTOMIZATIONS) + + cdef_sources = types + functions + macros + ffi.cdef("\n".join(cdef_sources)) + + # We include functions here so that if we got any of their definitions + # wrong, the underlying C compiler will explode. In C you are allowed + # to re-declare a function if it has the same signature. That is: + # int foo(int); + # int foo(int); + # is legal, but the following will fail to compile: + # int foo(int); + # int foo(short); + source = "\n".join( + [pre_include] + + includes + + [post_include] + + functions + + customizations + ) + lib = ffi.verify( + source=source, + modulename=_create_modulename(cdef_sources, source, sys.version), + libraries=libraries, + ext_package="cryptography", + extra_compile_args=extra_compile_args, + extra_link_args=extra_link_args, + ) + + for name in modules: + module_name = module_prefix + name + module = sys.modules[module_name] + for condition, names in module.CONDITIONAL_NAMES.items(): + if not getattr(lib, condition): + for name in names: + delattr(lib, name) + + return ffi, lib + + +def _create_modulename(cdef_sources, source, sys_version): + """ + cffi creates a modulename internally that incorporates the cffi version. 
+ This will cause cryptography's wheels to break when the version of cffi + the user has does not match what was used when building the wheel. To + resolve this we build our own modulename that uses most of the same code + from cffi but elides the version key. + """ + key = '\x00'.join([sys_version[:3], source] + cdef_sources) + key = key.encode('utf-8') + k1 = hex(binascii.crc32(key[0::2]) & 0xffffffff) + k1 = k1.lstrip('0x').rstrip('L') + k2 = hex(binascii.crc32(key[1::2]) & 0xffffffff) + k2 = k2.lstrip('0').rstrip('L') + return '_Cryptography_cffi_{0}{1}'.format(k1, k2) diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/__init__.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/__init__.py new file mode 100644 index 0000000..2f42057 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/__init__.py @@ -0,0 +1,14 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py new file mode 100644 index 0000000..2f42057 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/asymmetric/__init__.py @@ -0,0 +1,14 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py new file mode 100644 index 0000000..aa3cdc9 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py @@ -0,0 +1,183 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function + +import six + +from cryptography import utils +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.backends.interfaces import DSABackend +from cryptography.hazmat.primitives import interfaces + + +def _check_dsa_parameters(modulus, subgroup_order, generator): + if ( + not isinstance(modulus, six.integer_types) or + not isinstance(subgroup_order, six.integer_types) or + not isinstance(generator, six.integer_types) + ): + raise TypeError("DSA parameters must be integers") + + if (utils.bit_length(modulus), + utils.bit_length(subgroup_order)) not in ( + (1024, 160), + (2048, 256), + (3072, 256)): + raise ValueError("modulus and subgroup_order lengths must be " + "one of these pairs (1024, 160) or (2048, 256) " + "or (3072, 256)") + + if generator <= 1 or generator >= modulus: + raise ValueError("generator must be > 1 and < modulus") + + +@utils.register_interface(interfaces.DSAParameters) +class DSAParameters(object): + def __init__(self, modulus, subgroup_order, generator): + _check_dsa_parameters(modulus, subgroup_order, generator) + + self._modulus = modulus + self._subgroup_order = subgroup_order + self._generator = generator + + @classmethod + def generate(cls, key_size, backend): + if not isinstance(backend, DSABackend): + raise UnsupportedAlgorithm( + "Backend object does not implement DSABackend", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + return backend.generate_dsa_parameters(key_size) + + @property + def modulus(self): + return self._modulus + + @property + def subgroup_order(self): + return self._subgroup_order + + @property + def generator(self): + return self._generator + + @property + def p(self): + return self.modulus + + @property + def q(self): + return self.subgroup_order + + @property + def g(self): + return self.generator + + +@utils.register_interface(interfaces.DSAPrivateKey) +class DSAPrivateKey(object): + def __init__(self, modulus, 
subgroup_order, generator, x, y): + _check_dsa_parameters(modulus, subgroup_order, generator) + if ( + not isinstance(x, six.integer_types) or + not isinstance(y, six.integer_types) + ): + raise TypeError("DSAPrivateKey arguments must be integers") + + if x <= 0 or x >= subgroup_order: + raise ValueError("x must be > 0 and < subgroup_order") + + if y != pow(generator, x, modulus): + raise ValueError("y must be equal to (generator ** x % modulus)") + + self._modulus = modulus + self._subgroup_order = subgroup_order + self._generator = generator + self._x = x + self._y = y + + @classmethod + def generate(cls, parameters, backend): + if not isinstance(backend, DSABackend): + raise UnsupportedAlgorithm( + "Backend object does not implement DSABackend", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + return backend.generate_dsa_private_key(parameters) + + def signer(self, algorithm, backend): + if not isinstance(backend, DSABackend): + raise UnsupportedAlgorithm( + "Backend object does not implement DSABackend", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + return backend.create_dsa_signature_ctx(self, algorithm) + + @property + def key_size(self): + return utils.bit_length(self._modulus) + + def public_key(self): + return DSAPublicKey(self._modulus, self._subgroup_order, + self._generator, self.y) + + @property + def x(self): + return self._x + + @property + def y(self): + return self._y + + def parameters(self): + return DSAParameters(self._modulus, self._subgroup_order, + self._generator) + + +@utils.register_interface(interfaces.DSAPublicKey) +class DSAPublicKey(object): + def __init__(self, modulus, subgroup_order, generator, y): + _check_dsa_parameters(modulus, subgroup_order, generator) + if not isinstance(y, six.integer_types): + raise TypeError("y must be an integer") + + self._modulus = modulus + self._subgroup_order = subgroup_order + self._generator = generator + self._y = y + + def verifier(self, signature, algorithm, backend): + if not isinstance(backend, 
DSABackend): + raise UnsupportedAlgorithm( + "Backend object does not implement DSABackend", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + return backend.create_dsa_verification_ctx(self, signature, + algorithm) + + @property + def key_size(self): + return utils.bit_length(self._modulus) + + @property + def y(self): + return self._y + + def parameters(self): + return DSAParameters(self._modulus, self._subgroup_order, + self._generator) diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py new file mode 100644 index 0000000..dcc6fe0 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py @@ -0,0 +1,92 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function + +import warnings + +import six + +from cryptography import utils +from cryptography.hazmat.primitives import interfaces + + +@utils.register_interface(interfaces.AsymmetricPadding) +class PKCS1v15(object): + name = "EMSA-PKCS1-v1_5" + + +@utils.register_interface(interfaces.AsymmetricPadding) +class PSS(object): + MAX_LENGTH = object() + name = "EMSA-PSS" + + def __init__(self, mgf, salt_length=None): + self._mgf = mgf + + if salt_length is None: + warnings.warn( + "salt_length is deprecated on MGF1 and should be added via the" + " PSS constructor.", + utils.DeprecatedIn04 + ) + else: + if (not isinstance(salt_length, six.integer_types) and + salt_length is not self.MAX_LENGTH): + raise TypeError("salt_length must be an integer") + + if salt_length is not self.MAX_LENGTH and salt_length < 0: + raise ValueError("salt_length must be zero or greater") + + if salt_length is None and self._mgf._salt_length is None: + raise ValueError("You must supply salt_length") + + self._salt_length = salt_length + + +@utils.register_interface(interfaces.AsymmetricPadding) +class OAEP(object): + name = "EME-OAEP" + + def __init__(self, mgf, algorithm, label): + if not isinstance(algorithm, interfaces.HashAlgorithm): + raise TypeError("Expected instance of interfaces.HashAlgorithm.") + + self._mgf = mgf + self._algorithm = algorithm + self._label = label + + +class MGF1(object): + MAX_LENGTH = object() + + def __init__(self, algorithm, salt_length=None): + if not isinstance(algorithm, interfaces.HashAlgorithm): + raise TypeError("Expected instance of interfaces.HashAlgorithm.") + + self._algorithm = algorithm + + if salt_length is not None: + warnings.warn( + "salt_length is deprecated on MGF1 and should be passed to " + "the PSS constructor instead.", + utils.DeprecatedIn04 + ) + if (not isinstance(salt_length, six.integer_types) and + salt_length is not self.MAX_LENGTH): + raise TypeError("salt_length must be an 
integer") + + if salt_length is not self.MAX_LENGTH and salt_length < 0: + raise ValueError("salt_length must be zero or greater") + + self._salt_length = salt_length diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py new file mode 100644 index 0000000..5d3bb36 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py @@ -0,0 +1,259 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function + +import six + +from cryptography import utils +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.backends.interfaces import RSABackend +from cryptography.hazmat.primitives import interfaces + + +@utils.register_interface(interfaces.RSAPublicKey) +class RSAPublicKey(object): + def __init__(self, public_exponent, modulus): + if ( + not isinstance(public_exponent, six.integer_types) or + not isinstance(modulus, six.integer_types) + ): + raise TypeError("RSAPublicKey arguments must be integers") + + if modulus < 3: + raise ValueError("modulus must be >= 3") + + if public_exponent < 3 or public_exponent >= modulus: + raise ValueError("public_exponent must be >= 3 and < modulus") + + if public_exponent & 1 == 0: + raise ValueError("public_exponent must be odd") + + self._public_exponent = public_exponent + self._modulus = modulus + + def verifier(self, signature, padding, algorithm, backend): + if not isinstance(backend, RSABackend): + raise UnsupportedAlgorithm( + "Backend object does not implement RSABackend", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + return backend.create_rsa_verification_ctx(self, signature, padding, + algorithm) + + def encrypt(self, plaintext, padding, backend): + if not isinstance(backend, RSABackend): + raise UnsupportedAlgorithm( + "Backend object does not implement RSABackend", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + return backend.encrypt_rsa(self, plaintext, padding) + + @property + def key_size(self): + return utils.bit_length(self.modulus) + + @property + def public_exponent(self): + return self._public_exponent + + @property + def modulus(self): + return self._modulus + + @property + def e(self): + return self.public_exponent + + @property + def n(self): + return self.modulus + + +def _modinv(e, m): + """ + Modular Multiplicative Inverse. 
Returns x such that: (x*e) mod m == 1 + """ + x1, y1, x2, y2 = 1, 0, 0, 1 + a, b = e, m + while b > 0: + q, r = divmod(a, b) + xn, yn = x1 - q * x2, y1 - q * y2 + a, b, x1, y1, x2, y2 = b, r, x2, y2, xn, yn + return x1 % m + + +def rsa_crt_iqmp(p, q): + """ + Compute the CRT (q ** -1) % p value from RSA primes p and q. + """ + return _modinv(q, p) + + +def rsa_crt_dmp1(private_exponent, p): + """ + Compute the CRT private_exponent % (p - 1) value from the RSA + private_exponent and p. + """ + return private_exponent % (p - 1) + + +def rsa_crt_dmq1(private_exponent, q): + """ + Compute the CRT private_exponent % (q - 1) value from the RSA + private_exponent and q. + """ + return private_exponent % (q - 1) + + +@utils.register_interface(interfaces.RSAPrivateKey) +class RSAPrivateKey(object): + def __init__(self, p, q, private_exponent, dmp1, dmq1, iqmp, + public_exponent, modulus): + if ( + not isinstance(p, six.integer_types) or + not isinstance(q, six.integer_types) or + not isinstance(dmp1, six.integer_types) or + not isinstance(dmq1, six.integer_types) or + not isinstance(iqmp, six.integer_types) or + not isinstance(private_exponent, six.integer_types) or + not isinstance(public_exponent, six.integer_types) or + not isinstance(modulus, six.integer_types) + ): + raise TypeError("RSAPrivateKey arguments must be integers") + + if modulus < 3: + raise ValueError("modulus must be >= 3") + + if p >= modulus: + raise ValueError("p must be < modulus") + + if q >= modulus: + raise ValueError("q must be < modulus") + + if dmp1 >= modulus: + raise ValueError("dmp1 must be < modulus") + + if dmq1 >= modulus: + raise ValueError("dmq1 must be < modulus") + + if iqmp >= modulus: + raise ValueError("iqmp must be < modulus") + + if private_exponent >= modulus: + raise ValueError("private_exponent must be < modulus") + + if public_exponent < 3 or public_exponent >= modulus: + raise ValueError("public_exponent must be >= 3 and < modulus") + + if public_exponent & 1 == 0: + raise 
ValueError("public_exponent must be odd") + + if dmp1 & 1 == 0: + raise ValueError("dmp1 must be odd") + + if dmq1 & 1 == 0: + raise ValueError("dmq1 must be odd") + + if p * q != modulus: + raise ValueError("p*q must equal modulus") + + self._p = p + self._q = q + self._dmp1 = dmp1 + self._dmq1 = dmq1 + self._iqmp = iqmp + self._private_exponent = private_exponent + self._public_exponent = public_exponent + self._modulus = modulus + + @classmethod + def generate(cls, public_exponent, key_size, backend): + if not isinstance(backend, RSABackend): + raise UnsupportedAlgorithm( + "Backend object does not implement RSABackend", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + return backend.generate_rsa_private_key(public_exponent, key_size) + + def signer(self, padding, algorithm, backend): + if not isinstance(backend, RSABackend): + raise UnsupportedAlgorithm( + "Backend object does not implement RSABackend", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + return backend.create_rsa_signature_ctx(self, padding, algorithm) + + def decrypt(self, ciphertext, padding, backend): + if not isinstance(backend, RSABackend): + raise UnsupportedAlgorithm( + "Backend object does not implement RSABackend", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + return backend.decrypt_rsa(self, ciphertext, padding) + + @property + def key_size(self): + return utils.bit_length(self.modulus) + + def public_key(self): + return RSAPublicKey(self.public_exponent, self.modulus) + + @property + def p(self): + return self._p + + @property + def q(self): + return self._q + + @property + def private_exponent(self): + return self._private_exponent + + @property + def public_exponent(self): + return self._public_exponent + + @property + def modulus(self): + return self._modulus + + @property + def d(self): + return self.private_exponent + + @property + def dmp1(self): + return self._dmp1 + + @property + def dmq1(self): + return self._dmq1 + + @property + def iqmp(self): + return self._iqmp + + @property + def 
e(self): + return self.public_exponent + + @property + def n(self): + return self.modulus diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py new file mode 100644 index 0000000..e5a8ca5 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/ciphers/__init__.py @@ -0,0 +1,21 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +from cryptography.hazmat.primitives.ciphers.base import Cipher + + +__all__ = [ + "Cipher", +] diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py new file mode 100644 index 0000000..52daf17 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py @@ -0,0 +1,147 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. 
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import, division, print_function

from cryptography import utils
from cryptography.hazmat.primitives import interfaces


def _verify_key_size(algorithm, key):
    # Reject any key whose bit length is not one of the algorithm's
    # advertised sizes; return the key unchanged when it is acceptable.
    bit_length = len(key) * 8
    if bit_length not in algorithm.key_sizes:
        raise ValueError("Invalid key size ({0}) for {1}".format(
            bit_length, algorithm.name
        ))
    return key


@utils.register_interface(interfaces.BlockCipherAlgorithm)
@utils.register_interface(interfaces.CipherAlgorithm)
class AES(object):
    """AES block cipher: 128-bit blocks, 128/192/256-bit keys."""

    name = "AES"
    block_size = 128
    key_sizes = frozenset([128, 192, 256])

    def __init__(self, key):
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        # Derived from the stored key rather than cached at construction.
        return len(self.key) * 8


@utils.register_interface(interfaces.BlockCipherAlgorithm)
@utils.register_interface(interfaces.CipherAlgorithm)
class Camellia(object):
    """Camellia block cipher: 128-bit blocks, 128/192/256-bit keys."""

    name = "camellia"
    block_size = 128
    key_sizes = frozenset([128, 192, 256])

    def __init__(self, key):
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        return len(self.key) * 8


@utils.register_interface(interfaces.BlockCipherAlgorithm)
@utils.register_interface(interfaces.CipherAlgorithm)
class TripleDES(object):
    """Triple DES (3DES): 64-bit blocks; keys normalized to 24 bytes."""

    name = "3DES"
    block_size = 64
    key_sizes = frozenset([64, 128, 192])

    def __init__(self, key):
        # Expand single-length (8-byte) and double-length (16-byte) keys
        # to the full three-key (K1, K2, K3) form before validation.
        if len(key) == 8:
            key += key + key
        elif len(key) == 16:
            key += key[:8]
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        return len(self.key) * 8


@utils.register_interface(interfaces.BlockCipherAlgorithm)
@utils.register_interface(interfaces.CipherAlgorithm)
class Blowfish(object):
    """Blowfish block cipher: 64-bit blocks, 32..448-bit keys in 8-bit steps."""

    name = "Blowfish"
    block_size = 64
    key_sizes = frozenset(range(32, 449, 8))

    def __init__(self, key):
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        return len(self.key) * 8


@utils.register_interface(interfaces.BlockCipherAlgorithm)
@utils.register_interface(interfaces.CipherAlgorithm)
class CAST5(object):
    """CAST5 (CAST-128) block cipher: 64-bit blocks, 40..128-bit keys."""

    name = "CAST5"
    block_size = 64
    key_sizes = frozenset(range(40, 129, 8))

    def __init__(self, key):
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        return len(self.key) * 8


@utils.register_interface(interfaces.CipherAlgorithm)
class ARC4(object):
    """RC4 stream cipher; no block size (stream ciphers have none)."""

    name = "RC4"
    key_sizes = frozenset([40, 56, 64, 80, 128, 192, 256])

    def __init__(self, key):
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        return len(self.key) * 8


@utils.register_interface(interfaces.CipherAlgorithm)
class IDEA(object):
    """IDEA block cipher: 64-bit blocks, 128-bit keys only."""

    name = "IDEA"
    block_size = 64
    key_sizes = frozenset([128])

    def __init__(self, key):
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        return len(self.key) * 8


@utils.register_interface(interfaces.BlockCipherAlgorithm)
@utils.register_interface(interfaces.CipherAlgorithm)
class SEED(object):
    """SEED block cipher: 128-bit blocks, 128-bit keys only."""

    name = "SEED"
    block_size = 128
    key_sizes = frozenset([128])

    def __init__(self, key):
        self.key = _verify_key_size(self, key)

    @property
    def key_size(self):
        return len(self.key) * 8
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import, division, print_function

from cryptography import utils
from cryptography.exceptions import (
    AlreadyFinalized, AlreadyUpdated, NotYetFinalized, UnsupportedAlgorithm,
    _Reasons
)
from cryptography.hazmat.backends.interfaces import CipherBackend
from cryptography.hazmat.primitives import interfaces


class Cipher(object):
    """Binds a cipher algorithm to a mode and backend.

    Acts as a factory for encryption and decryption contexts; all actual
    cryptographic work is delegated to the backend-provided context.
    """

    def __init__(self, algorithm, mode, backend):
        if not isinstance(backend, CipherBackend):
            raise UnsupportedAlgorithm(
                "Backend object does not implement CipherBackend",
                _Reasons.BACKEND_MISSING_INTERFACE
            )

        if not isinstance(algorithm, interfaces.CipherAlgorithm):
            raise TypeError("Expected interface of interfaces.CipherAlgorithm")

        # Modes carry their own compatibility rules (e.g. IV size must
        # match the block size); let the mode reject bad pairings early.
        if mode is not None:
            mode.validate_for_algorithm(algorithm)

        self.algorithm = algorithm
        self.mode = mode
        self._backend = backend

    def encryptor(self):
        """Return a fresh context for encrypting data."""
        if (isinstance(self.mode, interfaces.ModeWithAuthenticationTag) and
                self.mode.tag is not None):
            # A pre-supplied tag only makes sense when verifying/decrypting.
            raise ValueError(
                "Authentication tag must be None when encrypting"
            )
        backend_ctx = self._backend.create_symmetric_encryption_ctx(
            self.algorithm, self.mode
        )
        return self._wrap_ctx(backend_ctx, encrypt=True)

    def decryptor(self):
        """Return a fresh context for decrypting data."""
        if (isinstance(self.mode, interfaces.ModeWithAuthenticationTag) and
                self.mode.tag is None):
            raise ValueError(
                "Authentication tag must be provided when decrypting"
            )
        backend_ctx = self._backend.create_symmetric_decryption_ctx(
            self.algorithm, self.mode
        )
        return self._wrap_ctx(backend_ctx, encrypt=False)

    def _wrap_ctx(self, ctx, encrypt):
        # Choose the wrapper matching the mode: AEAD modes get tag-aware
        # contexts, everything else gets the plain pass-through wrapper.
        if not isinstance(self.mode, interfaces.ModeWithAuthenticationTag):
            return _CipherContext(ctx)
        if encrypt:
            return _AEADEncryptionContext(ctx)
        return _AEADCipherContext(ctx)


@utils.register_interface(interfaces.CipherContext)
class _CipherContext(object):
    """Thin wrapper that enforces the finalize-once contract."""

    def __init__(self, ctx):
        self._ctx = ctx

    def update(self, data):
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized")
        return self._ctx.update(data)

    def finalize(self):
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized")
        data = self._ctx.finalize()
        # Drop the backend context so further use raises AlreadyFinalized.
        self._ctx = None
        return data


@utils.register_interface(interfaces.AEADCipherContext)
@utils.register_interface(interfaces.CipherContext)
class _AEADCipherContext(object):
    """AEAD wrapper: adds AAD handling and captures the tag on finalize."""

    def __init__(self, ctx):
        self._ctx = ctx
        self._tag = None
        self._updated = False

    def update(self, data):
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized")
        # AAD must be supplied before any plaintext/ciphertext; remember
        # that update() has happened so we can reject late AAD calls.
        self._updated = True
        return self._ctx.update(data)

    def finalize(self):
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized")
        data = self._ctx.finalize()
        self._tag = self._ctx.tag
        self._ctx = None
        return data

    def authenticate_additional_data(self, data):
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized")
        if self._updated:
            raise AlreadyUpdated("Update has been called on this context")
        self._ctx.authenticate_additional_data(data)


@utils.register_interface(interfaces.AEADEncryptionContext)
class _AEADEncryptionContext(_AEADCipherContext):
    """Encryption-side AEAD context that exposes the computed tag."""

    @property
    def tag(self):
        # The tag only exists once the backend context has been finalized.
        if self._ctx is not None:
            raise NotYetFinalized("You must finalize encryption before "
                                  "getting the tag")
        return self._tag
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import, division, print_function

from cryptography import utils
from cryptography.hazmat.primitives import interfaces


def _check_iv_length(mode, algorithm):
    # Shared invariant for IV-carrying modes: the IV must be exactly one
    # cipher block wide.
    if len(mode.initialization_vector) * 8 != algorithm.block_size:
        raise ValueError("Invalid iv size ({0}) for {1}".format(
            len(mode.initialization_vector), mode.name
        ))


@utils.register_interface(interfaces.Mode)
@utils.register_interface(interfaces.ModeWithInitializationVector)
class CBC(object):
    """Cipher Block Chaining mode; requires a block-sized IV."""

    name = "CBC"

    def __init__(self, initialization_vector):
        self.initialization_vector = initialization_vector

    def validate_for_algorithm(self, algorithm):
        _check_iv_length(self, algorithm)


@utils.register_interface(interfaces.Mode)
class ECB(object):
    """Electronic Codebook mode; no IV, so nothing to validate."""

    name = "ECB"

    def validate_for_algorithm(self, algorithm):
        pass


@utils.register_interface(interfaces.Mode)
@utils.register_interface(interfaces.ModeWithInitializationVector)
class OFB(object):
    """Output Feedback mode; requires a block-sized IV."""

    name = "OFB"

    def __init__(self, initialization_vector):
        self.initialization_vector = initialization_vector

    def validate_for_algorithm(self, algorithm):
        _check_iv_length(self, algorithm)


@utils.register_interface(interfaces.Mode)
@utils.register_interface(interfaces.ModeWithInitializationVector)
class CFB(object):
    """Cipher Feedback mode; requires a block-sized IV."""

    name = "CFB"

    def __init__(self, initialization_vector):
        self.initialization_vector = initialization_vector

    def validate_for_algorithm(self, algorithm):
        _check_iv_length(self, algorithm)


@utils.register_interface(interfaces.Mode)
@utils.register_interface(interfaces.ModeWithNonce)
class CTR(object):
    """Counter mode; requires a block-sized nonce."""

    name = "CTR"

    def __init__(self, nonce):
        self.nonce = nonce

    def validate_for_algorithm(self, algorithm):
        if len(self.nonce) * 8 != algorithm.block_size:
            raise ValueError("Invalid nonce size ({0}) for {1}".format(
                len(self.nonce), self.name
            ))


@utils.register_interface(interfaces.Mode)
@utils.register_interface(interfaces.ModeWithInitializationVector)
@utils.register_interface(interfaces.ModeWithAuthenticationTag)
class GCM(object):
    """Galois/Counter mode (AEAD); tag is supplied only when decrypting."""

    name = "GCM"

    def __init__(self, initialization_vector, tag=None):
        # len(initialization_vector) must in [1, 2 ** 64), but it's impossible
        # to actually construct a bytes object that large, so we don't check
        # for it
        if tag is not None and len(tag) < 4:
            raise ValueError(
                "Authentication tag must be 4 bytes or longer"
            )

        self.initialization_vector = initialization_vector
        self.tag = tag

    def validate_for_algorithm(self, algorithm):
        # GCM accepts any IV length (see note above), so no check here.
        pass
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import, division, print_function

import six

from cryptography import utils
from cryptography.exceptions import (
    AlreadyFinalized, InvalidSignature, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.backends.interfaces import CMACBackend
from cryptography.hazmat.primitives import constant_time, interfaces


@utils.register_interface(interfaces.CMACContext)
class CMAC(object):
    """CMAC message authentication code over a block cipher algorithm.

    The real computation lives in the backend-provided context; this
    class enforces argument types and the finalize-once contract.
    """

    def __init__(self, algorithm, backend, ctx=None):
        if not isinstance(backend, CMACBackend):
            raise UnsupportedAlgorithm(
                "Backend object does not implement CMACBackend",
                _Reasons.BACKEND_MISSING_INTERFACE
            )

        if not isinstance(algorithm, interfaces.BlockCipherAlgorithm):
            raise TypeError(
                "Expected instance of interfaces.BlockCipherAlgorithm"
            )
        self._algorithm = algorithm
        self._backend = backend

        # ctx is only passed internally by copy(); normal construction
        # asks the backend for a fresh context.
        if ctx is None:
            self._ctx = self._backend.create_cmac_ctx(self._algorithm)
        else:
            self._ctx = ctx

    def update(self, data):
        """Feed more bytes into the MAC computation."""
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized")
        if isinstance(data, six.text_type):
            raise TypeError("Unicode-objects must be encoded before hashing")
        self._ctx.update(data)

    def finalize(self):
        """Finish the computation and return the MAC bytes."""
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized")
        digest = self._ctx.finalize()
        # Invalidate so further update/finalize calls raise.
        self._ctx = None
        return digest

    def verify(self, signature):
        """Finalize and compare against *signature* in constant time."""
        if isinstance(signature, six.text_type):
            raise TypeError("Unicode-objects must be encoded before verifying")
        digest = self.finalize()
        if not constant_time.bytes_eq(digest, signature):
            raise InvalidSignature("Signature did not match digest.")

    def copy(self):
        """Return an independent CMAC sharing this context's state so far."""
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized")
        return CMAC(
            self._algorithm,
            backend=self._backend,
            ctx=self._ctx.copy()
        )
+ +from __future__ import absolute_import, division, print_function + +import sys + +import cffi + +import six + +from cryptography.hazmat.bindings.utils import _create_modulename + +TYPES = """ +uint8_t Cryptography_constant_time_bytes_eq(uint8_t *, size_t, uint8_t *, + size_t); +""" + +FUNCTIONS = """ +uint8_t Cryptography_constant_time_bytes_eq(uint8_t *a, size_t len_a, + uint8_t *b, size_t len_b) { + size_t i = 0; + uint8_t mismatch = 0; + if (len_a != len_b) { + return 0; + } + for (i = 0; i < len_a; i++) { + mismatch |= a[i] ^ b[i]; + } + + /* Make sure any bits set are copied to the lowest bit */ + mismatch |= mismatch >> 4; + mismatch |= mismatch >> 2; + mismatch |= mismatch >> 1; + /* Now check the low bit to see if it's set */ + return (mismatch & 1) == 0; +} +""" + +_ffi = cffi.FFI() +_ffi.cdef(TYPES) +_lib = _ffi.verify( + source=FUNCTIONS, + modulename=_create_modulename([TYPES], FUNCTIONS, sys.version), + ext_package="cryptography", +) + + +def bytes_eq(a, b): + if isinstance(a, six.text_type) or isinstance(b, six.text_type): + raise TypeError("Unicode-objects must be encoded before comparing") + + return _lib.Cryptography_constant_time_bytes_eq(a, len(a), b, len(b)) == 1 diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/hashes.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/hashes.py new file mode 100644 index 0000000..35b677b --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/hashes.py @@ -0,0 +1,121 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. 
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import, division, print_function

import six

from cryptography import utils
from cryptography.exceptions import (
    AlreadyFinalized, UnsupportedAlgorithm, _Reasons
)
from cryptography.hazmat.backends.interfaces import HashBackend
from cryptography.hazmat.primitives import interfaces


@utils.register_interface(interfaces.HashContext)
class Hash(object):
    """Incremental message digest bound to a backend.

    Validates argument types and enforces the finalize-once contract;
    the hashing itself is delegated to the backend context.
    """

    def __init__(self, algorithm, backend, ctx=None):
        if not isinstance(backend, HashBackend):
            raise UnsupportedAlgorithm(
                "Backend object does not implement HashBackend",
                _Reasons.BACKEND_MISSING_INTERFACE
            )

        if not isinstance(algorithm, interfaces.HashAlgorithm):
            raise TypeError("Expected instance of interfaces.HashAlgorithm.")
        self.algorithm = algorithm
        self._backend = backend

        # ctx is only supplied internally by copy(); otherwise ask the
        # backend for a fresh hashing context.
        if ctx is None:
            self._ctx = self._backend.create_hash_ctx(self.algorithm)
        else:
            self._ctx = ctx

    def update(self, data):
        """Feed more bytes into the digest."""
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized")
        if isinstance(data, six.text_type):
            raise TypeError("Unicode-objects must be encoded before hashing")
        self._ctx.update(data)

    def copy(self):
        """Return an independent Hash sharing the state accumulated so far."""
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized")
        return Hash(
            self.algorithm, backend=self._backend, ctx=self._ctx.copy()
        )

    def finalize(self):
        """Finish hashing and return the digest bytes."""
        if self._ctx is None:
            raise AlreadyFinalized("Context was already finalized")
        digest = self._ctx.finalize()
        # Invalidate so further use raises AlreadyFinalized.
        self._ctx = None
        return digest


# Algorithm descriptors: name as known to backends, digest size and
# internal block size in bytes.

@utils.register_interface(interfaces.HashAlgorithm)
class SHA1(object):
    name = "sha1"
    digest_size = 20
    block_size = 64


@utils.register_interface(interfaces.HashAlgorithm)
class SHA224(object):
    name = "sha224"
    digest_size = 28
    block_size = 64


@utils.register_interface(interfaces.HashAlgorithm)
class SHA256(object):
    name = "sha256"
    digest_size = 32
    block_size = 64


@utils.register_interface(interfaces.HashAlgorithm)
class SHA384(object):
    name = "sha384"
    digest_size = 48
    block_size = 128


@utils.register_interface(interfaces.HashAlgorithm)
class SHA512(object):
    name = "sha512"
    digest_size = 64
    block_size = 128


@utils.register_interface(interfaces.HashAlgorithm)
class RIPEMD160(object):
    name = "ripemd160"
    digest_size = 20
    block_size = 64


@utils.register_interface(interfaces.HashAlgorithm)
class Whirlpool(object):
    name = "whirlpool"
    digest_size = 64
    block_size = 64


@utils.register_interface(interfaces.HashAlgorithm)
class MD5(object):
    name = "md5"
    digest_size = 16
    block_size = 64
+ +from __future__ import absolute_import, division, print_function + +import six + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, InvalidSignature, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.interfaces import HMACBackend +from cryptography.hazmat.primitives import constant_time, interfaces + + +@utils.register_interface(interfaces.HashContext) +class HMAC(object): + def __init__(self, key, algorithm, backend, ctx=None): + if not isinstance(backend, HMACBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement HMACBackend", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + if not isinstance(algorithm, interfaces.HashAlgorithm): + raise TypeError("Expected instance of interfaces.HashAlgorithm.") + self.algorithm = algorithm + + self._backend = backend + self._key = key + if ctx is None: + self._ctx = self._backend.create_hmac_ctx(key, self.algorithm) + else: + self._ctx = ctx + + def update(self, msg): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized") + if isinstance(msg, six.text_type): + raise TypeError("Unicode-objects must be encoded before hashing") + self._ctx.update(msg) + + def copy(self): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized") + return HMAC( + self._key, + self.algorithm, + backend=self._backend, + ctx=self._ctx.copy() + ) + + def finalize(self): + if self._ctx is None: + raise AlreadyFinalized("Context was already finalized") + digest = self._ctx.finalize() + self._ctx = None + return digest + + def verify(self, signature): + if isinstance(signature, six.text_type): + raise TypeError("Unicode-objects must be encoded before verifying") + digest = self.finalize() + if not constant_time.bytes_eq(digest, signature): + raise InvalidSignature("Signature did not match digest.") diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/interfaces.py 
b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/interfaces.py new file mode 100644 index 0000000..810a67a --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/interfaces.py @@ -0,0 +1,491 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +import abc + +import six + + +@six.add_metaclass(abc.ABCMeta) +class CipherAlgorithm(object): + @abc.abstractproperty + def name(self): + """ + A string naming this mode (e.g. "AES", "Camellia"). + """ + + @abc.abstractproperty + def key_size(self): + """ + The size of the key being used as an integer in bits (e.g. 128, 256). + """ + + +@six.add_metaclass(abc.ABCMeta) +class BlockCipherAlgorithm(object): + @abc.abstractproperty + def block_size(self): + """ + The size of a block as an integer in bits (e.g. 64, 128). + """ + + +@six.add_metaclass(abc.ABCMeta) +class Mode(object): + @abc.abstractproperty + def name(self): + """ + A string naming this mode (e.g. "ECB", "CBC"). + """ + + @abc.abstractmethod + def validate_for_algorithm(self, algorithm): + """ + Checks that all the necessary invariants of this (mode, algorithm) + combination are met. + """ + + +@six.add_metaclass(abc.ABCMeta) +class ModeWithInitializationVector(object): + @abc.abstractproperty + def initialization_vector(self): + """ + The value of the initialization vector for this mode as bytes. 
+ """ + + +@six.add_metaclass(abc.ABCMeta) +class ModeWithNonce(object): + @abc.abstractproperty + def nonce(self): + """ + The value of the nonce for this mode as bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class ModeWithAuthenticationTag(object): + @abc.abstractproperty + def tag(self): + """ + The value of the tag supplied to the constructor of this mode. + """ + + +@six.add_metaclass(abc.ABCMeta) +class CipherContext(object): + @abc.abstractmethod + def update(self, data): + """ + Processes the provided bytes through the cipher and returns the results + as bytes. + """ + + @abc.abstractmethod + def finalize(self): + """ + Returns the results of processing the final block as bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class AEADCipherContext(object): + @abc.abstractmethod + def authenticate_additional_data(self, data): + """ + Authenticates the provided bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class AEADEncryptionContext(object): + @abc.abstractproperty + def tag(self): + """ + Returns tag bytes. This is only available after encryption is + finalized. + """ + + +@six.add_metaclass(abc.ABCMeta) +class PaddingContext(object): + @abc.abstractmethod + def update(self, data): + """ + Pads the provided bytes and returns any available data as bytes. + """ + + @abc.abstractmethod + def finalize(self): + """ + Finalize the padding, returns bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class HashAlgorithm(object): + @abc.abstractproperty + def name(self): + """ + A string naming this algorithm (e.g. "sha256", "md5"). + """ + + @abc.abstractproperty + def digest_size(self): + """ + The size of the resulting digest in bytes. + """ + + @abc.abstractproperty + def block_size(self): + """ + The internal block size of the hash algorithm in bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class HashContext(object): + @abc.abstractproperty + def algorithm(self): + """ + A HashAlgorithm that will be used by this context. 
+ """ + + @abc.abstractmethod + def update(self, data): + """ + Processes the provided bytes through the hash. + """ + + @abc.abstractmethod + def finalize(self): + """ + Finalizes the hash context and returns the hash digest as bytes. + """ + + @abc.abstractmethod + def copy(self): + """ + Return a HashContext that is a copy of the current context. + """ + + +@six.add_metaclass(abc.ABCMeta) +class RSAPrivateKey(object): + @abc.abstractmethod + def signer(self, padding, algorithm, backend): + """ + Returns an AsymmetricSignatureContext used for signing data. + """ + + @abc.abstractproperty + def modulus(self): + """ + The public modulus of the RSA key. + """ + + @abc.abstractproperty + def public_exponent(self): + """ + The public exponent of the RSA key. + """ + + @abc.abstractproperty + def private_exponent(self): + """ + The private exponent of the RSA key. + """ + + @abc.abstractproperty + def key_size(self): + """ + The bit length of the public modulus. + """ + + @abc.abstractmethod + def public_key(self): + """ + The RSAPublicKey associated with this private key. + """ + + @abc.abstractproperty + def n(self): + """ + The public modulus of the RSA key. Alias for modulus. + """ + + @abc.abstractproperty + def p(self): + """ + One of the two primes used to generate d. + """ + + @abc.abstractproperty + def q(self): + """ + One of the two primes used to generate d. + """ + + @abc.abstractproperty + def d(self): + """ + The private exponent. This can be calculated using p and q. Alias for + private_exponent. + """ + + @abc.abstractproperty + def dmp1(self): + """ + A Chinese remainder theorem coefficient used to speed up RSA + calculations. Calculated as: d mod (p-1) + """ + + @abc.abstractproperty + def dmq1(self): + """ + A Chinese remainder theorem coefficient used to speed up RSA + calculations. Calculated as: d mod (q-1) + """ + + @abc.abstractproperty + def iqmp(self): + """ + A Chinese remainder theorem coefficient used to speed up RSA + calculations. 
The modular inverse of q modulo p + """ + + @abc.abstractproperty + def e(self): + """ + The public exponent of the RSA key. Alias for public_exponent. + """ + + +@six.add_metaclass(abc.ABCMeta) +class RSAPublicKey(object): + @abc.abstractmethod + def verifier(self, signature, padding, algorithm, backend): + """ + Returns an AsymmetricVerificationContext used for verifying signatures. + """ + + @abc.abstractproperty + def modulus(self): + """ + The public modulus of the RSA key. + """ + + @abc.abstractproperty + def public_exponent(self): + """ + The public exponent of the RSA key. + """ + + @abc.abstractproperty + def key_size(self): + """ + The bit length of the public modulus. + """ + + @abc.abstractproperty + def n(self): + """ + The public modulus of the RSA key. Alias for modulus. + """ + + @abc.abstractproperty + def e(self): + """ + The public exponent of the RSA key. Alias for public_exponent. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DSAParameters(object): + @abc.abstractproperty + def modulus(self): + """ + The prime modulus that's used in generating the DSA keypair and used + in the DSA signing and verification processes. + """ + + @abc.abstractproperty + def subgroup_order(self): + """ + The subgroup order that's used in generating the DSA keypair + by the generator and used in the DSA signing and verification + processes. + """ + + @abc.abstractproperty + def generator(self): + """ + The generator that is used in generating the DSA keypair and used + in the DSA signing and verification processes. + """ + + @abc.abstractproperty + def p(self): + """ + The prime modulus that's used in generating the DSA keypair and used + in the DSA signing and verification processes. Alias for modulus. + """ + + @abc.abstractproperty + def q(self): + """ + The subgroup order that's used in generating the DSA keypair + by the generator and used in the DSA signing and verification + processes. Alias for subgroup_order. 
+ """ + + @abc.abstractproperty + def g(self): + """ + The generator that is used in generating the DSA keypair and used + in the DSA signing and verification processes. Alias for generator. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DSAPrivateKey(object): + @abc.abstractproperty + def key_size(self): + """ + The bit length of the prime modulus. + """ + + @abc.abstractmethod + def public_key(self): + """ + The DSAPublicKey associated with this private key. + """ + + @abc.abstractproperty + def x(self): + """ + The private key "x" in the DSA structure. + """ + + @abc.abstractproperty + def y(self): + """ + The public key. + """ + + @abc.abstractmethod + def parameters(self): + """ + The DSAParameters object associated with this private key. + """ + + +@six.add_metaclass(abc.ABCMeta) +class DSAPublicKey(object): + @abc.abstractproperty + def key_size(self): + """ + The bit length of the prime modulus. + """ + + @abc.abstractproperty + def y(self): + """ + The public key. + """ + + @abc.abstractmethod + def parameters(self): + """ + The DSAParameters object associated with this public key. + """ + + +@six.add_metaclass(abc.ABCMeta) +class AsymmetricSignatureContext(object): + @abc.abstractmethod + def update(self, data): + """ + Processes the provided bytes and returns nothing. + """ + + @abc.abstractmethod + def finalize(self): + """ + Returns the signature as bytes. + """ + + +@six.add_metaclass(abc.ABCMeta) +class AsymmetricVerificationContext(object): + @abc.abstractmethod + def update(self, data): + """ + Processes the provided bytes and returns nothing. + """ + + @abc.abstractmethod + def verify(self): + """ + Raises an exception if the bytes provided to update do not match the + signature or the signature does not match the public key. + """ + + +@six.add_metaclass(abc.ABCMeta) +class AsymmetricPadding(object): + @abc.abstractproperty + def name(self): + """ + A string naming this padding (e.g. "PSS", "PKCS1"). 
+ """ + + +@six.add_metaclass(abc.ABCMeta) +class KeyDerivationFunction(object): + @abc.abstractmethod + def derive(self, key_material): + """ + Deterministically generates and returns a new key based on the existing + key material. + """ + + @abc.abstractmethod + def verify(self, key_material, expected_key): + """ + Checks whether the key generated by the key material matches the + expected derived key. Raises an exception if they do not match. + """ + + +@six.add_metaclass(abc.ABCMeta) +class CMACContext(object): + @abc.abstractmethod + def update(self, data): + """ + Processes the provided bytes. + """ + + def finalize(self): + """ + Returns the message authentication code as bytes. + """ + + @abc.abstractmethod + def copy(self): + """ + Return a CMACContext that is a copy of the current context. + """ diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/kdf/__init__.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/kdf/__init__.py new file mode 100644 index 0000000..2f42057 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/kdf/__init__.py @@ -0,0 +1,14 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py new file mode 100644 index 0000000..03500aa --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py @@ -0,0 +1,102 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +import six + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.interfaces import HMACBackend +from cryptography.hazmat.primitives import constant_time, hmac, interfaces + + +@utils.register_interface(interfaces.KeyDerivationFunction) +class HKDF(object): + def __init__(self, algorithm, length, salt, info, backend): + if not isinstance(backend, HMACBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement HMACBackend", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + self._algorithm = algorithm + + max_length = 255 * (algorithm.digest_size // 8) + + if length > max_length: + raise ValueError( + "Can not derive keys larger than {0} octets.".format( + max_length + )) + + self._length = length + + if isinstance(salt, six.text_type): + raise TypeError( + "Unicode-objects must be encoded before using them as a salt.") + 
+ if salt is None: + salt = b"\x00" * (self._algorithm.digest_size // 8) + + self._salt = salt + + if isinstance(info, six.text_type): + raise TypeError( + "Unicode-objects must be encoded before using them as info.") + + if info is None: + info = b"" + + self._info = info + self._backend = backend + + self._used = False + + def _extract(self, key_material): + h = hmac.HMAC(self._salt, self._algorithm, backend=self._backend) + h.update(key_material) + return h.finalize() + + def _expand(self, key_material): + output = [b""] + counter = 1 + + while (self._algorithm.digest_size // 8) * len(output) < self._length: + h = hmac.HMAC(key_material, self._algorithm, backend=self._backend) + h.update(output[-1]) + h.update(self._info) + h.update(six.int2byte(counter)) + output.append(h.finalize()) + counter += 1 + + return b"".join(output)[:self._length] + + def derive(self, key_material): + if isinstance(key_material, six.text_type): + raise TypeError( + "Unicode-objects must be encoded before using them as key " + "material." + ) + + if self._used: + raise AlreadyFinalized + + self._used = True + return self._expand(self._extract(key_material)) + + def verify(self, key_material, expected_key): + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py new file mode 100644 index 0000000..bec35bb --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py @@ -0,0 +1,74 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +import six + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.interfaces import PBKDF2HMACBackend +from cryptography.hazmat.primitives import constant_time, interfaces + + +@utils.register_interface(interfaces.KeyDerivationFunction) +class PBKDF2HMAC(object): + def __init__(self, algorithm, length, salt, iterations, backend): + if not isinstance(backend, PBKDF2HMACBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement PBKDF2HMACBackend", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + if not backend.pbkdf2_hmac_supported(algorithm): + raise UnsupportedAlgorithm( + "{0} is not supported for PBKDF2 by this backend".format( + algorithm.name), + _Reasons.UNSUPPORTED_HASH + ) + self._used = False + self._algorithm = algorithm + self._length = length + if isinstance(salt, six.text_type): + raise TypeError( + "Unicode-objects must be encoded before using them as key " + "material." + ) + self._salt = salt + self._iterations = iterations + self._backend = backend + + def derive(self, key_material): + if self._used: + raise AlreadyFinalized("PBKDF2 instances can only be used once") + self._used = True + + if isinstance(key_material, six.text_type): + raise TypeError( + "Unicode-objects must be encoded before using them as key " + "material." 
+ ) + return self._backend.derive_pbkdf2_hmac( + self._algorithm, + self._length, + self._salt, + self._iterations, + key_material + ) + + def verify(self, key_material, expected_key): + derived_key = self.derive(key_material) + if not constant_time.bytes_eq(derived_key, expected_key): + raise InvalidKey("Keys do not match.") diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/padding.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/padding.py new file mode 100644 index 0000000..c1a763b --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/padding.py @@ -0,0 +1,172 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +import sys + +import cffi + +import six + +from cryptography import utils +from cryptography.exceptions import AlreadyFinalized +from cryptography.hazmat.bindings.utils import _create_modulename +from cryptography.hazmat.primitives import interfaces + + +TYPES = """ +uint8_t Cryptography_check_pkcs7_padding(const uint8_t *, uint8_t); +""" + +FUNCTIONS = """ +/* Returns the value of the input with the most-significant-bit copied to all + of the bits. 
*/ +static uint8_t Cryptography_DUPLICATE_MSB_TO_ALL(uint8_t a) { + return (1 - (a >> (sizeof(uint8_t) * 8 - 1))) - 1; +} + +/* This returns 0xFF if a < b else 0x00, but does so in a constant time + fashion */ +static uint8_t Cryptography_constant_time_lt(uint8_t a, uint8_t b) { + a -= b; + return Cryptography_DUPLICATE_MSB_TO_ALL(a); +} + +uint8_t Cryptography_check_pkcs7_padding(const uint8_t *data, + uint8_t block_len) { + uint8_t i; + uint8_t pad_size = data[block_len - 1]; + uint8_t mismatch = 0; + for (i = 0; i < block_len; i++) { + unsigned int mask = Cryptography_constant_time_lt(i, pad_size); + uint8_t b = data[block_len - 1 - i]; + mismatch |= (mask & (pad_size ^ b)); + } + + /* Check to make sure the pad_size was within the valid range. */ + mismatch |= ~Cryptography_constant_time_lt(0, pad_size); + mismatch |= Cryptography_constant_time_lt(block_len, pad_size); + + /* Make sure any bits set are copied to the lowest bit */ + mismatch |= mismatch >> 4; + mismatch |= mismatch >> 2; + mismatch |= mismatch >> 1; + /* Now check the low bit to see if it's set */ + return (mismatch & 1) == 0; +} +""" + +_ffi = cffi.FFI() +_ffi.cdef(TYPES) +_lib = _ffi.verify( + source=FUNCTIONS, + modulename=_create_modulename([TYPES], FUNCTIONS, sys.version), + ext_package="cryptography", +) + + +class PKCS7(object): + def __init__(self, block_size): + if not (0 <= block_size < 256): + raise ValueError("block_size must be in range(0, 256)") + + if block_size % 8 != 0: + raise ValueError("block_size must be a multiple of 8") + + self.block_size = block_size + + def padder(self): + return _PKCS7PaddingContext(self.block_size) + + def unpadder(self): + return _PKCS7UnpaddingContext(self.block_size) + + +@utils.register_interface(interfaces.PaddingContext) +class _PKCS7PaddingContext(object): + def __init__(self, block_size): + self.block_size = block_size + # TODO: more copies than necessary, we should use zero-buffer (#193) + self._buffer = b"" + + def update(self, data): + if 
self._buffer is None: + raise AlreadyFinalized("Context was already finalized") + + if isinstance(data, six.text_type): + raise TypeError("Unicode-objects must be encoded before padding") + + self._buffer += data + + finished_blocks = len(self._buffer) // (self.block_size // 8) + + result = self._buffer[:finished_blocks * (self.block_size // 8)] + self._buffer = self._buffer[finished_blocks * (self.block_size // 8):] + + return result + + def finalize(self): + if self._buffer is None: + raise AlreadyFinalized("Context was already finalized") + + pad_size = self.block_size // 8 - len(self._buffer) + result = self._buffer + six.int2byte(pad_size) * pad_size + self._buffer = None + return result + + +@utils.register_interface(interfaces.PaddingContext) +class _PKCS7UnpaddingContext(object): + def __init__(self, block_size): + self.block_size = block_size + # TODO: more copies than necessary, we should use zero-buffer (#193) + self._buffer = b"" + + def update(self, data): + if self._buffer is None: + raise AlreadyFinalized("Context was already finalized") + + if isinstance(data, six.text_type): + raise TypeError("Unicode-objects must be encoded before unpadding") + + self._buffer += data + + finished_blocks = max( + len(self._buffer) // (self.block_size // 8) - 1, + 0 + ) + + result = self._buffer[:finished_blocks * (self.block_size // 8)] + self._buffer = self._buffer[finished_blocks * (self.block_size // 8):] + + return result + + def finalize(self): + if self._buffer is None: + raise AlreadyFinalized("Context was already finalized") + + if len(self._buffer) != self.block_size // 8: + raise ValueError("Invalid padding bytes") + + valid = _lib.Cryptography_check_pkcs7_padding( + self._buffer, self.block_size // 8 + ) + + if not valid: + raise ValueError("Invalid padding bytes") + + pad_size = six.indexbytes(self._buffer, -1) + res = self._buffer[:-pad_size] + self._buffer = None + return res diff --git 
a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py new file mode 100644 index 0000000..2f42057 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/twofactor/__init__.py @@ -0,0 +1,14 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py new file mode 100644 index 0000000..41c467c --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py @@ -0,0 +1,71 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function + +import struct + +import six + +from cryptography.exceptions import ( + InvalidToken, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.interfaces import HMACBackend +from cryptography.hazmat.primitives import constant_time, hmac +from cryptography.hazmat.primitives.hashes import SHA1, SHA256, SHA512 + + +class HOTP(object): + def __init__(self, key, length, algorithm, backend): + if not isinstance(backend, HMACBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement HMACBackend", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + if len(key) < 16: + raise ValueError("Key length has to be at least 128 bits.") + + if not isinstance(length, six.integer_types): + raise TypeError("Length parameter must be an integer type") + + if length < 6 or length > 8: + raise ValueError("Length of HOTP has to be between 6 to 8.") + + if not isinstance(algorithm, (SHA1, SHA256, SHA512)): + raise TypeError("Algorithm must be SHA1, SHA256 or SHA512") + + self._key = key + self._length = length + self._algorithm = algorithm + self._backend = backend + + def generate(self, counter): + truncated_value = self._dynamic_truncate(counter) + hotp = truncated_value % (10 ** self._length) + return "{0:0{1}}".format(hotp, self._length).encode() + + def verify(self, hotp, counter): + if not constant_time.bytes_eq(self.generate(counter), hotp): + raise InvalidToken("Supplied HOTP value does not match") + + def _dynamic_truncate(self, counter): + ctx = hmac.HMAC(self._key, self._algorithm, self._backend) + ctx.update(struct.pack(">Q", counter)) + hmac_value = ctx.finalize() + + offset_bits = six.indexbytes(hmac_value, len(hmac_value) - 1) & 0b1111 + + offset = int(offset_bits) + p = hmac_value[offset:offset + 4] + return struct.unpack(">I", p)[0] & 0x7fffffff diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/twofactor/totp.py 
b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/twofactor/totp.py new file mode 100644 index 0000000..e55ba00 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/twofactor/totp.py @@ -0,0 +1,41 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +from cryptography.exceptions import ( + InvalidToken, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.backends.interfaces import HMACBackend +from cryptography.hazmat.primitives import constant_time +from cryptography.hazmat.primitives.twofactor.hotp import HOTP + + +class TOTP(object): + def __init__(self, key, length, algorithm, time_step, backend): + if not isinstance(backend, HMACBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement HMACBackend", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + self._time_step = time_step + self._hotp = HOTP(key, length, algorithm, backend) + + def generate(self, time): + counter = int(time / self._time_step) + return self._hotp.generate(counter) + + def verify(self, totp, time): + if not constant_time.bytes_eq(self.generate(time), totp): + raise InvalidToken("Supplied TOTP value does not match") diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/utils.py b/Linux_i686/lib/python2.7/site-packages/cryptography/utils.py new file mode 100644 index 0000000..5566d12 --- /dev/null +++ 
b/Linux_i686/lib/python2.7/site-packages/cryptography/utils.py @@ -0,0 +1,33 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +import sys + + +DeprecatedIn04 = PendingDeprecationWarning + + +def register_interface(iface): + def register_decorator(klass): + iface.register(klass) + return klass + return register_decorator + + +def bit_length(x): + if sys.version_info >= (2, 7): + return x.bit_length() + else: + return len(bin(x)) - (2 + (x <= 0)) diff --git a/Linux_i686/lib/python2.7/site-packages/ed25519-1.2.egg-info/PKG-INFO b/Linux_i686/lib/python2.7/site-packages/ed25519-1.2.egg-info/PKG-INFO new file mode 100644 index 0000000..b76dc63 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/ed25519-1.2.egg-info/PKG-INFO @@ -0,0 +1,19 @@ +Metadata-Version: 1.0 +Name: ed25519 +Version: 1.2 +Summary: Ed25519 public-key signatures +Home-page: https://github.com/warner/python-ed25519 +Author: Brian Warner +Author-email: warner-python-ed25519@lothar.com +License: MIT +Description: Python bindings to the Ed25519 public-key signature system. + + This offers a comfortable python interface to a C implementation of the + Ed25519 public-key signature system (http://ed25519.cr.yp.to/), using the + portable 'ref' code from the 'SUPERCOP' benchmarking suite. + + This system provides high (128-bit) security, short (32-byte) keys, short + (64-byte) signatures, and fast (2-6ms) operation. 
Please see the README for + more details. + +Platform: UNKNOWN diff --git a/Linux_i686/lib/python2.7/site-packages/ed25519-1.2.egg-info/SOURCES.txt b/Linux_i686/lib/python2.7/site-packages/ed25519-1.2.egg-info/SOURCES.txt new file mode 100644 index 0000000..c533380 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/ed25519-1.2.egg-info/SOURCES.txt @@ -0,0 +1,38 @@ +LICENSE +MANIFEST.in +Makefile +NEWS +README.rst +kat-ed25519.txt +kat.py +test_ed25519_kat.py +versioneer.py +bin/edsig +ed25519.egg-info/PKG-INFO +ed25519.egg-info/SOURCES.txt +ed25519.egg-info/dependency_links.txt +ed25519.egg-info/top_level.txt +src/ed25519/__init__.py +src/ed25519/_version.py +src/ed25519/keys.py +src/ed25519/test_ed25519.py +src/ed25519-glue/ed25519module.c +src/ed25519-supercop-ref/Makefile +src/ed25519-supercop-ref/api.h +src/ed25519-supercop-ref/crypto_int32.h +src/ed25519-supercop-ref/crypto_sign.h +src/ed25519-supercop-ref/crypto_uint32.h +src/ed25519-supercop-ref/crypto_verify_32.h +src/ed25519-supercop-ref/ed25519.c +src/ed25519-supercop-ref/fe25519.c +src/ed25519-supercop-ref/fe25519.h +src/ed25519-supercop-ref/ge25519.c +src/ed25519-supercop-ref/ge25519.h +src/ed25519-supercop-ref/ge25519_base.data +src/ed25519-supercop-ref/sc25519.c +src/ed25519-supercop-ref/sc25519.h +src/ed25519-supercop-ref/sha512-blocks.c +src/ed25519-supercop-ref/sha512-hash.c +src/ed25519-supercop-ref/sha512.h +src/ed25519-supercop-ref/test.c +src/ed25519-supercop-ref/verify.c \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/ed25519-1.2.egg-info/dependency_links.txt b/Linux_i686/lib/python2.7/site-packages/ed25519-1.2.egg-info/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/ed25519-1.2.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/Linux_i686/lib/python2.7/site-packages/ed25519-1.2.egg-info/installed-files.txt 
b/Linux_i686/lib/python2.7/site-packages/ed25519-1.2.egg-info/installed-files.txt new file mode 100644 index 0000000..12197dd --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/ed25519-1.2.egg-info/installed-files.txt @@ -0,0 +1,15 @@ +../ed25519/keys.py +../ed25519/_version.py +../ed25519/test_ed25519.py +../ed25519/__init__.py +../ed25519/keys.pyc +../ed25519/_version.pyc +../ed25519/test_ed25519.pyc +../ed25519/__init__.pyc +../ed25519/_ed25519.so +./ +SOURCES.txt +dependency_links.txt +PKG-INFO +top_level.txt +../../../../bin/edsig diff --git a/Linux_i686/lib/python2.7/site-packages/ed25519-1.2.egg-info/top_level.txt b/Linux_i686/lib/python2.7/site-packages/ed25519-1.2.egg-info/top_level.txt new file mode 100644 index 0000000..1da7fc0 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/ed25519-1.2.egg-info/top_level.txt @@ -0,0 +1 @@ +ed25519 diff --git a/Linux_i686/lib/python2.7/site-packages/ed25519/__init__.py b/Linux_i686/lib/python2.7/site-packages/ed25519/__init__.py new file mode 100644 index 0000000..54d266c --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/ed25519/__init__.py @@ -0,0 +1,11 @@ +from keys import (BadSignatureError, BadPrefixError, + create_keypair, SigningKey, VerifyingKey, + remove_prefix, to_ascii, from_ascii) + +(BadSignatureError, BadPrefixError, + create_keypair, SigningKey, VerifyingKey, + remove_prefix, to_ascii, from_ascii) # hush pyflakes + +from _version import get_versions +__version__ = get_versions()['version'] +del get_versions diff --git a/Linux_i686/lib/python2.7/site-packages/ed25519/_ed25519.so b/Linux_i686/lib/python2.7/site-packages/ed25519/_ed25519.so new file mode 100755 index 0000000..9ce73f4 Binary files /dev/null and b/Linux_i686/lib/python2.7/site-packages/ed25519/_ed25519.so differ diff --git a/Linux_i686/lib/python2.7/site-packages/ed25519/_version.py b/Linux_i686/lib/python2.7/site-packages/ed25519/_version.py new file mode 100644 index 0000000..eb46aac --- /dev/null +++ 
b/Linux_i686/lib/python2.7/site-packages/ed25519/_version.py @@ -0,0 +1,11 @@ + +# This file was generated by 'versioneer.py' (0.7) from +# revision-control system data, or from the parent directory name of an +# unpacked source archive. Distribution tarballs contain a pre-generated copy +# of this file. + +version_version = '1.2' +version_full = '1d56b7b16152c960cb673c848316d14033cb2b6d' +def get_versions(default={}, verbose=False): + return {'version': version_version, 'full': version_full} + diff --git a/Linux_i686/lib/python2.7/site-packages/ed25519/keys.py b/Linux_i686/lib/python2.7/site-packages/ed25519/keys.py new file mode 100644 index 0000000..8820a30 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/ed25519/keys.py @@ -0,0 +1,159 @@ +import os +import base64 +import _ed25519 +BadSignatureError = _ed25519.BadSignatureError + +def create_keypair(entropy=os.urandom): + SEEDLEN = _ed25519.SECRETKEYBYTES/2 + assert SEEDLEN == 32 + seed = entropy(SEEDLEN) + sk = SigningKey(seed) + vk = sk.get_verifying_key() + return sk, vk + +class BadPrefixError(Exception): + pass + +def remove_prefix(s_bytes, prefix): + if not s_bytes.startswith(prefix): + raise BadPrefixError("did not see expected '%s' prefix" % (prefix,)) + return s_bytes[len(prefix):] + +def to_ascii(s_bytes, prefix="", encoding="base64"): + """Return a version-prefixed ASCII representation of the given binary + string. 'encoding' indicates how to do the encoding, and can be one of: + * base64 + * base32 + * base16 (or hex) + + This function handles bytes, not bits, so it does not append any trailing + '=' (unlike standard base64.b64encode). It also lowercases the base32 + output. + + 'prefix' will be prepended to the encoded form, and is useful for + distinguishing the purpose and version of the binary string. E.g. you + could prepend 'pub0-' to a VerifyingKey string to allow the receiving + code to raise a useful error if someone pasted in a signature string by + mistake. 
+ """ + if encoding == "base64": + s_ascii = base64.b64encode(s_bytes).rstrip("=") + elif encoding == "base32": + s_ascii = base64.b32encode(s_bytes).rstrip("=").lower() + elif encoding in ("base16", "hex"): + s_ascii = base64.b16encode(s_bytes).lower() + else: + raise NotImplementedError + return prefix+s_ascii + +def from_ascii(s_ascii, prefix="", encoding="base64"): + """This is the opposite of to_ascii. It will throw BadPrefixError if + the prefix is not found. + """ + s_ascii = remove_prefix(s_ascii.strip(), prefix) + if encoding == "base64": + s_ascii += "="*((4 - len(s_ascii)%4)%4) + s_bytes = base64.b64decode(s_ascii) + elif encoding == "base32": + s_ascii += "="*((8 - len(s_ascii)%8)%8) + s_bytes = base64.b32decode(s_ascii.upper()) + elif encoding in ("base16", "hex"): + s_bytes = base64.b16decode(s_ascii.upper()) + else: + raise NotImplementedError + return s_bytes + +class SigningKey(object): + # this can only be used to reconstruct a key created by create_keypair(). + def __init__(self, sk_s, prefix="", encoding=None): + assert isinstance(sk_s, type("")) # string, really bytes + sk_s = remove_prefix(sk_s, prefix) + if encoding is not None: + sk_s = from_ascii(sk_s, encoding=encoding) + if len(sk_s) == 32: + # create from seed + vk_s, sk_s = _ed25519.publickey(sk_s) + else: + if len(sk_s) != 32+32: + raise ValueError("SigningKey takes 32-byte seed or 64-byte string") + self.sk_s = sk_s # seed+pubkey + self.vk_s = sk_s[32:] # just pubkey + + def to_bytes(self, prefix=""): + return prefix+self.sk_s + + def to_ascii(self, prefix="", encoding=None): + assert encoding + return to_ascii(self.to_seed(), prefix, encoding) + + def to_seed(self, prefix=""): + return prefix+self.sk_s[:32] + + def __eq__(self, them): + if not isinstance(them, object): return False + return (them.__class__ == self.__class__ + and them.sk_s == self.sk_s) + + def get_verifying_key(self): + return VerifyingKey(self.vk_s) + + def sign(self, msg, prefix="", encoding=None): + sig_and_msg = 
_ed25519.sign(msg, self.sk_s) + # the response is R+S+msg + sig_R = sig_and_msg[0:32] + sig_S = sig_and_msg[32:64] + msg_out = sig_and_msg[64:] + sig_out = sig_R + sig_S + assert msg_out == msg + if encoding: + return to_ascii(sig_out, prefix, encoding) + return prefix+sig_out + +class VerifyingKey(object): + def __init__(self, vk_s, prefix="", encoding=None): + assert isinstance(vk_s, type("")) # string, really bytes + vk_s = remove_prefix(vk_s, prefix) + if encoding is not None: + vk_s = from_ascii(vk_s, encoding=encoding) + + assert len(vk_s) == 32 + self.vk_s = vk_s + + def to_bytes(self, prefix=""): + return prefix+self.vk_s + + def to_ascii(self, prefix="", encoding=None): + assert encoding + return to_ascii(self.vk_s, prefix, encoding) + + def __eq__(self, them): + if not isinstance(them, object): return False + return (them.__class__ == self.__class__ + and them.vk_s == self.vk_s) + + def verify(self, sig, msg, prefix="", encoding=None): + assert isinstance(sig, type("")) # string, really bytes + if encoding: + sig = from_ascii(sig, prefix, encoding) + else: + sig = remove_prefix(sig, prefix) + assert len(sig) == 64 + sig_R = sig[:32] + sig_S = sig[32:] + sig_and_msg = sig_R + sig_S + msg + # this might raise BadSignatureError + msg2 = _ed25519.open(sig_and_msg, self.vk_s) + assert msg2 == msg + +def selftest(): + message = "crypto libraries should always test themselves at powerup" + sk = SigningKey("priv0-VIsfn5OFGa09Un2MR6Hm7BQ5++xhcQskU2OGXG8jSJl4cWLZrRrVcSN2gVYMGtZT+3354J5jfmqAcuRSD9KIyg", + prefix="priv0-", encoding="base64") + vk = VerifyingKey("pub0-eHFi2a0a1XEjdoFWDBrWU/t9+eCeY35qgHLkUg/SiMo", + prefix="pub0-", encoding="base64") + assert sk.get_verifying_key() == vk + sig = sk.sign(message, prefix="sig0-", encoding="base64") + assert sig == "sig0-E/QrwtSF52x8+q0l4ahA7eJbRKc777ClKNg217Q0z4fiYMCdmAOI+rTLVkiFhX6k3D+wQQfKdJYMxaTUFfv1DQ", sig + vk.verify(sig, message, prefix="sig0-", encoding="base64") + +selftest() diff --git 
a/Linux_i686/lib/python2.7/site-packages/ed25519/test_ed25519.py b/Linux_i686/lib/python2.7/site-packages/ed25519/test_ed25519.py new file mode 100644 index 0000000..840909d --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/ed25519/test_ed25519.py @@ -0,0 +1,267 @@ + +import unittest +import time +from binascii import hexlify, unhexlify +import ed25519 +from ed25519 import _ed25519 as raw + +def flip_bit(s, bit=0, in_byte=-1): + as_bytes = [ord(b) for b in s] + as_bytes[in_byte] = as_bytes[in_byte] ^ (0x01<', + endpoint='static', + view_func=self.send_static_file) + + def _get_error_handlers(self): + from warnings import warn + warn(DeprecationWarning('error_handlers is deprecated, use the ' + 'new error_handler_spec attribute instead.'), stacklevel=1) + return self._error_handlers + def _set_error_handlers(self, value): + self._error_handlers = value + self.error_handler_spec[None] = value + error_handlers = property(_get_error_handlers, _set_error_handlers) + del _get_error_handlers, _set_error_handlers + + @locked_cached_property + def name(self): + """The name of the application. This is usually the import name + with the difference that it's guessed from the run file if the + import name is main. This name is used as a display name when + Flask needs the name of the application. It can be set and overridden + to change the value. + + .. versionadded:: 0.8 + """ + if self.import_name == '__main__': + fn = getattr(sys.modules['__main__'], '__file__', None) + if fn is None: + return '__main__' + return os.path.splitext(os.path.basename(fn))[0] + return self.import_name + + @property + def propagate_exceptions(self): + """Returns the value of the `PROPAGATE_EXCEPTIONS` configuration + value in case it's set, otherwise a sensible default is returned. + + .. 
versionadded:: 0.7 + """ + rv = self.config['PROPAGATE_EXCEPTIONS'] + if rv is not None: + return rv + return self.testing or self.debug + + @property + def preserve_context_on_exception(self): + """Returns the value of the `PRESERVE_CONTEXT_ON_EXCEPTION` + configuration value in case it's set, otherwise a sensible default + is returned. + + .. versionadded:: 0.7 + """ + rv = self.config['PRESERVE_CONTEXT_ON_EXCEPTION'] + if rv is not None: + return rv + return self.debug + + @property + def logger(self): + """A :class:`logging.Logger` object for this application. The + default configuration is to log to stderr if the application is + in debug mode. This logger can be used to (surprise) log messages. + Here some examples:: + + app.logger.debug('A value for debugging') + app.logger.warning('A warning occurred (%d apples)', 42) + app.logger.error('An error occurred') + + .. versionadded:: 0.3 + """ + if self._logger and self._logger.name == self.logger_name: + return self._logger + with _logger_lock: + if self._logger and self._logger.name == self.logger_name: + return self._logger + from flask.logging import create_logger + self._logger = rv = create_logger(self) + return rv + + @locked_cached_property + def jinja_env(self): + """The Jinja2 environment used to load templates.""" + return self.create_jinja_environment() + + @property + def got_first_request(self): + """This attribute is set to `True` if the application started + handling the first request. + + .. versionadded:: 0.8 + """ + return self._got_first_request + + def make_config(self, instance_relative=False): + """Used to create the config attribute by the Flask constructor. + The `instance_relative` parameter is passed in from the constructor + of Flask (there named `instance_relative_config`) and indicates if + the config should be relative to the instance path or the root path + of the application. + + .. 
versionadded:: 0.8 + """ + root_path = self.root_path + if instance_relative: + root_path = self.instance_path + return Config(root_path, self.default_config) + + def auto_find_instance_path(self): + """Tries to locate the instance path if it was not provided to the + constructor of the application class. It will basically calculate + the path to a folder named ``instance`` next to your main file or + the package. + + .. versionadded:: 0.8 + """ + prefix, package_path = find_package(self.import_name) + if prefix is None: + return os.path.join(package_path, 'instance') + return os.path.join(prefix, 'var', self.name + '-instance') + + def open_instance_resource(self, resource, mode='rb'): + """Opens a resource from the application's instance folder + (:attr:`instance_path`). Otherwise works like + :meth:`open_resource`. Instance resources can also be opened for + writing. + + :param resource: the name of the resource. To access resources within + subfolders use forward slashes as separator. + :param mode: resource file opening mode, default is 'rb'. + """ + return open(os.path.join(self.instance_path, resource), mode) + + def create_jinja_environment(self): + """Creates the Jinja2 environment based on :attr:`jinja_options` + and :meth:`select_jinja_autoescape`. Since 0.7 this also adds + the Jinja2 globals and filters after initialization. Override + this function to customize the behavior. + + .. versionadded:: 0.5 + """ + options = dict(self.jinja_options) + if 'autoescape' not in options: + options['autoescape'] = self.select_jinja_autoescape + rv = Environment(self, **options) + rv.globals.update( + url_for=url_for, + get_flashed_messages=get_flashed_messages, + config=self.config, + # request, session and g are normally added with the + # context processor for efficiency reasons but for imported + # templates we also want the proxies in there. 
+ request=request, + session=session, + g=g + ) + rv.filters['tojson'] = json.tojson_filter + return rv + + def create_global_jinja_loader(self): + """Creates the loader for the Jinja2 environment. Can be used to + override just the loader and keeping the rest unchanged. It's + discouraged to override this function. Instead one should override + the :meth:`jinja_loader` function instead. + + The global loader dispatches between the loaders of the application + and the individual blueprints. + + .. versionadded:: 0.7 + """ + return DispatchingJinjaLoader(self) + + def init_jinja_globals(self): + """Deprecated. Used to initialize the Jinja2 globals. + + .. versionadded:: 0.5 + .. versionchanged:: 0.7 + This method is deprecated with 0.7. Override + :meth:`create_jinja_environment` instead. + """ + + def select_jinja_autoescape(self, filename): + """Returns `True` if autoescaping should be active for the given + template name. + + .. versionadded:: 0.5 + """ + if filename is None: + return False + return filename.endswith(('.html', '.htm', '.xml', '.xhtml')) + + def update_template_context(self, context): + """Update the template context with some commonly used variables. + This injects request, session, config and g into the template + context as well as everything template context processors want + to inject. Note that the as of Flask 0.6, the original values + in the context will not be overridden if a context processor + decides to return a value with the same key. + + :param context: the context as a dictionary that is updated in place + to add extra variables. + """ + funcs = self.template_context_processors[None] + reqctx = _request_ctx_stack.top + if reqctx is not None: + bp = reqctx.request.blueprint + if bp is not None and bp in self.template_context_processors: + funcs = chain(funcs, self.template_context_processors[bp]) + orig_ctx = context.copy() + for func in funcs: + context.update(func()) + # make sure the original values win. 
This makes it possible to + # easier add new variables in context processors without breaking + # existing views. + context.update(orig_ctx) + + def run(self, host=None, port=None, debug=None, **options): + """Runs the application on a local development server. If the + :attr:`debug` flag is set the server will automatically reload + for code changes and show a debugger in case an exception happened. + + If you want to run the application in debug mode, but disable the + code execution on the interactive debugger, you can pass + ``use_evalex=False`` as parameter. This will keep the debugger's + traceback screen active, but disable code execution. + + .. admonition:: Keep in Mind + + Flask will suppress any server error with a generic error page + unless it is in debug mode. As such to enable just the + interactive debugger without the code reloading, you have to + invoke :meth:`run` with ``debug=True`` and ``use_reloader=False``. + Setting ``use_debugger`` to `True` without being in debug mode + won't catch any exceptions because there won't be any to + catch. + + .. versionchanged:: 0.10 + The default port is now picked from the ``SERVER_NAME`` variable. + + :param host: the hostname to listen on. Set this to ``'0.0.0.0'`` to + have the server available externally as well. Defaults to + ``'127.0.0.1'``. + :param port: the port of the webserver. Defaults to ``5000`` or the + port defined in the ``SERVER_NAME`` config variable if + present. + :param debug: if given, enable or disable debug mode. + See :attr:`debug`. + :param options: the options to be forwarded to the underlying + Werkzeug server. See + :func:`werkzeug.serving.run_simple` for more + information. 
+ """ + from werkzeug.serving import run_simple + if host is None: + host = '127.0.0.1' + if port is None: + server_name = self.config['SERVER_NAME'] + if server_name and ':' in server_name: + port = int(server_name.rsplit(':', 1)[1]) + else: + port = 5000 + if debug is not None: + self.debug = bool(debug) + options.setdefault('use_reloader', self.debug) + options.setdefault('use_debugger', self.debug) + try: + run_simple(host, port, self, **options) + finally: + # reset the first request information if the development server + # resetted normally. This makes it possible to restart the server + # without reloader and that stuff from an interactive shell. + self._got_first_request = False + + def test_client(self, use_cookies=True): + """Creates a test client for this application. For information + about unit testing head over to :ref:`testing`. + + Note that if you are testing for assertions or exceptions in your + application code, you must set ``app.testing = True`` in order for the + exceptions to propagate to the test client. Otherwise, the exception + will be handled by the application (not visible to the test client) and + the only indication of an AssertionError or other exception will be a + 500 status code response to the test client. See the :attr:`testing` + attribute. For example:: + + app.testing = True + client = app.test_client() + + The test client can be used in a `with` block to defer the closing down + of the context until the end of the `with` block. This is useful if + you want to access the context locals for testing:: + + with app.test_client() as c: + rv = c.get('/?vodka=42') + assert request.args['vodka'] == '42' + + See :class:`~flask.testing.FlaskClient` for more information. + + .. versionchanged:: 0.4 + added support for `with` block usage for the client. + + .. versionadded:: 0.7 + The `use_cookies` parameter was added as well as the ability + to override the client to be used by setting the + :attr:`test_client_class` attribute. 
+ """ + cls = self.test_client_class + if cls is None: + from flask.testing import FlaskClient as cls + return cls(self, self.response_class, use_cookies=use_cookies) + + def open_session(self, request): + """Creates or opens a new session. Default implementation stores all + session data in a signed cookie. This requires that the + :attr:`secret_key` is set. Instead of overriding this method + we recommend replacing the :class:`session_interface`. + + :param request: an instance of :attr:`request_class`. + """ + return self.session_interface.open_session(self, request) + + def save_session(self, session, response): + """Saves the session if it needs updates. For the default + implementation, check :meth:`open_session`. Instead of overriding this + method we recommend replacing the :class:`session_interface`. + + :param session: the session to be saved (a + :class:`~werkzeug.contrib.securecookie.SecureCookie` + object) + :param response: an instance of :attr:`response_class` + """ + return self.session_interface.save_session(self, session, response) + + def make_null_session(self): + """Creates a new instance of a missing session. Instead of overriding + this method we recommend replacing the :class:`session_interface`. + + .. versionadded:: 0.7 + """ + return self.session_interface.make_null_session(self) + + def register_module(self, module, **options): + """Registers a module with this application. The keyword argument + of this function are the same as the ones for the constructor of the + :class:`Module` class and will override the values of the module if + provided. + + .. versionchanged:: 0.7 + The module system was deprecated in favor for the blueprint + system. + """ + assert blueprint_is_module(module), 'register_module requires ' \ + 'actual module objects. Please upgrade to blueprints though.' 
+ if not self.enable_modules: + raise RuntimeError('Module support was disabled but code ' + 'attempted to register a module named %r' % module) + else: + from warnings import warn + warn(DeprecationWarning('Modules are deprecated. Upgrade to ' + 'using blueprints. Have a look into the documentation for ' + 'more information. If this module was registered by a ' + 'Flask-Extension upgrade the extension or contact the author ' + 'of that extension instead. (Registered %r)' % module), + stacklevel=2) + + self.register_blueprint(module, **options) + + @setupmethod + def register_blueprint(self, blueprint, **options): + """Registers a blueprint on the application. + + .. versionadded:: 0.7 + """ + first_registration = False + if blueprint.name in self.blueprints: + assert self.blueprints[blueprint.name] is blueprint, \ + 'A blueprint\'s name collision occurred between %r and ' \ + '%r. Both share the same name "%s". Blueprints that ' \ + 'are created on the fly need unique names.' % \ + (blueprint, self.blueprints[blueprint.name], blueprint.name) + else: + self.blueprints[blueprint.name] = blueprint + first_registration = True + blueprint.register(self, options, first_registration) + + @setupmethod + def add_url_rule(self, rule, endpoint=None, view_func=None, **options): + """Connects a URL rule. Works exactly like the :meth:`route` + decorator. If a view_func is provided it will be registered with the + endpoint. + + Basically this example:: + + @app.route('/') + def index(): + pass + + Is equivalent to the following:: + + def index(): + pass + app.add_url_rule('/', 'index', index) + + If the view_func is not provided you will need to connect the endpoint + to a view function like so:: + + app.view_functions['index'] = index + + Internally :meth:`route` invokes :meth:`add_url_rule` so if you want + to customize the behavior via subclassing you only need to change + this method. + + For more information refer to :ref:`url-route-registrations`. + + .. 
versionchanged:: 0.2 + `view_func` parameter added. + + .. versionchanged:: 0.6 + `OPTIONS` is added automatically as method. + + :param rule: the URL rule as string + :param endpoint: the endpoint for the registered URL rule. Flask + itself assumes the name of the view function as + endpoint + :param view_func: the function to call when serving a request to the + provided endpoint + :param options: the options to be forwarded to the underlying + :class:`~werkzeug.routing.Rule` object. A change + to Werkzeug is handling of method options. methods + is a list of methods this rule should be limited + to (`GET`, `POST` etc.). By default a rule + just listens for `GET` (and implicitly `HEAD`). + Starting with Flask 0.6, `OPTIONS` is implicitly + added and handled by the standard request handling. + """ + if endpoint is None: + endpoint = _endpoint_from_view_func(view_func) + options['endpoint'] = endpoint + methods = options.pop('methods', None) + + # if the methods are not given and the view_func object knows its + # methods we can use that instead. If neither exists, we go with + # a tuple of only `GET` as default. + if methods is None: + methods = getattr(view_func, 'methods', None) or ('GET',) + methods = set(methods) + + # Methods that should always be added + required_methods = set(getattr(view_func, 'required_methods', ())) + + # starting with Flask 0.8 the view_func object can disable and + # force-enable the automatic options handling. + provide_automatic_options = getattr(view_func, + 'provide_automatic_options', None) + + if provide_automatic_options is None: + if 'OPTIONS' not in methods: + provide_automatic_options = True + required_methods.add('OPTIONS') + else: + provide_automatic_options = False + + # Add the required methods now. + methods |= required_methods + + # due to a werkzeug bug we need to make sure that the defaults are + # None if they are an empty dictionary. 
This should not be necessary + # with Werkzeug 0.7 + options['defaults'] = options.get('defaults') or None + + rule = self.url_rule_class(rule, methods=methods, **options) + rule.provide_automatic_options = provide_automatic_options + + self.url_map.add(rule) + if view_func is not None: + old_func = self.view_functions.get(endpoint) + if old_func is not None and old_func != view_func: + raise AssertionError('View function mapping is overwriting an ' + 'existing endpoint function: %s' % endpoint) + self.view_functions[endpoint] = view_func + + def route(self, rule, **options): + """A decorator that is used to register a view function for a + given URL rule. This does the same thing as :meth:`add_url_rule` + but is intended for decorator usage:: + + @app.route('/') + def index(): + return 'Hello World' + + For more information refer to :ref:`url-route-registrations`. + + :param rule: the URL rule as string + :param endpoint: the endpoint for the registered URL rule. Flask + itself assumes the name of the view function as + endpoint + :param options: the options to be forwarded to the underlying + :class:`~werkzeug.routing.Rule` object. A change + to Werkzeug is handling of method options. methods + is a list of methods this rule should be limited + to (`GET`, `POST` etc.). By default a rule + just listens for `GET` (and implicitly `HEAD`). + Starting with Flask 0.6, `OPTIONS` is implicitly + added and handled by the standard request handling. + """ + def decorator(f): + endpoint = options.pop('endpoint', None) + self.add_url_rule(rule, endpoint, f, **options) + return f + return decorator + + @setupmethod + def endpoint(self, endpoint): + """A decorator to register a function as an endpoint. 
+ Example:: + + @app.endpoint('example.endpoint') + def example(): + return "example" + + :param endpoint: the name of the endpoint + """ + def decorator(f): + self.view_functions[endpoint] = f + return f + return decorator + + @setupmethod + def errorhandler(self, code_or_exception): + """A decorator that is used to register a function give a given + error code. Example:: + + @app.errorhandler(404) + def page_not_found(error): + return 'This page does not exist', 404 + + You can also register handlers for arbitrary exceptions:: + + @app.errorhandler(DatabaseError) + def special_exception_handler(error): + return 'Database connection failed', 500 + + You can also register a function as error handler without using + the :meth:`errorhandler` decorator. The following example is + equivalent to the one above:: + + def page_not_found(error): + return 'This page does not exist', 404 + app.error_handler_spec[None][404] = page_not_found + + Setting error handlers via assignments to :attr:`error_handler_spec` + however is discouraged as it requires fiddling with nested dictionaries + and the special case for arbitrary exception types. + + The first `None` refers to the active blueprint. If the error + handler should be application wide `None` shall be used. + + .. versionadded:: 0.7 + One can now additionally also register custom exception types + that do not necessarily have to be a subclass of the + :class:`~werkzeug.exceptions.HTTPException` class. + + :param code: the code as integer for the handler + """ + def decorator(f): + self._register_error_handler(None, code_or_exception, f) + return f + return decorator + + def register_error_handler(self, code_or_exception, f): + """Alternative error attach function to the :meth:`errorhandler` + decorator that is more straightforward to use for non decorator + usage. + + .. 
versionadded:: 0.7 + """ + self._register_error_handler(None, code_or_exception, f) + + @setupmethod + def _register_error_handler(self, key, code_or_exception, f): + if isinstance(code_or_exception, HTTPException): + code_or_exception = code_or_exception.code + if isinstance(code_or_exception, integer_types): + assert code_or_exception != 500 or key is None, \ + 'It is currently not possible to register a 500 internal ' \ + 'server error on a per-blueprint level.' + self.error_handler_spec.setdefault(key, {})[code_or_exception] = f + else: + self.error_handler_spec.setdefault(key, {}).setdefault(None, []) \ + .append((code_or_exception, f)) + + @setupmethod + def template_filter(self, name=None): + """A decorator that is used to register custom template filter. + You can specify a name for the filter, otherwise the function + name will be used. Example:: + + @app.template_filter() + def reverse(s): + return s[::-1] + + :param name: the optional name of the filter, otherwise the + function name will be used. + """ + def decorator(f): + self.add_template_filter(f, name=name) + return f + return decorator + + @setupmethod + def add_template_filter(self, f, name=None): + """Register a custom template filter. Works exactly like the + :meth:`template_filter` decorator. + + :param name: the optional name of the filter, otherwise the + function name will be used. + """ + self.jinja_env.filters[name or f.__name__] = f + + @setupmethod + def template_test(self, name=None): + """A decorator that is used to register custom template test. + You can specify a name for the test, otherwise the function + name will be used. Example:: + + @app.template_test() + def is_prime(n): + if n == 2: + return True + for i in range(2, int(math.ceil(math.sqrt(n))) + 1): + if n % i == 0: + return False + return True + + .. versionadded:: 0.10 + + :param name: the optional name of the test, otherwise the + function name will be used. 
+ """ + def decorator(f): + self.add_template_test(f, name=name) + return f + return decorator + + @setupmethod + def add_template_test(self, f, name=None): + """Register a custom template test. Works exactly like the + :meth:`template_test` decorator. + + .. versionadded:: 0.10 + + :param name: the optional name of the test, otherwise the + function name will be used. + """ + self.jinja_env.tests[name or f.__name__] = f + + + @setupmethod + def template_global(self, name=None): + """A decorator that is used to register a custom template global function. + You can specify a name for the global function, otherwise the function + name will be used. Example:: + + @app.template_global() + def double(n): + return 2 * n + + .. versionadded:: 0.10 + + :param name: the optional name of the global function, otherwise the + function name will be used. + """ + def decorator(f): + self.add_template_global(f, name=name) + return f + return decorator + + @setupmethod + def add_template_global(self, f, name=None): + """Register a custom template global function. Works exactly like the + :meth:`template_global` decorator. + + .. versionadded:: 0.10 + + :param name: the optional name of the global function, otherwise the + function name will be used. + """ + self.jinja_env.globals[name or f.__name__] = f + + @setupmethod + def before_request(self, f): + """Registers a function to run before each request.""" + self.before_request_funcs.setdefault(None, []).append(f) + return f + + @setupmethod + def before_first_request(self, f): + """Registers a function to be run before the first request to this + instance of the application. + + .. versionadded:: 0.8 + """ + self.before_first_request_funcs.append(f) + + @setupmethod + def after_request(self, f): + """Register a function to be run after each request. Your function + must take one parameter, a :attr:`response_class` object and return + a new response object or the same (see :meth:`process_response`). 
+ + As of Flask 0.7 this function might not be executed at the end of the + request in case an unhandled exception occurred. + """ + self.after_request_funcs.setdefault(None, []).append(f) + return f + + @setupmethod + def teardown_request(self, f): + """Register a function to be run at the end of each request, + regardless of whether there was an exception or not. These functions + are executed when the request context is popped, even if not an + actual request was performed. + + Example:: + + ctx = app.test_request_context() + ctx.push() + ... + ctx.pop() + + When ``ctx.pop()`` is executed in the above example, the teardown + functions are called just before the request context moves from the + stack of active contexts. This becomes relevant if you are using + such constructs in tests. + + Generally teardown functions must take every necessary step to avoid + that they will fail. If they do execute code that might fail they + will have to surround the execution of these code by try/except + statements and log occurring errors. + + When a teardown function was called because of a exception it will + be passed an error object. + + .. admonition:: Debug Note + + In debug mode Flask will not tear down a request on an exception + immediately. Instead if will keep it alive so that the interactive + debugger can still access it. This behavior can be controlled + by the ``PRESERVE_CONTEXT_ON_EXCEPTION`` configuration variable. + """ + self.teardown_request_funcs.setdefault(None, []).append(f) + return f + + @setupmethod + def teardown_appcontext(self, f): + """Registers a function to be called when the application context + ends. These functions are typically also called when the request + context is popped. + + Example:: + + ctx = app.app_context() + ctx.push() + ... + ctx.pop() + + When ``ctx.pop()`` is executed in the above example, the teardown + functions are called just before the app context moves from the + stack of active contexts. 
This becomes relevant if you are using + such constructs in tests. + + Since a request context typically also manages an application + context it would also be called when you pop a request context. + + When a teardown function was called because of an exception it will + be passed an error object. + + .. versionadded:: 0.9 + """ + self.teardown_appcontext_funcs.append(f) + return f + + @setupmethod + def context_processor(self, f): + """Registers a template context processor function.""" + self.template_context_processors[None].append(f) + return f + + @setupmethod + def url_value_preprocessor(self, f): + """Registers a function as URL value preprocessor for all view + functions of the application. It's called before the view functions + are called and can modify the url values provided. + """ + self.url_value_preprocessors.setdefault(None, []).append(f) + return f + + @setupmethod + def url_defaults(self, f): + """Callback function for URL defaults for all view functions of the + application. It's called with the endpoint and values and should + update the values passed in place. + """ + self.url_default_functions.setdefault(None, []).append(f) + return f + + def handle_http_exception(self, e): + """Handles an HTTP exception. By default this will invoke the + registered error handlers and fall back to returning the + exception as response. + + .. versionadded:: 0.3 + """ + handlers = self.error_handler_spec.get(request.blueprint) + # Proxy exceptions don't have error codes. We want to always return + # those unchanged as errors + if e.code is None: + return e + if handlers and e.code in handlers: + handler = handlers[e.code] + else: + handler = self.error_handler_spec[None].get(e.code) + if handler is None: + return e + return handler(e) + + def trap_http_exception(self, e): + """Checks if an HTTP exception should be trapped or not. 
By default + this will return `False` for all exceptions except for a bad request + key error if ``TRAP_BAD_REQUEST_ERRORS`` is set to `True`. It + also returns `True` if ``TRAP_HTTP_EXCEPTIONS`` is set to `True`. + + This is called for all HTTP exceptions raised by a view function. + If it returns `True` for any exception the error handler for this + exception is not called and it shows up as regular exception in the + traceback. This is helpful for debugging implicitly raised HTTP + exceptions. + + .. versionadded:: 0.8 + """ + if self.config['TRAP_HTTP_EXCEPTIONS']: + return True + if self.config['TRAP_BAD_REQUEST_ERRORS']: + return isinstance(e, BadRequest) + return False + + def handle_user_exception(self, e): + """This method is called whenever an exception occurs that should be + handled. A special case are + :class:`~werkzeug.exception.HTTPException`\s which are forwarded by + this function to the :meth:`handle_http_exception` method. This + function will either return a response value or reraise the + exception with the same traceback. + + .. versionadded:: 0.7 + """ + exc_type, exc_value, tb = sys.exc_info() + assert exc_value is e + + # ensure not to trash sys.exc_info() at that point in case someone + # wants the traceback preserved in handle_http_exception. Of course + # we cannot prevent users from trashing it themselves in a custom + # trap_http_exception method so that's their fault then. 
+ if isinstance(e, HTTPException) and not self.trap_http_exception(e): + return self.handle_http_exception(e) + + blueprint_handlers = () + handlers = self.error_handler_spec.get(request.blueprint) + if handlers is not None: + blueprint_handlers = handlers.get(None, ()) + app_handlers = self.error_handler_spec[None].get(None, ()) + for typecheck, handler in chain(blueprint_handlers, app_handlers): + if isinstance(e, typecheck): + return handler(e) + + reraise(exc_type, exc_value, tb) + + def handle_exception(self, e): + """Default exception handling that kicks in when an exception + occurs that is not caught. In debug mode the exception will + be re-raised immediately, otherwise it is logged and the handler + for a 500 internal server error is used. If no such handler + exists, a default 500 internal server error message is displayed. + + .. versionadded:: 0.3 + """ + exc_type, exc_value, tb = sys.exc_info() + + got_request_exception.send(self, exception=e) + handler = self.error_handler_spec[None].get(500) + + if self.propagate_exceptions: + # if we want to repropagate the exception, we can attempt to + # raise it with the whole traceback in case we can do that + # (the function was actually called from the except part) + # otherwise, we just raise the error again + if exc_value is e: + reraise(exc_type, exc_value, tb) + else: + raise e + + self.log_exception((exc_type, exc_value, tb)) + if handler is None: + return InternalServerError() + return handler(e) + + def log_exception(self, exc_info): + """Logs an exception. This is called by :meth:`handle_exception` + if debugging is disabled and right before the handler is called. + The default implementation logs the exception as error on the + :attr:`logger`. + + .. 
versionadded:: 0.8 + """ + self.logger.error('Exception on %s [%s]' % ( + request.path, + request.method + ), exc_info=exc_info) + + def raise_routing_exception(self, request): + """Exceptions that are recording during routing are reraised with + this method. During debug we are not reraising redirect requests + for non ``GET``, ``HEAD``, or ``OPTIONS`` requests and we're raising + a different error instead to help debug situations. + + :internal: + """ + if not self.debug \ + or not isinstance(request.routing_exception, RequestRedirect) \ + or request.method in ('GET', 'HEAD', 'OPTIONS'): + raise request.routing_exception + + from .debughelpers import FormDataRoutingRedirect + raise FormDataRoutingRedirect(request) + + def dispatch_request(self): + """Does the request dispatching. Matches the URL and returns the + return value of the view or error handler. This does not have to + be a response object. In order to convert the return value to a + proper response object, call :func:`make_response`. + + .. versionchanged:: 0.7 + This no longer does the exception handling, this code was + moved to the new :meth:`full_dispatch_request`. + """ + req = _request_ctx_stack.top.request + if req.routing_exception is not None: + self.raise_routing_exception(req) + rule = req.url_rule + # if we provide automatic options for this URL and the + # request came with the OPTIONS method, reply automatically + if getattr(rule, 'provide_automatic_options', False) \ + and req.method == 'OPTIONS': + return self.make_default_options_response() + # otherwise dispatch to the handler for that endpoint + return self.view_functions[rule.endpoint](**req.view_args) + + def full_dispatch_request(self): + """Dispatches the request and on top of that performs request + pre and postprocessing as well as HTTP exception catching and + error handling. + + .. 
versionadded:: 0.7 + """ + self.try_trigger_before_first_request_functions() + try: + request_started.send(self) + rv = self.preprocess_request() + if rv is None: + rv = self.dispatch_request() + except Exception as e: + rv = self.handle_user_exception(e) + response = self.make_response(rv) + response = self.process_response(response) + request_finished.send(self, response=response) + return response + + def try_trigger_before_first_request_functions(self): + """Called before each request and will ensure that it triggers + the :attr:`before_first_request_funcs` and only exactly once per + application instance (which means process usually). + + :internal: + """ + if self._got_first_request: + return + with self._before_request_lock: + if self._got_first_request: + return + self._got_first_request = True + for func in self.before_first_request_funcs: + func() + + def make_default_options_response(self): + """This method is called to create the default `OPTIONS` response. + This can be changed through subclassing to change the default + behavior of `OPTIONS` responses. + + .. versionadded:: 0.7 + """ + adapter = _request_ctx_stack.top.url_adapter + if hasattr(adapter, 'allowed_methods'): + methods = adapter.allowed_methods() + else: + # fallback for Werkzeug < 0.7 + methods = [] + try: + adapter.match(method='--') + except MethodNotAllowed as e: + methods = e.valid_methods + except HTTPException as e: + pass + rv = self.response_class() + rv.allow.update(methods) + return rv + + def should_ignore_error(self, error): + """This is called to figure out if an error should be ignored + or not as far as the teardown system is concerned. If this + function returns `True` then the teardown handlers will not be + passed the error. + + .. versionadded:: 0.10 + """ + return False + + def make_response(self, rv): + """Converts the return value from a view function to a real + response object that is an instance of :attr:`response_class`. 
+ + The following types are allowed for `rv`: + + .. tabularcolumns:: |p{3.5cm}|p{9.5cm}| + + ======================= =========================================== + :attr:`response_class` the object is returned unchanged + :class:`str` a response object is created with the + string as body + :class:`unicode` a response object is created with the + string encoded to utf-8 as body + a WSGI function the function is called as WSGI application + and buffered as response object + :class:`tuple` A tuple in the form ``(response, status, + headers)`` where `response` is any of the + types defined here, `status` is a string + or an integer and `headers` is a list of + a dictionary with header values. + ======================= =========================================== + + :param rv: the return value from the view function + + .. versionchanged:: 0.9 + Previously a tuple was interpreted as the arguments for the + response object. + """ + status = headers = None + if isinstance(rv, tuple): + rv, status, headers = rv + (None,) * (3 - len(rv)) + + if rv is None: + raise ValueError('View function did not return a response') + + if not isinstance(rv, self.response_class): + # When we create a response object directly, we let the constructor + # set the headers and status. We do this because there can be + # some extra logic involved when creating these objects with + # specific values (like default content type selection). + if isinstance(rv, (text_type, bytes, bytearray)): + rv = self.response_class(rv, headers=headers, status=status) + headers = status = None + else: + rv = self.response_class.force_type(rv, request.environ) + + if status is not None: + if isinstance(status, string_types): + rv.status = status + else: + rv.status_code = status + if headers: + rv.headers.extend(headers) + + return rv + + def create_url_adapter(self, request): + """Creates a URL adapter for the given request. 
The URL adapter + is created at a point where the request context is not yet set up + so the request is passed explicitly. + + .. versionadded:: 0.6 + + .. versionchanged:: 0.9 + This can now also be called without a request object when the + URL adapter is created for the application context. + """ + if request is not None: + return self.url_map.bind_to_environ(request.environ, + server_name=self.config['SERVER_NAME']) + # We need at the very least the server name to be set for this + # to work. + if self.config['SERVER_NAME'] is not None: + return self.url_map.bind( + self.config['SERVER_NAME'], + script_name=self.config['APPLICATION_ROOT'] or '/', + url_scheme=self.config['PREFERRED_URL_SCHEME']) + + def inject_url_defaults(self, endpoint, values): + """Injects the URL defaults for the given endpoint directly into + the values dictionary passed. This is used internally and + automatically called on URL building. + + .. versionadded:: 0.7 + """ + funcs = self.url_default_functions.get(None, ()) + if '.' in endpoint: + bp = endpoint.rsplit('.', 1)[0] + funcs = chain(funcs, self.url_default_functions.get(bp, ())) + for func in funcs: + func(endpoint, values) + + def handle_url_build_error(self, error, endpoint, values): + """Handle :class:`~werkzeug.routing.BuildError` on :meth:`url_for`. + """ + exc_type, exc_value, tb = sys.exc_info() + for handler in self.url_build_error_handlers: + try: + rv = handler(error, endpoint, values) + if rv is not None: + return rv + except BuildError as error: + pass + + # At this point we want to reraise the exception. If the error is + # still the same one we can reraise it with the original traceback, + # otherwise we raise it from here. + if error is exc_value: + reraise(exc_type, exc_value, tb) + raise error + + def preprocess_request(self): + """Called before the actual request dispatching and will + call every as :meth:`before_request` decorated function. 
+ If any of these function returns a value it's handled as + if it was the return value from the view and further + request handling is stopped. + + This also triggers the :meth:`url_value_processor` functions before + the actual :meth:`before_request` functions are called. + """ + bp = _request_ctx_stack.top.request.blueprint + + funcs = self.url_value_preprocessors.get(None, ()) + if bp is not None and bp in self.url_value_preprocessors: + funcs = chain(funcs, self.url_value_preprocessors[bp]) + for func in funcs: + func(request.endpoint, request.view_args) + + funcs = self.before_request_funcs.get(None, ()) + if bp is not None and bp in self.before_request_funcs: + funcs = chain(funcs, self.before_request_funcs[bp]) + for func in funcs: + rv = func() + if rv is not None: + return rv + + def process_response(self, response): + """Can be overridden in order to modify the response object + before it's sent to the WSGI server. By default this will + call all the :meth:`after_request` decorated functions. + + .. versionchanged:: 0.5 + As of Flask 0.5 the functions registered for after request + execution are called in reverse order of registration. + + :param response: a :attr:`response_class` object. + :return: a new response object or the same, has to be an + instance of :attr:`response_class`. + """ + ctx = _request_ctx_stack.top + bp = ctx.request.blueprint + funcs = ctx._after_request_functions + if bp is not None and bp in self.after_request_funcs: + funcs = chain(funcs, reversed(self.after_request_funcs[bp])) + if None in self.after_request_funcs: + funcs = chain(funcs, reversed(self.after_request_funcs[None])) + for handler in funcs: + response = handler(response) + if not self.session_interface.is_null_session(ctx.session): + self.save_session(ctx.session, response) + return response + + def do_teardown_request(self, exc=None): + """Called after the actual request dispatching and will + call every as :meth:`teardown_request` decorated function. 
This is + not actually called by the :class:`Flask` object itself but is always + triggered when the request context is popped. That way we have a + tighter control over certain resources under testing environments. + + .. versionchanged:: 0.9 + Added the `exc` argument. Previously this was always using the + current exception information. + """ + if exc is None: + exc = sys.exc_info()[1] + funcs = reversed(self.teardown_request_funcs.get(None, ())) + bp = _request_ctx_stack.top.request.blueprint + if bp is not None and bp in self.teardown_request_funcs: + funcs = chain(funcs, reversed(self.teardown_request_funcs[bp])) + for func in funcs: + rv = func(exc) + request_tearing_down.send(self, exc=exc) + + def do_teardown_appcontext(self, exc=None): + """Called when an application context is popped. This works pretty + much the same as :meth:`do_teardown_request` but for the application + context. + + .. versionadded:: 0.9 + """ + if exc is None: + exc = sys.exc_info()[1] + for func in reversed(self.teardown_appcontext_funcs): + func(exc) + appcontext_tearing_down.send(self, exc=exc) + + def app_context(self): + """Binds the application only. For as long as the application is bound + to the current context the :data:`flask.current_app` points to that + application. An application context is automatically created when a + request context is pushed if necessary. + + Example usage:: + + with app.app_context(): + ... + + .. versionadded:: 0.9 + """ + return AppContext(self) + + def request_context(self, environ): + """Creates a :class:`~flask.ctx.RequestContext` from the given + environment and binds it to the current context. This must be used in + combination with the `with` statement because the request is only bound + to the current context for the duration of the `with` block. 
+ + Example usage:: + + with app.request_context(environ): + do_something_with(request) + + The object returned can also be used without the `with` statement + which is useful for working in the shell. The example above is + doing exactly the same as this code:: + + ctx = app.request_context(environ) + ctx.push() + try: + do_something_with(request) + finally: + ctx.pop() + + .. versionchanged:: 0.3 + Added support for non-with statement usage and `with` statement + is now passed the ctx object. + + :param environ: a WSGI environment + """ + return RequestContext(self, environ) + + def test_request_context(self, *args, **kwargs): + """Creates a WSGI environment from the given values (see + :func:`werkzeug.test.EnvironBuilder` for more information, this + function accepts the same arguments). + """ + from flask.testing import make_test_environ_builder + builder = make_test_environ_builder(self, *args, **kwargs) + try: + return self.request_context(builder.get_environ()) + finally: + builder.close() + + def wsgi_app(self, environ, start_response): + """The actual WSGI application. This is not implemented in + `__call__` so that middlewares can be applied without losing a + reference to the class. So instead of doing this:: + + app = MyMiddleware(app) + + It's a better idea to do this instead:: + + app.wsgi_app = MyMiddleware(app.wsgi_app) + + Then you still have the original application object around and + can continue to call methods on it. + + .. versionchanged:: 0.7 + The behavior of the before and after request callbacks was changed + under error conditions and a new callback was added that will + always execute at the end of the request, independent on if an + error occurred or not. See :ref:`callbacks-and-errors`. 
+ + :param environ: a WSGI environment + :param start_response: a callable accepting a status code, + a list of headers and an optional + exception context to start the response + """ + ctx = self.request_context(environ) + ctx.push() + error = None + try: + try: + response = self.full_dispatch_request() + except Exception as e: + error = e + response = self.make_response(self.handle_exception(e)) + return response(environ, start_response) + finally: + if self.should_ignore_error(error): + error = None + ctx.auto_pop(error) + + @property + def modules(self): + from warnings import warn + warn(DeprecationWarning('Flask.modules is deprecated, use ' + 'Flask.blueprints instead'), stacklevel=2) + return self.blueprints + + def __call__(self, environ, start_response): + """Shortcut for :attr:`wsgi_app`.""" + return self.wsgi_app(environ, start_response) + + def __repr__(self): + return '<%s %r>' % ( + self.__class__.__name__, + self.name, + ) diff --git a/Linux_i686/lib/python2.7/site-packages/flask/blueprints.py b/Linux_i686/lib/python2.7/site-packages/flask/blueprints.py new file mode 100644 index 0000000..4575ec9 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/flask/blueprints.py @@ -0,0 +1,401 @@ +# -*- coding: utf-8 -*- +""" + flask.blueprints + ~~~~~~~~~~~~~~~~ + + Blueprints are the recommended way to implement larger or more + pluggable applications in Flask 0.7 and later. + + :copyright: (c) 2011 by Armin Ronacher. + :license: BSD, see LICENSE for more details. +""" +from functools import update_wrapper + +from .helpers import _PackageBoundObject, _endpoint_from_view_func + + +class BlueprintSetupState(object): + """Temporary holder object for registering a blueprint with the + application. An instance of this class is created by the + :meth:`~flask.Blueprint.make_setup_state` method and later passed + to all register callback functions. 
+ """ + + def __init__(self, blueprint, app, options, first_registration): + #: a reference to the current application + self.app = app + + #: a reference to the blueprint that created this setup state. + self.blueprint = blueprint + + #: a dictionary with all options that were passed to the + #: :meth:`~flask.Flask.register_blueprint` method. + self.options = options + + #: as blueprints can be registered multiple times with the + #: application and not everything wants to be registered + #: multiple times on it, this attribute can be used to figure + #: out if the blueprint was registered in the past already. + self.first_registration = first_registration + + subdomain = self.options.get('subdomain') + if subdomain is None: + subdomain = self.blueprint.subdomain + + #: The subdomain that the blueprint should be active for, `None` + #: otherwise. + self.subdomain = subdomain + + url_prefix = self.options.get('url_prefix') + if url_prefix is None: + url_prefix = self.blueprint.url_prefix + + #: The prefix that should be used for all URLs defined on the + #: blueprint. + self.url_prefix = url_prefix + + #: A dictionary with URL defaults that is added to each and every + #: URL that was defined with the blueprint. + self.url_defaults = dict(self.blueprint.url_values_defaults) + self.url_defaults.update(self.options.get('url_defaults', ())) + + def add_url_rule(self, rule, endpoint=None, view_func=None, **options): + """A helper method to register a rule (and optionally a view function) + to the application. The endpoint is automatically prefixed with the + blueprint's name. 
+ """ + if self.url_prefix: + rule = self.url_prefix + rule + options.setdefault('subdomain', self.subdomain) + if endpoint is None: + endpoint = _endpoint_from_view_func(view_func) + defaults = self.url_defaults + if 'defaults' in options: + defaults = dict(defaults, **options.pop('defaults')) + self.app.add_url_rule(rule, '%s.%s' % (self.blueprint.name, endpoint), + view_func, defaults=defaults, **options) + + +class Blueprint(_PackageBoundObject): + """Represents a blueprint. A blueprint is an object that records + functions that will be called with the + :class:`~flask.blueprint.BlueprintSetupState` later to register functions + or other things on the main application. See :ref:`blueprints` for more + information. + + .. versionadded:: 0.7 + """ + + warn_on_modifications = False + _got_registered_once = False + + def __init__(self, name, import_name, static_folder=None, + static_url_path=None, template_folder=None, + url_prefix=None, subdomain=None, url_defaults=None): + _PackageBoundObject.__init__(self, import_name, template_folder) + self.name = name + self.url_prefix = url_prefix + self.subdomain = subdomain + self.static_folder = static_folder + self.static_url_path = static_url_path + self.deferred_functions = [] + self.view_functions = {} + if url_defaults is None: + url_defaults = {} + self.url_values_defaults = url_defaults + + def record(self, func): + """Registers a function that is called when the blueprint is + registered on the application. This function is called with the + state as argument as returned by the :meth:`make_setup_state` + method. + """ + if self._got_registered_once and self.warn_on_modifications: + from warnings import warn + warn(Warning('The blueprint was already registered once ' + 'but is getting modified now. 
These changes ' + 'will not show up.')) + self.deferred_functions.append(func) + + def record_once(self, func): + """Works like :meth:`record` but wraps the function in another + function that will ensure the function is only called once. If the + blueprint is registered a second time on the application, the + function passed is not called. + """ + def wrapper(state): + if state.first_registration: + func(state) + return self.record(update_wrapper(wrapper, func)) + + def make_setup_state(self, app, options, first_registration=False): + """Creates an instance of :meth:`~flask.blueprints.BlueprintSetupState` + object that is later passed to the register callback functions. + Subclasses can override this to return a subclass of the setup state. + """ + return BlueprintSetupState(self, app, options, first_registration) + + def register(self, app, options, first_registration=False): + """Called by :meth:`Flask.register_blueprint` to register a blueprint + on the application. This can be overridden to customize the register + behavior. Keyword arguments from + :func:`~flask.Flask.register_blueprint` are directly forwarded to this + method in the `options` dictionary. + """ + self._got_registered_once = True + state = self.make_setup_state(app, options, first_registration) + if self.has_static_folder: + state.add_url_rule(self.static_url_path + '/', + view_func=self.send_static_file, + endpoint='static') + + for deferred in self.deferred_functions: + deferred(state) + + def route(self, rule, **options): + """Like :meth:`Flask.route` but for a blueprint. The endpoint for the + :func:`url_for` function is prefixed with the name of the blueprint. + """ + def decorator(f): + endpoint = options.pop("endpoint", f.__name__) + self.add_url_rule(rule, endpoint, f, **options) + return f + return decorator + + def add_url_rule(self, rule, endpoint=None, view_func=None, **options): + """Like :meth:`Flask.add_url_rule` but for a blueprint. 
The endpoint for + the :func:`url_for` function is prefixed with the name of the blueprint. + """ + if endpoint: + assert '.' not in endpoint, "Blueprint endpoint's should not contain dot's" + self.record(lambda s: + s.add_url_rule(rule, endpoint, view_func, **options)) + + def endpoint(self, endpoint): + """Like :meth:`Flask.endpoint` but for a blueprint. This does not + prefix the endpoint with the blueprint name, this has to be done + explicitly by the user of this method. If the endpoint is prefixed + with a `.` it will be registered to the current blueprint, otherwise + it's an application independent endpoint. + """ + def decorator(f): + def register_endpoint(state): + state.app.view_functions[endpoint] = f + self.record_once(register_endpoint) + return f + return decorator + + def app_template_filter(self, name=None): + """Register a custom template filter, available application wide. Like + :meth:`Flask.template_filter` but for a blueprint. + + :param name: the optional name of the filter, otherwise the + function name will be used. + """ + def decorator(f): + self.add_app_template_filter(f, name=name) + return f + return decorator + + def add_app_template_filter(self, f, name=None): + """Register a custom template filter, available application wide. Like + :meth:`Flask.add_template_filter` but for a blueprint. Works exactly + like the :meth:`app_template_filter` decorator. + + :param name: the optional name of the filter, otherwise the + function name will be used. + """ + def register_template(state): + state.app.jinja_env.filters[name or f.__name__] = f + self.record_once(register_template) + + def app_template_test(self, name=None): + """Register a custom template test, available application wide. Like + :meth:`Flask.template_test` but for a blueprint. + + .. versionadded:: 0.10 + + :param name: the optional name of the test, otherwise the + function name will be used. 
+ """ + def decorator(f): + self.add_app_template_test(f, name=name) + return f + return decorator + + def add_app_template_test(self, f, name=None): + """Register a custom template test, available application wide. Like + :meth:`Flask.add_template_test` but for a blueprint. Works exactly + like the :meth:`app_template_test` decorator. + + .. versionadded:: 0.10 + + :param name: the optional name of the test, otherwise the + function name will be used. + """ + def register_template(state): + state.app.jinja_env.tests[name or f.__name__] = f + self.record_once(register_template) + + def app_template_global(self, name=None): + """Register a custom template global, available application wide. Like + :meth:`Flask.template_global` but for a blueprint. + + .. versionadded:: 0.10 + + :param name: the optional name of the global, otherwise the + function name will be used. + """ + def decorator(f): + self.add_app_template_global(f, name=name) + return f + return decorator + + def add_app_template_global(self, f, name=None): + """Register a custom template global, available application wide. Like + :meth:`Flask.add_template_global` but for a blueprint. Works exactly + like the :meth:`app_template_global` decorator. + + .. versionadded:: 0.10 + + :param name: the optional name of the global, otherwise the + function name will be used. + """ + def register_template(state): + state.app.jinja_env.globals[name or f.__name__] = f + self.record_once(register_template) + + def before_request(self, f): + """Like :meth:`Flask.before_request` but for a blueprint. This function + is only executed before each request that is handled by a function of + that blueprint. + """ + self.record_once(lambda s: s.app.before_request_funcs + .setdefault(self.name, []).append(f)) + return f + + def before_app_request(self, f): + """Like :meth:`Flask.before_request`. Such a function is executed + before each request, even if outside of a blueprint. 
+ """ + self.record_once(lambda s: s.app.before_request_funcs + .setdefault(None, []).append(f)) + return f + + def before_app_first_request(self, f): + """Like :meth:`Flask.before_first_request`. Such a function is + executed before the first request to the application. + """ + self.record_once(lambda s: s.app.before_first_request_funcs.append(f)) + return f + + def after_request(self, f): + """Like :meth:`Flask.after_request` but for a blueprint. This function + is only executed after each request that is handled by a function of + that blueprint. + """ + self.record_once(lambda s: s.app.after_request_funcs + .setdefault(self.name, []).append(f)) + return f + + def after_app_request(self, f): + """Like :meth:`Flask.after_request` but for a blueprint. Such a function + is executed after each request, even if outside of the blueprint. + """ + self.record_once(lambda s: s.app.after_request_funcs + .setdefault(None, []).append(f)) + return f + + def teardown_request(self, f): + """Like :meth:`Flask.teardown_request` but for a blueprint. This + function is only executed when tearing down requests handled by a + function of that blueprint. Teardown request functions are executed + when the request context is popped, even when no actual request was + performed. + """ + self.record_once(lambda s: s.app.teardown_request_funcs + .setdefault(self.name, []).append(f)) + return f + + def teardown_app_request(self, f): + """Like :meth:`Flask.teardown_request` but for a blueprint. Such a + function is executed when tearing down each request, even if outside of + the blueprint. + """ + self.record_once(lambda s: s.app.teardown_request_funcs + .setdefault(None, []).append(f)) + return f + + def context_processor(self, f): + """Like :meth:`Flask.context_processor` but for a blueprint. This + function is only executed for requests handled by a blueprint. 
+ """ + self.record_once(lambda s: s.app.template_context_processors + .setdefault(self.name, []).append(f)) + return f + + def app_context_processor(self, f): + """Like :meth:`Flask.context_processor` but for a blueprint. Such a + function is executed each request, even if outside of the blueprint. + """ + self.record_once(lambda s: s.app.template_context_processors + .setdefault(None, []).append(f)) + return f + + def app_errorhandler(self, code): + """Like :meth:`Flask.errorhandler` but for a blueprint. This + handler is used for all requests, even if outside of the blueprint. + """ + def decorator(f): + self.record_once(lambda s: s.app.errorhandler(code)(f)) + return f + return decorator + + def url_value_preprocessor(self, f): + """Registers a function as URL value preprocessor for this + blueprint. It's called before the view functions are called and + can modify the url values provided. + """ + self.record_once(lambda s: s.app.url_value_preprocessors + .setdefault(self.name, []).append(f)) + return f + + def url_defaults(self, f): + """Callback function for URL defaults for this blueprint. It's called + with the endpoint and values and should update the values passed + in place. + """ + self.record_once(lambda s: s.app.url_default_functions + .setdefault(self.name, []).append(f)) + return f + + def app_url_value_preprocessor(self, f): + """Same as :meth:`url_value_preprocessor` but application wide. + """ + self.record_once(lambda s: s.app.url_value_preprocessors + .setdefault(None, []).append(f)) + return f + + def app_url_defaults(self, f): + """Same as :meth:`url_defaults` but application wide. + """ + self.record_once(lambda s: s.app.url_default_functions + .setdefault(None, []).append(f)) + return f + + def errorhandler(self, code_or_exception): + """Registers an error handler that becomes active for this blueprint + only. 
Please be aware that routing does not happen local to a + blueprint so an error handler for 404 usually is not handled by + a blueprint unless it is caused inside a view function. Another + special case is the 500 internal server error which is always looked + up from the application. + + Otherwise works as the :meth:`~flask.Flask.errorhandler` decorator + of the :class:`~flask.Flask` object. + """ + def decorator(f): + self.record_once(lambda s: s.app._register_error_handler( + self.name, code_or_exception, f)) + return f + return decorator diff --git a/Linux_i686/lib/python2.7/site-packages/flask/config.py b/Linux_i686/lib/python2.7/site-packages/flask/config.py new file mode 100644 index 0000000..155afa2 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/flask/config.py @@ -0,0 +1,168 @@ +# -*- coding: utf-8 -*- +""" + flask.config + ~~~~~~~~~~~~ + + Implements the configuration related objects. + + :copyright: (c) 2011 by Armin Ronacher. + :license: BSD, see LICENSE for more details. +""" + +import imp +import os +import errno + +from werkzeug.utils import import_string +from ._compat import string_types + + +class ConfigAttribute(object): + """Makes an attribute forward to the config""" + + def __init__(self, name, get_converter=None): + self.__name__ = name + self.get_converter = get_converter + + def __get__(self, obj, type=None): + if obj is None: + return self + rv = obj.config[self.__name__] + if self.get_converter is not None: + rv = self.get_converter(rv) + return rv + + def __set__(self, obj, value): + obj.config[self.__name__] = value + + +class Config(dict): + """Works exactly like a dict but provides ways to fill it from files + or special dictionaries. There are two common patterns to populate the + config. 
+ + Either you can fill the config from a config file:: + + app.config.from_pyfile('yourconfig.cfg') + + Or alternatively you can define the configuration options in the + module that calls :meth:`from_object` or provide an import path to + a module that should be loaded. It is also possible to tell it to + use the same module and with that provide the configuration values + just before the call:: + + DEBUG = True + SECRET_KEY = 'development key' + app.config.from_object(__name__) + + In both cases (loading from any Python file or loading from modules), + only uppercase keys are added to the config. This makes it possible to use + lowercase values in the config file for temporary values that are not added + to the config or to define the config keys in the same file that implements + the application. + + Probably the most interesting way to load configurations is from an + environment variable pointing to a file:: + + app.config.from_envvar('YOURAPPLICATION_SETTINGS') + + In this case before launching the application you have to set this + environment variable to the file you want to use. On Linux and OS X + use the export statement:: + + export YOURAPPLICATION_SETTINGS='/path/to/config/file' + + On windows use `set` instead. + + :param root_path: path to which files are read relative from. When the + config object is created by the application, this is + the application's :attr:`~flask.Flask.root_path`. + :param defaults: an optional dictionary of default values + """ + + def __init__(self, root_path, defaults=None): + dict.__init__(self, defaults or {}) + self.root_path = root_path + + def from_envvar(self, variable_name, silent=False): + """Loads a configuration from an environment variable pointing to + a configuration file. 
This is basically just a shortcut with nicer + error messages for this line of code:: + + app.config.from_pyfile(os.environ['YOURAPPLICATION_SETTINGS']) + + :param variable_name: name of the environment variable + :param silent: set to `True` if you want silent failure for missing + files. + :return: bool. `True` if able to load config, `False` otherwise. + """ + rv = os.environ.get(variable_name) + if not rv: + if silent: + return False + raise RuntimeError('The environment variable %r is not set ' + 'and as such configuration could not be ' + 'loaded. Set this variable and make it ' + 'point to a configuration file' % + variable_name) + return self.from_pyfile(rv, silent=silent) + + def from_pyfile(self, filename, silent=False): + """Updates the values in the config from a Python file. This function + behaves as if the file was imported as module with the + :meth:`from_object` function. + + :param filename: the filename of the config. This can either be an + absolute filename or a filename relative to the + root path. + :param silent: set to `True` if you want silent failure for missing + files. + + .. versionadded:: 0.7 + `silent` parameter. + """ + filename = os.path.join(self.root_path, filename) + d = imp.new_module('config') + d.__file__ = filename + try: + with open(filename) as config_file: + exec(compile(config_file.read(), filename, 'exec'), d.__dict__) + except IOError as e: + if silent and e.errno in (errno.ENOENT, errno.EISDIR): + return False + e.strerror = 'Unable to load configuration file (%s)' % e.strerror + raise + self.from_object(d) + return True + + def from_object(self, obj): + """Updates the values from the given object. An object can be of one + of the following two types: + + - a string: in this case the object with that name will be imported + - an actual object reference: that object is used directly + + Objects are usually either modules or classes. + + Just the uppercase variables in that object are stored in the config. 
+ Example usage:: + + app.config.from_object('yourapplication.default_config') + from yourapplication import default_config + app.config.from_object(default_config) + + You should not use this function to load the actual configuration but + rather configuration defaults. The actual config should be loaded + with :meth:`from_pyfile` and ideally from a location not within the + package because the package might be installed system wide. + + :param obj: an import name or object + """ + if isinstance(obj, string_types): + obj = import_string(obj) + for key in dir(obj): + if key.isupper(): + self[key] = getattr(obj, key) + + def __repr__(self): + return '<%s %s>' % (self.__class__.__name__, dict.__repr__(self)) diff --git a/Linux_i686/lib/python2.7/site-packages/flask/ctx.py b/Linux_i686/lib/python2.7/site-packages/flask/ctx.py new file mode 100644 index 0000000..f134237 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/flask/ctx.py @@ -0,0 +1,394 @@ +# -*- coding: utf-8 -*- +""" + flask.ctx + ~~~~~~~~~ + + Implements the objects required to keep the context. + + :copyright: (c) 2011 by Armin Ronacher. + :license: BSD, see LICENSE for more details. +""" + +from __future__ import with_statement + +import sys +from functools import update_wrapper + +from werkzeug.exceptions import HTTPException + +from .globals import _request_ctx_stack, _app_ctx_stack +from .module import blueprint_is_module +from .signals import appcontext_pushed, appcontext_popped + + +class _AppCtxGlobals(object): + """A plain object.""" + + def get(self, name, default=None): + return self.__dict__.get(name, default) + + def __contains__(self, item): + return item in self.__dict__ + + def __iter__(self): + return iter(self.__dict__) + + def __repr__(self): + top = _app_ctx_stack.top + if top is not None: + return '' % top.app.name + return object.__repr__(self) + + +def after_this_request(f): + """Executes a function after this request. This is useful to modify + response objects. 
The function is passed the response object and has + to return the same or a new one. + + Example:: + + @app.route('/') + def index(): + @after_this_request + def add_header(response): + response.headers['X-Foo'] = 'Parachute' + return response + return 'Hello World!' + + This is more useful if a function other than the view function wants to + modify a response. For instance think of a decorator that wants to add + some headers without converting the return value into a response object. + + .. versionadded:: 0.9 + """ + _request_ctx_stack.top._after_request_functions.append(f) + return f + + +def copy_current_request_context(f): + """A helper function that decorates a function to retain the current + request context. This is useful when working with greenlets. The moment + the function is decorated a copy of the request context is created and + then pushed when the function is called. + + Example:: + + import gevent + from flask import copy_current_request_context + + @app.route('/') + def index(): + @copy_current_request_context + def do_some_work(): + # do some work here, it can access flask.request like you + # would otherwise in the view function. + ... + gevent.spawn(do_some_work) + return 'Regular response' + + .. versionadded:: 0.10 + """ + top = _request_ctx_stack.top + if top is None: + raise RuntimeError('This decorator can only be used at local scopes ' + 'when a request context is on the stack. For instance within ' + 'view functions.') + reqctx = top.copy() + def wrapper(*args, **kwargs): + with reqctx: + return f(*args, **kwargs) + return update_wrapper(wrapper, f) + + +def has_request_context(): + """If you have code that wants to test if a request context is there or + not this function can be used. For instance, you may want to take advantage + of request information if the request object is available, but fail + silently if it is unavailable. 
+ + :: + + class User(db.Model): + + def __init__(self, username, remote_addr=None): + self.username = username + if remote_addr is None and has_request_context(): + remote_addr = request.remote_addr + self.remote_addr = remote_addr + + Alternatively you can also just test any of the context bound objects + (such as :class:`request` or :class:`g` for truthness):: + + class User(db.Model): + + def __init__(self, username, remote_addr=None): + self.username = username + if remote_addr is None and request: + remote_addr = request.remote_addr + self.remote_addr = remote_addr + + .. versionadded:: 0.7 + """ + return _request_ctx_stack.top is not None + + +def has_app_context(): + """Works like :func:`has_request_context` but for the application + context. You can also just do a boolean check on the + :data:`current_app` object instead. + + .. versionadded:: 0.9 + """ + return _app_ctx_stack.top is not None + + +class AppContext(object): + """The application context binds an application object implicitly + to the current thread or greenlet, similar to how the + :class:`RequestContext` binds request information. The application + context is also implicitly created if a request context is created + but the application is not on top of the individual application + context. + """ + + def __init__(self, app): + self.app = app + self.url_adapter = app.create_url_adapter(None) + self.g = app.app_ctx_globals_class() + + # Like request context, app contexts can be pushed multiple times + # but there a basic "refcount" is enough to track them. + self._refcnt = 0 + + def push(self): + """Binds the app context to the current context.""" + self._refcnt += 1 + _app_ctx_stack.push(self) + appcontext_pushed.send(self.app) + + def pop(self, exc=None): + """Pops the app context.""" + self._refcnt -= 1 + if self._refcnt <= 0: + if exc is None: + exc = sys.exc_info()[1] + self.app.do_teardown_appcontext(exc) + rv = _app_ctx_stack.pop() + assert rv is self, 'Popped wrong app context. 
(%r instead of %r)' \ + % (rv, self) + appcontext_popped.send(self.app) + + def __enter__(self): + self.push() + return self + + def __exit__(self, exc_type, exc_value, tb): + self.pop(exc_value) + + +class RequestContext(object): + """The request context contains all request relevant information. It is + created at the beginning of the request and pushed to the + `_request_ctx_stack` and removed at the end of it. It will create the + URL adapter and request object for the WSGI environment provided. + + Do not attempt to use this class directly, instead use + :meth:`~flask.Flask.test_request_context` and + :meth:`~flask.Flask.request_context` to create this object. + + When the request context is popped, it will evaluate all the + functions registered on the application for teardown execution + (:meth:`~flask.Flask.teardown_request`). + + The request context is automatically popped at the end of the request + for you. In debug mode the request context is kept around if + exceptions happen so that interactive debuggers have a chance to + introspect the data. With 0.4 this can also be forced for requests + that did not fail and outside of `DEBUG` mode. By setting + ``'flask._preserve_context'`` to `True` on the WSGI environment the + context will not pop itself at the end of the request. This is used by + the :meth:`~flask.Flask.test_client` for example to implement the + deferred cleanup functionality. + + You might find this helpful for unittests where you need the + information from the context local around for a little longer. Make + sure to properly :meth:`~werkzeug.LocalStack.pop` the stack yourself in + that situation, otherwise your unittests will leak memory. 
+ """ + + def __init__(self, app, environ, request=None): + self.app = app + if request is None: + request = app.request_class(environ) + self.request = request + self.url_adapter = app.create_url_adapter(self.request) + self.flashes = None + self.session = None + + # Request contexts can be pushed multiple times and interleaved with + # other request contexts. Now only if the last level is popped we + # get rid of them. Additionally if an application context is missing + # one is created implicitly so for each level we add this information + self._implicit_app_ctx_stack = [] + + # indicator if the context was preserved. Next time another context + # is pushed the preserved context is popped. + self.preserved = False + + # remembers the exception for pop if there is one in case the context + # preservation kicks in. + self._preserved_exc = None + + # Functions that should be executed after the request on the response + # object. These will be called before the regular "after_request" + # functions. + self._after_request_functions = [] + + self.match_request() + + # XXX: Support for deprecated functionality. This is going away with + # Flask 1.0 + blueprint = self.request.blueprint + if blueprint is not None: + # better safe than sorry, we don't want to break code that + # already worked + bp = app.blueprints.get(blueprint) + if bp is not None and blueprint_is_module(bp): + self.request._is_old_module = True + + def _get_g(self): + return _app_ctx_stack.top.g + def _set_g(self, value): + _app_ctx_stack.top.g = value + g = property(_get_g, _set_g) + del _get_g, _set_g + + def copy(self): + """Creates a copy of this request context with the same request object. + This can be used to move a request context to a different greenlet. + Because the actual request object is the same this cannot be used to + move a request context to a different thread unless access to the + request object is locked. + + .. 
versionadded:: 0.10 + """ + return self.__class__(self.app, + environ=self.request.environ, + request=self.request + ) + + def match_request(self): + """Can be overridden by a subclass to hook into the matching + of the request. + """ + try: + url_rule, self.request.view_args = \ + self.url_adapter.match(return_rule=True) + self.request.url_rule = url_rule + except HTTPException as e: + self.request.routing_exception = e + + def push(self): + """Binds the request context to the current context.""" + # If an exception occurs in debug mode or if context preservation is + # activated under exception situations exactly one context stays + # on the stack. The rationale is that you want to access that + # information under debug situations. However if someone forgets to + # pop that context again we want to make sure that on the next push + # it's invalidated, otherwise we run at risk that something leaks + # memory. This is usually only a problem in testsuite since this + # functionality is not active in production environments. + top = _request_ctx_stack.top + if top is not None and top.preserved: + top.pop(top._preserved_exc) + + # Before we push the request context we have to ensure that there + # is an application context. + app_ctx = _app_ctx_stack.top + if app_ctx is None or app_ctx.app != self.app: + app_ctx = self.app.app_context() + app_ctx.push() + self._implicit_app_ctx_stack.append(app_ctx) + else: + self._implicit_app_ctx_stack.append(None) + + _request_ctx_stack.push(self) + + # Open the session at the moment that the request context is + # available. This allows a custom open_session method to use the + # request context (e.g. code that access database information + # stored on `g` instead of the appcontext). + self.session = self.app.open_session(self.request) + if self.session is None: + self.session = self.app.make_null_session() + + def pop(self, exc=None): + """Pops the request context and unbinds it by doing that. 
This will + also trigger the execution of functions registered by the + :meth:`~flask.Flask.teardown_request` decorator. + + .. versionchanged:: 0.9 + Added the `exc` argument. + """ + app_ctx = self._implicit_app_ctx_stack.pop() + + clear_request = False + if not self._implicit_app_ctx_stack: + self.preserved = False + self._preserved_exc = None + if exc is None: + exc = sys.exc_info()[1] + self.app.do_teardown_request(exc) + + # If this interpreter supports clearing the exception information + # we do that now. This will only go into effect on Python 2.x, + # on 3.x it disappears automatically at the end of the exception + # stack. + if hasattr(sys, 'exc_clear'): + sys.exc_clear() + + request_close = getattr(self.request, 'close', None) + if request_close is not None: + request_close() + clear_request = True + + rv = _request_ctx_stack.pop() + assert rv is self, 'Popped wrong request context. (%r instead of %r)' \ + % (rv, self) + + # get rid of circular dependencies at the end of the request + # so that we don't require the GC to be active. + if clear_request: + rv.request.environ['werkzeug.request'] = None + + # Get rid of the app as well if necessary. + if app_ctx is not None: + app_ctx.pop(exc) + + def auto_pop(self, exc): + if self.request.environ.get('flask._preserve_context') or \ + (exc is not None and self.app.preserve_context_on_exception): + self.preserved = True + self._preserved_exc = exc + else: + self.pop(exc) + + def __enter__(self): + self.push() + return self + + def __exit__(self, exc_type, exc_value, tb): + # do not pop the request stack if we are in debug mode and an + # exception happened. This will allow the debugger to still + # access the request object in the interactive shell. Furthermore + # the context can be force kept alive for the test client. + # See flask.testing for how this works. 
+ self.auto_pop(exc_value) + + def __repr__(self): + return '<%s \'%s\' [%s] of %s>' % ( + self.__class__.__name__, + self.request.url, + self.request.method, + self.app.name, + ) diff --git a/Linux_i686/lib/python2.7/site-packages/flask/debughelpers.py b/Linux_i686/lib/python2.7/site-packages/flask/debughelpers.py new file mode 100644 index 0000000..2f8510f --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/flask/debughelpers.py @@ -0,0 +1,87 @@ +# -*- coding: utf-8 -*- +""" + flask.debughelpers + ~~~~~~~~~~~~~~~~~~ + + Various helpers to make the development experience better. + + :copyright: (c) 2011 by Armin Ronacher. + :license: BSD, see LICENSE for more details. +""" +from ._compat import implements_to_string + + +class UnexpectedUnicodeError(AssertionError, UnicodeError): + """Raised in places where we want some better error reporting for + unexpected unicode or binary data. + """ + + +@implements_to_string +class DebugFilesKeyError(KeyError, AssertionError): + """Raised from request.files during debugging. The idea is that it can + provide a better error message than just a generic KeyError/BadRequest. + """ + + def __init__(self, request, key): + form_matches = request.form.getlist(key) + buf = ['You tried to access the file "%s" in the request.files ' + 'dictionary but it does not exist. The mimetype for the request ' + 'is "%s" instead of "multipart/form-data" which means that no ' + 'file contents were transmitted. To fix this error you should ' + 'provide enctype="multipart/form-data" in your form.' % + (key, request.mimetype)] + if form_matches: + buf.append('\n\nThe browser instead transmitted some file names. 
' + 'This was submitted: %s' % ', '.join('"%s"' % x + for x in form_matches)) + self.msg = ''.join(buf) + + def __str__(self): + return self.msg + + +class FormDataRoutingRedirect(AssertionError): + """This exception is raised by Flask in debug mode if it detects a + redirect caused by the routing system when the request method is not + GET, HEAD or OPTIONS. Reasoning: form data will be dropped. + """ + + def __init__(self, request): + exc = request.routing_exception + buf = ['A request was sent to this URL (%s) but a redirect was ' + 'issued automatically by the routing system to "%s".' + % (request.url, exc.new_url)] + + # In case just a slash was appended we can be extra helpful + if request.base_url + '/' == exc.new_url.split('?')[0]: + buf.append(' The URL was defined with a trailing slash so ' + 'Flask will automatically redirect to the URL ' + 'with the trailing slash if it was accessed ' + 'without one.') + + buf.append(' Make sure to directly send your %s-request to this URL ' + 'since we can\'t make browsers or HTTP clients redirect ' + 'with form data reliably or without user interaction.' % + request.method) + buf.append('\n\nNote: this exception is only raised in debug mode') + AssertionError.__init__(self, ''.join(buf).encode('utf-8')) + + +def attach_enctype_error_multidict(request): + """Since Flask 0.8 we're monkeypatching the files object in case a + request is detected that does not use multipart form data but the files + object is accessed. 
+ """ + oldcls = request.files.__class__ + class newcls(oldcls): + def __getitem__(self, key): + try: + return oldcls.__getitem__(self, key) + except KeyError as e: + if key not in request.form: + raise + raise DebugFilesKeyError(request, key) + newcls.__name__ = oldcls.__name__ + newcls.__module__ = oldcls.__module__ + request.files.__class__ = newcls diff --git a/Linux_i686/lib/python2.7/site-packages/flask/ext/__init__.py b/Linux_i686/lib/python2.7/site-packages/flask/ext/__init__.py new file mode 100644 index 0000000..f29958a --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/flask/ext/__init__.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +""" + flask.ext + ~~~~~~~~~ + + Redirect imports for extensions. This module basically makes it possible + for us to transition from flaskext.foo to flask_foo without having to + force all extensions to upgrade at the same time. + + When a user does ``from flask.ext.foo import bar`` it will attempt to + import ``from flask_foo import bar`` first and when that fails it will + try to import ``from flaskext.foo import bar``. + + We're switching from namespace packages because it was just too painful for + everybody involved. + + :copyright: (c) 2011 by Armin Ronacher. + :license: BSD, see LICENSE for more details. +""" + + +def setup(): + from ..exthook import ExtensionImporter + importer = ExtensionImporter(['flask_%s', 'flaskext.%s'], __name__) + importer.install() + + +setup() +del setup diff --git a/Linux_i686/lib/python2.7/site-packages/flask/exthook.py b/Linux_i686/lib/python2.7/site-packages/flask/exthook.py new file mode 100644 index 0000000..d0d814c --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/flask/exthook.py @@ -0,0 +1,120 @@ +# -*- coding: utf-8 -*- +""" + flask.exthook + ~~~~~~~~~~~~~ + + Redirect imports for extensions. This module basically makes it possible + for us to transition from flaskext.foo to flask_foo without having to + force all extensions to upgrade at the same time. 
+ + When a user does ``from flask.ext.foo import bar`` it will attempt to + import ``from flask_foo import bar`` first and when that fails it will + try to import ``from flaskext.foo import bar``. + + We're switching from namespace packages because it was just too painful for + everybody involved. + + This is used by `flask.ext`. + + :copyright: (c) 2011 by Armin Ronacher. + :license: BSD, see LICENSE for more details. +""" +import sys +import os +from ._compat import reraise + + +class ExtensionImporter(object): + """This importer redirects imports from this submodule to other locations. + This makes it possible to transition from the old flaskext.name to the + newer flask_name without people having a hard time. + """ + + def __init__(self, module_choices, wrapper_module): + self.module_choices = module_choices + self.wrapper_module = wrapper_module + self.prefix = wrapper_module + '.' + self.prefix_cutoff = wrapper_module.count('.') + 1 + + def __eq__(self, other): + return self.__class__.__module__ == other.__class__.__module__ and \ + self.__class__.__name__ == other.__class__.__name__ and \ + self.wrapper_module == other.wrapper_module and \ + self.module_choices == other.module_choices + + def __ne__(self, other): + return not self.__eq__(other) + + def install(self): + sys.meta_path[:] = [x for x in sys.meta_path if self != x] + [self] + + def find_module(self, fullname, path=None): + if fullname.startswith(self.prefix): + return self + + def load_module(self, fullname): + if fullname in sys.modules: + return sys.modules[fullname] + modname = fullname.split('.', self.prefix_cutoff)[self.prefix_cutoff] + for path in self.module_choices: + realname = path % modname + try: + __import__(realname) + except ImportError: + exc_type, exc_value, tb = sys.exc_info() + # since we only establish the entry in sys.modules at the + # very this seems to be redundant, but if recursive imports + # happen we will call into the move import a second time. 
+ # On the second invocation we still don't have an entry for + # fullname in sys.modules, but we will end up with the same + # fake module name and that import will succeed since this + # one already has a temporary entry in the modules dict. + # Since this one "succeeded" temporarily that second + # invocation now will have created a fullname entry in + # sys.modules which we have to kill. + sys.modules.pop(fullname, None) + + # If it's an important traceback we reraise it, otherwise + # we swallow it and try the next choice. The skipped frame + # is the one from __import__ above which we don't care about + if self.is_important_traceback(realname, tb): + reraise(exc_type, exc_value, tb.tb_next) + continue + module = sys.modules[fullname] = sys.modules[realname] + if '.' not in modname: + setattr(sys.modules[self.wrapper_module], modname, module) + return module + raise ImportError('No module named %s' % fullname) + + def is_important_traceback(self, important_module, tb): + """Walks a traceback's frames and checks if any of the frames + originated in the given important module. If that is the case then we + were able to import the module itself but apparently something went + wrong when the module was imported. (Eg: import of an import failed). + """ + while tb is not None: + if self.is_important_frame(important_module, tb): + return True + tb = tb.tb_next + return False + + def is_important_frame(self, important_module, tb): + """Checks a single frame if it's important.""" + g = tb.tb_frame.f_globals + if '__name__' not in g: + return False + + module_name = g['__name__'] + + # Python 2.7 Behavior. Modules are cleaned up late so the + # name shows up properly here. Success! + if module_name == important_module: + return True + + # Some python versions will will clean up modules so early that the + # module name at that point is no longer set. Try guessing from + # the filename then. 
+ filename = os.path.abspath(tb.tb_frame.f_code.co_filename) + test_string = os.path.sep + important_module.replace('.', os.path.sep) + return test_string + '.py' in filename or \ + test_string + os.path.sep + '__init__.py' in filename diff --git a/Linux_i686/lib/python2.7/site-packages/flask/globals.py b/Linux_i686/lib/python2.7/site-packages/flask/globals.py new file mode 100644 index 0000000..67d41f5 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/flask/globals.py @@ -0,0 +1,44 @@ +# -*- coding: utf-8 -*- +""" + flask.globals + ~~~~~~~~~~~~~ + + Defines all the global objects that are proxies to the current + active context. + + :copyright: (c) 2011 by Armin Ronacher. + :license: BSD, see LICENSE for more details. +""" + +from functools import partial +from werkzeug.local import LocalStack, LocalProxy + + +def _lookup_req_object(name): + top = _request_ctx_stack.top + if top is None: + raise RuntimeError('working outside of request context') + return getattr(top, name) + + +def _lookup_app_object(name): + top = _app_ctx_stack.top + if top is None: + raise RuntimeError('working outside of application context') + return getattr(top, name) + + +def _find_app(): + top = _app_ctx_stack.top + if top is None: + raise RuntimeError('working outside of application context') + return top.app + + +# context locals +_request_ctx_stack = LocalStack() +_app_ctx_stack = LocalStack() +current_app = LocalProxy(_find_app) +request = LocalProxy(partial(_lookup_req_object, 'request')) +session = LocalProxy(partial(_lookup_req_object, 'session')) +g = LocalProxy(partial(_lookup_app_object, 'g')) diff --git a/Linux_i686/lib/python2.7/site-packages/flask/helpers.py b/Linux_i686/lib/python2.7/site-packages/flask/helpers.py new file mode 100644 index 0000000..1e7c87f --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/flask/helpers.py @@ -0,0 +1,849 @@ +# -*- coding: utf-8 -*- +""" + flask.helpers + ~~~~~~~~~~~~~ + + Implements various helpers. 
+ + :copyright: (c) 2011 by Armin Ronacher. + :license: BSD, see LICENSE for more details. +""" + +import os +import sys +import pkgutil +import posixpath +import mimetypes +from time import time +from zlib import adler32 +from threading import RLock +from werkzeug.routing import BuildError +from functools import update_wrapper + +try: + from werkzeug.urls import url_quote +except ImportError: + from urlparse import quote as url_quote + +from werkzeug.datastructures import Headers +from werkzeug.exceptions import NotFound + +# this was moved in 0.7 +try: + from werkzeug.wsgi import wrap_file +except ImportError: + from werkzeug.utils import wrap_file + +from jinja2 import FileSystemLoader + +from .signals import message_flashed +from .globals import session, _request_ctx_stack, _app_ctx_stack, \ + current_app, request +from ._compat import string_types, text_type + + +# sentinel +_missing = object() + + +# what separators does this operating system provide that are not a slash? +# this is used by the send_from_directory function to ensure that nobody is +# able to access files from outside the filesystem. +_os_alt_seps = list(sep for sep in [os.path.sep, os.path.altsep] + if sep not in (None, '/')) + + +def _endpoint_from_view_func(view_func): + """Internal helper that returns the default endpoint for a given + function. This always is the function name. + """ + assert view_func is not None, 'expected view func if endpoint ' \ + 'is not provided.' + return view_func.__name__ + + +def stream_with_context(generator_or_function): + """Request contexts disappear when the response is started on the server. + This is done for efficiency reasons and to make it less likely to encounter + memory leaks with badly written WSGI middlewares. The downside is that if + you are using streamed responses, the generator cannot access request bound + information any more. 
+ + This function however can help you keep the context around for longer:: + + from flask import stream_with_context, request, Response + + @app.route('/stream') + def streamed_response(): + @stream_with_context + def generate(): + yield 'Hello ' + yield request.args['name'] + yield '!' + return Response(generate()) + + Alternatively it can also be used around a specific generator:: + + from flask import stream_with_context, request, Response + + @app.route('/stream') + def streamed_response(): + def generate(): + yield 'Hello ' + yield request.args['name'] + yield '!' + return Response(stream_with_context(generate())) + + .. versionadded:: 0.9 + """ + try: + gen = iter(generator_or_function) + except TypeError: + def decorator(*args, **kwargs): + gen = generator_or_function() + return stream_with_context(gen) + return update_wrapper(decorator, generator_or_function) + + def generator(): + ctx = _request_ctx_stack.top + if ctx is None: + raise RuntimeError('Attempted to stream with context but ' + 'there was no context in the first place to keep around.') + with ctx: + # Dummy sentinel. Has to be inside the context block or we're + # not actually keeping the context around. + yield None + + # The try/finally is here so that if someone passes a WSGI level + # iterator in we're still running the cleanup logic. Generators + # don't need that because they are closed on their destruction + # automatically. + try: + for item in gen: + yield item + finally: + if hasattr(gen, 'close'): + gen.close() + + # The trick is to start the generator. Then the code execution runs until + # the first dummy None is yielded at which point the context was already + # pushed. This item is discarded. Then when the iteration continues the + # real generator is executed. + wrapped_g = generator() + next(wrapped_g) + return wrapped_g + + +def make_response(*args): + """Sometimes it is necessary to set additional headers in a view. 
Because + views do not have to return response objects but can return a value that + is converted into a response object by Flask itself, it becomes tricky to + add headers to it. This function can be called instead of using a return + and you will get a response object which you can use to attach headers. + + If view looked like this and you want to add a new header:: + + def index(): + return render_template('index.html', foo=42) + + You can now do something like this:: + + def index(): + response = make_response(render_template('index.html', foo=42)) + response.headers['X-Parachutes'] = 'parachutes are cool' + return response + + This function accepts the very same arguments you can return from a + view function. This for example creates a response with a 404 error + code:: + + response = make_response(render_template('not_found.html'), 404) + + The other use case of this function is to force the return value of a + view function into a response which is helpful with view + decorators:: + + response = make_response(view_function()) + response.headers['X-Parachutes'] = 'parachutes are cool' + + Internally this function does the following things: + + - if no arguments are passed, it creates a new response argument + - if one argument is passed, :meth:`flask.Flask.make_response` + is invoked with it. + - if more than one argument is passed, the arguments are passed + to the :meth:`flask.Flask.make_response` function as tuple. + + .. versionadded:: 0.6 + """ + if not args: + return current_app.response_class() + if len(args) == 1: + args = args[0] + return current_app.make_response(args) + + +def url_for(endpoint, **values): + """Generates a URL to the given endpoint with the method provided. + + Variable arguments that are unknown to the target endpoint are appended + to the generated URL as query arguments. If the value of a query argument + is `None`, the whole pair is skipped. 
In case blueprints are active + you can shortcut references to the same blueprint by prefixing the + local endpoint with a dot (``.``). + + This will reference the index function local to the current blueprint:: + + url_for('.index') + + For more information, head over to the :ref:`Quickstart `. + + To integrate applications, :class:`Flask` has a hook to intercept URL build + errors through :attr:`Flask.build_error_handler`. The `url_for` function + results in a :exc:`~werkzeug.routing.BuildError` when the current app does + not have a URL for the given endpoint and values. When it does, the + :data:`~flask.current_app` calls its :attr:`~Flask.build_error_handler` if + it is not `None`, which can return a string to use as the result of + `url_for` (instead of `url_for`'s default to raise the + :exc:`~werkzeug.routing.BuildError` exception) or re-raise the exception. + An example:: + + def external_url_handler(error, endpoint, **values): + "Looks up an external URL when `url_for` cannot build a URL." + # This is an example of hooking the build_error_handler. + # Here, lookup_url is some utility function you've built + # which looks up the endpoint in some external URL registry. + url = lookup_url(endpoint, **values) + if url is None: + # External lookup did not have a URL. + # Re-raise the BuildError, in context of original traceback. + exc_type, exc_value, tb = sys.exc_info() + if exc_value is error: + raise exc_type, exc_value, tb + else: + raise error + # url_for will use this result, instead of raising BuildError. + return url + + app.build_error_handler = external_url_handler + + Here, `error` is the instance of :exc:`~werkzeug.routing.BuildError`, and + `endpoint` and `**values` are the arguments passed into `url_for`. Note + that this is for building URLs outside the current application, and not for + handling 404 NotFound errors. + + .. versionadded:: 0.10 + The `_scheme` parameter was added. + + .. 
versionadded:: 0.9 + The `_anchor` and `_method` parameters were added. + + .. versionadded:: 0.9 + Calls :meth:`Flask.handle_build_error` on + :exc:`~werkzeug.routing.BuildError`. + + :param endpoint: the endpoint of the URL (name of the function) + :param values: the variable arguments of the URL rule + :param _external: if set to `True`, an absolute URL is generated. Server + address can be changed via `SERVER_NAME` configuration variable which + defaults to `localhost`. + :param _scheme: a string specifying the desired URL scheme. The `_external` + parameter must be set to `True` or a `ValueError` is raised. + :param _anchor: if provided this is added as anchor to the URL. + :param _method: if provided this explicitly specifies an HTTP method. + """ + appctx = _app_ctx_stack.top + reqctx = _request_ctx_stack.top + if appctx is None: + raise RuntimeError('Attempted to generate a URL without the ' + 'application context being pushed. This has to be ' + 'executed when application context is available.') + + # If request specific information is available we have some extra + # features that support "relative" urls. + if reqctx is not None: + url_adapter = reqctx.url_adapter + blueprint_name = request.blueprint + if not reqctx.request._is_old_module: + if endpoint[:1] == '.': + if blueprint_name is not None: + endpoint = blueprint_name + endpoint + else: + endpoint = endpoint[1:] + else: + # TODO: get rid of this deprecated functionality in 1.0 + if '.' not in endpoint: + if blueprint_name is not None: + endpoint = blueprint_name + '.' + endpoint + elif endpoint.startswith('.'): + endpoint = endpoint[1:] + external = values.pop('_external', False) + + # Otherwise go with the url adapter from the appctx and make + # the urls external by default. + else: + url_adapter = appctx.url_adapter + if url_adapter is None: + raise RuntimeError('Application was not able to create a URL ' + 'adapter for request independent URL generation. 
' + 'You might be able to fix this by setting ' + 'the SERVER_NAME config variable.') + external = values.pop('_external', True) + + anchor = values.pop('_anchor', None) + method = values.pop('_method', None) + scheme = values.pop('_scheme', None) + appctx.app.inject_url_defaults(endpoint, values) + + if scheme is not None: + if not external: + raise ValueError('When specifying _scheme, _external must be True') + url_adapter.url_scheme = scheme + + try: + rv = url_adapter.build(endpoint, values, method=method, + force_external=external) + except BuildError as error: + # We need to inject the values again so that the app callback can + # deal with that sort of stuff. + values['_external'] = external + values['_anchor'] = anchor + values['_method'] = method + return appctx.app.handle_url_build_error(error, endpoint, values) + + if anchor is not None: + rv += '#' + url_quote(anchor) + return rv + + +def get_template_attribute(template_name, attribute): + """Loads a macro (or variable) a template exports. This can be used to + invoke a macro from within Python code. If you for example have a + template named `_cider.html` with the following contents: + + .. sourcecode:: html+jinja + + {% macro hello(name) %}Hello {{ name }}!{% endmacro %} + + You can access this from Python code like this:: + + hello = get_template_attribute('_cider.html', 'hello') + return hello('World') + + .. versionadded:: 0.2 + + :param template_name: the name of the template + :param attribute: the name of the variable of macro to access + """ + return getattr(current_app.jinja_env.get_template(template_name).module, + attribute) + + +def flash(message, category='message'): + """Flashes a message to the next request. In order to remove the + flashed message from the session and to display it to the user, + the template has to call :func:`get_flashed_messages`. + + .. versionchanged:: 0.3 + `category` parameter added. + + :param message: the message to be flashed. 
+ :param category: the category for the message. The following values + are recommended: ``'message'`` for any kind of message, + ``'error'`` for errors, ``'info'`` for information + messages and ``'warning'`` for warnings. However any + kind of string can be used as category. + """ + # Original implementation: + # + # session.setdefault('_flashes', []).append((category, message)) + # + # This assumed that changes made to mutable structures in the session are + # are always in sync with the sess on object, which is not true for session + # implementations that use external storage for keeping their keys/values. + flashes = session.get('_flashes', []) + flashes.append((category, message)) + session['_flashes'] = flashes + message_flashed.send(current_app._get_current_object(), + message=message, category=category) + + +def get_flashed_messages(with_categories=False, category_filter=[]): + """Pulls all flashed messages from the session and returns them. + Further calls in the same request to the function will return + the same messages. By default just the messages are returned, + but when `with_categories` is set to `True`, the return value will + be a list of tuples in the form ``(category, message)`` instead. + + Filter the flashed messages to one or more categories by providing those + categories in `category_filter`. This allows rendering categories in + separate html blocks. The `with_categories` and `category_filter` + arguments are distinct: + + * `with_categories` controls whether categories are returned with message + text (`True` gives a tuple, where `False` gives just the message text). + * `category_filter` filters the messages down to only those matching the + provided categories. + + See :ref:`message-flashing-pattern` for examples. + + .. versionchanged:: 0.3 + `with_categories` parameter added. + + .. versionchanged:: 0.9 + `category_filter` parameter added. + + :param with_categories: set to `True` to also receive categories. 
+ :param category_filter: whitelist of categories to limit return values + """ + flashes = _request_ctx_stack.top.flashes + if flashes is None: + _request_ctx_stack.top.flashes = flashes = session.pop('_flashes') \ + if '_flashes' in session else [] + if category_filter: + flashes = list(filter(lambda f: f[0] in category_filter, flashes)) + if not with_categories: + return [x[1] for x in flashes] + return flashes + + +def send_file(filename_or_fp, mimetype=None, as_attachment=False, + attachment_filename=None, add_etags=True, + cache_timeout=None, conditional=False): + """Sends the contents of a file to the client. This will use the + most efficient method available and configured. By default it will + try to use the WSGI server's file_wrapper support. Alternatively + you can set the application's :attr:`~Flask.use_x_sendfile` attribute + to ``True`` to directly emit an `X-Sendfile` header. This however + requires support of the underlying webserver for `X-Sendfile`. + + By default it will try to guess the mimetype for you, but you can + also explicitly provide one. For extra security you probably want + to send certain files as attachment (HTML for instance). The mimetype + guessing requires a `filename` or an `attachment_filename` to be + provided. + + Please never pass filenames to this function from user sources without + checking them first. Something like this is usually sufficient to + avoid security problems:: + + if '..' in filename or filename.startswith('/'): + abort(404) + + .. versionadded:: 0.2 + + .. versionadded:: 0.5 + The `add_etags`, `cache_timeout` and `conditional` parameters were + added. The default behavior is now to attach etags. + + .. versionchanged:: 0.7 + mimetype guessing and etag support for file objects was + deprecated because it was unreliable. Pass a filename if you are + able to, otherwise attach an etag yourself. This functionality + will be removed in Flask 1.0 + + .. 
versionchanged:: 0.9 + cache_timeout pulls its default from application config, when None. + + :param filename_or_fp: the filename of the file to send. This is + relative to the :attr:`~Flask.root_path` if a + relative path is specified. + Alternatively a file object might be provided + in which case `X-Sendfile` might not work and + fall back to the traditional method. Make sure + that the file pointer is positioned at the start + of data to send before calling :func:`send_file`. + :param mimetype: the mimetype of the file if provided, otherwise + auto detection happens. + :param as_attachment: set to `True` if you want to send this file with + a ``Content-Disposition: attachment`` header. + :param attachment_filename: the filename for the attachment if it + differs from the file's filename. + :param add_etags: set to `False` to disable attaching of etags. + :param conditional: set to `True` to enable conditional responses. + + :param cache_timeout: the timeout in seconds for the headers. When `None` + (default), this value is set by + :meth:`~Flask.get_send_file_max_age` of + :data:`~flask.current_app`. + """ + mtime = None + if isinstance(filename_or_fp, string_types): + filename = filename_or_fp + file = None + else: + from warnings import warn + file = filename_or_fp + filename = getattr(file, 'name', None) + + # XXX: this behavior is now deprecated because it was unreliable. + # removed in Flask 1.0 + if not attachment_filename and not mimetype \ + and isinstance(filename, string_types): + warn(DeprecationWarning('The filename support for file objects ' + 'passed to send_file is now deprecated. Pass an ' + 'attach_filename if you want mimetypes to be guessed.'), + stacklevel=2) + if add_etags: + warn(DeprecationWarning('In future flask releases etags will no ' + 'longer be generated for file objects passed to the send_file ' + 'function because this behavior was unreliable. 
Pass ' + 'filenames instead if possible, otherwise attach an etag ' + 'yourself based on another value'), stacklevel=2) + + if filename is not None: + if not os.path.isabs(filename): + filename = os.path.join(current_app.root_path, filename) + if mimetype is None and (filename or attachment_filename): + mimetype = mimetypes.guess_type(filename or attachment_filename)[0] + if mimetype is None: + mimetype = 'application/octet-stream' + + headers = Headers() + if as_attachment: + if attachment_filename is None: + if filename is None: + raise TypeError('filename unavailable, required for ' + 'sending as attachment') + attachment_filename = os.path.basename(filename) + headers.add('Content-Disposition', 'attachment', + filename=attachment_filename) + + if current_app.use_x_sendfile and filename: + if file is not None: + file.close() + headers['X-Sendfile'] = filename + headers['Content-Length'] = os.path.getsize(filename) + data = None + else: + if file is None: + file = open(filename, 'rb') + mtime = os.path.getmtime(filename) + headers['Content-Length'] = os.path.getsize(filename) + data = wrap_file(request.environ, file) + + rv = current_app.response_class(data, mimetype=mimetype, headers=headers, + direct_passthrough=True) + + # if we know the file modification date, we can store it as the + # the time of the last modification. 
+ if mtime is not None: + rv.last_modified = int(mtime) + + rv.cache_control.public = True + if cache_timeout is None: + cache_timeout = current_app.get_send_file_max_age(filename) + if cache_timeout is not None: + rv.cache_control.max_age = cache_timeout + rv.expires = int(time() + cache_timeout) + + if add_etags and filename is not None: + rv.set_etag('flask-%s-%s-%s' % ( + os.path.getmtime(filename), + os.path.getsize(filename), + adler32( + filename.encode('utf-8') if isinstance(filename, text_type) + else filename + ) & 0xffffffff + )) + if conditional: + rv = rv.make_conditional(request) + # make sure we don't send x-sendfile for servers that + # ignore the 304 status code for x-sendfile. + if rv.status_code == 304: + rv.headers.pop('x-sendfile', None) + return rv + + +def safe_join(directory, filename): + """Safely join `directory` and `filename`. + + Example usage:: + + @app.route('/wiki/') + def wiki_page(filename): + filename = safe_join(app.config['WIKI_FOLDER'], filename) + with open(filename, 'rb') as fd: + content = fd.read() # Read and process the file content... + + :param directory: the base directory. + :param filename: the untrusted filename relative to that directory. + :raises: :class:`~werkzeug.exceptions.NotFound` if the resulting path + would fall out of `directory`. + """ + filename = posixpath.normpath(filename) + for sep in _os_alt_seps: + if sep in filename: + raise NotFound() + if os.path.isabs(filename) or \ + filename == '..' or \ + filename.startswith('../'): + raise NotFound() + return os.path.join(directory, filename) + + +def send_from_directory(directory, filename, **options): + """Send a file from a given directory with :func:`send_file`. This + is a secure way to quickly expose static files from an upload folder + or something similar. + + Example usage:: + + @app.route('/uploads/') + def download_file(filename): + return send_from_directory(app.config['UPLOAD_FOLDER'], + filename, as_attachment=True) + + .. 
admonition:: Sending files and Performance + + It is strongly recommended to activate either `X-Sendfile` support in + your webserver or (if no authentication happens) to tell the webserver + to serve files for the given path on its own without calling into the + web application for improved performance. + + .. versionadded:: 0.5 + + :param directory: the directory where all the files are stored. + :param filename: the filename relative to that directory to + download. + :param options: optional keyword arguments that are directly + forwarded to :func:`send_file`. + """ + filename = safe_join(directory, filename) + if not os.path.isfile(filename): + raise NotFound() + options.setdefault('conditional', True) + return send_file(filename, **options) + + +def get_root_path(import_name): + """Returns the path to a package or cwd if that cannot be found. This + returns the path of a package or the folder that contains a module. + + Not to be confused with the package path returned by :func:`find_package`. + """ + # Module already imported and has a file attribute. Use that first. + mod = sys.modules.get(import_name) + if mod is not None and hasattr(mod, '__file__'): + return os.path.dirname(os.path.abspath(mod.__file__)) + + # Next attempt: check the loader. + loader = pkgutil.get_loader(import_name) + + # Loader does not exist or we're referring to an unloaded main module + # or a main module without path (interactive sessions), go with the + # current working directory. + if loader is None or import_name == '__main__': + return os.getcwd() + + # For .egg, zipimporter does not have get_filename until Python 2.7. + # Some other loaders might exhibit the same behavior. + if hasattr(loader, 'get_filename'): + filepath = loader.get_filename(import_name) + else: + # Fall back to imports. + __import__(import_name) + filepath = sys.modules[import_name].__file__ + + # filepath is import_name.py for a module, or __init__.py for a package. 
+ return os.path.dirname(os.path.abspath(filepath)) + + +def find_package(import_name): + """Finds a package and returns the prefix (or None if the package is + not installed) as well as the folder that contains the package or + module as a tuple. The package path returned is the module that would + have to be added to the pythonpath in order to make it possible to + import the module. The prefix is the path below which a UNIX like + folder structure exists (lib, share etc.). + """ + root_mod_name = import_name.split('.')[0] + loader = pkgutil.get_loader(root_mod_name) + if loader is None or import_name == '__main__': + # import name is not found, or interactive/main module + package_path = os.getcwd() + else: + # For .egg, zipimporter does not have get_filename until Python 2.7. + if hasattr(loader, 'get_filename'): + filename = loader.get_filename(root_mod_name) + elif hasattr(loader, 'archive'): + # zipimporter's loader.archive points to the .egg or .zip + # archive filename is dropped in call to dirname below. + filename = loader.archive + else: + # At least one loader is missing both get_filename and archive: + # Google App Engine's HardenedModulesHook + # + # Fall back to imports. 
+ __import__(import_name) + filename = sys.modules[import_name].__file__ + package_path = os.path.abspath(os.path.dirname(filename)) + # package_path ends with __init__.py for a package + if loader.is_package(root_mod_name): + package_path = os.path.dirname(package_path) + + site_parent, site_folder = os.path.split(package_path) + py_prefix = os.path.abspath(sys.prefix) + if package_path.startswith(py_prefix): + return py_prefix, package_path + elif site_folder.lower() == 'site-packages': + parent, folder = os.path.split(site_parent) + # Windows like installations + if folder.lower() == 'lib': + base_dir = parent + # UNIX like installations + elif os.path.basename(parent).lower() == 'lib': + base_dir = os.path.dirname(parent) + else: + base_dir = site_parent + return base_dir, package_path + return None, package_path + + +class locked_cached_property(object): + """A decorator that converts a function into a lazy property. The + function wrapped is called the first time to retrieve the result + and then that calculated result is used the next time you access + the value. Works like the one in Werkzeug but has a lock for + thread safety. + """ + + def __init__(self, func, name=None, doc=None): + self.__name__ = name or func.__name__ + self.__module__ = func.__module__ + self.__doc__ = doc or func.__doc__ + self.func = func + self.lock = RLock() + + def __get__(self, obj, type=None): + if obj is None: + return self + with self.lock: + value = obj.__dict__.get(self.__name__, _missing) + if value is _missing: + value = self.func(obj) + obj.__dict__[self.__name__] = value + return value + + +class _PackageBoundObject(object): + + def __init__(self, import_name, template_folder=None): + #: The name of the package or module. Do not change this once + #: it was set by the constructor. + self.import_name = import_name + + #: location of the templates. `None` if templates should not be + #: exposed. 
+ self.template_folder = template_folder + + #: Where is the app root located? + self.root_path = get_root_path(self.import_name) + + self._static_folder = None + self._static_url_path = None + + def _get_static_folder(self): + if self._static_folder is not None: + return os.path.join(self.root_path, self._static_folder) + def _set_static_folder(self, value): + self._static_folder = value + static_folder = property(_get_static_folder, _set_static_folder) + del _get_static_folder, _set_static_folder + + def _get_static_url_path(self): + if self._static_url_path is None: + if self.static_folder is None: + return None + return '/' + os.path.basename(self.static_folder) + return self._static_url_path + def _set_static_url_path(self, value): + self._static_url_path = value + static_url_path = property(_get_static_url_path, _set_static_url_path) + del _get_static_url_path, _set_static_url_path + + @property + def has_static_folder(self): + """This is `True` if the package bound object's container has a + folder named ``'static'``. + + .. versionadded:: 0.5 + """ + return self.static_folder is not None + + @locked_cached_property + def jinja_loader(self): + """The Jinja loader for this package bound object. + + .. versionadded:: 0.5 + """ + if self.template_folder is not None: + return FileSystemLoader(os.path.join(self.root_path, + self.template_folder)) + + def get_send_file_max_age(self, filename): + """Provides default cache_timeout for the :func:`send_file` functions. + + By default, this function returns ``SEND_FILE_MAX_AGE_DEFAULT`` from + the configuration of :data:`~flask.current_app`. + + Static file functions such as :func:`send_from_directory` use this + function, and :func:`send_file` calls this function on + :data:`~flask.current_app` when the given cache_timeout is `None`. If a + cache_timeout is given in :func:`send_file`, that timeout is used; + otherwise, this method is called. 
+ + This allows subclasses to change the behavior when sending files based + on the filename. For example, to set the cache timeout for .js files + to 60 seconds:: + + class MyFlask(flask.Flask): + def get_send_file_max_age(self, name): + if name.lower().endswith('.js'): + return 60 + return flask.Flask.get_send_file_max_age(self, name) + + .. versionadded:: 0.9 + """ + return current_app.config['SEND_FILE_MAX_AGE_DEFAULT'] + + def send_static_file(self, filename): + """Function used internally to send static files from the static + folder to the browser. + + .. versionadded:: 0.5 + """ + if not self.has_static_folder: + raise RuntimeError('No static folder for this object') + # Ensure get_send_file_max_age is called in all cases. + # Here, we ensure get_send_file_max_age is called for Blueprints. + cache_timeout = self.get_send_file_max_age(filename) + return send_from_directory(self.static_folder, filename, + cache_timeout=cache_timeout) + + def open_resource(self, resource, mode='rb'): + """Opens a resource from the application's resource folder. To see + how this works, consider the following folder structure:: + + /myapplication.py + /schema.sql + /static + /style.css + /templates + /layout.html + /index.html + + If you want to open the `schema.sql` file you would do the + following:: + + with app.open_resource('schema.sql') as f: + contents = f.read() + do_something_with(contents) + + :param resource: the name of the resource. To access resources within + subfolders use forward slashes as separator. + :param mode: resource file opening mode, default is 'rb'. 
+ """ + if mode not in ('r', 'rb'): + raise ValueError('Resources can only be opened for reading') + return open(os.path.join(self.root_path, resource), mode) diff --git a/Linux_i686/lib/python2.7/site-packages/flask/json.py b/Linux_i686/lib/python2.7/site-packages/flask/json.py new file mode 100644 index 0000000..45ba324 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/flask/json.py @@ -0,0 +1,243 @@ +# -*- coding: utf-8 -*- +""" + flask.jsonimpl + ~~~~~~~~~~~~~~ + + Implementation helpers for the JSON support in Flask. + + :copyright: (c) 2012 by Armin Ronacher. + :license: BSD, see LICENSE for more details. +""" +import io +import uuid +from datetime import datetime +from .globals import current_app, request +from ._compat import text_type, PY2 + +from werkzeug.http import http_date +from jinja2 import Markup + +# Use the same json implementation as itsdangerous on which we +# depend anyways. +try: + from itsdangerous import simplejson as _json +except ImportError: + from itsdangerous import json as _json + + +# figure out if simplejson escapes slashes. This behavior was changed +# from one version to another without reason. +_slash_escape = '\\/' not in _json.dumps('/') + + +__all__ = ['dump', 'dumps', 'load', 'loads', 'htmlsafe_dump', + 'htmlsafe_dumps', 'JSONDecoder', 'JSONEncoder', + 'jsonify'] + + +def _wrap_reader_for_text(fp, encoding): + if isinstance(fp.read(0), bytes): + fp = io.TextIOWrapper(io.BufferedReader(fp), encoding) + return fp + + +def _wrap_writer_for_text(fp, encoding): + try: + fp.write('') + except TypeError: + fp = io.TextIOWrapper(fp, encoding) + return fp + + +class JSONEncoder(_json.JSONEncoder): + """The default Flask JSON encoder. This one extends the default simplejson + encoder by also supporting ``datetime`` objects, ``UUID`` as well as + ``Markup`` objects which are serialized as RFC 822 datetime strings (same + as the HTTP date format). In order to support more data types override the + :meth:`default` method. 
+ """ + + def default(self, o): + """Implement this method in a subclass such that it returns a + serializable object for ``o``, or calls the base implementation (to + raise a ``TypeError``). + + For example, to support arbitrary iterators, you could implement + default like this:: + + def default(self, o): + try: + iterable = iter(o) + except TypeError: + pass + else: + return list(iterable) + return JSONEncoder.default(self, o) + """ + if isinstance(o, datetime): + return http_date(o) + if isinstance(o, uuid.UUID): + return str(o) + if hasattr(o, '__html__'): + return text_type(o.__html__()) + return _json.JSONEncoder.default(self, o) + + +class JSONDecoder(_json.JSONDecoder): + """The default JSON decoder. This one does not change the behavior from + the default simplejson encoder. Consult the :mod:`json` documentation + for more information. This decoder is not only used for the load + functions of this module but also :attr:`~flask.Request`. + """ + + +def _dump_arg_defaults(kwargs): + """Inject default arguments for dump functions.""" + if current_app: + kwargs.setdefault('cls', current_app.json_encoder) + if not current_app.config['JSON_AS_ASCII']: + kwargs.setdefault('ensure_ascii', False) + kwargs.setdefault('sort_keys', current_app.config['JSON_SORT_KEYS']) + else: + kwargs.setdefault('sort_keys', True) + kwargs.setdefault('cls', JSONEncoder) + + +def _load_arg_defaults(kwargs): + """Inject default arguments for load functions.""" + if current_app: + kwargs.setdefault('cls', current_app.json_decoder) + else: + kwargs.setdefault('cls', JSONDecoder) + + +def dumps(obj, **kwargs): + """Serialize ``obj`` to a JSON formatted ``str`` by using the application's + configured encoder (:attr:`~flask.Flask.json_encoder`) if there is an + application on the stack. + + This function can return ``unicode`` strings or ascii-only bytestrings by + default which coerce into unicode strings automatically. 
That behavior by + default is controlled by the ``JSON_AS_ASCII`` configuration variable + and can be overriden by the simplejson ``ensure_ascii`` parameter. + """ + _dump_arg_defaults(kwargs) + encoding = kwargs.pop('encoding', None) + rv = _json.dumps(obj, **kwargs) + if encoding is not None and isinstance(rv, text_type): + rv = rv.encode(encoding) + return rv + + +def dump(obj, fp, **kwargs): + """Like :func:`dumps` but writes into a file object.""" + _dump_arg_defaults(kwargs) + encoding = kwargs.pop('encoding', None) + if encoding is not None: + fp = _wrap_writer_for_text(fp, encoding) + _json.dump(obj, fp, **kwargs) + + +def loads(s, **kwargs): + """Unserialize a JSON object from a string ``s`` by using the application's + configured decoder (:attr:`~flask.Flask.json_decoder`) if there is an + application on the stack. + """ + _load_arg_defaults(kwargs) + if isinstance(s, bytes): + s = s.decode(kwargs.pop('encoding', None) or 'utf-8') + return _json.loads(s, **kwargs) + + +def load(fp, **kwargs): + """Like :func:`loads` but reads from a file object. 
+ """ + _load_arg_defaults(kwargs) + if not PY2: + fp = _wrap_reader_for_text(fp, kwargs.pop('encoding', None) or 'utf-8') + return _json.load(fp, **kwargs) + + +def htmlsafe_dumps(obj, **kwargs): + """Works exactly like :func:`dumps` but is safe for use in ``') + self.assert_equal(rv, u'"\\u003c/script\\u003e"') + self.assert_equal(type(rv), text_type) + rv = render('{{ ""|tojson }}') + self.assert_equal(rv, '"\\u003c/script\\u003e"') + rv = render('{{ "<\0/script>"|tojson }}') + self.assert_equal(rv, '"\\u003c\\u0000/script\\u003e"') + rv = render('{{ "' % ( + render_traceback(self, full=full), + self.render_as_text().decode('utf-8', 'replace') + ) + + @property + def is_template_syntax_error(self): + """`True` if this is a template syntax error.""" + return isinstance(self.exc_value, TemplateSyntaxError) + + @property + def exc_info(self): + """Exception info tuple with a proxy around the frame objects.""" + return self.exc_type, self.exc_value, self.frames[0] + + @property + def standard_exc_info(self): + """Standard python exc_info for re-raising""" + tb = self.frames[0] + # the frame will be an actual traceback (or transparent proxy) if + # we are on pypy or a python implementation with support for tproxy + if type(tb) is not TracebackType: + tb = tb.tb + return self.exc_type, self.exc_value, tb + + +def make_traceback(exc_info, source_hint=None): + """Creates a processed traceback object from the exc_info.""" + exc_type, exc_value, tb = exc_info + if isinstance(exc_value, TemplateSyntaxError): + exc_info = translate_syntax_error(exc_value, source_hint) + initial_skip = 0 + else: + initial_skip = 1 + return translate_exception(exc_info, initial_skip) + + +def translate_syntax_error(error, source=None): + """Rewrites a syntax error to please traceback systems.""" + error.source = source + error.translated = True + exc_info = (error.__class__, error, None) + filename = error.filename + if filename is None: + filename = '' + return fake_exc_info(exc_info, 
filename, error.lineno) + + +def translate_exception(exc_info, initial_skip=0): + """If passed an exc_info it will automatically rewrite the exceptions + all the way down to the correct line numbers and frames. + """ + tb = exc_info[2] + frames = [] + + # skip some internal frames if wanted + for x in range(initial_skip): + if tb is not None: + tb = tb.tb_next + initial_tb = tb + + while tb is not None: + # skip frames decorated with @internalcode. These are internal + # calls we can't avoid and that are useless in template debugging + # output. + if tb.tb_frame.f_code in internal_code: + tb = tb.tb_next + continue + + # save a reference to the next frame if we override the current + # one with a faked one. + next = tb.tb_next + + # fake template exceptions + template = tb.tb_frame.f_globals.get('__jinja_template__') + if template is not None: + lineno = template.get_corresponding_lineno(tb.tb_lineno) + tb = fake_exc_info(exc_info[:2] + (tb,), template.filename, + lineno)[2] + + frames.append(make_frame_proxy(tb)) + tb = next + + # if we don't have any exceptions in the frames left, we have to + # reraise it unchanged. + # XXX: can we backup here? when could this happen? + if not frames: + reraise(exc_info[0], exc_info[1], exc_info[2]) + + return ProcessedTraceback(exc_info[0], exc_info[1], frames) + + +def fake_exc_info(exc_info, filename, lineno): + """Helper for `translate_exception`.""" + exc_type, exc_value, tb = exc_info + + # figure the real context out + if tb is not None: + real_locals = tb.tb_frame.f_locals.copy() + ctx = real_locals.get('context') + if ctx: + locals = ctx.get_all() + else: + locals = {} + for name, value in iteritems(real_locals): + if name.startswith('l_') and value is not missing: + locals[name[2:]] = value + + # if there is a local called __jinja_exception__, we get + # rid of it to not break the debug functionality. 
+ locals.pop('__jinja_exception__', None) + else: + locals = {} + + # assamble fake globals we need + globals = { + '__name__': filename, + '__file__': filename, + '__jinja_exception__': exc_info[:2], + + # we don't want to keep the reference to the template around + # to not cause circular dependencies, but we mark it as Jinja + # frame for the ProcessedTraceback + '__jinja_template__': None + } + + # and fake the exception + code = compile('\n' * (lineno - 1) + raise_helper, filename, 'exec') + + # if it's possible, change the name of the code. This won't work + # on some python environments such as google appengine + try: + if tb is None: + location = 'template' + else: + function = tb.tb_frame.f_code.co_name + if function == 'root': + location = 'top-level template code' + elif function.startswith('block_'): + location = 'block "%s"' % function[6:] + else: + location = 'template' + code = code_type(0, code.co_nlocals, code.co_stacksize, + code.co_flags, code.co_code, code.co_consts, + code.co_names, code.co_varnames, filename, + location, code.co_firstlineno, + code.co_lnotab, (), ()) + except: + pass + + # execute the code and catch the new traceback + try: + exec(code, globals, locals) + except: + exc_info = sys.exc_info() + new_tb = exc_info[2].tb_next + + # return without this frame + return exc_info[:2] + (new_tb,) + + +def _init_ugly_crap(): + """This function implements a few ugly things so that we can patch the + traceback objects. The function returned allows resetting `tb_next` on + any python traceback object. 
Do not attempt to use this on non cpython + interpreters + """ + import ctypes + from types import TracebackType + + # figure out side of _Py_ssize_t + if hasattr(ctypes.pythonapi, 'Py_InitModule4_64'): + _Py_ssize_t = ctypes.c_int64 + else: + _Py_ssize_t = ctypes.c_int + + # regular python + class _PyObject(ctypes.Structure): + pass + _PyObject._fields_ = [ + ('ob_refcnt', _Py_ssize_t), + ('ob_type', ctypes.POINTER(_PyObject)) + ] + + # python with trace + if hasattr(sys, 'getobjects'): + class _PyObject(ctypes.Structure): + pass + _PyObject._fields_ = [ + ('_ob_next', ctypes.POINTER(_PyObject)), + ('_ob_prev', ctypes.POINTER(_PyObject)), + ('ob_refcnt', _Py_ssize_t), + ('ob_type', ctypes.POINTER(_PyObject)) + ] + + class _Traceback(_PyObject): + pass + _Traceback._fields_ = [ + ('tb_next', ctypes.POINTER(_Traceback)), + ('tb_frame', ctypes.POINTER(_PyObject)), + ('tb_lasti', ctypes.c_int), + ('tb_lineno', ctypes.c_int) + ] + + def tb_set_next(tb, next): + """Set the tb_next attribute of a traceback object.""" + if not (isinstance(tb, TracebackType) and + (next is None or isinstance(next, TracebackType))): + raise TypeError('tb_set_next arguments must be traceback objects') + obj = _Traceback.from_address(id(tb)) + if tb.tb_next is not None: + old = _Traceback.from_address(id(tb.tb_next)) + old.ob_refcnt -= 1 + if next is None: + obj.tb_next = ctypes.POINTER(_Traceback)() + else: + next = _Traceback.from_address(id(next)) + next.ob_refcnt += 1 + obj.tb_next = ctypes.pointer(next) + + return tb_set_next + + +# try to get a tb_set_next implementation if we don't have transparent +# proxies. 
+tb_set_next = None +if tproxy is None: + try: + tb_set_next = _init_ugly_crap() + except: + pass + del _init_ugly_crap diff --git a/Linux_i686/lib/python2.7/site-packages/jinja2/defaults.py b/Linux_i686/lib/python2.7/site-packages/jinja2/defaults.py new file mode 100644 index 0000000..a27cb80 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/jinja2/defaults.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +""" + jinja2.defaults + ~~~~~~~~~~~~~~~ + + Jinja default filters and tags. + + :copyright: (c) 2010 by the Jinja Team. + :license: BSD, see LICENSE for more details. +""" +from jinja2._compat import range_type +from jinja2.utils import generate_lorem_ipsum, Cycler, Joiner + + +# defaults for the parser / lexer +BLOCK_START_STRING = '{%' +BLOCK_END_STRING = '%}' +VARIABLE_START_STRING = '{{' +VARIABLE_END_STRING = '}}' +COMMENT_START_STRING = '{#' +COMMENT_END_STRING = '#}' +LINE_STATEMENT_PREFIX = None +LINE_COMMENT_PREFIX = None +TRIM_BLOCKS = False +LSTRIP_BLOCKS = False +NEWLINE_SEQUENCE = '\n' +KEEP_TRAILING_NEWLINE = False + + +# default filters, tests and namespace +from jinja2.filters import FILTERS as DEFAULT_FILTERS +from jinja2.tests import TESTS as DEFAULT_TESTS +DEFAULT_NAMESPACE = { + 'range': range_type, + 'dict': lambda **kw: kw, + 'lipsum': generate_lorem_ipsum, + 'cycler': Cycler, + 'joiner': Joiner +} + + +# export all constants +__all__ = tuple(x for x in locals().keys() if x.isupper()) diff --git a/Linux_i686/lib/python2.7/site-packages/jinja2/environment.py b/Linux_i686/lib/python2.7/site-packages/jinja2/environment.py new file mode 100644 index 0000000..45fabad --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/jinja2/environment.py @@ -0,0 +1,1191 @@ +# -*- coding: utf-8 -*- +""" + jinja2.environment + ~~~~~~~~~~~~~~~~~~ + + Provides a class that holds runtime and parsing time options. + + :copyright: (c) 2010 by the Jinja Team. + :license: BSD, see LICENSE for more details. 
+""" +import os +import sys +from jinja2 import nodes +from jinja2.defaults import BLOCK_START_STRING, \ + BLOCK_END_STRING, VARIABLE_START_STRING, VARIABLE_END_STRING, \ + COMMENT_START_STRING, COMMENT_END_STRING, LINE_STATEMENT_PREFIX, \ + LINE_COMMENT_PREFIX, TRIM_BLOCKS, NEWLINE_SEQUENCE, \ + DEFAULT_FILTERS, DEFAULT_TESTS, DEFAULT_NAMESPACE, \ + KEEP_TRAILING_NEWLINE, LSTRIP_BLOCKS +from jinja2.lexer import get_lexer, TokenStream +from jinja2.parser import Parser +from jinja2.nodes import EvalContext +from jinja2.optimizer import optimize +from jinja2.compiler import generate +from jinja2.runtime import Undefined, new_context +from jinja2.exceptions import TemplateSyntaxError, TemplateNotFound, \ + TemplatesNotFound, TemplateRuntimeError +from jinja2.utils import import_string, LRUCache, Markup, missing, \ + concat, consume, internalcode +from jinja2._compat import imap, ifilter, string_types, iteritems, \ + text_type, reraise, implements_iterator, implements_to_string, \ + get_next, encode_filename, PY2, PYPY +from functools import reduce + + +# for direct template usage we have up to ten living environments +_spontaneous_environments = LRUCache(10) + +# the function to create jinja traceback objects. This is dynamically +# imported on the first exception in the exception handler. +_make_traceback = None + + +def get_spontaneous_environment(*args): + """Return a new spontaneous environment. A spontaneous environment is an + unnamed and unaccessible (in theory) environment that is used for + templates generated from a string and not from the file system. 
+ """ + try: + env = _spontaneous_environments.get(args) + except TypeError: + return Environment(*args) + if env is not None: + return env + _spontaneous_environments[args] = env = Environment(*args) + env.shared = True + return env + + +def create_cache(size): + """Return the cache class for the given size.""" + if size == 0: + return None + if size < 0: + return {} + return LRUCache(size) + + +def copy_cache(cache): + """Create an empty copy of the given cache.""" + if cache is None: + return None + elif type(cache) is dict: + return {} + return LRUCache(cache.capacity) + + +def load_extensions(environment, extensions): + """Load the extensions from the list and bind it to the environment. + Returns a dict of instantiated environments. + """ + result = {} + for extension in extensions: + if isinstance(extension, string_types): + extension = import_string(extension) + result[extension.identifier] = extension(environment) + return result + + +def _environment_sanity_check(environment): + """Perform a sanity check on the environment.""" + assert issubclass(environment.undefined, Undefined), 'undefined must ' \ + 'be a subclass of undefined because filters depend on it.' + assert environment.block_start_string != \ + environment.variable_start_string != \ + environment.comment_start_string, 'block, variable and comment ' \ + 'start strings must be different' + assert environment.newline_sequence in ('\r', '\r\n', '\n'), \ + 'newline_sequence set to unknown line ending string.' + return environment + + +class Environment(object): + r"""The core component of Jinja is the `Environment`. It contains + important shared variables like configuration, filters, tests, + globals and others. Instances of this class may be modified if + they are not shared and if no template was loaded so far. + Modifications on environments after the first template was loaded + will lead to surprising effects and undefined behavior. 
+ + Here the possible initialization parameters: + + `block_start_string` + The string marking the begin of a block. Defaults to ``'{%'``. + + `block_end_string` + The string marking the end of a block. Defaults to ``'%}'``. + + `variable_start_string` + The string marking the begin of a print statement. + Defaults to ``'{{'``. + + `variable_end_string` + The string marking the end of a print statement. Defaults to + ``'}}'``. + + `comment_start_string` + The string marking the begin of a comment. Defaults to ``'{#'``. + + `comment_end_string` + The string marking the end of a comment. Defaults to ``'#}'``. + + `line_statement_prefix` + If given and a string, this will be used as prefix for line based + statements. See also :ref:`line-statements`. + + `line_comment_prefix` + If given and a string, this will be used as prefix for line based + based comments. See also :ref:`line-statements`. + + .. versionadded:: 2.2 + + `trim_blocks` + If this is set to ``True`` the first newline after a block is + removed (block, not variable tag!). Defaults to `False`. + + `lstrip_blocks` + If this is set to ``True`` leading spaces and tabs are stripped + from the start of a line to a block. Defaults to `False`. + + `newline_sequence` + The sequence that starts a newline. Must be one of ``'\r'``, + ``'\n'`` or ``'\r\n'``. The default is ``'\n'`` which is a + useful default for Linux and OS X systems as well as web + applications. + + `keep_trailing_newline` + Preserve the trailing newline when rendering templates. + The default is ``False``, which causes a single newline, + if present, to be stripped from the end of the template. + + .. versionadded:: 2.7 + + `extensions` + List of Jinja extensions to use. This can either be import paths + as strings or extension classes. For more information have a + look at :ref:`the extensions documentation `. + + `optimized` + should the optimizer be enabled? Default is `True`. 
+ + `undefined` + :class:`Undefined` or a subclass of it that is used to represent + undefined values in the template. + + `finalize` + A callable that can be used to process the result of a variable + expression before it is output. For example one can convert + `None` implicitly into an empty string here. + + `autoescape` + If set to true the XML/HTML autoescaping feature is enabled by + default. For more details about auto escaping see + :class:`~jinja2.utils.Markup`. As of Jinja 2.4 this can also + be a callable that is passed the template name and has to + return `True` or `False` depending on autoescape should be + enabled by default. + + .. versionchanged:: 2.4 + `autoescape` can now be a function + + `loader` + The template loader for this environment. + + `cache_size` + The size of the cache. Per default this is ``50`` which means + that if more than 50 templates are loaded the loader will clean + out the least recently used template. If the cache size is set to + ``0`` templates are recompiled all the time, if the cache size is + ``-1`` the cache will not be cleaned. + + `auto_reload` + Some loaders load templates from locations where the template + sources may change (ie: file system or database). If + `auto_reload` is set to `True` (default) every time a template is + requested the loader checks if the source changed and if yes, it + will reload the template. For higher performance it's possible to + disable that. + + `bytecode_cache` + If set to a bytecode cache object, this object will provide a + cache for the internal Jinja bytecode so that templates don't + have to be parsed if they were not changed. + + See :ref:`bytecode-cache` for more information. + """ + + #: if this environment is sandboxed. Modifying this variable won't make + #: the environment sandboxed though. For a real sandboxed environment + #: have a look at jinja2.sandbox. This flag alone controls the code + #: generation by the compiler. 
+ sandboxed = False + + #: True if the environment is just an overlay + overlayed = False + + #: the environment this environment is linked to if it is an overlay + linked_to = None + + #: shared environments have this set to `True`. A shared environment + #: must not be modified + shared = False + + #: these are currently EXPERIMENTAL undocumented features. + exception_handler = None + exception_formatter = None + + def __init__(self, + block_start_string=BLOCK_START_STRING, + block_end_string=BLOCK_END_STRING, + variable_start_string=VARIABLE_START_STRING, + variable_end_string=VARIABLE_END_STRING, + comment_start_string=COMMENT_START_STRING, + comment_end_string=COMMENT_END_STRING, + line_statement_prefix=LINE_STATEMENT_PREFIX, + line_comment_prefix=LINE_COMMENT_PREFIX, + trim_blocks=TRIM_BLOCKS, + lstrip_blocks=LSTRIP_BLOCKS, + newline_sequence=NEWLINE_SEQUENCE, + keep_trailing_newline=KEEP_TRAILING_NEWLINE, + extensions=(), + optimized=True, + undefined=Undefined, + finalize=None, + autoescape=False, + loader=None, + cache_size=50, + auto_reload=True, + bytecode_cache=None): + # !!Important notice!! + # The constructor accepts quite a few arguments that should be + # passed by keyword rather than position. However it's important to + # not change the order of arguments because it's used at least + # internally in those cases: + # - spontaneous environments (i18n extension and Template) + # - unittests + # If parameter changes are required only add parameters at the end + # and don't change the arguments (or the defaults!) of the arguments + # existing already. 
+ + # lexer / parser information + self.block_start_string = block_start_string + self.block_end_string = block_end_string + self.variable_start_string = variable_start_string + self.variable_end_string = variable_end_string + self.comment_start_string = comment_start_string + self.comment_end_string = comment_end_string + self.line_statement_prefix = line_statement_prefix + self.line_comment_prefix = line_comment_prefix + self.trim_blocks = trim_blocks + self.lstrip_blocks = lstrip_blocks + self.newline_sequence = newline_sequence + self.keep_trailing_newline = keep_trailing_newline + + # runtime information + self.undefined = undefined + self.optimized = optimized + self.finalize = finalize + self.autoescape = autoescape + + # defaults + self.filters = DEFAULT_FILTERS.copy() + self.tests = DEFAULT_TESTS.copy() + self.globals = DEFAULT_NAMESPACE.copy() + + # set the loader provided + self.loader = loader + self.cache = create_cache(cache_size) + self.bytecode_cache = bytecode_cache + self.auto_reload = auto_reload + + # load extensions + self.extensions = load_extensions(self, extensions) + + _environment_sanity_check(self) + + def add_extension(self, extension): + """Adds an extension after the environment was created. + + .. versionadded:: 2.5 + """ + self.extensions.update(load_extensions(self, [extension])) + + def extend(self, **attributes): + """Add the items to the instance of the environment if they do not exist + yet. This is used by :ref:`extensions ` to register + callbacks and configuration values without breaking inheritance. 
+ """ + for key, value in iteritems(attributes): + if not hasattr(self, key): + setattr(self, key, value) + + def overlay(self, block_start_string=missing, block_end_string=missing, + variable_start_string=missing, variable_end_string=missing, + comment_start_string=missing, comment_end_string=missing, + line_statement_prefix=missing, line_comment_prefix=missing, + trim_blocks=missing, lstrip_blocks=missing, + extensions=missing, optimized=missing, + undefined=missing, finalize=missing, autoescape=missing, + loader=missing, cache_size=missing, auto_reload=missing, + bytecode_cache=missing): + """Create a new overlay environment that shares all the data with the + current environment except of cache and the overridden attributes. + Extensions cannot be removed for an overlayed environment. An overlayed + environment automatically gets all the extensions of the environment it + is linked to plus optional extra extensions. + + Creating overlays should happen after the initial environment was set + up completely. Not all attributes are truly linked, some are just + copied over so modifications on the original environment may not shine + through. 
+ """ + args = dict(locals()) + del args['self'], args['cache_size'], args['extensions'] + + rv = object.__new__(self.__class__) + rv.__dict__.update(self.__dict__) + rv.overlayed = True + rv.linked_to = self + + for key, value in iteritems(args): + if value is not missing: + setattr(rv, key, value) + + if cache_size is not missing: + rv.cache = create_cache(cache_size) + else: + rv.cache = copy_cache(self.cache) + + rv.extensions = {} + for key, value in iteritems(self.extensions): + rv.extensions[key] = value.bind(rv) + if extensions is not missing: + rv.extensions.update(load_extensions(rv, extensions)) + + return _environment_sanity_check(rv) + + lexer = property(get_lexer, doc="The lexer for this environment.") + + def iter_extensions(self): + """Iterates over the extensions by priority.""" + return iter(sorted(self.extensions.values(), + key=lambda x: x.priority)) + + def getitem(self, obj, argument): + """Get an item or attribute of an object but prefer the item.""" + try: + return obj[argument] + except (TypeError, LookupError): + if isinstance(argument, string_types): + try: + attr = str(argument) + except Exception: + pass + else: + try: + return getattr(obj, attr) + except AttributeError: + pass + return self.undefined(obj=obj, name=argument) + + def getattr(self, obj, attribute): + """Get an item or attribute of an object but prefer the attribute. + Unlike :meth:`getitem` the attribute *must* be a bytestring. + """ + try: + return getattr(obj, attribute) + except AttributeError: + pass + try: + return obj[attribute] + except (TypeError, LookupError, AttributeError): + return self.undefined(obj=obj, name=attribute) + + def call_filter(self, name, value, args=None, kwargs=None, + context=None, eval_ctx=None): + """Invokes a filter on a value the same way the compiler does it. + + .. 
versionadded:: 2.7 + """ + func = self.filters.get(name) + if func is None: + raise TemplateRuntimeError('no filter named %r' % name) + args = [value] + list(args or ()) + if getattr(func, 'contextfilter', False): + if context is None: + raise TemplateRuntimeError('Attempted to invoke context ' + 'filter without context') + args.insert(0, context) + elif getattr(func, 'evalcontextfilter', False): + if eval_ctx is None: + if context is not None: + eval_ctx = context.eval_ctx + else: + eval_ctx = EvalContext(self) + args.insert(0, eval_ctx) + elif getattr(func, 'environmentfilter', False): + args.insert(0, self) + return func(*args, **(kwargs or {})) + + def call_test(self, name, value, args=None, kwargs=None): + """Invokes a test on a value the same way the compiler does it. + + .. versionadded:: 2.7 + """ + func = self.tests.get(name) + if func is None: + raise TemplateRuntimeError('no test named %r' % name) + return func(value, *(args or ()), **(kwargs or {})) + + @internalcode + def parse(self, source, name=None, filename=None): + """Parse the sourcecode and return the abstract syntax tree. This + tree of nodes is used by the compiler to convert the template into + executable source- or bytecode. This is useful for debugging or to + extract information from templates. + + If you are :ref:`developing Jinja2 extensions ` + this gives you a good overview of the node tree generated. + """ + try: + return self._parse(source, name, filename) + except TemplateSyntaxError: + exc_info = sys.exc_info() + self.handle_exception(exc_info, source_hint=source) + + def _parse(self, source, name, filename): + """Internal parsing function used by `parse` and `compile`.""" + return Parser(self, source, name, encode_filename(filename)).parse() + + def lex(self, source, name=None, filename=None): + """Lex the given sourcecode and return a generator that yields + tokens as tuples in the form ``(lineno, token_type, value)``. 
+ This can be useful for :ref:`extension development ` + and debugging templates. + + This does not perform preprocessing. If you want the preprocessing + of the extensions to be applied you have to filter source through + the :meth:`preprocess` method. + """ + source = text_type(source) + try: + return self.lexer.tokeniter(source, name, filename) + except TemplateSyntaxError: + exc_info = sys.exc_info() + self.handle_exception(exc_info, source_hint=source) + + def preprocess(self, source, name=None, filename=None): + """Preprocesses the source with all extensions. This is automatically + called for all parsing and compiling methods but *not* for :meth:`lex` + because there you usually only want the actual source tokenized. + """ + return reduce(lambda s, e: e.preprocess(s, name, filename), + self.iter_extensions(), text_type(source)) + + def _tokenize(self, source, name, filename=None, state=None): + """Called by the parser to do the preprocessing and filtering + for all the extensions. Returns a :class:`~jinja2.lexer.TokenStream`. + """ + source = self.preprocess(source, name, filename) + stream = self.lexer.tokenize(source, name, filename, state) + for ext in self.iter_extensions(): + stream = ext.filter_stream(stream) + if not isinstance(stream, TokenStream): + stream = TokenStream(stream, name, filename) + return stream + + def _generate(self, source, name, filename, defer_init=False): + """Internal hook that can be overridden to hook a different generate + method in. + + .. versionadded:: 2.5 + """ + return generate(source, self, name, filename, defer_init=defer_init) + + def _compile(self, source, filename): + """Internal hook that can be overridden to hook a different compile + method in. + + .. versionadded:: 2.5 + """ + return compile(source, filename, 'exec') + + @internalcode + def compile(self, source, name=None, filename=None, raw=False, + defer_init=False): + """Compile a node or template source code. 
The `name` parameter is + the load name of the template after it was joined using + :meth:`join_path` if necessary, not the filename on the file system. + the `filename` parameter is the estimated filename of the template on + the file system. If the template came from a database or memory this + can be omitted. + + The return value of this method is a python code object. If the `raw` + parameter is `True` the return value will be a string with python + code equivalent to the bytecode returned otherwise. This method is + mainly used internally. + + `defer_init` is use internally to aid the module code generator. This + causes the generated code to be able to import without the global + environment variable to be set. + + .. versionadded:: 2.4 + `defer_init` parameter added. + """ + source_hint = None + try: + if isinstance(source, string_types): + source_hint = source + source = self._parse(source, name, filename) + if self.optimized: + source = optimize(source, self) + source = self._generate(source, name, filename, + defer_init=defer_init) + if raw: + return source + if filename is None: + filename = '