diff --git a/Linux_i686/bin/alembic b/Linux_i686/bin/alembic deleted file mode 100755 index ad61885..0000000 --- a/Linux_i686/bin/alembic +++ /dev/null @@ -1,10 +0,0 @@ -#!/usr/bin/python -# EASY-INSTALL-ENTRY-SCRIPT: 'alembic==0.6.5','console_scripts','alembic' -__requires__ = 'alembic==0.6.5' -import sys -from pkg_resources import load_entry_point - -if __name__ == '__main__': - sys.exit( - load_entry_point('alembic==0.6.5', 'console_scripts', 'alembic')() - ) diff --git a/Linux_i686/bin/cftp b/Linux_i686/bin/cftp deleted file mode 100755 index bd7f3db..0000000 --- a/Linux_i686/bin/cftp +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/python -# Copyright (c) Twisted Matrix Laboratories. -# See LICENSE for details. - -import sys, os -extra = os.path.dirname(os.path.dirname(sys.argv[0])) -sys.path.insert(0, extra) -try: - import _preamble -except ImportError: - sys.exc_clear() -sys.path.remove(extra) - -from twisted.conch.scripts.cftp import run -run() diff --git a/Linux_i686/bin/ckeygen b/Linux_i686/bin/ckeygen deleted file mode 100755 index bf12fe3..0000000 --- a/Linux_i686/bin/ckeygen +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/python -# Copyright (c) Twisted Matrix Laboratories. -# See LICENSE for details. - -import sys, os -extra = os.path.dirname(os.path.dirname(sys.argv[0])) -sys.path.insert(0, extra) -try: - import _preamble -except ImportError: - sys.exc_clear() -sys.path.remove(extra) - -from twisted.conch.scripts.ckeygen import run -run() diff --git a/Linux_i686/bin/conch b/Linux_i686/bin/conch deleted file mode 100755 index 304dd29..0000000 --- a/Linux_i686/bin/conch +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/python -# Copyright (c) Twisted Matrix Laboratories. -# See LICENSE for details. - -import sys, os -extra = os.path.dirname(os.path.dirname(sys.argv[0])) -sys.path.insert(0, extra) -try: - import _preamble -except ImportError: - sys.exc_clear() -sys.path.remove(extra) - -from twisted.conch.scripts.conch import run -run() diff --git a/Linux_i686/bin/lore b/Linux_i686/bin/lore deleted file mode 100755 index 0b497e3..0000000 --- a/Linux_i686/bin/lore +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/python -# Copyright (c) Twisted Matrix Laboratories. -# See LICENSE for details. - -import sys, os -extra = os.path.dirname(os.path.dirname(sys.argv[0])) -sys.path.insert(0, extra) -try: - import _preamble -except ImportError: - sys.exc_clear() -sys.path.remove(extra) - -from twisted.lore.scripts.lore import run -run() - diff --git a/Linux_i686/bin/mailmail b/Linux_i686/bin/mailmail deleted file mode 100755 index 1fc026d..0000000 --- a/Linux_i686/bin/mailmail +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/python -# Copyright (c) Twisted Matrix Laboratories. -# See LICENSE for details. - -""" -This script attempts to send some email. 
-""" - -import sys, os -extra = os.path.dirname(os.path.dirname(sys.argv[0])) -sys.path.insert(0, extra) -try: - import _preamble -except ImportError: - sys.exc_clear() -sys.path.remove(extra) - -from twisted.mail.scripts import mailmail -mailmail.run() - diff --git a/Linux_i686/bin/mako-render b/Linux_i686/bin/mako-render deleted file mode 100755 index 62b2386..0000000 --- a/Linux_i686/bin/mako-render +++ /dev/null @@ -1,46 +0,0 @@ -#!/usr/bin/python - -def render(data, filename, kw): - from mako.template import Template - from mako.lookup import TemplateLookup - - lookup = TemplateLookup(["."]) - return Template(data, filename, lookup=lookup).render(**kw) - -def varsplit(var): - if "=" not in var: - return (var, "") - return var.split("=", 1) - -def main(argv=None): - from os.path import isfile - from sys import stdin - - if argv is None: - import sys - argv = sys.argv - - from optparse import OptionParser - - parser = OptionParser("usage: %prog [FILENAME]") - parser.add_option("--var", default=[], action="append", - help="variable (can be used multiple times, use name=value)") - - opts, args = parser.parse_args(argv[1:]) - if len(args) not in (0, 1): - parser.error("wrong number of arguments") # Will exit - - if (len(args) == 0) or (args[0] == "-"): - fo = stdin - else: - filename = args[0] - if not isfile(filename): - raise SystemExit("error: can't find %s" % filename) - fo = open(filename) - - kw = dict([varsplit(var) for var in opts.var]) - data = fo.read() - print(render(data, filename, kw)) - -if __name__ == "__main__": - main() diff --git a/Linux_i686/bin/manhole b/Linux_i686/bin/manhole deleted file mode 100755 index ff66286..0000000 --- a/Linux_i686/bin/manhole +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/python -# Copyright (c) Twisted Matrix Laboratories. -# See LICENSE for details. - -""" -This script runs GtkManhole, a client for Twisted.Manhole -""" -import sys - -try: - import _preamble -except ImportError: - sys.exc_clear() - -from twisted.scripts import manhole -manhole.run() diff --git a/Linux_i686/bin/pyhtmlizer b/Linux_i686/bin/pyhtmlizer deleted file mode 100755 index 430f788..0000000 --- a/Linux_i686/bin/pyhtmlizer +++ /dev/null @@ -1,12 +0,0 @@ -#!/usr/bin/python -# Copyright (c) Twisted Matrix Laboratories. -# See LICENSE for details. -import sys - -try: - import _preamble -except ImportError: - sys.exc_clear() - -from twisted.scripts.htmlizer import run -run() diff --git a/Linux_i686/bin/tap2deb b/Linux_i686/bin/tap2deb deleted file mode 100755 index 3f90d25..0000000 --- a/Linux_i686/bin/tap2deb +++ /dev/null @@ -1,16 +0,0 @@ -#!/usr/bin/python -# Copyright (c) Twisted Matrix Laboratories. -# See LICENSE for details. - -""" -tap2deb -""" -import sys - -try: - import _preamble -except ImportError: - sys.exc_clear() - -from twisted.scripts import tap2deb -tap2deb.run() diff --git a/Linux_i686/bin/tap2rpm b/Linux_i686/bin/tap2rpm deleted file mode 100755 index 3667858..0000000 --- a/Linux_i686/bin/tap2rpm +++ /dev/null @@ -1,19 +0,0 @@ -#!/usr/bin/python -# Copyright (c) Twisted Matrix Laboratories. -# See LICENSE for details. 
- -# based off the tap2deb code -# tap2rpm built by Sean Reifschneider, - -""" -tap2rpm -""" -import sys - -try: - import _preamble -except ImportError: - sys.exc_clear() - -from twisted.scripts import tap2rpm -tap2rpm.run() diff --git a/Linux_i686/bin/tapconvert b/Linux_i686/bin/tapconvert deleted file mode 100755 index fb7fe59..0000000 --- a/Linux_i686/bin/tapconvert +++ /dev/null @@ -1,12 +0,0 @@ -#!/usr/bin/python -# Copyright (c) Twisted Matrix Laboratories. -# See LICENSE for details. -import sys - -try: - import _preamble -except ImportError: - sys.exc_clear() - -from twisted.scripts.tapconvert import run -run() diff --git a/Linux_i686/bin/tkconch b/Linux_i686/bin/tkconch deleted file mode 100755 index 5b123a7..0000000 --- a/Linux_i686/bin/tkconch +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/python -# Copyright (c) Twisted Matrix Laboratories. -# See LICENSE for details. - -import sys, os -extra = os.path.dirname(os.path.dirname(sys.argv[0])) -sys.path.insert(0, extra) -try: - import _preamble -except ImportError: - sys.exc_clear() -sys.path.remove(extra) - -from twisted.conch.scripts.tkconch import run -run() diff --git a/Linux_i686/bin/trial b/Linux_i686/bin/trial deleted file mode 100755 index 64a38cf..0000000 --- a/Linux_i686/bin/trial +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/python -# Copyright (c) Twisted Matrix Laboratories. -# See LICENSE for details. -import os, sys - -try: - import _preamble -except ImportError: - sys.exc_clear() - -# begin chdir armor -sys.path[:] = map(os.path.abspath, sys.path) -# end chdir armor - -sys.path.insert(0, os.path.abspath(os.getcwd())) - -from twisted.scripts.trial import run -run() diff --git a/Linux_i686/bin/twistd b/Linux_i686/bin/twistd deleted file mode 100755 index 8cf908d..0000000 --- a/Linux_i686/bin/twistd +++ /dev/null @@ -1,14 +0,0 @@ -#!/usr/bin/python -# Copyright (c) Twisted Matrix Laboratories. -# See LICENSE for details. -import os, sys - -try: - import _preamble -except ImportError: - sys.exc_clear() - -sys.path.insert(0, os.path.abspath(os.getcwd())) - -from twisted.scripts.twistd import run -run() diff --git a/Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/PKG-INFO b/Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/PKG-INFO deleted file mode 100644 index 1ff4f1a..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/PKG-INFO +++ /dev/null @@ -1,58 +0,0 @@ -Metadata-Version: 1.1 -Name: Flask -Version: 0.10.1 -Summary: A microframework based on Werkzeug, Jinja2 and good intentions -Home-page: http://github.com/mitsuhiko/flask/ -Author: Armin Ronacher -Author-email: armin.ronacher@active-4.com -License: BSD -Description: - Flask - ----- - - Flask is a microframework for Python based on Werkzeug, Jinja 2 and good - intentions. And before you ask: It's BSD licensed! - - Flask is Fun - ```````````` - - .. code:: python - - from flask import Flask - app = Flask(__name__) - - @app.route("/") - def hello(): - return "Hello World!" - - if __name__ == "__main__": - app.run() - - And Easy to Setup - ````````````````` - - .. 
code:: bash - - $ pip install Flask - $ python hello.py - * Running on http://localhost:5000/ - - Links - ````` - - * `website `_ - * `documentation `_ - * `development version - `_ - - -Platform: any -Classifier: Development Status :: 4 - Beta -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Classifier: Topic :: Software Development :: Libraries :: Python Modules diff --git a/Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/SOURCES.txt b/Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/SOURCES.txt deleted file mode 100644 index e326cfc..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/SOURCES.txt +++ /dev/null @@ -1,238 +0,0 @@ -AUTHORS -CHANGES -LICENSE -MANIFEST.in -Makefile -README -run-tests.py -setup.cfg -setup.py -Flask.egg-info/PKG-INFO -Flask.egg-info/SOURCES.txt -Flask.egg-info/dependency_links.txt -Flask.egg-info/not-zip-safe -Flask.egg-info/requires.txt -Flask.egg-info/top_level.txt -artwork/.DS_Store -artwork/LICENSE -artwork/logo-full.svg -docs/.gitignore -docs/Makefile -docs/advanced_foreword.rst -docs/api.rst -docs/appcontext.rst -docs/becomingbig.rst -docs/blueprints.rst -docs/changelog.rst -docs/conf.py -docs/config.rst -docs/contents.rst.inc -docs/design.rst -docs/errorhandling.rst -docs/extensiondev.rst -docs/extensions.rst -docs/flaskdocext.py -docs/flaskext.py -docs/flaskstyle.sty -docs/foreword.rst -docs/htmlfaq.rst -docs/index.rst -docs/installation.rst -docs/latexindex.rst -docs/license.rst -docs/logo.pdf -docs/make.bat -docs/python3.rst -docs/quickstart.rst -docs/reqcontext.rst -docs/security.rst -docs/shell.rst -docs/signals.rst -docs/styleguide.rst -docs/templating.rst -docs/testing.rst -docs/unicode.rst -docs/upgrading.rst -docs/views.rst -docs/_static/debugger.png -docs/_static/flask.png -docs/_static/flaskr.png -docs/_static/logo-full.png -docs/_static/no.png -docs/_static/touch-icon.png -docs/_static/yes.png -docs/_templates/sidebarintro.html -docs/_templates/sidebarlogo.html -docs/_themes/.git -docs/_themes/.gitignore -docs/_themes/LICENSE -docs/_themes/README -docs/_themes/flask_theme_support.py -docs/_themes/flask/layout.html -docs/_themes/flask/relations.html -docs/_themes/flask/theme.conf -docs/_themes/flask/static/flasky.css_t -docs/_themes/flask/static/small_flask.css -docs/_themes/flask_small/layout.html -docs/_themes/flask_small/theme.conf -docs/_themes/flask_small/static/flasky.css_t -docs/deploying/cgi.rst -docs/deploying/fastcgi.rst -docs/deploying/index.rst -docs/deploying/mod_wsgi.rst -docs/deploying/uwsgi.rst -docs/deploying/wsgi-standalone.rst -docs/patterns/apierrors.rst -docs/patterns/appdispatch.rst -docs/patterns/appfactories.rst -docs/patterns/caching.rst -docs/patterns/celery.rst -docs/patterns/deferredcallbacks.rst -docs/patterns/distribute.rst -docs/patterns/errorpages.rst -docs/patterns/fabric.rst -docs/patterns/favicon.rst -docs/patterns/fileuploads.rst -docs/patterns/flashing.rst -docs/patterns/index.rst -docs/patterns/jquery.rst -docs/patterns/lazyloading.rst -docs/patterns/methodoverrides.rst -docs/patterns/mongokit.rst -docs/patterns/packages.rst -docs/patterns/requestchecksum.rst -docs/patterns/sqlalchemy.rst -docs/patterns/sqlite3.rst -docs/patterns/streaming.rst 
-docs/patterns/templateinheritance.rst -docs/patterns/urlprocessors.rst -docs/patterns/viewdecorators.rst -docs/patterns/wtforms.rst -docs/tutorial/css.rst -docs/tutorial/dbcon.rst -docs/tutorial/dbinit.rst -docs/tutorial/folders.rst -docs/tutorial/index.rst -docs/tutorial/introduction.rst -docs/tutorial/schema.rst -docs/tutorial/setup.rst -docs/tutorial/templates.rst -docs/tutorial/testing.rst -docs/tutorial/views.rst -examples/.DS_Store -examples/blueprintexample/blueprintexample.py -examples/blueprintexample/blueprintexample_test.py -examples/blueprintexample/simple_page/__init__.py -examples/blueprintexample/simple_page/simple_page.py -examples/blueprintexample/simple_page/templates/pages/hello.html -examples/blueprintexample/simple_page/templates/pages/index.html -examples/blueprintexample/simple_page/templates/pages/layout.html -examples/blueprintexample/simple_page/templates/pages/world.html -examples/flaskr/README -examples/flaskr/flaskr.py -examples/flaskr/flaskr_tests.py -examples/flaskr/schema.sql -examples/flaskr/static/style.css -examples/flaskr/templates/layout.html -examples/flaskr/templates/login.html -examples/flaskr/templates/show_entries.html -examples/jqueryexample/jqueryexample.py -examples/jqueryexample/templates/index.html -examples/jqueryexample/templates/layout.html -examples/minitwit/README -examples/minitwit/minitwit.py -examples/minitwit/minitwit_tests.py -examples/minitwit/schema.sql -examples/minitwit/static/style.css -examples/minitwit/templates/layout.html -examples/minitwit/templates/login.html -examples/minitwit/templates/register.html -examples/minitwit/templates/timeline.html -examples/persona/.DS_Store -examples/persona/persona.py -examples/persona/static/.DS_Store -examples/persona/static/persona.js -examples/persona/static/spinner.png -examples/persona/static/style.css -examples/persona/templates/index.html -examples/persona/templates/layout.html -flask/__init__.py -flask/_compat.py -flask/app.py -flask/blueprints.py -flask/config.py -flask/ctx.py -flask/debughelpers.py -flask/exthook.py -flask/globals.py -flask/helpers.py -flask/json.py -flask/logging.py -flask/module.py -flask/sessions.py -flask/signals.py -flask/templating.py -flask/testing.py -flask/views.py -flask/wrappers.py -flask/ext/__init__.py -flask/testsuite/__init__.py -flask/testsuite/appctx.py -flask/testsuite/basic.py -flask/testsuite/blueprints.py -flask/testsuite/config.py -flask/testsuite/deprecations.py -flask/testsuite/examples.py -flask/testsuite/ext.py -flask/testsuite/helpers.py -flask/testsuite/regression.py -flask/testsuite/reqctx.py -flask/testsuite/signals.py -flask/testsuite/subclassing.py -flask/testsuite/templating.py -flask/testsuite/testing.py -flask/testsuite/views.py -flask/testsuite/static/index.html -flask/testsuite/templates/_macro.html -flask/testsuite/templates/context_template.html -flask/testsuite/templates/escaping_template.html -flask/testsuite/templates/mail.txt -flask/testsuite/templates/simple_template.html -flask/testsuite/templates/template_filter.html -flask/testsuite/templates/template_test.html -flask/testsuite/templates/nested/nested.txt -flask/testsuite/test_apps/config_module_app.py -flask/testsuite/test_apps/flask_newext_simple.py -flask/testsuite/test_apps/importerror.py -flask/testsuite/test_apps/main_app.py -flask/testsuite/test_apps/blueprintapp/__init__.py -flask/testsuite/test_apps/blueprintapp/apps/__init__.py -flask/testsuite/test_apps/blueprintapp/apps/admin/__init__.py -flask/testsuite/test_apps/blueprintapp/apps/admin/static/test.txt 
-flask/testsuite/test_apps/blueprintapp/apps/admin/static/css/test.css -flask/testsuite/test_apps/blueprintapp/apps/admin/templates/admin/index.html -flask/testsuite/test_apps/blueprintapp/apps/frontend/__init__.py -flask/testsuite/test_apps/blueprintapp/apps/frontend/templates/frontend/index.html -flask/testsuite/test_apps/config_package_app/__init__.py -flask/testsuite/test_apps/flask_broken/__init__.py -flask/testsuite/test_apps/flask_broken/b.py -flask/testsuite/test_apps/flask_newext_package/__init__.py -flask/testsuite/test_apps/flask_newext_package/submodule.py -flask/testsuite/test_apps/flaskext/__init__.py -flask/testsuite/test_apps/flaskext/oldext_simple.py -flask/testsuite/test_apps/flaskext/oldext_package/__init__.py -flask/testsuite/test_apps/flaskext/oldext_package/submodule.py -flask/testsuite/test_apps/lib/python2.5/site-packages/SiteEgg.egg -flask/testsuite/test_apps/lib/python2.5/site-packages/site_app.py -flask/testsuite/test_apps/lib/python2.5/site-packages/site_package/__init__.py -flask/testsuite/test_apps/moduleapp/__init__.py -flask/testsuite/test_apps/moduleapp/apps/__init__.py -flask/testsuite/test_apps/moduleapp/apps/admin/__init__.py -flask/testsuite/test_apps/moduleapp/apps/admin/static/test.txt -flask/testsuite/test_apps/moduleapp/apps/admin/static/css/test.css -flask/testsuite/test_apps/moduleapp/apps/admin/templates/index.html -flask/testsuite/test_apps/moduleapp/apps/frontend/__init__.py -flask/testsuite/test_apps/moduleapp/apps/frontend/templates/index.html -flask/testsuite/test_apps/path/installed_package/__init__.py -flask/testsuite/test_apps/subdomaintestmodule/__init__.py -flask/testsuite/test_apps/subdomaintestmodule/static/hello.txt \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/installed-files.txt b/Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/installed-files.txt deleted file mode 100644 index 5b4a661..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/installed-files.txt +++ /dev/null @@ -1,148 +0,0 @@ -../flask/wrappers.py -../flask/_compat.py -../flask/templating.py -../flask/helpers.py -../flask/ctx.py -../flask/views.py -../flask/sessions.py -../flask/blueprints.py -../flask/json.py -../flask/module.py -../flask/signals.py -../flask/logging.py -../flask/globals.py -../flask/__init__.py -../flask/debughelpers.py -../flask/testing.py -../flask/config.py -../flask/app.py -../flask/exthook.py -../flask/ext/__init__.py -../flask/testsuite/deprecations.py -../flask/testsuite/regression.py -../flask/testsuite/ext.py -../flask/testsuite/templating.py -../flask/testsuite/helpers.py -../flask/testsuite/views.py -../flask/testsuite/blueprints.py -../flask/testsuite/subclassing.py -../flask/testsuite/signals.py -../flask/testsuite/examples.py -../flask/testsuite/reqctx.py -../flask/testsuite/__init__.py -../flask/testsuite/basic.py -../flask/testsuite/testing.py -../flask/testsuite/config.py -../flask/testsuite/appctx.py -../flask/testsuite/static/index.html -../flask/testsuite/templates/_macro.html -../flask/testsuite/templates/context_template.html -../flask/testsuite/templates/escaping_template.html -../flask/testsuite/templates/mail.txt -../flask/testsuite/templates/simple_template.html -../flask/testsuite/templates/template_filter.html -../flask/testsuite/templates/template_test.html -../flask/testsuite/templates/nested/nested.txt -../flask/testsuite/test_apps/config_module_app.py -../flask/testsuite/test_apps/flask_newext_simple.py 
-../flask/testsuite/test_apps/importerror.py -../flask/testsuite/test_apps/main_app.py -../flask/testsuite/test_apps/blueprintapp/__init__.py -../flask/testsuite/test_apps/blueprintapp/apps/__init__.py -../flask/testsuite/test_apps/blueprintapp/apps/admin/__init__.py -../flask/testsuite/test_apps/blueprintapp/apps/admin/static/test.txt -../flask/testsuite/test_apps/blueprintapp/apps/admin/static/css/test.css -../flask/testsuite/test_apps/blueprintapp/apps/admin/templates/admin/index.html -../flask/testsuite/test_apps/blueprintapp/apps/frontend/__init__.py -../flask/testsuite/test_apps/blueprintapp/apps/frontend/templates/frontend/index.html -../flask/testsuite/test_apps/config_package_app/__init__.py -../flask/testsuite/test_apps/flask_broken/__init__.py -../flask/testsuite/test_apps/flask_broken/b.py -../flask/testsuite/test_apps/flask_newext_package/__init__.py -../flask/testsuite/test_apps/flask_newext_package/submodule.py -../flask/testsuite/test_apps/flaskext/__init__.py -../flask/testsuite/test_apps/flaskext/oldext_simple.py -../flask/testsuite/test_apps/flaskext/oldext_package/__init__.py -../flask/testsuite/test_apps/flaskext/oldext_package/submodule.py -../flask/testsuite/test_apps/lib/python2.5/site-packages/SiteEgg.egg -../flask/testsuite/test_apps/lib/python2.5/site-packages/site_app.py -../flask/testsuite/test_apps/lib/python2.5/site-packages/site_package/__init__.py -../flask/testsuite/test_apps/moduleapp/__init__.py -../flask/testsuite/test_apps/moduleapp/apps/__init__.py -../flask/testsuite/test_apps/moduleapp/apps/admin/__init__.py -../flask/testsuite/test_apps/moduleapp/apps/admin/static/test.txt -../flask/testsuite/test_apps/moduleapp/apps/admin/static/css/test.css -../flask/testsuite/test_apps/moduleapp/apps/admin/templates/index.html -../flask/testsuite/test_apps/moduleapp/apps/frontend/__init__.py -../flask/testsuite/test_apps/moduleapp/apps/frontend/templates/index.html -../flask/testsuite/test_apps/path/installed_package/__init__.py -../flask/testsuite/test_apps/subdomaintestmodule/__init__.py -../flask/testsuite/test_apps/subdomaintestmodule/static/hello.txt -../flask/wrappers.pyc -../flask/_compat.pyc -../flask/templating.pyc -../flask/helpers.pyc -../flask/ctx.pyc -../flask/views.pyc -../flask/sessions.pyc -../flask/blueprints.pyc -../flask/json.pyc -../flask/module.pyc -../flask/signals.pyc -../flask/logging.pyc -../flask/globals.pyc -../flask/__init__.pyc -../flask/debughelpers.pyc -../flask/testing.pyc -../flask/config.pyc -../flask/app.pyc -../flask/exthook.pyc -../flask/ext/__init__.pyc -../flask/testsuite/deprecations.pyc -../flask/testsuite/regression.pyc -../flask/testsuite/ext.pyc -../flask/testsuite/templating.pyc -../flask/testsuite/helpers.pyc -../flask/testsuite/views.pyc -../flask/testsuite/blueprints.pyc -../flask/testsuite/subclassing.pyc -../flask/testsuite/signals.pyc -../flask/testsuite/examples.pyc -../flask/testsuite/reqctx.pyc -../flask/testsuite/__init__.pyc -../flask/testsuite/basic.pyc -../flask/testsuite/testing.pyc -../flask/testsuite/config.pyc -../flask/testsuite/appctx.pyc -../flask/testsuite/test_apps/config_module_app.pyc -../flask/testsuite/test_apps/flask_newext_simple.pyc -../flask/testsuite/test_apps/importerror.pyc -../flask/testsuite/test_apps/main_app.pyc -../flask/testsuite/test_apps/blueprintapp/__init__.pyc -../flask/testsuite/test_apps/blueprintapp/apps/__init__.pyc -../flask/testsuite/test_apps/blueprintapp/apps/admin/__init__.pyc -../flask/testsuite/test_apps/blueprintapp/apps/frontend/__init__.pyc 
-../flask/testsuite/test_apps/config_package_app/__init__.pyc -../flask/testsuite/test_apps/flask_broken/__init__.pyc -../flask/testsuite/test_apps/flask_broken/b.pyc -../flask/testsuite/test_apps/flask_newext_package/__init__.pyc -../flask/testsuite/test_apps/flask_newext_package/submodule.pyc -../flask/testsuite/test_apps/flaskext/__init__.pyc -../flask/testsuite/test_apps/flaskext/oldext_simple.pyc -../flask/testsuite/test_apps/flaskext/oldext_package/__init__.pyc -../flask/testsuite/test_apps/flaskext/oldext_package/submodule.pyc -../flask/testsuite/test_apps/lib/python2.5/site-packages/site_app.pyc -../flask/testsuite/test_apps/lib/python2.5/site-packages/site_package/__init__.pyc -../flask/testsuite/test_apps/moduleapp/__init__.pyc -../flask/testsuite/test_apps/moduleapp/apps/__init__.pyc -../flask/testsuite/test_apps/moduleapp/apps/admin/__init__.pyc -../flask/testsuite/test_apps/moduleapp/apps/frontend/__init__.pyc -../flask/testsuite/test_apps/path/installed_package/__init__.pyc -../flask/testsuite/test_apps/subdomaintestmodule/__init__.pyc -./ -requires.txt -SOURCES.txt -dependency_links.txt -PKG-INFO -not-zip-safe -top_level.txt diff --git a/Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/requires.txt b/Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/requires.txt deleted file mode 100644 index a7281e1..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/requires.txt +++ /dev/null @@ -1,3 +0,0 @@ -Werkzeug>=0.7 -Jinja2>=2.4 -itsdangerous>=0.21 \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/top_level.txt b/Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/top_level.txt deleted file mode 100644 index 7e10602..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -flask diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/PKG-INFO b/Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/PKG-INFO deleted file mode 100644 index e35f909..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/PKG-INFO +++ /dev/null @@ -1,22 +0,0 @@ -Metadata-Version: 1.1 -Name: Flask-Migrate -Version: 1.2.0 -Summary: SQLAlchemy database migrations for Flask applications using Alembic -Home-page: http://github.com/miguelgrinberg/flask-migrate/ -Author: Miguel Grinberg -Author-email: miguelgrinberg50@gmail.com -License: MIT -Description: - Flask-Migrate - -------------- - - SQLAlchemy database migrations for Flask applications using Alembic. 
- -Platform: any -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Classifier: Topic :: Software Development :: Libraries :: Python Modules diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/SOURCES.txt b/Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/SOURCES.txt deleted file mode 100644 index e0e8111..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/SOURCES.txt +++ /dev/null @@ -1,24 +0,0 @@ -LICENSE -MANIFEST.in -README.md -setup.cfg -setup.py -Flask_Migrate.egg-info/PKG-INFO -Flask_Migrate.egg-info/SOURCES.txt -Flask_Migrate.egg-info/dependency_links.txt -Flask_Migrate.egg-info/not-zip-safe -Flask_Migrate.egg-info/requires.txt -Flask_Migrate.egg-info/top_level.txt -flask_migrate/__init__.py -flask_migrate/templates/flask/README -flask_migrate/templates/flask/alembic.ini.mako -flask_migrate/templates/flask/env.py -flask_migrate/templates/flask/script.py.mako -tests/__init__.py -tests/__init__.pyc -tests/app.py -tests/app.pyc -tests/app2.py -tests/test_migrate.py -tests/test_migrate.pyc -tests/test_migrate_custom_directory.py \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/installed-files.txt b/Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/installed-files.txt deleted file mode 100644 index 673345e..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/installed-files.txt +++ /dev/null @@ -1,14 +0,0 @@ -../flask_migrate/__init__.py -../flask_migrate/templates/flask/README -../flask_migrate/templates/flask/alembic.ini.mako -../flask_migrate/templates/flask/env.py -../flask_migrate/templates/flask/script.py.mako -../flask_migrate/__init__.pyc -../flask_migrate/templates/flask/env.pyc -./ -requires.txt -SOURCES.txt -dependency_links.txt -PKG-INFO -not-zip-safe -top_level.txt diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/requires.txt b/Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/requires.txt deleted file mode 100644 index 0426413..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/requires.txt +++ /dev/null @@ -1,4 +0,0 @@ -Flask>=0.9 -Flask-SQLAlchemy>=1.0 -alembic>=0.6 -Flask-Script>=0.6 \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/top_level.txt b/Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/top_level.txt deleted file mode 100644 index 0652762..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -flask_migrate diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/PKG-INFO b/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/PKG-INFO deleted file mode 100644 index 45e77d9..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/PKG-INFO +++ /dev/null @@ -1,30 +0,0 @@ -Metadata-Version: 1.1 -Name: Flask-SQLAlchemy -Version: 1.0 -Summary: Adds SQLAlchemy support to your Flask application -Home-page: http://github.com/mitsuhiko/flask-sqlalchemy -Author: Armin Ronacher -Author-email: armin.ronacher@active-4.com -License: BSD -Description: - 
Flask-SQLAlchemy - ---------------- - - Adds SQLAlchemy support to your Flask application. - - Links - ````` - - * `documentation `_ - * `development version - `_ - - -Platform: any -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Classifier: Topic :: Software Development :: Libraries :: Python Modules diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/SOURCES.txt b/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/SOURCES.txt deleted file mode 100644 index 298e2fe..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/SOURCES.txt +++ /dev/null @@ -1,35 +0,0 @@ -CHANGES -LICENSE -MANIFEST.in -README -setup.cfg -setup.py -test_sqlalchemy.py -Flask_SQLAlchemy.egg-info/PKG-INFO -Flask_SQLAlchemy.egg-info/SOURCES.txt -Flask_SQLAlchemy.egg-info/dependency_links.txt -Flask_SQLAlchemy.egg-info/not-zip-safe -Flask_SQLAlchemy.egg-info/requires.txt -Flask_SQLAlchemy.egg-info/top_level.txt -docs/Makefile -docs/api.rst -docs/binds.rst -docs/changelog.rst -docs/conf.py -docs/config.rst -docs/contents.rst.inc -docs/contexts.rst -docs/flaskstyle.sty -docs/index.rst -docs/logo.pdf -docs/make.bat -docs/models.rst -docs/queries.rst -docs/quickstart.rst -docs/signals.rst -docs/_static/flask-sqlalchemy-small.png -docs/_static/flask-sqlalchemy.png -docs/_templates/sidebarintro.html -docs/_templates/sidebarlogo.html -flask_sqlalchemy/__init__.py -flask_sqlalchemy/_compat.py \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/installed-files.txt b/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/installed-files.txt deleted file mode 100644 index ba5a715..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/installed-files.txt +++ /dev/null @@ -1,11 +0,0 @@ -../flask_sqlalchemy/_compat.py -../flask_sqlalchemy/__init__.py -../flask_sqlalchemy/_compat.pyc -../flask_sqlalchemy/__init__.pyc -./ -requires.txt -SOURCES.txt -dependency_links.txt -PKG-INFO -not-zip-safe -top_level.txt diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/not-zip-safe b/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/not-zip-safe deleted file mode 100644 index 8b13789..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/not-zip-safe +++ /dev/null @@ -1 +0,0 @@ - diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/requires.txt b/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/requires.txt deleted file mode 100644 index d07e166..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/requires.txt +++ /dev/null @@ -1,3 +0,0 @@ -setuptools -Flask>=0.10 -SQLAlchemy \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/top_level.txt b/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/top_level.txt deleted file mode 100644 index 8a5538e..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Flask_SQLAlchemy-1.0.egg-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -flask_sqlalchemy diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/PKG-INFO 
b/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/PKG-INFO deleted file mode 100644 index db853cd..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/PKG-INFO +++ /dev/null @@ -1,35 +0,0 @@ -Metadata-Version: 1.1 -Name: Flask-Script -Version: 2.0.3 -Summary: Scripting support for Flask -Home-page: http://github.com/smurfix/flask-script -Author: Matthias Urlichs -Author-email: matthias@urlichs.de -License: BSD -Download-URL: https://github.com/smurfix/flask-script/tarball/v2.0.3 -Description: - Flask-Script - -------------- - - Flask support for writing external scripts. - - Links - ````` - - * `documentation `_ - - - -Platform: any -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.3 -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Classifier: Topic :: Software Development :: Libraries :: Python Modules diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/SOURCES.txt b/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/SOURCES.txt deleted file mode 100644 index 0202fcb..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/SOURCES.txt +++ /dev/null @@ -1,29 +0,0 @@ -LICENSE -MANIFEST.in -README.rst -setup.cfg -setup.py -tests.py -Flask_Script.egg-info/PKG-INFO -Flask_Script.egg-info/SOURCES.txt -Flask_Script.egg-info/dependency_links.txt -Flask_Script.egg-info/not-zip-safe -Flask_Script.egg-info/requires.txt -Flask_Script.egg-info/top_level.txt -docs/Makefile -docs/conf.py -docs/index.rst -docs/make.bat -docs/_static/flask-script.png -docs/_static/index.html -docs/_themes/README -docs/_themes/flask_theme_support.py -docs/_themes/flask/theme.conf -docs/_themes/flask/static/flasky.css_t -docs/_themes/flask_small/layout.html -docs/_themes/flask_small/theme.conf -docs/_themes/flask_small/static/flasky.css_t -flask_script/__init__.py -flask_script/_compat.py -flask_script/cli.py -flask_script/commands.py \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/dependency_links.txt b/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/dependency_links.txt deleted file mode 100644 index 8b13789..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/dependency_links.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/installed-files.txt b/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/installed-files.txt deleted file mode 100644 index 967fb02..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/installed-files.txt +++ /dev/null @@ -1,15 +0,0 @@ -../flask_script/cli.py -../flask_script/_compat.py -../flask_script/commands.py -../flask_script/__init__.py -../flask_script/cli.pyc -../flask_script/_compat.pyc -../flask_script/commands.pyc -../flask_script/__init__.pyc -./ -requires.txt -SOURCES.txt -dependency_links.txt -PKG-INFO -not-zip-safe -top_level.txt diff --git 
a/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/not-zip-safe b/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/not-zip-safe deleted file mode 100644 index 8b13789..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/not-zip-safe +++ /dev/null @@ -1 +0,0 @@ - diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/requires.txt b/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/requires.txt deleted file mode 100644 index 2077213..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/requires.txt +++ /dev/null @@ -1 +0,0 @@ -Flask \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/top_level.txt b/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/top_level.txt deleted file mode 100644 index efd6af0..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Flask_Script-2.0.3.egg-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -flask_script diff --git a/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/PKG-INFO b/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/PKG-INFO deleted file mode 100644 index 2c6a330..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/PKG-INFO +++ /dev/null @@ -1,55 +0,0 @@ -Metadata-Version: 1.1 -Name: Jinja2 -Version: 2.7.2 -Summary: A small but fast and easy to use stand-alone template engine written in pure python. -Home-page: http://jinja.pocoo.org/ -Author: Armin Ronacher -Author-email: armin.ronacher@active-4.com -License: BSD -Description: - Jinja2 - ~~~~~~ - - Jinja2 is a template engine written in pure Python. It provides a - `Django`_ inspired non-XML syntax but supports inline expressions and - an optional `sandboxed`_ environment. - - Nutshell - -------- - - Here a small example of a Jinja template:: - - {% extends 'base.html' %} - {% block title %}Memberlist{% endblock %} - {% block content %} - - {% endblock %} - - Philosophy - ---------- - - Application logic is for the controller but don't try to make the life - for the template designer too hard by giving him too few functionality. - - For more informations visit the new `Jinja2 webpage`_ and `documentation`_. - - .. _sandboxed: http://en.wikipedia.org/wiki/Sandbox_(computer_security) - .. _Django: http://www.djangoproject.com/ - .. _Jinja2 webpage: http://jinja.pocoo.org/ - .. 
_documentation: http://jinja.pocoo.org/2/documentation/ - -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Text Processing :: Markup :: HTML diff --git a/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/SOURCES.txt b/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/SOURCES.txt deleted file mode 100644 index a27a9c4..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/SOURCES.txt +++ /dev/null @@ -1,126 +0,0 @@ -AUTHORS -CHANGES -LICENSE -MANIFEST.in -Makefile -README.rst -run-tests.py -setup.cfg -setup.py -Jinja2.egg-info/PKG-INFO -Jinja2.egg-info/SOURCES.txt -Jinja2.egg-info/dependency_links.txt -Jinja2.egg-info/entry_points.txt -Jinja2.egg-info/not-zip-safe -Jinja2.egg-info/requires.txt -Jinja2.egg-info/top_level.txt -artwork/jinjalogo.svg -docs/Makefile -docs/api.rst -docs/cache_extension.py -docs/changelog.rst -docs/conf.py -docs/contents.rst.inc -docs/extensions.rst -docs/faq.rst -docs/index.rst -docs/integration.rst -docs/intro.rst -docs/jinjaext.py -docs/jinjastyle.sty -docs/latexindex.rst -docs/logo.pdf -docs/sandbox.rst -docs/switching.rst -docs/templates.rst -docs/tricks.rst -docs/_static/.ignore -docs/_static/jinja-small.png -docs/_templates/sidebarintro.html -docs/_templates/sidebarlogo.html -docs/_themes/LICENSE -docs/_themes/README -docs/_themes/jinja/layout.html -docs/_themes/jinja/relations.html -docs/_themes/jinja/theme.conf -docs/_themes/jinja/static/jinja.css_t -examples/bench.py -examples/profile.py -examples/basic/cycle.py -examples/basic/debugger.py -examples/basic/inheritance.py -examples/basic/test.py -examples/basic/test_filter_and_linestatements.py -examples/basic/test_loop_filter.py -examples/basic/translate.py -examples/basic/templates/broken.html -examples/basic/templates/subbroken.html -examples/rwbench/djangoext.py -examples/rwbench/rwbench.py -examples/rwbench/django/_form.html -examples/rwbench/django/_input_field.html -examples/rwbench/django/_textarea.html -examples/rwbench/django/index.html -examples/rwbench/django/layout.html -examples/rwbench/genshi/helpers.html -examples/rwbench/genshi/index.html -examples/rwbench/genshi/layout.html -examples/rwbench/jinja/helpers.html -examples/rwbench/jinja/index.html -examples/rwbench/jinja/layout.html -examples/rwbench/mako/helpers.html -examples/rwbench/mako/index.html -examples/rwbench/mako/layout.html -ext/djangojinja2.py -ext/inlinegettext.py -ext/jinja.el -ext/Vim/jinja.vim -ext/django2jinja/django2jinja.py -ext/django2jinja/example.py -ext/django2jinja/templates/index.html -ext/django2jinja/templates/layout.html -ext/django2jinja/templates/subtemplate.html -jinja2/__init__.py -jinja2/_compat.py -jinja2/_stringdefs.py -jinja2/bccache.py -jinja2/compiler.py -jinja2/constants.py -jinja2/debug.py -jinja2/defaults.py -jinja2/environment.py -jinja2/exceptions.py -jinja2/ext.py -jinja2/filters.py -jinja2/lexer.py -jinja2/loaders.py -jinja2/meta.py -jinja2/nodes.py -jinja2/optimizer.py -jinja2/parser.py -jinja2/runtime.py -jinja2/sandbox.py -jinja2/tests.py -jinja2/utils.py -jinja2/visitor.py 
-jinja2/testsuite/__init__.py -jinja2/testsuite/api.py -jinja2/testsuite/bytecode_cache.py -jinja2/testsuite/core_tags.py -jinja2/testsuite/debug.py -jinja2/testsuite/doctests.py -jinja2/testsuite/ext.py -jinja2/testsuite/filters.py -jinja2/testsuite/imports.py -jinja2/testsuite/inheritance.py -jinja2/testsuite/lexnparse.py -jinja2/testsuite/loader.py -jinja2/testsuite/regression.py -jinja2/testsuite/security.py -jinja2/testsuite/tests.py -jinja2/testsuite/utils.py -jinja2/testsuite/res/__init__.py -jinja2/testsuite/res/templates/broken.html -jinja2/testsuite/res/templates/syntaxerror.html -jinja2/testsuite/res/templates/test.html -jinja2/testsuite/res/templates/foo/test.html \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/dependency_links.txt b/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/dependency_links.txt deleted file mode 100644 index 8b13789..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/dependency_links.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/entry_points.txt b/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/entry_points.txt deleted file mode 100644 index 32e6b75..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/entry_points.txt +++ /dev/null @@ -1,4 +0,0 @@ - - [babel.extractors] - jinja2 = jinja2.ext:babel_extract[i18n] - \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/installed-files.txt b/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/installed-files.txt deleted file mode 100644 index b958248..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/installed-files.txt +++ /dev/null @@ -1,92 +0,0 @@ -../jinja2/bccache.py -../jinja2/_compat.py -../jinja2/ext.py -../jinja2/defaults.py -../jinja2/meta.py -../jinja2/_stringdefs.py -../jinja2/nodes.py -../jinja2/runtime.py -../jinja2/exceptions.py -../jinja2/lexer.py -../jinja2/__init__.py -../jinja2/visitor.py -../jinja2/optimizer.py -../jinja2/sandbox.py -../jinja2/debug.py -../jinja2/filters.py -../jinja2/constants.py -../jinja2/tests.py -../jinja2/utils.py -../jinja2/compiler.py -../jinja2/parser.py -../jinja2/loaders.py -../jinja2/environment.py -../jinja2/testsuite/api.py -../jinja2/testsuite/regression.py -../jinja2/testsuite/core_tags.py -../jinja2/testsuite/inheritance.py -../jinja2/testsuite/ext.py -../jinja2/testsuite/security.py -../jinja2/testsuite/doctests.py -../jinja2/testsuite/bytecode_cache.py -../jinja2/testsuite/imports.py -../jinja2/testsuite/lexnparse.py -../jinja2/testsuite/__init__.py -../jinja2/testsuite/debug.py -../jinja2/testsuite/filters.py -../jinja2/testsuite/tests.py -../jinja2/testsuite/loader.py -../jinja2/testsuite/utils.py -../jinja2/testsuite/res/__init__.py -../jinja2/testsuite/res/templates/broken.html -../jinja2/testsuite/res/templates/syntaxerror.html -../jinja2/testsuite/res/templates/test.html -../jinja2/testsuite/res/templates/foo/test.html -../jinja2/bccache.pyc -../jinja2/_compat.pyc -../jinja2/ext.pyc -../jinja2/defaults.pyc -../jinja2/meta.pyc -../jinja2/_stringdefs.pyc -../jinja2/nodes.pyc -../jinja2/runtime.pyc -../jinja2/exceptions.pyc -../jinja2/lexer.pyc -../jinja2/__init__.pyc -../jinja2/visitor.pyc -../jinja2/optimizer.pyc -../jinja2/sandbox.pyc -../jinja2/debug.pyc -../jinja2/filters.pyc -../jinja2/constants.pyc -../jinja2/tests.pyc -../jinja2/utils.pyc -../jinja2/compiler.pyc 
-../jinja2/parser.pyc -../jinja2/loaders.pyc -../jinja2/environment.pyc -../jinja2/testsuite/api.pyc -../jinja2/testsuite/regression.pyc -../jinja2/testsuite/core_tags.pyc -../jinja2/testsuite/inheritance.pyc -../jinja2/testsuite/ext.pyc -../jinja2/testsuite/security.pyc -../jinja2/testsuite/doctests.pyc -../jinja2/testsuite/bytecode_cache.pyc -../jinja2/testsuite/imports.pyc -../jinja2/testsuite/lexnparse.pyc -../jinja2/testsuite/__init__.pyc -../jinja2/testsuite/debug.pyc -../jinja2/testsuite/filters.pyc -../jinja2/testsuite/tests.pyc -../jinja2/testsuite/loader.pyc -../jinja2/testsuite/utils.pyc -../jinja2/testsuite/res/__init__.pyc -./ -requires.txt -SOURCES.txt -entry_points.txt -dependency_links.txt -PKG-INFO -not-zip-safe -top_level.txt diff --git a/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/not-zip-safe b/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/not-zip-safe deleted file mode 100644 index 8b13789..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/not-zip-safe +++ /dev/null @@ -1 +0,0 @@ - diff --git a/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/requires.txt b/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/requires.txt deleted file mode 100644 index ccd0e92..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/requires.txt +++ /dev/null @@ -1,4 +0,0 @@ -markupsafe - -[i18n] -Babel>=0.8 \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/top_level.txt b/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/top_level.txt deleted file mode 100644 index 7f7afbf..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Jinja2-2.7.2.egg-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -jinja2 diff --git a/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/PKG-INFO b/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/PKG-INFO deleted file mode 100644 index 91b8ec7..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/PKG-INFO +++ /dev/null @@ -1,71 +0,0 @@ -Metadata-Version: 1.1 -Name: Mako -Version: 0.9.1 -Summary: A super-fast templating language that borrows the best ideas from the existing templating languages. -Home-page: http://www.makotemplates.org/ -Author: Mike Bayer -Author-email: mike@zzzcomputing.com -License: MIT -Description: ========================= - Mako Templates for Python - ========================= - - Mako is a template library written in Python. It provides a familiar, non-XML - syntax which compiles into Python modules for maximum performance. Mako's - syntax and API borrows from the best ideas of many others, including Django - templates, Cheetah, Myghty, and Genshi. Conceptually, Mako is an embedded - Python (i.e. Python Server Page) language, which refines the familiar ideas - of componentized layout and inheritance to produce one of the most - straightforward and flexible models available, while also maintaining close - ties to Python calling and scoping semantics. - - Nutshell - ======== - - :: - - <%inherit file="base.html"/> - <% - rows = [[v for v in range(0,10)] for row in range(0,10)] - %> - - % for row in rows: - ${makerow(row)} - % endfor -
- - <%def name="makerow(row)"> - - % for name in row: - ${name}\ - % endfor - - - - Philosophy - =========== - - Python is a great scripting language. Don't reinvent the wheel...your templates can handle it ! - - Documentation - ============== - - See documentation for Mako at http://www.makotemplates.org/docs/ - - License - ======== - - Mako is licensed under an MIT-style license (see LICENSE). - Other incorporated projects may be licensed under different licenses. - All licenses allow for non-commercial and commercial use. - -Keywords: templates -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content diff --git a/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/SOURCES.txt b/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/SOURCES.txt deleted file mode 100644 index 6f580c3..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/SOURCES.txt +++ /dev/null @@ -1,173 +0,0 @@ -CHANGES -LICENSE -MANIFEST.in -README.rst -distribute_setup.py -setup.cfg -setup.py -Mako.egg-info/PKG-INFO -Mako.egg-info/SOURCES.txt -Mako.egg-info/dependency_links.txt -Mako.egg-info/entry_points.txt -Mako.egg-info/not-zip-safe -Mako.egg-info/requires.txt -Mako.egg-info/top_level.txt -doc/caching.html -doc/defs.html -doc/filtering.html -doc/genindex.html -doc/index.html -doc/inheritance.html -doc/namespaces.html -doc/runtime.html -doc/search.html -doc/searchindex.js -doc/syntax.html -doc/unicode.html -doc/usage.html -doc/_sources/caching.txt -doc/_sources/defs.txt -doc/_sources/filtering.txt -doc/_sources/index.txt -doc/_sources/inheritance.txt -doc/_sources/namespaces.txt -doc/_sources/runtime.txt -doc/_sources/syntax.txt -doc/_sources/unicode.txt -doc/_sources/usage.txt -doc/_static/basic.css -doc/_static/comment-bright.png -doc/_static/comment-close.png -doc/_static/comment.png -doc/_static/default.css -doc/_static/docs.css -doc/_static/doctools.js -doc/_static/down-pressed.png -doc/_static/down.png -doc/_static/file.png -doc/_static/jquery.js -doc/_static/makoLogo.png -doc/_static/minus.png -doc/_static/plus.png -doc/_static/pygments.css -doc/_static/searchtools.js -doc/_static/sidebar.js -doc/_static/site.css -doc/_static/underscore.js -doc/_static/up-pressed.png -doc/_static/up.png -doc/_static/websupport.js -doc/build/Makefile -doc/build/caching.rst -doc/build/conf.py -doc/build/defs.rst -doc/build/filtering.rst -doc/build/index.rst -doc/build/inheritance.rst -doc/build/namespaces.rst -doc/build/runtime.rst -doc/build/syntax.rst -doc/build/unicode.rst -doc/build/usage.rst -doc/build/builder/__init__.py -doc/build/builder/builders.py -doc/build/builder/util.py -doc/build/static/docs.css -doc/build/static/makoLogo.png -doc/build/static/site.css -doc/build/templates/base.mako -doc/build/templates/genindex.mako -doc/build/templates/layout.mako -doc/build/templates/page.mako -doc/build/templates/rtd_layout.mako -doc/build/templates/search.mako -examples/bench/basic.py -examples/bench/cheetah/footer.tmpl -examples/bench/cheetah/header.tmpl -examples/bench/cheetah/template.tmpl -examples/bench/django/templatetags/__init__.py -examples/bench/django/templatetags/bench.py 
-examples/bench/kid/base.kid -examples/bench/kid/template.kid -examples/bench/myghty/base.myt -examples/bench/myghty/template.myt -examples/wsgi/run_wsgi.py -mako/__init__.py -mako/_ast_util.py -mako/ast.py -mako/cache.py -mako/codegen.py -mako/compat.py -mako/exceptions.py -mako/filters.py -mako/lexer.py -mako/lookup.py -mako/parsetree.py -mako/pygen.py -mako/pyparser.py -mako/runtime.py -mako/template.py -mako/util.py -mako/ext/__init__.py -mako/ext/autohandler.py -mako/ext/babelplugin.py -mako/ext/beaker_cache.py -mako/ext/preprocessors.py -mako/ext/pygmentplugin.py -mako/ext/turbogears.py -scripts/mako-render -test/__init__.py -test/sample_module_namespace.py -test/test_ast.py -test/test_babelplugin.py -test/test_block.py -test/test_cache.py -test/test_call.py -test/test_decorators.py -test/test_def.py -test/test_exceptions.py -test/test_filters.py -test/test_inheritance.py -test/test_lexer.py -test/test_lookup.py -test/test_loop.py -test/test_lru.py -test/test_namespace.py -test/test_pygen.py -test/test_runtime.py -test/test_template.py -test/test_tgplugin.py -test/test_util.py -test/util.py -test/foo/__init__.py -test/foo/test_ns.py -test/templates/badbom.html -test/templates/bom.html -test/templates/bommagic.html -test/templates/chs_unicode.html -test/templates/chs_unicode_py3k.html -test/templates/chs_utf8.html -test/templates/crlf.html -test/templates/gettext.mako -test/templates/index.html -test/templates/internationalization.html -test/templates/modtest.html -test/templates/read_unicode.html -test/templates/read_unicode_py3k.html -test/templates/runtimeerr.html -test/templates/runtimeerr_py3k.html -test/templates/unicode.html -test/templates/unicode_arguments.html -test/templates/unicode_arguments_py3k.html -test/templates/unicode_code.html -test/templates/unicode_code_py3k.html -test/templates/unicode_expr.html -test/templates/unicode_expr_py3k.html -test/templates/unicode_runtime_error.html -test/templates/unicode_syntax_error.html -test/templates/foo/modtest.html.py -test/templates/othersubdir/foo.html -test/templates/subdir/incl.html -test/templates/subdir/index.html -test/templates/subdir/modtest.html -test/templates/subdir/foo/modtest.html.py \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/dependency_links.txt b/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/dependency_links.txt deleted file mode 100644 index 8b13789..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/dependency_links.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/entry_points.txt b/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/entry_points.txt deleted file mode 100644 index 3717629..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/entry_points.txt +++ /dev/null @@ -1,14 +0,0 @@ - - [python.templating.engines] - mako = mako.ext.turbogears:TGPlugin - - [pygments.lexers] - mako = mako.ext.pygmentplugin:MakoLexer - html+mako = mako.ext.pygmentplugin:MakoHtmlLexer - xml+mako = mako.ext.pygmentplugin:MakoXmlLexer - js+mako = mako.ext.pygmentplugin:MakoJavascriptLexer - css+mako = mako.ext.pygmentplugin:MakoCssLexer - - [babel.extractors] - mako = mako.ext.babelplugin:extract - \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/installed-files.txt b/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/installed-files.txt deleted file mode 100644 index 3605bd0..0000000 --- 
a/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/installed-files.txt +++ /dev/null @@ -1,55 +0,0 @@ -../mako/compat.py -../mako/ast.py -../mako/codegen.py -../mako/util.py -../mako/pygen.py -../mako/runtime.py -../mako/exceptions.py -../mako/lexer.py -../mako/lookup.py -../mako/template.py -../mako/__init__.py -../mako/_ast_util.py -../mako/pyparser.py -../mako/filters.py -../mako/parsetree.py -../mako/cache.py -../mako/ext/babelplugin.py -../mako/ext/turbogears.py -../mako/ext/preprocessors.py -../mako/ext/autohandler.py -../mako/ext/beaker_cache.py -../mako/ext/__init__.py -../mako/ext/pygmentplugin.py -../mako/compat.pyc -../mako/ast.pyc -../mako/codegen.pyc -../mako/util.pyc -../mako/pygen.pyc -../mako/runtime.pyc -../mako/exceptions.pyc -../mako/lexer.pyc -../mako/lookup.pyc -../mako/template.pyc -../mako/__init__.pyc -../mako/_ast_util.pyc -../mako/pyparser.pyc -../mako/filters.pyc -../mako/parsetree.pyc -../mako/cache.pyc -../mako/ext/babelplugin.pyc -../mako/ext/turbogears.pyc -../mako/ext/preprocessors.pyc -../mako/ext/autohandler.pyc -../mako/ext/beaker_cache.pyc -../mako/ext/__init__.pyc -../mako/ext/pygmentplugin.pyc -./ -requires.txt -SOURCES.txt -entry_points.txt -dependency_links.txt -PKG-INFO -not-zip-safe -top_level.txt -../../../../bin/mako-render diff --git a/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/not-zip-safe b/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/not-zip-safe deleted file mode 100644 index 8b13789..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/not-zip-safe +++ /dev/null @@ -1 +0,0 @@ - diff --git a/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/requires.txt b/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/requires.txt deleted file mode 100644 index 8d60d23..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/requires.txt +++ /dev/null @@ -1,4 +0,0 @@ -MarkupSafe>=0.9.2 - -[beaker] -Beaker>=1.1 \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/top_level.txt b/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/top_level.txt deleted file mode 100644 index 2951cdd..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Mako-0.9.1.egg-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -mako diff --git a/Linux_i686/lib/python2.7/site-packages/MarkupSafe-0.23.egg-info/PKG-INFO b/Linux_i686/lib/python2.7/site-packages/MarkupSafe-0.23.egg-info/PKG-INFO deleted file mode 100644 index 12aa93e..0000000 --- a/Linux_i686/lib/python2.7/site-packages/MarkupSafe-0.23.egg-info/PKG-INFO +++ /dev/null @@ -1,119 +0,0 @@ -Metadata-Version: 1.1 -Name: MarkupSafe -Version: 0.23 -Summary: Implements a XML/HTML/XHTML Markup safe string for Python -Home-page: http://github.com/mitsuhiko/markupsafe -Author: Armin Ronacher -Author-email: armin.ronacher@active-4.com -License: BSD -Description: MarkupSafe - ========== - - Implements a unicode subclass that supports HTML strings: - - >>> from markupsafe import Markup, escape - >>> escape("") - Markup(u'<script>alert(document.cookie);</script>') - >>> tmpl = Markup("%s") - >>> tmpl % "Peter > Lustig" - Markup(u'Peter > Lustig') - - If you want to make an object unicode that is not yet unicode - but don't want to lose the taint information, you can use the - `soft_unicode` function. (On Python 3 you can also use `soft_str` which - is a different name for the same function). 
- - >>> from markupsafe import soft_unicode - >>> soft_unicode(42) - u'42' - >>> soft_unicode(Markup('foo')) - Markup(u'foo') - - HTML Representations - -------------------- - - Objects can customize their HTML markup equivalent by overriding - the `__html__` function: - - >>> class Foo(object): - ... def __html__(self): - ... return 'Nice' - ... - >>> escape(Foo()) - Markup(u'Nice') - >>> Markup(Foo()) - Markup(u'Nice') - - Silent Escapes - -------------- - - Since MarkupSafe 0.10 there is now also a separate escape function - called `escape_silent` that returns an empty string for `None` for - consistency with other systems that return empty strings for `None` - when escaping (for instance Pylons' webhelpers). - - If you also want to use this for the escape method of the Markup - object, you can create your own subclass that does that:: - - from markupsafe import Markup, escape_silent as escape - - class SilentMarkup(Markup): - __slots__ = () - - @classmethod - def escape(cls, s): - return cls(escape(s)) - - New-Style String Formatting - --------------------------- - - Starting with MarkupSafe 0.21 new style string formats from Python 2.6 and - 3.x are now fully supported. Previously the escape behavior of those - functions was spotty at best. The new implementations operates under the - following algorithm: - - 1. if an object has an ``__html_format__`` method it is called as - replacement for ``__format__`` with the format specifier. It either - has to return a string or markup object. - 2. if an object has an ``__html__`` method it is called. - 3. otherwise the default format system of Python kicks in and the result - is HTML escaped. - - Here is how you can implement your own formatting:: - - class User(object): - - def __init__(self, id, username): - self.id = id - self.username = username - - def __html_format__(self, format_spec): - if format_spec == 'link': - return Markup('{1}').format( - self.id, - self.__html__(), - ) - elif format_spec: - raise ValueError('Invalid format spec') - return self.__html__() - - def __html__(self): - return Markup('{0}').format(self.username) - - And to format that user: - - >>> user = User(1, 'foo') - >>> Markup('
User: {0:link}').format(user) - Markup(u'
User: foo') - -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Text Processing :: Markup :: HTML diff --git a/Linux_i686/lib/python2.7/site-packages/MarkupSafe-0.23.egg-info/SOURCES.txt b/Linux_i686/lib/python2.7/site-packages/MarkupSafe-0.23.egg-info/SOURCES.txt deleted file mode 100644 index dfeb82b..0000000 --- a/Linux_i686/lib/python2.7/site-packages/MarkupSafe-0.23.egg-info/SOURCES.txt +++ /dev/null @@ -1,17 +0,0 @@ -AUTHORS -LICENSE -MANIFEST.in -README.rst -setup.cfg -setup.py -MarkupSafe.egg-info/PKG-INFO -MarkupSafe.egg-info/SOURCES.txt -MarkupSafe.egg-info/dependency_links.txt -MarkupSafe.egg-info/not-zip-safe -MarkupSafe.egg-info/top_level.txt -markupsafe/__init__.py -markupsafe/_compat.py -markupsafe/_constants.py -markupsafe/_native.py -markupsafe/_speedups.c -markupsafe/tests.py \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/MarkupSafe-0.23.egg-info/dependency_links.txt b/Linux_i686/lib/python2.7/site-packages/MarkupSafe-0.23.egg-info/dependency_links.txt deleted file mode 100644 index 8b13789..0000000 --- a/Linux_i686/lib/python2.7/site-packages/MarkupSafe-0.23.egg-info/dependency_links.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/Linux_i686/lib/python2.7/site-packages/MarkupSafe-0.23.egg-info/installed-files.txt b/Linux_i686/lib/python2.7/site-packages/MarkupSafe-0.23.egg-info/installed-files.txt deleted file mode 100644 index b2eff87..0000000 --- a/Linux_i686/lib/python2.7/site-packages/MarkupSafe-0.23.egg-info/installed-files.txt +++ /dev/null @@ -1,18 +0,0 @@ -../markupsafe/_compat.py -../markupsafe/_native.py -../markupsafe/__init__.py -../markupsafe/_constants.py -../markupsafe/tests.py -../markupsafe/_speedups.c -../markupsafe/_compat.pyc -../markupsafe/_native.pyc -../markupsafe/__init__.pyc -../markupsafe/_constants.pyc -../markupsafe/tests.pyc -../markupsafe/_speedups.so -./ -SOURCES.txt -dependency_links.txt -PKG-INFO -not-zip-safe -top_level.txt diff --git a/Linux_i686/lib/python2.7/site-packages/MarkupSafe-0.23.egg-info/not-zip-safe b/Linux_i686/lib/python2.7/site-packages/MarkupSafe-0.23.egg-info/not-zip-safe deleted file mode 100644 index 8b13789..0000000 --- a/Linux_i686/lib/python2.7/site-packages/MarkupSafe-0.23.egg-info/not-zip-safe +++ /dev/null @@ -1 +0,0 @@ - diff --git a/Linux_i686/lib/python2.7/site-packages/MarkupSafe-0.23.egg-info/top_level.txt b/Linux_i686/lib/python2.7/site-packages/MarkupSafe-0.23.egg-info/top_level.txt deleted file mode 100644 index 75bf729..0000000 --- a/Linux_i686/lib/python2.7/site-packages/MarkupSafe-0.23.egg-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -markupsafe diff --git a/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/PKG-INFO b/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/PKG-INFO deleted file mode 100644 index 9414e47..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/PKG-INFO +++ /dev/null @@ -1,16 +0,0 @@ -Metadata-Version: 1.1 -Name: Twisted -Version: 14.0.0 -Summary: An asynchronous networking framework written in Python -Home-page: http://twistedmatrix.com/ 
-Author: Glyph Lefkowitz -Author-email: glyph@twistedmatrix.com -License: MIT -Description: An extensible framework for Python programming, with special focus - on event-based network programming and multiprotocol integration. - -Platform: UNKNOWN -Classifier: Programming Language :: Python :: 2.6 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.3 diff --git a/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/SOURCES.txt b/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/SOURCES.txt deleted file mode 100644 index 073c9d5..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/SOURCES.txt +++ /dev/null @@ -1,846 +0,0 @@ -README -Twisted.egg-info/PKG-INFO -Twisted.egg-info/SOURCES.txt -Twisted.egg-info/dependency_links.txt -Twisted.egg-info/not-zip-safe -Twisted.egg-info/requires.txt -Twisted.egg-info/top_level.txt -bin/manhole -bin/pyhtmlizer -bin/tap2deb -bin/tap2rpm -bin/tapconvert -bin/trial -bin/twistd -bin/conch/cftp -bin/conch/ckeygen -bin/conch/conch -bin/conch/tkconch -bin/lore/lore -bin/mail/mailmail -twisted/__init__.py -twisted/_version.py -twisted/copyright.py -twisted/plugin.py -twisted/application/__init__.py -twisted/application/app.py -twisted/application/internet.py -twisted/application/reactors.py -twisted/application/service.py -twisted/application/strports.py -twisted/application/test/__init__.py -twisted/application/test/test_internet.py -twisted/conch/__init__.py -twisted/conch/_version.py -twisted/conch/avatar.py -twisted/conch/checkers.py -twisted/conch/endpoints.py -twisted/conch/error.py -twisted/conch/interfaces.py -twisted/conch/ls.py -twisted/conch/manhole.py -twisted/conch/manhole_ssh.py -twisted/conch/manhole_tap.py -twisted/conch/mixin.py -twisted/conch/recvline.py -twisted/conch/stdio.py -twisted/conch/tap.py -twisted/conch/telnet.py -twisted/conch/ttymodes.py -twisted/conch/unix.py -twisted/conch/client/__init__.py -twisted/conch/client/agent.py -twisted/conch/client/connect.py -twisted/conch/client/default.py -twisted/conch/client/direct.py -twisted/conch/client/knownhosts.py -twisted/conch/client/options.py -twisted/conch/insults/__init__.py -twisted/conch/insults/client.py -twisted/conch/insults/colors.py -twisted/conch/insults/helper.py -twisted/conch/insults/insults.py -twisted/conch/insults/text.py -twisted/conch/insults/window.py -twisted/conch/openssh_compat/__init__.py -twisted/conch/openssh_compat/factory.py -twisted/conch/openssh_compat/primes.py -twisted/conch/scripts/__init__.py -twisted/conch/scripts/cftp.py -twisted/conch/scripts/ckeygen.py -twisted/conch/scripts/conch.py -twisted/conch/scripts/tkconch.py -twisted/conch/ssh/__init__.py -twisted/conch/ssh/address.py -twisted/conch/ssh/agent.py -twisted/conch/ssh/channel.py -twisted/conch/ssh/common.py -twisted/conch/ssh/connection.py -twisted/conch/ssh/factory.py -twisted/conch/ssh/filetransfer.py -twisted/conch/ssh/forwarding.py -twisted/conch/ssh/keys.py -twisted/conch/ssh/service.py -twisted/conch/ssh/session.py -twisted/conch/ssh/sexpy.py -twisted/conch/ssh/transport.py -twisted/conch/ssh/userauth.py -twisted/conch/test/__init__.py -twisted/conch/test/keydata.py -twisted/conch/test/test_address.py -twisted/conch/test/test_agent.py -twisted/conch/test/test_cftp.py -twisted/conch/test/test_channel.py -twisted/conch/test/test_checkers.py -twisted/conch/test/test_ckeygen.py -twisted/conch/test/test_conch.py -twisted/conch/test/test_connection.py 
-twisted/conch/test/test_default.py -twisted/conch/test/test_endpoints.py -twisted/conch/test/test_filetransfer.py -twisted/conch/test/test_helper.py -twisted/conch/test/test_insults.py -twisted/conch/test/test_keys.py -twisted/conch/test/test_knownhosts.py -twisted/conch/test/test_manhole.py -twisted/conch/test/test_mixin.py -twisted/conch/test/test_openssh_compat.py -twisted/conch/test/test_recvline.py -twisted/conch/test/test_scripts.py -twisted/conch/test/test_session.py -twisted/conch/test/test_ssh.py -twisted/conch/test/test_tap.py -twisted/conch/test/test_telnet.py -twisted/conch/test/test_text.py -twisted/conch/test/test_transport.py -twisted/conch/test/test_userauth.py -twisted/conch/test/test_window.py -twisted/conch/ui/__init__.py -twisted/conch/ui/ansi.py -twisted/conch/ui/tkvt100.py -twisted/cred/__init__.py -twisted/cred/_digest.py -twisted/cred/checkers.py -twisted/cred/credentials.py -twisted/cred/error.py -twisted/cred/pamauth.py -twisted/cred/portal.py -twisted/cred/strcred.py -twisted/enterprise/__init__.py -twisted/enterprise/adbapi.py -twisted/internet/__init__.py -twisted/internet/_baseprocess.py -twisted/internet/_dumbwin32proc.py -twisted/internet/_glibbase.py -twisted/internet/_newtls.py -twisted/internet/_pollingfile.py -twisted/internet/_posixserialport.py -twisted/internet/_posixstdio.py -twisted/internet/_signals.py -twisted/internet/_ssl.py -twisted/internet/_sslverify.py -twisted/internet/_threadedselect.py -twisted/internet/_win32serialport.py -twisted/internet/_win32stdio.py -twisted/internet/abstract.py -twisted/internet/address.py -twisted/internet/base.py -twisted/internet/cfreactor.py -twisted/internet/default.py -twisted/internet/defer.py -twisted/internet/endpoints.py -twisted/internet/epollreactor.py -twisted/internet/error.py -twisted/internet/fdesc.py -twisted/internet/gireactor.py -twisted/internet/glib2reactor.py -twisted/internet/gtk2reactor.py -twisted/internet/gtk3reactor.py -twisted/internet/gtkreactor.py -twisted/internet/inotify.py -twisted/internet/interfaces.py -twisted/internet/kqreactor.py -twisted/internet/main.py -twisted/internet/pollreactor.py -twisted/internet/posixbase.py -twisted/internet/process.py -twisted/internet/protocol.py -twisted/internet/pyuisupport.py -twisted/internet/qtreactor.py -twisted/internet/reactor.py -twisted/internet/selectreactor.py -twisted/internet/serialport.py -twisted/internet/ssl.py -twisted/internet/stdio.py -twisted/internet/task.py -twisted/internet/tcp.py -twisted/internet/threads.py -twisted/internet/tksupport.py -twisted/internet/udp.py -twisted/internet/unix.py -twisted/internet/utils.py -twisted/internet/win32eventreactor.py -twisted/internet/wxreactor.py -twisted/internet/wxsupport.py -twisted/internet/iocpreactor/__init__.py -twisted/internet/iocpreactor/abstract.py -twisted/internet/iocpreactor/const.py -twisted/internet/iocpreactor/interfaces.py -twisted/internet/iocpreactor/reactor.py -twisted/internet/iocpreactor/setup.py -twisted/internet/iocpreactor/tcp.py -twisted/internet/iocpreactor/udp.py -twisted/internet/iocpreactor/iocpsupport/iocpsupport.c -twisted/internet/iocpreactor/iocpsupport/winsock_pointers.c -twisted/internet/test/__init__.py -twisted/internet/test/_posixifaces.py -twisted/internet/test/_win32ifaces.py -twisted/internet/test/connectionmixins.py -twisted/internet/test/fakeendpoint.py -twisted/internet/test/modulehelpers.py -twisted/internet/test/process_gireactornocompat.py -twisted/internet/test/process_helper.py -twisted/internet/test/reactormixins.py 
-twisted/internet/test/test_abstract.py -twisted/internet/test/test_address.py -twisted/internet/test/test_base.py -twisted/internet/test/test_baseprocess.py -twisted/internet/test/test_core.py -twisted/internet/test/test_default.py -twisted/internet/test/test_endpoints.py -twisted/internet/test/test_epollreactor.py -twisted/internet/test/test_fdset.py -twisted/internet/test/test_filedescriptor.py -twisted/internet/test/test_gireactor.py -twisted/internet/test/test_glibbase.py -twisted/internet/test/test_gtkreactor.py -twisted/internet/test/test_inlinecb.py -twisted/internet/test/test_inotify.py -twisted/internet/test/test_iocp.py -twisted/internet/test/test_main.py -twisted/internet/test/test_newtls.py -twisted/internet/test/test_pollingfile.py -twisted/internet/test/test_posixbase.py -twisted/internet/test/test_posixprocess.py -twisted/internet/test/test_process.py -twisted/internet/test/test_protocol.py -twisted/internet/test/test_qtreactor.py -twisted/internet/test/test_serialport.py -twisted/internet/test/test_sigchld.py -twisted/internet/test/test_socket.py -twisted/internet/test/test_stdio.py -twisted/internet/test/test_tcp.py -twisted/internet/test/test_threads.py -twisted/internet/test/test_time.py -twisted/internet/test/test_tls.py -twisted/internet/test/test_udp.py -twisted/internet/test/test_udp_internals.py -twisted/internet/test/test_unix.py -twisted/internet/test/test_win32events.py -twisted/lore/__init__.py -twisted/lore/_version.py -twisted/lore/default.py -twisted/lore/docbook.py -twisted/lore/htmlbook.py -twisted/lore/indexer.py -twisted/lore/latex.py -twisted/lore/lint.py -twisted/lore/lmath.py -twisted/lore/man2lore.py -twisted/lore/numberer.py -twisted/lore/process.py -twisted/lore/slides.py -twisted/lore/texi.py -twisted/lore/tree.py -twisted/lore/scripts/__init__.py -twisted/lore/scripts/lore.py -twisted/lore/test/__init__.py -twisted/lore/test/test_docbook.py -twisted/lore/test/test_latex.py -twisted/lore/test/test_lint.py -twisted/lore/test/test_lmath.py -twisted/lore/test/test_lore.py -twisted/lore/test/test_man2lore.py -twisted/lore/test/test_scripts.py -twisted/lore/test/test_slides.py -twisted/lore/test/test_texi.py -twisted/mail/__init__.py -twisted/mail/_version.py -twisted/mail/alias.py -twisted/mail/bounce.py -twisted/mail/imap4.py -twisted/mail/mail.py -twisted/mail/maildir.py -twisted/mail/pb.py -twisted/mail/pop3.py -twisted/mail/pop3client.py -twisted/mail/protocols.py -twisted/mail/relay.py -twisted/mail/relaymanager.py -twisted/mail/smtp.py -twisted/mail/tap.py -twisted/mail/scripts/__init__.py -twisted/mail/scripts/mailmail.py -twisted/mail/test/__init__.py -twisted/mail/test/pop3testserver.py -twisted/mail/test/test_bounce.py -twisted/mail/test/test_imap.py -twisted/mail/test/test_mail.py -twisted/mail/test/test_mailmail.py -twisted/mail/test/test_options.py -twisted/mail/test/test_pop3.py -twisted/mail/test/test_pop3client.py -twisted/mail/test/test_scripts.py -twisted/mail/test/test_smtp.py -twisted/manhole/__init__.py -twisted/manhole/_inspectro.py -twisted/manhole/explorer.py -twisted/manhole/gladereactor.py -twisted/manhole/service.py -twisted/manhole/telnet.py -twisted/manhole/test/__init__.py -twisted/manhole/test/test_explorer.py -twisted/manhole/ui/__init__.py -twisted/manhole/ui/gtk2manhole.py -twisted/manhole/ui/test/__init__.py -twisted/manhole/ui/test/test_gtk2manhole.py -twisted/names/__init__.py -twisted/names/_rfc1982.py -twisted/names/_version.py -twisted/names/authority.py -twisted/names/cache.py -twisted/names/client.py 
-twisted/names/common.py -twisted/names/dns.py -twisted/names/error.py -twisted/names/hosts.py -twisted/names/resolve.py -twisted/names/root.py -twisted/names/secondary.py -twisted/names/server.py -twisted/names/srvconnect.py -twisted/names/tap.py -twisted/names/test/__init__.py -twisted/names/test/test_cache.py -twisted/names/test/test_client.py -twisted/names/test/test_common.py -twisted/names/test/test_dns.py -twisted/names/test/test_examples.py -twisted/names/test/test_hosts.py -twisted/names/test/test_names.py -twisted/names/test/test_resolve.py -twisted/names/test/test_rfc1982.py -twisted/names/test/test_rootresolve.py -twisted/names/test/test_server.py -twisted/names/test/test_srvconnect.py -twisted/names/test/test_tap.py -twisted/news/__init__.py -twisted/news/_version.py -twisted/news/database.py -twisted/news/news.py -twisted/news/nntp.py -twisted/news/tap.py -twisted/news/test/__init__.py -twisted/news/test/test_database.py -twisted/news/test/test_news.py -twisted/news/test/test_nntp.py -twisted/pair/__init__.py -twisted/pair/_version.py -twisted/pair/ethernet.py -twisted/pair/ip.py -twisted/pair/raw.py -twisted/pair/rawudp.py -twisted/pair/testing.py -twisted/pair/tuntap.py -twisted/pair/test/__init__.py -twisted/pair/test/test_ethernet.py -twisted/pair/test/test_ip.py -twisted/pair/test/test_rawudp.py -twisted/pair/test/test_tuntap.py -twisted/persisted/__init__.py -twisted/persisted/aot.py -twisted/persisted/crefutil.py -twisted/persisted/dirdbm.py -twisted/persisted/sob.py -twisted/persisted/styles.py -twisted/persisted/test/__init__.py -twisted/persisted/test/test_styles.py -twisted/plugins/__init__.py -twisted/plugins/cred_anonymous.py -twisted/plugins/cred_file.py -twisted/plugins/cred_memory.py -twisted/plugins/cred_sshkeys.py -twisted/plugins/cred_unix.py -twisted/plugins/twisted_conch.py -twisted/plugins/twisted_core.py -twisted/plugins/twisted_ftp.py -twisted/plugins/twisted_inet.py -twisted/plugins/twisted_lore.py -twisted/plugins/twisted_mail.py -twisted/plugins/twisted_manhole.py -twisted/plugins/twisted_names.py -twisted/plugins/twisted_news.py -twisted/plugins/twisted_portforward.py -twisted/plugins/twisted_qtstub.py -twisted/plugins/twisted_reactors.py -twisted/plugins/twisted_runner.py -twisted/plugins/twisted_socks.py -twisted/plugins/twisted_telnet.py -twisted/plugins/twisted_trial.py -twisted/plugins/twisted_web.py -twisted/plugins/twisted_words.py -twisted/positioning/__init__.py -twisted/positioning/_sentence.py -twisted/positioning/base.py -twisted/positioning/ipositioning.py -twisted/positioning/nmea.py -twisted/positioning/test/__init__.py -twisted/positioning/test/receiver.py -twisted/positioning/test/test_base.py -twisted/positioning/test/test_nmea.py -twisted/positioning/test/test_sentence.py -twisted/protocols/__init__.py -twisted/protocols/amp.py -twisted/protocols/basic.py -twisted/protocols/dict.py -twisted/protocols/finger.py -twisted/protocols/ftp.py -twisted/protocols/htb.py -twisted/protocols/ident.py -twisted/protocols/loopback.py -twisted/protocols/memcache.py -twisted/protocols/pcp.py -twisted/protocols/policies.py -twisted/protocols/portforward.py -twisted/protocols/postfix.py -twisted/protocols/shoutcast.py -twisted/protocols/sip.py -twisted/protocols/socks.py -twisted/protocols/stateful.py -twisted/protocols/telnet.py -twisted/protocols/tls.py -twisted/protocols/wire.py -twisted/protocols/gps/__init__.py -twisted/protocols/gps/nmea.py -twisted/protocols/gps/rockwell.py -twisted/protocols/mice/__init__.py 
-twisted/protocols/mice/mouseman.py -twisted/protocols/test/__init__.py -twisted/protocols/test/test_basic.py -twisted/protocols/test/test_tls.py -twisted/python/__init__.py -twisted/python/_inotify.py -twisted/python/_release.py -twisted/python/_shellcomp.py -twisted/python/_textattributes.py -twisted/python/compat.py -twisted/python/components.py -twisted/python/constants.py -twisted/python/context.py -twisted/python/deprecate.py -twisted/python/dist.py -twisted/python/dist3.py -twisted/python/failure.py -twisted/python/fakepwd.py -twisted/python/filepath.py -twisted/python/finalize.py -twisted/python/formmethod.py -twisted/python/hashlib.py -twisted/python/hook.py -twisted/python/htmlizer.py -twisted/python/lockfile.py -twisted/python/log.py -twisted/python/logfile.py -twisted/python/modules.py -twisted/python/monkey.py -twisted/python/procutils.py -twisted/python/randbytes.py -twisted/python/rebuild.py -twisted/python/reflect.py -twisted/python/release.py -twisted/python/roots.py -twisted/python/runtime.py -twisted/python/sendmsg.c -twisted/python/shortcut.py -twisted/python/syslog.py -twisted/python/systemd.py -twisted/python/text.py -twisted/python/threadable.py -twisted/python/threadpool.py -twisted/python/urlpath.py -twisted/python/usage.py -twisted/python/util.py -twisted/python/versions.py -twisted/python/win32.py -twisted/python/zippath.py -twisted/python/zipstream.py -twisted/python/test/__init__.py -twisted/python/test/deprecatedattributes.py -twisted/python/test/modules_helpers.py -twisted/python/test/pullpipe.py -twisted/python/test/test_components.py -twisted/python/test/test_constants.py -twisted/python/test/test_deprecate.py -twisted/python/test/test_dist.py -twisted/python/test/test_dist3.py -twisted/python/test/test_fakepwd.py -twisted/python/test/test_hashlib.py -twisted/python/test/test_htmlizer.py -twisted/python/test/test_inotify.py -twisted/python/test/test_release.py -twisted/python/test/test_runtime.py -twisted/python/test/test_sendmsg.py -twisted/python/test/test_shellcomp.py -twisted/python/test/test_syslog.py -twisted/python/test/test_systemd.py -twisted/python/test/test_textattributes.py -twisted/python/test/test_urlpath.py -twisted/python/test/test_util.py -twisted/python/test/test_versions.py -twisted/python/test/test_win32.py -twisted/python/test/test_zippath.py -twisted/python/test/test_zipstream.py -twisted/runner/__init__.py -twisted/runner/_version.py -twisted/runner/inetd.py -twisted/runner/inetdconf.py -twisted/runner/inetdtap.py -twisted/runner/portmap.c -twisted/runner/procmon.py -twisted/runner/procmontap.py -twisted/runner/test/__init__.py -twisted/runner/test/test_procmon.py -twisted/runner/test/test_procmontap.py -twisted/scripts/__init__.py -twisted/scripts/_twistd_unix.py -twisted/scripts/_twistw.py -twisted/scripts/htmlizer.py -twisted/scripts/manhole.py -twisted/scripts/tap2deb.py -twisted/scripts/tap2rpm.py -twisted/scripts/tapconvert.py -twisted/scripts/tkunzip.py -twisted/scripts/trial.py -twisted/scripts/twistd.py -twisted/scripts/test/__init__.py -twisted/scripts/test/test_scripts.py -twisted/scripts/test/test_tap2deb.py -twisted/scripts/test/test_tap2rpm.py -twisted/spread/__init__.py -twisted/spread/banana.py -twisted/spread/flavors.py -twisted/spread/interfaces.py -twisted/spread/jelly.py -twisted/spread/pb.py -twisted/spread/publish.py -twisted/spread/util.py -twisted/spread/ui/__init__.py -twisted/spread/ui/gtk2util.py -twisted/spread/ui/tktree.py -twisted/spread/ui/tkutil.py -twisted/tap/__init__.py -twisted/tap/ftp.py 
-twisted/tap/manhole.py -twisted/tap/portforward.py -twisted/tap/socks.py -twisted/tap/telnet.py -twisted/test/__init__.py -twisted/test/_preamble.py -twisted/test/crash_test_dummy.py -twisted/test/iosim.py -twisted/test/mock_win32process.py -twisted/test/myrebuilder1.py -twisted/test/myrebuilder2.py -twisted/test/plugin_basic.py -twisted/test/plugin_extra1.py -twisted/test/plugin_extra2.py -twisted/test/process_cmdline.py -twisted/test/process_echoer.py -twisted/test/process_fds.py -twisted/test/process_linger.py -twisted/test/process_reader.py -twisted/test/process_signal.py -twisted/test/process_stdinreader.py -twisted/test/process_tester.py -twisted/test/process_tty.py -twisted/test/process_twisted.py -twisted/test/proto_helpers.py -twisted/test/raiser.c -twisted/test/reflect_helper_IE.py -twisted/test/reflect_helper_VE.py -twisted/test/reflect_helper_ZDE.py -twisted/test/ssl_helpers.py -twisted/test/stdio_test_consumer.py -twisted/test/stdio_test_halfclose.py -twisted/test/stdio_test_hostpeer.py -twisted/test/stdio_test_lastwrite.py -twisted/test/stdio_test_loseconn.py -twisted/test/stdio_test_producer.py -twisted/test/stdio_test_write.py -twisted/test/stdio_test_writeseq.py -twisted/test/test_abstract.py -twisted/test/test_adbapi.py -twisted/test/test_amp.py -twisted/test/test_application.py -twisted/test/test_banana.py -twisted/test/test_compat.py -twisted/test/test_context.py -twisted/test/test_cooperator.py -twisted/test/test_defer.py -twisted/test/test_defgen.py -twisted/test/test_dict.py -twisted/test/test_digestauth.py -twisted/test/test_dirdbm.py -twisted/test/test_doc.py -twisted/test/test_error.py -twisted/test/test_explorer.py -twisted/test/test_factories.py -twisted/test/test_failure.py -twisted/test/test_fdesc.py -twisted/test/test_finger.py -twisted/test/test_formmethod.py -twisted/test/test_ftp.py -twisted/test/test_ftp_options.py -twisted/test/test_hook.py -twisted/test/test_htb.py -twisted/test/test_ident.py -twisted/test/test_internet.py -twisted/test/test_iosim.py -twisted/test/test_iutils.py -twisted/test/test_jelly.py -twisted/test/test_lockfile.py -twisted/test/test_log.py -twisted/test/test_logfile.py -twisted/test/test_loopback.py -twisted/test/test_manhole.py -twisted/test/test_memcache.py -twisted/test/test_modules.py -twisted/test/test_monkey.py -twisted/test/test_newcred.py -twisted/test/test_nmea.py -twisted/test/test_paths.py -twisted/test/test_pb.py -twisted/test/test_pbfailure.py -twisted/test/test_pcp.py -twisted/test/test_persisted.py -twisted/test/test_plugin.py -twisted/test/test_policies.py -twisted/test/test_postfix.py -twisted/test/test_process.py -twisted/test/test_protocols.py -twisted/test/test_randbytes.py -twisted/test/test_rebuild.py -twisted/test/test_reflect.py -twisted/test/test_roots.py -twisted/test/test_setup.py -twisted/test/test_shortcut.py -twisted/test/test_sip.py -twisted/test/test_sob.py -twisted/test/test_socks.py -twisted/test/test_ssl.py -twisted/test/test_sslverify.py -twisted/test/test_stateful.py -twisted/test/test_stdio.py -twisted/test/test_strcred.py -twisted/test/test_strerror.py -twisted/test/test_stringtransport.py -twisted/test/test_strports.py -twisted/test/test_task.py -twisted/test/test_tcp.py -twisted/test/test_tcp_internals.py -twisted/test/test_text.py -twisted/test/test_threadable.py -twisted/test/test_threadpool.py -twisted/test/test_threads.py -twisted/test/test_tpfile.py -twisted/test/test_twistd.py -twisted/test/test_twisted.py -twisted/test/test_udp.py -twisted/test/test_unix.py 
-twisted/test/test_usage.py -twisted/test/testutils.py -twisted/trial/__init__.py -twisted/trial/_asyncrunner.py -twisted/trial/_asynctest.py -twisted/trial/_synctest.py -twisted/trial/itrial.py -twisted/trial/reporter.py -twisted/trial/runner.py -twisted/trial/unittest.py -twisted/trial/util.py -twisted/trial/_dist/__init__.py -twisted/trial/_dist/distreporter.py -twisted/trial/_dist/disttrial.py -twisted/trial/_dist/managercommands.py -twisted/trial/_dist/options.py -twisted/trial/_dist/worker.py -twisted/trial/_dist/workercommands.py -twisted/trial/_dist/workerreporter.py -twisted/trial/_dist/workertrial.py -twisted/trial/_dist/test/__init__.py -twisted/trial/_dist/test/test_distreporter.py -twisted/trial/_dist/test/test_disttrial.py -twisted/trial/_dist/test/test_options.py -twisted/trial/_dist/test/test_worker.py -twisted/trial/_dist/test/test_workerreporter.py -twisted/trial/_dist/test/test_workertrial.py -twisted/trial/test/__init__.py -twisted/trial/test/detests.py -twisted/trial/test/erroneous.py -twisted/trial/test/mockcustomsuite.py -twisted/trial/test/mockcustomsuite2.py -twisted/trial/test/mockcustomsuite3.py -twisted/trial/test/mockdoctest.py -twisted/trial/test/moduleself.py -twisted/trial/test/moduletest.py -twisted/trial/test/novars.py -twisted/trial/test/ordertests.py -twisted/trial/test/packages.py -twisted/trial/test/sample.py -twisted/trial/test/scripttest.py -twisted/trial/test/skipping.py -twisted/trial/test/suppression.py -twisted/trial/test/test_assertions.py -twisted/trial/test/test_asyncassertions.py -twisted/trial/test/test_deferred.py -twisted/trial/test/test_doctest.py -twisted/trial/test/test_keyboard.py -twisted/trial/test/test_loader.py -twisted/trial/test/test_log.py -twisted/trial/test/test_output.py -twisted/trial/test/test_plugins.py -twisted/trial/test/test_pyunitcompat.py -twisted/trial/test/test_reporter.py -twisted/trial/test/test_runner.py -twisted/trial/test/test_script.py -twisted/trial/test/test_suppression.py -twisted/trial/test/test_testcase.py -twisted/trial/test/test_tests.py -twisted/trial/test/test_util.py -twisted/trial/test/test_warning.py -twisted/trial/test/weird.py -twisted/web/__init__.py -twisted/web/_element.py -twisted/web/_flatten.py -twisted/web/_newclient.py -twisted/web/_responses.py -twisted/web/_stan.py -twisted/web/_version.py -twisted/web/client.py -twisted/web/demo.py -twisted/web/distrib.py -twisted/web/domhelpers.py -twisted/web/error.py -twisted/web/guard.py -twisted/web/html.py -twisted/web/http.py -twisted/web/http_headers.py -twisted/web/iweb.py -twisted/web/microdom.py -twisted/web/proxy.py -twisted/web/resource.py -twisted/web/rewrite.py -twisted/web/script.py -twisted/web/server.py -twisted/web/soap.py -twisted/web/static.py -twisted/web/sux.py -twisted/web/tap.py -twisted/web/template.py -twisted/web/twcgi.py -twisted/web/util.py -twisted/web/vhost.py -twisted/web/wsgi.py -twisted/web/xmlrpc.py -twisted/web/_auth/__init__.py -twisted/web/_auth/basic.py -twisted/web/_auth/digest.py -twisted/web/_auth/wrapper.py -twisted/web/test/__init__.py -twisted/web/test/_util.py -twisted/web/test/requesthelper.py -twisted/web/test/test_agent.py -twisted/web/test/test_cgi.py -twisted/web/test/test_distrib.py -twisted/web/test/test_domhelpers.py -twisted/web/test/test_error.py -twisted/web/test/test_flatten.py -twisted/web/test/test_http.py -twisted/web/test/test_http_headers.py -twisted/web/test/test_httpauth.py -twisted/web/test/test_newclient.py -twisted/web/test/test_proxy.py -twisted/web/test/test_resource.py 
-twisted/web/test/test_script.py -twisted/web/test/test_soap.py -twisted/web/test/test_stan.py -twisted/web/test/test_static.py -twisted/web/test/test_tap.py -twisted/web/test/test_template.py -twisted/web/test/test_util.py -twisted/web/test/test_vhost.py -twisted/web/test/test_web.py -twisted/web/test/test_webclient.py -twisted/web/test/test_wsgi.py -twisted/web/test/test_xml.py -twisted/web/test/test_xmlrpc.py -twisted/words/__init__.py -twisted/words/_version.py -twisted/words/ewords.py -twisted/words/iwords.py -twisted/words/service.py -twisted/words/tap.py -twisted/words/xmpproutertap.py -twisted/words/im/__init__.py -twisted/words/im/baseaccount.py -twisted/words/im/basechat.py -twisted/words/im/basesupport.py -twisted/words/im/interfaces.py -twisted/words/im/ircsupport.py -twisted/words/im/locals.py -twisted/words/im/pbsupport.py -twisted/words/protocols/__init__.py -twisted/words/protocols/irc.py -twisted/words/protocols/msn.py -twisted/words/protocols/oscar.py -twisted/words/protocols/jabber/__init__.py -twisted/words/protocols/jabber/client.py -twisted/words/protocols/jabber/component.py -twisted/words/protocols/jabber/error.py -twisted/words/protocols/jabber/ijabber.py -twisted/words/protocols/jabber/jid.py -twisted/words/protocols/jabber/jstrports.py -twisted/words/protocols/jabber/sasl.py -twisted/words/protocols/jabber/sasl_mechanisms.py -twisted/words/protocols/jabber/xmlstream.py -twisted/words/protocols/jabber/xmpp_stringprep.py -twisted/words/test/__init__.py -twisted/words/test/test_basechat.py -twisted/words/test/test_basesupport.py -twisted/words/test/test_domish.py -twisted/words/test/test_irc.py -twisted/words/test/test_irc_service.py -twisted/words/test/test_ircsupport.py -twisted/words/test/test_jabberclient.py -twisted/words/test/test_jabbercomponent.py -twisted/words/test/test_jabbererror.py -twisted/words/test/test_jabberjid.py -twisted/words/test/test_jabberjstrports.py -twisted/words/test/test_jabbersasl.py -twisted/words/test/test_jabbersaslmechanisms.py -twisted/words/test/test_jabberxmlstream.py -twisted/words/test/test_jabberxmppstringprep.py -twisted/words/test/test_msn.py -twisted/words/test/test_oscar.py -twisted/words/test/test_service.py -twisted/words/test/test_tap.py -twisted/words/test/test_xishutil.py -twisted/words/test/test_xmlstream.py -twisted/words/test/test_xmpproutertap.py -twisted/words/test/test_xpath.py -twisted/words/xish/__init__.py -twisted/words/xish/domish.py -twisted/words/xish/utility.py -twisted/words/xish/xmlstream.py -twisted/words/xish/xpath.py -twisted/words/xish/xpathparser.py \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/dependency_links.txt b/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/dependency_links.txt deleted file mode 100644 index 8b13789..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/dependency_links.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/installed-files.txt b/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/installed-files.txt deleted file mode 100644 index dd46462..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/installed-files.txt +++ /dev/null @@ -1,1757 +0,0 @@ -../twisted/copyright.py -../twisted/_version.py -../twisted/plugin.py -../twisted/__init__.py -../twisted/manhole/telnet.py -../twisted/manhole/explorer.py -../twisted/manhole/service.py -../twisted/manhole/_inspectro.py 
-../twisted/manhole/__init__.py -../twisted/manhole/gladereactor.py -../twisted/manhole/ui/gtk2manhole.py -../twisted/manhole/ui/__init__.py -../twisted/manhole/ui/test/__init__.py -../twisted/manhole/ui/test/test_gtk2manhole.py -../twisted/manhole/test/__init__.py -../twisted/manhole/test/test_explorer.py -../twisted/mail/pb.py -../twisted/mail/relaymanager.py -../twisted/mail/imap4.py -../twisted/mail/_version.py -../twisted/mail/relay.py -../twisted/mail/pop3client.py -../twisted/mail/bounce.py -../twisted/mail/pop3.py -../twisted/mail/mail.py -../twisted/mail/__init__.py -../twisted/mail/alias.py -../twisted/mail/smtp.py -../twisted/mail/protocols.py -../twisted/mail/maildir.py -../twisted/mail/tap.py -../twisted/mail/scripts/mailmail.py -../twisted/mail/scripts/__init__.py -../twisted/mail/test/test_pop3client.py -../twisted/mail/test/test_smtp.py -../twisted/mail/test/test_scripts.py -../twisted/mail/test/test_imap.py -../twisted/mail/test/test_bounce.py -../twisted/mail/test/pop3testserver.py -../twisted/mail/test/__init__.py -../twisted/mail/test/test_mail.py -../twisted/mail/test/test_pop3.py -../twisted/mail/test/test_options.py -../twisted/mail/test/test_mailmail.py -../twisted/names/error.py -../twisted/names/client.py -../twisted/names/common.py -../twisted/names/server.py -../twisted/names/_version.py -../twisted/names/root.py -../twisted/names/hosts.py -../twisted/names/_rfc1982.py -../twisted/names/dns.py -../twisted/names/resolve.py -../twisted/names/__init__.py -../twisted/names/secondary.py -../twisted/names/srvconnect.py -../twisted/names/cache.py -../twisted/names/authority.py -../twisted/names/tap.py -../twisted/names/test/test_rfc1982.py -../twisted/names/test/test_client.py -../twisted/names/test/test_hosts.py -../twisted/names/test/test_server.py -../twisted/names/test/test_names.py -../twisted/names/test/test_resolve.py -../twisted/names/test/test_common.py -../twisted/names/test/__init__.py -../twisted/names/test/test_examples.py -../twisted/names/test/test_cache.py -../twisted/names/test/test_tap.py -../twisted/names/test/test_rootresolve.py -../twisted/names/test/test_srvconnect.py -../twisted/names/test/test_dns.py -../twisted/trial/unittest.py -../twisted/trial/_synctest.py -../twisted/trial/util.py -../twisted/trial/__init__.py -../twisted/trial/reporter.py -../twisted/trial/itrial.py -../twisted/trial/_asynctest.py -../twisted/trial/runner.py -../twisted/trial/_asyncrunner.py -../twisted/trial/test/scripttest.py -../twisted/trial/test/weird.py -../twisted/trial/test/test_assertions.py -../twisted/trial/test/mockcustomsuite.py -../twisted/trial/test/skipping.py -../twisted/trial/test/test_loader.py -../twisted/trial/test/test_reporter.py -../twisted/trial/test/novars.py -../twisted/trial/test/test_keyboard.py -../twisted/trial/test/moduletest.py -../twisted/trial/test/test_deferred.py -../twisted/trial/test/test_script.py -../twisted/trial/test/mockdoctest.py -../twisted/trial/test/test_testcase.py -../twisted/trial/test/test_util.py -../twisted/trial/test/ordertests.py -../twisted/trial/test/suppression.py -../twisted/trial/test/test_tests.py -../twisted/trial/test/test_warning.py -../twisted/trial/test/test_doctest.py -../twisted/trial/test/__init__.py -../twisted/trial/test/test_log.py -../twisted/trial/test/erroneous.py -../twisted/trial/test/test_plugins.py -../twisted/trial/test/test_asyncassertions.py -../twisted/trial/test/test_suppression.py -../twisted/trial/test/sample.py -../twisted/trial/test/detests.py -../twisted/trial/test/mockcustomsuite2.py 
-../twisted/trial/test/test_pyunitcompat.py -../twisted/trial/test/test_runner.py -../twisted/trial/test/test_output.py -../twisted/trial/test/mockcustomsuite3.py -../twisted/trial/test/moduleself.py -../twisted/trial/test/packages.py -../twisted/trial/_dist/managercommands.py -../twisted/trial/_dist/disttrial.py -../twisted/trial/_dist/workerreporter.py -../twisted/trial/_dist/distreporter.py -../twisted/trial/_dist/workertrial.py -../twisted/trial/_dist/__init__.py -../twisted/trial/_dist/workercommands.py -../twisted/trial/_dist/worker.py -../twisted/trial/_dist/options.py -../twisted/trial/_dist/test/test_workerreporter.py -../twisted/trial/_dist/test/test_worker.py -../twisted/trial/_dist/test/test_distreporter.py -../twisted/trial/_dist/test/test_workertrial.py -../twisted/trial/_dist/test/__init__.py -../twisted/trial/_dist/test/test_options.py -../twisted/trial/_dist/test/test_disttrial.py -../twisted/cred/error.py -../twisted/cred/portal.py -../twisted/cred/strcred.py -../twisted/cred/__init__.py -../twisted/cred/_digest.py -../twisted/cred/credentials.py -../twisted/cred/checkers.py -../twisted/cred/pamauth.py -../twisted/conch/error.py -../twisted/conch/stdio.py -../twisted/conch/manhole_tap.py -../twisted/conch/telnet.py -../twisted/conch/_version.py -../twisted/conch/unix.py -../twisted/conch/interfaces.py -../twisted/conch/endpoints.py -../twisted/conch/recvline.py -../twisted/conch/__init__.py -../twisted/conch/mixin.py -../twisted/conch/ls.py -../twisted/conch/checkers.py -../twisted/conch/avatar.py -../twisted/conch/manhole_ssh.py -../twisted/conch/ttymodes.py -../twisted/conch/manhole.py -../twisted/conch/tap.py -../twisted/conch/ui/tkvt100.py -../twisted/conch/ui/__init__.py -../twisted/conch/ui/ansi.py -../twisted/conch/client/connect.py -../twisted/conch/client/default.py -../twisted/conch/client/agent.py -../twisted/conch/client/direct.py -../twisted/conch/client/__init__.py -../twisted/conch/client/knownhosts.py -../twisted/conch/client/options.py -../twisted/conch/openssh_compat/primes.py -../twisted/conch/openssh_compat/__init__.py -../twisted/conch/openssh_compat/factory.py -../twisted/conch/scripts/conch.py -../twisted/conch/scripts/tkconch.py -../twisted/conch/scripts/cftp.py -../twisted/conch/scripts/ckeygen.py -../twisted/conch/scripts/__init__.py -../twisted/conch/ssh/session.py -../twisted/conch/ssh/channel.py -../twisted/conch/ssh/common.py -../twisted/conch/ssh/keys.py -../twisted/conch/ssh/userauth.py -../twisted/conch/ssh/forwarding.py -../twisted/conch/ssh/connection.py -../twisted/conch/ssh/agent.py -../twisted/conch/ssh/service.py -../twisted/conch/ssh/transport.py -../twisted/conch/ssh/__init__.py -../twisted/conch/ssh/sexpy.py -../twisted/conch/ssh/address.py -../twisted/conch/ssh/factory.py -../twisted/conch/ssh/filetransfer.py -../twisted/conch/test/test_openssh_compat.py -../twisted/conch/test/test_helper.py -../twisted/conch/test/test_knownhosts.py -../twisted/conch/test/keydata.py -../twisted/conch/test/test_ckeygen.py -../twisted/conch/test/test_cftp.py -../twisted/conch/test/test_conch.py -../twisted/conch/test/test_keys.py -../twisted/conch/test/test_filetransfer.py -../twisted/conch/test/test_scripts.py -../twisted/conch/test/test_text.py -../twisted/conch/test/test_recvline.py -../twisted/conch/test/test_endpoints.py -../twisted/conch/test/test_userauth.py -../twisted/conch/test/test_manhole.py -../twisted/conch/test/__init__.py -../twisted/conch/test/test_telnet.py -../twisted/conch/test/test_channel.py 
-../twisted/conch/test/test_mixin.py -../twisted/conch/test/test_agent.py -../twisted/conch/test/test_transport.py -../twisted/conch/test/test_window.py -../twisted/conch/test/test_checkers.py -../twisted/conch/test/test_address.py -../twisted/conch/test/test_tap.py -../twisted/conch/test/test_connection.py -../twisted/conch/test/test_insults.py -../twisted/conch/test/test_ssh.py -../twisted/conch/test/test_default.py -../twisted/conch/test/test_session.py -../twisted/conch/insults/client.py -../twisted/conch/insults/text.py -../twisted/conch/insults/colors.py -../twisted/conch/insults/helper.py -../twisted/conch/insults/window.py -../twisted/conch/insults/__init__.py -../twisted/conch/insults/insults.py -../twisted/python/logfile.py -../twisted/python/formmethod.py -../twisted/python/compat.py -../twisted/python/threadpool.py -../twisted/python/monkey.py -../twisted/python/log.py -../twisted/python/hook.py -../twisted/python/fakepwd.py -../twisted/python/usage.py -../twisted/python/dist3.py -../twisted/python/util.py -../twisted/python/syslog.py -../twisted/python/threadable.py -../twisted/python/text.py -../twisted/python/runtime.py -../twisted/python/context.py -../twisted/python/_textattributes.py -../twisted/python/roots.py -../twisted/python/win32.py -../twisted/python/randbytes.py -../twisted/python/hashlib.py -../twisted/python/components.py -../twisted/python/_inotify.py -../twisted/python/modules.py -../twisted/python/zippath.py -../twisted/python/versions.py -../twisted/python/_release.py -../twisted/python/deprecate.py -../twisted/python/failure.py -../twisted/python/urlpath.py -../twisted/python/reflect.py -../twisted/python/__init__.py -../twisted/python/rebuild.py -../twisted/python/procutils.py -../twisted/python/finalize.py -../twisted/python/zipstream.py -../twisted/python/filepath.py -../twisted/python/dist.py -../twisted/python/systemd.py -../twisted/python/constants.py -../twisted/python/_shellcomp.py -../twisted/python/shortcut.py -../twisted/python/release.py -../twisted/python/htmlizer.py -../twisted/python/lockfile.py -../twisted/python/test/test_zippath.py -../twisted/python/test/test_fakepwd.py -../twisted/python/test/test_release.py -../twisted/python/test/deprecatedattributes.py -../twisted/python/test/test_constants.py -../twisted/python/test/test_sendmsg.py -../twisted/python/test/test_textattributes.py -../twisted/python/test/test_shellcomp.py -../twisted/python/test/test_htmlizer.py -../twisted/python/test/test_runtime.py -../twisted/python/test/pullpipe.py -../twisted/python/test/test_deprecate.py -../twisted/python/test/test_util.py -../twisted/python/test/test_hashlib.py -../twisted/python/test/test_inotify.py -../twisted/python/test/__init__.py -../twisted/python/test/test_dist.py -../twisted/python/test/test_syslog.py -../twisted/python/test/test_zipstream.py -../twisted/python/test/test_urlpath.py -../twisted/python/test/test_components.py -../twisted/python/test/test_win32.py -../twisted/python/test/test_systemd.py -../twisted/python/test/test_versions.py -../twisted/python/test/test_dist3.py -../twisted/python/test/modules_helpers.py -../twisted/runner/procmon.py -../twisted/runner/_version.py -../twisted/runner/inetdconf.py -../twisted/runner/inetdtap.py -../twisted/runner/__init__.py -../twisted/runner/procmontap.py -../twisted/runner/inetd.py -../twisted/runner/test/test_procmon.py -../twisted/runner/test/test_procmontap.py -../twisted/runner/test/__init__.py -../twisted/tap/telnet.py -../twisted/tap/ftp.py -../twisted/tap/socks.py 
-../twisted/tap/portforward.py -../twisted/tap/__init__.py -../twisted/tap/manhole.py -../twisted/positioning/ipositioning.py -../twisted/positioning/__init__.py -../twisted/positioning/_sentence.py -../twisted/positioning/base.py -../twisted/positioning/nmea.py -../twisted/positioning/test/test_sentence.py -../twisted/positioning/test/test_base.py -../twisted/positioning/test/receiver.py -../twisted/positioning/test/__init__.py -../twisted/positioning/test/test_nmea.py -../twisted/spread/pb.py -../twisted/spread/util.py -../twisted/spread/banana.py -../twisted/spread/interfaces.py -../twisted/spread/jelly.py -../twisted/spread/__init__.py -../twisted/spread/flavors.py -../twisted/spread/publish.py -../twisted/spread/ui/gtk2util.py -../twisted/spread/ui/__init__.py -../twisted/spread/ui/tkutil.py -../twisted/spread/ui/tktree.py -../twisted/internet/error.py -../twisted/internet/default.py -../twisted/internet/qtreactor.py -../twisted/internet/win32eventreactor.py -../twisted/internet/stdio.py -../twisted/internet/cfreactor.py -../twisted/internet/_posixserialport.py -../twisted/internet/ssl.py -../twisted/internet/threads.py -../twisted/internet/_pollingfile.py -../twisted/internet/gtk2reactor.py -../twisted/internet/tksupport.py -../twisted/internet/gireactor.py -../twisted/internet/glib2reactor.py -../twisted/internet/_newtls.py -../twisted/internet/gtk3reactor.py -../twisted/internet/_baseprocess.py -../twisted/internet/abstract.py -../twisted/internet/inotify.py -../twisted/internet/unix.py -../twisted/internet/interfaces.py -../twisted/internet/protocol.py -../twisted/internet/_dumbwin32proc.py -../twisted/internet/tcp.py -../twisted/internet/endpoints.py -../twisted/internet/main.py -../twisted/internet/udp.py -../twisted/internet/wxsupport.py -../twisted/internet/pyuisupport.py -../twisted/internet/process.py -../twisted/internet/_signals.py -../twisted/internet/__init__.py -../twisted/internet/posixbase.py -../twisted/internet/pollreactor.py -../twisted/internet/serialport.py -../twisted/internet/_sslverify.py -../twisted/internet/kqreactor.py -../twisted/internet/fdesc.py -../twisted/internet/reactor.py -../twisted/internet/base.py -../twisted/internet/address.py -../twisted/internet/_threadedselect.py -../twisted/internet/_ssl.py -../twisted/internet/selectreactor.py -../twisted/internet/_win32serialport.py -../twisted/internet/wxreactor.py -../twisted/internet/epollreactor.py -../twisted/internet/defer.py -../twisted/internet/utils.py -../twisted/internet/task.py -../twisted/internet/_win32stdio.py -../twisted/internet/gtkreactor.py -../twisted/internet/_posixstdio.py -../twisted/internet/_glibbase.py -../twisted/internet/iocpreactor/abstract.py -../twisted/internet/iocpreactor/interfaces.py -../twisted/internet/iocpreactor/tcp.py -../twisted/internet/iocpreactor/udp.py -../twisted/internet/iocpreactor/const.py -../twisted/internet/iocpreactor/__init__.py -../twisted/internet/iocpreactor/reactor.py -../twisted/internet/iocpreactor/setup.py -../twisted/internet/test/test_posixbase.py -../twisted/internet/test/modulehelpers.py -../twisted/internet/test/test_stdio.py -../twisted/internet/test/test_qtreactor.py -../twisted/internet/test/test_serialport.py -../twisted/internet/test/test_time.py -../twisted/internet/test/process_helper.py -../twisted/internet/test/test_threads.py -../twisted/internet/test/test_core.py -../twisted/internet/test/test_posixprocess.py -../twisted/internet/test/test_filedescriptor.py -../twisted/internet/test/_win32ifaces.py 
-../twisted/internet/test/test_main.py -../twisted/internet/test/test_sigchld.py -../twisted/internet/test/test_unix.py -../twisted/internet/test/test_base.py -../twisted/internet/test/test_epollreactor.py -../twisted/internet/test/test_socket.py -../twisted/internet/test/test_endpoints.py -../twisted/internet/test/test_newtls.py -../twisted/internet/test/test_abstract.py -../twisted/internet/test/test_inotify.py -../twisted/internet/test/test_protocol.py -../twisted/internet/test/test_baseprocess.py -../twisted/internet/test/__init__.py -../twisted/internet/test/test_fdset.py -../twisted/internet/test/test_glibbase.py -../twisted/internet/test/test_gireactor.py -../twisted/internet/test/process_gireactornocompat.py -../twisted/internet/test/fakeendpoint.py -../twisted/internet/test/test_tcp.py -../twisted/internet/test/test_address.py -../twisted/internet/test/test_iocp.py -../twisted/internet/test/reactormixins.py -../twisted/internet/test/test_tls.py -../twisted/internet/test/_posixifaces.py -../twisted/internet/test/test_pollingfile.py -../twisted/internet/test/test_gtkreactor.py -../twisted/internet/test/test_udp.py -../twisted/internet/test/test_default.py -../twisted/internet/test/test_inlinecb.py -../twisted/internet/test/test_process.py -../twisted/internet/test/test_udp_internals.py -../twisted/internet/test/connectionmixins.py -../twisted/internet/test/test_win32events.py -../twisted/news/nntp.py -../twisted/news/database.py -../twisted/news/_version.py -../twisted/news/news.py -../twisted/news/__init__.py -../twisted/news/tap.py -../twisted/news/test/test_database.py -../twisted/news/test/__init__.py -../twisted/news/test/test_nntp.py -../twisted/news/test/test_news.py -../twisted/words/_version.py -../twisted/words/service.py -../twisted/words/iwords.py -../twisted/words/__init__.py -../twisted/words/ewords.py -../twisted/words/xmpproutertap.py -../twisted/words/tap.py -../twisted/words/xish/xpathparser.py -../twisted/words/xish/utility.py -../twisted/words/xish/xmlstream.py -../twisted/words/xish/__init__.py -../twisted/words/xish/xpath.py -../twisted/words/xish/domish.py -../twisted/words/im/basesupport.py -../twisted/words/im/baseaccount.py -../twisted/words/im/basechat.py -../twisted/words/im/interfaces.py -../twisted/words/im/ircsupport.py -../twisted/words/im/__init__.py -../twisted/words/im/pbsupport.py -../twisted/words/im/locals.py -../twisted/words/test/test_msn.py -../twisted/words/test/test_jabberjid.py -../twisted/words/test/test_ircsupport.py -../twisted/words/test/test_service.py -../twisted/words/test/test_jabbersaslmechanisms.py -../twisted/words/test/test_xmlstream.py -../twisted/words/test/test_irc.py -../twisted/words/test/test_xmpproutertap.py -../twisted/words/test/test_jabberxmlstream.py -../twisted/words/test/test_basesupport.py -../twisted/words/test/test_irc_service.py -../twisted/words/test/test_domish.py -../twisted/words/test/__init__.py -../twisted/words/test/test_jabbercomponent.py -../twisted/words/test/test_jabberxmppstringprep.py -../twisted/words/test/test_tap.py -../twisted/words/test/test_basechat.py -../twisted/words/test/test_oscar.py -../twisted/words/test/test_jabberjstrports.py -../twisted/words/test/test_jabberclient.py -../twisted/words/test/test_jabbersasl.py -../twisted/words/test/test_xpath.py -../twisted/words/test/test_xishutil.py -../twisted/words/test/test_jabbererror.py -../twisted/words/protocols/irc.py -../twisted/words/protocols/oscar.py -../twisted/words/protocols/__init__.py -../twisted/words/protocols/msn.py 
-../twisted/words/protocols/jabber/error.py -../twisted/words/protocols/jabber/client.py -../twisted/words/protocols/jabber/ijabber.py -../twisted/words/protocols/jabber/jid.py -../twisted/words/protocols/jabber/sasl_mechanisms.py -../twisted/words/protocols/jabber/xmpp_stringprep.py -../twisted/words/protocols/jabber/component.py -../twisted/words/protocols/jabber/xmlstream.py -../twisted/words/protocols/jabber/__init__.py -../twisted/words/protocols/jabber/jstrports.py -../twisted/words/protocols/jabber/sasl.py -../twisted/scripts/tapconvert.py -../twisted/scripts/tap2rpm.py -../twisted/scripts/twistd.py -../twisted/scripts/trial.py -../twisted/scripts/_twistd_unix.py -../twisted/scripts/_twistw.py -../twisted/scripts/tap2deb.py -../twisted/scripts/__init__.py -../twisted/scripts/tkunzip.py -../twisted/scripts/manhole.py -../twisted/scripts/htmlizer.py -../twisted/scripts/test/test_scripts.py -../twisted/scripts/test/test_tap2deb.py -../twisted/scripts/test/__init__.py -../twisted/scripts/test/test_tap2rpm.py -../twisted/lore/slides.py -../twisted/lore/lint.py -../twisted/lore/default.py -../twisted/lore/_version.py -../twisted/lore/numberer.py -../twisted/lore/docbook.py -../twisted/lore/htmlbook.py -../twisted/lore/process.py -../twisted/lore/__init__.py -../twisted/lore/man2lore.py -../twisted/lore/lmath.py -../twisted/lore/latex.py -../twisted/lore/indexer.py -../twisted/lore/tree.py -../twisted/lore/texi.py -../twisted/lore/scripts/lore.py -../twisted/lore/scripts/__init__.py -../twisted/lore/test/test_man2lore.py -../twisted/lore/test/test_lint.py -../twisted/lore/test/test_scripts.py -../twisted/lore/test/test_lmath.py -../twisted/lore/test/test_lore.py -../twisted/lore/test/test_latex.py -../twisted/lore/test/__init__.py -../twisted/lore/test/test_texi.py -../twisted/lore/test/test_docbook.py -../twisted/lore/test/test_slides.py -../twisted/web/error.py -../twisted/web/client.py -../twisted/web/twcgi.py -../twisted/web/soap.py -../twisted/web/xmlrpc.py -../twisted/web/server.py -../twisted/web/util.py -../twisted/web/_stan.py -../twisted/web/distrib.py -../twisted/web/_version.py -../twisted/web/http.py -../twisted/web/wsgi.py -../twisted/web/sux.py -../twisted/web/static.py -../twisted/web/http_headers.py -../twisted/web/domhelpers.py -../twisted/web/_newclient.py -../twisted/web/script.py -../twisted/web/iweb.py -../twisted/web/vhost.py -../twisted/web/guard.py -../twisted/web/_flatten.py -../twisted/web/template.py -../twisted/web/demo.py -../twisted/web/_responses.py -../twisted/web/resource.py -../twisted/web/proxy.py -../twisted/web/__init__.py -../twisted/web/microdom.py -../twisted/web/_element.py -../twisted/web/html.py -../twisted/web/rewrite.py -../twisted/web/tap.py -../twisted/web/_auth/digest.py -../twisted/web/_auth/wrapper.py -../twisted/web/_auth/__init__.py -../twisted/web/_auth/basic.py -../twisted/web/test/test_http_headers.py -../twisted/web/test/test_xml.py -../twisted/web/test/requesthelper.py -../twisted/web/test/test_httpauth.py -../twisted/web/test/test_error.py -../twisted/web/test/test_newclient.py -../twisted/web/test/test_stan.py -../twisted/web/test/test_script.py -../twisted/web/test/test_wsgi.py -../twisted/web/test/test_util.py -../twisted/web/test/test_cgi.py -../twisted/web/test/test_http.py -../twisted/web/test/_util.py -../twisted/web/test/__init__.py -../twisted/web/test/test_flatten.py -../twisted/web/test/test_static.py -../twisted/web/test/test_proxy.py -../twisted/web/test/test_agent.py -../twisted/web/test/test_soap.py 
-../twisted/web/test/test_webclient.py -../twisted/web/test/test_web.py -../twisted/web/test/test_tap.py -../twisted/web/test/test_template.py -../twisted/web/test/test_domhelpers.py -../twisted/web/test/test_distrib.py -../twisted/web/test/test_xmlrpc.py -../twisted/web/test/test_resource.py -../twisted/web/test/test_vhost.py -../twisted/pair/_version.py -../twisted/pair/ip.py -../twisted/pair/ethernet.py -../twisted/pair/__init__.py -../twisted/pair/rawudp.py -../twisted/pair/testing.py -../twisted/pair/raw.py -../twisted/pair/tuntap.py -../twisted/pair/test/test_ip.py -../twisted/pair/test/test_ethernet.py -../twisted/pair/test/__init__.py -../twisted/pair/test/test_tuntap.py -../twisted/pair/test/test_rawudp.py -../twisted/persisted/aot.py -../twisted/persisted/crefutil.py -../twisted/persisted/sob.py -../twisted/persisted/__init__.py -../twisted/persisted/dirdbm.py -../twisted/persisted/styles.py -../twisted/persisted/test/test_styles.py -../twisted/persisted/test/__init__.py -../twisted/application/reactors.py -../twisted/application/service.py -../twisted/application/internet.py -../twisted/application/__init__.py -../twisted/application/strports.py -../twisted/application/app.py -../twisted/application/test/__init__.py -../twisted/application/test/test_internet.py -../twisted/plugins/twisted_reactors.py -../twisted/plugins/twisted_lore.py -../twisted/plugins/twisted_runner.py -../twisted/plugins/twisted_conch.py -../twisted/plugins/twisted_portforward.py -../twisted/plugins/twisted_qtstub.py -../twisted/plugins/twisted_names.py -../twisted/plugins/twisted_news.py -../twisted/plugins/cred_memory.py -../twisted/plugins/cred_anonymous.py -../twisted/plugins/twisted_web.py -../twisted/plugins/twisted_ftp.py -../twisted/plugins/__init__.py -../twisted/plugins/twisted_mail.py -../twisted/plugins/twisted_socks.py -../twisted/plugins/twisted_words.py -../twisted/plugins/cred_file.py -../twisted/plugins/twisted_inet.py -../twisted/plugins/cred_sshkeys.py -../twisted/plugins/twisted_telnet.py -../twisted/plugins/twisted_core.py -../twisted/plugins/cred_unix.py -../twisted/plugins/twisted_trial.py -../twisted/plugins/twisted_manhole.py -../twisted/test/plugin_extra1.py -../twisted/test/test_stdio.py -../twisted/test/test_compat.py -../twisted/test/test_sip.py -../twisted/test/test_shortcut.py -../twisted/test/stdio_test_writeseq.py -../twisted/test/stdio_test_consumer.py -../twisted/test/test_rebuild.py -../twisted/test/process_echoer.py -../twisted/test/iosim.py -../twisted/test/stdio_test_write.py -../twisted/test/test_usage.py -../twisted/test/stdio_test_hostpeer.py -../twisted/test/test_dirdbm.py -../twisted/test/test_threads.py -../twisted/test/test_htb.py -../twisted/test/test_logfile.py -../twisted/test/process_tester.py -../twisted/test/test_monkey.py -../twisted/test/test_adbapi.py -../twisted/test/test_task.py -../twisted/test/test_strerror.py -../twisted/test/test_amp.py -../twisted/test/test_stringtransport.py -../twisted/test/test_threadpool.py -../twisted/test/test_error.py -../twisted/test/test_formmethod.py -../twisted/test/process_cmdline.py -../twisted/test/test_socks.py -../twisted/test/myrebuilder2.py -../twisted/test/test_sslverify.py -../twisted/test/test_defgen.py -../twisted/test/test_unix.py -../twisted/test/test_banana.py -../twisted/test/stdio_test_halfclose.py -../twisted/test/test_text.py -../twisted/test/test_iutils.py -../twisted/test/process_fds.py -../twisted/test/process_stdinreader.py -../twisted/test/test_twisted.py -../twisted/test/test_ftp_options.py 
-../twisted/test/reflect_helper_IE.py -../twisted/test/test_roots.py -../twisted/test/test_dict.py -../twisted/test/test_postfix.py -../twisted/test/test_ssl.py -../twisted/test/stdio_test_producer.py -../twisted/test/test_persisted.py -../twisted/test/test_defer.py -../twisted/test/test_jelly.py -../twisted/test/proto_helpers.py -../twisted/test/test_setup.py -../twisted/test/test_strcred.py -../twisted/test/test_abstract.py -../twisted/test/mock_win32process.py -../twisted/test/process_signal.py -../twisted/test/test_tcp_internals.py -../twisted/test/test_threadable.py -../twisted/test/test_doc.py -../twisted/test/test_manhole.py -../twisted/test/process_tty.py -../twisted/test/test_hook.py -../twisted/test/test_loopback.py -../twisted/test/__init__.py -../twisted/test/test_failure.py -../twisted/test/test_log.py -../twisted/test/stdio_test_lastwrite.py -../twisted/test/test_strports.py -../twisted/test/test_cooperator.py -../twisted/test/test_tpfile.py -../twisted/test/stdio_test_loseconn.py -../twisted/test/crash_test_dummy.py -../twisted/test/myrebuilder1.py -../twisted/test/test_ftp.py -../twisted/test/test_pcp.py -../twisted/test/plugin_basic.py -../twisted/test/test_protocols.py -../twisted/test/test_policies.py -../twisted/test/test_sob.py -../twisted/test/test_explorer.py -../twisted/test/test_factories.py -../twisted/test/test_tcp.py -../twisted/test/test_digestauth.py -../twisted/test/test_pb.py -../twisted/test/test_application.py -../twisted/test/test_context.py -../twisted/test/test_iosim.py -../twisted/test/process_reader.py -../twisted/test/test_stateful.py -../twisted/test/test_reflect.py -../twisted/test/test_pbfailure.py -../twisted/test/test_nmea.py -../twisted/test/plugin_extra2.py -../twisted/test/test_finger.py -../twisted/test/test_randbytes.py -../twisted/test/process_twisted.py -../twisted/test/ssl_helpers.py -../twisted/test/test_udp.py -../twisted/test/test_ident.py -../twisted/test/test_paths.py -../twisted/test/reflect_helper_ZDE.py -../twisted/test/test_twistd.py -../twisted/test/test_memcache.py -../twisted/test/reflect_helper_VE.py -../twisted/test/test_process.py -../twisted/test/test_newcred.py -../twisted/test/testutils.py -../twisted/test/test_modules.py -../twisted/test/test_plugin.py -../twisted/test/_preamble.py -../twisted/test/test_internet.py -../twisted/test/test_fdesc.py -../twisted/test/process_linger.py -../twisted/test/test_lockfile.py -../twisted/protocols/sip.py -../twisted/protocols/stateful.py -../twisted/protocols/telnet.py -../twisted/protocols/wire.py -../twisted/protocols/shoutcast.py -../twisted/protocols/dict.py -../twisted/protocols/ident.py -../twisted/protocols/ftp.py -../twisted/protocols/finger.py -../twisted/protocols/amp.py -../twisted/protocols/postfix.py -../twisted/protocols/socks.py -../twisted/protocols/portforward.py -../twisted/protocols/__init__.py -../twisted/protocols/tls.py -../twisted/protocols/basic.py -../twisted/protocols/memcache.py -../twisted/protocols/htb.py -../twisted/protocols/loopback.py -../twisted/protocols/policies.py -../twisted/protocols/pcp.py -../twisted/protocols/gps/__init__.py -../twisted/protocols/gps/rockwell.py -../twisted/protocols/gps/nmea.py -../twisted/protocols/mice/__init__.py -../twisted/protocols/mice/mouseman.py -../twisted/protocols/test/test_basic.py -../twisted/protocols/test/__init__.py -../twisted/protocols/test/test_tls.py -../twisted/enterprise/__init__.py -../twisted/enterprise/adbapi.py -../twisted/python/sendmsg.c -../twisted/runner/portmap.c 
-../twisted/internet/iocpreactor/iocpsupport/iocpsupport.c -../twisted/internet/iocpreactor/iocpsupport/winsock_pointers.c -../twisted/test/raiser.c -../twisted/copyright.pyc -../twisted/_version.pyc -../twisted/plugin.pyc -../twisted/__init__.pyc -../twisted/manhole/telnet.pyc -../twisted/manhole/explorer.pyc -../twisted/manhole/service.pyc -../twisted/manhole/_inspectro.pyc -../twisted/manhole/__init__.pyc -../twisted/manhole/gladereactor.pyc -../twisted/manhole/ui/gtk2manhole.pyc -../twisted/manhole/ui/__init__.pyc -../twisted/manhole/ui/test/__init__.pyc -../twisted/manhole/ui/test/test_gtk2manhole.pyc -../twisted/manhole/test/__init__.pyc -../twisted/manhole/test/test_explorer.pyc -../twisted/mail/pb.pyc -../twisted/mail/relaymanager.pyc -../twisted/mail/imap4.pyc -../twisted/mail/_version.pyc -../twisted/mail/relay.pyc -../twisted/mail/pop3client.pyc -../twisted/mail/bounce.pyc -../twisted/mail/pop3.pyc -../twisted/mail/mail.pyc -../twisted/mail/__init__.pyc -../twisted/mail/alias.pyc -../twisted/mail/smtp.pyc -../twisted/mail/protocols.pyc -../twisted/mail/maildir.pyc -../twisted/mail/tap.pyc -../twisted/mail/scripts/mailmail.pyc -../twisted/mail/scripts/__init__.pyc -../twisted/mail/test/test_pop3client.pyc -../twisted/mail/test/test_smtp.pyc -../twisted/mail/test/test_scripts.pyc -../twisted/mail/test/test_imap.pyc -../twisted/mail/test/test_bounce.pyc -../twisted/mail/test/pop3testserver.pyc -../twisted/mail/test/__init__.pyc -../twisted/mail/test/test_mail.pyc -../twisted/mail/test/test_pop3.pyc -../twisted/mail/test/test_options.pyc -../twisted/mail/test/test_mailmail.pyc -../twisted/names/error.pyc -../twisted/names/client.pyc -../twisted/names/common.pyc -../twisted/names/server.pyc -../twisted/names/_version.pyc -../twisted/names/root.pyc -../twisted/names/hosts.pyc -../twisted/names/_rfc1982.pyc -../twisted/names/dns.pyc -../twisted/names/resolve.pyc -../twisted/names/__init__.pyc -../twisted/names/secondary.pyc -../twisted/names/srvconnect.pyc -../twisted/names/cache.pyc -../twisted/names/authority.pyc -../twisted/names/tap.pyc -../twisted/names/test/test_rfc1982.pyc -../twisted/names/test/test_client.pyc -../twisted/names/test/test_hosts.pyc -../twisted/names/test/test_server.pyc -../twisted/names/test/test_names.pyc -../twisted/names/test/test_resolve.pyc -../twisted/names/test/test_common.pyc -../twisted/names/test/__init__.pyc -../twisted/names/test/test_examples.pyc -../twisted/names/test/test_cache.pyc -../twisted/names/test/test_tap.pyc -../twisted/names/test/test_rootresolve.pyc -../twisted/names/test/test_srvconnect.pyc -../twisted/names/test/test_dns.pyc -../twisted/trial/unittest.pyc -../twisted/trial/_synctest.pyc -../twisted/trial/util.pyc -../twisted/trial/__init__.pyc -../twisted/trial/reporter.pyc -../twisted/trial/itrial.pyc -../twisted/trial/_asynctest.pyc -../twisted/trial/runner.pyc -../twisted/trial/_asyncrunner.pyc -../twisted/trial/test/scripttest.pyc -../twisted/trial/test/weird.pyc -../twisted/trial/test/test_assertions.pyc -../twisted/trial/test/mockcustomsuite.pyc -../twisted/trial/test/skipping.pyc -../twisted/trial/test/test_loader.pyc -../twisted/trial/test/test_reporter.pyc -../twisted/trial/test/novars.pyc -../twisted/trial/test/test_keyboard.pyc -../twisted/trial/test/moduletest.pyc -../twisted/trial/test/test_deferred.pyc -../twisted/trial/test/test_script.pyc -../twisted/trial/test/mockdoctest.pyc -../twisted/trial/test/test_testcase.pyc -../twisted/trial/test/test_util.pyc -../twisted/trial/test/ordertests.pyc 
-../twisted/trial/test/suppression.pyc -../twisted/trial/test/test_tests.pyc -../twisted/trial/test/test_warning.pyc -../twisted/trial/test/test_doctest.pyc -../twisted/trial/test/__init__.pyc -../twisted/trial/test/test_log.pyc -../twisted/trial/test/erroneous.pyc -../twisted/trial/test/test_plugins.pyc -../twisted/trial/test/test_asyncassertions.pyc -../twisted/trial/test/test_suppression.pyc -../twisted/trial/test/sample.pyc -../twisted/trial/test/detests.pyc -../twisted/trial/test/mockcustomsuite2.pyc -../twisted/trial/test/test_pyunitcompat.pyc -../twisted/trial/test/test_runner.pyc -../twisted/trial/test/test_output.pyc -../twisted/trial/test/mockcustomsuite3.pyc -../twisted/trial/test/moduleself.pyc -../twisted/trial/test/packages.pyc -../twisted/trial/_dist/managercommands.pyc -../twisted/trial/_dist/disttrial.pyc -../twisted/trial/_dist/workerreporter.pyc -../twisted/trial/_dist/distreporter.pyc -../twisted/trial/_dist/workertrial.pyc -../twisted/trial/_dist/__init__.pyc -../twisted/trial/_dist/workercommands.pyc -../twisted/trial/_dist/worker.pyc -../twisted/trial/_dist/options.pyc -../twisted/trial/_dist/test/test_workerreporter.pyc -../twisted/trial/_dist/test/test_worker.pyc -../twisted/trial/_dist/test/test_distreporter.pyc -../twisted/trial/_dist/test/test_workertrial.pyc -../twisted/trial/_dist/test/__init__.pyc -../twisted/trial/_dist/test/test_options.pyc -../twisted/trial/_dist/test/test_disttrial.pyc -../twisted/cred/error.pyc -../twisted/cred/portal.pyc -../twisted/cred/strcred.pyc -../twisted/cred/__init__.pyc -../twisted/cred/_digest.pyc -../twisted/cred/credentials.pyc -../twisted/cred/checkers.pyc -../twisted/cred/pamauth.pyc -../twisted/conch/error.pyc -../twisted/conch/stdio.pyc -../twisted/conch/manhole_tap.pyc -../twisted/conch/telnet.pyc -../twisted/conch/_version.pyc -../twisted/conch/unix.pyc -../twisted/conch/interfaces.pyc -../twisted/conch/endpoints.pyc -../twisted/conch/recvline.pyc -../twisted/conch/__init__.pyc -../twisted/conch/mixin.pyc -../twisted/conch/ls.pyc -../twisted/conch/checkers.pyc -../twisted/conch/avatar.pyc -../twisted/conch/manhole_ssh.pyc -../twisted/conch/ttymodes.pyc -../twisted/conch/manhole.pyc -../twisted/conch/tap.pyc -../twisted/conch/ui/tkvt100.pyc -../twisted/conch/ui/__init__.pyc -../twisted/conch/ui/ansi.pyc -../twisted/conch/client/connect.pyc -../twisted/conch/client/default.pyc -../twisted/conch/client/agent.pyc -../twisted/conch/client/direct.pyc -../twisted/conch/client/__init__.pyc -../twisted/conch/client/knownhosts.pyc -../twisted/conch/client/options.pyc -../twisted/conch/openssh_compat/primes.pyc -../twisted/conch/openssh_compat/__init__.pyc -../twisted/conch/openssh_compat/factory.pyc -../twisted/conch/scripts/conch.pyc -../twisted/conch/scripts/tkconch.pyc -../twisted/conch/scripts/cftp.pyc -../twisted/conch/scripts/ckeygen.pyc -../twisted/conch/scripts/__init__.pyc -../twisted/conch/ssh/session.pyc -../twisted/conch/ssh/channel.pyc -../twisted/conch/ssh/common.pyc -../twisted/conch/ssh/keys.pyc -../twisted/conch/ssh/userauth.pyc -../twisted/conch/ssh/forwarding.pyc -../twisted/conch/ssh/connection.pyc -../twisted/conch/ssh/agent.pyc -../twisted/conch/ssh/service.pyc -../twisted/conch/ssh/transport.pyc -../twisted/conch/ssh/__init__.pyc -../twisted/conch/ssh/sexpy.pyc -../twisted/conch/ssh/address.pyc -../twisted/conch/ssh/factory.pyc -../twisted/conch/ssh/filetransfer.pyc -../twisted/conch/test/test_openssh_compat.pyc -../twisted/conch/test/test_helper.pyc -../twisted/conch/test/test_knownhosts.pyc 
-../twisted/conch/test/keydata.pyc -../twisted/conch/test/test_ckeygen.pyc -../twisted/conch/test/test_cftp.pyc -../twisted/conch/test/test_conch.pyc -../twisted/conch/test/test_keys.pyc -../twisted/conch/test/test_filetransfer.pyc -../twisted/conch/test/test_scripts.pyc -../twisted/conch/test/test_text.pyc -../twisted/conch/test/test_recvline.pyc -../twisted/conch/test/test_endpoints.pyc -../twisted/conch/test/test_userauth.pyc -../twisted/conch/test/test_manhole.pyc -../twisted/conch/test/__init__.pyc -../twisted/conch/test/test_telnet.pyc -../twisted/conch/test/test_channel.pyc -../twisted/conch/test/test_mixin.pyc -../twisted/conch/test/test_agent.pyc -../twisted/conch/test/test_transport.pyc -../twisted/conch/test/test_window.pyc -../twisted/conch/test/test_checkers.pyc -../twisted/conch/test/test_address.pyc -../twisted/conch/test/test_tap.pyc -../twisted/conch/test/test_connection.pyc -../twisted/conch/test/test_insults.pyc -../twisted/conch/test/test_ssh.pyc -../twisted/conch/test/test_default.pyc -../twisted/conch/test/test_session.pyc -../twisted/conch/insults/client.pyc -../twisted/conch/insults/text.pyc -../twisted/conch/insults/colors.pyc -../twisted/conch/insults/helper.pyc -../twisted/conch/insults/window.pyc -../twisted/conch/insults/__init__.pyc -../twisted/conch/insults/insults.pyc -../twisted/python/logfile.pyc -../twisted/python/formmethod.pyc -../twisted/python/compat.pyc -../twisted/python/threadpool.pyc -../twisted/python/monkey.pyc -../twisted/python/log.pyc -../twisted/python/hook.pyc -../twisted/python/fakepwd.pyc -../twisted/python/usage.pyc -../twisted/python/dist3.pyc -../twisted/python/util.pyc -../twisted/python/syslog.pyc -../twisted/python/threadable.pyc -../twisted/python/text.pyc -../twisted/python/runtime.pyc -../twisted/python/context.pyc -../twisted/python/_textattributes.pyc -../twisted/python/roots.pyc -../twisted/python/win32.pyc -../twisted/python/randbytes.pyc -../twisted/python/hashlib.pyc -../twisted/python/components.pyc -../twisted/python/_inotify.pyc -../twisted/python/modules.pyc -../twisted/python/zippath.pyc -../twisted/python/versions.pyc -../twisted/python/_release.pyc -../twisted/python/deprecate.pyc -../twisted/python/failure.pyc -../twisted/python/urlpath.pyc -../twisted/python/reflect.pyc -../twisted/python/__init__.pyc -../twisted/python/rebuild.pyc -../twisted/python/procutils.pyc -../twisted/python/finalize.pyc -../twisted/python/zipstream.pyc -../twisted/python/filepath.pyc -../twisted/python/dist.pyc -../twisted/python/systemd.pyc -../twisted/python/constants.pyc -../twisted/python/_shellcomp.pyc -../twisted/python/shortcut.pyc -../twisted/python/release.pyc -../twisted/python/htmlizer.pyc -../twisted/python/lockfile.pyc -../twisted/python/test/test_zippath.pyc -../twisted/python/test/test_fakepwd.pyc -../twisted/python/test/test_release.pyc -../twisted/python/test/deprecatedattributes.pyc -../twisted/python/test/test_constants.pyc -../twisted/python/test/test_sendmsg.pyc -../twisted/python/test/test_textattributes.pyc -../twisted/python/test/test_shellcomp.pyc -../twisted/python/test/test_htmlizer.pyc -../twisted/python/test/test_runtime.pyc -../twisted/python/test/pullpipe.pyc -../twisted/python/test/test_deprecate.pyc -../twisted/python/test/test_util.pyc -../twisted/python/test/test_hashlib.pyc -../twisted/python/test/test_inotify.pyc -../twisted/python/test/__init__.pyc -../twisted/python/test/test_dist.pyc -../twisted/python/test/test_syslog.pyc -../twisted/python/test/test_zipstream.pyc 
-../twisted/python/test/test_urlpath.pyc -../twisted/python/test/test_components.pyc -../twisted/python/test/test_win32.pyc -../twisted/python/test/test_systemd.pyc -../twisted/python/test/test_versions.pyc -../twisted/python/test/test_dist3.pyc -../twisted/python/test/modules_helpers.pyc -../twisted/runner/procmon.pyc -../twisted/runner/_version.pyc -../twisted/runner/inetdconf.pyc -../twisted/runner/inetdtap.pyc -../twisted/runner/__init__.pyc -../twisted/runner/procmontap.pyc -../twisted/runner/inetd.pyc -../twisted/runner/test/test_procmon.pyc -../twisted/runner/test/test_procmontap.pyc -../twisted/runner/test/__init__.pyc -../twisted/tap/telnet.pyc -../twisted/tap/ftp.pyc -../twisted/tap/socks.pyc -../twisted/tap/portforward.pyc -../twisted/tap/__init__.pyc -../twisted/tap/manhole.pyc -../twisted/positioning/ipositioning.pyc -../twisted/positioning/__init__.pyc -../twisted/positioning/_sentence.pyc -../twisted/positioning/base.pyc -../twisted/positioning/nmea.pyc -../twisted/positioning/test/test_sentence.pyc -../twisted/positioning/test/test_base.pyc -../twisted/positioning/test/receiver.pyc -../twisted/positioning/test/__init__.pyc -../twisted/positioning/test/test_nmea.pyc -../twisted/spread/pb.pyc -../twisted/spread/util.pyc -../twisted/spread/banana.pyc -../twisted/spread/interfaces.pyc -../twisted/spread/jelly.pyc -../twisted/spread/__init__.pyc -../twisted/spread/flavors.pyc -../twisted/spread/publish.pyc -../twisted/spread/ui/gtk2util.pyc -../twisted/spread/ui/__init__.pyc -../twisted/spread/ui/tkutil.pyc -../twisted/spread/ui/tktree.pyc -../twisted/internet/error.pyc -../twisted/internet/default.pyc -../twisted/internet/qtreactor.pyc -../twisted/internet/win32eventreactor.pyc -../twisted/internet/stdio.pyc -../twisted/internet/cfreactor.pyc -../twisted/internet/_posixserialport.pyc -../twisted/internet/ssl.pyc -../twisted/internet/threads.pyc -../twisted/internet/_pollingfile.pyc -../twisted/internet/gtk2reactor.pyc -../twisted/internet/tksupport.pyc -../twisted/internet/gireactor.pyc -../twisted/internet/glib2reactor.pyc -../twisted/internet/_newtls.pyc -../twisted/internet/gtk3reactor.pyc -../twisted/internet/_baseprocess.pyc -../twisted/internet/abstract.pyc -../twisted/internet/inotify.pyc -../twisted/internet/unix.pyc -../twisted/internet/interfaces.pyc -../twisted/internet/protocol.pyc -../twisted/internet/_dumbwin32proc.pyc -../twisted/internet/tcp.pyc -../twisted/internet/endpoints.pyc -../twisted/internet/main.pyc -../twisted/internet/udp.pyc -../twisted/internet/wxsupport.pyc -../twisted/internet/pyuisupport.pyc -../twisted/internet/process.pyc -../twisted/internet/_signals.pyc -../twisted/internet/__init__.pyc -../twisted/internet/posixbase.pyc -../twisted/internet/pollreactor.pyc -../twisted/internet/serialport.pyc -../twisted/internet/_sslverify.pyc -../twisted/internet/kqreactor.pyc -../twisted/internet/fdesc.pyc -../twisted/internet/reactor.pyc -../twisted/internet/base.pyc -../twisted/internet/address.pyc -../twisted/internet/_threadedselect.pyc -../twisted/internet/_ssl.pyc -../twisted/internet/selectreactor.pyc -../twisted/internet/_win32serialport.pyc -../twisted/internet/wxreactor.pyc -../twisted/internet/epollreactor.pyc -../twisted/internet/defer.pyc -../twisted/internet/utils.pyc -../twisted/internet/task.pyc -../twisted/internet/_win32stdio.pyc -../twisted/internet/gtkreactor.pyc -../twisted/internet/_posixstdio.pyc -../twisted/internet/_glibbase.pyc -../twisted/internet/iocpreactor/abstract.pyc -../twisted/internet/iocpreactor/interfaces.pyc 
-../twisted/internet/iocpreactor/tcp.pyc -../twisted/internet/iocpreactor/udp.pyc -../twisted/internet/iocpreactor/const.pyc -../twisted/internet/iocpreactor/__init__.pyc -../twisted/internet/iocpreactor/reactor.pyc -../twisted/internet/iocpreactor/setup.pyc -../twisted/internet/test/test_posixbase.pyc -../twisted/internet/test/modulehelpers.pyc -../twisted/internet/test/test_stdio.pyc -../twisted/internet/test/test_qtreactor.pyc -../twisted/internet/test/test_serialport.pyc -../twisted/internet/test/test_time.pyc -../twisted/internet/test/process_helper.pyc -../twisted/internet/test/test_threads.pyc -../twisted/internet/test/test_core.pyc -../twisted/internet/test/test_posixprocess.pyc -../twisted/internet/test/test_filedescriptor.pyc -../twisted/internet/test/_win32ifaces.pyc -../twisted/internet/test/test_main.pyc -../twisted/internet/test/test_sigchld.pyc -../twisted/internet/test/test_unix.pyc -../twisted/internet/test/test_base.pyc -../twisted/internet/test/test_epollreactor.pyc -../twisted/internet/test/test_socket.pyc -../twisted/internet/test/test_endpoints.pyc -../twisted/internet/test/test_newtls.pyc -../twisted/internet/test/test_abstract.pyc -../twisted/internet/test/test_inotify.pyc -../twisted/internet/test/test_protocol.pyc -../twisted/internet/test/test_baseprocess.pyc -../twisted/internet/test/__init__.pyc -../twisted/internet/test/test_fdset.pyc -../twisted/internet/test/test_glibbase.pyc -../twisted/internet/test/test_gireactor.pyc -../twisted/internet/test/process_gireactornocompat.pyc -../twisted/internet/test/fakeendpoint.pyc -../twisted/internet/test/test_tcp.pyc -../twisted/internet/test/test_address.pyc -../twisted/internet/test/test_iocp.pyc -../twisted/internet/test/reactormixins.pyc -../twisted/internet/test/test_tls.pyc -../twisted/internet/test/_posixifaces.pyc -../twisted/internet/test/test_pollingfile.pyc -../twisted/internet/test/test_gtkreactor.pyc -../twisted/internet/test/test_udp.pyc -../twisted/internet/test/test_default.pyc -../twisted/internet/test/test_inlinecb.pyc -../twisted/internet/test/test_process.pyc -../twisted/internet/test/test_udp_internals.pyc -../twisted/internet/test/connectionmixins.pyc -../twisted/internet/test/test_win32events.pyc -../twisted/news/nntp.pyc -../twisted/news/database.pyc -../twisted/news/_version.pyc -../twisted/news/news.pyc -../twisted/news/__init__.pyc -../twisted/news/tap.pyc -../twisted/news/test/test_database.pyc -../twisted/news/test/__init__.pyc -../twisted/news/test/test_nntp.pyc -../twisted/news/test/test_news.pyc -../twisted/words/_version.pyc -../twisted/words/service.pyc -../twisted/words/iwords.pyc -../twisted/words/__init__.pyc -../twisted/words/ewords.pyc -../twisted/words/xmpproutertap.pyc -../twisted/words/tap.pyc -../twisted/words/xish/xpathparser.pyc -../twisted/words/xish/utility.pyc -../twisted/words/xish/xmlstream.pyc -../twisted/words/xish/__init__.pyc -../twisted/words/xish/xpath.pyc -../twisted/words/xish/domish.pyc -../twisted/words/im/basesupport.pyc -../twisted/words/im/baseaccount.pyc -../twisted/words/im/basechat.pyc -../twisted/words/im/interfaces.pyc -../twisted/words/im/ircsupport.pyc -../twisted/words/im/__init__.pyc -../twisted/words/im/pbsupport.pyc -../twisted/words/im/locals.pyc -../twisted/words/test/test_msn.pyc -../twisted/words/test/test_jabberjid.pyc -../twisted/words/test/test_ircsupport.pyc -../twisted/words/test/test_service.pyc -../twisted/words/test/test_jabbersaslmechanisms.pyc -../twisted/words/test/test_xmlstream.pyc -../twisted/words/test/test_irc.pyc 
-../twisted/words/test/test_xmpproutertap.pyc -../twisted/words/test/test_jabberxmlstream.pyc -../twisted/words/test/test_basesupport.pyc -../twisted/words/test/test_irc_service.pyc -../twisted/words/test/test_domish.pyc -../twisted/words/test/__init__.pyc -../twisted/words/test/test_jabbercomponent.pyc -../twisted/words/test/test_jabberxmppstringprep.pyc -../twisted/words/test/test_tap.pyc -../twisted/words/test/test_basechat.pyc -../twisted/words/test/test_oscar.pyc -../twisted/words/test/test_jabberjstrports.pyc -../twisted/words/test/test_jabberclient.pyc -../twisted/words/test/test_jabbersasl.pyc -../twisted/words/test/test_xpath.pyc -../twisted/words/test/test_xishutil.pyc -../twisted/words/test/test_jabbererror.pyc -../twisted/words/protocols/irc.pyc -../twisted/words/protocols/oscar.pyc -../twisted/words/protocols/__init__.pyc -../twisted/words/protocols/msn.pyc -../twisted/words/protocols/jabber/error.pyc -../twisted/words/protocols/jabber/client.pyc -../twisted/words/protocols/jabber/ijabber.pyc -../twisted/words/protocols/jabber/jid.pyc -../twisted/words/protocols/jabber/sasl_mechanisms.pyc -../twisted/words/protocols/jabber/xmpp_stringprep.pyc -../twisted/words/protocols/jabber/component.pyc -../twisted/words/protocols/jabber/xmlstream.pyc -../twisted/words/protocols/jabber/__init__.pyc -../twisted/words/protocols/jabber/jstrports.pyc -../twisted/words/protocols/jabber/sasl.pyc -../twisted/scripts/tapconvert.pyc -../twisted/scripts/tap2rpm.pyc -../twisted/scripts/twistd.pyc -../twisted/scripts/trial.pyc -../twisted/scripts/_twistd_unix.pyc -../twisted/scripts/_twistw.pyc -../twisted/scripts/tap2deb.pyc -../twisted/scripts/__init__.pyc -../twisted/scripts/tkunzip.pyc -../twisted/scripts/manhole.pyc -../twisted/scripts/htmlizer.pyc -../twisted/scripts/test/test_scripts.pyc -../twisted/scripts/test/test_tap2deb.pyc -../twisted/scripts/test/__init__.pyc -../twisted/scripts/test/test_tap2rpm.pyc -../twisted/lore/slides.pyc -../twisted/lore/lint.pyc -../twisted/lore/default.pyc -../twisted/lore/_version.pyc -../twisted/lore/numberer.pyc -../twisted/lore/docbook.pyc -../twisted/lore/htmlbook.pyc -../twisted/lore/process.pyc -../twisted/lore/__init__.pyc -../twisted/lore/man2lore.pyc -../twisted/lore/lmath.pyc -../twisted/lore/latex.pyc -../twisted/lore/indexer.pyc -../twisted/lore/tree.pyc -../twisted/lore/texi.pyc -../twisted/lore/scripts/lore.pyc -../twisted/lore/scripts/__init__.pyc -../twisted/lore/test/test_man2lore.pyc -../twisted/lore/test/test_lint.pyc -../twisted/lore/test/test_scripts.pyc -../twisted/lore/test/test_lmath.pyc -../twisted/lore/test/test_lore.pyc -../twisted/lore/test/test_latex.pyc -../twisted/lore/test/__init__.pyc -../twisted/lore/test/test_texi.pyc -../twisted/lore/test/test_docbook.pyc -../twisted/lore/test/test_slides.pyc -../twisted/web/error.pyc -../twisted/web/client.pyc -../twisted/web/twcgi.pyc -../twisted/web/soap.pyc -../twisted/web/xmlrpc.pyc -../twisted/web/server.pyc -../twisted/web/util.pyc -../twisted/web/_stan.pyc -../twisted/web/distrib.pyc -../twisted/web/_version.pyc -../twisted/web/http.pyc -../twisted/web/wsgi.pyc -../twisted/web/sux.pyc -../twisted/web/static.pyc -../twisted/web/http_headers.pyc -../twisted/web/domhelpers.pyc -../twisted/web/_newclient.pyc -../twisted/web/script.pyc -../twisted/web/iweb.pyc -../twisted/web/vhost.pyc -../twisted/web/guard.pyc -../twisted/web/_flatten.pyc -../twisted/web/template.pyc -../twisted/web/demo.pyc -../twisted/web/_responses.pyc -../twisted/web/resource.pyc -../twisted/web/proxy.pyc 
-../twisted/web/__init__.pyc -../twisted/web/microdom.pyc -../twisted/web/_element.pyc -../twisted/web/html.pyc -../twisted/web/rewrite.pyc -../twisted/web/tap.pyc -../twisted/web/_auth/digest.pyc -../twisted/web/_auth/wrapper.pyc -../twisted/web/_auth/__init__.pyc -../twisted/web/_auth/basic.pyc -../twisted/web/test/test_http_headers.pyc -../twisted/web/test/test_xml.pyc -../twisted/web/test/requesthelper.pyc -../twisted/web/test/test_httpauth.pyc -../twisted/web/test/test_error.pyc -../twisted/web/test/test_newclient.pyc -../twisted/web/test/test_stan.pyc -../twisted/web/test/test_script.pyc -../twisted/web/test/test_wsgi.pyc -../twisted/web/test/test_util.pyc -../twisted/web/test/test_cgi.pyc -../twisted/web/test/test_http.pyc -../twisted/web/test/_util.pyc -../twisted/web/test/__init__.pyc -../twisted/web/test/test_flatten.pyc -../twisted/web/test/test_static.pyc -../twisted/web/test/test_proxy.pyc -../twisted/web/test/test_agent.pyc -../twisted/web/test/test_soap.pyc -../twisted/web/test/test_webclient.pyc -../twisted/web/test/test_web.pyc -../twisted/web/test/test_tap.pyc -../twisted/web/test/test_template.pyc -../twisted/web/test/test_domhelpers.pyc -../twisted/web/test/test_distrib.pyc -../twisted/web/test/test_xmlrpc.pyc -../twisted/web/test/test_resource.pyc -../twisted/web/test/test_vhost.pyc -../twisted/pair/_version.pyc -../twisted/pair/ip.pyc -../twisted/pair/ethernet.pyc -../twisted/pair/__init__.pyc -../twisted/pair/rawudp.pyc -../twisted/pair/testing.pyc -../twisted/pair/raw.pyc -../twisted/pair/tuntap.pyc -../twisted/pair/test/test_ip.pyc -../twisted/pair/test/test_ethernet.pyc -../twisted/pair/test/__init__.pyc -../twisted/pair/test/test_tuntap.pyc -../twisted/pair/test/test_rawudp.pyc -../twisted/persisted/aot.pyc -../twisted/persisted/crefutil.pyc -../twisted/persisted/sob.pyc -../twisted/persisted/__init__.pyc -../twisted/persisted/dirdbm.pyc -../twisted/persisted/styles.pyc -../twisted/persisted/test/test_styles.pyc -../twisted/persisted/test/__init__.pyc -../twisted/application/reactors.pyc -../twisted/application/service.pyc -../twisted/application/internet.pyc -../twisted/application/__init__.pyc -../twisted/application/strports.pyc -../twisted/application/app.pyc -../twisted/application/test/__init__.pyc -../twisted/application/test/test_internet.pyc -../twisted/plugins/twisted_reactors.pyc -../twisted/plugins/twisted_lore.pyc -../twisted/plugins/twisted_runner.pyc -../twisted/plugins/twisted_conch.pyc -../twisted/plugins/twisted_portforward.pyc -../twisted/plugins/twisted_qtstub.pyc -../twisted/plugins/twisted_names.pyc -../twisted/plugins/twisted_news.pyc -../twisted/plugins/cred_memory.pyc -../twisted/plugins/cred_anonymous.pyc -../twisted/plugins/twisted_web.pyc -../twisted/plugins/twisted_ftp.pyc -../twisted/plugins/__init__.pyc -../twisted/plugins/twisted_mail.pyc -../twisted/plugins/twisted_socks.pyc -../twisted/plugins/twisted_words.pyc -../twisted/plugins/cred_file.pyc -../twisted/plugins/twisted_inet.pyc -../twisted/plugins/cred_sshkeys.pyc -../twisted/plugins/twisted_telnet.pyc -../twisted/plugins/twisted_core.pyc -../twisted/plugins/cred_unix.pyc -../twisted/plugins/twisted_trial.pyc -../twisted/plugins/twisted_manhole.pyc -../twisted/test/plugin_extra1.pyc -../twisted/test/test_stdio.pyc -../twisted/test/test_compat.pyc -../twisted/test/test_sip.pyc -../twisted/test/test_shortcut.pyc -../twisted/test/stdio_test_writeseq.pyc -../twisted/test/stdio_test_consumer.pyc -../twisted/test/test_rebuild.pyc -../twisted/test/process_echoer.pyc 
-../twisted/test/iosim.pyc -../twisted/test/stdio_test_write.pyc -../twisted/test/test_usage.pyc -../twisted/test/stdio_test_hostpeer.pyc -../twisted/test/test_dirdbm.pyc -../twisted/test/test_threads.pyc -../twisted/test/test_htb.pyc -../twisted/test/test_logfile.pyc -../twisted/test/process_tester.pyc -../twisted/test/test_monkey.pyc -../twisted/test/test_adbapi.pyc -../twisted/test/test_task.pyc -../twisted/test/test_strerror.pyc -../twisted/test/test_amp.pyc -../twisted/test/test_stringtransport.pyc -../twisted/test/test_threadpool.pyc -../twisted/test/test_error.pyc -../twisted/test/test_formmethod.pyc -../twisted/test/process_cmdline.pyc -../twisted/test/test_socks.pyc -../twisted/test/myrebuilder2.pyc -../twisted/test/test_sslverify.pyc -../twisted/test/test_defgen.pyc -../twisted/test/test_unix.pyc -../twisted/test/test_banana.pyc -../twisted/test/stdio_test_halfclose.pyc -../twisted/test/test_text.pyc -../twisted/test/test_iutils.pyc -../twisted/test/process_fds.pyc -../twisted/test/process_stdinreader.pyc -../twisted/test/test_twisted.pyc -../twisted/test/test_ftp_options.pyc -../twisted/test/reflect_helper_IE.pyc -../twisted/test/test_roots.pyc -../twisted/test/test_dict.pyc -../twisted/test/test_postfix.pyc -../twisted/test/test_ssl.pyc -../twisted/test/stdio_test_producer.pyc -../twisted/test/test_persisted.pyc -../twisted/test/test_defer.pyc -../twisted/test/test_jelly.pyc -../twisted/test/proto_helpers.pyc -../twisted/test/test_setup.pyc -../twisted/test/test_strcred.pyc -../twisted/test/test_abstract.pyc -../twisted/test/mock_win32process.pyc -../twisted/test/process_signal.pyc -../twisted/test/test_tcp_internals.pyc -../twisted/test/test_threadable.pyc -../twisted/test/test_doc.pyc -../twisted/test/test_manhole.pyc -../twisted/test/process_tty.pyc -../twisted/test/test_hook.pyc -../twisted/test/test_loopback.pyc -../twisted/test/__init__.pyc -../twisted/test/test_failure.pyc -../twisted/test/test_log.pyc -../twisted/test/stdio_test_lastwrite.pyc -../twisted/test/test_strports.pyc -../twisted/test/test_cooperator.pyc -../twisted/test/test_tpfile.pyc -../twisted/test/stdio_test_loseconn.pyc -../twisted/test/crash_test_dummy.pyc -../twisted/test/myrebuilder1.pyc -../twisted/test/test_ftp.pyc -../twisted/test/test_pcp.pyc -../twisted/test/plugin_basic.pyc -../twisted/test/test_protocols.pyc -../twisted/test/test_policies.pyc -../twisted/test/test_sob.pyc -../twisted/test/test_explorer.pyc -../twisted/test/test_factories.pyc -../twisted/test/test_tcp.pyc -../twisted/test/test_digestauth.pyc -../twisted/test/test_pb.pyc -../twisted/test/test_application.pyc -../twisted/test/test_context.pyc -../twisted/test/test_iosim.pyc -../twisted/test/process_reader.pyc -../twisted/test/test_stateful.pyc -../twisted/test/test_reflect.pyc -../twisted/test/test_pbfailure.pyc -../twisted/test/test_nmea.pyc -../twisted/test/plugin_extra2.pyc -../twisted/test/test_finger.pyc -../twisted/test/test_randbytes.pyc -../twisted/test/process_twisted.pyc -../twisted/test/ssl_helpers.pyc -../twisted/test/test_udp.pyc -../twisted/test/test_ident.pyc -../twisted/test/test_paths.pyc -../twisted/test/reflect_helper_ZDE.pyc -../twisted/test/test_twistd.pyc -../twisted/test/test_memcache.pyc -../twisted/test/reflect_helper_VE.pyc -../twisted/test/test_process.pyc -../twisted/test/test_newcred.pyc -../twisted/test/testutils.pyc -../twisted/test/test_modules.pyc -../twisted/test/test_plugin.pyc -../twisted/test/_preamble.pyc -../twisted/test/test_internet.pyc -../twisted/test/test_fdesc.pyc 
-../twisted/test/process_linger.pyc -../twisted/test/test_lockfile.pyc -../twisted/protocols/sip.pyc -../twisted/protocols/stateful.pyc -../twisted/protocols/telnet.pyc -../twisted/protocols/wire.pyc -../twisted/protocols/shoutcast.pyc -../twisted/protocols/dict.pyc -../twisted/protocols/ident.pyc -../twisted/protocols/ftp.pyc -../twisted/protocols/finger.pyc -../twisted/protocols/amp.pyc -../twisted/protocols/postfix.pyc -../twisted/protocols/socks.pyc -../twisted/protocols/portforward.pyc -../twisted/protocols/__init__.pyc -../twisted/protocols/tls.pyc -../twisted/protocols/basic.pyc -../twisted/protocols/memcache.pyc -../twisted/protocols/htb.pyc -../twisted/protocols/loopback.pyc -../twisted/protocols/policies.pyc -../twisted/protocols/pcp.pyc -../twisted/protocols/gps/__init__.pyc -../twisted/protocols/gps/rockwell.pyc -../twisted/protocols/gps/nmea.pyc -../twisted/protocols/mice/__init__.pyc -../twisted/protocols/mice/mouseman.pyc -../twisted/protocols/test/test_basic.pyc -../twisted/protocols/test/__init__.pyc -../twisted/protocols/test/test_tls.pyc -../twisted/enterprise/__init__.pyc -../twisted/enterprise/adbapi.pyc -../twisted/runner/portmap.so -../twisted/test/raiser.so -../twisted/python/sendmsg.so -../twisted/manhole/inspectro.glade -../twisted/manhole/logview.glade -../twisted/manhole/gladereactor.glade -../twisted/manhole/ui/gtk2manhole.glade -../twisted/mail/topfiles/README -../twisted/mail/topfiles/NEWS -../twisted/mail/test/rfc822.message -../twisted/mail/test/server.pem -../twisted/names/topfiles/README -../twisted/names/topfiles/NEWS -../twisted/conch/topfiles/README -../twisted/conch/topfiles/NEWS -../twisted/python/sendmsg.c -../twisted/python/twisted-completion.zsh -../twisted/python/_initgroups.c -../twisted/python/zsh/_tkmktap -../twisted/python/zsh/_twistd -../twisted/python/zsh/_mktap -../twisted/python/zsh/_lore -../twisted/python/zsh/_pyhtmlizer -../twisted/python/zsh/_tap2deb -../twisted/python/zsh/_websetroot -../twisted/python/zsh/README.txt -../twisted/python/zsh/_cftp -../twisted/python/zsh/_conch -../twisted/python/zsh/_tkconch -../twisted/python/zsh/_manhole -../twisted/python/zsh/_tapconvert -../twisted/python/zsh/_ckeygen -../twisted/python/zsh/_trial -../twisted/python/zsh/_tap2rpm -../twisted/runner/portmap.c -../twisted/runner/topfiles/README -../twisted/runner/topfiles/NEWS -../twisted/spread/ui/login2.glade -../twisted/internet/iocpreactor/notes.txt -../twisted/internet/iocpreactor/build.bat -../twisted/internet/iocpreactor/iocpsupport/winsock_pointers.c -../twisted/internet/iocpreactor/iocpsupport/wsasend.pxi -../twisted/internet/iocpreactor/iocpsupport/acceptex.pxi -../twisted/internet/iocpreactor/iocpsupport/winsock_pointers.h -../twisted/internet/iocpreactor/iocpsupport/wsarecv.pxi -../twisted/internet/iocpreactor/iocpsupport/connectex.pxi -../twisted/internet/iocpreactor/iocpsupport/iocpsupport.pyx -../twisted/internet/iocpreactor/iocpsupport/iocpsupport.c -../twisted/internet/test/fake_CAs/chain.pem -../twisted/internet/test/fake_CAs/thing2-duplicate.pem -../twisted/internet/test/fake_CAs/thing1.pem -../twisted/internet/test/fake_CAs/not-a-certificate -../twisted/internet/test/fake_CAs/thing2.pem -../twisted/news/topfiles/README -../twisted/news/topfiles/NEWS -../twisted/topfiles/README -../twisted/topfiles/NEWS -../twisted/topfiles/CREDITS -../twisted/topfiles/ChangeLog.Old -../twisted/words/xish/xpathparser.g -../twisted/words/topfiles/README -../twisted/words/topfiles/NEWS -../twisted/words/im/instancemessenger.glade 
-../twisted/lore/xhtml-symbol.ent -../twisted/lore/xhtml-lat1.ent -../twisted/lore/template.mgp -../twisted/lore/xhtml1-transitional.dtd -../twisted/lore/xhtml-special.ent -../twisted/lore/xhtml1-strict.dtd -../twisted/lore/topfiles/README -../twisted/lore/topfiles/NEWS -../twisted/lore/test/lore_index_file_unnumbered_out.html -../twisted/lore/test/template.tpl -../twisted/lore/test/lore_index_file_out_multiple.html -../twisted/lore/test/lore_numbering_test_out.html -../twisted/lore/test/simple.html -../twisted/lore/test/lore_numbering_test_out2.html -../twisted/lore/test/lore_index_file_out.html -../twisted/lore/test/lore_index_test.xhtml -../twisted/lore/test/simple3.html -../twisted/lore/test/simple4.html -../twisted/lore/test/lore_index_test2.xhtml -../twisted/web/failure.xhtml -../twisted/web/topfiles/README -../twisted/web/topfiles/NEWS -../twisted/pair/topfiles/README -../twisted/pair/topfiles/NEWS -../twisted/test/raiser.c -../twisted/test/server.pem -../twisted/test/raiser.pyx -./ -requires.txt -SOURCES.txt -dependency_links.txt -PKG-INFO -not-zip-safe -top_level.txt -../../../../bin/manhole -../../../../bin/twistd -../../../../bin/tap2deb -../../../../bin/mailmail -../../../../bin/tkconch -../../../../bin/trial -../../../../bin/ckeygen -../../../../bin/conch -../../../../bin/tap2rpm -../../../../bin/pyhtmlizer -../../../../bin/lore -../../../../bin/tapconvert -../../../../bin/cftp diff --git a/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/not-zip-safe b/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/not-zip-safe deleted file mode 100644 index 8b13789..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/not-zip-safe +++ /dev/null @@ -1 +0,0 @@ - diff --git a/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/requires.txt b/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/requires.txt deleted file mode 100644 index fcdfa93..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/requires.txt +++ /dev/null @@ -1 +0,0 @@ -zope.interface >= 3.6.0 \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/top_level.txt b/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/top_level.txt deleted file mode 100644 index 3eb29f0..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Twisted-14.0.0.egg-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -twisted diff --git a/Linux_i686/lib/python2.7/site-packages/Werkzeug-0.9.4.egg-info/PKG-INFO b/Linux_i686/lib/python2.7/site-packages/Werkzeug-0.9.4.egg-info/PKG-INFO deleted file mode 100644 index a4d99cb..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Werkzeug-0.9.4.egg-info/PKG-INFO +++ /dev/null @@ -1,72 +0,0 @@ -Metadata-Version: 1.1 -Name: Werkzeug -Version: 0.9.4 -Summary: The Swiss Army knife of Python web development -Home-page: http://werkzeug.pocoo.org/ -Author: Armin Ronacher -Author-email: armin.ronacher@active-4.com -License: BSD -Description: - Werkzeug - ======== - - Werkzeug started as simple collection of various utilities for WSGI - applications and has become one of the most advanced WSGI utility - modules. It includes a powerful debugger, full featured request and - response objects, HTTP utilities to handle entity tags, cache control - headers, HTTP dates, cookie handling, file uploads, a powerful URL - routing system and a bunch of community contributed addon modules. 
- - Werkzeug is unicode aware and doesn't enforce a specific template - engine, database adapter or anything else. It doesn't even enforce - a specific way of handling requests and leaves all that up to the - developer. It's most useful for end user applications which should work - on as many server environments as possible (such as blogs, wikis, - bulletin boards, etc.). - - Details and example applications are available on the - `Werkzeug website `_. - - - Features - -------- - - - unicode awareness - - - request and response objects - - - various utility functions for dealing with HTTP headers such as - `Accept` and `Cache-Control` headers. - - - thread local objects with proper cleanup at request end - - - an interactive debugger - - - A simple WSGI server with support for threading and forking - with an automatic reloader. - - - a flexible URL routing system with REST support. - - - fully WSGI compatible - - - Development Version - ------------------- - - The Werkzeug development version can be installed by cloning the git - repository from `github`_:: - - git clone git@github.com:mitsuhiko/werkzeug.git - - .. _github: http://github.com/mitsuhiko/werkzeug - -Platform: any -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Classifier: Topic :: Software Development :: Libraries :: Python Modules diff --git a/Linux_i686/lib/python2.7/site-packages/Werkzeug-0.9.4.egg-info/SOURCES.txt b/Linux_i686/lib/python2.7/site-packages/Werkzeug-0.9.4.egg-info/SOURCES.txt deleted file mode 100644 index be54ff7..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Werkzeug-0.9.4.egg-info/SOURCES.txt +++ /dev/null @@ -1,289 +0,0 @@ -AUTHORS -CHANGES -LICENSE -MANIFEST.in -Makefile -setup.cfg -setup.py -Werkzeug.egg-info/PKG-INFO -Werkzeug.egg-info/SOURCES.txt -Werkzeug.egg-info/dependency_links.txt -Werkzeug.egg-info/not-zip-safe -Werkzeug.egg-info/top_level.txt -artwork/logo.png -artwork/logo.svg -docs/Makefile -docs/changes.rst -docs/conf.py -docs/contents.rst.inc -docs/datastructures.rst -docs/debug.rst -docs/exceptions.rst -docs/http.rst -docs/index.rst -docs/installation.rst -docs/latexindex.rst -docs/levels.rst -docs/local.rst -docs/logo.pdf -docs/make.bat -docs/makearchive.py -docs/middlewares.rst -docs/python3.rst -docs/quickstart.rst -docs/request_data.rst -docs/routing.rst -docs/serving.rst -docs/terms.rst -docs/test.rst -docs/transition.rst -docs/tutorial.rst -docs/unicode.rst -docs/utils.rst -docs/werkzeugext.py -docs/werkzeugstyle.sty -docs/wrappers.rst -docs/wsgi.rst -docs/_static/background.png -docs/_static/codebackground.png -docs/_static/contents.png -docs/_static/debug-screenshot.png -docs/_static/favicon.ico -docs/_static/header.png -docs/_static/navigation.png -docs/_static/navigation_active.png -docs/_static/shortly.png -docs/_static/shorty-screenshot.png -docs/_static/style.css -docs/_static/werkzeug.js -docs/_static/werkzeug.png -docs/_templates/sidebarintro.html -docs/_templates/sidebarlogo.html -docs/_themes/LICENSE -docs/_themes/README -docs/_themes/werkzeug_theme_support.py -docs/_themes/werkzeug/layout.html -docs/_themes/werkzeug/relations.html -docs/_themes/werkzeug/theme.conf -docs/_themes/werkzeug/static/werkzeug.css_t 
-docs/contrib/atom.rst -docs/contrib/cache.rst -docs/contrib/fixers.rst -docs/contrib/index.rst -docs/contrib/iterio.rst -docs/contrib/lint.rst -docs/contrib/profiler.rst -docs/contrib/securecookie.rst -docs/contrib/sessions.rst -docs/contrib/wrappers.rst -docs/deployment/cgi.rst -docs/deployment/fastcgi.rst -docs/deployment/index.rst -docs/deployment/mod_wsgi.rst -docs/deployment/proxying.rst -examples/README -examples/cookieauth.py -examples/httpbasicauth.py -examples/manage-coolmagic.py -examples/manage-couchy.py -examples/manage-cupoftee.py -examples/manage-i18nurls.py -examples/manage-plnt.py -examples/manage-shorty.py -examples/manage-simplewiki.py -examples/manage-webpylike.py -examples/upload.py -examples/contrib/README -examples/contrib/securecookie.py -examples/contrib/sessions.py -examples/coolmagic/__init__.py -examples/coolmagic/application.py -examples/coolmagic/helpers.py -examples/coolmagic/utils.py -examples/coolmagic/public/style.css -examples/coolmagic/templates/layout.html -examples/coolmagic/templates/static/about.html -examples/coolmagic/templates/static/index.html -examples/coolmagic/templates/static/not_found.html -examples/coolmagic/views/__init__.py -examples/coolmagic/views/static.py -examples/couchy/README -examples/couchy/__init__.py -examples/couchy/application.py -examples/couchy/models.py -examples/couchy/utils.py -examples/couchy/views.py -examples/couchy/static/style.css -examples/couchy/templates/display.html -examples/couchy/templates/layout.html -examples/couchy/templates/list.html -examples/couchy/templates/new.html -examples/couchy/templates/not_found.html -examples/cupoftee/__init__.py -examples/cupoftee/application.py -examples/cupoftee/db.py -examples/cupoftee/network.py -examples/cupoftee/pages.py -examples/cupoftee/utils.py -examples/cupoftee/shared/content.png -examples/cupoftee/shared/down.png -examples/cupoftee/shared/favicon.ico -examples/cupoftee/shared/header.png -examples/cupoftee/shared/logo.png -examples/cupoftee/shared/style.css -examples/cupoftee/shared/up.png -examples/cupoftee/templates/layout.html -examples/cupoftee/templates/missingpage.html -examples/cupoftee/templates/search.html -examples/cupoftee/templates/server.html -examples/cupoftee/templates/serverlist.html -examples/i18nurls/__init__.py -examples/i18nurls/application.py -examples/i18nurls/urls.py -examples/i18nurls/views.py -examples/i18nurls/templates/about.html -examples/i18nurls/templates/blog.html -examples/i18nurls/templates/index.html -examples/i18nurls/templates/layout.html -examples/partial/README -examples/partial/complex_routing.py -examples/plnt/__init__.py -examples/plnt/database.py -examples/plnt/sync.py -examples/plnt/utils.py -examples/plnt/views.py -examples/plnt/webapp.py -examples/plnt/shared/style.css -examples/plnt/templates/about.html -examples/plnt/templates/index.html -examples/plnt/templates/layout.html -examples/shortly/shortly.py -examples/shortly/static/style.css -examples/shortly/templates/404.html -examples/shortly/templates/layout.html -examples/shortly/templates/new_url.html -examples/shortly/templates/short_link_details.html -examples/shorty/__init__.py -examples/shorty/application.py -examples/shorty/models.py -examples/shorty/utils.py -examples/shorty/views.py -examples/shorty/static/style.css -examples/shorty/templates/display.html -examples/shorty/templates/layout.html -examples/shorty/templates/list.html -examples/shorty/templates/new.html -examples/shorty/templates/not_found.html -examples/simplewiki/__init__.py 
-examples/simplewiki/actions.py -examples/simplewiki/application.py -examples/simplewiki/database.py -examples/simplewiki/specialpages.py -examples/simplewiki/utils.py -examples/simplewiki/shared/style.css -examples/simplewiki/templates/action_diff.html -examples/simplewiki/templates/action_edit.html -examples/simplewiki/templates/action_log.html -examples/simplewiki/templates/action_revert.html -examples/simplewiki/templates/action_show.html -examples/simplewiki/templates/layout.html -examples/simplewiki/templates/macros.xml -examples/simplewiki/templates/missing_action.html -examples/simplewiki/templates/page_index.html -examples/simplewiki/templates/page_missing.html -examples/simplewiki/templates/recent_changes.html -examples/webpylike/example.py -examples/webpylike/webpylike.py -werkzeug/__init__.py -werkzeug/_compat.py -werkzeug/_internal.py -werkzeug/datastructures.py -werkzeug/exceptions.py -werkzeug/formparser.py -werkzeug/http.py -werkzeug/local.py -werkzeug/posixemulation.py -werkzeug/routing.py -werkzeug/script.py -werkzeug/security.py -werkzeug/serving.py -werkzeug/test.py -werkzeug/testapp.py -werkzeug/urls.py -werkzeug/useragents.py -werkzeug/utils.py -werkzeug/wrappers.py -werkzeug/wsgi.py -werkzeug/contrib/__init__.py -werkzeug/contrib/atom.py -werkzeug/contrib/cache.py -werkzeug/contrib/fixers.py -werkzeug/contrib/iterio.py -werkzeug/contrib/jsrouting.py -werkzeug/contrib/limiter.py -werkzeug/contrib/lint.py -werkzeug/contrib/profiler.py -werkzeug/contrib/securecookie.py -werkzeug/contrib/sessions.py -werkzeug/contrib/testtools.py -werkzeug/contrib/wrappers.py -werkzeug/debug/__init__.py -werkzeug/debug/console.py -werkzeug/debug/repr.py -werkzeug/debug/tbtools.py -werkzeug/debug/shared/FONT_LICENSE -werkzeug/debug/shared/console.png -werkzeug/debug/shared/debugger.js -werkzeug/debug/shared/jquery.js -werkzeug/debug/shared/less.png -werkzeug/debug/shared/more.png -werkzeug/debug/shared/source.png -werkzeug/debug/shared/style.css -werkzeug/debug/shared/ubuntu.ttf -werkzeug/testsuite/__init__.py -werkzeug/testsuite/compat.py -werkzeug/testsuite/datastructures.py -werkzeug/testsuite/debug.py -werkzeug/testsuite/exceptions.py -werkzeug/testsuite/formparser.py -werkzeug/testsuite/http.py -werkzeug/testsuite/internal.py -werkzeug/testsuite/local.py -werkzeug/testsuite/routing.py -werkzeug/testsuite/security.py -werkzeug/testsuite/serving.py -werkzeug/testsuite/test.py -werkzeug/testsuite/urls.py -werkzeug/testsuite/utils.py -werkzeug/testsuite/wrappers.py -werkzeug/testsuite/wsgi.py -werkzeug/testsuite/contrib/__init__.py -werkzeug/testsuite/contrib/cache.py -werkzeug/testsuite/contrib/fixers.py -werkzeug/testsuite/contrib/iterio.py -werkzeug/testsuite/contrib/securecookie.py -werkzeug/testsuite/contrib/sessions.py -werkzeug/testsuite/contrib/wrappers.py -werkzeug/testsuite/multipart/collect.py -werkzeug/testsuite/multipart/ie7_full_path_request.txt -werkzeug/testsuite/multipart/firefox3-2png1txt/file1.png -werkzeug/testsuite/multipart/firefox3-2png1txt/file2.png -werkzeug/testsuite/multipart/firefox3-2png1txt/request.txt -werkzeug/testsuite/multipart/firefox3-2png1txt/text.txt -werkzeug/testsuite/multipart/firefox3-2pnglongtext/file1.png -werkzeug/testsuite/multipart/firefox3-2pnglongtext/file2.png -werkzeug/testsuite/multipart/firefox3-2pnglongtext/request.txt -werkzeug/testsuite/multipart/firefox3-2pnglongtext/text.txt -werkzeug/testsuite/multipart/ie6-2png1txt/file1.png -werkzeug/testsuite/multipart/ie6-2png1txt/file2.png 
-werkzeug/testsuite/multipart/ie6-2png1txt/request.txt -werkzeug/testsuite/multipart/ie6-2png1txt/text.txt -werkzeug/testsuite/multipart/opera8-2png1txt/file1.png -werkzeug/testsuite/multipart/opera8-2png1txt/file2.png -werkzeug/testsuite/multipart/opera8-2png1txt/request.txt -werkzeug/testsuite/multipart/opera8-2png1txt/text.txt -werkzeug/testsuite/multipart/webkit3-2png1txt/file1.png -werkzeug/testsuite/multipart/webkit3-2png1txt/file2.png -werkzeug/testsuite/multipart/webkit3-2png1txt/request.txt -werkzeug/testsuite/multipart/webkit3-2png1txt/text.txt -werkzeug/testsuite/res/test.txt \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/Werkzeug-0.9.4.egg-info/dependency_links.txt b/Linux_i686/lib/python2.7/site-packages/Werkzeug-0.9.4.egg-info/dependency_links.txt deleted file mode 100644 index 8b13789..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Werkzeug-0.9.4.egg-info/dependency_links.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/Linux_i686/lib/python2.7/site-packages/Werkzeug-0.9.4.egg-info/installed-files.txt b/Linux_i686/lib/python2.7/site-packages/Werkzeug-0.9.4.egg-info/installed-files.txt deleted file mode 100644 index c213d9d..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Werkzeug-0.9.4.egg-info/installed-files.txt +++ /dev/null @@ -1,161 +0,0 @@ -../werkzeug/wrappers.py -../werkzeug/_compat.py -../werkzeug/security.py -../werkzeug/http.py -../werkzeug/wsgi.py -../werkzeug/useragents.py -../werkzeug/script.py -../werkzeug/exceptions.py -../werkzeug/datastructures.py -../werkzeug/posixemulation.py -../werkzeug/testapp.py -../werkzeug/urls.py -../werkzeug/local.py -../werkzeug/__init__.py -../werkzeug/serving.py -../werkzeug/formparser.py -../werkzeug/_internal.py -../werkzeug/test.py -../werkzeug/utils.py -../werkzeug/routing.py -../werkzeug/debug/repr.py -../werkzeug/debug/console.py -../werkzeug/debug/tbtools.py -../werkzeug/debug/__init__.py -../werkzeug/contrib/wrappers.py -../werkzeug/contrib/lint.py -../werkzeug/contrib/profiler.py -../werkzeug/contrib/iterio.py -../werkzeug/contrib/fixers.py -../werkzeug/contrib/sessions.py -../werkzeug/contrib/securecookie.py -../werkzeug/contrib/testtools.py -../werkzeug/contrib/__init__.py -../werkzeug/contrib/limiter.py -../werkzeug/contrib/jsrouting.py -../werkzeug/contrib/cache.py -../werkzeug/contrib/atom.py -../werkzeug/testsuite/wrappers.py -../werkzeug/testsuite/compat.py -../werkzeug/testsuite/internal.py -../werkzeug/testsuite/security.py -../werkzeug/testsuite/http.py -../werkzeug/testsuite/wsgi.py -../werkzeug/testsuite/exceptions.py -../werkzeug/testsuite/datastructures.py -../werkzeug/testsuite/urls.py -../werkzeug/testsuite/local.py -../werkzeug/testsuite/__init__.py -../werkzeug/testsuite/serving.py -../werkzeug/testsuite/formparser.py -../werkzeug/testsuite/test.py -../werkzeug/testsuite/debug.py -../werkzeug/testsuite/utils.py -../werkzeug/testsuite/routing.py -../werkzeug/testsuite/contrib/wrappers.py -../werkzeug/testsuite/contrib/iterio.py -../werkzeug/testsuite/contrib/fixers.py -../werkzeug/testsuite/contrib/sessions.py -../werkzeug/testsuite/contrib/securecookie.py -../werkzeug/testsuite/contrib/__init__.py -../werkzeug/testsuite/contrib/cache.py -../werkzeug/debug/shared/FONT_LICENSE -../werkzeug/debug/shared/console.png -../werkzeug/debug/shared/debugger.js -../werkzeug/debug/shared/jquery.js -../werkzeug/debug/shared/less.png -../werkzeug/debug/shared/more.png -../werkzeug/debug/shared/source.png -../werkzeug/debug/shared/style.css 
-../werkzeug/debug/shared/ubuntu.ttf -../werkzeug/testsuite/multipart/collect.py -../werkzeug/testsuite/multipart/ie7_full_path_request.txt -../werkzeug/testsuite/multipart/firefox3-2png1txt/file1.png -../werkzeug/testsuite/multipart/firefox3-2png1txt/file2.png -../werkzeug/testsuite/multipart/firefox3-2png1txt/request.txt -../werkzeug/testsuite/multipart/firefox3-2png1txt/text.txt -../werkzeug/testsuite/multipart/firefox3-2pnglongtext/file1.png -../werkzeug/testsuite/multipart/firefox3-2pnglongtext/file2.png -../werkzeug/testsuite/multipart/firefox3-2pnglongtext/request.txt -../werkzeug/testsuite/multipart/firefox3-2pnglongtext/text.txt -../werkzeug/testsuite/multipart/ie6-2png1txt/file1.png -../werkzeug/testsuite/multipart/ie6-2png1txt/file2.png -../werkzeug/testsuite/multipart/ie6-2png1txt/request.txt -../werkzeug/testsuite/multipart/ie6-2png1txt/text.txt -../werkzeug/testsuite/multipart/opera8-2png1txt/file1.png -../werkzeug/testsuite/multipart/opera8-2png1txt/file2.png -../werkzeug/testsuite/multipart/opera8-2png1txt/request.txt -../werkzeug/testsuite/multipart/opera8-2png1txt/text.txt -../werkzeug/testsuite/multipart/webkit3-2png1txt/file1.png -../werkzeug/testsuite/multipart/webkit3-2png1txt/file2.png -../werkzeug/testsuite/multipart/webkit3-2png1txt/request.txt -../werkzeug/testsuite/multipart/webkit3-2png1txt/text.txt -../werkzeug/testsuite/res/test.txt -../werkzeug/wrappers.pyc -../werkzeug/_compat.pyc -../werkzeug/security.pyc -../werkzeug/http.pyc -../werkzeug/wsgi.pyc -../werkzeug/useragents.pyc -../werkzeug/script.pyc -../werkzeug/exceptions.pyc -../werkzeug/datastructures.pyc -../werkzeug/posixemulation.pyc -../werkzeug/testapp.pyc -../werkzeug/urls.pyc -../werkzeug/local.pyc -../werkzeug/__init__.pyc -../werkzeug/serving.pyc -../werkzeug/formparser.pyc -../werkzeug/_internal.pyc -../werkzeug/test.pyc -../werkzeug/utils.pyc -../werkzeug/routing.pyc -../werkzeug/debug/repr.pyc -../werkzeug/debug/console.pyc -../werkzeug/debug/tbtools.pyc -../werkzeug/debug/__init__.pyc -../werkzeug/contrib/wrappers.pyc -../werkzeug/contrib/lint.pyc -../werkzeug/contrib/profiler.pyc -../werkzeug/contrib/iterio.pyc -../werkzeug/contrib/fixers.pyc -../werkzeug/contrib/sessions.pyc -../werkzeug/contrib/securecookie.pyc -../werkzeug/contrib/testtools.pyc -../werkzeug/contrib/__init__.pyc -../werkzeug/contrib/limiter.pyc -../werkzeug/contrib/jsrouting.pyc -../werkzeug/contrib/cache.pyc -../werkzeug/contrib/atom.pyc -../werkzeug/testsuite/wrappers.pyc -../werkzeug/testsuite/compat.pyc -../werkzeug/testsuite/internal.pyc -../werkzeug/testsuite/security.pyc -../werkzeug/testsuite/http.pyc -../werkzeug/testsuite/wsgi.pyc -../werkzeug/testsuite/exceptions.pyc -../werkzeug/testsuite/datastructures.pyc -../werkzeug/testsuite/urls.pyc -../werkzeug/testsuite/local.pyc -../werkzeug/testsuite/__init__.pyc -../werkzeug/testsuite/serving.pyc -../werkzeug/testsuite/formparser.pyc -../werkzeug/testsuite/test.pyc -../werkzeug/testsuite/debug.pyc -../werkzeug/testsuite/utils.pyc -../werkzeug/testsuite/routing.pyc -../werkzeug/testsuite/contrib/wrappers.pyc -../werkzeug/testsuite/contrib/iterio.pyc -../werkzeug/testsuite/contrib/fixers.pyc -../werkzeug/testsuite/contrib/sessions.pyc -../werkzeug/testsuite/contrib/securecookie.pyc -../werkzeug/testsuite/contrib/__init__.pyc -../werkzeug/testsuite/contrib/cache.pyc -../werkzeug/testsuite/multipart/collect.pyc -./ -SOURCES.txt -dependency_links.txt -PKG-INFO -not-zip-safe -top_level.txt diff --git 
a/Linux_i686/lib/python2.7/site-packages/Werkzeug-0.9.4.egg-info/not-zip-safe b/Linux_i686/lib/python2.7/site-packages/Werkzeug-0.9.4.egg-info/not-zip-safe deleted file mode 100644 index 8b13789..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Werkzeug-0.9.4.egg-info/not-zip-safe +++ /dev/null @@ -1 +0,0 @@ - diff --git a/Linux_i686/lib/python2.7/site-packages/Werkzeug-0.9.4.egg-info/top_level.txt b/Linux_i686/lib/python2.7/site-packages/Werkzeug-0.9.4.egg-info/top_level.txt deleted file mode 100644 index 6fe8da8..0000000 --- a/Linux_i686/lib/python2.7/site-packages/Werkzeug-0.9.4.egg-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -werkzeug diff --git a/Linux_i686/lib/python2.7/site-packages/_cffi_backend.so b/Linux_i686/lib/python2.7/site-packages/_cffi_backend.so index a42fe86..999c09c 100755 Binary files a/Linux_i686/lib/python2.7/site-packages/_cffi_backend.so and b/Linux_i686/lib/python2.7/site-packages/_cffi_backend.so differ diff --git a/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/PKG-INFO b/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/PKG-INFO deleted file mode 100644 index 3e0ec60..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/PKG-INFO +++ /dev/null @@ -1,96 +0,0 @@ -Metadata-Version: 1.1 -Name: alembic -Version: 0.6.5 -Summary: A database migration tool for SQLAlchemy. -Home-page: http://bitbucket.org/zzzeek/alembic -Author: Mike Bayer -Author-email: mike@zzzcomputing.com -License: MIT -Description: Alembic is a new database migrations tool, written by the author - of `SQLAlchemy `_. A migrations tool - offers the following functionality: - - * Can emit ALTER statements to a database in order to change - the structure of tables and other constructs - * Provides a system whereby "migration scripts" may be constructed; - each script indicates a particular series of steps that can "upgrade" a - target database to a new version, and optionally a series of steps that can - "downgrade" similarly, doing the same steps in reverse. - * Allows the scripts to execute in some sequential manner. - - The goals of Alembic are: - - * Very open ended and transparent configuration and operation. A new - Alembic environment is generated from a set of templates which is selected - among a set of options when setup first occurs. The templates then deposit a - series of scripts that define fully how database connectivity is established - and how migration scripts are invoked; the migration scripts themselves are - generated from a template within that series of scripts. The scripts can - then be further customized to define exactly how databases will be - interacted with and what structure new migration files should take. - * Full support for transactional DDL. The default scripts ensure that all - migrations occur within a transaction - for those databases which support - this (Postgresql, Microsoft SQL Server), migrations can be tested with no - need to manually undo changes upon failure. - * Minimalist script construction. Basic operations like renaming - tables/columns, adding/removing columns, changing column attributes can be - performed through one line commands like alter_column(), rename_table(), - add_constraint(). There is no need to recreate full SQLAlchemy Table - structures for simple operations like these - the functions themselves - generate minimalist schema structures behind the scenes to achieve the given - DDL sequence. - * "auto generation" of migrations. 
While real world migrations are far more - complex than what can be automatically determined, Alembic can still - eliminate the initial grunt work in generating new migration directives - from an altered schema. The ``--autogenerate`` feature will inspect the - current status of a database using SQLAlchemy's schema inspection - capabilities, compare it to the current state of the database model as - specified in Python, and generate a series of "candidate" migrations, - rendering them into a new migration script as Python directives. The - developer then edits the new file, adding additional directives and data - migrations as needed, to produce a finished migration. Table and column - level changes can be detected, with constraints and indexes to follow as - well. - * Full support for migrations generated as SQL scripts. Those of us who - work in corporate environments know that direct access to DDL commands on a - production database is a rare privilege, and DBAs want textual SQL scripts. - Alembic's usage model and commands are oriented towards being able to run a - series of migrations into a textual output file as easily as it runs them - directly to a database. Care must be taken in this mode to not invoke other - operations that rely upon in-memory SELECTs of rows - Alembic tries to - provide helper constructs like bulk_insert() to help with data-oriented - operations that are compatible with script-based DDL. - * Non-linear versioning. Scripts are given UUID identifiers similarly - to a DVCS, and the linkage of one script to the next is achieved via markers - within the scripts themselves. Through this open-ended mechanism, branches - containing other migration scripts can be merged - the linkages can be - manually edited within the script files to create the new sequence. - * Provide a library of ALTER constructs that can be used by any SQLAlchemy - application. The DDL constructs build upon SQLAlchemy's own DDLElement base - and can be used standalone by any application or script. - * Don't break our necks over SQLite's inability to ALTER things. SQLite - has almost no support for table or column alteration, and this is likely - intentional. Alembic's design - is kept simple by not contorting its core API around these limitations, - understanding that SQLite is simply not intended to support schema - changes. While Alembic's architecture can support SQLite's workarounds, and - we will support these features provided someone takes the initiative - to implement and test, until the SQLite developers decide - to provide a fully working version of ALTER, it's still vastly preferable - to use Alembic, or any migrations tool, with databases that - are designed to work under the assumption of in-place schema migrations - taking place. - - Documentation and status of Alembic is at http://readthedocs.org/docs/alembic/. 
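Note: the description above names one-line directives such as alter_column(), rename_table() and add_constraint(). As a rough, illustrative sketch only (the table, column and revision identifiers below are made up and do not come from this tree), a generated migration script typically uses those directives like this:

    from alembic import op
    import sqlalchemy as sa

    # placeholder revision identifiers, normally filled in by `alembic revision`
    revision = '000000000000'
    down_revision = None

    def upgrade():
        # single-call directives; no full Table() definitions are required
        op.add_column('account', sa.Column('last_seen', sa.DateTime(), nullable=True))
        op.alter_column('account', 'name',
                        existing_type=sa.String(length=50),
                        nullable=False)
        op.create_unique_constraint('uq_account_email', 'account', ['email'])

    def downgrade():
        # reverse the upgrade steps in the opposite order
        op.drop_constraint('uq_account_email', 'account', type_='unique')
        op.alter_column('account', 'name',
                        existing_type=sa.String(length=50),
                        nullable=True)
        op.drop_column('account', 'last_seen')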
- - -Keywords: SQLAlchemy migrations -Platform: UNKNOWN -Classifier: Development Status :: 4 - Beta -Classifier: Environment :: Console -Classifier: Intended Audience :: Developers -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Database :: Front-Ends diff --git a/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/SOURCES.txt b/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/SOURCES.txt deleted file mode 100644 index 19bf79b..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/SOURCES.txt +++ /dev/null @@ -1,123 +0,0 @@ -CHANGES -LICENSE -MANIFEST.in -README.rst -README.unittests -setup.cfg -setup.py -test.cfg -alembic/__init__.py -alembic/command.py -alembic/compat.py -alembic/config.py -alembic/context.py -alembic/environment.py -alembic/migration.py -alembic/op.py -alembic/operations.py -alembic/script.py -alembic/util.py -alembic.egg-info/PKG-INFO -alembic.egg-info/SOURCES.txt -alembic.egg-info/dependency_links.txt -alembic.egg-info/entry_points.txt -alembic.egg-info/not-zip-safe -alembic.egg-info/requires.txt -alembic.egg-info/top_level.txt -alembic/autogenerate/__init__.py -alembic/autogenerate/api.py -alembic/autogenerate/compare.py -alembic/autogenerate/render.py -alembic/ddl/__init__.py -alembic/ddl/base.py -alembic/ddl/impl.py -alembic/ddl/mssql.py -alembic/ddl/mysql.py -alembic/ddl/oracle.py -alembic/ddl/postgresql.py -alembic/ddl/sqlite.py -alembic/templates/generic/README -alembic/templates/generic/alembic.ini.mako -alembic/templates/generic/env.py -alembic/templates/generic/script.py.mako -alembic/templates/multidb/README -alembic/templates/multidb/alembic.ini.mako -alembic/templates/multidb/env.py -alembic/templates/multidb/script.py.mako -alembic/templates/pylons/README -alembic/templates/pylons/alembic.ini.mako -alembic/templates/pylons/env.py -alembic/templates/pylons/script.py.mako -docs/api.html -docs/changelog.html -docs/cookbook.html -docs/front.html -docs/genindex.html -docs/index.html -docs/ops.html -docs/py-modindex.html -docs/search.html -docs/searchindex.js -docs/tutorial.html -docs/_images/api_overview.png -docs/_sources/api.txt -docs/_sources/changelog.txt -docs/_sources/cookbook.txt -docs/_sources/front.txt -docs/_sources/index.txt -docs/_sources/ops.txt -docs/_sources/tutorial.txt -docs/_static/basic.css -docs/_static/changelog.css -docs/_static/comment-bright.png -docs/_static/comment-close.png -docs/_static/comment.png -docs/_static/doctools.js -docs/_static/down-pressed.png -docs/_static/down.png -docs/_static/file.png -docs/_static/jquery.js -docs/_static/minus.png -docs/_static/nature.css -docs/_static/nature_override.css -docs/_static/plus.png -docs/_static/pygments.css -docs/_static/searchtools.js -docs/_static/sphinx_paramlinks.css -docs/_static/underscore.js -docs/_static/up-pressed.png -docs/_static/up.png -docs/_static/websupport.js -docs/build/Makefile -docs/build/api.rst -docs/build/api_overview.png -docs/build/changelog.rst -docs/build/conf.py -docs/build/cookbook.rst -docs/build/front.rst -docs/build/index.rst -docs/build/ops.rst -docs/build/requirements.txt -docs/build/tutorial.rst -docs/build/_static/nature_override.css -tests/__init__.py -tests/test_autogen_indexes.py -tests/test_autogen_render.py -tests/test_autogenerate.py -tests/test_bulk_insert.py -tests/test_command.py 
-tests/test_config.py -tests/test_environment.py -tests/test_mssql.py -tests/test_mysql.py -tests/test_offline_environment.py -tests/test_op.py -tests/test_op_naming_convention.py -tests/test_oracle.py -tests/test_postgresql.py -tests/test_revision_create.py -tests/test_revision_paths.py -tests/test_sql_script.py -tests/test_sqlite.py -tests/test_version_table.py -tests/test_versioning.py \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/dependency_links.txt b/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/dependency_links.txt deleted file mode 100644 index 8b13789..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/dependency_links.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/entry_points.txt b/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/entry_points.txt deleted file mode 100644 index 27ac374..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/entry_points.txt +++ /dev/null @@ -1,3 +0,0 @@ -[console_scripts] -alembic = alembic.config:main - diff --git a/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/installed-files.txt b/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/installed-files.txt deleted file mode 100644 index 1c6e409..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/installed-files.txt +++ /dev/null @@ -1,70 +0,0 @@ -../alembic/compat.py -../alembic/util.py -../alembic/script.py -../alembic/op.py -../alembic/context.py -../alembic/command.py -../alembic/__init__.py -../alembic/operations.py -../alembic/config.py -../alembic/migration.py -../alembic/environment.py -../alembic/ddl/impl.py -../alembic/ddl/mysql.py -../alembic/ddl/mssql.py -../alembic/ddl/__init__.py -../alembic/ddl/oracle.py -../alembic/ddl/sqlite.py -../alembic/ddl/base.py -../alembic/ddl/postgresql.py -../alembic/autogenerate/api.py -../alembic/autogenerate/compare.py -../alembic/autogenerate/render.py -../alembic/autogenerate/__init__.py -../alembic/templates/generic/README -../alembic/templates/generic/alembic.ini.mako -../alembic/templates/generic/env.py -../alembic/templates/generic/script.py.mako -../alembic/templates/multidb/README -../alembic/templates/multidb/alembic.ini.mako -../alembic/templates/multidb/env.py -../alembic/templates/multidb/script.py.mako -../alembic/templates/pylons/README -../alembic/templates/pylons/alembic.ini.mako -../alembic/templates/pylons/env.py -../alembic/templates/pylons/script.py.mako -../alembic/compat.pyc -../alembic/util.pyc -../alembic/script.pyc -../alembic/op.pyc -../alembic/context.pyc -../alembic/command.pyc -../alembic/__init__.pyc -../alembic/operations.pyc -../alembic/config.pyc -../alembic/migration.pyc -../alembic/environment.pyc -../alembic/ddl/impl.pyc -../alembic/ddl/mysql.pyc -../alembic/ddl/mssql.pyc -../alembic/ddl/__init__.pyc -../alembic/ddl/oracle.pyc -../alembic/ddl/sqlite.pyc -../alembic/ddl/base.pyc -../alembic/ddl/postgresql.pyc -../alembic/autogenerate/api.pyc -../alembic/autogenerate/compare.pyc -../alembic/autogenerate/render.pyc -../alembic/autogenerate/__init__.pyc -../alembic/templates/generic/env.pyc -../alembic/templates/multidb/env.pyc -../alembic/templates/pylons/env.pyc -./ -requires.txt -SOURCES.txt -entry_points.txt -dependency_links.txt -PKG-INFO -not-zip-safe -top_level.txt -../../../../bin/alembic diff --git a/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/not-zip-safe 
b/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/not-zip-safe deleted file mode 100644 index 8b13789..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/not-zip-safe +++ /dev/null @@ -1 +0,0 @@ - diff --git a/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/requires.txt b/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/requires.txt deleted file mode 100644 index 39a2c32..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/requires.txt +++ /dev/null @@ -1,2 +0,0 @@ -SQLAlchemy>=0.7.3 -Mako \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/top_level.txt b/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/top_level.txt deleted file mode 100644 index b5bd98d..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic-0.6.5.egg-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -alembic diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/__init__.py b/Linux_i686/lib/python2.7/site-packages/alembic/__init__.py deleted file mode 100644 index 6680966..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic/__init__.py +++ /dev/null @@ -1,11 +0,0 @@ -from os import path - -__version__ = '0.6.5' - -package_dir = path.abspath(path.dirname(__file__)) - - -from . import op -from . import context - - diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/autogenerate/__init__.py b/Linux_i686/lib/python2.7/site-packages/alembic/autogenerate/__init__.py deleted file mode 100644 index d0f54ba..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic/autogenerate/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .api import compare_metadata, _produce_migration_diffs, _produce_net_changes diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/autogenerate/api.py b/Linux_i686/lib/python2.7/site-packages/alembic/autogenerate/api.py deleted file mode 100644 index 148e352..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic/autogenerate/api.py +++ /dev/null @@ -1,301 +0,0 @@ -"""Provide the 'autogenerate' feature which can produce migration operations -automatically.""" - -import logging -import re - -from sqlalchemy.engine.reflection import Inspector -from sqlalchemy.util import OrderedSet -from .compare import _compare_tables -from .render import _drop_table, _drop_column, _drop_index, _drop_constraint, \ - _add_table, _add_column, _add_index, _add_constraint, _modify_col -from .. import util - -log = logging.getLogger(__name__) - -################################################### -# public -def compare_metadata(context, metadata): - """Compare a database schema to that given in a - :class:`~sqlalchemy.schema.MetaData` instance. - - The database connection is presented in the context - of a :class:`.MigrationContext` object, which - provides database connectivity as well as optional - comparison functions to use for datatypes and - server defaults - see the "autogenerate" arguments - at :meth:`.EnvironmentContext.configure` - for details on these. 
- - The return format is a list of "diff" directives, - each representing individual differences:: - - from alembic.migration import MigrationContext - from alembic.autogenerate import compare_metadata - from sqlalchemy.schema import SchemaItem - from sqlalchemy.types import TypeEngine - from sqlalchemy import (create_engine, MetaData, Column, - Integer, String, Table) - import pprint - - engine = create_engine("sqlite://") - - engine.execute(''' - create table foo ( - id integer not null primary key, - old_data varchar, - x integer - )''') - - engine.execute(''' - create table bar ( - data varchar - )''') - - metadata = MetaData() - Table('foo', metadata, - Column('id', Integer, primary_key=True), - Column('data', Integer), - Column('x', Integer, nullable=False) - ) - Table('bat', metadata, - Column('info', String) - ) - - mc = MigrationContext.configure(engine.connect()) - - diff = compare_metadata(mc, metadata) - pprint.pprint(diff, indent=2, width=20) - - Output:: - - [ ( 'add_table', - Table('bat', MetaData(bind=None), - Column('info', String(), table=), schema=None)), - ( 'remove_table', - Table(u'bar', MetaData(bind=None), - Column(u'data', VARCHAR(), table=), schema=None)), - ( 'add_column', - None, - 'foo', - Column('data', Integer(), table=)), - ( 'remove_column', - None, - 'foo', - Column(u'old_data', VARCHAR(), table=None)), - [ ( 'modify_nullable', - None, - 'foo', - u'x', - { 'existing_server_default': None, - 'existing_type': INTEGER()}, - True, - False)]] - - - :param context: a :class:`.MigrationContext` - instance. - :param metadata: a :class:`~sqlalchemy.schema.MetaData` - instance. - - """ - autogen_context, connection = _autogen_context(context, None) - diffs = [] - - object_filters = _get_object_filters(context.opts) - include_schemas = context.opts.get('include_schemas', False) - - _produce_net_changes(connection, metadata, diffs, autogen_context, - object_filters, include_schemas) - - return diffs - -################################################### -# top level - -def _produce_migration_diffs(context, template_args, - imports, include_symbol=None, - include_object=None, - include_schemas=False): - opts = context.opts - metadata = opts['target_metadata'] - include_schemas = opts.get('include_schemas', include_schemas) - - object_filters = _get_object_filters(opts, include_symbol, include_object) - - if metadata is None: - raise util.CommandError( - "Can't proceed with --autogenerate option; environment " - "script %s does not provide " - "a MetaData object to the context." 
% ( - context.script.env_py_location - )) - autogen_context, connection = _autogen_context(context, imports) - - diffs = [] - _produce_net_changes(connection, metadata, diffs, - autogen_context, object_filters, include_schemas) - template_args[opts['upgrade_token']] = \ - _indent(_produce_upgrade_commands(diffs, autogen_context)) - template_args[opts['downgrade_token']] = \ - _indent(_produce_downgrade_commands(diffs, autogen_context)) - template_args['imports'] = "\n".join(sorted(imports)) - - -def _get_object_filters(context_opts, include_symbol=None, include_object=None): - include_symbol = context_opts.get('include_symbol', include_symbol) - include_object = context_opts.get('include_object', include_object) - - object_filters = [] - if include_symbol: - def include_symbol_filter(object, name, type_, reflected, compare_to): - if type_ == "table": - return include_symbol(name, object.schema) - else: - return True - object_filters.append(include_symbol_filter) - if include_object: - object_filters.append(include_object) - - return object_filters - - -def _autogen_context(context, imports): - opts = context.opts - connection = context.bind - return { - 'imports': imports, - 'connection': connection, - 'dialect': connection.dialect, - 'context': context, - 'opts': opts - }, connection - -def _indent(text): - text = "### commands auto generated by Alembic - "\ - "please adjust! ###\n" + text - text += "\n### end Alembic commands ###" - text = re.compile(r'^', re.M).sub(" ", text).strip() - return text - -################################################### -# walk structures - - -def _produce_net_changes(connection, metadata, diffs, autogen_context, - object_filters=(), - include_schemas=False): - inspector = Inspector.from_engine(connection) - # TODO: not hardcode alembic_version here ? 
- conn_table_names = set() - - default_schema = connection.dialect.default_schema_name - if include_schemas: - schemas = set(inspector.get_schema_names()) - # replace default schema name with None - schemas.discard("information_schema") - # replace the "default" schema with None - schemas.add(None) - schemas.discard(default_schema) - else: - schemas = [None] - - for s in schemas: - tables = set(inspector.get_table_names(schema=s)).\ - difference(['alembic_version']) - conn_table_names.update(zip([s] * len(tables), tables)) - - metadata_table_names = OrderedSet([(table.schema, table.name) - for table in metadata.sorted_tables]) - - _compare_tables(conn_table_names, metadata_table_names, - object_filters, - inspector, metadata, diffs, autogen_context) - - -################################################### -# element comparison - - -################################################### -# render python - - -################################################### -# produce command structure - -def _produce_upgrade_commands(diffs, autogen_context): - buf = [] - for diff in diffs: - buf.append(_invoke_command("upgrade", diff, autogen_context)) - if not buf: - buf = ["pass"] - return "\n".join(buf) - -def _produce_downgrade_commands(diffs, autogen_context): - buf = [] - for diff in reversed(diffs): - buf.append(_invoke_command("downgrade", diff, autogen_context)) - if not buf: - buf = ["pass"] - return "\n".join(buf) - -def _invoke_command(updown, args, autogen_context): - if isinstance(args, tuple): - return _invoke_adddrop_command(updown, args, autogen_context) - else: - return _invoke_modify_command(updown, args, autogen_context) - -def _invoke_adddrop_command(updown, args, autogen_context): - cmd_type = args[0] - adddrop, cmd_type = cmd_type.split("_") - - cmd_args = args[1:] + (autogen_context,) - - _commands = { - "table": (_drop_table, _add_table), - "column": (_drop_column, _add_column), - "index": (_drop_index, _add_index), - "constraint": (_drop_constraint, _add_constraint), - } - - cmd_callables = _commands[cmd_type] - - if ( - updown == "upgrade" and adddrop == "add" - ) or ( - updown == "downgrade" and adddrop == "remove" - ): - return cmd_callables[1](*cmd_args) - else: - return cmd_callables[0](*cmd_args) - -def _invoke_modify_command(updown, args, autogen_context): - sname, tname, cname = args[0][1:4] - kw = {} - - _arg_struct = { - "modify_type": ("existing_type", "type_"), - "modify_nullable": ("existing_nullable", "nullable"), - "modify_default": ("existing_server_default", "server_default"), - } - for diff in args: - diff_kw = diff[4] - for arg in ("existing_type", \ - "existing_nullable", \ - "existing_server_default"): - if arg in diff_kw: - kw.setdefault(arg, diff_kw[arg]) - old_kw, new_kw = _arg_struct[diff[0]] - if updown == "upgrade": - kw[new_kw] = diff[-1] - kw[old_kw] = diff[-2] - else: - kw[new_kw] = diff[-2] - kw[old_kw] = diff[-1] - - if "nullable" in kw: - kw.pop("existing_nullable", None) - if "server_default" in kw: - kw.pop("existing_server_default", None) - return _modify_col(tname, cname, autogen_context, schema=sname, **kw) diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/autogenerate/compare.py b/Linux_i686/lib/python2.7/site-packages/alembic/autogenerate/compare.py deleted file mode 100644 index ec077fd..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic/autogenerate/compare.py +++ /dev/null @@ -1,490 +0,0 @@ -from sqlalchemy.exc import NoSuchTableError -from sqlalchemy import schema as sa_schema, types as sqltypes -import logging -from 
.. import compat -from .render import _render_server_default -from sqlalchemy.util import OrderedSet - - -log = logging.getLogger(__name__) - -def _run_filters(object_, name, type_, reflected, compare_to, object_filters): - for fn in object_filters: - if not fn(object_, name, type_, reflected, compare_to): - return False - else: - return True - -def _compare_tables(conn_table_names, metadata_table_names, - object_filters, - inspector, metadata, diffs, autogen_context): - - default_schema = inspector.bind.dialect.default_schema_name - - # tables coming from the connection will not have "schema" - # set if it matches default_schema_name; so we need a list - # of table names from local metadata that also have "None" if schema - # == default_schema_name. Most setups will be like this anyway but - # some are not (see #170) - metadata_table_names_no_dflt_schema = OrderedSet([ - (schema if schema != default_schema else None, tname) - for schema, tname in metadata_table_names - ]) - - # to adjust for the MetaData collection storing the tables either - # as "schemaname.tablename" or just "tablename", create a new lookup - # which will match the "non-default-schema" keys to the Table object. - tname_to_table = dict( - ( - no_dflt_schema, - metadata.tables[sa_schema._get_table_key(tname, schema)] - ) - for no_dflt_schema, (schema, tname) in zip( - metadata_table_names_no_dflt_schema, - metadata_table_names) - ) - metadata_table_names = metadata_table_names_no_dflt_schema - - for s, tname in metadata_table_names.difference(conn_table_names): - name = '%s.%s' % (s, tname) if s else tname - metadata_table = tname_to_table[(s, tname)] - if _run_filters(metadata_table, tname, "table", False, None, object_filters): - diffs.append(("add_table", metadata_table)) - log.info("Detected added table %r", name) - _compare_indexes_and_uniques(s, tname, object_filters, - None, - metadata_table, - diffs, autogen_context, inspector) - - removal_metadata = sa_schema.MetaData() - for s, tname in conn_table_names.difference(metadata_table_names): - name = sa_schema._get_table_key(tname, s) - exists = name in removal_metadata.tables - t = sa_schema.Table(tname, removal_metadata, schema=s) - if not exists: - inspector.reflecttable(t, None) - if _run_filters(t, tname, "table", True, None, object_filters): - diffs.append(("remove_table", t)) - log.info("Detected removed table %r", name) - - existing_tables = conn_table_names.intersection(metadata_table_names) - - existing_metadata = sa_schema.MetaData() - conn_column_info = {} - for s, tname in existing_tables: - name = sa_schema._get_table_key(tname, s) - exists = name in existing_metadata.tables - t = sa_schema.Table(tname, existing_metadata, schema=s) - if not exists: - inspector.reflecttable(t, None) - conn_column_info[(s, tname)] = t - - for s, tname in sorted(existing_tables): - name = '%s.%s' % (s, tname) if s else tname - metadata_table = tname_to_table[(s, tname)] - conn_table = existing_metadata.tables[name] - - if _run_filters(metadata_table, tname, "table", False, conn_table, object_filters): - _compare_columns(s, tname, object_filters, - conn_table, - metadata_table, - diffs, autogen_context, inspector) - _compare_indexes_and_uniques(s, tname, object_filters, - conn_table, - metadata_table, - diffs, autogen_context, inspector) - - # TODO: - # table constraints - # sequences - -def _make_index(params, conn_table): - return sa_schema.Index( - params['name'], - *[conn_table.c[cname] for cname in params['column_names']], - unique=params['unique'] - ) - -def 
_make_unique_constraint(params, conn_table): - return sa_schema.UniqueConstraint( - *[conn_table.c[cname] for cname in params['column_names']], - name=params['name'] - ) - -def _compare_columns(schema, tname, object_filters, conn_table, metadata_table, - diffs, autogen_context, inspector): - name = '%s.%s' % (schema, tname) if schema else tname - metadata_cols_by_name = dict((c.name, c) for c in metadata_table.c) - conn_col_names = dict((c.name, c) for c in conn_table.c) - metadata_col_names = OrderedSet(sorted(metadata_cols_by_name)) - - for cname in metadata_col_names.difference(conn_col_names): - if _run_filters(metadata_cols_by_name[cname], cname, - "column", False, None, object_filters): - diffs.append( - ("add_column", schema, tname, metadata_cols_by_name[cname]) - ) - log.info("Detected added column '%s.%s'", name, cname) - - for cname in set(conn_col_names).difference(metadata_col_names): - if _run_filters(conn_table.c[cname], cname, - "column", True, None, object_filters): - diffs.append( - ("remove_column", schema, tname, conn_table.c[cname]) - ) - log.info("Detected removed column '%s.%s'", name, cname) - - for colname in metadata_col_names.intersection(conn_col_names): - metadata_col = metadata_cols_by_name[colname] - conn_col = conn_table.c[colname] - if not _run_filters( - metadata_col, colname, "column", False, conn_col, object_filters): - continue - col_diff = [] - _compare_type(schema, tname, colname, - conn_col, - metadata_col, - col_diff, autogen_context - ) - _compare_nullable(schema, tname, colname, - conn_col, - metadata_col.nullable, - col_diff, autogen_context - ) - _compare_server_default(schema, tname, colname, - conn_col, - metadata_col, - col_diff, autogen_context - ) - if col_diff: - diffs.append(col_diff) - -class _constraint_sig(object): - def __eq__(self, other): - return self.const == other.const - - def __ne__(self, other): - return self.const != other.const - - def __hash__(self): - return hash(self.const) - -class _uq_constraint_sig(_constraint_sig): - is_index = False - is_unique = True - - def __init__(self, const): - self.const = const - self.name = const.name - self.sig = tuple(sorted([col.name for col in const.columns])) - - @property - def column_names(self): - return [col.name for col in self.const.columns] - -class _ix_constraint_sig(_constraint_sig): - is_index = True - - def __init__(self, const): - self.const = const - self.name = const.name - self.sig = tuple(sorted([col.name for col in const.columns])) - self.is_unique = bool(const.unique) - - @property - def column_names(self): - return _get_index_column_names(self.const) - -def _get_index_column_names(idx): - if compat.sqla_08: - return [getattr(exp, "name", None) for exp in idx.expressions] - else: - return [getattr(col, "name", None) for col in idx.columns] - -def _compare_indexes_and_uniques(schema, tname, object_filters, conn_table, - metadata_table, diffs, autogen_context, inspector): - - is_create_table = conn_table is None - - # 1a. get raw indexes and unique constraints from metadata ... - metadata_unique_constraints = set(uq for uq in metadata_table.constraints - if isinstance(uq, sa_schema.UniqueConstraint) - ) - metadata_indexes = set(metadata_table.indexes) - - conn_uniques = conn_indexes = frozenset() - - supports_unique_constraints = False - - if conn_table is not None: - # 1b. ... 
and from connection, if the table exists - if hasattr(inspector, "get_unique_constraints"): - try: - conn_uniques = inspector.get_unique_constraints( - tname, schema=schema) - supports_unique_constraints = True - except NotImplementedError: - pass - try: - conn_indexes = inspector.get_indexes(tname, schema=schema) - except NotImplementedError: - pass - - # 2. convert conn-level objects from raw inspector records - # into schema objects - conn_uniques = set(_make_unique_constraint(uq_def, conn_table) - for uq_def in conn_uniques) - conn_indexes = set(_make_index(ix, conn_table) for ix in conn_indexes) - - # 3. give the dialect a chance to omit indexes and constraints that - # we know are either added implicitly by the DB or that the DB - # can't accurately report on - autogen_context['context'].impl.\ - correct_for_autogen_constraints( - conn_uniques, conn_indexes, - metadata_unique_constraints, - metadata_indexes - ) - - # 4. organize the constraints into "signature" collections, the - # _constraint_sig() objects provide a consistent facade over both - # Index and UniqueConstraint so we can easily work with them - # interchangeably - metadata_unique_constraints = set(_uq_constraint_sig(uq) - for uq in metadata_unique_constraints - ) - - metadata_indexes = set(_ix_constraint_sig(ix) for ix in metadata_indexes) - - conn_unique_constraints = set(_uq_constraint_sig(uq) for uq in conn_uniques) - - conn_indexes = set(_ix_constraint_sig(ix) for ix in conn_indexes) - - # 5. index things by name, for those objects that have names - metadata_names = dict( - (c.name, c) for c in - metadata_unique_constraints.union(metadata_indexes) - if c.name is not None) - - conn_uniques_by_name = dict((c.name, c) for c in conn_unique_constraints) - conn_indexes_by_name = dict((c.name, c) for c in conn_indexes) - - conn_names = dict((c.name, c) for c in - conn_unique_constraints.union(conn_indexes) - if c.name is not None) - - doubled_constraints = dict( - (name, (conn_uniques_by_name[name], conn_indexes_by_name[name])) - for name in set(conn_uniques_by_name).intersection(conn_indexes_by_name) - ) - - # 6. index things by "column signature", to help with unnamed unique - # constraints. - conn_uniques_by_sig = dict((uq.sig, uq) for uq in conn_unique_constraints) - metadata_uniques_by_sig = dict( - (uq.sig, uq) for uq in metadata_unique_constraints) - metadata_indexes_by_sig = dict( - (ix.sig, ix) for ix in metadata_indexes) - unnamed_metadata_uniques = dict((uq.sig, uq) for uq in - metadata_unique_constraints if uq.name is None) - - # assumptions: - # 1. a unique constraint or an index from the connection *always* - # has a name. - # 2. an index on the metadata side *always* has a name. - # 3. a unique constraint on the metadata side *might* have a name. - # 4. The backend may double up indexes as unique constraints and - # vice versa (e.g. 
MySQL, Postgresql) - - def obj_added(obj): - if obj.is_index: - diffs.append(("add_index", obj.const)) - log.info("Detected added index '%s' on %s", - obj.name, ', '.join([ - "'%s'" % obj.column_names - ]) - ) - else: - if not supports_unique_constraints: - # can't report unique indexes as added if we don't - # detect them - return - if is_create_table: - # unique constraints are created inline with table defs - return - diffs.append(("add_constraint", obj.const)) - log.info("Detected added unique constraint '%s' on %s", - obj.name, ', '.join([ - "'%s'" % obj.column_names - ]) - ) - - def obj_removed(obj): - if obj.is_index: - if obj.is_unique and not supports_unique_constraints: - # many databases double up unique constraints - # as unique indexes. without that list we can't - # be sure what we're doing here - return - - diffs.append(("remove_index", obj.const)) - log.info("Detected removed index '%s' on '%s'", obj.name, tname) - else: - diffs.append(("remove_constraint", obj.const)) - log.info("Detected removed unique constraint '%s' on '%s'", - obj.name, tname - ) - - def obj_changed(old, new, msg): - if old.is_index: - log.info("Detected changed index '%s' on '%s':%s", - old.name, tname, ', '.join(msg) - ) - diffs.append(("remove_index", old.const)) - diffs.append(("add_index", new.const)) - else: - log.info("Detected changed unique constraint '%s' on '%s':%s", - old.name, tname, ', '.join(msg) - ) - diffs.append(("remove_constraint", old.const)) - diffs.append(("add_constraint", new.const)) - - for added_name in sorted(set(metadata_names).difference(conn_names)): - obj = metadata_names[added_name] - obj_added(obj) - - - for existing_name in sorted(set(metadata_names).intersection(conn_names)): - metadata_obj = metadata_names[existing_name] - - if existing_name in doubled_constraints: - conn_uq, conn_idx = doubled_constraints[existing_name] - if metadata_obj.is_index: - conn_obj = conn_idx - else: - conn_obj = conn_uq - else: - conn_obj = conn_names[existing_name] - - if conn_obj.is_index != metadata_obj.is_index: - obj_removed(conn_obj) - obj_added(metadata_obj) - else: - msg = [] - if conn_obj.is_unique != metadata_obj.is_unique: - msg.append(' unique=%r to unique=%r' % ( - conn_obj.is_unique, metadata_obj.is_unique - )) - if conn_obj.sig != metadata_obj.sig: - msg.append(' columns %r to %r' % ( - conn_obj.sig, metadata_obj.sig - )) - - if msg: - obj_changed(conn_obj, metadata_obj, msg) - - - for removed_name in sorted(set(conn_names).difference(metadata_names)): - conn_obj = conn_names[removed_name] - if not conn_obj.is_index and conn_obj.sig in unnamed_metadata_uniques: - continue - elif removed_name in doubled_constraints: - if conn_obj.sig not in metadata_indexes_by_sig and \ - conn_obj.sig not in metadata_uniques_by_sig: - conn_uq, conn_idx = doubled_constraints[removed_name] - obj_removed(conn_uq) - obj_removed(conn_idx) - else: - obj_removed(conn_obj) - - for uq_sig in unnamed_metadata_uniques: - if uq_sig not in conn_uniques_by_sig: - obj_added(unnamed_metadata_uniques[uq_sig]) - - -def _compare_nullable(schema, tname, cname, conn_col, - metadata_col_nullable, diffs, - autogen_context): - conn_col_nullable = conn_col.nullable - if conn_col_nullable is not metadata_col_nullable: - diffs.append( - ("modify_nullable", schema, tname, cname, - { - "existing_type": conn_col.type, - "existing_server_default": conn_col.server_default, - }, - conn_col_nullable, - metadata_col_nullable), - ) - log.info("Detected %s on column '%s.%s'", - "NULL" if metadata_col_nullable else "NOT NULL", 
- tname, - cname - ) - -def _compare_type(schema, tname, cname, conn_col, - metadata_col, diffs, - autogen_context): - - conn_type = conn_col.type - metadata_type = metadata_col.type - if conn_type._type_affinity is sqltypes.NullType: - log.info("Couldn't determine database type " - "for column '%s.%s'", tname, cname) - return - if metadata_type._type_affinity is sqltypes.NullType: - log.info("Column '%s.%s' has no type within " - "the model; can't compare", tname, cname) - return - - isdiff = autogen_context['context']._compare_type(conn_col, metadata_col) - - if isdiff: - - diffs.append( - ("modify_type", schema, tname, cname, - { - "existing_nullable": conn_col.nullable, - "existing_server_default": conn_col.server_default, - }, - conn_type, - metadata_type), - ) - log.info("Detected type change from %r to %r on '%s.%s'", - conn_type, metadata_type, tname, cname - ) - -def _compare_server_default(schema, tname, cname, conn_col, metadata_col, - diffs, autogen_context): - - metadata_default = metadata_col.server_default - conn_col_default = conn_col.server_default - if conn_col_default is None and metadata_default is None: - return False - rendered_metadata_default = _render_server_default( - metadata_default, autogen_context) - rendered_conn_default = conn_col.server_default.arg.text \ - if conn_col.server_default else None - isdiff = autogen_context['context']._compare_server_default( - conn_col, metadata_col, - rendered_metadata_default, - rendered_conn_default - ) - if isdiff: - conn_col_default = rendered_conn_default - diffs.append( - ("modify_default", schema, tname, cname, - { - "existing_nullable": conn_col.nullable, - "existing_type": conn_col.type, - }, - conn_col_default, - metadata_default), - ) - log.info("Detected server default on column '%s.%s'", - tname, - cname - ) - - - diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/autogenerate/render.py b/Linux_i686/lib/python2.7/site-packages/alembic/autogenerate/render.py deleted file mode 100644 index ed9536c..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic/autogenerate/render.py +++ /dev/null @@ -1,457 +0,0 @@ -from sqlalchemy import schema as sa_schema, types as sqltypes, sql -import logging -from .. 
import compat -import re -from ..compat import string_types - -log = logging.getLogger(__name__) - -try: - from sqlalchemy.sql.naming import conv - def _render_gen_name(autogen_context, name): - if isinstance(name, conv): - return _f_name(_alembic_autogenerate_prefix(autogen_context), name) - else: - return name -except ImportError: - def _render_gen_name(autogen_context, name): - return name - -class _f_name(object): - def __init__(self, prefix, name): - self.prefix = prefix - self.name = name - - def __repr__(self): - return "%sf(%r)" % (self.prefix, self.name) - -def _render_potential_expr(value, autogen_context): - if isinstance(value, sql.ClauseElement): - if compat.sqla_08: - compile_kw = dict(compile_kwargs={'literal_binds': True}) - else: - compile_kw = {} - - return "%(prefix)stext(%(sql)r)" % { - "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), - "sql": str( - value.compile(dialect=autogen_context['dialect'], - **compile_kw) - ) - } - - else: - return repr(value) - -def _add_table(table, autogen_context): - text = "%(prefix)screate_table(%(tablename)r,\n%(args)s" % { - 'tablename': table.name, - 'prefix': _alembic_autogenerate_prefix(autogen_context), - 'args': ',\n'.join( - [col for col in - [_render_column(col, autogen_context) for col in table.c] - if col] + - sorted([rcons for rcons in - [_render_constraint(cons, autogen_context) for cons in - table.constraints] - if rcons is not None - ]) - ) - } - if table.schema: - text += ",\nschema=%r" % table.schema - for k in sorted(table.kwargs): - text += ",\n%s=%r" % (k.replace(" ", "_"), table.kwargs[k]) - text += "\n)" - return text - -def _drop_table(table, autogen_context): - text = "%(prefix)sdrop_table(%(tname)r" % { - "prefix": _alembic_autogenerate_prefix(autogen_context), - "tname": table.name - } - if table.schema: - text += ", schema=%r" % table.schema - text += ")" - return text - -def _add_index(index, autogen_context): - """ - Generate Alembic operations for the CREATE INDEX of an - :class:`~sqlalchemy.schema.Index` instance. - """ - from .compare import _get_index_column_names - - text = "%(prefix)screate_index(%(name)r, '%(table)s', %(columns)s, "\ - "unique=%(unique)r%(schema)s%(kwargs)s)" % { - 'prefix': _alembic_autogenerate_prefix(autogen_context), - 'name': _render_gen_name(autogen_context, index.name), - 'table': index.table.name, - 'columns': _get_index_column_names(index), - 'unique': index.unique or False, - 'schema': (", schema='%s'" % index.table.schema) if index.table.schema else '', - 'kwargs': (', '+', '.join( - ["%s=%s" % (key, _render_potential_expr(val, autogen_context)) - for key, val in index.kwargs.items()]))\ - if len(index.kwargs) else '' - } - return text - -def _drop_index(index, autogen_context): - """ - Generate Alembic operations for the DROP INDEX of an - :class:`~sqlalchemy.schema.Index` instance. 
- """ - text = "%(prefix)sdrop_index(%(name)r, "\ - "table_name='%(table_name)s'%(schema)s)" % { - 'prefix': _alembic_autogenerate_prefix(autogen_context), - 'name': _render_gen_name(autogen_context, index.name), - 'table_name': index.table.name, - 'schema': ((", schema='%s'" % index.table.schema) - if index.table.schema else '') - } - return text - - -def _render_unique_constraint(constraint, autogen_context): - rendered = _user_defined_render("unique", constraint, autogen_context) - if rendered is not False: - return rendered - - return _uq_constraint(constraint, autogen_context, False) - - -def _add_unique_constraint(constraint, autogen_context): - """ - Generate Alembic operations for the ALTER TABLE .. ADD CONSTRAINT ... - UNIQUE of a :class:`~sqlalchemy.schema.UniqueConstraint` instance. - """ - return _uq_constraint(constraint, autogen_context, True) - -def _uq_constraint(constraint, autogen_context, alter): - opts = [] - if constraint.deferrable: - opts.append(("deferrable", str(constraint.deferrable))) - if constraint.initially: - opts.append(("initially", str(constraint.initially))) - if alter and constraint.table.schema: - opts.append(("schema", str(constraint.table.schema))) - if not alter and constraint.name: - opts.append(("name", _render_gen_name(autogen_context, constraint.name))) - - if alter: - args = [repr(_render_gen_name(autogen_context, constraint.name)), - repr(constraint.table.name)] - args.append(repr([col.name for col in constraint.columns])) - args.extend(["%s=%r" % (k, v) for k, v in opts]) - return "%(prefix)screate_unique_constraint(%(args)s)" % { - 'prefix': _alembic_autogenerate_prefix(autogen_context), - 'args': ", ".join(args) - } - else: - args = [repr(col.name) for col in constraint.columns] - args.extend(["%s=%r" % (k, v) for k, v in opts]) - return "%(prefix)sUniqueConstraint(%(args)s)" % { - "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), - "args": ", ".join(args) - } - - -def _add_fk_constraint(constraint, autogen_context): - raise NotImplementedError() - -def _add_pk_constraint(constraint, autogen_context): - raise NotImplementedError() - -def _add_check_constraint(constraint, autogen_context): - raise NotImplementedError() - -def _add_constraint(constraint, autogen_context): - """ - Dispatcher for the different types of constraints. - """ - funcs = { - "unique_constraint": _add_unique_constraint, - "foreign_key_constraint": _add_fk_constraint, - "primary_key_constraint": _add_pk_constraint, - "check_constraint": _add_check_constraint, - "column_check_constraint": _add_check_constraint, - } - return funcs[constraint.__visit_name__](constraint, autogen_context) - -def _drop_constraint(constraint, autogen_context): - """ - Generate Alembic operations for the ALTER TABLE ... DROP CONSTRAINT - of a :class:`~sqlalchemy.schema.UniqueConstraint` instance. 
- """ - text = "%(prefix)sdrop_constraint(%(name)r, '%(table_name)s'%(schema)s)" % { - 'prefix': _alembic_autogenerate_prefix(autogen_context), - 'name': _render_gen_name(autogen_context, constraint.name), - 'table_name': constraint.table.name, - 'schema': (", schema='%s'" % constraint.table.schema) - if constraint.table.schema else '', - } - return text - -def _add_column(schema, tname, column, autogen_context): - text = "%(prefix)sadd_column(%(tname)r, %(column)s" % { - "prefix": _alembic_autogenerate_prefix(autogen_context), - "tname": tname, - "column": _render_column(column, autogen_context) - } - if schema: - text += ", schema=%r" % schema - text += ")" - return text - -def _drop_column(schema, tname, column, autogen_context): - text = "%(prefix)sdrop_column(%(tname)r, %(cname)r" % { - "prefix": _alembic_autogenerate_prefix(autogen_context), - "tname": tname, - "cname": column.name - } - if schema: - text += ", schema=%r" % schema - text += ")" - return text - -def _modify_col(tname, cname, - autogen_context, - server_default=False, - type_=None, - nullable=None, - existing_type=None, - existing_nullable=None, - existing_server_default=False, - schema=None): - indent = " " * 11 - text = "%(prefix)salter_column(%(tname)r, %(cname)r" % { - 'prefix': _alembic_autogenerate_prefix( - autogen_context), - 'tname': tname, - 'cname': cname} - text += ",\n%sexisting_type=%s" % (indent, - _repr_type(existing_type, autogen_context)) - if server_default is not False: - rendered = _render_server_default( - server_default, autogen_context) - text += ",\n%sserver_default=%s" % (indent, rendered) - - if type_ is not None: - text += ",\n%stype_=%s" % (indent, - _repr_type(type_, autogen_context)) - if nullable is not None: - text += ",\n%snullable=%r" % ( - indent, nullable,) - if existing_nullable is not None: - text += ",\n%sexisting_nullable=%r" % ( - indent, existing_nullable) - if existing_server_default: - rendered = _render_server_default( - existing_server_default, - autogen_context) - text += ",\n%sexisting_server_default=%s" % ( - indent, rendered) - if schema: - text += ",\n%sschema=%r" % (indent, schema) - text += ")" - return text - -def _user_autogenerate_prefix(autogen_context): - prefix = autogen_context['opts']['user_module_prefix'] - if prefix is None: - return _sqlalchemy_autogenerate_prefix(autogen_context) - else: - return prefix - -def _sqlalchemy_autogenerate_prefix(autogen_context): - return autogen_context['opts']['sqlalchemy_module_prefix'] or '' - -def _alembic_autogenerate_prefix(autogen_context): - return autogen_context['opts']['alembic_module_prefix'] or '' - -def _user_defined_render(type_, object_, autogen_context): - if 'opts' in autogen_context and \ - 'render_item' in autogen_context['opts']: - render = autogen_context['opts']['render_item'] - if render: - rendered = render(type_, object_, autogen_context) - if rendered is not False: - return rendered - return False - -def _render_column(column, autogen_context): - rendered = _user_defined_render("column", column, autogen_context) - if rendered is not False: - return rendered - - opts = [] - if column.server_default: - rendered = _render_server_default( - column.server_default, autogen_context - ) - if rendered: - opts.append(("server_default", rendered)) - - if not column.autoincrement: - opts.append(("autoincrement", column.autoincrement)) - - if column.nullable is not None: - opts.append(("nullable", column.nullable)) - - # TODO: for non-ascii colname, assign a "key" - return "%(prefix)sColumn(%(name)r, %(type)s, 
%(kw)s)" % { - 'prefix': _sqlalchemy_autogenerate_prefix(autogen_context), - 'name': column.name, - 'type': _repr_type(column.type, autogen_context), - 'kw': ", ".join(["%s=%s" % (kwname, val) for kwname, val in opts]) - } - -def _render_server_default(default, autogen_context): - rendered = _user_defined_render("server_default", default, autogen_context) - if rendered is not False: - return rendered - - if isinstance(default, sa_schema.DefaultClause): - if isinstance(default.arg, string_types): - default = default.arg - else: - default = str(default.arg.compile( - dialect=autogen_context['dialect'])) - if isinstance(default, string_types): - # TODO: this is just a hack to get - # tests to pass until we figure out - # WTF sqlite is doing - default = re.sub(r"^'|'$", "", default) - return repr(default) - else: - return None - -def _repr_type(type_, autogen_context): - rendered = _user_defined_render("type", type_, autogen_context) - if rendered is not False: - return rendered - - mod = type(type_).__module__ - imports = autogen_context.get('imports', None) - if mod.startswith("sqlalchemy.dialects"): - dname = re.match(r"sqlalchemy\.dialects\.(\w+)", mod).group(1) - if imports is not None: - imports.add("from sqlalchemy.dialects import %s" % dname) - return "%s.%r" % (dname, type_) - elif mod.startswith("sqlalchemy"): - prefix = _sqlalchemy_autogenerate_prefix(autogen_context) - return "%s%r" % (prefix, type_) - else: - prefix = _user_autogenerate_prefix(autogen_context) - return "%s%r" % (prefix, type_) - -def _render_constraint(constraint, autogen_context): - renderer = _constraint_renderers.get(type(constraint), None) - if renderer: - return renderer(constraint, autogen_context) - else: - return None - -def _render_primary_key(constraint, autogen_context): - rendered = _user_defined_render("primary_key", constraint, autogen_context) - if rendered is not False: - return rendered - - if not constraint.columns: - return None - - opts = [] - if constraint.name: - opts.append(("name", repr(_render_gen_name(autogen_context, constraint.name)))) - return "%(prefix)sPrimaryKeyConstraint(%(args)s)" % { - "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), - "args": ", ".join( - [repr(c.key) for c in constraint.columns] + - ["%s=%s" % (kwname, val) for kwname, val in opts] - ), - } - -def _fk_colspec(fk, metadata_schema): - """Implement a 'safe' version of ForeignKey._get_colspec() that - never tries to resolve the remote table. 
- - """ - if metadata_schema is None: - return fk._get_colspec() - else: - # need to render schema breaking up tokens by hand, since the - # ForeignKeyConstraint here may not actually have a remote - # Table present - tokens = fk._colspec.split(".") - # no schema in the colspec, render it - if len(tokens) == 2: - return "%s.%s" % (metadata_schema, fk._colspec) - else: - return fk._colspec - -def _render_foreign_key(constraint, autogen_context): - rendered = _user_defined_render("foreign_key", constraint, autogen_context) - if rendered is not False: - return rendered - - opts = [] - if constraint.name: - opts.append(("name", repr(_render_gen_name(autogen_context, constraint.name)))) - if constraint.onupdate: - opts.append(("onupdate", repr(constraint.onupdate))) - if constraint.ondelete: - opts.append(("ondelete", repr(constraint.ondelete))) - if constraint.initially: - opts.append(("initially", repr(constraint.initially))) - if constraint.deferrable: - opts.append(("deferrable", repr(constraint.deferrable))) - if constraint.use_alter: - opts.append(("use_alter", repr(constraint.use_alter))) - - apply_metadata_schema = constraint.parent.metadata.schema - return "%(prefix)sForeignKeyConstraint([%(cols)s], "\ - "[%(refcols)s], %(args)s)" % { - "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), - "cols": ", ".join("'%s'" % f.parent.key for f in constraint.elements), - "refcols": ", ".join(repr(_fk_colspec(f, apply_metadata_schema)) - for f in constraint.elements), - "args": ", ".join( - ["%s=%s" % (kwname, val) for kwname, val in opts] - ), - } - -def _render_check_constraint(constraint, autogen_context): - rendered = _user_defined_render("check", constraint, autogen_context) - if rendered is not False: - return rendered - - # detect the constraint being part of - # a parent type which is probably in the Table already. - # ideally SQLAlchemy would give us more of a first class - # way to detect this. - if constraint._create_rule and \ - hasattr(constraint._create_rule, 'target') and \ - isinstance(constraint._create_rule.target, - sqltypes.TypeEngine): - return None - opts = [] - if constraint.name: - opts.append(("name", repr(_render_gen_name(autogen_context, constraint.name)))) - return "%(prefix)sCheckConstraint(%(sqltext)r%(opts)s)" % { - "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), - "opts": ", " + (", ".join("%s=%s" % (k, v) - for k, v in opts)) if opts else "", - "sqltext": str( - constraint.sqltext.compile( - dialect=autogen_context['dialect'] - ) - ) - } - -_constraint_renderers = { - sa_schema.PrimaryKeyConstraint: _render_primary_key, - sa_schema.ForeignKeyConstraint: _render_foreign_key, - sa_schema.UniqueConstraint: _render_unique_constraint, - sa_schema.CheckConstraint: _render_check_constraint -} diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/command.py b/Linux_i686/lib/python2.7/site-packages/alembic/command.py deleted file mode 100644 index f1c5962..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic/command.py +++ /dev/null @@ -1,266 +0,0 @@ -import os - -from .script import ScriptDirectory -from .environment import EnvironmentContext -from . 
import util, autogenerate as autogen - -def list_templates(config): - """List available templates""" - - config.print_stdout("Available templates:\n") - for tempname in os.listdir(config.get_template_directory()): - with open(os.path.join( - config.get_template_directory(), - tempname, - 'README')) as readme: - synopsis = next(readme) - config.print_stdout("%s - %s", tempname, synopsis) - - config.print_stdout("\nTemplates are used via the 'init' command, e.g.:") - config.print_stdout("\n alembic init --template pylons ./scripts") - -def init(config, directory, template='generic'): - """Initialize a new scripts directory.""" - - if os.access(directory, os.F_OK): - raise util.CommandError("Directory %s already exists" % directory) - - template_dir = os.path.join(config.get_template_directory(), - template) - if not os.access(template_dir, os.F_OK): - raise util.CommandError("No such template %r" % template) - - util.status("Creating directory %s" % os.path.abspath(directory), - os.makedirs, directory) - - versions = os.path.join(directory, 'versions') - util.status("Creating directory %s" % os.path.abspath(versions), - os.makedirs, versions) - - script = ScriptDirectory(directory) - - for file_ in os.listdir(template_dir): - file_path = os.path.join(template_dir, file_) - if file_ == 'alembic.ini.mako': - config_file = os.path.abspath(config.config_file_name) - if os.access(config_file, os.F_OK): - util.msg("File %s already exists, skipping" % config_file) - else: - script._generate_template( - file_path, - config_file, - script_location=directory - ) - elif os.path.isfile(file_path): - output_file = os.path.join(directory, file_) - script._copy_file( - file_path, - output_file - ) - - util.msg("Please edit configuration/connection/logging "\ - "settings in %r before proceeding." % config_file) - -def revision(config, message=None, autogenerate=False, sql=False): - """Create a new revision file.""" - - script = ScriptDirectory.from_config(config) - template_args = { - 'config': config # Let templates use config for - # e.g. 
multiple databases - } - imports = set() - - environment = util.asbool( - config.get_main_option("revision_environment") - ) - - if autogenerate: - environment = True - def retrieve_migrations(rev, context): - if script.get_revision(rev) is not script.get_revision("head"): - raise util.CommandError("Target database is not up to date.") - autogen._produce_migration_diffs(context, template_args, imports) - return [] - elif environment: - def retrieve_migrations(rev, context): - return [] - - if environment: - with EnvironmentContext( - config, - script, - fn=retrieve_migrations, - as_sql=sql, - template_args=template_args, - ): - script.run_env() - return script.generate_revision(util.rev_id(), message, refresh=True, - **template_args) - - -def upgrade(config, revision, sql=False, tag=None): - """Upgrade to a later version.""" - - script = ScriptDirectory.from_config(config) - - starting_rev = None - if ":" in revision: - if not sql: - raise util.CommandError("Range revision not allowed") - starting_rev, revision = revision.split(':', 2) - - def upgrade(rev, context): - return script._upgrade_revs(revision, rev) - - with EnvironmentContext( - config, - script, - fn=upgrade, - as_sql=sql, - starting_rev=starting_rev, - destination_rev=revision, - tag=tag - ): - script.run_env() - -def downgrade(config, revision, sql=False, tag=None): - """Revert to a previous version.""" - - script = ScriptDirectory.from_config(config) - starting_rev = None - if ":" in revision: - if not sql: - raise util.CommandError("Range revision not allowed") - starting_rev, revision = revision.split(':', 2) - elif sql: - raise util.CommandError("downgrade with --sql requires :") - - def downgrade(rev, context): - return script._downgrade_revs(revision, rev) - - with EnvironmentContext( - config, - script, - fn=downgrade, - as_sql=sql, - starting_rev=starting_rev, - destination_rev=revision, - tag=tag - ): - script.run_env() - -def history(config, rev_range=None): - """List changeset scripts in chronological order.""" - - script = ScriptDirectory.from_config(config) - if rev_range is not None: - if ":" not in rev_range: - raise util.CommandError( - "History range requires [start]:[end], " - "[start]:, or :[end]") - base, head = rev_range.strip().split(":") - else: - base = head = None - - def _display_history(config, script, base, head): - for sc in script.walk_revisions( - base=base or "base", - head=head or "head"): - if sc.is_head: - config.print_stdout("") - config.print_stdout(sc.log_entry) - - def _display_history_w_current(config, script, base=None, head=None): - def _display_current_history(rev, context): - if head is None: - _display_history(config, script, base, rev) - elif base is None: - _display_history(config, script, rev, head) - return [] - - with EnvironmentContext( - config, - script, - fn=_display_current_history - ): - script.run_env() - - if base == "current": - _display_history_w_current(config, script, head=head) - elif head == "current": - _display_history_w_current(config, script, base=base) - else: - _display_history(config, script, base, head) - - -def branches(config): - """Show current un-spliced branch points""" - script = ScriptDirectory.from_config(config) - for sc in script.walk_revisions(): - if sc.is_branch_point: - config.print_stdout(sc) - for rev in sc.nextrev: - config.print_stdout("%s -> %s", - " " * len(str(sc.down_revision)), - script.get_revision(rev) - ) - -def current(config, head_only=False): - """Display the current revision for each database.""" - - script = 
ScriptDirectory.from_config(config) - def display_version(rev, context): - rev = script.get_revision(rev) - - if head_only: - config.print_stdout("%s%s" % ( - rev.revision if rev else None, - " (head)" if rev and rev.is_head else "")) - - else: - config.print_stdout("Current revision for %s: %s", - util.obfuscate_url_pw( - context.connection.engine.url), - rev) - return [] - - with EnvironmentContext( - config, - script, - fn=display_version - ): - script.run_env() - -def stamp(config, revision, sql=False, tag=None): - """'stamp' the revision table with the given revision; don't - run any migrations.""" - - script = ScriptDirectory.from_config(config) - def do_stamp(rev, context): - if sql: - current = False - else: - current = context._current_rev() - dest = script.get_revision(revision) - if dest is not None: - dest = dest.revision - context._update_current_rev(current, dest) - return [] - with EnvironmentContext( - config, - script, - fn=do_stamp, - as_sql=sql, - destination_rev=revision, - tag=tag - ): - script.run_env() - -def splice(config, parent, child): - """'splice' two branches, creating a new revision file. - - this command isn't implemented right now. - - """ - raise NotImplementedError() diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/compat.py b/Linux_i686/lib/python2.7/site-packages/alembic/compat.py deleted file mode 100644 index aac0560..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic/compat.py +++ /dev/null @@ -1,130 +0,0 @@ -import io -import sys -from sqlalchemy import __version__ as sa_version - -if sys.version_info < (2, 6): - raise NotImplementedError("Python 2.6 or greater is required.") - -sqla_08 = sa_version >= '0.8.0' -sqla_09 = sa_version >= '0.9.0' - -py2k = sys.version_info < (3, 0) -py3k = sys.version_info >= (3, 0) -py33 = sys.version_info >= (3, 3) - -if py3k: - import builtins as compat_builtins - string_types = str, - binary_type = bytes - text_type = str - def callable(fn): - return hasattr(fn, '__call__') - - def u(s): - return s - -else: - import __builtin__ as compat_builtins - string_types = basestring, - binary_type = str - text_type = unicode - callable = callable - - def u(s): - return unicode(s, "utf-8") - -if py3k: - from configparser import ConfigParser as SafeConfigParser - import configparser -else: - from ConfigParser import SafeConfigParser - import ConfigParser as configparser - -if py2k: - from mako.util import parse_encoding - -if py33: - from importlib import machinery - def load_module_py(module_id, path): - return machinery.SourceFileLoader(module_id, path).load_module(module_id) - - def load_module_pyc(module_id, path): - return machinery.SourcelessFileLoader(module_id, path).load_module(module_id) - -else: - import imp - def load_module_py(module_id, path): - with open(path, 'rb') as fp: - mod = imp.load_source(module_id, path, fp) - if py2k: - source_encoding = parse_encoding(fp) - if source_encoding: - mod._alembic_source_encoding = source_encoding - return mod - - def load_module_pyc(module_id, path): - with open(path, 'rb') as fp: - mod = imp.load_compiled(module_id, path, fp) - # no source encoding here - return mod - -try: - exec_ = getattr(compat_builtins, 'exec') -except AttributeError: - # Python 2 - def exec_(func_text, globals_, lcl): - exec('exec func_text in globals_, lcl') - -################################################ -# cross-compatible metaclass implementation -# Copyright (c) 2010-2012 Benjamin Peterson -def with_metaclass(meta, base=object): - """Create a base class with a 
metaclass.""" - return meta("%sBase" % meta.__name__, (base,), {}) -################################################ - - -# produce a wrapper that allows encoded text to stream -# into a given buffer, but doesn't close it. -# not sure of a more idiomatic approach to this. -class EncodedIO(io.TextIOWrapper): - def close(self): - pass - -if py2k: - # in Py2K, the io.* package is awkward because it does not - # easily wrap the file type (e.g. sys.stdout) and I can't - # figure out at all how to wrap StringIO.StringIO (used by nosetests) - # and also might be user specified too. So create a full - # adapter. - - class ActLikePy3kIO(object): - """Produce an object capable of wrapping either - sys.stdout (e.g. file) *or* StringIO.StringIO(). - - """ - def _false(self): - return False - - def _true(self): - return True - - readable = seekable = _false - writable = _true - closed = False - - def __init__(self, file_): - self.file_ = file_ - - def write(self, text): - return self.file_.write(text) - - def flush(self): - return self.file_.flush() - - class EncodedIO(EncodedIO): - def __init__(self, file_, encoding): - super(EncodedIO, self).__init__( - ActLikePy3kIO(file_), encoding=encoding) - - diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/config.py b/Linux_i686/lib/python2.7/site-packages/alembic/config.py deleted file mode 100644 index 86ff1df..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic/config.py +++ /dev/null @@ -1,301 +0,0 @@ -from argparse import ArgumentParser -from .compat import SafeConfigParser -import inspect -import os -import sys - -from . import command, util, package_dir, compat - -class Config(object): - """Represent an Alembic configuration. - - Within an ``env.py`` script, this is available - via the :attr:`.EnvironmentContext.config` attribute, - which in turn is available at ``alembic.context``:: - - from alembic import context - - some_param = context.config.get_main_option("my option") - - When invoking Alembic programatically, a new - :class:`.Config` can be created by passing - the name of an .ini file to the constructor:: - - from alembic.config import Config - alembic_cfg = Config("/path/to/yourapp/alembic.ini") - - With a :class:`.Config` object, you can then - run Alembic commands programmatically using the directives - in :mod:`alembic.command`. - - The :class:`.Config` object can also be constructed without - a filename. Values can be set programmatically, and - new sections will be created as needed:: - - from alembic.config import Config - alembic_cfg = Config() - alembic_cfg.set_main_option("script_location", "myapp:migrations") - alembic_cfg.set_main_option("url", "postgresql://foo/bar") - alembic_cfg.set_section_option("mysection", "foo", "bar") - - :param file_: name of the .ini file to open. - :param ini_section: name of the main Alembic section within the - .ini file - :param output_buffer: optional file-like input buffer which - will be passed to the :class:`.MigrationContext` - used to redirect - the output of "offline generation" when using Alembic programmatically. - :param stdout: buffer where the "print" output of commands will be sent. - Defaults to ``sys.stdout``. 
- - ..versionadded:: 0.4 - - """ - def __init__(self, file_=None, ini_section='alembic', output_buffer=None, - stdout=sys.stdout, cmd_opts=None): - """Construct a new :class:`.Config` - - """ - self.config_file_name = file_ - self.config_ini_section = ini_section - self.output_buffer = output_buffer - self.stdout = stdout - self.cmd_opts = cmd_opts - - cmd_opts = None - """The command-line options passed to the ``alembic`` script. - - Within an ``env.py`` script this can be accessed via the - :attr:`.EnvironmentContext.config` attribute. - - .. versionadded:: 0.6.0 - - .. seealso:: - - :meth:`.EnvironmentContext.get_x_argument` - - """ - - config_file_name = None - """Filesystem path to the .ini file in use.""" - - config_ini_section = None - """Name of the config file section to read basic configuration - from. Defaults to ``alembic``, that is the ``[alembic]`` section - of the .ini file. This value is modified using the ``-n/--name`` - option to the Alembic runnier. - - """ - - def print_stdout(self, text, *arg): - """Render a message to standard out.""" - - util.write_outstream( - self.stdout, - (compat.text_type(text) % arg), - "\n" - ) - - @util.memoized_property - def file_config(self): - """Return the underlying :class:`ConfigParser` object. - - Direct access to the .ini file is available here, - though the :meth:`.Config.get_section` and - :meth:`.Config.get_main_option` - methods provide a possibly simpler interface. - - """ - - if self.config_file_name: - here = os.path.abspath(os.path.dirname(self.config_file_name)) - else: - here = "" - file_config = SafeConfigParser({'here': here}) - if self.config_file_name: - file_config.read([self.config_file_name]) - else: - file_config.add_section(self.config_ini_section) - return file_config - - def get_template_directory(self): - """Return the directory where Alembic setup templates are found. - - This method is used by the alembic ``init`` and ``list_templates`` - commands. - - """ - return os.path.join(package_dir, 'templates') - - def get_section(self, name): - """Return all the configuration options from a given .ini file section - as a dictionary. - - """ - return dict(self.file_config.items(name)) - - def set_main_option(self, name, value): - """Set an option programmatically within the 'main' section. - - This overrides whatever was in the .ini file. - - """ - self.file_config.set(self.config_ini_section, name, value) - - def remove_main_option(self, name): - self.file_config.remove_option(self.config_ini_section, name) - - def set_section_option(self, section, name, value): - """Set an option programmatically within the given section. - - The section is created if it doesn't exist already. - The value here will override whatever was in the .ini - file. - - """ - if not self.file_config.has_section(section): - self.file_config.add_section(section) - self.file_config.set(section, name, value) - - def get_section_option(self, section, name, default=None): - """Return an option from the given section of the .ini file. - - """ - if not self.file_config.has_section(section): - raise util.CommandError("No config file %r found, or file has no " - "'[%s]' section" % - (self.config_file_name, section)) - if self.file_config.has_option(section, name): - return self.file_config.get(section, name) - else: - return default - - def get_main_option(self, name, default=None): - """Return an option from the 'main' section of the .ini file. 
- - This defaults to being a key from the ``[alembic]`` - section, unless the ``-n/--name`` flag were used to - indicate a different section. - - """ - return self.get_section_option(self.config_ini_section, name, default) - - -class CommandLine(object): - def __init__(self, prog=None): - self._generate_args(prog) - - - def _generate_args(self, prog): - def add_options(parser, positional, kwargs): - if 'template' in kwargs: - parser.add_argument("-t", "--template", - default='generic', - type=str, - help="Setup template for use with 'init'") - if 'message' in kwargs: - parser.add_argument("-m", "--message", - type=str, - help="Message string to use with 'revision'") - if 'sql' in kwargs: - parser.add_argument("--sql", - action="store_true", - help="Don't emit SQL to database - dump to " - "standard output/file instead") - if 'tag' in kwargs: - parser.add_argument("--tag", - type=str, - help="Arbitrary 'tag' name - can be used by " - "custom env.py scripts.") - if 'autogenerate' in kwargs: - parser.add_argument("--autogenerate", - action="store_true", - help="Populate revision script with candidate " - "migration operations, based on comparison " - "of database to model.") - # "current" command - if 'head_only' in kwargs: - parser.add_argument("--head-only", - action="store_true", - help="Only show current version and " - "whether or not this is the head revision.") - - if 'rev_range' in kwargs: - parser.add_argument("-r", "--rev-range", - action="store", - help="Specify a revision range; " - "format is [start]:[end]") - - - positional_help = { - 'directory': "location of scripts directory", - 'revision': "revision identifier" - } - for arg in positional: - subparser.add_argument(arg, help=positional_help.get(arg)) - - parser = ArgumentParser(prog=prog) - parser.add_argument("-c", "--config", - type=str, - default="alembic.ini", - help="Alternate config file") - parser.add_argument("-n", "--name", - type=str, - default="alembic", - help="Name of section in .ini file to " - "use for Alembic config") - parser.add_argument("-x", action="append", - help="Additional arguments consumed by " - "custom env.py scripts, e.g. 
-x " - "setting1=somesetting -x setting2=somesetting") - - subparsers = parser.add_subparsers() - - for fn in [getattr(command, n) for n in dir(command)]: - if inspect.isfunction(fn) and \ - fn.__name__[0] != '_' and \ - fn.__module__ == 'alembic.command': - - spec = inspect.getargspec(fn) - if spec[3]: - positional = spec[0][1:-len(spec[3])] - kwarg = spec[0][-len(spec[3]):] - else: - positional = spec[0][1:] - kwarg = [] - - subparser = subparsers.add_parser( - fn.__name__, - help=fn.__doc__) - add_options(subparser, positional, kwarg) - subparser.set_defaults(cmd=(fn, positional, kwarg)) - self.parser = parser - - def run_cmd(self, config, options): - fn, positional, kwarg = options.cmd - - try: - fn(config, - *[getattr(options, k) for k in positional], - **dict((k, getattr(options, k)) for k in kwarg) - ) - except util.CommandError as e: - util.err(str(e)) - - def main(self, argv=None): - options = self.parser.parse_args(argv) - if not hasattr(options, "cmd"): - # see http://bugs.python.org/issue9253, argparse - # behavior changed incompatibly in py3.3 - self.parser.error("too few arguments") - else: - cfg = Config(file_=options.config, - ini_section=options.name, cmd_opts=options) - self.run_cmd(cfg, options) - -def main(argv=None, prog=None, **kwargs): - """The console runner function for Alembic.""" - - CommandLine(prog=prog).main(argv=argv) - -if __name__ == '__main__': - main() \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/context.py b/Linux_i686/lib/python2.7/site-packages/alembic/context.py deleted file mode 100644 index 9c0f676..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic/context.py +++ /dev/null @@ -1,6 +0,0 @@ -from .environment import EnvironmentContext -from . import util - -# create proxy functions for -# each method on the EnvironmentContext class. -util.create_module_class_proxy(EnvironmentContext, globals(), locals()) diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/ddl/__init__.py b/Linux_i686/lib/python2.7/site-packages/alembic/ddl/__init__.py deleted file mode 100644 index bfc8ab4..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic/ddl/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -from . import postgresql, mysql, sqlite, mssql, oracle -from .impl import DefaultImpl diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/ddl/base.py b/Linux_i686/lib/python2.7/site-packages/alembic/ddl/base.py deleted file mode 100644 index 5d703a5..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic/ddl/base.py +++ /dev/null @@ -1,161 +0,0 @@ -import functools - -from sqlalchemy.ext.compiler import compiles -from sqlalchemy.schema import DDLElement, Column -from sqlalchemy import Integer -from sqlalchemy import types as sqltypes - -class AlterTable(DDLElement): - """Represent an ALTER TABLE statement. - - Only the string name and optional schema name of the table - is required, not a full Table object. 
- - """ - def __init__(self, table_name, schema=None): - self.table_name = table_name - self.schema = schema - -class RenameTable(AlterTable): - def __init__(self, old_table_name, new_table_name, schema=None): - super(RenameTable, self).__init__(old_table_name, schema=schema) - self.new_table_name = new_table_name - -class AlterColumn(AlterTable): - def __init__(self, name, column_name, schema=None, - existing_type=None, - existing_nullable=None, - existing_server_default=None): - super(AlterColumn, self).__init__(name, schema=schema) - self.column_name = column_name - self.existing_type=sqltypes.to_instance(existing_type) \ - if existing_type is not None else None - self.existing_nullable=existing_nullable - self.existing_server_default=existing_server_default - -class ColumnNullable(AlterColumn): - def __init__(self, name, column_name, nullable, **kw): - super(ColumnNullable, self).__init__(name, column_name, - **kw) - self.nullable = nullable - -class ColumnType(AlterColumn): - def __init__(self, name, column_name, type_, **kw): - super(ColumnType, self).__init__(name, column_name, - **kw) - self.type_ = sqltypes.to_instance(type_) - -class ColumnName(AlterColumn): - def __init__(self, name, column_name, newname, **kw): - super(ColumnName, self).__init__(name, column_name, **kw) - self.newname = newname - -class ColumnDefault(AlterColumn): - def __init__(self, name, column_name, default, **kw): - super(ColumnDefault, self).__init__(name, column_name, **kw) - self.default = default - -class AddColumn(AlterTable): - def __init__(self, name, column, schema=None): - super(AddColumn, self).__init__(name, schema=schema) - self.column = column - -class DropColumn(AlterTable): - def __init__(self, name, column, schema=None): - super(DropColumn, self).__init__(name, schema=schema) - self.column = column - - -@compiles(RenameTable) -def visit_rename_table(element, compiler, **kw): - return "%s RENAME TO %s" % ( - alter_table(compiler, element.table_name, element.schema), - format_table_name(compiler, element.new_table_name, element.schema) - ) - -@compiles(AddColumn) -def visit_add_column(element, compiler, **kw): - return "%s %s" % ( - alter_table(compiler, element.table_name, element.schema), - add_column(compiler, element.column, **kw) - ) - -@compiles(DropColumn) -def visit_drop_column(element, compiler, **kw): - return "%s %s" % ( - alter_table(compiler, element.table_name, element.schema), - drop_column(compiler, element.column.name, **kw) - ) - -@compiles(ColumnNullable) -def visit_column_nullable(element, compiler, **kw): - return "%s %s %s" % ( - alter_table(compiler, element.table_name, element.schema), - alter_column(compiler, element.column_name), - "DROP NOT NULL" if element.nullable else "SET NOT NULL" - ) - -@compiles(ColumnType) -def visit_column_type(element, compiler, **kw): - return "%s %s %s" % ( - alter_table(compiler, element.table_name, element.schema), - alter_column(compiler, element.column_name), - "TYPE %s" % format_type(compiler, element.type_) - ) - -@compiles(ColumnName) -def visit_column_name(element, compiler, **kw): - return "%s RENAME %s TO %s" % ( - alter_table(compiler, element.table_name, element.schema), - format_column_name(compiler, element.column_name), - format_column_name(compiler, element.newname) - ) - -@compiles(ColumnDefault) -def visit_column_default(element, compiler, **kw): - return "%s %s %s" % ( - alter_table(compiler, element.table_name, element.schema), - alter_column(compiler, element.column_name), - "SET DEFAULT %s" % - 
format_server_default(compiler, element.default) - if element.default is not None - else "DROP DEFAULT" - ) - -def quote_dotted(name, quote): - """quote the elements of a dotted name""" - - result = '.'.join([quote(x) for x in name.split('.')]) - return result - -def format_table_name(compiler, name, schema): - quote = functools.partial(compiler.preparer.quote, force=None) - if schema: - return quote_dotted(schema, quote) + "." + quote(name) - else: - return quote(name) - -def format_column_name(compiler, name): - return compiler.preparer.quote(name, None) - -def format_server_default(compiler, default): - return compiler.get_column_default_string( - Column("x", Integer, server_default=default) - ) - -def format_type(compiler, type_): - return compiler.dialect.type_compiler.process(type_) - -def alter_table(compiler, name, schema): - return "ALTER TABLE %s" % format_table_name(compiler, name, schema) - -def drop_column(compiler, name): - return 'DROP COLUMN %s' % format_column_name(compiler, name) - -def alter_column(compiler, name): - return 'ALTER COLUMN %s' % format_column_name(compiler, name) - -def add_column(compiler, column, **kw): - return "ADD COLUMN %s" % compiler.get_column_specification(column, **kw) - - diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/ddl/impl.py b/Linux_i686/lib/python2.7/site-packages/alembic/ddl/impl.py deleted file mode 100644 index 79cbd36..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic/ddl/impl.py +++ /dev/null @@ -1,279 +0,0 @@ -from sqlalchemy.sql.expression import _BindParamClause -from sqlalchemy.ext.compiler import compiles -from sqlalchemy import schema, text -from sqlalchemy import types as sqltypes - -from ..compat import string_types, text_type, with_metaclass -from .. import util -from . import base - -class ImplMeta(type): - def __init__(cls, classname, bases, dict_): - newtype = type.__init__(cls, classname, bases, dict_) - if '__dialect__' in dict_: - _impls[dict_['__dialect__']] = cls - return newtype - -_impls = {} - -class DefaultImpl(with_metaclass(ImplMeta)): - """Provide the entrypoint for major migration operations, - including database-specific behavioral variances. - - While individual SQL/DDL constructs already provide - for database-specific implementations, variances here - allow for entirely different sequences of operations - to take place for a particular migration, such as - SQL Server's special 'IDENTITY INSERT' step for - bulk inserts. 
- - """ - __dialect__ = 'default' - - transactional_ddl = False - command_terminator = ";" - - def __init__(self, dialect, connection, as_sql, - transactional_ddl, output_buffer, - context_opts): - self.dialect = dialect - self.connection = connection - self.as_sql = as_sql - self.output_buffer = output_buffer - self.memo = {} - self.context_opts = context_opts - if transactional_ddl is not None: - self.transactional_ddl = transactional_ddl - - @classmethod - def get_by_dialect(cls, dialect): - return _impls[dialect.name] - - def static_output(self, text): - self.output_buffer.write(text_type(text + "\n\n")) - self.output_buffer.flush() - - @property - def bind(self): - return self.connection - - def _exec(self, construct, execution_options=None, - multiparams=(), - params=util.immutabledict()): - if isinstance(construct, string_types): - construct = text(construct) - if self.as_sql: - if multiparams or params: - # TODO: coverage - raise Exception("Execution arguments not allowed with as_sql") - self.static_output(text_type( - construct.compile(dialect=self.dialect) - ).replace("\t", " ").strip() + self.command_terminator) - else: - conn = self.connection - if execution_options: - conn = conn.execution_options(**execution_options) - conn.execute(construct, *multiparams, **params) - - def execute(self, sql, execution_options=None): - self._exec(sql, execution_options) - - def alter_column(self, table_name, column_name, - nullable=None, - server_default=False, - name=None, - type_=None, - schema=None, - autoincrement=None, - existing_type=None, - existing_server_default=None, - existing_nullable=None, - existing_autoincrement=None - ): - if autoincrement is not None or existing_autoincrement is not None: - util.warn("nautoincrement and existing_autoincrement only make sense for MySQL") - if nullable is not None: - self._exec(base.ColumnNullable(table_name, column_name, - nullable, schema=schema, - existing_type=existing_type, - existing_server_default=existing_server_default, - existing_nullable=existing_nullable, - )) - if server_default is not False: - self._exec(base.ColumnDefault( - table_name, column_name, server_default, - schema=schema, - existing_type=existing_type, - existing_server_default=existing_server_default, - existing_nullable=existing_nullable, - )) - if type_ is not None: - self._exec(base.ColumnType( - table_name, column_name, type_, schema=schema, - existing_type=existing_type, - existing_server_default=existing_server_default, - existing_nullable=existing_nullable, - )) - # do the new name last ;) - if name is not None: - self._exec(base.ColumnName( - table_name, column_name, name, schema=schema, - existing_type=existing_type, - existing_server_default=existing_server_default, - existing_nullable=existing_nullable, - )) - - def add_column(self, table_name, column, schema=None): - self._exec(base.AddColumn(table_name, column, schema=schema)) - - def drop_column(self, table_name, column, schema=None, **kw): - self._exec(base.DropColumn(table_name, column, schema=schema)) - - def add_constraint(self, const): - if const._create_rule is None or \ - const._create_rule(self): - self._exec(schema.AddConstraint(const)) - - def drop_constraint(self, const): - self._exec(schema.DropConstraint(const)) - - def rename_table(self, old_table_name, new_table_name, schema=None): - self._exec(base.RenameTable(old_table_name, - new_table_name, schema=schema)) - - def create_table(self, table): - if util.sqla_07: - table.dispatch.before_create(table, self.connection, - checkfirst=False, - 
_ddl_runner=self) - self._exec(schema.CreateTable(table)) - if util.sqla_07: - table.dispatch.after_create(table, self.connection, - checkfirst=False, - _ddl_runner=self) - for index in table.indexes: - self._exec(schema.CreateIndex(index)) - - def drop_table(self, table): - self._exec(schema.DropTable(table)) - - def create_index(self, index): - self._exec(schema.CreateIndex(index)) - - def drop_index(self, index): - self._exec(schema.DropIndex(index)) - - def bulk_insert(self, table, rows, multiinsert=True): - if not isinstance(rows, list): - raise TypeError("List expected") - elif rows and not isinstance(rows[0], dict): - raise TypeError("List of dictionaries expected") - if self.as_sql: - for row in rows: - self._exec(table.insert(inline=True).values(**dict( - (k, - _literal_bindparam(k, v, type_=table.c[k].type) - if not isinstance(v, _literal_bindparam) else v) - for k, v in row.items() - ))) - else: - # work around http://www.sqlalchemy.org/trac/ticket/2461 - if not hasattr(table, '_autoincrement_column'): - table._autoincrement_column = None - if rows: - if multiinsert: - self._exec(table.insert(inline=True), multiparams=rows) - else: - for row in rows: - self._exec(table.insert(inline=True).values(**row)) - - def compare_type(self, inspector_column, metadata_column): - - conn_type = inspector_column.type - metadata_type = metadata_column.type - - metadata_impl = metadata_type.dialect_impl(self.dialect) - - # work around SQLAlchemy bug "stale value for type affinity" - # fixed in 0.7.4 - metadata_impl.__dict__.pop('_type_affinity', None) - - if conn_type._compare_type_affinity( - metadata_impl - ): - comparator = _type_comparators.get(conn_type._type_affinity, None) - - return comparator and comparator(metadata_type, conn_type) - else: - return True - - def compare_server_default(self, inspector_column, - metadata_column, - rendered_metadata_default, - rendered_inspector_default): - return rendered_inspector_default != rendered_metadata_default - - def correct_for_autogen_constraints(self, conn_uniques, conn_indexes, - metadata_unique_constraints, - metadata_indexes): - pass - - def start_migrations(self): - """A hook called when :meth:`.EnvironmentContext.run_migrations` - is called. - - Implementations can set up per-migration-run state here. - - """ - - def emit_begin(self): - """Emit the string ``BEGIN``, or the backend-specific - equivalent, on the current connection context. - - This is used in offline mode and typically - via :meth:`.EnvironmentContext.begin_transaction`. - - """ - self.static_output("BEGIN" + self.command_terminator) - - def emit_commit(self): - """Emit the string ``COMMIT``, or the backend-specific - equivalent, on the current connection context. - - This is used in offline mode and typically - via :meth:`.EnvironmentContext.begin_transaction`. 
- - """ - self.static_output("COMMIT" + self.command_terminator) - -class _literal_bindparam(_BindParamClause): - pass - -@compiles(_literal_bindparam) -def _render_literal_bindparam(element, compiler, **kw): - return compiler.render_literal_bindparam(element, **kw) - - -def _string_compare(t1, t2): - return \ - t1.length is not None and \ - t1.length != t2.length - -def _numeric_compare(t1, t2): - return \ - ( - t1.precision is not None and \ - t1.precision != t2.precision - ) or \ - ( - t1.scale is not None and \ - t1.scale != t2.scale - ) -_type_comparators = { - sqltypes.String:_string_compare, - sqltypes.Numeric:_numeric_compare -} - - - - diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/ddl/mssql.py b/Linux_i686/lib/python2.7/site-packages/alembic/ddl/mssql.py deleted file mode 100644 index fece08b..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic/ddl/mssql.py +++ /dev/null @@ -1,217 +0,0 @@ -from sqlalchemy.ext.compiler import compiles - -from .. import util -from .impl import DefaultImpl -from .base import alter_table, AddColumn, ColumnName, \ - format_table_name, format_column_name, ColumnNullable, alter_column,\ - format_server_default,ColumnDefault, format_type, ColumnType -from sqlalchemy.sql.expression import ClauseElement, Executable - -class MSSQLImpl(DefaultImpl): - __dialect__ = 'mssql' - transactional_ddl = True - batch_separator = "GO" - - def __init__(self, *arg, **kw): - super(MSSQLImpl, self).__init__(*arg, **kw) - self.batch_separator = self.context_opts.get( - "mssql_batch_separator", - self.batch_separator) - - def _exec(self, construct, *args, **kw): - super(MSSQLImpl, self)._exec(construct, *args, **kw) - if self.as_sql and self.batch_separator: - self.static_output(self.batch_separator) - - def emit_begin(self): - self.static_output("BEGIN TRANSACTION" + self.command_terminator) - - def emit_commit(self): - super(MSSQLImpl, self).emit_commit() - if self.as_sql and self.batch_separator: - self.static_output(self.batch_separator) - - def alter_column(self, table_name, column_name, - nullable=None, - server_default=False, - name=None, - type_=None, - schema=None, - autoincrement=None, - existing_type=None, - existing_server_default=None, - existing_nullable=None, - existing_autoincrement=None - ): - - if nullable is not None and existing_type is None: - if type_ is not None: - existing_type = type_ - # the NULL/NOT NULL alter will handle - # the type alteration - type_ = None - else: - raise util.CommandError( - "MS-SQL ALTER COLUMN operations " - "with NULL or NOT NULL require the " - "existing_type or a new type_ be passed.") - - super(MSSQLImpl, self).alter_column( - table_name, column_name, - nullable=nullable, - type_=type_, - schema=schema, - autoincrement=autoincrement, - existing_type=existing_type, - existing_nullable=existing_nullable, - existing_autoincrement=existing_autoincrement - ) - - if server_default is not False: - if existing_server_default is not False or \ - server_default is None: - self._exec( - _ExecDropConstraint( - table_name, column_name, - 'sys.default_constraints') - ) - if server_default is not None: - super(MSSQLImpl, self).alter_column( - table_name, column_name, - schema=schema, - server_default=server_default) - - if name is not None: - super(MSSQLImpl, self).alter_column( - table_name, column_name, - schema=schema, - name=name) - - def bulk_insert(self, table, rows, **kw): - if self.as_sql: - self._exec( - "SET IDENTITY_INSERT %s ON" % - self.dialect.identifier_preparer.format_table(table) - ) - 
super(MSSQLImpl, self).bulk_insert(table, rows, **kw) - self._exec( - "SET IDENTITY_INSERT %s OFF" % - self.dialect.identifier_preparer.format_table(table) - ) - else: - super(MSSQLImpl, self).bulk_insert(table, rows, **kw) - - - def drop_column(self, table_name, column, **kw): - drop_default = kw.pop('mssql_drop_default', False) - if drop_default: - self._exec( - _ExecDropConstraint( - table_name, column, - 'sys.default_constraints') - ) - drop_check = kw.pop('mssql_drop_check', False) - if drop_check: - self._exec( - _ExecDropConstraint( - table_name, column, - 'sys.check_constraints') - ) - drop_fks = kw.pop('mssql_drop_foreign_key', False) - if drop_fks: - self._exec( - _ExecDropFKConstraint(table_name, column) - ) - super(MSSQLImpl, self).drop_column(table_name, column) - -class _ExecDropConstraint(Executable, ClauseElement): - def __init__(self, tname, colname, type_): - self.tname = tname - self.colname = colname - self.type_ = type_ - -class _ExecDropFKConstraint(Executable, ClauseElement): - def __init__(self, tname, colname): - self.tname = tname - self.colname = colname - - -@compiles(_ExecDropConstraint, 'mssql') -def _exec_drop_col_constraint(element, compiler, **kw): - tname, colname, type_ = element.tname, element.colname, element.type_ - # from http://www.mssqltips.com/sqlservertip/1425/working-with-default-constraints-in-sql-server/ - # TODO: needs table formatting, etc. - return """declare @const_name varchar(256) -select @const_name = [name] from %(type)s -where parent_object_id = object_id('%(tname)s') -and col_name(parent_object_id, parent_column_id) = '%(colname)s' -exec('alter table %(tname_quoted)s drop constraint ' + @const_name)""" % { - 'type': type_, - 'tname': tname, - 'colname': colname, - 'tname_quoted': format_table_name(compiler, tname, None), - } - -@compiles(_ExecDropFKConstraint, 'mssql') -def _exec_drop_col_fk_constraint(element, compiler, **kw): - tname, colname = element.tname, element.colname - - return """declare @const_name varchar(256) -select @const_name = [name] from - sys.foreign_keys fk join sys.foreign_key_columns fkc - on fk.object_id=fkc.constraint_object_id -where fkc.parent_object_id = object_id('%(tname)s') -and col_name(fkc.parent_object_id, fkc.parent_column_id) = '%(colname)s' -exec('alter table %(tname_quoted)s drop constraint ' + @const_name)""" % { - 'tname': tname, - 'colname': colname, - 'tname_quoted': format_table_name(compiler, tname, None), - } - - - -@compiles(AddColumn, 'mssql') -def visit_add_column(element, compiler, **kw): - return "%s %s" % ( - alter_table(compiler, element.table_name, element.schema), - mssql_add_column(compiler, element.column, **kw) - ) - -def mssql_add_column(compiler, column, **kw): - return "ADD %s" % compiler.get_column_specification(column, **kw) - -@compiles(ColumnNullable, 'mssql') -def visit_column_nullable(element, compiler, **kw): - return "%s %s %s %s" % ( - alter_table(compiler, element.table_name, element.schema), - alter_column(compiler, element.column_name), - format_type(compiler, element.existing_type), - "NULL" if element.nullable else "NOT NULL" - ) - -@compiles(ColumnDefault, 'mssql') -def visit_column_default(element, compiler, **kw): - # TODO: there can also be a named constraint - # with ADD CONSTRAINT here - return "%s ADD DEFAULT %s FOR %s" % ( - alter_table(compiler, element.table_name, element.schema), - format_server_default(compiler, element.default), - format_column_name(compiler, element.column_name) - ) - -@compiles(ColumnName, 'mssql') -def 
visit_rename_column(element, compiler, **kw): - return "EXEC sp_rename '%s.%s', %s, 'COLUMN'" % ( - format_table_name(compiler, element.table_name, element.schema), - format_column_name(compiler, element.column_name), - format_column_name(compiler, element.newname) - ) - -@compiles(ColumnType, 'mssql') -def visit_column_type(element, compiler, **kw): - return "%s %s %s" % ( - alter_table(compiler, element.table_name, element.schema), - alter_column(compiler, element.column_name), - format_type(compiler, element.type_) - ) - diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/ddl/mysql.py b/Linux_i686/lib/python2.7/site-packages/alembic/ddl/mysql.py deleted file mode 100644 index 96f42f3..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic/ddl/mysql.py +++ /dev/null @@ -1,212 +0,0 @@ -from sqlalchemy.ext.compiler import compiles -from sqlalchemy import types as sqltypes -from sqlalchemy import schema - -from ..compat import string_types -from .. import util -from .impl import DefaultImpl -from .base import ColumnNullable, ColumnName, ColumnDefault, \ - ColumnType, AlterColumn, format_column_name, \ - format_server_default -from .base import alter_table - -class MySQLImpl(DefaultImpl): - __dialect__ = 'mysql' - - transactional_ddl = False - - def alter_column(self, table_name, column_name, - nullable=None, - server_default=False, - name=None, - type_=None, - schema=None, - autoincrement=None, - existing_type=None, - existing_server_default=None, - existing_nullable=None, - existing_autoincrement=None - ): - if name is not None: - self._exec( - MySQLChangeColumn( - table_name, column_name, - schema=schema, - newname=name, - nullable=nullable if nullable is not None else - existing_nullable - if existing_nullable is not None - else True, - type_=type_ if type_ is not None else existing_type, - default=server_default if server_default is not False - else existing_server_default, - autoincrement=autoincrement if autoincrement is not None - else existing_autoincrement - ) - ) - elif nullable is not None or \ - type_ is not None or \ - autoincrement is not None: - self._exec( - MySQLModifyColumn( - table_name, column_name, - schema=schema, - newname=name if name is not None else column_name, - nullable=nullable if nullable is not None else - existing_nullable - if existing_nullable is not None - else True, - type_=type_ if type_ is not None else existing_type, - default=server_default if server_default is not False - else existing_server_default, - autoincrement=autoincrement if autoincrement is not None - else existing_autoincrement - ) - ) - elif server_default is not False: - self._exec( - MySQLAlterDefault( - table_name, column_name, server_default, - schema=schema, - ) - ) - - def correct_for_autogen_constraints(self, conn_unique_constraints, - conn_indexes, - metadata_unique_constraints, - metadata_indexes): - removed = set() - for idx in list(conn_indexes): - # MySQL puts implicit indexes on FK columns, even if - # composite and even if MyISAM, so can't check this too easily - if idx.name == idx.columns.keys()[0]: - conn_indexes.remove(idx) - removed.add(idx.name) - - # then remove indexes from the "metadata_indexes" - # that we've removed from reflected, otherwise they come out - # as adds (see #202) - for idx in list(metadata_indexes): - if idx.name in removed: - metadata_indexes.remove(idx) - -class MySQLAlterDefault(AlterColumn): - def __init__(self, name, column_name, default, schema=None): - super(AlterColumn, self).__init__(name, schema=schema) - self.column_name = 
column_name - self.default = default - - -class MySQLChangeColumn(AlterColumn): - def __init__(self, name, column_name, schema=None, - newname=None, - type_=None, - nullable=None, - default=False, - autoincrement=None): - super(AlterColumn, self).__init__(name, schema=schema) - self.column_name = column_name - self.nullable = nullable - self.newname = newname - self.default = default - self.autoincrement = autoincrement - if type_ is None: - raise util.CommandError( - "All MySQL CHANGE/MODIFY COLUMN operations " - "require the existing type." - ) - - self.type_ = sqltypes.to_instance(type_) - -class MySQLModifyColumn(MySQLChangeColumn): - pass - - -@compiles(ColumnNullable, 'mysql') -@compiles(ColumnName, 'mysql') -@compiles(ColumnDefault, 'mysql') -@compiles(ColumnType, 'mysql') -def _mysql_doesnt_support_individual(element, compiler, **kw): - raise NotImplementedError( - "Individual alter column constructs not supported by MySQL" - ) - - -@compiles(MySQLAlterDefault, "mysql") -def _mysql_alter_default(element, compiler, **kw): - return "%s ALTER COLUMN %s %s" % ( - alter_table(compiler, element.table_name, element.schema), - format_column_name(compiler, element.column_name), - "SET DEFAULT %s" % format_server_default(compiler, element.default) - if element.default is not None - else "DROP DEFAULT" - ) - -@compiles(MySQLModifyColumn, "mysql") -def _mysql_modify_column(element, compiler, **kw): - return "%s MODIFY %s %s" % ( - alter_table(compiler, element.table_name, element.schema), - format_column_name(compiler, element.column_name), - _mysql_colspec( - compiler, - nullable=element.nullable, - server_default=element.default, - type_=element.type_, - autoincrement=element.autoincrement - ), - ) - - -@compiles(MySQLChangeColumn, "mysql") -def _mysql_change_column(element, compiler, **kw): - return "%s CHANGE %s %s %s" % ( - alter_table(compiler, element.table_name, element.schema), - format_column_name(compiler, element.column_name), - format_column_name(compiler, element.newname), - _mysql_colspec( - compiler, - nullable=element.nullable, - server_default=element.default, - type_=element.type_, - autoincrement=element.autoincrement - ), - ) - -def _render_value(compiler, expr): - if isinstance(expr, string_types): - return "'%s'" % expr - else: - return compiler.sql_compiler.process(expr) - -def _mysql_colspec(compiler, nullable, server_default, type_, - autoincrement): - spec = "%s %s" % ( - compiler.dialect.type_compiler.process(type_), - "NULL" if nullable else "NOT NULL" - ) - if autoincrement: - spec += " AUTO_INCREMENT" - if server_default is not False and server_default is not None: - spec += " DEFAULT %s" % _render_value(compiler, server_default) - - return spec - -@compiles(schema.DropConstraint, "mysql") -def _mysql_drop_constraint(element, compiler, **kw): - """Redefine SQLAlchemy's drop constraint to - raise errors for invalid constraint type.""" - - constraint = element.element - if isinstance(constraint, (schema.ForeignKeyConstraint, - schema.PrimaryKeyConstraint, - schema.UniqueConstraint) - ): - return compiler.visit_drop_constraint(element, **kw) - elif isinstance(constraint, schema.CheckConstraint): - raise NotImplementedError( - "MySQL does not support CHECK constraints.") - else: - raise NotImplementedError( - "No generic 'DROP CONSTRAINT' in MySQL - " - "please specify constraint type") - diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/ddl/oracle.py b/Linux_i686/lib/python2.7/site-packages/alembic/ddl/oracle.py deleted file mode 100644 index 
28eb246..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic/ddl/oracle.py +++ /dev/null @@ -1,77 +0,0 @@ -from sqlalchemy.ext.compiler import compiles - -from .impl import DefaultImpl -from .base import alter_table, AddColumn, ColumnName, \ - format_column_name, ColumnNullable, \ - format_server_default,ColumnDefault, format_type, ColumnType - -class OracleImpl(DefaultImpl): - __dialect__ = 'oracle' - transactional_ddl = True - batch_separator = "/" - command_terminator = "" - - def __init__(self, *arg, **kw): - super(OracleImpl, self).__init__(*arg, **kw) - self.batch_separator = self.context_opts.get( - "oracle_batch_separator", - self.batch_separator) - - def _exec(self, construct, *args, **kw): - super(OracleImpl, self)._exec(construct, *args, **kw) - if self.as_sql and self.batch_separator: - self.static_output(self.batch_separator) - - def emit_begin(self): - self._exec("SET TRANSACTION READ WRITE") - - def emit_commit(self): - self._exec("COMMIT") - -@compiles(AddColumn, 'oracle') -def visit_add_column(element, compiler, **kw): - return "%s %s" % ( - alter_table(compiler, element.table_name, element.schema), - add_column(compiler, element.column, **kw), - ) - -@compiles(ColumnNullable, 'oracle') -def visit_column_nullable(element, compiler, **kw): - return "%s %s %s" % ( - alter_table(compiler, element.table_name, element.schema), - alter_column(compiler, element.column_name), - "NULL" if element.nullable else "NOT NULL" - ) - -@compiles(ColumnType, 'oracle') -def visit_column_type(element, compiler, **kw): - return "%s %s %s" % ( - alter_table(compiler, element.table_name, element.schema), - alter_column(compiler, element.column_name), - "%s" % format_type(compiler, element.type_) - ) - -@compiles(ColumnName, 'oracle') -def visit_column_name(element, compiler, **kw): - return "%s RENAME COLUMN %s TO %s" % ( - alter_table(compiler, element.table_name, element.schema), - format_column_name(compiler, element.column_name), - format_column_name(compiler, element.newname) - ) - -@compiles(ColumnDefault, 'oracle') -def visit_column_default(element, compiler, **kw): - return "%s %s %s" % ( - alter_table(compiler, element.table_name, element.schema), - alter_column(compiler, element.column_name), - "DEFAULT %s" % - format_server_default(compiler, element.default) - if element.default is not None - else "DEFAULT NULL" - ) - -def alter_column(compiler, name): - return 'MODIFY %s' % format_column_name(compiler, name) - -def add_column(compiler, column, **kw): - return "ADD %s" % compiler.get_column_specification(column, **kw) diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/ddl/postgresql.py b/Linux_i686/lib/python2.7/site-packages/alembic/ddl/postgresql.py deleted file mode 100644 index 5ca0d1f..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic/ddl/postgresql.py +++ /dev/null @@ -1,43 +0,0 @@ -import re - -from sqlalchemy import types as sqltypes - -from .base import compiles, alter_table, format_table_name, RenameTable -from .impl import DefaultImpl - -class PostgresqlImpl(DefaultImpl): - __dialect__ = 'postgresql' - transactional_ddl = True - - def compare_server_default(self, inspector_column, - metadata_column, - rendered_metadata_default, - rendered_inspector_default): - - # don't do defaults for SERIAL columns - if metadata_column.primary_key and \ - metadata_column is metadata_column.table._autoincrement_column: - return False - - conn_col_default = rendered_inspector_default - - if None in (conn_col_default, rendered_metadata_default): - return 
conn_col_default != rendered_metadata_default - - if metadata_column.type._type_affinity is not sqltypes.String: - rendered_metadata_default = re.sub(r"^'|'$", "", rendered_metadata_default) - - return not self.connection.scalar( - "SELECT %s = %s" % ( - conn_col_default, - rendered_metadata_default - ) - ) - - -@compiles(RenameTable, "postgresql") -def visit_rename_table(element, compiler, **kw): - return "%s RENAME TO %s" % ( - alter_table(compiler, element.table_name, element.schema), - format_table_name(compiler, element.new_table_name, None) - ) diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/ddl/sqlite.py b/Linux_i686/lib/python2.7/site-packages/alembic/ddl/sqlite.py deleted file mode 100644 index a3c73ce..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic/ddl/sqlite.py +++ /dev/null @@ -1,73 +0,0 @@ -from .. import util -from .impl import DefaultImpl - -#from sqlalchemy.ext.compiler import compiles -#from .base import AddColumn, alter_table -#from sqlalchemy.schema import AddConstraint - -class SQLiteImpl(DefaultImpl): - __dialect__ = 'sqlite' - - transactional_ddl = False - """SQLite supports transactional DDL, but pysqlite does not: - see: http://bugs.python.org/issue10740 - """ - - def add_constraint(self, const): - # attempt to distinguish between an - # auto-gen constraint and an explicit one - if const._create_rule is None: - raise NotImplementedError( - "No support for ALTER of constraints in SQLite dialect") - elif const._create_rule(self): - util.warn("Skipping unsupported ALTER for " - "creation of implicit constraint") - - - def drop_constraint(self, const): - if const._create_rule is None: - raise NotImplementedError( - "No support for ALTER of constraints in SQLite dialect") - - def correct_for_autogen_constraints(self, conn_unique_constraints, conn_indexes, - metadata_unique_constraints, - metadata_indexes): - - def uq_sig(uq): - return tuple(sorted(uq.columns.keys())) - - conn_unique_sigs = set( - uq_sig(uq) - for uq in conn_unique_constraints - ) - - for idx in list(metadata_unique_constraints): - # SQLite backend can't report on unnamed UNIQUE constraints, - # so remove these, unless we see an exact signature match - if idx.name is None and uq_sig(idx) not in conn_unique_sigs: - metadata_unique_constraints.remove(idx) - - for idx in list(conn_unique_constraints): - # just in case we fix the backend such that it does report - # on them, blow them out of the reflected collection too otherwise - # they will come up as removed. if the backend supports this now, - # add a version check here for the dialect. 
- if idx.name is None: - conn_uniques.remove(idx) - -#@compiles(AddColumn, 'sqlite') -#def visit_add_column(element, compiler, **kw): -# return "%s %s" % ( -# alter_table(compiler, element.table_name, element.schema), -# add_column(compiler, element.column, **kw) -# ) - - -#def add_column(compiler, column, **kw): -# text = "ADD COLUMN %s" % compiler.get_column_specification(column, **kw) -# # need to modify SQLAlchemy so that the CHECK associated with a Boolean -# # or Enum gets placed as part of the column constraints, not the Table -# # see ticket 98 -# for const in column.constraints: -# text += compiler.process(AddConstraint(const)) -# return text diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/environment.py b/Linux_i686/lib/python2.7/site-packages/alembic/environment.py deleted file mode 100644 index f8875a2..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic/environment.py +++ /dev/null @@ -1,791 +0,0 @@ -from .operations import Operations -from .migration import MigrationContext -from . import util - -class EnvironmentContext(object): - """Represent the state made available to an ``env.py`` script. - - :class:`.EnvironmentContext` is normally instantiated - by the commands present in the :mod:`alembic.command` - module. From within an ``env.py`` script, the current - :class:`.EnvironmentContext` is available via the - ``alembic.context`` datamember. - - :class:`.EnvironmentContext` is also a Python context - manager, that is, is intended to be used using the - ``with:`` statement. A typical use of :class:`.EnvironmentContext`:: - - from alembic.config import Config - from alembic.script import ScriptDirectory - - config = Config() - config.set_main_option("script_location", "myapp:migrations") - script = ScriptDirectory.from_config(config) - - def my_function(rev, context): - '''do something with revision "rev", which - will be the current database revision, - and "context", which is the MigrationContext - that the env.py will create''' - - with EnvironmentContext( - config, - script, - fn = my_function, - as_sql = False, - starting_rev = 'base', - destination_rev = 'head', - tag = "sometag" - ): - script.run_env() - - The above script will invoke the ``env.py`` script - within the migration environment. If and when ``env.py`` - calls :meth:`.MigrationContext.run_migrations`, the - ``my_function()`` function above will be called - by the :class:`.MigrationContext`, given the context - itself as well as the current revision in the database. - - .. note:: - - For most API usages other than full blown - invocation of migration scripts, the :class:`.MigrationContext` - and :class:`.ScriptDirectory` objects can be created and - used directly. The :class:`.EnvironmentContext` object - is *only* needed when you need to actually invoke the - ``env.py`` module present in the migration environment. - - """ - - _migration_context = None - - config = None - """An instance of :class:`.Config` representing the - configuration file contents as well as other variables - set programmatically within it.""" - - script = None - """An instance of :class:`.ScriptDirectory` which provides - programmatic access to version files within the ``versions/`` - directory. - - """ - - def __init__(self, config, script, **kw): - """Construct a new :class:`.EnvironmentContext`. - - :param config: a :class:`.Config` instance. - :param script: a :class:`.ScriptDirectory` instance. 
- :param \**kw: keyword options that will be ultimately - passed along to the :class:`.MigrationContext` when - :meth:`.EnvironmentContext.configure` is called. - - """ - self.config = config - self.script = script - self.context_opts = kw - - def __enter__(self): - """Establish a context which provides a - :class:`.EnvironmentContext` object to - env.py scripts. - - The :class:`.EnvironmentContext` will - be made available as ``from alembic import context``. - - """ - from .context import _install_proxy - _install_proxy(self) - return self - - def __exit__(self, *arg, **kw): - from . import context, op - context._remove_proxy() - op._remove_proxy() - - def is_offline_mode(self): - """Return True if the current migrations environment - is running in "offline mode". - - This is ``True`` or ``False`` depending - on the the ``--sql`` flag passed. - - This function does not require that the :class:`.MigrationContext` - has been configured. - - """ - return self.context_opts.get('as_sql', False) - - def is_transactional_ddl(self): - """Return True if the context is configured to expect a - transactional DDL capable backend. - - This defaults to the type of database in use, and - can be overridden by the ``transactional_ddl`` argument - to :meth:`.configure` - - This function requires that a :class:`.MigrationContext` - has first been made available via :meth:`.configure`. - - """ - return self.get_context().impl.transactional_ddl - - def requires_connection(self): - return not self.is_offline_mode() - - def get_head_revision(self): - """Return the hex identifier of the 'head' revision. - - This function does not require that the :class:`.MigrationContext` - has been configured. - - """ - return self.script._as_rev_number("head") - - def get_starting_revision_argument(self): - """Return the 'starting revision' argument, - if the revision was passed using ``start:end``. - - This is only meaningful in "offline" mode. - Returns ``None`` if no value is available - or was configured. - - This function does not require that the :class:`.MigrationContext` - has been configured. - - """ - if self._migration_context is not None: - return self.script._as_rev_number( - self.get_context()._start_from_rev) - elif 'starting_rev' in self.context_opts: - return self.script._as_rev_number( - self.context_opts['starting_rev']) - else: - raise util.CommandError( - "No starting revision argument is available.") - - def get_revision_argument(self): - """Get the 'destination' revision argument. - - This is typically the argument passed to the - ``upgrade`` or ``downgrade`` command. - - If it was specified as ``head``, the actual - version number is returned; if specified - as ``base``, ``None`` is returned. - - This function does not require that the :class:`.MigrationContext` - has been configured. - - """ - return self.script._as_rev_number( - self.context_opts['destination_rev']) - - def get_tag_argument(self): - """Return the value passed for the ``--tag`` argument, if any. - - The ``--tag`` argument is not used directly by Alembic, - but is available for custom ``env.py`` configurations that - wish to use it; particularly for offline generation scripts - that wish to generate tagged filenames. - - This function does not require that the :class:`.MigrationContext` - has been configured. - - .. seealso:: - - :meth:`.EnvironmentContext.get_x_argument` - a newer and more - open ended system of extending ``env.py`` scripts via the command - line. 
- - """ - return self.context_opts.get('tag', None) - - def get_x_argument(self, as_dictionary=False): - """Return the value(s) passed for the ``-x`` argument, if any. - - The ``-x`` argument is an open ended flag that allows any user-defined - value or values to be passed on the command line, then available - here for consumption by a custom ``env.py`` script. - - The return value is a list, returned directly from the ``argparse`` - structure. If ``as_dictionary=True`` is passed, the ``x`` arguments - are parsed using ``key=value`` format into a dictionary that is - then returned. - - For example, to support passing a database URL on the command line, - the standard ``env.py`` script can be modified like this:: - - cmd_line_url = context.get_x_argument(as_dictionary=True).get('dbname') - if cmd_line_url: - engine = create_engine(cmd_line_url) - else: - engine = engine_from_config( - config.get_section(config.config_ini_section), - prefix='sqlalchemy.', - poolclass=pool.NullPool) - - This then takes effect by running the ``alembic`` script as:: - - alembic -x dbname=postgresql://user:pass@host/dbname upgrade head - - This function does not require that the :class:`.MigrationContext` - has been configured. - - .. versionadded:: 0.6.0 - - .. seealso:: - - :meth:`.EnvironmentContext.get_tag_argument` - - :attr:`.Config.cmd_opts` - - """ - if self.config.cmd_opts is not None: - value = self.config.cmd_opts.x or [] - else: - value = [] - if as_dictionary: - value = dict( - arg.split('=', 1) for arg in value - ) - return value - - def configure(self, - connection=None, - url=None, - dialect_name=None, - transactional_ddl=None, - transaction_per_migration=False, - output_buffer=None, - starting_rev=None, - tag=None, - template_args=None, - target_metadata=None, - include_symbol=None, - include_object=None, - include_schemas=False, - compare_type=False, - compare_server_default=False, - render_item=None, - upgrade_token="upgrades", - downgrade_token="downgrades", - alembic_module_prefix="op.", - sqlalchemy_module_prefix="sa.", - user_module_prefix=None, - **kw - ): - """Configure a :class:`.MigrationContext` within this - :class:`.EnvironmentContext` which will provide database - connectivity and other configuration to a series of - migration scripts. - - Many methods on :class:`.EnvironmentContext` require that - this method has been called in order to function, as they - ultimately need to have database access or at least access - to the dialect in use. Those which do are documented as such. - - The important thing needed by :meth:`.configure` is a - means to determine what kind of database dialect is in use. - An actual connection to that database is needed only if - the :class:`.MigrationContext` is to be used in - "online" mode. - - If the :meth:`.is_offline_mode` function returns ``True``, - then no connection is needed here. Otherwise, the - ``connection`` parameter should be present as an - instance of :class:`sqlalchemy.engine.Connection`. - - This function is typically called from the ``env.py`` - script within a migration environment. It can be called - multiple times for an invocation. The most recent - :class:`~sqlalchemy.engine.Connection` - for which it was called is the one that will be operated upon - by the next call to :meth:`.run_migrations`. - - General parameters: - - :param connection: a :class:`~sqlalchemy.engine.Connection` - to use - for SQL execution in "online" mode. When present, is also - used to determine the type of dialect in use. 
- :param url: a string database url, or a - :class:`sqlalchemy.engine.url.URL` object. - The type of dialect to be used will be derived from this if - ``connection`` is not passed. - :param dialect_name: string name of a dialect, such as - "postgresql", "mssql", etc. - The type of dialect to be used will be derived from this if - ``connection`` and ``url`` are not passed. - :param transactional_ddl: Force the usage of "transactional" - DDL on or off; - this otherwise defaults to whether or not the dialect in - use supports it. - :param transaction_per_migration: if True, nest each migration script - in a transaction rather than the full series of migrations to - run. - - .. versionadded:: 0.6.5 - - :param output_buffer: a file-like object that will be used - for textual output - when the ``--sql`` option is used to generate SQL scripts. - Defaults to - ``sys.stdout`` if not passed here and also not present on - the :class:`.Config` - object. The value here overrides that of the :class:`.Config` - object. - :param output_encoding: when using ``--sql`` to generate SQL - scripts, apply this encoding to the string output. - - .. versionadded:: 0.5.0 - - :param starting_rev: Override the "starting revision" argument - when using ``--sql`` mode. - :param tag: a string tag for usage by custom ``env.py`` scripts. - Set via the ``--tag`` option, can be overridden here. - :param template_args: dictionary of template arguments which - will be added to the template argument environment when - running the "revision" command. Note that the script environment - is only run within the "revision" command if the --autogenerate - option is used, or if the option "revision_environment=true" - is present in the alembic.ini file. - - .. versionadded:: 0.3.3 - - :param version_table: The name of the Alembic version table. - The default is ``'alembic_version'``. - :param version_table_schema: Optional schema to place version - table within. - - .. versionadded:: 0.5.0 - - Parameters specific to the autogenerate feature, when - ``alembic revision`` is run with the ``--autogenerate`` feature: - - :param target_metadata: a :class:`sqlalchemy.schema.MetaData` - object that - will be consulted during autogeneration. The tables present - will be compared against - what is locally available on the target - :class:`~sqlalchemy.engine.Connection` - to produce candidate upgrade/downgrade operations. - - :param compare_type: Indicates type comparison behavior during - an autogenerate - operation. Defaults to ``False`` which disables type - comparison. Set to - ``True`` to turn on default type comparison, which has varied - accuracy depending on backend. - - To customize type comparison behavior, a callable may be - specified which - can filter type comparisons during an autogenerate operation. - The format of this callable is:: - - def my_compare_type(context, inspected_column, - metadata_column, inspected_type, metadata_type): - # return True if the types are different, - # False if not, or None to allow the default implementation - # to compare these types - return None - - context.configure( - # ... - compare_type = my_compare_type - ) - - - ``inspected_column`` is a :class:`sqlalchemy.schema.Column` as returned by - :meth:`sqlalchemy.engine.reflection.Inspector.reflecttable`, whereas - ``metadata_column`` is a :class:`sqlalchemy.schema.Column` from - the local model environment. - - A return value of ``None`` indicates to allow default type - comparison to proceed. - - .. 
seealso:: - - :paramref:`.EnvironmentContext.configure.compare_server_default` - - :param compare_server_default: Indicates server default comparison - behavior during - an autogenerate operation. Defaults to ``False`` which disables - server default - comparison. Set to ``True`` to turn on server default comparison, - which has - varied accuracy depending on backend. - - To customize server default comparison behavior, a callable may - be specified - which can filter server default comparisons during an - autogenerate operation. - defaults during an autogenerate operation. The format of this - callable is:: - - def my_compare_server_default(context, inspected_column, - metadata_column, inspected_default, metadata_default, - rendered_metadata_default): - # return True if the defaults are different, - # False if not, or None to allow the default implementation - # to compare these defaults - return None - - context.configure( - # ... - compare_server_default = my_compare_server_default - ) - - ``inspected_column`` is a dictionary structure as returned by - :meth:`sqlalchemy.engine.reflection.Inspector.get_columns`, whereas - ``metadata_column`` is a :class:`sqlalchemy.schema.Column` from - the local model environment. - - A return value of ``None`` indicates to allow default server default - comparison - to proceed. Note that some backends such as Postgresql actually - execute - the two defaults on the database side to compare for equivalence. - - .. seealso:: - - :paramref:`.EnvironmentContext.configure.compare_type` - - :param include_object: A callable function which is given - the chance to return ``True`` or ``False`` for any object, - indicating if the given object should be considered in the - autogenerate sweep. - - The function accepts the following positional arguments: - - * ``object``: a :class:`~sqlalchemy.schema.SchemaItem` object such as a - :class:`~sqlalchemy.schema.Table` or :class:`~sqlalchemy.schema.Column` - object - * ``name``: the name of the object. This is typically available - via ``object.name``. - * ``type``: a string describing the type of object; currently - ``"table"`` or ``"column"`` - * ``reflected``: ``True`` if the given object was produced based on - table reflection, ``False`` if it's from a local :class:`.MetaData` - object. - * ``compare_to``: the object being compared against, if available, - else ``None``. - - E.g.:: - - def include_object(object, name, type_, reflected, compare_to): - if (type_ == "column" and - not reflected and - object.info.get("skip_autogenerate", False)): - return False - else: - return True - - context.configure( - # ... - include_object = include_object - ) - - :paramref:`.EnvironmentContext.configure.include_object` can also - be used to filter on specific schemas to include or omit, when - the :paramref:`.EnvironmentContext.configure.include_schemas` - flag is set to ``True``. The :attr:`.Table.schema` attribute - on each :class:`.Table` object reflected will indicate the name of the - schema from which the :class:`.Table` originates. - - .. versionadded:: 0.6.0 - - .. seealso:: - - :paramref:`.EnvironmentContext.configure.include_schemas` - - :param include_symbol: A callable function which, given a table name - and schema name (may be ``None``), returns ``True`` or ``False``, indicating - if the given table should be considered in the autogenerate sweep. - - .. 
deprecated:: 0.6.0 :paramref:`.EnvironmentContext.configure.include_symbol` - is superceded by the more generic - :paramref:`.EnvironmentContext.configure.include_object` - parameter. - - E.g.:: - - def include_symbol(tablename, schema): - return tablename not in ("skip_table_one", "skip_table_two") - - context.configure( - # ... - include_symbol = include_symbol - ) - - .. seealso:: - - :paramref:`.EnvironmentContext.configure.include_schemas` - - :paramref:`.EnvironmentContext.configure.include_object` - - :param include_schemas: If True, autogenerate will scan across - all schemas located by the SQLAlchemy - :meth:`~sqlalchemy.engine.reflection.Inspector.get_schema_names` - method, and include all differences in tables found across all - those schemas. When using this option, you may want to also - use the :paramref:`.EnvironmentContext.configure.include_object` - option to specify a callable which - can filter the tables/schemas that get included. - - .. versionadded :: 0.4.0 - - .. seealso:: - - :paramref:`.EnvironmentContext.configure.include_object` - - :param render_item: Callable that can be used to override how - any schema item, i.e. column, constraint, type, - etc., is rendered for autogenerate. The callable receives a - string describing the type of object, the object, and - the autogen context. If it returns False, the - default rendering method will be used. If it returns None, - the item will not be rendered in the context of a Table - construct, that is, can be used to skip columns or constraints - within op.create_table():: - - def my_render_column(type_, col, autogen_context): - if type_ == "column" and isinstance(col, MySpecialCol): - return repr(col) - else: - return False - - context.configure( - # ... - render_item = my_render_column - ) - - Available values for the type string include: ``"column"``, - ``"primary_key"``, ``"foreign_key"``, ``"unique"``, ``"check"``, - ``"type"``, ``"server_default"``. - - .. versionadded:: 0.5.0 - - .. seealso:: - - :ref:`autogen_render_types` - - :param upgrade_token: When autogenerate completes, the text of the - candidate upgrade operations will be present in this template - variable when ``script.py.mako`` is rendered. Defaults to - ``upgrades``. - :param downgrade_token: When autogenerate completes, the text of the - candidate downgrade operations will be present in this - template variable when ``script.py.mako`` is rendered. Defaults to - ``downgrades``. - - :param alembic_module_prefix: When autogenerate refers to Alembic - :mod:`alembic.operations` constructs, this prefix will be used - (i.e. ``op.create_table``) Defaults to "``op.``". - Can be ``None`` to indicate no prefix. - - :param sqlalchemy_module_prefix: When autogenerate refers to - SQLAlchemy - :class:`~sqlalchemy.schema.Column` or type classes, this prefix - will be used - (i.e. ``sa.Column("somename", sa.Integer)``) Defaults to "``sa.``". - Can be ``None`` to indicate no prefix. - Note that when dialect-specific types are rendered, autogenerate - will render them using the dialect module name, i.e. ``mssql.BIT()``, - ``postgresql.UUID()``. - - :param user_module_prefix: When autogenerate refers to a SQLAlchemy - type (e.g. :class:`.TypeEngine`) where the module name is not - under the ``sqlalchemy`` namespace, this prefix will be used - within autogenerate, if non-``None``; if left at its default of - ``None``, the - :paramref:`.EnvironmentContext.configure.sqlalchemy_module_prefix` - is used instead. - - .. 
versionadded:: 0.6.3 added - :paramref:`.EnvironmentContext.configure.user_module_prefix` - - .. seealso:: - - :ref:`autogen_module_prefix` - - Parameters specific to individual backends: - - :param mssql_batch_separator: The "batch separator" which will - be placed between each statement when generating offline SQL Server - migrations. Defaults to ``GO``. Note this is in addition to the - customary semicolon ``;`` at the end of each statement; SQL Server - considers the "batch separator" to denote the end of an - individual statement execution, and cannot group certain - dependent operations in one step. - :param oracle_batch_separator: The "batch separator" which will - be placed between each statement when generating offline - Oracle migrations. Defaults to ``/``. Oracle doesn't add a - semicolon between statements like most other backends. - - """ - opts = self.context_opts - if transactional_ddl is not None: - opts["transactional_ddl"] = transactional_ddl - if output_buffer is not None: - opts["output_buffer"] = output_buffer - elif self.config.output_buffer is not None: - opts["output_buffer"] = self.config.output_buffer - if starting_rev: - opts['starting_rev'] = starting_rev - if tag: - opts['tag'] = tag - if template_args and 'template_args' in opts: - opts['template_args'].update(template_args) - opts["transaction_per_migration"] = transaction_per_migration - opts['target_metadata'] = target_metadata - opts['include_symbol'] = include_symbol - opts['include_object'] = include_object - opts['include_schemas'] = include_schemas - opts['upgrade_token'] = upgrade_token - opts['downgrade_token'] = downgrade_token - opts['sqlalchemy_module_prefix'] = sqlalchemy_module_prefix - opts['alembic_module_prefix'] = alembic_module_prefix - opts['user_module_prefix'] = user_module_prefix - if render_item is not None: - opts['render_item'] = render_item - if compare_type is not None: - opts['compare_type'] = compare_type - if compare_server_default is not None: - opts['compare_server_default'] = compare_server_default - opts['script'] = self.script - - opts.update(kw) - - self._migration_context = MigrationContext.configure( - connection=connection, - url=url, - dialect_name=dialect_name, - opts=opts - ) - - def run_migrations(self, **kw): - """Run migrations as determined by the current command line - configuration - as well as versioning information present (or not) in the current - database connection (if one is present). - - The function accepts optional ``**kw`` arguments. If these are - passed, they are sent directly to the ``upgrade()`` and - ``downgrade()`` - functions within each target revision file. By modifying the - ``script.py.mako`` file so that the ``upgrade()`` and ``downgrade()`` - functions accept arguments, parameters can be passed here so that - contextual information, usually information to identify a particular - database in use, can be passed from a custom ``env.py`` script - to the migration functions. - - This function requires that a :class:`.MigrationContext` has - first been made available via :meth:`.configure`. - - """ - with Operations.context(self._migration_context): - self.get_context().run_migrations(**kw) - - def execute(self, sql, execution_options=None): - """Execute the given SQL using the current change context. - - The behavior of :meth:`.execute` is the same - as that of :meth:`.Operations.execute`. Please see that - function's documentation for full detail including - caveats and limitations. 
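A brief sketch of direct usage, assuming :meth:`.configure` has already been
called from ``env.py``; the table name and statement are invented for
illustration::

    from alembic import context

    # written to the SQL script in --sql mode, or executed on the
    # current connection in online mode
    context.execute("UPDATE account SET status = 'archived'")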
- - This function requires that a :class:`.MigrationContext` has - first been made available via :meth:`.configure`. - - """ - self.get_context().execute(sql, - execution_options=execution_options) - - def static_output(self, text): - """Emit text directly to the "offline" SQL stream. - - Typically this is for emitting comments that - start with --. The statement is not treated - as a SQL execution, no ; or batch separator - is added, etc. - - """ - self.get_context().impl.static_output(text) - - - def begin_transaction(self): - """Return a context manager that will - enclose an operation within a "transaction", - as defined by the environment's offline - and transactional DDL settings. - - e.g.:: - - with context.begin_transaction(): - context.run_migrations() - - :meth:`.begin_transaction` is intended to - "do the right thing" regardless of - calling context: - - * If :meth:`.is_transactional_ddl` is ``False``, - returns a "do nothing" context manager - which otherwise produces no transactional - state or directives. - * If :meth:`.is_offline_mode` is ``True``, - returns a context manager that will - invoke the :meth:`.DefaultImpl.emit_begin` - and :meth:`.DefaultImpl.emit_commit` - methods, which will produce the string - directives ``BEGIN`` and ``COMMIT`` on - the output stream, as rendered by the - target backend (e.g. SQL Server would - emit ``BEGIN TRANSACTION``). - * Otherwise, calls :meth:`sqlalchemy.engine.Connection.begin` - on the current online connection, which - returns a :class:`sqlalchemy.engine.Transaction` - object. This object demarcates a real - transaction and is itself a context manager, - which will roll back if an exception - is raised. - - Note that a custom ``env.py`` script which - has more specific transactional needs can of course - manipulate the :class:`~sqlalchemy.engine.Connection` - directly to produce transactional state in "online" - mode. - - """ - - return self.get_context().begin_transaction() - - - def get_context(self): - """Return the current :class:`.MigrationContext` object. - - If :meth:`.EnvironmentContext.configure` has not been - called yet, raises an exception. - - """ - - if self._migration_context is None: - raise Exception("No context has been configured yet.") - return self._migration_context - - def get_bind(self): - """Return the current 'bind'. - - In "online" mode, this is the - :class:`sqlalchemy.engine.Connection` currently being used - to emit SQL to the database. - - This function requires that a :class:`.MigrationContext` - has first been made available via :meth:`.configure`. - - """ - return self.get_context().bind - - def get_impl(self): - return self.get_context().impl - diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/migration.py b/Linux_i686/lib/python2.7/site-packages/alembic/migration.py deleted file mode 100644 index e554515..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic/migration.py +++ /dev/null @@ -1,352 +0,0 @@ -import io -import logging -import sys -from contextlib import contextmanager - - -from sqlalchemy import MetaData, Table, Column, String, literal_column -from sqlalchemy import create_engine -from sqlalchemy.engine import url as sqla_url - -from .compat import callable, EncodedIO -from . import ddl, util - -log = logging.getLogger(__name__) - -class MigrationContext(object): - """Represent the database state made available to a migration - script. 
- - :class:`.MigrationContext` is the front end to an actual - database connection, or alternatively a string output - stream given a particular database dialect, - from an Alembic perspective. - - When inside the ``env.py`` script, the :class:`.MigrationContext` - is available via the - :meth:`.EnvironmentContext.get_context` method, - which is available at ``alembic.context``:: - - # from within env.py script - from alembic import context - migration_context = context.get_context() - - For usage outside of an ``env.py`` script, such as for - utility routines that want to check the current version - in the database, the :meth:`.MigrationContext.configure` - method to create new :class:`.MigrationContext` objects. - For example, to get at the current revision in the - database using :meth:`.MigrationContext.get_current_revision`:: - - # in any application, outside of an env.py script - from alembic.migration import MigrationContext - from sqlalchemy import create_engine - - engine = create_engine("postgresql://mydatabase") - conn = engine.connect() - - context = MigrationContext.configure(conn) - current_rev = context.get_current_revision() - - The above context can also be used to produce - Alembic migration operations with an :class:`.Operations` - instance:: - - # in any application, outside of the normal Alembic environment - from alembic.operations import Operations - op = Operations(context) - op.alter_column("mytable", "somecolumn", nullable=True) - - """ - def __init__(self, dialect, connection, opts): - self.opts = opts - self.dialect = dialect - self.script = opts.get('script') - - as_sql = opts.get('as_sql', False) - transactional_ddl = opts.get("transactional_ddl") - - self._transaction_per_migration = opts.get( - "transaction_per_migration", False) - - if as_sql: - self.connection = self._stdout_connection(connection) - assert self.connection is not None - else: - self.connection = connection - self._migrations_fn = opts.get('fn') - self.as_sql = as_sql - - if "output_encoding" in opts: - self.output_buffer = EncodedIO( - opts.get("output_buffer") or sys.stdout, - opts['output_encoding'] - ) - else: - self.output_buffer = opts.get("output_buffer", sys.stdout) - - self._user_compare_type = opts.get('compare_type', False) - self._user_compare_server_default = opts.get( - 'compare_server_default', - False) - version_table = opts.get('version_table', 'alembic_version') - version_table_schema = opts.get('version_table_schema', None) - self._version = Table( - version_table, MetaData(), - Column('version_num', String(32), nullable=False), - schema=version_table_schema) - - self._start_from_rev = opts.get("starting_rev") - self.impl = ddl.DefaultImpl.get_by_dialect(dialect)( - dialect, self.connection, self.as_sql, - transactional_ddl, - self.output_buffer, - opts - ) - log.info("Context impl %s.", self.impl.__class__.__name__) - if self.as_sql: - log.info("Generating static SQL") - log.info("Will assume %s DDL.", - "transactional" if self.impl.transactional_ddl - else "non-transactional") - - @classmethod - def configure(cls, - connection=None, - url=None, - dialect_name=None, - opts={}, - ): - """Create a new :class:`.MigrationContext`. - - This is a factory method usually called - by :meth:`.EnvironmentContext.configure`. - - :param connection: a :class:`~sqlalchemy.engine.Connection` - to use for SQL execution in "online" mode. When present, - is also used to determine the type of dialect in use. 
- :param url: a string database url, or a - :class:`sqlalchemy.engine.url.URL` object. - The type of dialect to be used will be derived from this if - ``connection`` is not passed. - :param dialect_name: string name of a dialect, such as - "postgresql", "mssql", etc. The type of dialect to be used will be - derived from this if ``connection`` and ``url`` are not passed. - :param opts: dictionary of options. Most other options - accepted by :meth:`.EnvironmentContext.configure` are passed via - this dictionary. - - """ - if connection: - dialect = connection.dialect - elif url: - url = sqla_url.make_url(url) - dialect = url.get_dialect()() - elif dialect_name: - url = sqla_url.make_url("%s://" % dialect_name) - dialect = url.get_dialect()() - else: - raise Exception("Connection, url, or dialect_name is required.") - - return MigrationContext(dialect, connection, opts) - - - def begin_transaction(self, _per_migration=False): - transaction_now = _per_migration == self._transaction_per_migration - - if not transaction_now: - @contextmanager - def do_nothing(): - yield - return do_nothing() - - elif not self.impl.transactional_ddl: - @contextmanager - def do_nothing(): - yield - return do_nothing() - elif self.as_sql: - @contextmanager - def begin_commit(): - self.impl.emit_begin() - yield - self.impl.emit_commit() - return begin_commit() - else: - return self.bind.begin() - - def get_current_revision(self): - """Return the current revision, usually that which is present - in the ``alembic_version`` table in the database. - - If this :class:`.MigrationContext` was configured in "offline" - mode, that is with ``as_sql=True``, the ``starting_rev`` - parameter is returned instead, if any. - - """ - if self.as_sql: - return self._start_from_rev - else: - if self._start_from_rev: - raise util.CommandError( - "Can't specify current_rev to context " - "when using a database connection") - self._version.create(self.connection, checkfirst=True) - return self.connection.scalar(self._version.select()) - - _current_rev = get_current_revision - """The 0.2 method name, for backwards compat.""" - - def _update_current_rev(self, old, new): - if old == new: - return - if new is None: - self.impl._exec(self._version.delete()) - elif old is None: - self.impl._exec(self._version.insert(). - values(version_num=literal_column("'%s'" % new)) - ) - else: - self.impl._exec(self._version.update(). - values(version_num=literal_column("'%s'" % new)) - ) - - def run_migrations(self, **kw): - """Run the migration scripts established for this :class:`.MigrationContext`, - if any. - - The commands in :mod:`alembic.command` will set up a function - that is ultimately passed to the :class:`.MigrationContext` - as the ``fn`` argument. This function represents the "work" - that will be done when :meth:`.MigrationContext.run_migrations` - is called, typically from within the ``env.py`` script of the - migration environment. The "work function" then provides an iterable - of version callables and other version information which - in the case of the ``upgrade`` or ``downgrade`` commands are the - list of version scripts to invoke. Other commands yield nothing, - in the case that a command wants to run some other operation - against the database such as the ``current`` or ``stamp`` commands. - - :param \**kw: keyword arguments here will be passed to each - migration callable, that is the ``upgrade()`` or ``downgrade()`` - method within revision scripts. 
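A rough sketch of the keyword pass-through; the ``schema_name`` keyword and the
modified ``upgrade()`` signature are assumptions that also require a matching
``script.py.mako``::

    # in env.py, after configure() has been called
    with context.begin_transaction():
        context.run_migrations(schema_name="tenant_1")

    # in a revision file generated from a customized script.py.mako
    def upgrade(schema_name):
        op.rename_table("old_widgets", "widgets", schema=schema_name)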
- - """ - current_rev = rev = False - stamp_per_migration = not self.impl.transactional_ddl or \ - self._transaction_per_migration - - self.impl.start_migrations() - for change, prev_rev, rev, doc in self._migrations_fn( - self.get_current_revision(), - self): - with self.begin_transaction(_per_migration=True): - if current_rev is False: - current_rev = prev_rev - if self.as_sql and not current_rev: - self._version.create(self.connection) - if doc: - log.info("Running %s %s -> %s, %s", change.__name__, prev_rev, - rev, doc) - else: - log.info("Running %s %s -> %s", change.__name__, prev_rev, rev) - if self.as_sql: - self.impl.static_output( - "-- Running %s %s -> %s" % - (change.__name__, prev_rev, rev) - ) - change(**kw) - if stamp_per_migration: - self._update_current_rev(prev_rev, rev) - prev_rev = rev - - if rev is not False: - if not stamp_per_migration: - self._update_current_rev(current_rev, rev) - - if self.as_sql and not rev: - self._version.drop(self.connection) - - def execute(self, sql, execution_options=None): - """Execute a SQL construct or string statement. - - The underlying execution mechanics are used, that is - if this is "offline mode" the SQL is written to the - output buffer, otherwise the SQL is emitted on - the current SQLAlchemy connection. - - """ - self.impl._exec(sql, execution_options) - - def _stdout_connection(self, connection): - def dump(construct, *multiparams, **params): - self.impl._exec(construct) - - return create_engine("%s://" % self.dialect.name, - strategy="mock", executor=dump) - - @property - def bind(self): - """Return the current "bind". - - In online mode, this is an instance of - :class:`sqlalchemy.engine.Connection`, and is suitable - for ad-hoc execution of any kind of usage described - in :ref:`sqlexpression_toplevel` as well as - for usage with the :meth:`sqlalchemy.schema.Table.create` - and :meth:`sqlalchemy.schema.MetaData.create_all` methods - of :class:`~sqlalchemy.schema.Table`, :class:`~sqlalchemy.schema.MetaData`. - - Note that when "standard output" mode is enabled, - this bind will be a "mock" connection handler that cannot - return results and is only appropriate for a very limited - subset of commands. 
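A short sketch of ad-hoc use of the bind in "online" mode; the ``account``
table referenced here is an assumption::

    # from within env.py, after configure() has been called with a connection
    from alembic import context

    migration_context = context.get_context()
    count = migration_context.bind.execute(
        "SELECT count(*) FROM account").scalar()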
- - """ - return self.connection - - def _compare_type(self, inspector_column, metadata_column): - if self._user_compare_type is False: - return False - - if callable(self._user_compare_type): - user_value = self._user_compare_type( - self, - inspector_column, - metadata_column, - inspector_column.type, - metadata_column.type - ) - if user_value is not None: - return user_value - - return self.impl.compare_type( - inspector_column, - metadata_column) - - def _compare_server_default(self, inspector_column, - metadata_column, - rendered_metadata_default, - rendered_column_default): - - if self._user_compare_server_default is False: - return False - - if callable(self._user_compare_server_default): - user_value = self._user_compare_server_default( - self, - inspector_column, - metadata_column, - rendered_column_default, - metadata_column.server_default, - rendered_metadata_default - ) - if user_value is not None: - return user_value - - return self.impl.compare_server_default( - inspector_column, - metadata_column, - rendered_metadata_default, - rendered_column_default) - diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/op.py b/Linux_i686/lib/python2.7/site-packages/alembic/op.py deleted file mode 100644 index 8e5f777..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic/op.py +++ /dev/null @@ -1,6 +0,0 @@ -from .operations import Operations -from . import util - -# create proxy functions for -# each method on the Operations class. -util.create_module_class_proxy(Operations, globals(), locals()) diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/operations.py b/Linux_i686/lib/python2.7/site-packages/alembic/operations.py deleted file mode 100644 index f1d06a5..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic/operations.py +++ /dev/null @@ -1,1037 +0,0 @@ -from contextlib import contextmanager - -from sqlalchemy.types import NULLTYPE, Integer -from sqlalchemy import schema as sa_schema - -from . import util -from .compat import string_types -from .ddl import impl - -__all__ = ('Operations',) - -try: - from sqlalchemy.sql.naming import conv -except: - conv = None - -class Operations(object): - """Define high level migration operations. - - Each operation corresponds to some schema migration operation, - executed against a particular :class:`.MigrationContext` - which in turn represents connectivity to a database, - or a file output stream. - - While :class:`.Operations` is normally configured as - part of the :meth:`.EnvironmentContext.run_migrations` - method called from an ``env.py`` script, a standalone - :class:`.Operations` instance can be - made for use cases external to regular Alembic - migrations by passing in a :class:`.MigrationContext`:: - - from alembic.migration import MigrationContext - from alembic.operations import Operations - - conn = myengine.connect() - ctx = MigrationContext.configure(conn) - op = Operations(ctx) - - op.alter_column("t", "c", nullable=True) - - """ - def __init__(self, migration_context): - """Construct a new :class:`.Operations` - - :param migration_context: a :class:`.MigrationContext` - instance. 
- - """ - self.migration_context = migration_context - self.impl = migration_context.impl - - @classmethod - @contextmanager - def context(cls, migration_context): - from .op import _install_proxy, _remove_proxy - op = Operations(migration_context) - _install_proxy(op) - yield op - _remove_proxy() - - - def _primary_key_constraint(self, name, table_name, cols, schema=None): - m = self._metadata() - columns = [sa_schema.Column(n, NULLTYPE) for n in cols] - t1 = sa_schema.Table(table_name, m, - *columns, - schema=schema) - p = sa_schema.PrimaryKeyConstraint(*columns, name=name) - t1.append_constraint(p) - return p - - def _foreign_key_constraint(self, name, source, referent, - local_cols, remote_cols, - onupdate=None, ondelete=None, - deferrable=None, source_schema=None, - referent_schema=None, initially=None, - match=None, **dialect_kw): - m = self._metadata() - if source == referent: - t1_cols = local_cols + remote_cols - else: - t1_cols = local_cols - sa_schema.Table(referent, m, - *[sa_schema.Column(n, NULLTYPE) for n in remote_cols], - schema=referent_schema) - - t1 = sa_schema.Table(source, m, - *[sa_schema.Column(n, NULLTYPE) for n in t1_cols], - schema=source_schema) - - tname = "%s.%s" % (referent_schema, referent) if referent_schema \ - else referent - f = sa_schema.ForeignKeyConstraint(local_cols, - ["%s.%s" % (tname, n) - for n in remote_cols], - name=name, - onupdate=onupdate, - ondelete=ondelete, - deferrable=deferrable, - initially=initially, - match=match, - **dialect_kw - ) - t1.append_constraint(f) - - return f - - def _unique_constraint(self, name, source, local_cols, schema=None, **kw): - t = sa_schema.Table(source, self._metadata(), - *[sa_schema.Column(n, NULLTYPE) for n in local_cols], - schema=schema) - kw['name'] = name - uq = sa_schema.UniqueConstraint(*[t.c[n] for n in local_cols], **kw) - # TODO: need event tests to ensure the event - # is fired off here - t.append_constraint(uq) - return uq - - def _check_constraint(self, name, source, condition, schema=None, **kw): - t = sa_schema.Table(source, self._metadata(), - sa_schema.Column('x', Integer), schema=schema) - ck = sa_schema.CheckConstraint(condition, name=name, **kw) - t.append_constraint(ck) - return ck - - def _metadata(self): - kw = {} - if 'target_metadata' in self.migration_context.opts: - mt = self.migration_context.opts['target_metadata'] - if hasattr(mt, 'naming_convention'): - kw['naming_convention'] = mt.naming_convention - return sa_schema.MetaData(**kw) - - def _table(self, name, *columns, **kw): - m = self._metadata() - t = sa_schema.Table(name, m, *columns, **kw) - for f in t.foreign_keys: - self._ensure_table_for_fk(m, f) - return t - - def _column(self, name, type_, **kw): - return sa_schema.Column(name, type_, **kw) - - def _index(self, name, tablename, columns, schema=None, **kw): - t = sa_schema.Table(tablename or 'no_table', self._metadata(), - *[sa_schema.Column(n, NULLTYPE) for n in columns], - schema=schema - ) - return sa_schema.Index(name, *[t.c[n] for n in columns], **kw) - - def _parse_table_key(self, table_key): - if '.' in table_key: - tokens = table_key.split('.') - sname = ".".join(tokens[0:-1]) - tname = tokens[-1] - else: - tname = table_key - sname = None - return (sname, tname) - - def _ensure_table_for_fk(self, metadata, fk): - """create a placeholder Table object for the referent of a - ForeignKey. 
- - """ - if isinstance(fk._colspec, string_types): - table_key, cname = fk._colspec.rsplit('.', 1) - sname, tname = self._parse_table_key(table_key) - if table_key not in metadata.tables: - rel_t = sa_schema.Table(tname, metadata, schema=sname) - else: - rel_t = metadata.tables[table_key] - if cname not in rel_t.c: - rel_t.append_column(sa_schema.Column(cname, NULLTYPE)) - - def get_context(self): - """Return the :class:`.MigrationContext` object that's - currently in use. - - """ - - return self.migration_context - - def rename_table(self, old_table_name, new_table_name, schema=None): - """Emit an ALTER TABLE to rename a table. - - :param old_table_name: old name. - :param new_table_name: new name. - :param schema: Optional schema name to operate within. - - """ - self.impl.rename_table( - old_table_name, - new_table_name, - schema=schema - ) - - @util._with_legacy_names([('name', 'new_column_name')]) - def alter_column(self, table_name, column_name, - nullable=None, - server_default=False, - new_column_name=None, - type_=None, - autoincrement=None, - existing_type=None, - existing_server_default=False, - existing_nullable=None, - existing_autoincrement=None, - schema=None - ): - """Issue an "alter column" instruction using the - current migration context. - - Generally, only that aspect of the column which - is being changed, i.e. name, type, nullability, - default, needs to be specified. Multiple changes - can also be specified at once and the backend should - "do the right thing", emitting each change either - separately or together as the backend allows. - - MySQL has special requirements here, since MySQL - cannot ALTER a column without a full specification. - When producing MySQL-compatible migration files, - it is recommended that the ``existing_type``, - ``existing_server_default``, and ``existing_nullable`` - parameters be present, if not being altered. - - Type changes which are against the SQLAlchemy - "schema" types :class:`~sqlalchemy.types.Boolean` - and :class:`~sqlalchemy.types.Enum` may also - add or drop constraints which accompany those - types on backends that don't support them natively. - The ``existing_server_default`` argument is - used in this case as well to remove a previous - constraint. - - :param table_name: string name of the target table. - :param column_name: string name of the target column, - as it exists before the operation begins. - :param nullable: Optional; specify ``True`` or ``False`` - to alter the column's nullability. - :param server_default: Optional; specify a string - SQL expression, :func:`~sqlalchemy.sql.expression.text`, - or :class:`~sqlalchemy.schema.DefaultClause` to indicate - an alteration to the column's default value. - Set to ``None`` to have the default removed. - :param new_column_name: Optional; specify a string name here to - indicate the new name within a column rename operation. - - .. versionchanged:: 0.5.0 - The ``name`` parameter is now named ``new_column_name``. - The old name will continue to function for backwards - compatibility. - - :param ``type_``: Optional; a :class:`~sqlalchemy.types.TypeEngine` - type object to specify a change to the column's type. - For SQLAlchemy types that also indicate a constraint (i.e. - :class:`~sqlalchemy.types.Boolean`, :class:`~sqlalchemy.types.Enum`), - the constraint is also generated. - :param autoincrement: set the ``AUTO_INCREMENT`` flag of the column; - currently understood by the MySQL dialect. 
- :param existing_type: Optional; a - :class:`~sqlalchemy.types.TypeEngine` - type object to specify the previous type. This - is required for all MySQL column alter operations that - don't otherwise specify a new type, as well as for - when nullability is being changed on a SQL Server - column. It is also used if the type is a so-called - SQLlchemy "schema" type which may define a constraint (i.e. - :class:`~sqlalchemy.types.Boolean`, - :class:`~sqlalchemy.types.Enum`), - so that the constraint can be dropped. - :param existing_server_default: Optional; The existing - default value of the column. Required on MySQL if - an existing default is not being changed; else MySQL - removes the default. - :param existing_nullable: Optional; the existing nullability - of the column. Required on MySQL if the existing nullability - is not being changed; else MySQL sets this to NULL. - :param existing_autoincrement: Optional; the existing autoincrement - of the column. Used for MySQL's system of altering a column - that specifies ``AUTO_INCREMENT``. - :param schema: Optional schema name to operate within. - - .. versionadded:: 0.4.0 - - """ - - compiler = self.impl.dialect.statement_compiler( - self.impl.dialect, - None - ) - def _count_constraint(constraint): - return not isinstance(constraint, sa_schema.PrimaryKeyConstraint) and \ - (not constraint._create_rule or - constraint._create_rule(compiler)) - - if existing_type and type_: - t = self._table(table_name, - sa_schema.Column(column_name, existing_type), - schema=schema - ) - for constraint in t.constraints: - if _count_constraint(constraint): - self.impl.drop_constraint(constraint) - - self.impl.alter_column(table_name, column_name, - nullable=nullable, - server_default=server_default, - name=new_column_name, - type_=type_, - schema=schema, - autoincrement=autoincrement, - existing_type=existing_type, - existing_server_default=existing_server_default, - existing_nullable=existing_nullable, - existing_autoincrement=existing_autoincrement - ) - - if type_: - t = self._table(table_name, - sa_schema.Column(column_name, type_), - schema=schema - ) - for constraint in t.constraints: - if _count_constraint(constraint): - self.impl.add_constraint(constraint) - - def f(self, name): - """Indicate a string name that has already had a naming convention - applied to it. - - This feature combines with the SQLAlchemy ``naming_convention`` feature - to disambiguate constraint names that have already had naming - conventions applied to them, versus those that have not. This is - necessary in the case that the ``"%(constraint_name)s"`` token - is used within a naming convention, so that it can be identified - that this particular name should remain fixed. - - If the :meth:`.Operations.f` is used on a constraint, the naming - convention will not take effect:: - - op.add_column('t', 'x', Boolean(name=op.f('ck_bool_t_x'))) - - Above, the CHECK constraint generated will have the name ``ck_bool_t_x`` - regardless of whether or not a naming convention is in use. - - Alternatively, if a naming convention is in use, and 'f' is not used, - names will be converted along conventions. 
If the ``target_metadata`` - contains the naming convention - ``{"ck": "ck_bool_%(table_name)s_%(constraint_name)s"}``, then the - output of the following: - - op.add_column('t', 'x', Boolean(name='x')) - - will be:: - - CONSTRAINT ck_bool_t_x CHECK (x in (1, 0))) - - The function is rendered in the output of autogenerate when - a particular constraint name is already converted, for SQLAlchemy - version **0.9.4 and greater only**. Even though ``naming_convention`` - was introduced in 0.9.2, the string disambiguation service is new - as of 0.9.4. - - .. versionadded:: 0.6.4 - - """ - if conv: - return conv(name) - else: - raise NotImplementedError( - "op.f() feature requires SQLAlchemy 0.9.4 or greater.") - - def add_column(self, table_name, column, schema=None): - """Issue an "add column" instruction using the current - migration context. - - e.g.:: - - from alembic import op - from sqlalchemy import Column, String - - op.add_column('organization', - Column('name', String()) - ) - - The provided :class:`~sqlalchemy.schema.Column` object can also - specify a :class:`~sqlalchemy.schema.ForeignKey`, referencing - a remote table name. Alembic will automatically generate a stub - "referenced" table and emit a second ALTER statement in order - to add the constraint separately:: - - from alembic import op - from sqlalchemy import Column, INTEGER, ForeignKey - - op.add_column('organization', - Column('account_id', INTEGER, ForeignKey('accounts.id')) - ) - - Note that this statement uses the :class:`~sqlalchemy.schema.Column` - construct as is from the SQLAlchemy library. In particular, - default values to be created on the database side are - specified using the ``server_default`` parameter, and not - ``default`` which only specifies Python-side defaults:: - - from alembic import op - from sqlalchemy import Column, TIMESTAMP, func - - # specify "DEFAULT NOW" along with the column add - op.add_column('account', - Column('timestamp', TIMESTAMP, server_default=func.now()) - ) - - :param table_name: String name of the parent table. - :param column: a :class:`sqlalchemy.schema.Column` object - representing the new column. - :param schema: Optional schema name to operate within. - - .. versionadded:: 0.4.0 - - """ - - t = self._table(table_name, column, schema=schema) - self.impl.add_column( - table_name, - column, - schema=schema - ) - for constraint in t.constraints: - if not isinstance(constraint, sa_schema.PrimaryKeyConstraint): - self.impl.add_constraint(constraint) - - def drop_column(self, table_name, column_name, **kw): - """Issue a "drop column" instruction using the current - migration context. - - e.g.:: - - drop_column('organization', 'account_id') - - :param table_name: name of table - :param column_name: name of column - :param schema: Optional schema name to operate within. - - .. versionadded:: 0.4.0 - - :param mssql_drop_check: Optional boolean. When ``True``, on - Microsoft SQL Server only, first - drop the CHECK constraint on the column using a - SQL-script-compatible - block that selects into a @variable from sys.check_constraints, - then exec's a separate DROP CONSTRAINT for that constraint. - :param mssql_drop_default: Optional boolean. When ``True``, on - Microsoft SQL Server only, first - drop the DEFAULT constraint on the column using a - SQL-script-compatible - block that selects into a @variable from sys.default_constraints, - then exec's a separate DROP CONSTRAINT for that default. - :param mssql_drop_foreign_key: Optional boolean. 
When ``True``, on - Microsoft SQL Server only, first - drop a single FOREIGN KEY constraint on the column using a - SQL-script-compatible - block that selects into a @variable from - sys.foreign_keys/sys.foreign_key_columns, - then exec's a separate DROP CONSTRAINT for that default. Only - works if the column has exactly one FK constraint which refers to - it, at the moment. - - .. versionadded:: 0.6.2 - - """ - - self.impl.drop_column( - table_name, - self._column(column_name, NULLTYPE), - **kw - ) - - - def create_primary_key(self, name, table_name, cols, schema=None): - """Issue a "create primary key" instruction using the current - migration context. - - e.g.:: - - from alembic import op - op.create_primary_key( - "pk_my_table", "my_table", - ["id", "version"] - ) - - This internally generates a :class:`~sqlalchemy.schema.Table` object - containing the necessary columns, then generates a new - :class:`~sqlalchemy.schema.PrimaryKeyConstraint` - object which it then associates with the :class:`~sqlalchemy.schema.Table`. - Any event listeners associated with this action will be fired - off normally. The :class:`~sqlalchemy.schema.AddConstraint` - construct is ultimately used to generate the ALTER statement. - - .. versionadded:: 0.5.0 - - :param name: Name of the primary key constraint. The name is necessary - so that an ALTER statement can be emitted. For setups that - use an automated naming scheme such as that described at - `NamingConventions `_, - ``name`` here can be ``None``, as the event listener will - apply the name to the constraint object when it is associated - with the table. - :param table_name: String name of the target table. - :param cols: a list of string column names to be applied to the - primary key constraint. - :param schema: Optional schema name of the table. - - """ - self.impl.add_constraint( - self._primary_key_constraint(name, table_name, cols, - schema) - ) - - - def create_foreign_key(self, name, source, referent, local_cols, - remote_cols, onupdate=None, ondelete=None, - deferrable=None, initially=None, match=None, - source_schema=None, referent_schema=None, - **dialect_kw): - """Issue a "create foreign key" instruction using the - current migration context. - - e.g.:: - - from alembic import op - op.create_foreign_key( - "fk_user_address", "address", - "user", ["user_id"], ["id"]) - - This internally generates a :class:`~sqlalchemy.schema.Table` object - containing the necessary columns, then generates a new - :class:`~sqlalchemy.schema.ForeignKeyConstraint` - object which it then associates with the :class:`~sqlalchemy.schema.Table`. - Any event listeners associated with this action will be fired - off normally. The :class:`~sqlalchemy.schema.AddConstraint` - construct is ultimately used to generate the ALTER statement. - - :param name: Name of the foreign key constraint. The name is necessary - so that an ALTER statement can be emitted. For setups that - use an automated naming scheme such as that described at - `NamingConventions `_, - ``name`` here can be ``None``, as the event listener will - apply the name to the constraint object when it is associated - with the table. - :param source: String name of the source table. - :param referent: String name of the destination table. - :param local_cols: a list of string column names in the - source table. - :param remote_cols: a list of string column names in the - remote table. - :param onupdate: Optional string. If set, emit ON UPDATE when - issuing DDL for this constraint. 
Typical values include CASCADE, - DELETE and RESTRICT. - :param ondelete: Optional string. If set, emit ON DELETE when - issuing DDL for this constraint. Typical values include CASCADE, - DELETE and RESTRICT. - :param deferrable: optional bool. If set, emit DEFERRABLE or NOT - DEFERRABLE when issuing DDL for this constraint. - :param source_schema: Optional schema name of the source table. - :param referent_schema: Optional schema name of the destination table. - - """ - - self.impl.add_constraint( - self._foreign_key_constraint(name, source, referent, - local_cols, remote_cols, - onupdate=onupdate, ondelete=ondelete, - deferrable=deferrable, source_schema=source_schema, - referent_schema=referent_schema, - initially=initially, match=match, **dialect_kw) - ) - - def create_unique_constraint(self, name, source, local_cols, - schema=None, **kw): - """Issue a "create unique constraint" instruction using the - current migration context. - - e.g.:: - - from alembic import op - op.create_unique_constraint("uq_user_name", "user", ["name"]) - - This internally generates a :class:`~sqlalchemy.schema.Table` object - containing the necessary columns, then generates a new - :class:`~sqlalchemy.schema.UniqueConstraint` - object which it then associates with the :class:`~sqlalchemy.schema.Table`. - Any event listeners associated with this action will be fired - off normally. The :class:`~sqlalchemy.schema.AddConstraint` - construct is ultimately used to generate the ALTER statement. - - :param name: Name of the unique constraint. The name is necessary - so that an ALTER statement can be emitted. For setups that - use an automated naming scheme such as that described at - `NamingConventions `_, - ``name`` here can be ``None``, as the event listener will - apply the name to the constraint object when it is associated - with the table. - :param source: String name of the source table. Dotted schema names are - supported. - :param local_cols: a list of string column names in the - source table. - :param deferrable: optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when - issuing DDL for this constraint. - :param initially: optional string. If set, emit INITIALLY when issuing DDL - for this constraint. - :param schema: Optional schema name to operate within. - - .. versionadded:: 0.4.0 - - """ - - self.impl.add_constraint( - self._unique_constraint(name, source, local_cols, - schema=schema, **kw) - ) - - def create_check_constraint(self, name, source, condition, - schema=None, **kw): - """Issue a "create check constraint" instruction using the - current migration context. - - e.g.:: - - from alembic import op - from sqlalchemy.sql import column, func - - op.create_check_constraint( - "ck_user_name_len", - "user", - func.len(column('name')) > 5 - ) - - CHECK constraints are usually against a SQL expression, so ad-hoc - table metadata is usually needed. The function will convert the given - arguments into a :class:`sqlalchemy.schema.CheckConstraint` bound - to an anonymous table in order to emit the CREATE statement. - - :param name: Name of the check constraint. The name is necessary - so that an ALTER statement can be emitted. For setups that - use an automated naming scheme such as that described at - `NamingConventions `_, - ``name`` here can be ``None``, as the event listener will - apply the name to the constraint object when it is associated - with the table. - :param source: String name of the source table. - :param condition: SQL expression that's the condition of the constraint. 
- Can be a string or SQLAlchemy expression language structure. - :param deferrable: optional bool. If set, emit DEFERRABLE or NOT DEFERRABLE when - issuing DDL for this constraint. - :param initially: optional string. If set, emit INITIALLY when issuing DDL - for this constraint. - :param schema: Optional schema name to operate within. - - ..versionadded:: 0.4.0 - - """ - self.impl.add_constraint( - self._check_constraint(name, source, condition, schema=schema, **kw) - ) - - def create_table(self, name, *columns, **kw): - """Issue a "create table" instruction using the current migration context. - - This directive receives an argument list similar to that of the - traditional :class:`sqlalchemy.schema.Table` construct, but without the - metadata:: - - from sqlalchemy import INTEGER, VARCHAR, NVARCHAR, Column - from alembic import op - - op.create_table( - 'account', - Column('id', INTEGER, primary_key=True), - Column('name', VARCHAR(50), nullable=False), - Column('description', NVARCHAR(200)) - Column('timestamp', TIMESTAMP, server_default=func.now()) - ) - - Note that :meth:`.create_table` accepts :class:`~sqlalchemy.schema.Column` - constructs directly from the SQLAlchemy library. In particular, - default values to be created on the database side are - specified using the ``server_default`` parameter, and not - ``default`` which only specifies Python-side defaults:: - - from alembic import op - from sqlalchemy import Column, TIMESTAMP, func - - # specify "DEFAULT NOW" along with the "timestamp" column - op.create_table('account', - Column('id', INTEGER, primary_key=True), - Column('timestamp', TIMESTAMP, server_default=func.now()) - ) - - :param name: Name of the table - :param \*columns: collection of :class:`~sqlalchemy.schema.Column` - objects within - the table, as well as optional :class:`~sqlalchemy.schema.Constraint` - objects - and :class:`~.sqlalchemy.schema.Index` objects. - :param schema: Optional schema name to operate within. - :param \**kw: Other keyword arguments are passed to the underlying - :class:`sqlalchemy.schema.Table` object created for the command. - - """ - self.impl.create_table( - self._table(name, *columns, **kw) - ) - - def drop_table(self, name, **kw): - """Issue a "drop table" instruction using the current - migration context. - - - e.g.:: - - drop_table("accounts") - - :param name: Name of the table - :param schema: Optional schema name to operate within. - - .. versionadded:: 0.4.0 - - :param \**kw: Other keyword arguments are passed to the underlying - :class:`sqlalchemy.schema.Table` object created for the command. - - """ - self.impl.drop_table( - self._table(name, **kw) - ) - - def create_index(self, name, table_name, columns, schema=None, **kw): - """Issue a "create index" instruction using the current - migration context. - - e.g.:: - - from alembic import op - op.create_index('ik_test', 't1', ['foo', 'bar']) - - :param name: name of the index. - :param table_name: name of the owning table. - - .. versionchanged:: 0.5.0 - The ``tablename`` parameter is now named ``table_name``. - As this is a positional argument, the old name is no - longer present. - - :param columns: a list of string column names in the - table. - :param schema: Optional schema name to operate within. - - .. 
versionadded:: 0.4.0 - - """ - - self.impl.create_index( - self._index(name, table_name, columns, schema=schema, **kw) - ) - - @util._with_legacy_names([('tablename', 'table_name')]) - def drop_index(self, name, table_name=None, schema=None): - """Issue a "drop index" instruction using the current - migration context. - - e.g.:: - - drop_index("accounts") - - :param name: name of the index. - :param table_name: name of the owning table. Some - backends such as Microsoft SQL Server require this. - - .. versionchanged:: 0.5.0 - The ``tablename`` parameter is now named ``table_name``. - The old name will continue to function for backwards - compatibility. - - :param schema: Optional schema name to operate within. - - .. versionadded:: 0.4.0 - - """ - # need a dummy column name here since SQLAlchemy - # 0.7.6 and further raises on Index with no columns - self.impl.drop_index( - self._index(name, table_name, ['x'], schema=schema) - ) - - @util._with_legacy_names([("type", "type_")]) - def drop_constraint(self, name, table_name, type_=None, schema=None): - """Drop a constraint of the given name, typically via DROP CONSTRAINT. - - :param name: name of the constraint. - :param table_name: table name. - - .. versionchanged:: 0.5.0 - The ``tablename`` parameter is now named ``table_name``. - As this is a positional argument, the old name is no - longer present. - - :param ``type_``: optional, required on MySQL. can be - 'foreignkey', 'primary', 'unique', or 'check'. - - .. versionchanged:: 0.5.0 - The ``type`` parameter is now named ``type_``. The old name - ``type`` will remain for backwards compatibility. - - .. versionadded:: 0.3.6 'primary' qualfier to enable - dropping of MySQL primary key constraints. - - :param schema: Optional schema name to operate within. - - .. versionadded:: 0.4.0 - - """ - - t = self._table(table_name, schema=schema) - types = { - 'foreignkey': lambda name: sa_schema.ForeignKeyConstraint( - [], [], name=name), - 'primary': sa_schema.PrimaryKeyConstraint, - 'unique': sa_schema.UniqueConstraint, - 'check': lambda name: sa_schema.CheckConstraint("", name=name), - None: sa_schema.Constraint - } - try: - const = types[type_] - except KeyError: - raise TypeError("'type' can be one of %s" % - ", ".join(sorted(repr(x) for x in types))) - - const = const(name=name) - t.append_constraint(const) - self.impl.drop_constraint(const) - - def bulk_insert(self, table, rows, multiinsert=True): - """Issue a "bulk insert" operation using the current - migration context. - - This provides a means of representing an INSERT of multiple rows - which works equally well in the context of executing on a live - connection as well as that of generating a SQL script. In the - case of a SQL script, the values are rendered inline into the - statement. - - e.g.:: - - from alembic import op - from datetime import date - from sqlalchemy.sql import table, column - from sqlalchemy import String, Integer, Date - - # Create an ad-hoc table to use for the insert statement. - accounts_table = table('account', - column('id', Integer), - column('name', String), - column('create_date', Date) - ) - - op.bulk_insert(accounts_table, - [ - {'id':1, 'name':'John Smith', - 'create_date':date(2010, 10, 5)}, - {'id':2, 'name':'Ed Williams', - 'create_date':date(2007, 5, 27)}, - {'id':3, 'name':'Wendy Jones', - 'create_date':date(2008, 8, 15)}, - ] - ) - - When using --sql mode, some datatypes may not render inline automatically, - such as dates and other special types. 
When this issue is present, - :meth:`.Operations.inline_literal` may be used:: - - op.bulk_insert(accounts_table, - [ - {'id':1, 'name':'John Smith', - 'create_date':op.inline_literal("2010-10-05")}, - {'id':2, 'name':'Ed Williams', - 'create_date':op.inline_literal("2007-05-27")}, - {'id':3, 'name':'Wendy Jones', - 'create_date':op.inline_literal("2008-08-15")}, - ], - multiinsert=False - ) - - When using :meth:`.Operations.inline_literal` in conjunction with - :meth:`.Operations.bulk_insert`, in order for the statement to work - in "online" (e.g. non --sql) mode, the - :paramref:`~.Operations.bulk_insert.multiinsert` - flag should be set to ``False``, which will have the effect of - individual INSERT statements being emitted to the database, each - with a distinct VALUES clause, so that the "inline" values can - still be rendered, rather than attempting to pass the values - as bound parameters. - - .. versionadded:: 0.6.4 :meth:`.Operations.inline_literal` can now - be used with :meth:`.Operations.bulk_insert`, and the - :paramref:`~.Operations.bulk_insert.multiinsert` flag has - been added to assist in this usage when running in "online" - mode. - - :param table: a table object which represents the target of the INSERT. - - :param rows: a list of dictionaries indicating rows. - - :param multiinsert: when at its default of True and --sql mode is not - enabled, the INSERT statement will be executed using - "executemany()" style, where all elements in the list of dictionaries - are passed as bound parameters in a single list. Setting this - to False results in individual INSERT statements being emitted - per parameter set, and is needed in those cases where non-literal - values are present in the parameter sets. - - .. versionadded:: 0.6.4 - - """ - self.impl.bulk_insert(table, rows, multiinsert=multiinsert) - - def inline_literal(self, value, type_=None): - """Produce an 'inline literal' expression, suitable for - using in an INSERT, UPDATE, or DELETE statement. - - When using Alembic in "offline" mode, CRUD operations - aren't compatible with SQLAlchemy's default behavior surrounding - literal values, - which is that they are converted into bound values and passed - separately into the ``execute()`` method of the DBAPI cursor. - An offline SQL - script needs to have these rendered inline. While it should - always be noted that inline literal values are an **enormous** - security hole in an application that handles untrusted input, - a schema migration is not run in this context, so - literals are safe to render inline, with the caveat that - advanced types like dates may not be supported directly - by SQLAlchemy. - - See :meth:`.execute` for an example usage of - :meth:`.inline_literal`. - - :param value: The value to render. Strings, integers, and simple - numerics should be supported. Other types like boolean, - dates, etc. may or may not be supported yet by various - backends. - :param ``type_``: optional - a :class:`sqlalchemy.types.TypeEngine` - subclass stating the type of this value. In SQLAlchemy - expressions, this is usually derived automatically - from the Python type of the value itself, as well as - based on the context in which the value is used. - - """ - return impl._literal_bindparam(None, value, type_=type_) - - def execute(self, sql, execution_options=None): - """Execute the given SQL using the current migration context. - - In a SQL script context, the statement is emitted directly to the - output stream. 
There is *no* return result, however, as this - function is oriented towards generating a change script - that can run in "offline" mode. For full interaction - with a connected database, use the "bind" available - from the context:: - - from alembic import op - connection = op.get_bind() - - Also note that any parameterized statement here *will not work* - in offline mode - INSERT, UPDATE and DELETE statements which refer - to literal values would need to render - inline expressions. For simple use cases, the - :meth:`.inline_literal` function can be used for **rudimentary** - quoting of string values. For "bulk" inserts, consider using - :meth:`.bulk_insert`. - - For example, to emit an UPDATE statement which is equally - compatible with both online and offline mode:: - - from sqlalchemy.sql import table, column - from sqlalchemy import String - from alembic import op - - account = table('account', - column('name', String) - ) - op.execute( - account.update().\\ - where(account.c.name==op.inline_literal('account 1')).\\ - values({'name':op.inline_literal('account 2')}) - ) - - Note above we also used the SQLAlchemy - :func:`sqlalchemy.sql.expression.table` - and :func:`sqlalchemy.sql.expression.column` constructs to make a brief, - ad-hoc table construct just for our UPDATE statement. A full - :class:`~sqlalchemy.schema.Table` construct of course works perfectly - fine as well, though note it's a recommended practice to at least ensure - the definition of a table is self-contained within the migration script, - rather than imported from a module that may break compatibility with - older migrations. - - :param sql: Any legal SQLAlchemy expression, including: - - * a string - * a :func:`sqlalchemy.sql.expression.text` construct. - * a :func:`sqlalchemy.sql.expression.insert` construct. - * a :func:`sqlalchemy.sql.expression.update`, - :func:`sqlalchemy.sql.expression.insert`, - or :func:`sqlalchemy.sql.expression.delete` construct. - * Pretty much anything that's "executable" as described - in :ref:`sqlexpression_toplevel`. - - :param execution_options: Optional dictionary of - execution options, will be passed to - :meth:`sqlalchemy.engine.Connection.execution_options`. - """ - self.migration_context.impl.execute(sql, - execution_options=execution_options) - - def get_bind(self): - """Return the current 'bind'. - - Under normal circumstances, this is the - :class:`~sqlalchemy.engine.Connection` currently being used - to emit SQL to the database. - - In a SQL script context, this value is ``None``. [TODO: verify this] - - """ - return self.migration_context.impl.bind - diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/script.py b/Linux_i686/lib/python2.7/site-packages/alembic/script.py deleted file mode 100644 index 3294366..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic/script.py +++ /dev/null @@ -1,513 +0,0 @@ -import datetime -import os -import re -import shutil -from . import util - -_sourceless_rev_file = re.compile(r'(.*\.py)(c|o)?$') -_only_source_rev_file = re.compile(r'(.*\.py)$') -_legacy_rev = re.compile(r'([a-f0-9]+)\.py$') -_mod_def_re = re.compile(r'(upgrade|downgrade)_([a-z0-9]+)') -_slug_re = re.compile(r'\w+') -_default_file_template = "%(rev)s_%(slug)s" -_relative_destination = re.compile(r'(?:\+|-)\d+') - -class ScriptDirectory(object): - """Provides operations upon an Alembic script directory. 
- - This object is useful to get information as to current revisions, - most notably being able to get at the "head" revision, for schemes - that want to test if the current revision in the database is the most - recent:: - - from alembic.script import ScriptDirectory - from alembic.config import Config - config = Config() - config.set_main_option("script_location", "myapp:migrations") - script = ScriptDirectory.from_config(config) - - head_revision = script.get_current_head() - - - - """ - def __init__(self, dir, file_template=_default_file_template, - truncate_slug_length=40, - sourceless=False): - self.dir = dir - self.versions = os.path.join(self.dir, 'versions') - self.file_template = file_template - self.truncate_slug_length = truncate_slug_length or 40 - self.sourceless = sourceless - - if not os.access(dir, os.F_OK): - raise util.CommandError("Path doesn't exist: %r. Please use " - "the 'init' command to create a new " - "scripts folder." % dir) - - @classmethod - def from_config(cls, config): - """Produce a new :class:`.ScriptDirectory` given a :class:`.Config` - instance. - - The :class:`.Config` need only have the ``script_location`` key - present. - - """ - script_location = config.get_main_option('script_location') - if script_location is None: - raise util.CommandError("No 'script_location' key " - "found in configuration.") - truncate_slug_length = config.get_main_option("truncate_slug_length") - if truncate_slug_length is not None: - truncate_slug_length = int(truncate_slug_length) - return ScriptDirectory( - util.coerce_resource_to_filename(script_location), - file_template=config.get_main_option( - 'file_template', - _default_file_template), - truncate_slug_length=truncate_slug_length, - sourceless=config.get_main_option("sourceless") == "true" - ) - - def walk_revisions(self, base="base", head="head"): - """Iterate through all revisions. - - This is actually a breadth-first tree traversal, - with leaf nodes being heads. - - """ - if head == "head": - heads = set(self.get_heads()) - else: - heads = set([head]) - while heads: - todo = set(heads) - heads = set() - for head in todo: - if head in heads: - break - for sc in self.iterate_revisions(head, base): - if sc.is_branch_point and sc.revision not in todo: - heads.add(sc.revision) - break - else: - yield sc - - def get_revision(self, id_): - """Return the :class:`.Script` instance with the given rev id.""" - - id_ = self.as_revision_number(id_) - try: - return self._revision_map[id_] - except KeyError: - # do a partial lookup - revs = [x for x in self._revision_map - if x is not None and x.startswith(id_)] - if not revs: - raise util.CommandError("No such revision '%s'" % id_) - elif len(revs) > 1: - raise util.CommandError( - "Multiple revisions start " - "with '%s', %s..." % ( - id_, - ", ".join("'%s'" % r for r in revs[0:3]) - )) - else: - return self._revision_map[revs[0]] - - _get_rev = get_revision - - def as_revision_number(self, id_): - """Convert a symbolic revision, i.e. 'head' or 'base', into - an actual revision number.""" - - if id_ == 'head': - id_ = self.get_current_head() - elif id_ == 'base': - id_ = None - return id_ - - _as_rev_number = as_revision_number - - def iterate_revisions(self, upper, lower): - """Iterate through script revisions, starting at the given - upper revision identifier and ending at the lower. - - The traversal uses strictly the `down_revision` - marker inside each migration script, so - it is a requirement that upper >= lower, - else you'll get nothing back. 
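A minimal sketch of calling this method, reusing the ``script`` object from the class-level :meth:`.from_config` example above (each yielded object is a :class:`.Script`, as noted just below)::

    # walk from the newest revision down to the base, newest first
    for sc in script.iterate_revisions("head", "base"):
        print("%s -> %s" % (sc.down_revision, sc.revision))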
- - The iterator yields :class:`.Script` objects. - - """ - if upper is not None and _relative_destination.match(upper): - relative = int(upper) - revs = list(self._iterate_revisions("head", lower)) - revs = revs[-relative:] - if len(revs) != abs(relative): - raise util.CommandError("Relative revision %s didn't " - "produce %d migrations" % (upper, abs(relative))) - return iter(revs) - elif lower is not None and _relative_destination.match(lower): - relative = int(lower) - revs = list(self._iterate_revisions(upper, "base")) - revs = revs[0:-relative] - if len(revs) != abs(relative): - raise util.CommandError("Relative revision %s didn't " - "produce %d migrations" % (lower, abs(relative))) - return iter(revs) - else: - return self._iterate_revisions(upper, lower) - - def _iterate_revisions(self, upper, lower): - lower = self.get_revision(lower) - upper = self.get_revision(upper) - orig = lower.revision if lower else 'base', \ - upper.revision if upper else 'base' - script = upper - while script != lower: - if script is None and lower is not None: - raise util.CommandError( - "Revision %s is not an ancestor of %s" % orig) - yield script - downrev = script.down_revision - script = self._revision_map[downrev] - - def _upgrade_revs(self, destination, current_rev): - revs = self.iterate_revisions(destination, current_rev) - return [ - (script.module.upgrade, script.down_revision, script.revision, - script.doc) - for script in reversed(list(revs)) - ] - - def _downgrade_revs(self, destination, current_rev): - revs = self.iterate_revisions(current_rev, destination) - return [ - (script.module.downgrade, script.revision, script.down_revision, - script.doc) - for script in revs - ] - - def run_env(self): - """Run the script environment. - - This basically runs the ``env.py`` script present - in the migration environment. It is called exclusively - by the command functions in :mod:`alembic.command`. - - - """ - util.load_python_file(self.dir, 'env.py') - - @property - def env_py_location(self): - return os.path.abspath(os.path.join(self.dir, "env.py")) - - @util.memoized_property - def _revision_map(self): - map_ = {} - for file_ in os.listdir(self.versions): - script = Script._from_filename(self, self.versions, file_) - if script is None: - continue - if script.revision in map_: - util.warn("Revision %s is present more than once" % - script.revision) - map_[script.revision] = script - for rev in map_.values(): - if rev.down_revision is None: - continue - if rev.down_revision not in map_: - util.warn("Revision %s referenced from %s is not present" - % (rev.down_revision, rev)) - rev.down_revision = None - else: - map_[rev.down_revision].add_nextrev(rev.revision) - map_[None] = None - return map_ - - def _rev_path(self, rev_id, message, create_date): - slug = "_".join(_slug_re.findall(message or "")).lower() - if len(slug) > self.truncate_slug_length: - slug = slug[:self.truncate_slug_length].rsplit('_', 1)[0] + '_' - filename = "%s.py" % ( - self.file_template % { - 'rev': rev_id, - 'slug': slug, - 'year': create_date.year, - 'month': create_date.month, - 'day': create_date.day, - 'hour': create_date.hour, - 'minute': create_date.minute, - 'second': create_date.second - } - ) - return os.path.join(self.versions, filename) - - def get_current_head(self): - """Return the current head revision. - - If the script directory has multiple heads - due to branching, an error is raised. - - Returns a string revision number. 
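To complete the up-to-date check sketched in the class docstring above, this value can be compared against what the target database reports; a rough sketch, assuming a live SQLAlchemy ``connection`` and using :class:`.MigrationContext` from ``alembic.migration`` (defined outside this module)::

    from alembic.migration import MigrationContext

    context = MigrationContext.configure(connection)
    if context.get_current_revision() != script.get_current_head():
        print("database revision does not match the head revision")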
- - """ - current_heads = self.get_heads() - if len(current_heads) > 1: - raise util.CommandError('Only a single head is supported. The ' - 'script directory has multiple heads (due to branching), which ' - 'must be resolved by manually editing the revision files to ' - 'form a linear sequence. Run `alembic branches` to see the ' - 'divergence(s).') - - if current_heads: - return current_heads[0] - else: - return None - - _current_head = get_current_head - """the 0.2 name, for backwards compat.""" - - def get_heads(self): - """Return all "head" revisions as strings. - - Returns a list of string revision numbers. - - This is normally a list of length one, - unless branches are present. The - :meth:`.ScriptDirectory.get_current_head()` method - can be used normally when a script directory - has only one head. - - """ - heads = [] - for script in self._revision_map.values(): - if script and script.is_head: - heads.append(script.revision) - return heads - - def get_base(self): - """Return the "base" revision as a string. - - This is the revision number of the script that - has a ``down_revision`` of None. - - Behavior is not defined if more than one script - has a ``down_revision`` of None. - - """ - for script in self._revision_map.values(): - if script and script.down_revision is None \ - and script.revision in self._revision_map: - return script.revision - else: - return None - - def _generate_template(self, src, dest, **kw): - util.status("Generating %s" % os.path.abspath(dest), - util.template_to_file, - src, - dest, - **kw - ) - - def _copy_file(self, src, dest): - util.status("Generating %s" % os.path.abspath(dest), - shutil.copy, - src, dest) - - def generate_revision(self, revid, message, refresh=False, **kw): - """Generate a new revision file. - - This runs the ``script.py.mako`` template, given - template arguments, and creates a new file. - - :param revid: String revision id. Typically this - comes from ``alembic.util.rev_id()``. - :param message: the revision message, the one passed - by the -m argument to the ``revision`` command. - :param refresh: when True, the in-memory state of this - :class:`.ScriptDirectory` will be updated with a new - :class:`.Script` instance representing the new revision; - the :class:`.Script` instance is returned. - If False, the file is created but the state of the - :class:`.ScriptDirectory` is unmodified; ``None`` - is returned. - - """ - current_head = self.get_current_head() - create_date = datetime.datetime.now() - path = self._rev_path(revid, message, create_date) - self._generate_template( - os.path.join(self.dir, "script.py.mako"), - path, - up_revision=str(revid), - down_revision=current_head, - create_date=create_date, - message=message if message is not None else ("empty message"), - **kw - ) - if refresh: - script = Script._from_path(self, path) - self._revision_map[script.revision] = script - if script.down_revision: - self._revision_map[script.down_revision].\ - add_nextrev(script.revision) - return script - else: - return None - - -class Script(object): - """Represent a single revision file in a ``versions/`` directory. - - The :class:`.Script` instance is returned by methods - such as :meth:`.ScriptDirectory.iterate_revisions`. 
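A brief sketch of inspecting one of these objects, again reusing the ``script`` directory from the earlier example::

    head = script.get_revision("head")
    print(head.revision)        # string revision id
    print(head.down_revision)   # parent revision id, or None for the base
    print(head.is_head)         # True, since no other revision points to it
    print(head.doc)             # first paragraph of the migration's docstring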
- - """ - - nextrev = frozenset() - - def __init__(self, module, rev_id, path): - self.module = module - self.revision = rev_id - self.path = path - self.down_revision = getattr(module, 'down_revision', None) - - revision = None - """The string revision number for this :class:`.Script` instance.""" - - module = None - """The Python module representing the actual script itself.""" - - path = None - """Filesystem path of the script.""" - - down_revision = None - """The ``down_revision`` identifier within the migration script.""" - - @property - def doc(self): - """Return the docstring given in the script.""" - - return re.split("\n\n", self.longdoc)[0] - - @property - def longdoc(self): - """Return the docstring given in the script.""" - - doc = self.module.__doc__ - if doc: - if hasattr(self.module, "_alembic_source_encoding"): - doc = doc.decode(self.module._alembic_source_encoding) - return doc.strip() - else: - return "" - - def add_nextrev(self, rev): - self.nextrev = self.nextrev.union([rev]) - - @property - def is_head(self): - """Return True if this :class:`.Script` is a 'head' revision. - - This is determined based on whether any other :class:`.Script` - within the :class:`.ScriptDirectory` refers to this - :class:`.Script`. Multiple heads can be present. - - """ - return not bool(self.nextrev) - - @property - def is_branch_point(self): - """Return True if this :class:`.Script` is a branch point. - - A branchpoint is defined as a :class:`.Script` which is referred - to by more than one succeeding :class:`.Script`, that is more - than one :class:`.Script` has a `down_revision` identifier pointing - here. - - """ - return len(self.nextrev) > 1 - - @property - def log_entry(self): - return \ - "Rev: %s%s%s\n" \ - "Parent: %s\n" \ - "Path: %s\n" \ - "\n%s\n" % ( - self.revision, - " (head)" if self.is_head else "", - " (branchpoint)" if self.is_branch_point else "", - self.down_revision, - self.path, - "\n".join( - " %s" % para - for para in self.longdoc.splitlines() - ) - ) - - def __str__(self): - return "%s -> %s%s%s, %s" % ( - self.down_revision, - self.revision, - " (head)" if self.is_head else "", - " (branchpoint)" if self.is_branch_point else "", - self.doc) - - @classmethod - def _from_path(cls, scriptdir, path): - dir_, filename = os.path.split(path) - return cls._from_filename(scriptdir, dir_, filename) - - @classmethod - def _from_filename(cls, scriptdir, dir_, filename): - if scriptdir.sourceless: - py_match = _sourceless_rev_file.match(filename) - else: - py_match = _only_source_rev_file.match(filename) - - if not py_match: - return None - - py_filename = py_match.group(1) - - if scriptdir.sourceless: - is_c = py_match.group(2) == 'c' - is_o = py_match.group(2) == 'o' - else: - is_c = is_o = False - - if is_o or is_c: - py_exists = os.path.exists(os.path.join(dir_, py_filename)) - pyc_exists = os.path.exists(os.path.join(dir_, py_filename + "c")) - - # prefer .py over .pyc because we'd like to get the - # source encoding; prefer .pyc over .pyo because we'd like to - # have the docstrings which a -OO file would not have - if py_exists or is_o and pyc_exists: - return None - - module = util.load_python_file(dir_, filename) - - if not hasattr(module, "revision"): - # attempt to get the revision id from the script name, - # this for legacy only - m = _legacy_rev.match(filename) - if not m: - raise util.CommandError( - "Could not determine revision id from filename %s. 
" - "Be sure the 'revision' variable is " - "declared inside the script (please see 'Upgrading " - "from Alembic 0.1 to 0.2' in the documentation)." - % filename) - else: - revision = m.group(1) - else: - revision = module.revision - return Script(module, revision, os.path.join(dir_, filename)) diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/templates/generic/README b/Linux_i686/lib/python2.7/site-packages/alembic/templates/generic/README deleted file mode 100644 index 98e4f9c..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic/templates/generic/README +++ /dev/null @@ -1 +0,0 @@ -Generic single-database configuration. \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/templates/generic/alembic.ini.mako b/Linux_i686/lib/python2.7/site-packages/alembic/templates/generic/alembic.ini.mako deleted file mode 100644 index a738a24..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic/templates/generic/alembic.ini.mako +++ /dev/null @@ -1,59 +0,0 @@ -# A generic, single database configuration. - -[alembic] -# path to migration scripts -script_location = ${script_location} - -# template used to generate migration files -# file_template = %%(rev)s_%%(slug)s - -# max length of characters to apply to the -# "slug" field -#truncate_slug_length = 40 - -# set to 'true' to run the environment during -# the 'revision' command, regardless of autogenerate -# revision_environment = false - -# set to 'true' to allow .pyc and .pyo files without -# a source .py file to be detected as revisions in the -# versions/ directory -# sourceless = false - -sqlalchemy.url = driver://user:pass@localhost/dbname - - -# Logging configuration -[loggers] -keys = root,sqlalchemy,alembic - -[handlers] -keys = console - -[formatters] -keys = generic - -[logger_root] -level = WARN -handlers = console -qualname = - -[logger_sqlalchemy] -level = WARN -handlers = -qualname = sqlalchemy.engine - -[logger_alembic] -level = INFO -handlers = -qualname = alembic - -[handler_console] -class = StreamHandler -args = (sys.stderr,) -level = NOTSET -formatter = generic - -[formatter_generic] -format = %(levelname)-5.5s [%(name)s] %(message)s -datefmt = %H:%M:%S diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/templates/generic/env.py b/Linux_i686/lib/python2.7/site-packages/alembic/templates/generic/env.py deleted file mode 100644 index 712b616..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic/templates/generic/env.py +++ /dev/null @@ -1,71 +0,0 @@ -from __future__ import with_statement -from alembic import context -from sqlalchemy import engine_from_config, pool -from logging.config import fileConfig - -# this is the Alembic Config object, which provides -# access to the values within the .ini file in use. -config = context.config - -# Interpret the config file for Python logging. -# This line sets up loggers basically. -fileConfig(config.config_file_name) - -# add your model's MetaData object here -# for 'autogenerate' support -# from myapp import mymodel -# target_metadata = mymodel.Base.metadata -target_metadata = None - -# other values from the config, defined by the needs of env.py, -# can be acquired: -# my_important_option = config.get_main_option("my_important_option") -# ... etc. - -def run_migrations_offline(): - """Run migrations in 'offline' mode. - - This configures the context with just a URL - and not an Engine, though an Engine is acceptable - here as well. By skipping the Engine creation - we don't even need a DBAPI to be available. 
- - Calls to context.execute() here emit the given string to the - script output. - - """ - url = config.get_main_option("sqlalchemy.url") - context.configure(url=url, target_metadata=target_metadata) - - with context.begin_transaction(): - context.run_migrations() - -def run_migrations_online(): - """Run migrations in 'online' mode. - - In this scenario we need to create an Engine - and associate a connection with the context. - - """ - engine = engine_from_config( - config.get_section(config.config_ini_section), - prefix='sqlalchemy.', - poolclass=pool.NullPool) - - connection = engine.connect() - context.configure( - connection=connection, - target_metadata=target_metadata - ) - - try: - with context.begin_transaction(): - context.run_migrations() - finally: - connection.close() - -if context.is_offline_mode(): - run_migrations_offline() -else: - run_migrations_online() - diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/templates/generic/script.py.mako b/Linux_i686/lib/python2.7/site-packages/alembic/templates/generic/script.py.mako deleted file mode 100644 index 9570201..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic/templates/generic/script.py.mako +++ /dev/null @@ -1,22 +0,0 @@ -"""${message} - -Revision ID: ${up_revision} -Revises: ${down_revision} -Create Date: ${create_date} - -""" - -# revision identifiers, used by Alembic. -revision = ${repr(up_revision)} -down_revision = ${repr(down_revision)} - -from alembic import op -import sqlalchemy as sa -${imports if imports else ""} - -def upgrade(): - ${upgrades if upgrades else "pass"} - - -def downgrade(): - ${downgrades if downgrades else "pass"} diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/templates/multidb/README b/Linux_i686/lib/python2.7/site-packages/alembic/templates/multidb/README deleted file mode 100644 index 5db219f..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic/templates/multidb/README +++ /dev/null @@ -1 +0,0 @@ -Rudimentary multi-database configuration. \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/templates/multidb/alembic.ini.mako b/Linux_i686/lib/python2.7/site-packages/alembic/templates/multidb/alembic.ini.mako deleted file mode 100644 index 132b246..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic/templates/multidb/alembic.ini.mako +++ /dev/null @@ -1,65 +0,0 @@ -# a multi-database configuration. 
- -[alembic] -# path to migration scripts -script_location = ${script_location} - -# template used to generate migration files -# file_template = %%(rev)s_%%(slug)s - -# max length of characters to apply to the -# "slug" field -#truncate_slug_length = 40 - -# set to 'true' to run the environment during -# the 'revision' command, regardless of autogenerate -# revision_environment = false - -# set to 'true' to allow .pyc and .pyo files without -# a source .py file to be detected as revisions in the -# versions/ directory -# sourceless = false - -databases = engine1, engine2 - -[engine1] -sqlalchemy.url = driver://user:pass@localhost/dbname - -[engine2] -sqlalchemy.url = driver://user:pass@localhost/dbname2 - - -# Logging configuration -[loggers] -keys = root,sqlalchemy,alembic - -[handlers] -keys = console - -[formatters] -keys = generic - -[logger_root] -level = WARN -handlers = console -qualname = - -[logger_sqlalchemy] -level = WARN -handlers = -qualname = sqlalchemy.engine - -[logger_alembic] -level = INFO -handlers = -qualname = alembic - -[handler_console] -class = StreamHandler -args = (sys.stderr,) -level = NOTSET -formatter = generic - -[formatter_generic] -format = %(levelname)-5.5s [%(name)s] %(message)s -datefmt = %H:%M:%S diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/templates/multidb/env.py b/Linux_i686/lib/python2.7/site-packages/alembic/templates/multidb/env.py deleted file mode 100644 index e3511de..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic/templates/multidb/env.py +++ /dev/null @@ -1,130 +0,0 @@ -from __future__ import with_statement -from alembic import context -from sqlalchemy import engine_from_config, pool -from logging.config import fileConfig -import logging -import re - -USE_TWOPHASE = False - -# this is the Alembic Config object, which provides -# access to the values within the .ini file in use. -config = context.config - -# Interpret the config file for Python logging. -# This line sets up loggers basically. -fileConfig(config.config_file_name) -logger = logging.getLogger('alembic.env') - -# gather section names referring to different -# databases. These are named "engine1", "engine2" -# in the sample .ini file. -db_names = config.get_main_option('databases') - -# add your model's MetaData objects here -# for 'autogenerate' support. These must be set -# up to hold just those tables targeting a -# particular database. table.tometadata() may be -# helpful here in case a "copy" of -# a MetaData is needed. -# from myapp import mymodel -# target_metadata = { -# 'engine1':mymodel.metadata1, -# 'engine2':mymodel.metadata2 -#} -target_metadata = {} - -# other values from the config, defined by the needs of env.py, -# can be acquired: -# my_important_option = config.get_main_option("my_important_option") -# ... etc. - -def run_migrations_offline(): - """Run migrations in 'offline' mode. - - This configures the context with just a URL - and not an Engine, though an Engine is acceptable - here as well. By skipping the Engine creation - we don't even need a DBAPI to be available. - - Calls to context.execute() here emit the given string to the - script output. - - """ - # for the --sql use case, run migrations for each URL into - # individual files. 
- - engines = {} - for name in re.split(r',\s*', db_names): - engines[name] = rec = {} - rec['url'] = context.config.get_section_option(name, - "sqlalchemy.url") - - for name, rec in engines.items(): - logger.info("Migrating database %s" % name) - file_ = "%s.sql" % name - logger.info("Writing output to %s" % file_) - with open(file_, 'w') as buffer: - context.configure(url=rec['url'], output_buffer=buffer, - target_metadata=target_metadata.get(name)) - with context.begin_transaction(): - context.run_migrations(engine_name=name) - -def run_migrations_online(): - """Run migrations in 'online' mode. - - In this scenario we need to create an Engine - and associate a connection with the context. - - """ - - # for the direct-to-DB use case, start a transaction on all - # engines, then run all migrations, then commit all transactions. - - engines = {} - for name in re.split(r',\s*', db_names): - engines[name] = rec = {} - rec['engine'] = engine_from_config( - context.config.get_section(name), - prefix='sqlalchemy.', - poolclass=pool.NullPool) - - for name, rec in engines.items(): - engine = rec['engine'] - rec['connection'] = conn = engine.connect() - - if USE_TWOPHASE: - rec['transaction'] = conn.begin_twophase() - else: - rec['transaction'] = conn.begin() - - try: - for name, rec in engines.items(): - logger.info("Migrating database %s" % name) - context.configure( - connection=rec['connection'], - upgrade_token="%s_upgrades" % name, - downgrade_token="%s_downgrades" % name, - target_metadata=target_metadata.get(name) - ) - context.run_migrations(engine_name=name) - - if USE_TWOPHASE: - for rec in engines.values(): - rec['transaction'].prepare() - - for rec in engines.values(): - rec['transaction'].commit() - except: - for rec in engines.values(): - rec['transaction'].rollback() - raise - finally: - for rec in engines.values(): - rec['connection'].close() - - -if context.is_offline_mode(): - run_migrations_offline() -else: - run_migrations_online() diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/templates/multidb/script.py.mako b/Linux_i686/lib/python2.7/site-packages/alembic/templates/multidb/script.py.mako deleted file mode 100644 index 1e7f79a..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic/templates/multidb/script.py.mako +++ /dev/null @@ -1,43 +0,0 @@ -<%! -import re - -%>"""${message} - -Revision ID: ${up_revision} -Revises: ${down_revision} -Create Date: ${create_date} - -""" - -# revision identifiers, used by Alembic. -revision = ${repr(up_revision)} -down_revision = ${repr(down_revision)} - -from alembic import op -import sqlalchemy as sa -${imports if imports else ""} - -def upgrade(engine_name): - eval("upgrade_%s" % engine_name)() - - -def downgrade(engine_name): - eval("downgrade_%s" % engine_name)() - -<% - db_names = config.get_main_option("databases") -%> - -## generate an "upgrade_() / downgrade_()" function -## for each database name in the ini file. - -% for db_name in re.split(r',\s*', db_names): - -def upgrade_${db_name}(): - ${context.get("%s_upgrades" % db_name, "pass")} - - -def downgrade_${db_name}(): - ${context.get("%s_downgrades" % db_name, "pass")} - -% endfor diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/templates/pylons/README b/Linux_i686/lib/python2.7/site-packages/alembic/templates/pylons/README deleted file mode 100644 index ed3c28e..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic/templates/pylons/README +++ /dev/null @@ -1 +0,0 @@ -Configuration that reads from a Pylons project environment. 
\ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/templates/pylons/alembic.ini.mako b/Linux_i686/lib/python2.7/site-packages/alembic/templates/pylons/alembic.ini.mako deleted file mode 100644 index 771c027..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic/templates/pylons/alembic.ini.mako +++ /dev/null @@ -1,25 +0,0 @@ -# a Pylons configuration. - -[alembic] -# path to migration scripts -script_location = ${script_location} - -# template used to generate migration files -# file_template = %%(rev)s_%%(slug)s - -# max length of characters to apply to the -# "slug" field -#truncate_slug_length = 40 - -# set to 'true' to run the environment during -# the 'revision' command, regardless of autogenerate -# revision_environment = false - -# set to 'true' to allow .pyc and .pyo files without -# a source .py file to be detected as revisions in the -# versions/ directory -# sourceless = false - -pylons_config_file = ./development.ini - -# that's it ! \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/templates/pylons/env.py b/Linux_i686/lib/python2.7/site-packages/alembic/templates/pylons/env.py deleted file mode 100644 index 36c3fca..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic/templates/pylons/env.py +++ /dev/null @@ -1,86 +0,0 @@ -"""Pylons bootstrap environment. - -Place 'pylons_config_file' into alembic.ini, and the application will -be loaded from there. - -""" -from alembic import context -from paste.deploy import loadapp -from logging.config import fileConfig -from sqlalchemy.engine.base import Engine - - -try: - # if pylons app already in, don't create a new app - from pylons import config as pylons_config - pylons_config['__file__'] -except: - config = context.config - # can use config['__file__'] here, i.e. the Pylons - # ini file, instead of alembic.ini - config_file = config.get_main_option('pylons_config_file') - fileConfig(config_file) - wsgi_app = loadapp('config:%s' % config_file, relative_to='.') - - -# customize this section for non-standard engine configurations. -meta = __import__("%s.model.meta" % wsgi_app.config['pylons.package']).model.meta - -# add your model's MetaData object here -# for 'autogenerate' support -# from myapp import mymodel -# target_metadata = mymodel.Base.metadata -target_metadata = None - - -def run_migrations_offline(): - """Run migrations in 'offline' mode. - - This configures the context with just a URL - and not an Engine, though an Engine is acceptable - here as well. By skipping the Engine creation - we don't even need a DBAPI to be available. - - Calls to context.execute() here emit the given string to the - script output. - - """ - context.configure( - url=meta.engine.url, target_metadata=target_metadata) - with context.begin_transaction(): - context.run_migrations() - - -def run_migrations_online(): - """Run migrations in 'online' mode. - - In this scenario we need to create an Engine - and associate a connection with the context. 
- - """ - # specify here how the engine is acquired - # engine = meta.engine - raise NotImplementedError("Please specify engine connectivity here") - - if isinstance(engine, Engine): - connection = engine.connect() - else: - raise Exception( - 'Expected engine instance got %s instead' % type(engine) - ) - - context.configure( - connection=connection, - target_metadata=target_metadata - ) - - try: - with context.begin_transaction(): - context.run_migrations() - finally: - connection.close() - -if context.is_offline_mode(): - run_migrations_offline() -else: - run_migrations_online() diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/templates/pylons/script.py.mako b/Linux_i686/lib/python2.7/site-packages/alembic/templates/pylons/script.py.mako deleted file mode 100644 index 9570201..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic/templates/pylons/script.py.mako +++ /dev/null @@ -1,22 +0,0 @@ -"""${message} - -Revision ID: ${up_revision} -Revises: ${down_revision} -Create Date: ${create_date} - -""" - -# revision identifiers, used by Alembic. -revision = ${repr(up_revision)} -down_revision = ${repr(down_revision)} - -from alembic import op -import sqlalchemy as sa -${imports if imports else ""} - -def upgrade(): - ${upgrades if upgrades else "pass"} - - -def downgrade(): - ${downgrades if downgrades else "pass"} diff --git a/Linux_i686/lib/python2.7/site-packages/alembic/util.py b/Linux_i686/lib/python2.7/site-packages/alembic/util.py deleted file mode 100644 index 63e9269..0000000 --- a/Linux_i686/lib/python2.7/site-packages/alembic/util.py +++ /dev/null @@ -1,348 +0,0 @@ -import sys -import os -import textwrap -import warnings -import re -import inspect -import uuid - -from mako.template import Template -from sqlalchemy.engine import url -from sqlalchemy import __version__ - -from .compat import callable, exec_, load_module_py, load_module_pyc, binary_type - -class CommandError(Exception): - pass - -def _safe_int(value): - try: - return int(value) - except: - return value -_vers = tuple([_safe_int(x) for x in re.findall(r'(\d+|[abc]\d)', __version__)]) -sqla_07 = _vers > (0, 7, 2) -sqla_08 = _vers >= (0, 8, 0, 'b2') -sqla_09 = _vers >= (0, 9, 0) -sqla_092 = _vers >= (0, 9, 2) -sqla_094 = _vers >= (0, 9, 4) -if not sqla_07: - raise CommandError( - "SQLAlchemy 0.7.3 or greater is required. ") - -from sqlalchemy.util import format_argspec_plus, update_wrapper -from sqlalchemy.util.compat import inspect_getfullargspec - - -try: - import fcntl - import termios - import struct - ioctl = fcntl.ioctl(0, termios.TIOCGWINSZ, - struct.pack('HHHH', 0, 0, 0, 0)) - _h, TERMWIDTH, _hp, _wp = struct.unpack('HHHH', ioctl) - if TERMWIDTH <= 0: # can occur if running in emacs pseudo-tty - TERMWIDTH = None -except (ImportError, IOError): - TERMWIDTH = None - - -def template_to_file(template_file, dest, **kw): - with open(dest, 'w') as f: - f.write( - Template(filename=template_file).render(**kw) - ) - -def create_module_class_proxy(cls, globals_, locals_): - """Create module level proxy functions for the - methods on a given class. - - The functions will have a compatible signature - as the methods. A proxy is established - using the ``_install_proxy(obj)`` function, - and removed using ``_remove_proxy()``, both - installed by calling this function. 
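A rough sketch of the mechanism, patterned on how the ``alembic.op`` module appears to consume it; the ``operations`` instance below is a stand-in for a configured :class:`.Operations` object::

    from alembic.operations import Operations

    # generate module-level proxies for each public Operations method
    create_module_class_proxy(Operations, globals(), locals())

    _install_proxy(operations)   # module-level names now forward to `operations`
    drop_table('old_table')      # same as operations.drop_table('old_table')
    _remove_proxy()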
- - """ - attr_names = set() - - def _install_proxy(obj): - globals_['_proxy'] = obj - for name in attr_names: - globals_[name] = getattr(obj, name) - - def _remove_proxy(): - globals_['_proxy'] = None - for name in attr_names: - del globals_[name] - - globals_['_install_proxy'] = _install_proxy - globals_['_remove_proxy'] = _remove_proxy - - def _create_op_proxy(name): - fn = getattr(cls, name) - spec = inspect.getargspec(fn) - if spec[0] and spec[0][0] == 'self': - spec[0].pop(0) - args = inspect.formatargspec(*spec) - num_defaults = 0 - if spec[3]: - num_defaults += len(spec[3]) - name_args = spec[0] - if num_defaults: - defaulted_vals = name_args[0 - num_defaults:] - else: - defaulted_vals = () - - apply_kw = inspect.formatargspec( - name_args, spec[1], spec[2], - defaulted_vals, - formatvalue=lambda x: '=' + x) - - def _name_error(name): - raise NameError( - "Can't invoke function '%s', as the proxy object has "\ - "not yet been " - "established for the Alembic '%s' class. " - "Try placing this code inside a callable." % ( - name, cls.__name__ - )) - globals_['_name_error'] = _name_error - - func_text = textwrap.dedent("""\ - def %(name)s(%(args)s): - %(doc)r - try: - p = _proxy - except NameError: - _name_error('%(name)s') - return _proxy.%(name)s(%(apply_kw)s) - e - """ % { - 'name': name, - 'args': args[1:-1], - 'apply_kw': apply_kw[1:-1], - 'doc': fn.__doc__, - }) - lcl = {} - exec_(func_text, globals_, lcl) - return lcl[name] - - for methname in dir(cls): - if not methname.startswith('_'): - if callable(getattr(cls, methname)): - locals_[methname] = _create_op_proxy(methname) - else: - attr_names.add(methname) - -def write_outstream(stream, *text): - encoding = getattr(stream, 'encoding', 'ascii') or 'ascii' - for t in text: - if not isinstance(t, binary_type): - t = t.encode(encoding, 'replace') - t = t.decode(encoding) - try: - stream.write(t) - except IOError: - # suppress "broken pipe" errors. - # no known way to handle this on Python 3 however - # as the exception is "ignored" (noisily) in TextIOWrapper. - break - -def coerce_resource_to_filename(fname): - """Interpret a filename as either a filesystem location or as a package resource. - - Names that are non absolute paths and contain a colon - are interpreted as resources and coerced to a file location. 
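For instance (``myapp`` is a hypothetical installed package)::

    # package resource: resolved through pkg_resources to an on-disk path
    coerce_resource_to_filename('myapp:migrations')

    # absolute or plain relative paths pass through unchanged
    coerce_resource_to_filename('/opt/project/migrations')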
- - """ - if not os.path.isabs(fname) and ":" in fname: - import pkg_resources - fname = pkg_resources.resource_filename(*fname.split(':')) - return fname - -def status(_statmsg, fn, *arg, **kw): - msg(_statmsg + " ...", False) - try: - ret = fn(*arg, **kw) - write_outstream(sys.stdout, " done\n") - return ret - except: - write_outstream(sys.stdout, " FAILED\n") - raise - -def err(message): - msg(message) - sys.exit(-1) - -def obfuscate_url_pw(u): - u = url.make_url(u) - if u.password: - u.password = 'XXXXX' - return str(u) - -def asbool(value): - return value is not None and \ - value.lower() == 'true' - -def warn(msg): - warnings.warn(msg) - -def msg(msg, newline=True): - if TERMWIDTH is None: - write_outstream(sys.stdout, msg) - if newline: - write_outstream(sys.stdout, "\n") - else: - # left indent output lines - lines = textwrap.wrap(msg, TERMWIDTH) - if len(lines) > 1: - for line in lines[0:-1]: - write_outstream(sys.stdout, " ", line, "\n") - write_outstream(sys.stdout, " ", lines[-1], ("\n" if newline else "")) - -def load_python_file(dir_, filename): - """Load a file from the given path as a Python module.""" - - module_id = re.sub(r'\W', "_", filename) - path = os.path.join(dir_, filename) - _, ext = os.path.splitext(filename) - if ext == ".py": - if os.path.exists(path): - module = load_module_py(module_id, path) - elif os.path.exists(simple_pyc_file_from_path(path)): - # look for sourceless load - module = load_module_pyc(module_id, simple_pyc_file_from_path(path)) - else: - raise ImportError("Can't find Python file %s" % path) - elif ext in (".pyc", ".pyo"): - module = load_module_pyc(module_id, path) - del sys.modules[module_id] - return module - -def simple_pyc_file_from_path(path): - """Given a python source path, return the so-called - "sourceless" .pyc or .pyo path. - - This just a .pyc or .pyo file where the .py file would be. - - Even with PEP-3147, which normally puts .pyc/.pyo files in __pycache__, - this use case remains supported as a so-called "sourceless module import". - - """ - if sys.flags.optimize: - return path + "o" # e.g. .pyo - else: - return path + "c" # e.g. .pyc - -def pyc_file_from_path(path): - """Given a python source path, locate the .pyc. 
- - See http://www.python.org/dev/peps/pep-3147/ - #detecting-pep-3147-availability - http://www.python.org/dev/peps/pep-3147/#file-extension-checks - - """ - import imp - has3147 = hasattr(imp, 'get_tag') - if has3147: - return imp.cache_from_source(path) - else: - return simple_pyc_file_from_path(path) - -def rev_id(): - val = int(uuid.uuid4()) % 100000000000000 - return hex(val)[2:-1] - -class memoized_property(object): - """A read-only @property that is only evaluated once.""" - - def __init__(self, fget, doc=None): - self.fget = fget - self.__doc__ = doc or fget.__doc__ - self.__name__ = fget.__name__ - - def __get__(self, obj, cls): - if obj is None: - return None - obj.__dict__[self.__name__] = result = self.fget(obj) - return result - - -class immutabledict(dict): - - def _immutable(self, *arg, **kw): - raise TypeError("%s object is immutable" % self.__class__.__name__) - - __delitem__ = __setitem__ = __setattr__ = \ - clear = pop = popitem = setdefault = \ - update = _immutable - - def __new__(cls, *args): - new = dict.__new__(cls) - dict.__init__(new, *args) - return new - - def __init__(self, *args): - pass - - def __reduce__(self): - return immutabledict, (dict(self), ) - - def union(self, d): - if not self: - return immutabledict(d) - else: - d2 = immutabledict(self) - dict.update(d2, d) - return d2 - - def __repr__(self): - return "immutabledict(%s)" % dict.__repr__(self) - - -def _with_legacy_names(translations): - def decorate(fn): - - spec = inspect_getfullargspec(fn) - metadata = dict(target='target', fn='fn') - metadata.update(format_argspec_plus(spec, grouped=False)) - - has_keywords = bool(spec[2]) - - if not has_keywords: - metadata['args'] += ", **kw" - metadata['apply_kw'] += ", **kw" - - def go(*arg, **kw): - names = set(kw).difference(spec[0]) - for oldname, newname in translations: - if oldname in kw: - kw[newname] = kw.pop(oldname) - names.discard(oldname) - - warnings.warn( - "Argument '%s' is now named '%s' for function '%s'" % - (oldname, newname, fn.__name__)) - if not has_keywords and names: - raise TypeError("Unknown arguments: %s" % ", ".join(names)) - return fn(*arg, **kw) - - code = 'lambda %(args)s: %(target)s(%(apply_kw)s)' % ( - metadata) - decorated = eval(code, {"target": go}) - decorated.__defaults__ = getattr(fn, '__func__', fn).__defaults__ - update_wrapper(decorated, fn) - if hasattr(decorated, '__wrapped__'): - # update_wrapper in py3k applies __wrapped__, which causes - # inspect.getargspec() to ignore the extra arguments on our - # wrapper as of Python 3.4. We need this for the - # "module class proxy" thing though, so just del the __wrapped__ - # for now. 
See #175 as well as bugs.python.org/issue17482 - del decorated.__wrapped__ - return decorated - - return decorate - - - diff --git a/Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/dependency_links.txt b/Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/dependency_links.txt deleted file mode 100644 index 8b13789..0000000 --- a/Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/dependency_links.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/not-zip-safe b/Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/not-zip-safe deleted file mode 100644 index 8b13789..0000000 --- a/Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/not-zip-safe +++ /dev/null @@ -1 +0,0 @@ - diff --git a/Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/PKG-INFO b/Linux_i686/lib/python2.7/site-packages/cffi-0.8.6.egg-info/PKG-INFO similarity index 98% rename from Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/PKG-INFO rename to Linux_i686/lib/python2.7/site-packages/cffi-0.8.6.egg-info/PKG-INFO index 23f2982..e19ce47 100644 --- a/Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/PKG-INFO +++ b/Linux_i686/lib/python2.7/site-packages/cffi-0.8.6.egg-info/PKG-INFO @@ -1,6 +1,6 @@ Metadata-Version: 1.1 Name: cffi -Version: 0.8.2 +Version: 0.8.6 Summary: Foreign Function Interface for Python calling C code. Home-page: http://cffi.readthedocs.org Author: Armin Rigo, Maciej Fijalkowski diff --git a/Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/SOURCES.txt b/Linux_i686/lib/python2.7/site-packages/cffi-0.8.6.egg-info/SOURCES.txt similarity index 81% rename from Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/SOURCES.txt rename to Linux_i686/lib/python2.7/site-packages/cffi-0.8.6.egg-info/SOURCES.txt index 0fe9fb2..599d5cc 100644 --- a/Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/SOURCES.txt +++ b/Linux_i686/lib/python2.7/site-packages/cffi-0.8.6.egg-info/SOURCES.txt @@ -4,7 +4,6 @@ setup.cfg setup.py setup_base.py c/_cffi_backend.c -c/check__thread.c c/file_emulator.h c/malloc_closure.h c/minibuffer.h @@ -12,7 +11,6 @@ c/misc_thread.h c/misc_win32.h c/test_c.py c/wchar_helper.h -c/x.py c/libffi_msvc/ffi.c c/libffi_msvc/ffi.h c/libffi_msvc/ffi_common.h @@ -41,7 +39,6 @@ cffi.egg-info/dependency_links.txt cffi.egg-info/not-zip-safe cffi.egg-info/requires.txt cffi.egg-info/top_level.txt -demo/_csvmodule.py demo/_curses.py demo/api.py demo/bsdopendirtype.py @@ -49,20 +46,15 @@ demo/btrfs-snap.py demo/cffi-cocoa.py demo/fastcsv.py demo/gmp.py -demo/image.py demo/pwuid.py demo/py.cleanup demo/pyobj.py demo/readdir.py demo/readdir2.py demo/readdir_ctypes.py -demo/sarvi.py demo/setup.py -demo/syslog.py -demo/ui.py demo/winclipboard.py demo/xclient.py -demo/y.py doc/Makefile doc/design.rst doc/make.bat @@ -91,22 +83,22 @@ testing/test_zintegration.py testing/udir.py testing/snippets/distutils_module/setup.py testing/snippets/distutils_module/snip_basic_verify.py -testing/snippets/distutils_module/build/lib.linux-x86_64-3.2/snip_basic_verify.py +testing/snippets/distutils_module/build/lib.linux-x86_64-2.7/snip_basic_verify.py testing/snippets/distutils_package_1/setup.py -testing/snippets/distutils_package_1/build/lib.linux-x86_64-3.2/snip_basic_verify1/__init__.py +testing/snippets/distutils_package_1/build/lib.linux-x86_64-2.7/snip_basic_verify1/__init__.py testing/snippets/distutils_package_1/snip_basic_verify1/__init__.py testing/snippets/distutils_package_2/setup.py 
-testing/snippets/distutils_package_2/build/lib.linux-x86_64-3.2/snip_basic_verify2/__init__.py +testing/snippets/distutils_package_2/build/lib.linux-x86_64-2.7/snip_basic_verify2/__init__.py testing/snippets/distutils_package_2/snip_basic_verify2/__init__.py testing/snippets/infrastructure/setup.py -testing/snippets/infrastructure/build/lib/snip_infrastructure/__init__.py +testing/snippets/infrastructure/build/lib.linux-x86_64-2.7/snip_infrastructure/__init__.py testing/snippets/infrastructure/snip_infrastructure/__init__.py testing/snippets/setuptools_module/setup.py testing/snippets/setuptools_module/snip_setuptools_verify.py -testing/snippets/setuptools_module/build/lib.linux-x86_64-3.2/snip_setuptools_verify.py +testing/snippets/setuptools_module/build/lib.linux-x86_64-2.7/snip_setuptools_verify.py testing/snippets/setuptools_package_1/setup.py -testing/snippets/setuptools_package_1/build/lib.linux-x86_64-3.2/snip_setuptools_verify1/__init__.py +testing/snippets/setuptools_package_1/build/lib.linux-x86_64-2.7/snip_setuptools_verify1/__init__.py testing/snippets/setuptools_package_1/snip_setuptools_verify1/__init__.py testing/snippets/setuptools_package_2/setup.py -testing/snippets/setuptools_package_2/build/lib.linux-x86_64-3.2/snip_setuptools_verify2/__init__.py +testing/snippets/setuptools_package_2/build/lib.linux-x86_64-2.7/snip_setuptools_verify2/__init__.py testing/snippets/setuptools_package_2/snip_setuptools_verify2/__init__.py \ No newline at end of file diff --git a/Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/dependency_links.txt b/Linux_i686/lib/python2.7/site-packages/cffi-0.8.6.egg-info/dependency_links.txt similarity index 100% rename from Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/dependency_links.txt rename to Linux_i686/lib/python2.7/site-packages/cffi-0.8.6.egg-info/dependency_links.txt diff --git a/Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/installed-files.txt b/Linux_i686/lib/python2.7/site-packages/cffi-0.8.6.egg-info/installed-files.txt similarity index 100% rename from Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/installed-files.txt rename to Linux_i686/lib/python2.7/site-packages/cffi-0.8.6.egg-info/installed-files.txt diff --git a/Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/not-zip-safe b/Linux_i686/lib/python2.7/site-packages/cffi-0.8.6.egg-info/not-zip-safe similarity index 100% rename from Linux_i686/lib/python2.7/site-packages/Flask-0.10.1.egg-info/not-zip-safe rename to Linux_i686/lib/python2.7/site-packages/cffi-0.8.6.egg-info/not-zip-safe diff --git a/Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/requires.txt b/Linux_i686/lib/python2.7/site-packages/cffi-0.8.6.egg-info/requires.txt similarity index 100% rename from Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/requires.txt rename to Linux_i686/lib/python2.7/site-packages/cffi-0.8.6.egg-info/requires.txt diff --git a/Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/top_level.txt b/Linux_i686/lib/python2.7/site-packages/cffi-0.8.6.egg-info/top_level.txt similarity index 100% rename from Linux_i686/lib/python2.7/site-packages/cffi-0.8.2.egg-info/top_level.txt rename to Linux_i686/lib/python2.7/site-packages/cffi-0.8.6.egg-info/top_level.txt diff --git a/Linux_i686/lib/python2.7/site-packages/cffi/__init__.py b/Linux_i686/lib/python2.7/site-packages/cffi/__init__.py index fa9e86f..df981fd 100644 --- a/Linux_i686/lib/python2.7/site-packages/cffi/__init__.py +++ 
b/Linux_i686/lib/python2.7/site-packages/cffi/__init__.py @@ -4,5 +4,5 @@ __all__ = ['FFI', 'VerificationError', 'VerificationMissing', 'CDefError', from .api import FFI, CDefError, FFIError from .ffiplatform import VerificationError, VerificationMissing -__version__ = "0.8.2" -__version_info__ = (0, 8, 2) +__version__ = "0.8.6" +__version_info__ = (0, 8, 6) diff --git a/Linux_i686/lib/python2.7/site-packages/cffi/api.py b/Linux_i686/lib/python2.7/site-packages/cffi/api.py index f44f086..aed9715 100644 --- a/Linux_i686/lib/python2.7/site-packages/cffi/api.py +++ b/Linux_i686/lib/python2.7/site-packages/cffi/api.py @@ -55,8 +55,7 @@ class FFI(object): # _cffi_backend.so compiled. import _cffi_backend as backend from . import __version__ - assert (backend.__version__ == __version__ or - backend.__version__ == __version__[:3]) + assert backend.__version__ == __version__ # (If you insist you can also try to pass the option # 'backend=backend_ctypes.CTypesBackend()', but don't # rely on it! It's probably not going to work well.) @@ -443,6 +442,10 @@ def _make_ffi_library(ffi, libname, flags): for enumname, enumval in zip(tp.enumerators, tp.enumvalues): if enumname not in library.__dict__: library.__dict__[enumname] = enumval + for key, val in ffi._parser._int_constants.items(): + if key not in library.__dict__: + library.__dict__[key] = val + copied_enums.append(True) if name in library.__dict__: return diff --git a/Linux_i686/lib/python2.7/site-packages/cffi/cparser.py b/Linux_i686/lib/python2.7/site-packages/cffi/cparser.py index 99998ac..a53d4c3 100644 --- a/Linux_i686/lib/python2.7/site-packages/cffi/cparser.py +++ b/Linux_i686/lib/python2.7/site-packages/cffi/cparser.py @@ -24,6 +24,7 @@ _r_enum_dotdotdot = re.compile(r"__dotdotdot\d+__$") _r_partial_array = re.compile(r"\[\s*\.\.\.\s*\]") _r_words = re.compile(r"\w+|\S") _parser_cache = None +_r_int_literal = re.compile(r"^0?x?[0-9a-f]+u?l?$", re.IGNORECASE) def _get_parser(): global _parser_cache @@ -99,6 +100,7 @@ class Parser(object): self._structnode2type = weakref.WeakKeyDictionary() self._override = False self._packed = False + self._int_constants = {} def _parse(self, csource): csource, macros = _preprocess(csource) @@ -128,9 +130,10 @@ class Parser(object): finally: if lock is not None: lock.release() - return ast, macros + # csource will be used to find buggy source text + return ast, macros, csource - def convert_pycparser_error(self, e, csource): + def _convert_pycparser_error(self, e, csource): # xxx look for ":NUM:" at the start of str(e) and try to interpret # it as a line number line = None @@ -142,6 +145,12 @@ class Parser(object): csourcelines = csource.splitlines() if 1 <= linenum <= len(csourcelines): line = csourcelines[linenum-1] + return line + + def convert_pycparser_error(self, e, csource): + line = self._convert_pycparser_error(e, csource) + + msg = str(e) if line: msg = 'cannot parse "%s"\n%s' % (line.strip(), msg) else: @@ -160,14 +169,9 @@ class Parser(object): self._packed = prev_packed def _internal_parse(self, csource): - ast, macros = self._parse(csource) + ast, macros, csource = self._parse(csource) # add the macros - for key, value in macros.items(): - value = value.strip() - if value != '...': - raise api.CDefError('only supports the syntax "#define ' - '%s ..." 
for now (literally)' % key) - self._declare('macro ' + key, value) + self._process_macros(macros) # find the first "__dotdotdot__" and use that as a separator # between the repeated typedefs and the real csource iterator = iter(ast.ext) @@ -175,27 +179,61 @@ class Parser(object): if decl.name == '__dotdotdot__': break # - for decl in iterator: - if isinstance(decl, pycparser.c_ast.Decl): - self._parse_decl(decl) - elif isinstance(decl, pycparser.c_ast.Typedef): - if not decl.name: - raise api.CDefError("typedef does not declare any name", - decl) - if (isinstance(decl.type.type, pycparser.c_ast.IdentifierType) - and decl.type.type.names == ['__dotdotdot__']): - realtype = model.unknown_type(decl.name) - elif (isinstance(decl.type, pycparser.c_ast.PtrDecl) and - isinstance(decl.type.type, pycparser.c_ast.TypeDecl) and - isinstance(decl.type.type.type, - pycparser.c_ast.IdentifierType) and - decl.type.type.type.names == ['__dotdotdot__']): - realtype = model.unknown_ptr_type(decl.name) + try: + for decl in iterator: + if isinstance(decl, pycparser.c_ast.Decl): + self._parse_decl(decl) + elif isinstance(decl, pycparser.c_ast.Typedef): + if not decl.name: + raise api.CDefError("typedef does not declare any name", + decl) + if (isinstance(decl.type.type, pycparser.c_ast.IdentifierType) + and decl.type.type.names == ['__dotdotdot__']): + realtype = model.unknown_type(decl.name) + elif (isinstance(decl.type, pycparser.c_ast.PtrDecl) and + isinstance(decl.type.type, pycparser.c_ast.TypeDecl) and + isinstance(decl.type.type.type, + pycparser.c_ast.IdentifierType) and + decl.type.type.type.names == ['__dotdotdot__']): + realtype = model.unknown_ptr_type(decl.name) + else: + realtype = self._get_type(decl.type, name=decl.name) + self._declare('typedef ' + decl.name, realtype) else: - realtype = self._get_type(decl.type, name=decl.name) - self._declare('typedef ' + decl.name, realtype) + raise api.CDefError("unrecognized construct", decl) + except api.FFIError as e: + msg = self._convert_pycparser_error(e, csource) + if msg: + e.args = (e.args[0] + "\n *** Err: %s" % msg,) + raise + + def _add_constants(self, key, val): + if key in self._int_constants: + raise api.FFIError( + "multiple declarations of constant: %s" % (key,)) + self._int_constants[key] = val + + def _process_macros(self, macros): + for key, value in macros.items(): + value = value.strip() + match = _r_int_literal.search(value) + if match is not None: + int_str = match.group(0).lower().rstrip("ul") + + # "010" is not valid oct in py3 + if (int_str.startswith("0") and + int_str != "0" and + not int_str.startswith("0x")): + int_str = "0o" + int_str[1:] + + pyvalue = int(int_str, 0) + self._add_constants(key, pyvalue) + elif value == '...': + self._declare('macro ' + key, value) else: - raise api.CDefError("unrecognized construct", decl) + raise api.CDefError('only supports the syntax "#define ' + '%s ..." 
(literally) or "#define ' + '%s 0x1FF" for now' % (key, key)) def _parse_decl(self, decl): node = decl.type @@ -227,7 +265,7 @@ class Parser(object): self._declare('variable ' + decl.name, tp) def parse_type(self, cdecl): - ast, macros = self._parse('void __dummy(\n%s\n);' % cdecl) + ast, macros = self._parse('void __dummy(\n%s\n);' % cdecl)[:2] assert not macros exprnode = ast.ext[-1].type.args.params[0] if isinstance(exprnode, pycparser.c_ast.ID): @@ -306,7 +344,8 @@ class Parser(object): if ident == 'void': return model.void_type if ident == '__dotdotdot__': - raise api.FFIError('bad usage of "..."') + raise api.FFIError(':%d: bad usage of "..."' % + typenode.coord.line) return resolve_common_type(ident) # if isinstance(type, pycparser.c_ast.Struct): @@ -333,7 +372,8 @@ class Parser(object): return self._get_struct_union_enum_type('union', typenode, name, nested=True) # - raise api.FFIError("bad or unsupported type declaration") + raise api.FFIError(":%d: bad or unsupported type declaration" % + typenode.coord.line) def _parse_function_type(self, typenode, funcname=None): params = list(getattr(typenode.args, 'params', [])) @@ -499,6 +539,10 @@ class Parser(object): if (isinstance(exprnode, pycparser.c_ast.UnaryOp) and exprnode.op == '-'): return -self._parse_constant(exprnode.expr) + # load previously defined int constant + if (isinstance(exprnode, pycparser.c_ast.ID) and + exprnode.name in self._int_constants): + return self._int_constants[exprnode.name] # if partial_length_ok: if (isinstance(exprnode, pycparser.c_ast.ID) and @@ -506,8 +550,8 @@ class Parser(object): self._partial_length = True return '...' # - raise api.FFIError("unsupported expression: expected a " - "simple numeric constant") + raise api.FFIError(":%d: unsupported expression: expected a " + "simple numeric constant" % exprnode.coord.line) def _build_enum_type(self, explicit_name, decls): if decls is not None: @@ -522,6 +566,7 @@ class Parser(object): if enum.value is not None: nextenumvalue = self._parse_constant(enum.value) enumvalues.append(nextenumvalue) + self._add_constants(enum.name, nextenumvalue) nextenumvalue += 1 enumvalues = tuple(enumvalues) tp = model.EnumType(explicit_name, enumerators, enumvalues) @@ -535,3 +580,5 @@ class Parser(object): kind = name.split(' ', 1)[0] if kind in ('typedef', 'struct', 'union', 'enum'): self._declare(name, tp) + for k, v in other._int_constants.items(): + self._add_constants(k, v) diff --git a/Linux_i686/lib/python2.7/site-packages/cffi/ffiplatform.py b/Linux_i686/lib/python2.7/site-packages/cffi/ffiplatform.py index 460ba90..4515d6c 100644 --- a/Linux_i686/lib/python2.7/site-packages/cffi/ffiplatform.py +++ b/Linux_i686/lib/python2.7/site-packages/cffi/ffiplatform.py @@ -38,6 +38,7 @@ def _build(tmpdir, ext): import distutils.errors # dist = Distribution({'ext_modules': [ext]}) + dist.parse_config_files() options = dist.get_option_dict('build_ext') options['force'] = ('ffiplatform', True) options['build_lib'] = ('ffiplatform', tmpdir) diff --git a/Linux_i686/lib/python2.7/site-packages/cffi/vengine_cpy.py b/Linux_i686/lib/python2.7/site-packages/cffi/vengine_cpy.py index d9af334..31793f0 100644 --- a/Linux_i686/lib/python2.7/site-packages/cffi/vengine_cpy.py +++ b/Linux_i686/lib/python2.7/site-packages/cffi/vengine_cpy.py @@ -89,43 +89,54 @@ class VCPythonEngine(object): # by generate_cpy_function_method(). 
prnt('static PyMethodDef _cffi_methods[] = {') self._generate("method") - prnt(' {"_cffi_setup", _cffi_setup, METH_VARARGS},') - prnt(' {NULL, NULL} /* Sentinel */') + prnt(' {"_cffi_setup", _cffi_setup, METH_VARARGS, NULL},') + prnt(' {NULL, NULL, 0, NULL} /* Sentinel */') prnt('};') prnt() # # standard init. modname = self.verifier.get_module_name() - if sys.version_info >= (3,): - prnt('static struct PyModuleDef _cffi_module_def = {') - prnt(' PyModuleDef_HEAD_INIT,') - prnt(' "%s",' % modname) - prnt(' NULL,') - prnt(' -1,') - prnt(' _cffi_methods,') - prnt(' NULL, NULL, NULL, NULL') - prnt('};') - prnt() - initname = 'PyInit_%s' % modname - createmod = 'PyModule_Create(&_cffi_module_def)' - errorcase = 'return NULL' - finalreturn = 'return lib' - else: - initname = 'init%s' % modname - createmod = 'Py_InitModule("%s", _cffi_methods)' % modname - errorcase = 'return' - finalreturn = 'return' + constants = self._chained_list_constants[False] + prnt('#if PY_MAJOR_VERSION >= 3') + prnt() + prnt('static struct PyModuleDef _cffi_module_def = {') + prnt(' PyModuleDef_HEAD_INIT,') + prnt(' "%s",' % modname) + prnt(' NULL,') + prnt(' -1,') + prnt(' _cffi_methods,') + prnt(' NULL, NULL, NULL, NULL') + prnt('};') + prnt() prnt('PyMODINIT_FUNC') - prnt('%s(void)' % initname) + prnt('PyInit_%s(void)' % modname) prnt('{') prnt(' PyObject *lib;') - prnt(' lib = %s;' % createmod) - prnt(' if (lib == NULL || %s < 0)' % ( - self._chained_list_constants[False],)) - prnt(' %s;' % errorcase) - prnt(' _cffi_init();') - prnt(' %s;' % finalreturn) + prnt(' lib = PyModule_Create(&_cffi_module_def);') + prnt(' if (lib == NULL)') + prnt(' return NULL;') + prnt(' if (%s < 0 || _cffi_init() < 0) {' % (constants,)) + prnt(' Py_DECREF(lib);') + prnt(' return NULL;') + prnt(' }') + prnt(' return lib;') prnt('}') + prnt() + prnt('#else') + prnt() + prnt('PyMODINIT_FUNC') + prnt('init%s(void)' % modname) + prnt('{') + prnt(' PyObject *lib;') + prnt(' lib = Py_InitModule("%s", _cffi_methods);' % modname) + prnt(' if (lib == NULL)') + prnt(' return;') + prnt(' if (%s < 0 || _cffi_init() < 0)' % (constants,)) + prnt(' return;') + prnt(' return;') + prnt('}') + prnt() + prnt('#endif') def load_library(self): # XXX review all usages of 'self' here! 
@@ -394,7 +405,7 @@ class VCPythonEngine(object): meth = 'METH_O' else: meth = 'METH_VARARGS' - self._prnt(' {"%s", _cffi_f_%s, %s},' % (name, name, meth)) + self._prnt(' {"%s", _cffi_f_%s, %s, NULL},' % (name, name, meth)) _loading_cpy_function = _loaded_noop @@ -481,8 +492,8 @@ class VCPythonEngine(object): if tp.fldnames is None: return # nothing to do with opaque structs layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) - self._prnt(' {"%s", %s, METH_NOARGS},' % (layoutfuncname, - layoutfuncname)) + self._prnt(' {"%s", %s, METH_NOARGS, NULL},' % (layoutfuncname, + layoutfuncname)) def _loading_struct_or_union(self, tp, prefix, name, module): if tp.fldnames is None: @@ -589,13 +600,7 @@ class VCPythonEngine(object): 'variable type'),)) assert delayed else: - prnt(' if (LONG_MIN <= (%s) && (%s) <= LONG_MAX)' % (name, name)) - prnt(' o = PyInt_FromLong((long)(%s));' % (name,)) - prnt(' else if ((%s) <= 0)' % (name,)) - prnt(' o = PyLong_FromLongLong((long long)(%s));' % (name,)) - prnt(' else') - prnt(' o = PyLong_FromUnsignedLongLong(' - '(unsigned long long)(%s));' % (name,)) + prnt(' o = _cffi_from_c_int_const(%s);' % name) prnt(' if (o == NULL)') prnt(' return -1;') if size_too: @@ -632,13 +637,18 @@ class VCPythonEngine(object): # ---------- # enums + def _enum_funcname(self, prefix, name): + # "$enum_$1" => "___D_enum____D_1" + name = name.replace('$', '___D_') + return '_cffi_e_%s_%s' % (prefix, name) + def _generate_cpy_enum_decl(self, tp, name, prefix='enum'): if tp.partial: for enumerator in tp.enumerators: self._generate_cpy_const(True, enumerator, delayed=False) return # - funcname = '_cffi_e_%s_%s' % (prefix, name) + funcname = self._enum_funcname(prefix, name) prnt = self._prnt prnt('static int %s(PyObject *lib)' % funcname) prnt('{') @@ -760,17 +770,30 @@ cffimod_header = r''' #include #include -#ifdef MS_WIN32 -#include /* for alloca() */ -typedef __int8 int8_t; -typedef __int16 int16_t; -typedef __int32 int32_t; -typedef __int64 int64_t; -typedef unsigned __int8 uint8_t; -typedef unsigned __int16 uint16_t; -typedef unsigned __int32 uint32_t; -typedef unsigned __int64 uint64_t; -typedef unsigned char _Bool; +/* this block of #ifs should be kept exactly identical between + c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py */ +#if defined(_MSC_VER) +# include /* for alloca() */ +# if _MSC_VER < 1600 /* MSVC < 2010 */ + typedef __int8 int8_t; + typedef __int16 int16_t; + typedef __int32 int32_t; + typedef __int64 int64_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + typedef unsigned __int64 uint64_t; +# else +# include +# endif +# if _MSC_VER < 1800 /* MSVC < 2013 */ + typedef unsigned char _Bool; +# endif +#else +# include +# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) +# include +# endif #endif #if PY_MAJOR_VERSION < 3 @@ -795,6 +818,15 @@ typedef unsigned char _Bool; #define _cffi_to_c_double PyFloat_AsDouble #define _cffi_to_c_float PyFloat_AsDouble +#define _cffi_from_c_int_const(x) \ + (((x) > 0) ? \ + ((unsigned long long)(x) <= (unsigned long long)LONG_MAX) ? \ + PyInt_FromLong((long)(x)) : \ + PyLong_FromUnsignedLongLong((unsigned long long)(x)) : \ + ((long long)(x) >= (long long)LONG_MIN) ? \ + PyInt_FromLong((long)(x)) : \ + PyLong_FromLongLong((long long)(x))) + #define _cffi_from_c_int(x, type) \ (((type)-1) > 0 ? /* unsigned */ \ (sizeof(type) < sizeof(long) ? 
PyInt_FromLong(x) : \ @@ -804,14 +836,14 @@ typedef unsigned char _Bool; PyLong_FromLongLong(x))) #define _cffi_to_c_int(o, type) \ - (sizeof(type) == 1 ? (((type)-1) > 0 ? _cffi_to_c_u8(o) \ - : _cffi_to_c_i8(o)) : \ - sizeof(type) == 2 ? (((type)-1) > 0 ? _cffi_to_c_u16(o) \ - : _cffi_to_c_i16(o)) : \ - sizeof(type) == 4 ? (((type)-1) > 0 ? _cffi_to_c_u32(o) \ - : _cffi_to_c_i32(o)) : \ - sizeof(type) == 8 ? (((type)-1) > 0 ? _cffi_to_c_u64(o) \ - : _cffi_to_c_i64(o)) : \ + (sizeof(type) == 1 ? (((type)-1) > 0 ? (type)_cffi_to_c_u8(o) \ + : (type)_cffi_to_c_i8(o)) : \ + sizeof(type) == 2 ? (((type)-1) > 0 ? (type)_cffi_to_c_u16(o) \ + : (type)_cffi_to_c_i16(o)) : \ + sizeof(type) == 4 ? (((type)-1) > 0 ? (type)_cffi_to_c_u32(o) \ + : (type)_cffi_to_c_i32(o)) : \ + sizeof(type) == 8 ? (((type)-1) > 0 ? (type)_cffi_to_c_u64(o) \ + : (type)_cffi_to_c_i64(o)) : \ (Py_FatalError("unsupported size for type " #type), 0)) #define _cffi_to_c_i8 \ @@ -885,25 +917,32 @@ static PyObject *_cffi_setup(PyObject *self, PyObject *args) return PyBool_FromLong(was_alive); } -static void _cffi_init(void) +static int _cffi_init(void) { - PyObject *module = PyImport_ImportModule("_cffi_backend"); - PyObject *c_api_object; + PyObject *module, *c_api_object = NULL; + module = PyImport_ImportModule("_cffi_backend"); if (module == NULL) - return; + goto failure; c_api_object = PyObject_GetAttrString(module, "_C_API"); if (c_api_object == NULL) - return; + goto failure; if (!PyCapsule_CheckExact(c_api_object)) { - Py_DECREF(c_api_object); PyErr_SetNone(PyExc_ImportError); - return; + goto failure; } memcpy(_cffi_exports, PyCapsule_GetPointer(c_api_object, "cffi"), _CFFI_NUM_EXPORTS * sizeof(void *)); + + Py_DECREF(module); Py_DECREF(c_api_object); + return 0; + + failure: + Py_XDECREF(module); + Py_XDECREF(c_api_object); + return -1; } #define _cffi_type(num) ((CTypeDescrObject *)PyList_GET_ITEM(_cffi_types, num)) diff --git a/Linux_i686/lib/python2.7/site-packages/cffi/vengine_gen.py b/Linux_i686/lib/python2.7/site-packages/cffi/vengine_gen.py index f8715c7..133ec7f 100644 --- a/Linux_i686/lib/python2.7/site-packages/cffi/vengine_gen.py +++ b/Linux_i686/lib/python2.7/site-packages/cffi/vengine_gen.py @@ -249,10 +249,10 @@ class VGenericEngine(object): prnt(' /* %s */' % str(e)) # cannot verify it, ignore prnt('}') self.export_symbols.append(layoutfuncname) - prnt('ssize_t %s(ssize_t i)' % (layoutfuncname,)) + prnt('intptr_t %s(intptr_t i)' % (layoutfuncname,)) prnt('{') prnt(' struct _cffi_aligncheck { char x; %s y; };' % cname) - prnt(' static ssize_t nums[] = {') + prnt(' static intptr_t nums[] = {') prnt(' sizeof(%s),' % cname) prnt(' offsetof(struct _cffi_aligncheck, y),') for fname, ftype, fbitsize in tp.enumfields(): @@ -276,7 +276,7 @@ class VGenericEngine(object): return # nothing to do with opaque structs layoutfuncname = '_cffi_layout_%s_%s' % (prefix, name) # - BFunc = self.ffi._typeof_locked("ssize_t(*)(ssize_t)")[0] + BFunc = self.ffi._typeof_locked("intptr_t(*)(intptr_t)")[0] function = module.load_function(BFunc, layoutfuncname) layout = [] num = 0 @@ -410,13 +410,18 @@ class VGenericEngine(object): # ---------- # enums + def _enum_funcname(self, prefix, name): + # "$enum_$1" => "___D_enum____D_1" + name = name.replace('$', '___D_') + return '_cffi_e_%s_%s' % (prefix, name) + def _generate_gen_enum_decl(self, tp, name, prefix='enum'): if tp.partial: for enumerator in tp.enumerators: self._generate_gen_const(True, enumerator) return # - funcname = '_cffi_e_%s_%s' % (prefix, name) + funcname = 
self._enum_funcname(prefix, name) self.export_symbols.append(funcname) prnt = self._prnt prnt('int %s(char *out_error)' % funcname) @@ -430,14 +435,14 @@ class VGenericEngine(object): enumerator, enumerator, enumvalue)) prnt(' char buf[64];') prnt(' if ((%s) < 0)' % enumerator) - prnt(' snprintf(buf, 63, "%%ld", (long)(%s));' % enumerator) + prnt(' sprintf(buf, "%%ld", (long)(%s));' % enumerator) prnt(' else') - prnt(' snprintf(buf, 63, "%%lu", (unsigned long)(%s));' % + prnt(' sprintf(buf, "%%lu", (unsigned long)(%s));' % enumerator) - prnt(' snprintf(out_error, 255,' + prnt(' sprintf(out_error,' ' "%s has the real value %s, not %s",') prnt(' "%s", buf, "%d");' % ( - enumerator, enumvalue)) + enumerator[:100], enumvalue)) prnt(' return -1;') prnt(' }') prnt(' return 0;') @@ -453,7 +458,7 @@ class VGenericEngine(object): else: BType = self.ffi._typeof_locked("char[]")[0] BFunc = self.ffi._typeof_locked("int(*)(char*)")[0] - funcname = '_cffi_e_%s_%s' % (prefix, name) + funcname = self._enum_funcname(prefix, name) function = module.load_function(BFunc, funcname) p = self.ffi.new(BType, 256) if function(p) < 0: @@ -547,20 +552,29 @@ cffimod_header = r''' #include #include /* XXX for ssize_t on some platforms */ -#ifdef _WIN32 -# include -# define snprintf _snprintf -typedef __int8 int8_t; -typedef __int16 int16_t; -typedef __int32 int32_t; -typedef __int64 int64_t; -typedef unsigned __int8 uint8_t; -typedef unsigned __int16 uint16_t; -typedef unsigned __int32 uint32_t; -typedef unsigned __int64 uint64_t; -typedef SSIZE_T ssize_t; -typedef unsigned char _Bool; -#else +/* this block of #ifs should be kept exactly identical between + c/_cffi_backend.c, cffi/vengine_cpy.py, cffi/vengine_gen.py */ +#if defined(_MSC_VER) +# include /* for alloca() */ +# if _MSC_VER < 1600 /* MSVC < 2010 */ + typedef __int8 int8_t; + typedef __int16 int16_t; + typedef __int32 int32_t; + typedef __int64 int64_t; + typedef unsigned __int8 uint8_t; + typedef unsigned __int16 uint16_t; + typedef unsigned __int32 uint32_t; + typedef unsigned __int64 uint64_t; +# else # include +# endif +# if _MSC_VER < 1800 /* MSVC < 2013 */ + typedef unsigned char _Bool; +# endif +#else +# include +# if (defined (__SVR4) && defined (__sun)) || defined(_AIX) +# include +# endif #endif ''' diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/dependency_links.txt b/Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/dependency_links.txt deleted file mode 100644 index 8b13789..0000000 --- a/Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/dependency_links.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/not-zip-safe b/Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/not-zip-safe deleted file mode 100644 index 8b13789..0000000 --- a/Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/not-zip-safe +++ /dev/null @@ -1 +0,0 @@ - diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/PKG-INFO b/Linux_i686/lib/python2.7/site-packages/cryptography-0.5.4.egg-info/PKG-INFO similarity index 94% rename from Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/PKG-INFO rename to Linux_i686/lib/python2.7/site-packages/cryptography-0.5.4.egg-info/PKG-INFO index a3eb033..40d0e13 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/PKG-INFO +++ b/Linux_i686/lib/python2.7/site-packages/cryptography-0.5.4.egg-info/PKG-INFO @@ -1,6 +1,6 @@ 
Metadata-Version: 1.1 Name: cryptography -Version: 0.4 +Version: 0.5.4 Summary: cryptography is a package which provides cryptographic recipes and primitives to Python developers. Home-page: https://github.com/pyca/cryptography Author: The cryptography developers @@ -9,6 +9,10 @@ License: Apache License, Version 2.0 Description: Cryptography ============ + .. image:: https://pypip.in/version/cryptography/badge.svg + :target: https://pypi.python.org/pypi/cryptography/ + :alt: Latest Version + .. image:: https://travis-ci.org/pyca/cryptography.svg?branch=master :target: https://travis-ci.org/pyca/cryptography diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/SOURCES.txt b/Linux_i686/lib/python2.7/site-packages/cryptography-0.5.4.egg-info/SOURCES.txt similarity index 83% rename from Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/SOURCES.txt rename to Linux_i686/lib/python2.7/site-packages/cryptography-0.5.4.egg-info/SOURCES.txt index d15cf6e..064a501 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/SOURCES.txt +++ b/Linux_i686/lib/python2.7/site-packages/cryptography-0.5.4.egg-info/SOURCES.txt @@ -23,18 +23,34 @@ cryptography/hazmat/backends/interfaces.py cryptography/hazmat/backends/multibackend.py cryptography/hazmat/backends/commoncrypto/__init__.py cryptography/hazmat/backends/commoncrypto/backend.py +cryptography/hazmat/backends/commoncrypto/ciphers.py +cryptography/hazmat/backends/commoncrypto/hashes.py +cryptography/hazmat/backends/commoncrypto/hmac.py cryptography/hazmat/backends/openssl/__init__.py cryptography/hazmat/backends/openssl/backend.py +cryptography/hazmat/backends/openssl/ciphers.py +cryptography/hazmat/backends/openssl/cmac.py +cryptography/hazmat/backends/openssl/dsa.py +cryptography/hazmat/backends/openssl/ec.py +cryptography/hazmat/backends/openssl/hashes.py +cryptography/hazmat/backends/openssl/hmac.py +cryptography/hazmat/backends/openssl/rsa.py cryptography/hazmat/bindings/__init__.py cryptography/hazmat/bindings/utils.py -cryptography/hazmat/bindings/__pycache__/_Cryptography_cffi_444d7397xa22f8491.c -cryptography/hazmat/bindings/__pycache__/_Cryptography_cffi_be05eb56x6daa9a79.c +cryptography/hazmat/bindings/__pycache__/_Cryptography_cffi_4ed9e37dx4000d087.c +cryptography/hazmat/bindings/__pycache__/_Cryptography_cffi_d62b3d91x972e1c0b.c cryptography/hazmat/bindings/commoncrypto/__init__.py cryptography/hazmat/bindings/commoncrypto/binding.py +cryptography/hazmat/bindings/commoncrypto/cf.py cryptography/hazmat/bindings/commoncrypto/common_cryptor.py cryptography/hazmat/bindings/commoncrypto/common_digest.py cryptography/hazmat/bindings/commoncrypto/common_hmac.py cryptography/hazmat/bindings/commoncrypto/common_key_derivation.py +cryptography/hazmat/bindings/commoncrypto/secimport.py +cryptography/hazmat/bindings/commoncrypto/secitem.py +cryptography/hazmat/bindings/commoncrypto/seckey.py +cryptography/hazmat/bindings/commoncrypto/seckeychain.py +cryptography/hazmat/bindings/commoncrypto/sectransform.py cryptography/hazmat/bindings/openssl/__init__.py cryptography/hazmat/bindings/openssl/aes.py cryptography/hazmat/bindings/openssl/asn1.py @@ -65,6 +81,7 @@ cryptography/hazmat/bindings/openssl/rand.py cryptography/hazmat/bindings/openssl/rsa.py cryptography/hazmat/bindings/openssl/ssl.py cryptography/hazmat/bindings/openssl/x509.py +cryptography/hazmat/bindings/openssl/x509_vfy.py cryptography/hazmat/bindings/openssl/x509name.py cryptography/hazmat/bindings/openssl/x509v3.py 
cryptography/hazmat/primitives/__init__.py @@ -74,10 +91,12 @@ cryptography/hazmat/primitives/hashes.py cryptography/hazmat/primitives/hmac.py cryptography/hazmat/primitives/interfaces.py cryptography/hazmat/primitives/padding.py +cryptography/hazmat/primitives/serialization.py cryptography/hazmat/primitives/__pycache__/_Cryptography_cffi_684bb40axf342507b.c cryptography/hazmat/primitives/__pycache__/_Cryptography_cffi_8f86901cxc1767c5a.c cryptography/hazmat/primitives/asymmetric/__init__.py cryptography/hazmat/primitives/asymmetric/dsa.py +cryptography/hazmat/primitives/asymmetric/ec.py cryptography/hazmat/primitives/asymmetric/padding.py cryptography/hazmat/primitives/asymmetric/rsa.py cryptography/hazmat/primitives/ciphers/__init__.py @@ -109,6 +128,7 @@ docs/random-numbers.rst docs/security.rst docs/spelling_wordlist.txt docs/_static/.keep +docs/development/c-bindings.rst docs/development/getting-started.rst docs/development/index.rst docs/development/reviewing-patches.rst @@ -140,9 +160,11 @@ docs/hazmat/primitives/padding.rst docs/hazmat/primitives/symmetric-encryption.rst docs/hazmat/primitives/twofactor.rst docs/hazmat/primitives/asymmetric/dsa.rst +docs/hazmat/primitives/asymmetric/ec.rst docs/hazmat/primitives/asymmetric/index.rst docs/hazmat/primitives/asymmetric/padding.rst docs/hazmat/primitives/asymmetric/rsa.rst +docs/hazmat/primitives/asymmetric/serialization.rst docs/hazmat/primitives/mac/cmac.rst docs/hazmat/primitives/mac/hmac.rst docs/hazmat/primitives/mac/index.rst @@ -160,6 +182,8 @@ tests/hazmat/bindings/test_commoncrypto.py tests/hazmat/bindings/test_openssl.py tests/hazmat/bindings/test_utils.py tests/hazmat/primitives/__init__.py +tests/hazmat/primitives/fixtures_dsa.py +tests/hazmat/primitives/fixtures_rsa.py tests/hazmat/primitives/test_3des.py tests/hazmat/primitives/test_aes.py tests/hazmat/primitives/test_arc4.py @@ -171,6 +195,7 @@ tests/hazmat/primitives/test_ciphers.py tests/hazmat/primitives/test_cmac.py tests/hazmat/primitives/test_constant_time.py tests/hazmat/primitives/test_dsa.py +tests/hazmat/primitives/test_ec.py tests/hazmat/primitives/test_hash_vectors.py tests/hazmat/primitives/test_hashes.py tests/hazmat/primitives/test_hkdf.py @@ -183,6 +208,7 @@ tests/hazmat/primitives/test_pbkdf2hmac.py tests/hazmat/primitives/test_pbkdf2hmac_vectors.py tests/hazmat/primitives/test_rsa.py tests/hazmat/primitives/test_seed.py +tests/hazmat/primitives/test_serialization.py tests/hazmat/primitives/utils.py tests/hazmat/primitives/twofactor/__init__.py tests/hazmat/primitives/twofactor/test_hotp.py diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/dependency_links.txt b/Linux_i686/lib/python2.7/site-packages/cryptography-0.5.4.egg-info/dependency_links.txt similarity index 100% rename from Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/dependency_links.txt rename to Linux_i686/lib/python2.7/site-packages/cryptography-0.5.4.egg-info/dependency_links.txt diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/installed-files.txt b/Linux_i686/lib/python2.7/site-packages/cryptography-0.5.4.egg-info/installed-files.txt similarity index 78% rename from Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/installed-files.txt rename to Linux_i686/lib/python2.7/site-packages/cryptography-0.5.4.egg-info/installed-files.txt index 2f17187..91ce4dd 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/installed-files.txt +++ 
b/Linux_i686/lib/python2.7/site-packages/cryptography-0.5.4.egg-info/installed-files.txt @@ -11,14 +11,25 @@ ../cryptography/hazmat/primitives/hashes.py ../cryptography/hazmat/primitives/cmac.py ../cryptography/hazmat/primitives/interfaces.py +../cryptography/hazmat/primitives/serialization.py ../cryptography/hazmat/primitives/constant_time.py ../cryptography/hazmat/primitives/__init__.py ../cryptography/hazmat/primitives/padding.py ../cryptography/hazmat/bindings/__init__.py ../cryptography/hazmat/bindings/utils.py +../cryptography/hazmat/backends/openssl/hmac.py +../cryptography/hazmat/backends/openssl/rsa.py +../cryptography/hazmat/backends/openssl/hashes.py +../cryptography/hazmat/backends/openssl/cmac.py +../cryptography/hazmat/backends/openssl/dsa.py ../cryptography/hazmat/backends/openssl/__init__.py +../cryptography/hazmat/backends/openssl/ciphers.py ../cryptography/hazmat/backends/openssl/backend.py +../cryptography/hazmat/backends/openssl/ec.py +../cryptography/hazmat/backends/commoncrypto/hmac.py +../cryptography/hazmat/backends/commoncrypto/hashes.py ../cryptography/hazmat/backends/commoncrypto/__init__.py +../cryptography/hazmat/backends/commoncrypto/ciphers.py ../cryptography/hazmat/backends/commoncrypto/backend.py ../cryptography/hazmat/primitives/twofactor/totp.py ../cryptography/hazmat/primitives/twofactor/__init__.py @@ -30,6 +41,7 @@ ../cryptography/hazmat/primitives/asymmetric/dsa.py ../cryptography/hazmat/primitives/asymmetric/__init__.py ../cryptography/hazmat/primitives/asymmetric/padding.py +../cryptography/hazmat/primitives/asymmetric/ec.py ../cryptography/hazmat/primitives/ciphers/modes.py ../cryptography/hazmat/primitives/ciphers/__init__.py ../cryptography/hazmat/primitives/ciphers/base.py @@ -50,6 +62,7 @@ ../cryptography/hazmat/bindings/openssl/x509.py ../cryptography/hazmat/bindings/openssl/err.py ../cryptography/hazmat/bindings/openssl/dsa.py +../cryptography/hazmat/bindings/openssl/x509_vfy.py ../cryptography/hazmat/bindings/openssl/dh.py ../cryptography/hazmat/bindings/openssl/pkcs12.py ../cryptography/hazmat/bindings/openssl/__init__.py @@ -69,9 +82,15 @@ ../cryptography/hazmat/bindings/commoncrypto/common_cryptor.py ../cryptography/hazmat/bindings/commoncrypto/binding.py ../cryptography/hazmat/bindings/commoncrypto/common_key_derivation.py +../cryptography/hazmat/bindings/commoncrypto/seckey.py ../cryptography/hazmat/bindings/commoncrypto/common_hmac.py ../cryptography/hazmat/bindings/commoncrypto/common_digest.py ../cryptography/hazmat/bindings/commoncrypto/__init__.py +../cryptography/hazmat/bindings/commoncrypto/sectransform.py +../cryptography/hazmat/bindings/commoncrypto/secitem.py +../cryptography/hazmat/bindings/commoncrypto/secimport.py +../cryptography/hazmat/bindings/commoncrypto/cf.py +../cryptography/hazmat/bindings/commoncrypto/seckeychain.py ../cryptography/exceptions.pyc ../cryptography/__init__.pyc ../cryptography/fernet.pyc @@ -85,14 +104,25 @@ ../cryptography/hazmat/primitives/hashes.pyc ../cryptography/hazmat/primitives/cmac.pyc ../cryptography/hazmat/primitives/interfaces.pyc +../cryptography/hazmat/primitives/serialization.pyc ../cryptography/hazmat/primitives/constant_time.pyc ../cryptography/hazmat/primitives/__init__.pyc ../cryptography/hazmat/primitives/padding.pyc ../cryptography/hazmat/bindings/__init__.pyc ../cryptography/hazmat/bindings/utils.pyc +../cryptography/hazmat/backends/openssl/hmac.pyc +../cryptography/hazmat/backends/openssl/rsa.pyc +../cryptography/hazmat/backends/openssl/hashes.pyc 
+../cryptography/hazmat/backends/openssl/cmac.pyc +../cryptography/hazmat/backends/openssl/dsa.pyc ../cryptography/hazmat/backends/openssl/__init__.pyc +../cryptography/hazmat/backends/openssl/ciphers.pyc ../cryptography/hazmat/backends/openssl/backend.pyc +../cryptography/hazmat/backends/openssl/ec.pyc +../cryptography/hazmat/backends/commoncrypto/hmac.pyc +../cryptography/hazmat/backends/commoncrypto/hashes.pyc ../cryptography/hazmat/backends/commoncrypto/__init__.pyc +../cryptography/hazmat/backends/commoncrypto/ciphers.pyc ../cryptography/hazmat/backends/commoncrypto/backend.pyc ../cryptography/hazmat/primitives/twofactor/totp.pyc ../cryptography/hazmat/primitives/twofactor/__init__.pyc @@ -104,6 +134,7 @@ ../cryptography/hazmat/primitives/asymmetric/dsa.pyc ../cryptography/hazmat/primitives/asymmetric/__init__.pyc ../cryptography/hazmat/primitives/asymmetric/padding.pyc +../cryptography/hazmat/primitives/asymmetric/ec.pyc ../cryptography/hazmat/primitives/ciphers/modes.pyc ../cryptography/hazmat/primitives/ciphers/__init__.pyc ../cryptography/hazmat/primitives/ciphers/base.pyc @@ -124,6 +155,7 @@ ../cryptography/hazmat/bindings/openssl/x509.pyc ../cryptography/hazmat/bindings/openssl/err.pyc ../cryptography/hazmat/bindings/openssl/dsa.pyc +../cryptography/hazmat/bindings/openssl/x509_vfy.pyc ../cryptography/hazmat/bindings/openssl/dh.pyc ../cryptography/hazmat/bindings/openssl/pkcs12.pyc ../cryptography/hazmat/bindings/openssl/__init__.pyc @@ -143,10 +175,16 @@ ../cryptography/hazmat/bindings/commoncrypto/common_cryptor.pyc ../cryptography/hazmat/bindings/commoncrypto/binding.pyc ../cryptography/hazmat/bindings/commoncrypto/common_key_derivation.pyc +../cryptography/hazmat/bindings/commoncrypto/seckey.pyc ../cryptography/hazmat/bindings/commoncrypto/common_hmac.pyc ../cryptography/hazmat/bindings/commoncrypto/common_digest.pyc ../cryptography/hazmat/bindings/commoncrypto/__init__.pyc -../cryptography/_Cryptography_cffi_444d7397xa22f8491.so +../cryptography/hazmat/bindings/commoncrypto/sectransform.pyc +../cryptography/hazmat/bindings/commoncrypto/secitem.pyc +../cryptography/hazmat/bindings/commoncrypto/secimport.pyc +../cryptography/hazmat/bindings/commoncrypto/cf.pyc +../cryptography/hazmat/bindings/commoncrypto/seckeychain.pyc +../cryptography/_Cryptography_cffi_4ed9e37dx4000d087.so ../cryptography/_Cryptography_cffi_684bb40axf342507b.so ../cryptography/_Cryptography_cffi_8f86901cxc1767c5a.so ./ diff --git a/Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/not-zip-safe b/Linux_i686/lib/python2.7/site-packages/cryptography-0.5.4.egg-info/not-zip-safe similarity index 100% rename from Linux_i686/lib/python2.7/site-packages/Flask_Migrate-1.2.0.egg-info/not-zip-safe rename to Linux_i686/lib/python2.7/site-packages/cryptography-0.5.4.egg-info/not-zip-safe diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/requires.txt b/Linux_i686/lib/python2.7/site-packages/cryptography-0.5.4.egg-info/requires.txt similarity index 100% rename from Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/requires.txt rename to Linux_i686/lib/python2.7/site-packages/cryptography-0.5.4.egg-info/requires.txt diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/top_level.txt b/Linux_i686/lib/python2.7/site-packages/cryptography-0.5.4.egg-info/top_level.txt similarity index 70% rename from Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/top_level.txt rename to 
Linux_i686/lib/python2.7/site-packages/cryptography-0.5.4.egg-info/top_level.txt index 2cead95..c05850b 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography-0.4.egg-info/top_level.txt +++ b/Linux_i686/lib/python2.7/site-packages/cryptography-0.5.4.egg-info/top_level.txt @@ -1,4 +1,4 @@ -_Cryptography_cffi_444d7397xa22f8491 _Cryptography_cffi_684bb40axf342507b +_Cryptography_cffi_4ed9e37dx4000d087 cryptography _Cryptography_cffi_8f86901cxc1767c5a diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/_Cryptography_cffi_444d7397xa22f8491.so b/Linux_i686/lib/python2.7/site-packages/cryptography/_Cryptography_cffi_444d7397xa22f8491.so deleted file mode 100755 index 6bbdaa7..0000000 Binary files a/Linux_i686/lib/python2.7/site-packages/cryptography/_Cryptography_cffi_444d7397xa22f8491.so and /dev/null differ diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/_Cryptography_cffi_4ed9e37dx4000d087.so b/Linux_i686/lib/python2.7/site-packages/cryptography/_Cryptography_cffi_4ed9e37dx4000d087.so new file mode 100755 index 0000000..f2e963c Binary files /dev/null and b/Linux_i686/lib/python2.7/site-packages/cryptography/_Cryptography_cffi_4ed9e37dx4000d087.so differ diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/_Cryptography_cffi_684bb40axf342507b.so b/Linux_i686/lib/python2.7/site-packages/cryptography/_Cryptography_cffi_684bb40axf342507b.so index 7faef53..d6794bf 100755 Binary files a/Linux_i686/lib/python2.7/site-packages/cryptography/_Cryptography_cffi_684bb40axf342507b.so and b/Linux_i686/lib/python2.7/site-packages/cryptography/_Cryptography_cffi_684bb40axf342507b.so differ diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/_Cryptography_cffi_8f86901cxc1767c5a.so b/Linux_i686/lib/python2.7/site-packages/cryptography/_Cryptography_cffi_8f86901cxc1767c5a.so index 17a33c1..435b6d0 100755 Binary files a/Linux_i686/lib/python2.7/site-packages/cryptography/_Cryptography_cffi_8f86901cxc1767c5a.so and b/Linux_i686/lib/python2.7/site-packages/cryptography/_Cryptography_cffi_8f86901cxc1767c5a.so differ diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/__about__.py b/Linux_i686/lib/python2.7/site-packages/cryptography/__about__.py index d1151dc..048a2d9 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/__about__.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/__about__.py @@ -22,10 +22,10 @@ __summary__ = ("cryptography is a package which provides cryptographic recipes" " and primitives to Python developers.") __uri__ = "https://github.com/pyca/cryptography" -__version__ = "0.4" +__version__ = "0.5.4" __author__ = "The cryptography developers" __email__ = "cryptography-dev@python.org" __license__ = "Apache License, Version 2.0" -__copyright__ = "Copyright 2013-2014 %s" % __author__ +__copyright__ = "Copyright 2013-2014 {0}".format(__author__) diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/exceptions.py b/Linux_i686/lib/python2.7/site-packages/cryptography/exceptions.py index b4ee8fe..c14763f 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/exceptions.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/exceptions.py @@ -21,6 +21,8 @@ class _Reasons(object): UNSUPPORTED_PADDING = object() UNSUPPORTED_MGF = object() UNSUPPORTED_PUBLIC_KEY_ALGORITHM = object() + UNSUPPORTED_ELLIPTIC_CURVE = object() + UNSUPPORTED_SERIALIZATION = object() class UnsupportedAlgorithm(Exception): diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/fernet.py 
b/Linux_i686/lib/python2.7/site-packages/cryptography/fernet.py index 674ce8a..cdb9bdc 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/fernet.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/fernet.py @@ -43,7 +43,7 @@ class Fernet(object): key = base64.urlsafe_b64decode(key) if len(key) != 32: raise ValueError( - "Fernet key must be 32 url-safe base64-encoded bytes" + "Fernet key must be 32 url-safe base64-encoded bytes." ) self._signing_key = key[:16] @@ -60,10 +60,8 @@ class Fernet(object): return self._encrypt_from_parts(data, current_time, iv) def _encrypt_from_parts(self, data, current_time, iv): - if isinstance(data, six.text_type): - raise TypeError( - "Unicode-objects must be encoded before encryption" - ) + if not isinstance(data, bytes): + raise TypeError("data must be bytes.") padder = padding.PKCS7(algorithms.AES.block_size).padder() padded_data = padder.update(data) + padder.finalize() @@ -82,10 +80,8 @@ class Fernet(object): return base64.urlsafe_b64encode(basic_parts + hmac) def decrypt(self, token, ttl=None): - if isinstance(token, six.text_type): - raise TypeError( - "Unicode-objects must be encoded before decryption" - ) + if not isinstance(token, bytes): + raise TypeError("token must be bytes.") current_time = int(time.time()) diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/commoncrypto/backend.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/commoncrypto/backend.py index 4faca73..7bab979 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/commoncrypto/backend.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/commoncrypto/backend.py @@ -16,19 +16,21 @@ from __future__ import absolute_import, division, print_function from collections import namedtuple from cryptography import utils -from cryptography.exceptions import ( - InternalError, InvalidTag, UnsupportedAlgorithm, _Reasons +from cryptography.exceptions import InternalError +from cryptography.hazmat.backends.commoncrypto.ciphers import ( + _CipherContext, _GCMCipherContext ) +from cryptography.hazmat.backends.commoncrypto.hashes import _HashContext +from cryptography.hazmat.backends.commoncrypto.hmac import _HMACContext from cryptography.hazmat.backends.interfaces import ( CipherBackend, HMACBackend, HashBackend, PBKDF2HMACBackend ) from cryptography.hazmat.bindings.commoncrypto.binding import Binding -from cryptography.hazmat.primitives import constant_time, interfaces from cryptography.hazmat.primitives.ciphers.algorithms import ( AES, ARC4, Blowfish, CAST5, TripleDES ) from cryptography.hazmat.primitives.ciphers.modes import ( - CBC, CFB, CTR, ECB, GCM, OFB + CBC, CFB, CFB8, CTR, ECB, GCM, OFB ) @@ -147,14 +149,14 @@ class Backend(object): buf, length ) - self._check_response(res) + self._check_cipher_response(res) return self._ffi.buffer(buf)[:] def _register_cipher_adapter(self, cipher_cls, cipher_const, mode_cls, mode_const): if (cipher_cls, mode_cls) in self._cipher_registry: - raise ValueError("Duplicate registration for: {0} {1}".format( + raise ValueError("Duplicate registration for: {0} {1}.".format( cipher_cls, mode_cls) ) self._cipher_registry[cipher_cls, mode_cls] = (cipher_const, @@ -165,6 +167,7 @@ class Backend(object): (CBC, self._lib.kCCModeCBC), (ECB, self._lib.kCCModeECB), (CFB, self._lib.kCCModeCFB), + (CFB8, self._lib.kCCModeCFB8), (OFB, self._lib.kCCModeOFB), (CTR, self._lib.kCCModeCTR), (GCM, self._lib.kCCModeGCM), @@ -177,7 +180,9 @@ class 
Backend(object): ) for mode_cls, mode_const in [ (CBC, self._lib.kCCModeCBC), + (ECB, self._lib.kCCModeECB), (CFB, self._lib.kCCModeCFB), + (CFB8, self._lib.kCCModeCFB8), (OFB, self._lib.kCCModeOFB), ]: self._register_cipher_adapter( @@ -218,7 +223,7 @@ class Backend(object): self._lib.kCCModeRC4 ) - def _check_response(self, response): + def _check_cipher_response(self, response): if response == self._lib.kCCSuccess: return elif response == self._lib.kCCAlignmentError: @@ -226,7 +231,7 @@ class Backend(object): # rdar://15589470 raise ValueError( "The length of the provided data is not a multiple of " - "the block length" + "the block length." ) else: raise InternalError( @@ -234,266 +239,15 @@ class Backend(object): " Code: {0}.".format(response) ) - -def _release_cipher_ctx(ctx): - """ - Called by the garbage collector and used to safely dereference and - release the context. - """ - if ctx[0] != backend._ffi.NULL: - res = backend._lib.CCCryptorRelease(ctx[0]) - backend._check_response(res) - ctx[0] = backend._ffi.NULL - - -@utils.register_interface(interfaces.CipherContext) -class _CipherContext(object): - def __init__(self, backend, cipher, mode, operation): - self._backend = backend - self._cipher = cipher - self._mode = mode - self._operation = operation - # There is a bug in CommonCrypto where block ciphers do not raise - # kCCAlignmentError when finalizing if you supply non-block aligned - # data. To work around this we need to keep track of the block - # alignment ourselves, but only for alg+mode combos that require - # block alignment. OFB, CFB, and CTR make a block cipher algorithm - # into a stream cipher so we don't need to track them (and thus their - # block size is effectively 1 byte just like OpenSSL/CommonCrypto - # treat RC4 and other stream cipher block sizes). - # This bug has been filed as rdar://15589470 - self._bytes_processed = 0 - if (isinstance(cipher, interfaces.BlockCipherAlgorithm) and not - isinstance(mode, (OFB, CFB, CTR))): - self._byte_block_size = cipher.block_size // 8 - else: - self._byte_block_size = 1 - - registry = self._backend._cipher_registry - try: - cipher_enum, mode_enum = registry[type(cipher), type(mode)] - except KeyError: - raise UnsupportedAlgorithm( - "cipher {0} in {1} mode is not supported " - "by this backend".format( - cipher.name, mode.name if mode else mode), - _Reasons.UNSUPPORTED_CIPHER - ) - - ctx = self._backend._ffi.new("CCCryptorRef *") - ctx = self._backend._ffi.gc(ctx, _release_cipher_ctx) - - if isinstance(mode, interfaces.ModeWithInitializationVector): - iv_nonce = mode.initialization_vector - elif isinstance(mode, interfaces.ModeWithNonce): - iv_nonce = mode.nonce - else: - iv_nonce = self._backend._ffi.NULL - - if isinstance(mode, CTR): - mode_option = self._backend._lib.kCCModeOptionCTR_BE - else: - mode_option = 0 - - res = self._backend._lib.CCCryptorCreateWithMode( - operation, - mode_enum, cipher_enum, - self._backend._lib.ccNoPadding, iv_nonce, - cipher.key, len(cipher.key), - self._backend._ffi.NULL, 0, 0, mode_option, ctx) - self._backend._check_response(res) - - self._ctx = ctx - - def update(self, data): - # Count bytes processed to handle block alignment. 
- self._bytes_processed += len(data) - buf = self._backend._ffi.new( - "unsigned char[]", len(data) + self._byte_block_size - 1) - outlen = self._backend._ffi.new("size_t *") - res = self._backend._lib.CCCryptorUpdate( - self._ctx[0], data, len(data), buf, - len(data) + self._byte_block_size - 1, outlen) - self._backend._check_response(res) - return self._backend._ffi.buffer(buf)[:outlen[0]] - - def finalize(self): - # Raise error if block alignment is wrong. - if self._bytes_processed % self._byte_block_size: - raise ValueError( - "The length of the provided data is not a multiple of " - "the block length" - ) - buf = self._backend._ffi.new("unsigned char[]", self._byte_block_size) - outlen = self._backend._ffi.new("size_t *") - res = self._backend._lib.CCCryptorFinal( - self._ctx[0], buf, len(buf), outlen) - self._backend._check_response(res) - _release_cipher_ctx(self._ctx) - return self._backend._ffi.buffer(buf)[:outlen[0]] - - -@utils.register_interface(interfaces.AEADCipherContext) -@utils.register_interface(interfaces.AEADEncryptionContext) -class _GCMCipherContext(object): - def __init__(self, backend, cipher, mode, operation): - self._backend = backend - self._cipher = cipher - self._mode = mode - self._operation = operation - self._tag = None - - registry = self._backend._cipher_registry - try: - cipher_enum, mode_enum = registry[type(cipher), type(mode)] - except KeyError: - raise UnsupportedAlgorithm( - "cipher {0} in {1} mode is not supported " - "by this backend".format( - cipher.name, mode.name if mode else mode), - _Reasons.UNSUPPORTED_CIPHER - ) - - ctx = self._backend._ffi.new("CCCryptorRef *") - ctx = self._backend._ffi.gc(ctx, _release_cipher_ctx) - - self._ctx = ctx - - res = self._backend._lib.CCCryptorCreateWithMode( - operation, - mode_enum, cipher_enum, - self._backend._lib.ccNoPadding, - self._backend._ffi.NULL, - cipher.key, len(cipher.key), - self._backend._ffi.NULL, 0, 0, 0, self._ctx) - self._backend._check_response(res) - - res = self._backend._lib.CCCryptorGCMAddIV( - self._ctx[0], - mode.initialization_vector, - len(mode.initialization_vector) - ) - self._backend._check_response(res) - - def update(self, data): - buf = self._backend._ffi.new("unsigned char[]", len(data)) - args = (self._ctx[0], data, len(data), buf) - if self._operation == self._backend._lib.kCCEncrypt: - res = self._backend._lib.CCCryptorGCMEncrypt(*args) - else: - res = self._backend._lib.CCCryptorGCMDecrypt(*args) - - self._backend._check_response(res) - return self._backend._ffi.buffer(buf)[:] - - def finalize(self): - tag_size = self._cipher.block_size // 8 - tag_buf = self._backend._ffi.new("unsigned char[]", tag_size) - tag_len = self._backend._ffi.new("size_t *", tag_size) - res = backend._lib.CCCryptorGCMFinal(self._ctx[0], tag_buf, tag_len) - self._backend._check_response(res) - _release_cipher_ctx(self._ctx) - self._tag = self._backend._ffi.buffer(tag_buf)[:] - if (self._operation == self._backend._lib.kCCDecrypt and - not constant_time.bytes_eq( - self._tag[:len(self._mode.tag)], self._mode.tag - )): - raise InvalidTag - return b"" - - def authenticate_additional_data(self, data): - res = self._backend._lib.CCCryptorGCMAddAAD( - self._ctx[0], data, len(data) - ) - self._backend._check_response(res) - - @property - def tag(self): - return self._tag - - -@utils.register_interface(interfaces.HashContext) -class _HashContext(object): - def __init__(self, backend, algorithm, ctx=None): - self.algorithm = algorithm - self._backend = backend - - if ctx is None: - try: - methods = 
self._backend._hash_mapping[self.algorithm.name] - except KeyError: - raise UnsupportedAlgorithm( - "{0} is not a supported hash on this backend".format( - algorithm.name), - _Reasons.UNSUPPORTED_HASH - ) - ctx = self._backend._ffi.new(methods.ctx) - res = methods.hash_init(ctx) - assert res == 1 - - self._ctx = ctx - - def copy(self): - methods = self._backend._hash_mapping[self.algorithm.name] - new_ctx = self._backend._ffi.new(methods.ctx) - # CommonCrypto has no APIs for copying hashes, so we have to copy the - # underlying struct. - new_ctx[0] = self._ctx[0] - - return _HashContext(self._backend, self.algorithm, ctx=new_ctx) - - def update(self, data): - methods = self._backend._hash_mapping[self.algorithm.name] - res = methods.hash_update(self._ctx, data, len(data)) - assert res == 1 - - def finalize(self): - methods = self._backend._hash_mapping[self.algorithm.name] - buf = self._backend._ffi.new("unsigned char[]", - self.algorithm.digest_size) - res = methods.hash_final(buf, self._ctx) - assert res == 1 - return self._backend._ffi.buffer(buf)[:] - - -@utils.register_interface(interfaces.HashContext) -class _HMACContext(object): - def __init__(self, backend, key, algorithm, ctx=None): - self.algorithm = algorithm - self._backend = backend - if ctx is None: - ctx = self._backend._ffi.new("CCHmacContext *") - try: - alg = self._backend._supported_hmac_algorithms[algorithm.name] - except KeyError: - raise UnsupportedAlgorithm( - "{0} is not a supported HMAC hash on this backend".format( - algorithm.name), - _Reasons.UNSUPPORTED_HASH - ) - - self._backend._lib.CCHmacInit(ctx, alg, key, len(key)) - - self._ctx = ctx - self._key = key - - def copy(self): - copied_ctx = self._backend._ffi.new("CCHmacContext *") - # CommonCrypto has no APIs for copying HMACs, so we have to copy the - # underlying struct. - copied_ctx[0] = self._ctx[0] - return _HMACContext( - self._backend, self._key, self.algorithm, ctx=copied_ctx - ) - - def update(self, data): - self._backend._lib.CCHmacUpdate(self._ctx, data, len(data)) - - def finalize(self): - buf = self._backend._ffi.new("unsigned char[]", - self.algorithm.digest_size) - self._backend._lib.CCHmacFinal(self._ctx, buf) - return self._backend._ffi.buffer(buf)[:] + def _release_cipher_ctx(self, ctx): + """ + Called by the garbage collector and used to safely dereference and + release the context. + """ + if ctx[0] != self._ffi.NULL: + res = self._lib.CCCryptorRelease(ctx[0]) + self._check_cipher_response(res) + ctx[0] = self._ffi.NULL backend = Backend() diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/commoncrypto/ciphers.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/commoncrypto/ciphers.py new file mode 100644 index 0000000..525500c --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/commoncrypto/ciphers.py @@ -0,0 +1,191 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
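(The backend changes above register CFB8 as a supported block-cipher mode, and the new ciphers module that follows implements the corresponding contexts. A minimal usage sketch, assuming cryptography >= 0.5 with any backend that supports AES in CFB8; the key and IV values are placeholders for illustration only:

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

    key = b"\x00" * 16   # demo key only, never use a fixed key in practice
    iv = b"\x00" * 16    # demo IV only
    cipher = Cipher(algorithms.AES(key), modes.CFB8(iv), backend=default_backend())
    encryptor = cipher.encryptor()
    ciphertext = encryptor.update(b"some plaintext") + encryptor.finalize()
    decryptor = cipher.decryptor()
    assert decryptor.update(ciphertext) + decryptor.finalize() == b"some plaintext"
)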
+ +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.exceptions import ( + InvalidTag, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.primitives import constant_time, interfaces +from cryptography.hazmat.primitives.ciphers.modes import ( + CFB, CFB8, CTR, OFB +) + + +@utils.register_interface(interfaces.CipherContext) +class _CipherContext(object): + def __init__(self, backend, cipher, mode, operation): + self._backend = backend + self._cipher = cipher + self._mode = mode + self._operation = operation + # There is a bug in CommonCrypto where block ciphers do not raise + # kCCAlignmentError when finalizing if you supply non-block aligned + # data. To work around this we need to keep track of the block + # alignment ourselves, but only for alg+mode combos that require + # block alignment. OFB, CFB, and CTR make a block cipher algorithm + # into a stream cipher so we don't need to track them (and thus their + # block size is effectively 1 byte just like OpenSSL/CommonCrypto + # treat RC4 and other stream cipher block sizes). + # This bug has been filed as rdar://15589470 + self._bytes_processed = 0 + if (isinstance(cipher, interfaces.BlockCipherAlgorithm) and not + isinstance(mode, (OFB, CFB, CFB8, CTR))): + self._byte_block_size = cipher.block_size // 8 + else: + self._byte_block_size = 1 + + registry = self._backend._cipher_registry + try: + cipher_enum, mode_enum = registry[type(cipher), type(mode)] + except KeyError: + raise UnsupportedAlgorithm( + "cipher {0} in {1} mode is not supported " + "by this backend.".format( + cipher.name, mode.name if mode else mode), + _Reasons.UNSUPPORTED_CIPHER + ) + + ctx = self._backend._ffi.new("CCCryptorRef *") + ctx = self._backend._ffi.gc(ctx, self._backend._release_cipher_ctx) + + if isinstance(mode, interfaces.ModeWithInitializationVector): + iv_nonce = mode.initialization_vector + elif isinstance(mode, interfaces.ModeWithNonce): + iv_nonce = mode.nonce + else: + iv_nonce = self._backend._ffi.NULL + + if isinstance(mode, CTR): + mode_option = self._backend._lib.kCCModeOptionCTR_BE + else: + mode_option = 0 + + res = self._backend._lib.CCCryptorCreateWithMode( + operation, + mode_enum, cipher_enum, + self._backend._lib.ccNoPadding, iv_nonce, + cipher.key, len(cipher.key), + self._backend._ffi.NULL, 0, 0, mode_option, ctx) + self._backend._check_cipher_response(res) + + self._ctx = ctx + + def update(self, data): + # Count bytes processed to handle block alignment. + self._bytes_processed += len(data) + buf = self._backend._ffi.new( + "unsigned char[]", len(data) + self._byte_block_size - 1) + outlen = self._backend._ffi.new("size_t *") + res = self._backend._lib.CCCryptorUpdate( + self._ctx[0], data, len(data), buf, + len(data) + self._byte_block_size - 1, outlen) + self._backend._check_cipher_response(res) + return self._backend._ffi.buffer(buf)[:outlen[0]] + + def finalize(self): + # Raise error if block alignment is wrong. + if self._bytes_processed % self._byte_block_size: + raise ValueError( + "The length of the provided data is not a multiple of " + "the block length." 
+ ) + buf = self._backend._ffi.new("unsigned char[]", self._byte_block_size) + outlen = self._backend._ffi.new("size_t *") + res = self._backend._lib.CCCryptorFinal( + self._ctx[0], buf, len(buf), outlen) + self._backend._check_cipher_response(res) + self._backend._release_cipher_ctx(self._ctx) + return self._backend._ffi.buffer(buf)[:outlen[0]] + + +@utils.register_interface(interfaces.AEADCipherContext) +@utils.register_interface(interfaces.AEADEncryptionContext) +class _GCMCipherContext(object): + def __init__(self, backend, cipher, mode, operation): + self._backend = backend + self._cipher = cipher + self._mode = mode + self._operation = operation + self._tag = None + + registry = self._backend._cipher_registry + try: + cipher_enum, mode_enum = registry[type(cipher), type(mode)] + except KeyError: + raise UnsupportedAlgorithm( + "cipher {0} in {1} mode is not supported " + "by this backend.".format( + cipher.name, mode.name if mode else mode), + _Reasons.UNSUPPORTED_CIPHER + ) + + ctx = self._backend._ffi.new("CCCryptorRef *") + ctx = self._backend._ffi.gc(ctx, self._backend._release_cipher_ctx) + + self._ctx = ctx + + res = self._backend._lib.CCCryptorCreateWithMode( + operation, + mode_enum, cipher_enum, + self._backend._lib.ccNoPadding, + self._backend._ffi.NULL, + cipher.key, len(cipher.key), + self._backend._ffi.NULL, 0, 0, 0, self._ctx) + self._backend._check_cipher_response(res) + + res = self._backend._lib.CCCryptorGCMAddIV( + self._ctx[0], + mode.initialization_vector, + len(mode.initialization_vector) + ) + self._backend._check_cipher_response(res) + + def update(self, data): + buf = self._backend._ffi.new("unsigned char[]", len(data)) + args = (self._ctx[0], data, len(data), buf) + if self._operation == self._backend._lib.kCCEncrypt: + res = self._backend._lib.CCCryptorGCMEncrypt(*args) + else: + res = self._backend._lib.CCCryptorGCMDecrypt(*args) + + self._backend._check_cipher_response(res) + return self._backend._ffi.buffer(buf)[:] + + def finalize(self): + tag_size = self._cipher.block_size // 8 + tag_buf = self._backend._ffi.new("unsigned char[]", tag_size) + tag_len = self._backend._ffi.new("size_t *", tag_size) + res = self._backend._lib.CCCryptorGCMFinal( + self._ctx[0], tag_buf, tag_len + ) + self._backend._check_cipher_response(res) + self._backend._release_cipher_ctx(self._ctx) + self._tag = self._backend._ffi.buffer(tag_buf)[:] + if (self._operation == self._backend._lib.kCCDecrypt and + not constant_time.bytes_eq( + self._tag[:len(self._mode.tag)], self._mode.tag + )): + raise InvalidTag + return b"" + + def authenticate_additional_data(self, data): + res = self._backend._lib.CCCryptorGCMAddAAD( + self._ctx[0], data, len(data) + ) + self._backend._check_cipher_response(res) + + @property + def tag(self): + return self._tag diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/commoncrypto/hashes.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/commoncrypto/hashes.py new file mode 100644 index 0000000..ebad720 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/commoncrypto/hashes.py @@ -0,0 +1,62 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.primitives import interfaces + + +@utils.register_interface(interfaces.HashContext) +class _HashContext(object): + def __init__(self, backend, algorithm, ctx=None): + self.algorithm = algorithm + self._backend = backend + + if ctx is None: + try: + methods = self._backend._hash_mapping[self.algorithm.name] + except KeyError: + raise UnsupportedAlgorithm( + "{0} is not a supported hash on this backend.".format( + algorithm.name), + _Reasons.UNSUPPORTED_HASH + ) + ctx = self._backend._ffi.new(methods.ctx) + res = methods.hash_init(ctx) + assert res == 1 + + self._ctx = ctx + + def copy(self): + methods = self._backend._hash_mapping[self.algorithm.name] + new_ctx = self._backend._ffi.new(methods.ctx) + # CommonCrypto has no APIs for copying hashes, so we have to copy the + # underlying struct. + new_ctx[0] = self._ctx[0] + + return _HashContext(self._backend, self.algorithm, ctx=new_ctx) + + def update(self, data): + methods = self._backend._hash_mapping[self.algorithm.name] + res = methods.hash_update(self._ctx, data, len(data)) + assert res == 1 + + def finalize(self): + methods = self._backend._hash_mapping[self.algorithm.name] + buf = self._backend._ffi.new("unsigned char[]", + self.algorithm.digest_size) + res = methods.hash_final(buf, self._ctx) + assert res == 1 + return self._backend._ffi.buffer(buf)[:] diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/commoncrypto/hmac.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/commoncrypto/hmac.py new file mode 100644 index 0000000..ec3a878 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/commoncrypto/hmac.py @@ -0,0 +1,58 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.primitives import interfaces + + +@utils.register_interface(interfaces.HashContext) +class _HMACContext(object): + def __init__(self, backend, key, algorithm, ctx=None): + self.algorithm = algorithm + self._backend = backend + if ctx is None: + ctx = self._backend._ffi.new("CCHmacContext *") + try: + alg = self._backend._supported_hmac_algorithms[algorithm.name] + except KeyError: + raise UnsupportedAlgorithm( + "{0} is not a supported HMAC hash on this backend.".format( + algorithm.name), + _Reasons.UNSUPPORTED_HASH + ) + + self._backend._lib.CCHmacInit(ctx, alg, key, len(key)) + + self._ctx = ctx + self._key = key + + def copy(self): + copied_ctx = self._backend._ffi.new("CCHmacContext *") + # CommonCrypto has no APIs for copying HMACs, so we have to copy the + # underlying struct. + copied_ctx[0] = self._ctx[0] + return _HMACContext( + self._backend, self._key, self.algorithm, ctx=copied_ctx + ) + + def update(self, data): + self._backend._lib.CCHmacUpdate(self._ctx, data, len(data)) + + def finalize(self): + buf = self._backend._ffi.new("unsigned char[]", + self.algorithm.digest_size) + self._backend._lib.CCHmacFinal(self._ctx, buf) + return self._backend._ffi.buffer(buf)[:] diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/interfaces.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/interfaces.py index 264c5af..5ed4996 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/interfaces.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/interfaces.py @@ -129,6 +129,31 @@ class RSABackend(object): Returns encrypted bytes. """ + @abc.abstractmethod + def rsa_padding_supported(self, padding): + """ + Returns True if the backend supports the given padding options. + """ + + @abc.abstractmethod + def generate_rsa_parameters_supported(self, public_exponent, key_size): + """ + Returns True if the backend supports the given parameters for key + generation. + """ + + @abc.abstractmethod + def load_rsa_private_numbers(self, numbers): + """ + Returns an RSAPrivateKey provider. + """ + + @abc.abstractmethod + def load_rsa_public_numbers(self, numbers): + """ + Returns an RSAPublicKey provider. + """ + @six.add_metaclass(abc.ABCMeta) class DSABackend(object): @@ -141,8 +166,14 @@ class DSABackend(object): @abc.abstractmethod def generate_dsa_private_key(self, parameters): """ - Generate an DSAPrivateKey instance with parameters as - a DSAParameters object. + Generate a DSAPrivateKey instance with parameters as a DSAParameters + object. + """ + + @abc.abstractmethod + def generate_dsa_private_key_and_parameters(self, key_size): + """ + Generate a DSAPrivateKey instance using key size only. """ @abc.abstractmethod @@ -171,6 +202,24 @@ class DSABackend(object): Return True if the parameters are supported by the backend for DSA. """ + @abc.abstractmethod + def load_dsa_private_numbers(self, numbers): + """ + Returns a DSAPrivateKey provider. + """ + + @abc.abstractmethod + def load_dsa_public_numbers(self, numbers): + """ + Returns a DSAPublicKey provider. + """ + + @abc.abstractmethod + def load_dsa_parameter_numbers(self, numbers): + """ + Returns a DSAParameters provider. 
+ """ + @six.add_metaclass(abc.ABCMeta) class TraditionalOpenSSLSerializationBackend(object): @@ -182,6 +231,16 @@ class TraditionalOpenSSLSerializationBackend(object): """ +@six.add_metaclass(abc.ABCMeta) +class PKCS8SerializationBackend(object): + @abc.abstractmethod + def load_pkcs8_pem_private_key(self, data, password): + """ + Load a private key from PEM encoded data, using password if the data + is encrypted. + """ + + @six.add_metaclass(abc.ABCMeta) class CMACBackend(object): @abc.abstractmethod @@ -195,3 +254,39 @@ class CMACBackend(object): """ Create a CMACContext for calculating a message authentication code. """ + + +@six.add_metaclass(abc.ABCMeta) +class EllipticCurveBackend(object): + @abc.abstractmethod + def elliptic_curve_signature_algorithm_supported( + self, signature_algorithm, curve + ): + """ + Returns True if the backend supports the named elliptic curve with the + specified signature algorithm. + """ + + @abc.abstractmethod + def elliptic_curve_supported(self, curve): + """ + Returns True if the backend supports the named elliptic curve. + """ + + @abc.abstractmethod + def generate_elliptic_curve_private_key(self, curve): + """ + Return an object conforming to the EllipticCurvePrivateKey interface. + """ + + @abc.abstractmethod + def elliptic_curve_public_key_from_numbers(self, numbers): + """ + Return an EllipticCurvePublicKey provider using the given numbers. + """ + + @abc.abstractmethod + def elliptic_curve_private_key_from_numbers(self, numbers): + """ + Return an EllipticCurvePublicKey provider using the given numbers. + """ diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/multibackend.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/multibackend.py index 753f4fc..35e2a09 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/multibackend.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/multibackend.py @@ -16,8 +16,9 @@ from __future__ import absolute_import, division, print_function from cryptography import utils from cryptography.exceptions import UnsupportedAlgorithm, _Reasons from cryptography.hazmat.backends.interfaces import ( - CMACBackend, CipherBackend, DSABackend, HMACBackend, HashBackend, - PBKDF2HMACBackend, RSABackend + CMACBackend, CipherBackend, DSABackend, EllipticCurveBackend, HMACBackend, + HashBackend, PBKDF2HMACBackend, PKCS8SerializationBackend, + RSABackend, TraditionalOpenSSLSerializationBackend ) @@ -26,8 +27,11 @@ from cryptography.hazmat.backends.interfaces import ( @utils.register_interface(HashBackend) @utils.register_interface(HMACBackend) @utils.register_interface(PBKDF2HMACBackend) +@utils.register_interface(PKCS8SerializationBackend) @utils.register_interface(RSABackend) +@utils.register_interface(TraditionalOpenSSLSerializationBackend) @utils.register_interface(DSABackend) +@utils.register_interface(EllipticCurveBackend) class MultiBackend(object): name = "multibackend" @@ -52,7 +56,7 @@ class MultiBackend(object): except UnsupportedAlgorithm: pass raise UnsupportedAlgorithm( - "cipher {0} in {1} mode is not supported by this backend".format( + "cipher {0} in {1} mode is not supported by this backend.".format( algorithm.name, mode.name if mode else mode), _Reasons.UNSUPPORTED_CIPHER ) @@ -64,7 +68,7 @@ class MultiBackend(object): except UnsupportedAlgorithm: pass raise UnsupportedAlgorithm( - "cipher {0} in {1} mode is not supported by this backend".format( + "cipher {0} in {1} mode is not supported by this 
backend.".format( algorithm.name, mode.name if mode else mode), _Reasons.UNSUPPORTED_CIPHER ) @@ -82,7 +86,7 @@ class MultiBackend(object): except UnsupportedAlgorithm: pass raise UnsupportedAlgorithm( - "{0} is not a supported hash on this backend".format( + "{0} is not a supported hash on this backend.".format( algorithm.name), _Reasons.UNSUPPORTED_HASH ) @@ -100,7 +104,7 @@ class MultiBackend(object): except UnsupportedAlgorithm: pass raise UnsupportedAlgorithm( - "{0} is not a supported hash on this backend".format( + "{0} is not a supported hash on this backend.".format( algorithm.name), _Reasons.UNSUPPORTED_HASH ) @@ -121,7 +125,7 @@ class MultiBackend(object): except UnsupportedAlgorithm: pass raise UnsupportedAlgorithm( - "{0} is not a supported hash on this backend".format( + "{0} is not a supported hash on this backend.".format( algorithm.name), _Reasons.UNSUPPORTED_HASH ) @@ -129,13 +133,21 @@ class MultiBackend(object): def generate_rsa_private_key(self, public_exponent, key_size): for b in self._filtered_backends(RSABackend): return b.generate_rsa_private_key(public_exponent, key_size) - raise UnsupportedAlgorithm("RSA is not supported by the backend", + raise UnsupportedAlgorithm("RSA is not supported by the backend.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def generate_rsa_parameters_supported(self, public_exponent, key_size): + for b in self._filtered_backends(RSABackend): + return b.generate_rsa_parameters_supported( + public_exponent, key_size + ) + raise UnsupportedAlgorithm("RSA is not supported by the backend.", _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) def create_rsa_signature_ctx(self, private_key, padding, algorithm): for b in self._filtered_backends(RSABackend): return b.create_rsa_signature_ctx(private_key, padding, algorithm) - raise UnsupportedAlgorithm("RSA is not supported by the backend", + raise UnsupportedAlgorithm("RSA is not supported by the backend.", _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) def create_rsa_verification_ctx(self, public_key, signature, padding, @@ -143,44 +155,88 @@ class MultiBackend(object): for b in self._filtered_backends(RSABackend): return b.create_rsa_verification_ctx(public_key, signature, padding, algorithm) + raise UnsupportedAlgorithm("RSA is not supported by the backend.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def mgf1_hash_supported(self, algorithm): + for b in self._filtered_backends(RSABackend): + return b.mgf1_hash_supported(algorithm) + raise UnsupportedAlgorithm("RSA is not supported by the backend.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def decrypt_rsa(self, private_key, ciphertext, padding): + for b in self._filtered_backends(RSABackend): + return b.decrypt_rsa(private_key, ciphertext, padding) + raise UnsupportedAlgorithm("RSA is not supported by the backend.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def encrypt_rsa(self, public_key, plaintext, padding): + for b in self._filtered_backends(RSABackend): + return b.encrypt_rsa(public_key, plaintext, padding) + raise UnsupportedAlgorithm("RSA is not supported by the backend.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def rsa_padding_supported(self, padding): + for b in self._filtered_backends(RSABackend): + return b.rsa_padding_supported(padding) + raise UnsupportedAlgorithm("RSA is not supported by the backend.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def load_rsa_private_numbers(self, numbers): + for b in self._filtered_backends(RSABackend): + return b.load_rsa_private_numbers(numbers) + + raise 
UnsupportedAlgorithm("RSA is not supported by the backend", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def load_rsa_public_numbers(self, numbers): + for b in self._filtered_backends(RSABackend): + return b.load_rsa_public_numbers(numbers) + raise UnsupportedAlgorithm("RSA is not supported by the backend", _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) def generate_dsa_parameters(self, key_size): for b in self._filtered_backends(DSABackend): return b.generate_dsa_parameters(key_size) - raise UnsupportedAlgorithm("DSA is not supported by the backend", + raise UnsupportedAlgorithm("DSA is not supported by the backend.", _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) def generate_dsa_private_key(self, parameters): for b in self._filtered_backends(DSABackend): return b.generate_dsa_private_key(parameters) - raise UnsupportedAlgorithm("DSA is not supported by the backend", + raise UnsupportedAlgorithm("DSA is not supported by the backend.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def generate_dsa_private_key_and_parameters(self, key_size): + for b in self._filtered_backends(DSABackend): + return b.generate_dsa_private_key_and_parameters(key_size) + raise UnsupportedAlgorithm("DSA is not supported by the backend.", _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) def create_dsa_verification_ctx(self, public_key, signature, algorithm): for b in self._filtered_backends(DSABackend): return b.create_dsa_verification_ctx(public_key, signature, algorithm) - raise UnsupportedAlgorithm("DSA is not supported by the backend", + raise UnsupportedAlgorithm("DSA is not supported by the backend.", _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) def create_dsa_signature_ctx(self, private_key, algorithm): for b in self._filtered_backends(DSABackend): return b.create_dsa_signature_ctx(private_key, algorithm) - raise UnsupportedAlgorithm("DSA is not supported by the backend", + raise UnsupportedAlgorithm("DSA is not supported by the backend.", _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) def dsa_hash_supported(self, algorithm): for b in self._filtered_backends(DSABackend): return b.dsa_hash_supported(algorithm) - raise UnsupportedAlgorithm("DSA is not supported by the backend", + raise UnsupportedAlgorithm("DSA is not supported by the backend.", _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) def dsa_parameters_supported(self, p, q, g): for b in self._filtered_backends(DSABackend): return b.dsa_parameters_supported(p, q, g) - raise UnsupportedAlgorithm("DSA is not supported by the backend", + raise UnsupportedAlgorithm("DSA is not supported by the backend.", _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) def cmac_algorithm_supported(self, algorithm): @@ -195,5 +251,77 @@ class MultiBackend(object): return b.create_cmac_ctx(algorithm) except UnsupportedAlgorithm: pass - raise UnsupportedAlgorithm("This backend does not support CMAC", + raise UnsupportedAlgorithm("This backend does not support CMAC.", _Reasons.UNSUPPORTED_CIPHER) + + def elliptic_curve_supported(self, curve): + return any( + b.elliptic_curve_supported(curve) + for b in self._filtered_backends(EllipticCurveBackend) + ) + + def elliptic_curve_signature_algorithm_supported( + self, signature_algorithm, curve + ): + return any( + b.elliptic_curve_signature_algorithm_supported( + signature_algorithm, curve + ) + for b in self._filtered_backends(EllipticCurveBackend) + ) + + def generate_elliptic_curve_private_key(self, curve): + for b in self._filtered_backends(EllipticCurveBackend): + try: + return b.generate_elliptic_curve_private_key(curve) + except 
UnsupportedAlgorithm: + continue + + raise UnsupportedAlgorithm( + "This backend does not support this elliptic curve.", + _Reasons.UNSUPPORTED_ELLIPTIC_CURVE + ) + + def elliptic_curve_private_key_from_numbers(self, numbers): + for b in self._filtered_backends(EllipticCurveBackend): + try: + return b.elliptic_curve_private_key_from_numbers(numbers) + except UnsupportedAlgorithm: + continue + + raise UnsupportedAlgorithm( + "This backend does not support this elliptic curve.", + _Reasons.UNSUPPORTED_ELLIPTIC_CURVE + ) + + def elliptic_curve_public_key_from_numbers(self, numbers): + for b in self._filtered_backends(EllipticCurveBackend): + try: + return b.elliptic_curve_public_key_from_numbers(numbers) + except UnsupportedAlgorithm: + continue + + raise UnsupportedAlgorithm( + "This backend does not support this elliptic curve.", + _Reasons.UNSUPPORTED_ELLIPTIC_CURVE + ) + + def load_pkcs8_pem_private_key(self, data, password): + for b in self._filtered_backends(PKCS8SerializationBackend): + return b.load_pkcs8_pem_private_key(data, password) + + raise UnsupportedAlgorithm( + "This backend does not support this key serialization.", + _Reasons.UNSUPPORTED_SERIALIZATION + ) + + def load_traditional_openssl_pem_private_key(self, data, password): + for b in self._filtered_backends( + TraditionalOpenSSLSerializationBackend + ): + return b.load_traditional_openssl_pem_private_key(data, password) + + raise UnsupportedAlgorithm( + "This backend does not support this key serialization.", + _Reasons.UNSUPPORTED_SERIALIZATION + ) diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/openssl/backend.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/openssl/backend.py index e00be92..4991177 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/openssl/backend.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/openssl/backend.py @@ -15,22 +15,39 @@ from __future__ import absolute_import, division, print_function import collections import itertools -import math +import warnings import six from cryptography import utils from cryptography.exceptions import ( - AlreadyFinalized, InternalError, InvalidSignature, InvalidTag, - UnsupportedAlgorithm, _Reasons + InternalError, UnsupportedAlgorithm, _Reasons ) from cryptography.hazmat.backends.interfaces import ( - CMACBackend, CipherBackend, DSABackend, HMACBackend, HashBackend, - PBKDF2HMACBackend, RSABackend + CMACBackend, CipherBackend, DSABackend, EllipticCurveBackend, HMACBackend, + HashBackend, PBKDF2HMACBackend, PKCS8SerializationBackend, RSABackend, + TraditionalOpenSSLSerializationBackend +) +from cryptography.hazmat.backends.openssl.ciphers import ( + _AESCTRCipherContext, _CipherContext +) +from cryptography.hazmat.backends.openssl.cmac import _CMACContext +from cryptography.hazmat.backends.openssl.dsa import ( + _DSAParameters, _DSAPrivateKey, _DSAPublicKey, + _DSASignatureContext, _DSAVerificationContext +) +from cryptography.hazmat.backends.openssl.ec import ( + _EllipticCurvePrivateKey, _EllipticCurvePublicKey +) +from cryptography.hazmat.backends.openssl.hashes import _HashContext +from cryptography.hazmat.backends.openssl.hmac import _HMACContext +from cryptography.hazmat.backends.openssl.rsa import ( + _RSAPrivateKey, _RSAPublicKey, _RSASignatureContext, + _RSAVerificationContext ) from cryptography.hazmat.bindings.openssl.binding import Binding -from cryptography.hazmat.primitives import hashes, interfaces -from 
cryptography.hazmat.primitives.asymmetric import dsa, rsa +from cryptography.hazmat.primitives import hashes +from cryptography.hazmat.primitives.asymmetric import dsa, ec, rsa from cryptography.hazmat.primitives.asymmetric.padding import ( MGF1, OAEP, PKCS1v15, PSS ) @@ -38,10 +55,11 @@ from cryptography.hazmat.primitives.ciphers.algorithms import ( AES, ARC4, Blowfish, CAST5, Camellia, IDEA, SEED, TripleDES ) from cryptography.hazmat.primitives.ciphers.modes import ( - CBC, CFB, CTR, ECB, GCM, OFB + CBC, CFB, CFB8, CTR, ECB, GCM, OFB ) +_MemoryBIO = collections.namedtuple("_MemoryBIO", ["bio", "char_ptr"]) _OpenSSLError = collections.namedtuple("_OpenSSLError", ["code", "lib", "func", "reason"]) @@ -49,10 +67,13 @@ _OpenSSLError = collections.namedtuple("_OpenSSLError", @utils.register_interface(CipherBackend) @utils.register_interface(CMACBackend) @utils.register_interface(DSABackend) +@utils.register_interface(EllipticCurveBackend) @utils.register_interface(HashBackend) @utils.register_interface(HMACBackend) @utils.register_interface(PBKDF2HMACBackend) +@utils.register_interface(PKCS8SerializationBackend) @utils.register_interface(RSABackend) +@utils.register_interface(TraditionalOpenSSLSerializationBackend) class Backend(object): """ OpenSSL API binding interfaces. @@ -112,11 +133,14 @@ class Backend(object): def openssl_version_text(self): """ - Friendly string name of linked OpenSSL. + Friendly string name of the loaded OpenSSL library. This is not + necessarily the same version as it was compiled against. Example: OpenSSL 1.0.1e 11 Feb 2013 """ - return self._ffi.string(self._lib.OPENSSL_VERSION_TEXT).decode("ascii") + return self._ffi.string( + self._lib.SSLeay_version(self._lib.SSLEAY_VERSION) + ).decode("ascii") def create_hmac_ctx(self, key, algorithm): return _HMACContext(self, key, algorithm) @@ -132,6 +156,14 @@ class Backend(object): return _HashContext(self, algorithm) def cipher_supported(self, cipher, mode): + if self._evp_cipher_supported(cipher, mode): + return True + elif isinstance(mode, CTR) and isinstance(cipher, AES): + return True + else: + return False + + def _evp_cipher_supported(self, cipher, mode): try: adapter = self._cipher_registry[type(cipher), type(mode)] except KeyError: @@ -141,27 +173,35 @@ class Backend(object): def register_cipher_adapter(self, cipher_cls, mode_cls, adapter): if (cipher_cls, mode_cls) in self._cipher_registry: - raise ValueError("Duplicate registration for: {0} {1}".format( + raise ValueError("Duplicate registration for: {0} {1}.".format( cipher_cls, mode_cls) ) self._cipher_registry[cipher_cls, mode_cls] = adapter def _register_default_ciphers(self): - for cipher_cls, mode_cls in itertools.product( - [AES, Camellia], - [CBC, CTR, ECB, OFB, CFB], - ): + for mode_cls in [CBC, CTR, ECB, OFB, CFB, CFB8]: self.register_cipher_adapter( - cipher_cls, + AES, mode_cls, GetCipherByName("{cipher.name}-{cipher.key_size}-{mode.name}") ) - for mode_cls in [CBC, CFB, OFB]: + for mode_cls in [CBC, CTR, ECB, OFB, CFB]: + self.register_cipher_adapter( + Camellia, + mode_cls, + GetCipherByName("{cipher.name}-{cipher.key_size}-{mode.name}") + ) + for mode_cls in [CBC, CFB, CFB8, OFB]: self.register_cipher_adapter( TripleDES, mode_cls, GetCipherByName("des-ede3-{mode.name}") ) + self.register_cipher_adapter( + TripleDES, + ECB, + GetCipherByName("des-ede3") + ) for mode_cls in [CBC, CFB, OFB, ECB]: self.register_cipher_adapter( Blowfish, @@ -195,10 +235,24 @@ class Backend(object): ) def create_symmetric_encryption_ctx(self, cipher, mode): - return 
_CipherContext(self, cipher, mode, _CipherContext._ENCRYPT) + if (isinstance(mode, CTR) and isinstance(cipher, AES) + and not self._evp_cipher_supported(cipher, mode)): + # This is needed to provide support for AES CTR mode in OpenSSL + # 0.9.8. It can be removed when we drop 0.9.8 support (RHEL 5 + # extended life ends 2020). + return _AESCTRCipherContext(self, cipher, mode) + else: + return _CipherContext(self, cipher, mode, _CipherContext._ENCRYPT) def create_symmetric_decryption_ctx(self, cipher, mode): - return _CipherContext(self, cipher, mode, _CipherContext._DECRYPT) + if (isinstance(mode, CTR) and isinstance(cipher, AES) + and not self._evp_cipher_supported(cipher, mode)): + # This is needed to provide support for AES CTR mode in OpenSSL + # 0.9.8. It can be removed when we drop 0.9.8 support (RHEL 5 + # extended life ends 2020). + return _AESCTRCipherContext(self, cipher, mode) + else: + return _CipherContext(self, cipher, mode, _CipherContext._DECRYPT) def pbkdf2_hmac_supported(self, algorithm): if self._lib.Cryptography_HAS_PBKDF2_HMAC: @@ -231,7 +285,7 @@ class Backend(object): if not isinstance(algorithm, hashes.SHA1): raise UnsupportedAlgorithm( "This version of OpenSSL only supports PBKDF2HMAC with " - "SHA1", + "SHA1.", _Reasons.UNSUPPORTED_HASH ) res = self._lib.PKCS5_PBKDF2_HMAC_SHA1( @@ -269,7 +323,7 @@ class Backend(object): def _unknown_error(self, error): return InternalError( "Unknown error code {0} from OpenSSL, " - "you should probably file a bug. {1}".format( + "you should probably file a bug. {1}.".format( error.code, self._err_string(error.code) ) ) @@ -325,69 +379,137 @@ class Backend(object): return bn_ptr[0] def generate_rsa_private_key(self, public_exponent, key_size): - if public_exponent < 3: - raise ValueError("public_exponent must be >= 3") + rsa._verify_rsa_parameters(public_exponent, key_size) - if public_exponent & 1 == 0: - raise ValueError("public_exponent must be odd") - - if key_size < 512: - raise ValueError("key_size must be at least 512-bits") - - ctx = self._lib.RSA_new() - assert ctx != self._ffi.NULL - ctx = self._ffi.gc(ctx, self._lib.RSA_free) + rsa_cdata = self._lib.RSA_new() + assert rsa_cdata != self._ffi.NULL + rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) bn = self._int_to_bn(public_exponent) bn = self._ffi.gc(bn, self._lib.BN_free) res = self._lib.RSA_generate_key_ex( - ctx, key_size, bn, self._ffi.NULL + rsa_cdata, key_size, bn, self._ffi.NULL ) assert res == 1 - return self._rsa_cdata_to_private_key(ctx) + return _RSAPrivateKey(self, rsa_cdata) - def _new_evp_pkey(self): - evp_pkey = self._lib.EVP_PKEY_new() - assert evp_pkey != self._ffi.NULL - return self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + def generate_rsa_parameters_supported(self, public_exponent, key_size): + return (public_exponent >= 3 and public_exponent & 1 != 0 and + key_size >= 512) - def _rsa_private_key_to_evp_pkey(self, private_key): - evp_pkey = self._new_evp_pkey() - rsa_cdata = self._rsa_cdata_from_private_key(private_key) - - res = self._lib.EVP_PKEY_assign_RSA(evp_pkey, rsa_cdata) + def load_rsa_private_numbers(self, numbers): + rsa._check_private_key_components( + numbers.p, + numbers.q, + numbers.d, + numbers.dmp1, + numbers.dmq1, + numbers.iqmp, + numbers.public_numbers.e, + numbers.public_numbers.n + ) + rsa_cdata = self._lib.RSA_new() + assert rsa_cdata != self._ffi.NULL + rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) + rsa_cdata.p = self._int_to_bn(numbers.p) + rsa_cdata.q = self._int_to_bn(numbers.q) + rsa_cdata.d = 
self._int_to_bn(numbers.d) + rsa_cdata.dmp1 = self._int_to_bn(numbers.dmp1) + rsa_cdata.dmq1 = self._int_to_bn(numbers.dmq1) + rsa_cdata.iqmp = self._int_to_bn(numbers.iqmp) + rsa_cdata.e = self._int_to_bn(numbers.public_numbers.e) + rsa_cdata.n = self._int_to_bn(numbers.public_numbers.n) + res = self._lib.RSA_blinding_on(rsa_cdata, self._ffi.NULL) assert res == 1 - return evp_pkey + return _RSAPrivateKey(self, rsa_cdata) - def _rsa_public_key_to_evp_pkey(self, public_key): - evp_pkey = self._new_evp_pkey() - rsa_cdata = self._rsa_cdata_from_public_key(public_key) - - res = self._lib.EVP_PKEY_assign_RSA(evp_pkey, rsa_cdata) + def load_rsa_public_numbers(self, numbers): + rsa._check_public_key_components(numbers.e, numbers.n) + rsa_cdata = self._lib.RSA_new() + assert rsa_cdata != self._ffi.NULL + rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) + rsa_cdata.e = self._int_to_bn(numbers.e) + rsa_cdata.n = self._int_to_bn(numbers.n) + res = self._lib.RSA_blinding_on(rsa_cdata, self._ffi.NULL) assert res == 1 - return evp_pkey + return _RSAPublicKey(self, rsa_cdata) - def _rsa_cdata_to_private_key(self, cdata): - return rsa.RSAPrivateKey( - p=self._bn_to_int(cdata.p), - q=self._bn_to_int(cdata.q), - dmp1=self._bn_to_int(cdata.dmp1), - dmq1=self._bn_to_int(cdata.dmq1), - iqmp=self._bn_to_int(cdata.iqmp), - private_exponent=self._bn_to_int(cdata.d), - public_exponent=self._bn_to_int(cdata.e), - modulus=self._bn_to_int(cdata.n), + def _bytes_to_bio(self, data): + """ + Return a _MemoryBIO namedtuple of (BIO, char*). + + The char* is the storage for the BIO and it must stay alive until the + BIO is finished with. + """ + data_char_p = self._ffi.new("char[]", data) + bio = self._lib.BIO_new_mem_buf( + data_char_p, len(data) + ) + assert bio != self._ffi.NULL + + return _MemoryBIO(self._ffi.gc(bio, self._lib.BIO_free), data_char_p) + + def _evp_pkey_to_private_key(self, evp_pkey): + """ + Return the appropriate type of PrivateKey given an evp_pkey cdata + pointer. + """ + + type = evp_pkey.type + + if type == self._lib.EVP_PKEY_RSA: + rsa_cdata = self._lib.EVP_PKEY_get1_RSA(evp_pkey) + assert rsa_cdata != self._ffi.NULL + rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) + return _RSAPrivateKey(self, rsa_cdata) + elif type == self._lib.EVP_PKEY_DSA: + dsa_cdata = self._lib.EVP_PKEY_get1_DSA(evp_pkey) + assert dsa_cdata != self._ffi.NULL + dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free) + return _DSAPrivateKey(self, dsa_cdata) + else: + raise UnsupportedAlgorithm("Unsupported key type.") + + def _pem_password_cb(self, password): + """ + Generate a pem_password_cb function pointer that copied the password to + OpenSSL as required and returns the number of bytes copied. + + typedef int pem_password_cb(char *buf, int size, + int rwflag, void *userdata); + + Useful for decrypting PKCS8 files and so on. + + Returns a tuple of (cdata function pointer, callback function). + """ + + def pem_password_cb(buf, size, writing, userdata): + pem_password_cb.called += 1 + + if not password or len(password) >= size: + return 0 + else: + pw_buf = self._ffi.buffer(buf, size) + pw_buf[:len(password)] = password + return len(password) + + pem_password_cb.called = 0 + + return ( + self._ffi.callback("int (char *, int, int, void *)", + pem_password_cb), + pem_password_cb ) def _rsa_cdata_from_private_key(self, private_key): - # Does not GC the RSA cdata. You *must* make sure it's freed - # correctly yourself! 
ctx = self._lib.RSA_new() assert ctx != self._ffi.NULL + ctx = self._ffi.gc(ctx, self._lib.RSA_free) + ctx.p = self._int_to_bn(private_key.p) ctx.q = self._int_to_bn(private_key.q) ctx.d = self._int_to_bn(private_key.d) @@ -402,11 +524,10 @@ class Backend(object): return ctx def _rsa_cdata_from_public_key(self, public_key): - # Does not GC the RSA cdata. You *must* make sure it's freed - # correctly yourself! - ctx = self._lib.RSA_new() assert ctx != self._ffi.NULL + ctx = self._ffi.gc(ctx, self._lib.RSA_free) + ctx.e = self._int_to_bn(public_key.e) ctx.n = self._int_to_bn(public_key.n) res = self._lib.RSA_blinding_on(ctx, self._ffi.NULL) @@ -415,29 +536,64 @@ class Backend(object): return ctx def create_rsa_signature_ctx(self, private_key, padding, algorithm): - return _RSASignatureContext(self, private_key, padding, algorithm) + warnings.warn( + "create_rsa_signature_ctx is deprecated and will be removed in a " + "future version.", + utils.DeprecatedIn05, + stacklevel=2 + ) + rsa_cdata = self._rsa_cdata_from_private_key(private_key) + key = _RSAPrivateKey(self, rsa_cdata) + return _RSASignatureContext(self, key, padding, algorithm) def create_rsa_verification_ctx(self, public_key, signature, padding, algorithm): - return _RSAVerificationContext(self, public_key, signature, padding, + warnings.warn( + "create_rsa_verification_ctx is deprecated and will be removed in " + "a future version.", + utils.DeprecatedIn05, + stacklevel=2 + ) + rsa_cdata = self._rsa_cdata_from_public_key(public_key) + key = _RSAPublicKey(self, rsa_cdata) + return _RSAVerificationContext(self, key, signature, padding, algorithm) def mgf1_hash_supported(self, algorithm): + warnings.warn( + "mgf1_hash_supported is deprecated and will be removed in " + "a future version.", + utils.DeprecatedIn05, + stacklevel=2 + ) + return self._mgf1_hash_supported(algorithm) + + def _mgf1_hash_supported(self, algorithm): if self._lib.Cryptography_HAS_MGF1_MD: return self.hash_supported(algorithm) else: return isinstance(algorithm, hashes.SHA1) + def rsa_padding_supported(self, padding): + if isinstance(padding, PKCS1v15): + return True + elif isinstance(padding, PSS) and isinstance(padding._mgf, MGF1): + return self._mgf1_hash_supported(padding._mgf._algorithm) + elif isinstance(padding, OAEP) and isinstance(padding._mgf, MGF1): + return isinstance(padding._mgf._algorithm, hashes.SHA1) + else: + return False + def generate_dsa_parameters(self, key_size): if key_size not in (1024, 2048, 3072): raise ValueError( - "Key size must be 1024 or 2048 or 3072 bits") + "Key size must be 1024 or 2048 or 3072 bits.") if (self._lib.OPENSSL_VERSION_NUMBER < 0x1000000f and key_size > 1024): raise ValueError( "Key size must be 1024 because OpenSSL < 1.0.0 doesn't " - "support larger key sizes") + "support larger key sizes.") ctx = self._lib.DSA_new() assert ctx != self._ffi.NULL @@ -450,43 +606,97 @@ class Backend(object): assert res == 1 - return dsa.DSAParameters( - modulus=self._bn_to_int(ctx.p), - subgroup_order=self._bn_to_int(ctx.q), - generator=self._bn_to_int(ctx.g) - ) + return _DSAParameters(self, ctx) def generate_dsa_private_key(self, parameters): ctx = self._lib.DSA_new() assert ctx != self._ffi.NULL ctx = self._ffi.gc(ctx, self._lib.DSA_free) - ctx.p = self._int_to_bn(parameters.p) - ctx.q = self._int_to_bn(parameters.q) - ctx.g = self._int_to_bn(parameters.g) + if isinstance(parameters, dsa.DSAParameters): + ctx.p = self._int_to_bn(parameters.p) + ctx.q = self._int_to_bn(parameters.q) + ctx.g = self._int_to_bn(parameters.g) + else: + 
ctx.p = self._lib.BN_dup(parameters._dsa_cdata.p) + ctx.q = self._lib.BN_dup(parameters._dsa_cdata.q) + ctx.g = self._lib.BN_dup(parameters._dsa_cdata.g) self._lib.DSA_generate_key(ctx) - return dsa.DSAPrivateKey( - modulus=self._bn_to_int(ctx.p), - subgroup_order=self._bn_to_int(ctx.q), - generator=self._bn_to_int(ctx.g), - x=self._bn_to_int(ctx.priv_key), - y=self._bn_to_int(ctx.pub_key) - ) + return _DSAPrivateKey(self, ctx) + + def generate_dsa_private_key_and_parameters(self, key_size): + parameters = self.generate_dsa_parameters(key_size) + return self.generate_dsa_private_key(parameters) def create_dsa_signature_ctx(self, private_key, algorithm): - return _DSASignatureContext(self, private_key, algorithm) + warnings.warn( + "create_dsa_signature_ctx is deprecated and will be removed in " + "a future version.", + utils.DeprecatedIn05, + stacklevel=2 + ) + dsa_cdata = self._dsa_cdata_from_private_key(private_key) + key = _DSAPrivateKey(self, dsa_cdata) + return _DSASignatureContext(self, key, algorithm) def create_dsa_verification_ctx(self, public_key, signature, algorithm): - return _DSAVerificationContext(self, public_key, signature, - algorithm) + warnings.warn( + "create_dsa_verification_ctx is deprecated and will be removed in " + "a future version.", + utils.DeprecatedIn05, + stacklevel=2 + ) + dsa_cdata = self._dsa_cdata_from_public_key(public_key) + key = _DSAPublicKey(self, dsa_cdata) + return _DSAVerificationContext(self, key, signature, algorithm) + + def load_dsa_private_numbers(self, numbers): + dsa._check_dsa_private_numbers(numbers) + parameter_numbers = numbers.public_numbers.parameter_numbers + + dsa_cdata = self._lib.DSA_new() + assert dsa_cdata != self._ffi.NULL + dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free) + + dsa_cdata.p = self._int_to_bn(parameter_numbers.p) + dsa_cdata.q = self._int_to_bn(parameter_numbers.q) + dsa_cdata.g = self._int_to_bn(parameter_numbers.g) + dsa_cdata.pub_key = self._int_to_bn(numbers.public_numbers.y) + dsa_cdata.priv_key = self._int_to_bn(numbers.x) + + return _DSAPrivateKey(self, dsa_cdata) + + def load_dsa_public_numbers(self, numbers): + dsa._check_dsa_parameters(numbers.parameter_numbers) + dsa_cdata = self._lib.DSA_new() + assert dsa_cdata != self._ffi.NULL + dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free) + + dsa_cdata.p = self._int_to_bn(numbers.parameter_numbers.p) + dsa_cdata.q = self._int_to_bn(numbers.parameter_numbers.q) + dsa_cdata.g = self._int_to_bn(numbers.parameter_numbers.g) + dsa_cdata.pub_key = self._int_to_bn(numbers.y) + + return _DSAPublicKey(self, dsa_cdata) + + def load_dsa_parameter_numbers(self, numbers): + dsa._check_dsa_parameters(numbers) + dsa_cdata = self._lib.DSA_new() + assert dsa_cdata != self._ffi.NULL + dsa_cdata = self._ffi.gc(dsa_cdata, self._lib.DSA_free) + + dsa_cdata.p = self._int_to_bn(numbers.p) + dsa_cdata.q = self._int_to_bn(numbers.q) + dsa_cdata.g = self._int_to_bn(numbers.g) + + return _DSAParameters(self, dsa_cdata) def _dsa_cdata_from_public_key(self, public_key): - # Does not GC the DSA cdata. You *must* make sure it's freed - # correctly yourself! ctx = self._lib.DSA_new() assert ctx != self._ffi.NULL + ctx = self._ffi.gc(ctx, self._lib.DSA_free) parameters = public_key.parameters() ctx.p = self._int_to_bn(parameters.p) ctx.q = self._int_to_bn(parameters.q) @@ -495,10 +705,9 @@ class Backend(object): return ctx def _dsa_cdata_from_private_key(self, private_key): - # Does not GC the DSA cdata. You *must* make sure it's freed - # correctly yourself! 
ctx = self._lib.DSA_new() assert ctx != self._ffi.NULL + ctx = self._ffi.gc(ctx, self._lib.DSA_free) parameters = private_key.parameters() ctx.p = self._int_to_bn(parameters.p) ctx.q = self._int_to_bn(parameters.q) @@ -520,131 +729,26 @@ class Backend(object): return True def decrypt_rsa(self, private_key, ciphertext, padding): - key_size_bytes = int(math.ceil(private_key.key_size / 8.0)) - if key_size_bytes != len(ciphertext): - raise ValueError("Ciphertext length must be equal to key size.") - - return self._enc_dec_rsa(private_key, ciphertext, padding) + warnings.warn( + "decrypt_rsa is deprecated and will be removed in a future " + "version.", + utils.DeprecatedIn05, + stacklevel=2 + ) + rsa_cdata = self._rsa_cdata_from_private_key(private_key) + key = _RSAPrivateKey(self, rsa_cdata) + return key.decrypt(ciphertext, padding) def encrypt_rsa(self, public_key, plaintext, padding): - return self._enc_dec_rsa(public_key, plaintext, padding) - - def _enc_dec_rsa(self, key, data, padding): - if isinstance(padding, PKCS1v15): - padding_enum = self._lib.RSA_PKCS1_PADDING - elif isinstance(padding, OAEP): - padding_enum = self._lib.RSA_PKCS1_OAEP_PADDING - if not isinstance(padding._mgf, MGF1): - raise UnsupportedAlgorithm( - "Only MGF1 is supported by this backend", - _Reasons.UNSUPPORTED_MGF - ) - - if not isinstance(padding._mgf._algorithm, hashes.SHA1): - raise UnsupportedAlgorithm( - "This backend supports only SHA1 inside MGF1 when " - "using OAEP", - _Reasons.UNSUPPORTED_HASH - ) - - if padding._label is not None and padding._label != b"": - raise ValueError("This backend does not support OAEP labels") - - if not isinstance(padding._algorithm, hashes.SHA1): - raise UnsupportedAlgorithm( - "This backend only supports SHA1 when using OAEP", - _Reasons.UNSUPPORTED_HASH - ) - else: - raise UnsupportedAlgorithm( - "{0} is not supported by this backend".format( - padding.name - ), - _Reasons.UNSUPPORTED_PADDING - ) - - if self._lib.Cryptography_HAS_PKEY_CTX: - return self._enc_dec_rsa_pkey_ctx(key, data, padding_enum) - else: - return self._enc_dec_rsa_098(key, data, padding_enum) - - def _enc_dec_rsa_pkey_ctx(self, key, data, padding_enum): - if isinstance(key, rsa.RSAPublicKey): - init = self._lib.EVP_PKEY_encrypt_init - crypt = self._lib.Cryptography_EVP_PKEY_encrypt - evp_pkey = self._rsa_public_key_to_evp_pkey(key) - else: - init = self._lib.EVP_PKEY_decrypt_init - crypt = self._lib.Cryptography_EVP_PKEY_decrypt - evp_pkey = self._rsa_private_key_to_evp_pkey(key) - - pkey_ctx = self._lib.EVP_PKEY_CTX_new( - evp_pkey, self._ffi.NULL + warnings.warn( + "encrypt_rsa is deprecated and will be removed in a future " + "version.", + utils.DeprecatedIn05, + stacklevel=2 ) - assert pkey_ctx != self._ffi.NULL - pkey_ctx = self._ffi.gc(pkey_ctx, self._lib.EVP_PKEY_CTX_free) - res = init(pkey_ctx) - assert res == 1 - res = self._lib.EVP_PKEY_CTX_set_rsa_padding( - pkey_ctx, padding_enum) - assert res > 0 - buf_size = self._lib.EVP_PKEY_size(evp_pkey) - assert buf_size > 0 - outlen = self._ffi.new("size_t *", buf_size) - buf = self._ffi.new("char[]", buf_size) - res = crypt( - pkey_ctx, - buf, - outlen, - data, - len(data) - ) - if res <= 0: - self._handle_rsa_enc_dec_error(key) - - return self._ffi.buffer(buf)[:outlen[0]] - - def _enc_dec_rsa_098(self, key, data, padding_enum): - if isinstance(key, rsa.RSAPublicKey): - crypt = self._lib.RSA_public_encrypt - rsa_cdata = self._rsa_cdata_from_public_key(key) - else: - crypt = self._lib.RSA_private_decrypt - rsa_cdata = 
self._rsa_cdata_from_private_key(key) - - rsa_cdata = self._ffi.gc(rsa_cdata, self._lib.RSA_free) - key_size = self._lib.RSA_size(rsa_cdata) - assert key_size > 0 - buf = self._ffi.new("unsigned char[]", key_size) - res = crypt( - len(data), - data, - buf, - rsa_cdata, - padding_enum - ) - if res < 0: - self._handle_rsa_enc_dec_error(key) - - return self._ffi.buffer(buf)[:res] - - def _handle_rsa_enc_dec_error(self, key): - errors = self._consume_errors() - assert errors - assert errors[0].lib == self._lib.ERR_LIB_RSA - if isinstance(key, rsa.RSAPublicKey): - assert (errors[0].reason == - self._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE) - raise ValueError( - "Data too long for key size. Encrypt less data or use a " - "larger key size" - ) - else: - assert ( - errors[0].reason == self._lib.RSA_R_BLOCK_TYPE_IS_NOT_01 or - errors[0].reason == self._lib.RSA_R_BLOCK_TYPE_IS_NOT_02 - ) - raise ValueError("Decryption failed") + rsa_cdata = self._rsa_cdata_from_public_key(public_key) + key = _RSAPublicKey(self, rsa_cdata) + return key.encrypt(plaintext, padding) def cmac_algorithm_supported(self, algorithm): return ( @@ -656,6 +760,294 @@ class Backend(object): def create_cmac_ctx(self, algorithm): return _CMACContext(self, algorithm) + def load_traditional_openssl_pem_private_key(self, data, password): + # OpenSSLs API for loading PKCS#8 certs can also load the traditional + # format so we just use that for both of them. + + return self.load_pkcs8_pem_private_key(data, password) + + def load_pkcs8_pem_private_key(self, data, password): + mem_bio = self._bytes_to_bio(data) + + password_callback, password_func = self._pem_password_cb(password) + + evp_pkey = self._lib.PEM_read_bio_PrivateKey( + mem_bio.bio, + self._ffi.NULL, + password_callback, + self._ffi.NULL + ) + + if evp_pkey == self._ffi.NULL: + errors = self._consume_errors() + if not errors: + raise ValueError("Could not unserialize key data.") + + if ( + errors[0][1:] == ( + self._lib.ERR_LIB_PEM, + self._lib.PEM_F_PEM_DO_HEADER, + self._lib.PEM_R_BAD_PASSWORD_READ + ) + ) or ( + errors[0][1:] == ( + self._lib.ERR_LIB_PEM, + self._lib.PEM_F_PEM_READ_BIO_PRIVATEKEY, + self._lib.PEM_R_BAD_PASSWORD_READ + ) + ): + assert not password + raise TypeError( + "Password was not given but private key is encrypted.") + + elif errors[0][1:] == ( + self._lib.ERR_LIB_EVP, + self._lib.EVP_F_EVP_DECRYPTFINAL_EX, + self._lib.EVP_R_BAD_DECRYPT + ): + raise ValueError( + "Bad decrypt. Incorrect password?" 
+ ) + + elif errors[0][1:] in ( + ( + self._lib.ERR_LIB_PEM, + self._lib.PEM_F_PEM_GET_EVP_CIPHER_INFO, + self._lib.PEM_R_UNSUPPORTED_ENCRYPTION + ), + + ( + self._lib.ERR_LIB_EVP, + self._lib.EVP_F_EVP_PBE_CIPHERINIT, + self._lib.EVP_R_UNKNOWN_PBE_ALGORITHM + ) + ): + raise UnsupportedAlgorithm( + "PEM data is encrypted with an unsupported cipher", + _Reasons.UNSUPPORTED_CIPHER + ) + + elif any( + error[1:] == ( + self._lib.ERR_LIB_EVP, + self._lib.EVP_F_EVP_PKCS82PKEY, + self._lib.EVP_R_UNSUPPORTED_PRIVATE_KEY_ALGORITHM + ) + for error in errors + ): + raise UnsupportedAlgorithm( + "Unsupported public key algorithm.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM + ) + + else: + assert errors[0][1] in ( + self._lib.ERR_LIB_EVP, + self._lib.ERR_LIB_PEM, + self._lib.ERR_LIB_ASN1, + ) + raise ValueError("Could not unserialize key data.") + + evp_pkey = self._ffi.gc(evp_pkey, self._lib.EVP_PKEY_free) + + if password is not None and password_func.called == 0: + raise TypeError( + "Password was given but private key is not encrypted.") + + assert ( + (password is not None and password_func.called == 1) or + password is None + ) + + return self._evp_pkey_to_private_key(evp_pkey) + + def elliptic_curve_supported(self, curve): + if self._lib.Cryptography_HAS_EC != 1: + return False + + try: + curve_nid = self._elliptic_curve_to_nid(curve) + except UnsupportedAlgorithm: + curve_nid = self._lib.NID_undef + + ctx = self._lib.EC_GROUP_new_by_curve_name(curve_nid) + + if ctx == self._ffi.NULL: + errors = self._consume_errors() + assert ( + curve_nid == self._lib.NID_undef or + errors[0][1:] == ( + self._lib.ERR_LIB_EC, + self._lib.EC_F_EC_GROUP_NEW_BY_CURVE_NAME, + self._lib.EC_R_UNKNOWN_GROUP + ) + ) + return False + else: + assert curve_nid != self._lib.NID_undef + self._lib.EC_GROUP_free(ctx) + return True + + def elliptic_curve_signature_algorithm_supported( + self, signature_algorithm, curve + ): + if self._lib.Cryptography_HAS_EC != 1: + return False + + # We only support ECDSA right now. + if not isinstance(signature_algorithm, ec.ECDSA): + return False + + # Before 0.9.8m OpenSSL can't cope with digests longer than the curve. + if ( + self._lib.OPENSSL_VERSION_NUMBER < 0x009080df and + curve.key_size < signature_algorithm.algorithm.digest_size * 8 + ): + return False + + return self.elliptic_curve_supported(curve) + + def generate_elliptic_curve_private_key(self, curve): + """ + Generate a new private key on the named curve. 
+ """ + + if self.elliptic_curve_supported(curve): + curve_nid = self._elliptic_curve_to_nid(curve) + + ctx = self._lib.EC_KEY_new_by_curve_name(curve_nid) + assert ctx != self._ffi.NULL + ctx = self._ffi.gc(ctx, self._lib.EC_KEY_free) + + res = self._lib.EC_KEY_generate_key(ctx) + assert res == 1 + + res = self._lib.EC_KEY_check_key(ctx) + assert res == 1 + + return _EllipticCurvePrivateKey(self, ctx, curve) + else: + raise UnsupportedAlgorithm( + "Backend object does not support {0}.".format(curve.name), + _Reasons.UNSUPPORTED_ELLIPTIC_CURVE + ) + + def elliptic_curve_private_key_from_numbers(self, numbers): + public = numbers.public_numbers + + curve_nid = self._elliptic_curve_to_nid(public.curve) + + ctx = self._lib.EC_KEY_new_by_curve_name(curve_nid) + assert ctx != self._ffi.NULL + ctx = self._ffi.gc(ctx, self._lib.EC_KEY_free) + + ctx = self._ec_key_set_public_key_affine_coordinates( + ctx, public.x, public.y) + + res = self._lib.EC_KEY_set_private_key( + ctx, self._int_to_bn(numbers.private_value)) + assert res == 1 + + return _EllipticCurvePrivateKey(self, ctx, + numbers.public_numbers.curve) + + def elliptic_curve_public_key_from_numbers(self, numbers): + curve_nid = self._elliptic_curve_to_nid(numbers.curve) + + ctx = self._lib.EC_KEY_new_by_curve_name(curve_nid) + assert ctx != self._ffi.NULL + ctx = self._ffi.gc(ctx, self._lib.EC_KEY_free) + + ctx = self._ec_key_set_public_key_affine_coordinates( + ctx, numbers.x, numbers.y) + + return _EllipticCurvePublicKey(self, ctx, numbers.curve) + + def _elliptic_curve_to_nid(self, curve): + """ + Get the NID for a curve name. + """ + + curve_aliases = { + "secp192r1": "prime192v1", + "secp256r1": "prime256v1" + } + + curve_name = curve_aliases.get(curve.name, curve.name) + + curve_nid = self._lib.OBJ_sn2nid(curve_name.encode()) + if curve_nid == self._lib.NID_undef: + raise UnsupportedAlgorithm( + "{0} is not a supported elliptic curve".format(curve.name), + _Reasons.UNSUPPORTED_ELLIPTIC_CURVE + ) + return curve_nid + + def _ec_key_set_public_key_affine_coordinates(self, ctx, x, y): + """ + This is a port of EC_KEY_set_public_key_affine_coordinates that was + added in 1.0.1. + + Sets the public key point in the EC_KEY context to the affine x and y + values. 
+ """ + + assert ctx != self._ffi.NULL + + bn_x = self._int_to_bn(x) + bn_y = self._int_to_bn(y) + + nid_two_field = self._lib.OBJ_sn2nid(b"characteristic-two-field") + assert nid_two_field != self._lib.NID_undef + + bn_ctx = self._lib.BN_CTX_new() + assert bn_ctx != self._ffi.NULL + bn_ctx = self._ffi.gc(bn_ctx, self._lib.BN_CTX_free) + + group = self._lib.EC_KEY_get0_group(ctx) + assert group != self._ffi.NULL + + point = self._lib.EC_POINT_new(group) + assert point != self._ffi.NULL + point = self._ffi.gc(point, self._lib.EC_POINT_free) + + method = self._lib.EC_GROUP_method_of(group) + assert method != self._ffi.NULL + + nid = self._lib.EC_METHOD_get_field_type(method) + assert nid != self._lib.NID_undef + + check_x = self._lib.BN_CTX_get(bn_ctx) + check_y = self._lib.BN_CTX_get(bn_ctx) + + if nid == nid_two_field and self._lib.Cryptography_HAS_EC2M: + set_func = self._lib.EC_POINT_set_affine_coordinates_GF2m + get_func = self._lib.EC_POINT_get_affine_coordinates_GF2m + else: + set_func = self._lib.EC_POINT_set_affine_coordinates_GFp + get_func = self._lib.EC_POINT_get_affine_coordinates_GFp + + assert set_func and get_func + + res = set_func(group, point, bn_x, bn_y, bn_ctx) + assert res == 1 + + res = get_func(group, point, check_x, check_y, bn_ctx) + assert res == 1 + + assert ( + self._lib.BN_cmp(bn_x, check_x) == 0 and + self._lib.BN_cmp(bn_y, check_y) == 0 + ) + + res = self._lib.EC_KEY_set_public_key(ctx, point) + assert res == 1 + + res = self._lib.EC_KEY_check_key(ctx) + assert res == 1 + + return ctx + class GetCipherByName(object): def __init__(self, fmt): @@ -666,822 +1058,4 @@ class GetCipherByName(object): return backend._lib.EVP_get_cipherbyname(cipher_name.encode("ascii")) -@utils.register_interface(interfaces.CipherContext) -@utils.register_interface(interfaces.AEADCipherContext) -@utils.register_interface(interfaces.AEADEncryptionContext) -class _CipherContext(object): - _ENCRYPT = 1 - _DECRYPT = 0 - - def __init__(self, backend, cipher, mode, operation): - self._backend = backend - self._cipher = cipher - self._mode = mode - self._operation = operation - self._tag = None - - if isinstance(self._cipher, interfaces.BlockCipherAlgorithm): - self._block_size = self._cipher.block_size - else: - self._block_size = 1 - - ctx = self._backend._lib.EVP_CIPHER_CTX_new() - ctx = self._backend._ffi.gc( - ctx, self._backend._lib.EVP_CIPHER_CTX_free - ) - - registry = self._backend._cipher_registry - try: - adapter = registry[type(cipher), type(mode)] - except KeyError: - raise UnsupportedAlgorithm( - "cipher {0} in {1} mode is not supported " - "by this backend".format( - cipher.name, mode.name if mode else mode), - _Reasons.UNSUPPORTED_CIPHER - ) - - evp_cipher = adapter(self._backend, cipher, mode) - if evp_cipher == self._backend._ffi.NULL: - raise UnsupportedAlgorithm( - "cipher {0} in {1} mode is not supported " - "by this backend".format( - cipher.name, mode.name if mode else mode), - _Reasons.UNSUPPORTED_CIPHER - ) - - if isinstance(mode, interfaces.ModeWithInitializationVector): - iv_nonce = mode.initialization_vector - elif isinstance(mode, interfaces.ModeWithNonce): - iv_nonce = mode.nonce - else: - iv_nonce = self._backend._ffi.NULL - # begin init with cipher and operation type - res = self._backend._lib.EVP_CipherInit_ex(ctx, evp_cipher, - self._backend._ffi.NULL, - self._backend._ffi.NULL, - self._backend._ffi.NULL, - operation) - assert res != 0 - # set the key length to handle variable key ciphers - res = self._backend._lib.EVP_CIPHER_CTX_set_key_length( - ctx, 
len(cipher.key) - ) - assert res != 0 - if isinstance(mode, GCM): - res = self._backend._lib.EVP_CIPHER_CTX_ctrl( - ctx, self._backend._lib.EVP_CTRL_GCM_SET_IVLEN, - len(iv_nonce), self._backend._ffi.NULL - ) - assert res != 0 - if operation == self._DECRYPT: - res = self._backend._lib.EVP_CIPHER_CTX_ctrl( - ctx, self._backend._lib.EVP_CTRL_GCM_SET_TAG, - len(mode.tag), mode.tag - ) - assert res != 0 - - # pass key/iv - res = self._backend._lib.EVP_CipherInit_ex( - ctx, - self._backend._ffi.NULL, - self._backend._ffi.NULL, - cipher.key, - iv_nonce, - operation - ) - assert res != 0 - # We purposely disable padding here as it's handled higher up in the - # API. - self._backend._lib.EVP_CIPHER_CTX_set_padding(ctx, 0) - self._ctx = ctx - - def update(self, data): - # OpenSSL 0.9.8e has an assertion in its EVP code that causes it - # to SIGABRT if you call update with an empty byte string. This can be - # removed when we drop support for 0.9.8e (CentOS/RHEL 5). This branch - # should be taken only when length is zero and mode is not GCM because - # AES GCM can return improper tag values if you don't call update - # with empty plaintext when authenticating AAD for ...reasons. - if len(data) == 0 and not isinstance(self._mode, GCM): - return b"" - - buf = self._backend._ffi.new("unsigned char[]", - len(data) + self._block_size - 1) - outlen = self._backend._ffi.new("int *") - res = self._backend._lib.EVP_CipherUpdate(self._ctx, buf, outlen, data, - len(data)) - assert res != 0 - return self._backend._ffi.buffer(buf)[:outlen[0]] - - def finalize(self): - buf = self._backend._ffi.new("unsigned char[]", self._block_size) - outlen = self._backend._ffi.new("int *") - res = self._backend._lib.EVP_CipherFinal_ex(self._ctx, buf, outlen) - if res == 0: - errors = self._backend._consume_errors() - - if not errors and isinstance(self._mode, GCM): - raise InvalidTag - - assert errors - - if errors[0][1:] == ( - self._backend._lib.ERR_LIB_EVP, - self._backend._lib.EVP_F_EVP_ENCRYPTFINAL_EX, - self._backend._lib.EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH - ) or errors[0][1:] == ( - self._backend._lib.ERR_LIB_EVP, - self._backend._lib.EVP_F_EVP_DECRYPTFINAL_EX, - self._backend._lib.EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH - ): - raise ValueError( - "The length of the provided data is not a multiple of " - "the block length." 
- ) - else: - raise self._backend._unknown_error(errors[0]) - - if (isinstance(self._mode, GCM) and - self._operation == self._ENCRYPT): - block_byte_size = self._block_size // 8 - tag_buf = self._backend._ffi.new( - "unsigned char[]", block_byte_size - ) - res = self._backend._lib.EVP_CIPHER_CTX_ctrl( - self._ctx, self._backend._lib.EVP_CTRL_GCM_GET_TAG, - block_byte_size, tag_buf - ) - assert res != 0 - self._tag = self._backend._ffi.buffer(tag_buf)[:] - - res = self._backend._lib.EVP_CIPHER_CTX_cleanup(self._ctx) - assert res == 1 - return self._backend._ffi.buffer(buf)[:outlen[0]] - - def authenticate_additional_data(self, data): - outlen = self._backend._ffi.new("int *") - res = self._backend._lib.EVP_CipherUpdate( - self._ctx, self._backend._ffi.NULL, outlen, data, len(data) - ) - assert res != 0 - - @property - def tag(self): - return self._tag - - -@utils.register_interface(interfaces.HashContext) -class _HashContext(object): - def __init__(self, backend, algorithm, ctx=None): - self.algorithm = algorithm - - self._backend = backend - - if ctx is None: - ctx = self._backend._lib.EVP_MD_CTX_create() - ctx = self._backend._ffi.gc(ctx, - self._backend._lib.EVP_MD_CTX_destroy) - evp_md = self._backend._lib.EVP_get_digestbyname( - algorithm.name.encode("ascii")) - if evp_md == self._backend._ffi.NULL: - raise UnsupportedAlgorithm( - "{0} is not a supported hash on this backend".format( - algorithm.name), - _Reasons.UNSUPPORTED_HASH - ) - res = self._backend._lib.EVP_DigestInit_ex(ctx, evp_md, - self._backend._ffi.NULL) - assert res != 0 - - self._ctx = ctx - - def copy(self): - copied_ctx = self._backend._lib.EVP_MD_CTX_create() - copied_ctx = self._backend._ffi.gc( - copied_ctx, self._backend._lib.EVP_MD_CTX_destroy - ) - res = self._backend._lib.EVP_MD_CTX_copy_ex(copied_ctx, self._ctx) - assert res != 0 - return _HashContext(self._backend, self.algorithm, ctx=copied_ctx) - - def update(self, data): - res = self._backend._lib.EVP_DigestUpdate(self._ctx, data, len(data)) - assert res != 0 - - def finalize(self): - buf = self._backend._ffi.new("unsigned char[]", - self._backend._lib.EVP_MAX_MD_SIZE) - outlen = self._backend._ffi.new("unsigned int *") - res = self._backend._lib.EVP_DigestFinal_ex(self._ctx, buf, outlen) - assert res != 0 - assert outlen[0] == self.algorithm.digest_size - res = self._backend._lib.EVP_MD_CTX_cleanup(self._ctx) - assert res == 1 - return self._backend._ffi.buffer(buf)[:outlen[0]] - - -@utils.register_interface(interfaces.HashContext) -class _HMACContext(object): - def __init__(self, backend, key, algorithm, ctx=None): - self.algorithm = algorithm - self._backend = backend - - if ctx is None: - ctx = self._backend._ffi.new("HMAC_CTX *") - self._backend._lib.HMAC_CTX_init(ctx) - ctx = self._backend._ffi.gc( - ctx, self._backend._lib.HMAC_CTX_cleanup - ) - evp_md = self._backend._lib.EVP_get_digestbyname( - algorithm.name.encode('ascii')) - if evp_md == self._backend._ffi.NULL: - raise UnsupportedAlgorithm( - "{0} is not a supported hash on this backend".format( - algorithm.name), - _Reasons.UNSUPPORTED_HASH - ) - res = self._backend._lib.Cryptography_HMAC_Init_ex( - ctx, key, len(key), evp_md, self._backend._ffi.NULL - ) - assert res != 0 - - self._ctx = ctx - self._key = key - - def copy(self): - copied_ctx = self._backend._ffi.new("HMAC_CTX *") - self._backend._lib.HMAC_CTX_init(copied_ctx) - copied_ctx = self._backend._ffi.gc( - copied_ctx, self._backend._lib.HMAC_CTX_cleanup - ) - res = self._backend._lib.Cryptography_HMAC_CTX_copy( - copied_ctx, 
self._ctx - ) - assert res != 0 - return _HMACContext( - self._backend, self._key, self.algorithm, ctx=copied_ctx - ) - - def update(self, data): - res = self._backend._lib.Cryptography_HMAC_Update( - self._ctx, data, len(data) - ) - assert res != 0 - - def finalize(self): - buf = self._backend._ffi.new("unsigned char[]", - self._backend._lib.EVP_MAX_MD_SIZE) - outlen = self._backend._ffi.new("unsigned int *") - res = self._backend._lib.Cryptography_HMAC_Final( - self._ctx, buf, outlen - ) - assert res != 0 - assert outlen[0] == self.algorithm.digest_size - self._backend._lib.HMAC_CTX_cleanup(self._ctx) - return self._backend._ffi.buffer(buf)[:outlen[0]] - - -def _get_rsa_pss_salt_length(pss, key_size, digest_size): - if pss._mgf._salt_length is not None: - salt = pss._mgf._salt_length - else: - salt = pss._salt_length - - if salt is MGF1.MAX_LENGTH or salt is PSS.MAX_LENGTH: - # bit length - 1 per RFC 3447 - emlen = int(math.ceil((key_size - 1) / 8.0)) - salt_length = emlen - digest_size - 2 - assert salt_length >= 0 - return salt_length - else: - return salt - - -@utils.register_interface(interfaces.AsymmetricSignatureContext) -class _RSASignatureContext(object): - def __init__(self, backend, private_key, padding, algorithm): - self._backend = backend - self._private_key = private_key - - if not isinstance(padding, interfaces.AsymmetricPadding): - raise TypeError( - "Expected provider of interfaces.AsymmetricPadding") - - if isinstance(padding, PKCS1v15): - if self._backend._lib.Cryptography_HAS_PKEY_CTX: - self._finalize_method = self._finalize_pkey_ctx - self._padding_enum = self._backend._lib.RSA_PKCS1_PADDING - else: - self._finalize_method = self._finalize_pkcs1 - elif isinstance(padding, PSS): - if not isinstance(padding._mgf, MGF1): - raise UnsupportedAlgorithm( - "Only MGF1 is supported by this backend", - _Reasons.UNSUPPORTED_MGF - ) - - # Size of key in bytes - 2 is the maximum - # PSS signature length (salt length is checked later) - key_size_bytes = int(math.ceil(private_key.key_size / 8.0)) - if key_size_bytes - algorithm.digest_size - 2 < 0: - raise ValueError("Digest too large for key size. 
Use a larger " - "key.") - - if not self._backend.mgf1_hash_supported(padding._mgf._algorithm): - raise UnsupportedAlgorithm( - "When OpenSSL is older than 1.0.1 then only SHA1 is " - "supported with MGF1.", - _Reasons.UNSUPPORTED_HASH - ) - - if self._backend._lib.Cryptography_HAS_PKEY_CTX: - self._finalize_method = self._finalize_pkey_ctx - self._padding_enum = self._backend._lib.RSA_PKCS1_PSS_PADDING - else: - self._finalize_method = self._finalize_pss - else: - raise UnsupportedAlgorithm( - "{0} is not supported by this backend".format(padding.name), - _Reasons.UNSUPPORTED_PADDING - ) - - self._padding = padding - self._algorithm = algorithm - self._hash_ctx = _HashContext(backend, self._algorithm) - - def update(self, data): - if self._hash_ctx is None: - raise AlreadyFinalized("Context has already been finalized") - - self._hash_ctx.update(data) - - def finalize(self): - if self._hash_ctx is None: - raise AlreadyFinalized("Context has already been finalized") - - evp_pkey = self._backend._rsa_private_key_to_evp_pkey( - self._private_key) - - evp_md = self._backend._lib.EVP_get_digestbyname( - self._algorithm.name.encode("ascii")) - assert evp_md != self._backend._ffi.NULL - pkey_size = self._backend._lib.EVP_PKEY_size(evp_pkey) - assert pkey_size > 0 - - return self._finalize_method(evp_pkey, pkey_size, evp_md) - - def _finalize_pkey_ctx(self, evp_pkey, pkey_size, evp_md): - pkey_ctx = self._backend._lib.EVP_PKEY_CTX_new( - evp_pkey, self._backend._ffi.NULL - ) - assert pkey_ctx != self._backend._ffi.NULL - pkey_ctx = self._backend._ffi.gc(pkey_ctx, - self._backend._lib.EVP_PKEY_CTX_free) - res = self._backend._lib.EVP_PKEY_sign_init(pkey_ctx) - assert res == 1 - res = self._backend._lib.EVP_PKEY_CTX_set_signature_md( - pkey_ctx, evp_md) - assert res > 0 - - res = self._backend._lib.EVP_PKEY_CTX_set_rsa_padding( - pkey_ctx, self._padding_enum) - assert res > 0 - if isinstance(self._padding, PSS): - res = self._backend._lib.EVP_PKEY_CTX_set_rsa_pss_saltlen( - pkey_ctx, - _get_rsa_pss_salt_length( - self._padding, - self._private_key.key_size, - self._hash_ctx.algorithm.digest_size - ) - ) - assert res > 0 - - if self._backend._lib.Cryptography_HAS_MGF1_MD: - # MGF1 MD is configurable in OpenSSL 1.0.1+ - mgf1_md = self._backend._lib.EVP_get_digestbyname( - self._padding._mgf._algorithm.name.encode("ascii")) - assert mgf1_md != self._backend._ffi.NULL - res = self._backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md( - pkey_ctx, mgf1_md - ) - assert res > 0 - data_to_sign = self._hash_ctx.finalize() - self._hash_ctx = None - buflen = self._backend._ffi.new("size_t *") - res = self._backend._lib.EVP_PKEY_sign( - pkey_ctx, - self._backend._ffi.NULL, - buflen, - data_to_sign, - len(data_to_sign) - ) - assert res == 1 - buf = self._backend._ffi.new("unsigned char[]", buflen[0]) - res = self._backend._lib.EVP_PKEY_sign( - pkey_ctx, buf, buflen, data_to_sign, len(data_to_sign)) - if res != 1: - errors = self._backend._consume_errors() - assert errors[0].lib == self._backend._lib.ERR_LIB_RSA - reason = None - if (errors[0].reason == - self._backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE): - reason = ("Salt length too long for key size. Try using " - "MAX_LENGTH instead.") - elif (errors[0].reason == - self._backend._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY): - reason = "Digest too large for key size. Use a larger key." 
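The MAX_LENGTH branch of _get_rsa_pss_salt_length above computes the largest salt that still fits in the PSS encoded message (emLen from RFC 3447). A standalone restatement of that arithmetic, with purely illustrative key and digest sizes, might look like this:

import math

def max_pss_salt_length(key_size_bits, digest_size_bytes):
    # emLen = ceil((modBits - 1) / 8) per RFC 3447; the salt, the hash and
    # two fixed bytes (the 0x01 separator and the 0xbc trailer) must fit.
    emlen = int(math.ceil((key_size_bits - 1) / 8.0))
    salt_length = emlen - digest_size_bytes - 2
    assert salt_length >= 0
    return salt_length

# A 2048-bit key with SHA-256: ceil(2047 / 8) = 256, and 256 - 32 - 2 = 222.
print(max_pss_salt_length(2048, 32))  # 222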
- assert reason is not None - raise ValueError(reason) - - return self._backend._ffi.buffer(buf)[:] - - def _finalize_pkcs1(self, evp_pkey, pkey_size, evp_md): - sig_buf = self._backend._ffi.new("char[]", pkey_size) - sig_len = self._backend._ffi.new("unsigned int *") - res = self._backend._lib.EVP_SignFinal( - self._hash_ctx._ctx, - sig_buf, - sig_len, - evp_pkey - ) - self._hash_ctx.finalize() - self._hash_ctx = None - if res == 0: - errors = self._backend._consume_errors() - assert errors[0].lib == self._backend._lib.ERR_LIB_RSA - assert (errors[0].reason == - self._backend._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY) - raise ValueError("Digest too large for key size. Use a larger " - "key.") - - return self._backend._ffi.buffer(sig_buf)[:sig_len[0]] - - def _finalize_pss(self, evp_pkey, pkey_size, evp_md): - data_to_sign = self._hash_ctx.finalize() - self._hash_ctx = None - padded = self._backend._ffi.new("unsigned char[]", pkey_size) - rsa_cdata = self._backend._lib.EVP_PKEY_get1_RSA(evp_pkey) - assert rsa_cdata != self._backend._ffi.NULL - rsa_cdata = self._backend._ffi.gc(rsa_cdata, - self._backend._lib.RSA_free) - res = self._backend._lib.RSA_padding_add_PKCS1_PSS( - rsa_cdata, - padded, - data_to_sign, - evp_md, - _get_rsa_pss_salt_length( - self._padding, - self._private_key.key_size, - len(data_to_sign) - ) - ) - if res != 1: - errors = self._backend._consume_errors() - assert errors[0].lib == self._backend._lib.ERR_LIB_RSA - assert (errors[0].reason == - self._backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE) - raise ValueError("Salt length too long for key size. Try using " - "MAX_LENGTH instead.") - - sig_buf = self._backend._ffi.new("char[]", pkey_size) - sig_len = self._backend._lib.RSA_private_encrypt( - pkey_size, - padded, - sig_buf, - rsa_cdata, - self._backend._lib.RSA_NO_PADDING - ) - assert sig_len != -1 - return self._backend._ffi.buffer(sig_buf)[:sig_len] - - -@utils.register_interface(interfaces.AsymmetricVerificationContext) -class _RSAVerificationContext(object): - def __init__(self, backend, public_key, signature, padding, algorithm): - self._backend = backend - self._public_key = public_key - self._signature = signature - - if not isinstance(padding, interfaces.AsymmetricPadding): - raise TypeError( - "Expected provider of interfaces.AsymmetricPadding") - - if isinstance(padding, PKCS1v15): - if self._backend._lib.Cryptography_HAS_PKEY_CTX: - self._verify_method = self._verify_pkey_ctx - self._padding_enum = self._backend._lib.RSA_PKCS1_PADDING - else: - self._verify_method = self._verify_pkcs1 - elif isinstance(padding, PSS): - if not isinstance(padding._mgf, MGF1): - raise UnsupportedAlgorithm( - "Only MGF1 is supported by this backend", - _Reasons.UNSUPPORTED_MGF - ) - - # Size of key in bytes - 2 is the maximum - # PSS signature length (salt length is checked later) - key_size_bytes = int(math.ceil(public_key.key_size / 8.0)) - if key_size_bytes - algorithm.digest_size - 2 < 0: - raise ValueError( - "Digest too large for key size. Check that you have the " - "correct key and digest algorithm." 
- ) - - if not self._backend.mgf1_hash_supported(padding._mgf._algorithm): - raise UnsupportedAlgorithm( - "When OpenSSL is older than 1.0.1 then only SHA1 is " - "supported with MGF1.", - _Reasons.UNSUPPORTED_HASH - ) - - if self._backend._lib.Cryptography_HAS_PKEY_CTX: - self._verify_method = self._verify_pkey_ctx - self._padding_enum = self._backend._lib.RSA_PKCS1_PSS_PADDING - else: - self._verify_method = self._verify_pss - else: - raise UnsupportedAlgorithm( - "{0} is not supported by this backend".format(padding.name), - _Reasons.UNSUPPORTED_PADDING - ) - - self._padding = padding - self._algorithm = algorithm - self._hash_ctx = _HashContext(backend, self._algorithm) - - def update(self, data): - if self._hash_ctx is None: - raise AlreadyFinalized("Context has already been finalized") - - self._hash_ctx.update(data) - - def verify(self): - if self._hash_ctx is None: - raise AlreadyFinalized("Context has already been finalized") - - evp_pkey = self._backend._rsa_public_key_to_evp_pkey( - self._public_key) - - evp_md = self._backend._lib.EVP_get_digestbyname( - self._algorithm.name.encode("ascii")) - assert evp_md != self._backend._ffi.NULL - - self._verify_method(evp_pkey, evp_md) - - def _verify_pkey_ctx(self, evp_pkey, evp_md): - pkey_ctx = self._backend._lib.EVP_PKEY_CTX_new( - evp_pkey, self._backend._ffi.NULL - ) - assert pkey_ctx != self._backend._ffi.NULL - pkey_ctx = self._backend._ffi.gc(pkey_ctx, - self._backend._lib.EVP_PKEY_CTX_free) - res = self._backend._lib.EVP_PKEY_verify_init(pkey_ctx) - assert res == 1 - res = self._backend._lib.EVP_PKEY_CTX_set_signature_md( - pkey_ctx, evp_md) - assert res > 0 - - res = self._backend._lib.EVP_PKEY_CTX_set_rsa_padding( - pkey_ctx, self._padding_enum) - assert res > 0 - if isinstance(self._padding, PSS): - res = self._backend._lib.EVP_PKEY_CTX_set_rsa_pss_saltlen( - pkey_ctx, - _get_rsa_pss_salt_length( - self._padding, - self._public_key.key_size, - self._hash_ctx.algorithm.digest_size - ) - ) - assert res > 0 - if self._backend._lib.Cryptography_HAS_MGF1_MD: - # MGF1 MD is configurable in OpenSSL 1.0.1+ - mgf1_md = self._backend._lib.EVP_get_digestbyname( - self._padding._mgf._algorithm.name.encode("ascii")) - assert mgf1_md != self._backend._ffi.NULL - res = self._backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md( - pkey_ctx, mgf1_md - ) - assert res > 0 - - data_to_verify = self._hash_ctx.finalize() - self._hash_ctx = None - res = self._backend._lib.EVP_PKEY_verify( - pkey_ctx, - self._signature, - len(self._signature), - data_to_verify, - len(data_to_verify) - ) - # The previous call can return negative numbers in the event of an - # error. This is not a signature failure but we need to fail if it - # occurs. - assert res >= 0 - if res == 0: - errors = self._backend._consume_errors() - assert errors - raise InvalidSignature - - def _verify_pkcs1(self, evp_pkey, evp_md): - res = self._backend._lib.EVP_VerifyFinal( - self._hash_ctx._ctx, - self._signature, - len(self._signature), - evp_pkey - ) - self._hash_ctx.finalize() - self._hash_ctx = None - # The previous call can return negative numbers in the event of an - # error. This is not a signature failure but we need to fail if it - # occurs. 
- assert res >= 0 - if res == 0: - errors = self._backend._consume_errors() - assert errors - raise InvalidSignature - - def _verify_pss(self, evp_pkey, evp_md): - pkey_size = self._backend._lib.EVP_PKEY_size(evp_pkey) - assert pkey_size > 0 - rsa_cdata = self._backend._lib.EVP_PKEY_get1_RSA(evp_pkey) - assert rsa_cdata != self._backend._ffi.NULL - rsa_cdata = self._backend._ffi.gc(rsa_cdata, - self._backend._lib.RSA_free) - buf = self._backend._ffi.new("unsigned char[]", pkey_size) - res = self._backend._lib.RSA_public_decrypt( - len(self._signature), - self._signature, - buf, - rsa_cdata, - self._backend._lib.RSA_NO_PADDING - ) - if res != pkey_size: - errors = self._backend._consume_errors() - assert errors - raise InvalidSignature - - data_to_verify = self._hash_ctx.finalize() - self._hash_ctx = None - res = self._backend._lib.RSA_verify_PKCS1_PSS( - rsa_cdata, - data_to_verify, - evp_md, - buf, - _get_rsa_pss_salt_length( - self._padding, - self._public_key.key_size, - len(data_to_verify) - ) - ) - if res != 1: - errors = self._backend._consume_errors() - assert errors - raise InvalidSignature - - -@utils.register_interface(interfaces.AsymmetricVerificationContext) -class _DSAVerificationContext(object): - def __init__(self, backend, public_key, signature, algorithm): - self._backend = backend - self._public_key = public_key - self._signature = signature - self._algorithm = algorithm - - self._hash_ctx = _HashContext(backend, self._algorithm) - - def update(self, data): - if self._hash_ctx is None: - raise AlreadyFinalized("Context has already been finalized") - - self._hash_ctx.update(data) - - def verify(self): - if self._hash_ctx is None: - raise AlreadyFinalized("Context has already been finalized") - - self._dsa_cdata = self._backend._dsa_cdata_from_public_key( - self._public_key) - self._dsa_cdata = self._backend._ffi.gc(self._dsa_cdata, - self._backend._lib.DSA_free) - - data_to_verify = self._hash_ctx.finalize() - self._hash_ctx = None - - # The first parameter passed to DSA_verify is unused by OpenSSL but - # must be an integer. - res = self._backend._lib.DSA_verify( - 0, data_to_verify, len(data_to_verify), self._signature, - len(self._signature), self._dsa_cdata) - - if res != 1: - errors = self._backend._consume_errors() - assert errors - if res == -1: - assert errors[0].lib == self._backend._lib.ERR_LIB_ASN1 - - raise InvalidSignature - - -@utils.register_interface(interfaces.AsymmetricSignatureContext) -class _DSASignatureContext(object): - def __init__(self, backend, private_key, algorithm): - self._backend = backend - self._private_key = private_key - self._algorithm = algorithm - self._hash_ctx = _HashContext(backend, self._algorithm) - self._dsa_cdata = self._backend._dsa_cdata_from_private_key( - self._private_key) - self._dsa_cdata = self._backend._ffi.gc(self._dsa_cdata, - self._backend._lib.DSA_free) - - def update(self, data): - if self._hash_ctx is None: - raise AlreadyFinalized("Context has already been finalized") - - self._hash_ctx.update(data) - - def finalize(self): - if self._hash_ctx is None: - raise AlreadyFinalized("Context has already been finalized") - - data_to_sign = self._hash_ctx.finalize() - self._hash_ctx = None - sig_buf_len = self._backend._lib.DSA_size(self._dsa_cdata) - sig_buf = self._backend._ffi.new("unsigned char[]", sig_buf_len) - buflen = self._backend._ffi.new("unsigned int *") - - # The first parameter passed to DSA_sign is unused by OpenSSL but - # must be an integer. 
- res = self._backend._lib.DSA_sign( - 0, data_to_sign, len(data_to_sign), sig_buf, - buflen, self._dsa_cdata) - assert res == 1 - assert buflen[0] - - return self._backend._ffi.buffer(sig_buf)[:buflen[0]] - - -@utils.register_interface(interfaces.CMACContext) -class _CMACContext(object): - def __init__(self, backend, algorithm, ctx=None): - if not backend.cmac_algorithm_supported(algorithm): - raise UnsupportedAlgorithm("This backend does not support CMAC", - _Reasons.UNSUPPORTED_CIPHER) - - self._backend = backend - self._key = algorithm.key - self._algorithm = algorithm - self._output_length = algorithm.block_size // 8 - - if ctx is None: - registry = self._backend._cipher_registry - adapter = registry[type(algorithm), CBC] - - evp_cipher = adapter(self._backend, algorithm, CBC) - - ctx = self._backend._lib.CMAC_CTX_new() - - assert ctx != self._backend._ffi.NULL - ctx = self._backend._ffi.gc(ctx, self._backend._lib.CMAC_CTX_free) - - self._backend._lib.CMAC_Init( - ctx, self._key, len(self._key), - evp_cipher, self._backend._ffi.NULL - ) - - self._ctx = ctx - - def update(self, data): - res = self._backend._lib.CMAC_Update(self._ctx, data, len(data)) - assert res == 1 - - def finalize(self): - buf = self._backend._ffi.new("unsigned char[]", self._output_length) - length = self._backend._ffi.new("size_t *", self._output_length) - res = self._backend._lib.CMAC_Final( - self._ctx, buf, length - ) - assert res == 1 - - self._ctx = None - - return self._backend._ffi.buffer(buf)[:] - - def copy(self): - copied_ctx = self._backend._lib.CMAC_CTX_new() - copied_ctx = self._backend._ffi.gc( - copied_ctx, self._backend._lib.CMAC_CTX_free - ) - res = self._backend._lib.CMAC_CTX_copy( - copied_ctx, self._ctx - ) - assert res == 1 - return _CMACContext( - self._backend, self._algorithm, ctx=copied_ctx - ) - - backend = Backend() diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/openssl/ciphers.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/openssl/ciphers.py new file mode 100644 index 0000000..c3a5499 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/openssl/ciphers.py @@ -0,0 +1,219 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
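The _CipherContext removed from backend.py above (and re-added in ciphers.py below) is normally driven through the public ciphers API rather than instantiated directly. A usage sketch of the GCM path, assuming the library's public API of this era; the key, nonce, and data are placeholders:

import os

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

backend = default_backend()
key = os.urandom(32)
iv = os.urandom(12)

# Encryption: the backend builds a _CipherContext with operation=_ENCRYPT;
# finalize() fetches the tag via EVP_CTRL_GCM_GET_TAG.
encryptor = Cipher(algorithms.AES(key), modes.GCM(iv), backend=backend).encryptor()
encryptor.authenticate_additional_data(b"header")
ciphertext = encryptor.update(b"secret payload") + encryptor.finalize()
tag = encryptor.tag

# Decryption: passing the tag into the mode makes finalize() verify it
# (EVP_CTRL_GCM_SET_TAG in __init__) and raise InvalidTag on mismatch.
decryptor = Cipher(algorithms.AES(key), modes.GCM(iv, tag), backend=backend).decryptor()
decryptor.authenticate_additional_data(b"header")
assert decryptor.update(ciphertext) + decryptor.finalize() == b"secret payload"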
+ +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.exceptions import InvalidTag, UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.primitives import interfaces +from cryptography.hazmat.primitives.ciphers.modes import GCM + + +@utils.register_interface(interfaces.CipherContext) +@utils.register_interface(interfaces.AEADCipherContext) +@utils.register_interface(interfaces.AEADEncryptionContext) +class _CipherContext(object): + _ENCRYPT = 1 + _DECRYPT = 0 + + def __init__(self, backend, cipher, mode, operation): + self._backend = backend + self._cipher = cipher + self._mode = mode + self._operation = operation + self._tag = None + + if isinstance(self._cipher, interfaces.BlockCipherAlgorithm): + self._block_size = self._cipher.block_size + else: + self._block_size = 1 + + ctx = self._backend._lib.EVP_CIPHER_CTX_new() + ctx = self._backend._ffi.gc( + ctx, self._backend._lib.EVP_CIPHER_CTX_free + ) + + registry = self._backend._cipher_registry + try: + adapter = registry[type(cipher), type(mode)] + except KeyError: + raise UnsupportedAlgorithm( + "cipher {0} in {1} mode is not supported " + "by this backend.".format( + cipher.name, mode.name if mode else mode), + _Reasons.UNSUPPORTED_CIPHER + ) + + evp_cipher = adapter(self._backend, cipher, mode) + if evp_cipher == self._backend._ffi.NULL: + raise UnsupportedAlgorithm( + "cipher {0} in {1} mode is not supported " + "by this backend.".format( + cipher.name, mode.name if mode else mode), + _Reasons.UNSUPPORTED_CIPHER + ) + + if isinstance(mode, interfaces.ModeWithInitializationVector): + iv_nonce = mode.initialization_vector + elif isinstance(mode, interfaces.ModeWithNonce): + iv_nonce = mode.nonce + else: + iv_nonce = self._backend._ffi.NULL + # begin init with cipher and operation type + res = self._backend._lib.EVP_CipherInit_ex(ctx, evp_cipher, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + operation) + assert res != 0 + # set the key length to handle variable key ciphers + res = self._backend._lib.EVP_CIPHER_CTX_set_key_length( + ctx, len(cipher.key) + ) + assert res != 0 + if isinstance(mode, GCM): + res = self._backend._lib.EVP_CIPHER_CTX_ctrl( + ctx, self._backend._lib.EVP_CTRL_GCM_SET_IVLEN, + len(iv_nonce), self._backend._ffi.NULL + ) + assert res != 0 + if operation == self._DECRYPT: + res = self._backend._lib.EVP_CIPHER_CTX_ctrl( + ctx, self._backend._lib.EVP_CTRL_GCM_SET_TAG, + len(mode.tag), mode.tag + ) + assert res != 0 + + # pass key/iv + res = self._backend._lib.EVP_CipherInit_ex( + ctx, + self._backend._ffi.NULL, + self._backend._ffi.NULL, + cipher.key, + iv_nonce, + operation + ) + assert res != 0 + # We purposely disable padding here as it's handled higher up in the + # API. + self._backend._lib.EVP_CIPHER_CTX_set_padding(ctx, 0) + self._ctx = ctx + + def update(self, data): + # OpenSSL 0.9.8e has an assertion in its EVP code that causes it + # to SIGABRT if you call update with an empty byte string. This can be + # removed when we drop support for 0.9.8e (CentOS/RHEL 5). This branch + # should be taken only when length is zero and mode is not GCM because + # AES GCM can return improper tag values if you don't call update + # with empty plaintext when authenticating AAD for ...reasons. 
+ if len(data) == 0 and not isinstance(self._mode, GCM): + return b"" + + buf = self._backend._ffi.new("unsigned char[]", + len(data) + self._block_size - 1) + outlen = self._backend._ffi.new("int *") + res = self._backend._lib.EVP_CipherUpdate(self._ctx, buf, outlen, data, + len(data)) + assert res != 0 + return self._backend._ffi.buffer(buf)[:outlen[0]] + + def finalize(self): + buf = self._backend._ffi.new("unsigned char[]", self._block_size) + outlen = self._backend._ffi.new("int *") + res = self._backend._lib.EVP_CipherFinal_ex(self._ctx, buf, outlen) + if res == 0: + errors = self._backend._consume_errors() + + if not errors and isinstance(self._mode, GCM): + raise InvalidTag + + assert errors + + if errors[0][1:] == ( + self._backend._lib.ERR_LIB_EVP, + self._backend._lib.EVP_F_EVP_ENCRYPTFINAL_EX, + self._backend._lib.EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH + ) or errors[0][1:] == ( + self._backend._lib.ERR_LIB_EVP, + self._backend._lib.EVP_F_EVP_DECRYPTFINAL_EX, + self._backend._lib.EVP_R_DATA_NOT_MULTIPLE_OF_BLOCK_LENGTH + ): + raise ValueError( + "The length of the provided data is not a multiple of " + "the block length." + ) + else: + raise self._backend._unknown_error(errors[0]) + + if (isinstance(self._mode, GCM) and + self._operation == self._ENCRYPT): + block_byte_size = self._block_size // 8 + tag_buf = self._backend._ffi.new( + "unsigned char[]", block_byte_size + ) + res = self._backend._lib.EVP_CIPHER_CTX_ctrl( + self._ctx, self._backend._lib.EVP_CTRL_GCM_GET_TAG, + block_byte_size, tag_buf + ) + assert res != 0 + self._tag = self._backend._ffi.buffer(tag_buf)[:] + + res = self._backend._lib.EVP_CIPHER_CTX_cleanup(self._ctx) + assert res == 1 + return self._backend._ffi.buffer(buf)[:outlen[0]] + + def authenticate_additional_data(self, data): + outlen = self._backend._ffi.new("int *") + res = self._backend._lib.EVP_CipherUpdate( + self._ctx, self._backend._ffi.NULL, outlen, data, len(data) + ) + assert res != 0 + + @property + def tag(self): + return self._tag + + +@utils.register_interface(interfaces.CipherContext) +class _AESCTRCipherContext(object): + """ + This is needed to provide support for AES CTR mode in OpenSSL 0.9.8. It can + be removed when we drop 0.9.8 support (RHEL5 extended life ends 2020). 
+ """ + def __init__(self, backend, cipher, mode): + self._backend = backend + + self._key = self._backend._ffi.new("AES_KEY *") + assert self._key != self._backend._ffi.NULL + res = self._backend._lib.AES_set_encrypt_key( + cipher.key, len(cipher.key) * 8, self._key + ) + assert res == 0 + self._ecount = self._backend._ffi.new("char[]", 16) + self._nonce = self._backend._ffi.new("char[16]", mode.nonce) + self._num = self._backend._ffi.new("unsigned int *", 0) + + def update(self, data): + buf = self._backend._ffi.new("unsigned char[]", len(data)) + self._backend._lib.AES_ctr128_encrypt( + data, buf, len(data), self._key, self._nonce, + self._ecount, self._num + ) + return self._backend._ffi.buffer(buf)[:] + + def finalize(self): + self._key = None + self._ecount = None + self._nonce = None + self._num = None + return b"" diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/openssl/cmac.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/openssl/cmac.py new file mode 100644 index 0000000..7acf439 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/openssl/cmac.py @@ -0,0 +1,80 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + + +from cryptography import utils +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.primitives import interfaces +from cryptography.hazmat.primitives.ciphers.modes import CBC + + +@utils.register_interface(interfaces.CMACContext) +class _CMACContext(object): + def __init__(self, backend, algorithm, ctx=None): + if not backend.cmac_algorithm_supported(algorithm): + raise UnsupportedAlgorithm("This backend does not support CMAC.", + _Reasons.UNSUPPORTED_CIPHER) + + self._backend = backend + self._key = algorithm.key + self._algorithm = algorithm + self._output_length = algorithm.block_size // 8 + + if ctx is None: + registry = self._backend._cipher_registry + adapter = registry[type(algorithm), CBC] + + evp_cipher = adapter(self._backend, algorithm, CBC) + + ctx = self._backend._lib.CMAC_CTX_new() + + assert ctx != self._backend._ffi.NULL + ctx = self._backend._ffi.gc(ctx, self._backend._lib.CMAC_CTX_free) + + self._backend._lib.CMAC_Init( + ctx, self._key, len(self._key), + evp_cipher, self._backend._ffi.NULL + ) + + self._ctx = ctx + + def update(self, data): + res = self._backend._lib.CMAC_Update(self._ctx, data, len(data)) + assert res == 1 + + def finalize(self): + buf = self._backend._ffi.new("unsigned char[]", self._output_length) + length = self._backend._ffi.new("size_t *", self._output_length) + res = self._backend._lib.CMAC_Final( + self._ctx, buf, length + ) + assert res == 1 + + self._ctx = None + + return self._backend._ffi.buffer(buf)[:] + + def copy(self): + copied_ctx = self._backend._lib.CMAC_CTX_new() + copied_ctx = self._backend._ffi.gc( + copied_ctx, self._backend._lib.CMAC_CTX_free + ) + res = self._backend._lib.CMAC_CTX_copy( + copied_ctx, self._ctx + ) + 
assert res == 1 + return _CMACContext( + self._backend, self._algorithm, ctx=copied_ctx + ) diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/openssl/dsa.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/openssl/dsa.py new file mode 100644 index 0000000..5e7a26f --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/openssl/dsa.py @@ -0,0 +1,190 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +from cryptography import utils +from cryptography.exceptions import InvalidSignature +from cryptography.hazmat.primitives import hashes, interfaces +from cryptography.hazmat.primitives.asymmetric import dsa +from cryptography.hazmat.primitives.interfaces import ( + DSAParametersWithNumbers, DSAPrivateKeyWithNumbers, DSAPublicKeyWithNumbers +) + + +@utils.register_interface(interfaces.AsymmetricVerificationContext) +class _DSAVerificationContext(object): + def __init__(self, backend, public_key, signature, algorithm): + self._backend = backend + self._public_key = public_key + self._signature = signature + self._algorithm = algorithm + + self._hash_ctx = hashes.Hash(self._algorithm, self._backend) + + def update(self, data): + self._hash_ctx.update(data) + + def verify(self): + self._dsa_cdata = self._backend._ffi.gc(self._public_key._dsa_cdata, + self._backend._lib.DSA_free) + + data_to_verify = self._hash_ctx.finalize() + + # The first parameter passed to DSA_verify is unused by OpenSSL but + # must be an integer. + res = self._backend._lib.DSA_verify( + 0, data_to_verify, len(data_to_verify), self._signature, + len(self._signature), self._public_key._dsa_cdata) + + if res != 1: + errors = self._backend._consume_errors() + assert errors + if res == -1: + assert errors[0].lib == self._backend._lib.ERR_LIB_ASN1 + + raise InvalidSignature + + +@utils.register_interface(interfaces.AsymmetricSignatureContext) +class _DSASignatureContext(object): + def __init__(self, backend, private_key, algorithm): + self._backend = backend + self._private_key = private_key + self._algorithm = algorithm + self._hash_ctx = hashes.Hash(self._algorithm, self._backend) + + def update(self, data): + self._hash_ctx.update(data) + + def finalize(self): + data_to_sign = self._hash_ctx.finalize() + sig_buf_len = self._backend._lib.DSA_size(self._private_key._dsa_cdata) + sig_buf = self._backend._ffi.new("unsigned char[]", sig_buf_len) + buflen = self._backend._ffi.new("unsigned int *") + + # The first parameter passed to DSA_sign is unused by OpenSSL but + # must be an integer. 
+ res = self._backend._lib.DSA_sign( + 0, data_to_sign, len(data_to_sign), sig_buf, + buflen, self._private_key._dsa_cdata) + assert res == 1 + assert buflen[0] + + return self._backend._ffi.buffer(sig_buf)[:buflen[0]] + + +@utils.register_interface(DSAParametersWithNumbers) +class _DSAParameters(object): + def __init__(self, backend, dsa_cdata): + self._backend = backend + self._dsa_cdata = dsa_cdata + + def parameter_numbers(self): + return dsa.DSAParameterNumbers( + p=self._backend._bn_to_int(self._dsa_cdata.p), + q=self._backend._bn_to_int(self._dsa_cdata.q), + g=self._backend._bn_to_int(self._dsa_cdata.g) + ) + + def generate_private_key(self): + return self._backend.generate_dsa_private_key(self) + + +@utils.register_interface(DSAPrivateKeyWithNumbers) +class _DSAPrivateKey(object): + def __init__(self, backend, dsa_cdata): + self._backend = backend + self._dsa_cdata = dsa_cdata + self._key_size = self._backend._lib.BN_num_bits(self._dsa_cdata.p) + + @property + def key_size(self): + return self._key_size + + def signer(self, algorithm): + return _DSASignatureContext(self._backend, self, algorithm) + + def private_numbers(self): + return dsa.DSAPrivateNumbers( + public_numbers=dsa.DSAPublicNumbers( + parameter_numbers=dsa.DSAParameterNumbers( + p=self._backend._bn_to_int(self._dsa_cdata.p), + q=self._backend._bn_to_int(self._dsa_cdata.q), + g=self._backend._bn_to_int(self._dsa_cdata.g) + ), + y=self._backend._bn_to_int(self._dsa_cdata.pub_key) + ), + x=self._backend._bn_to_int(self._dsa_cdata.priv_key) + ) + + def public_key(self): + dsa_cdata = self._backend._lib.DSA_new() + assert dsa_cdata != self._backend._ffi.NULL + dsa_cdata = self._backend._ffi.gc( + dsa_cdata, self._backend._lib.DSA_free + ) + dsa_cdata.p = self._backend._lib.BN_dup(self._dsa_cdata.p) + dsa_cdata.q = self._backend._lib.BN_dup(self._dsa_cdata.q) + dsa_cdata.g = self._backend._lib.BN_dup(self._dsa_cdata.g) + dsa_cdata.pub_key = self._backend._lib.BN_dup(self._dsa_cdata.pub_key) + return _DSAPublicKey(self._backend, dsa_cdata) + + def parameters(self): + dsa_cdata = self._backend._lib.DSA_new() + assert dsa_cdata != self._backend._ffi.NULL + dsa_cdata = self._backend._ffi.gc( + dsa_cdata, self._backend._lib.DSA_free + ) + dsa_cdata.p = self._backend._lib.BN_dup(self._dsa_cdata.p) + dsa_cdata.q = self._backend._lib.BN_dup(self._dsa_cdata.q) + dsa_cdata.g = self._backend._lib.BN_dup(self._dsa_cdata.g) + return _DSAParameters(self._backend, dsa_cdata) + + +@utils.register_interface(DSAPublicKeyWithNumbers) +class _DSAPublicKey(object): + def __init__(self, backend, dsa_cdata): + self._backend = backend + self._dsa_cdata = dsa_cdata + self._key_size = self._backend._lib.BN_num_bits(self._dsa_cdata.p) + + @property + def key_size(self): + return self._key_size + + def verifier(self, signature, algorithm): + return _DSAVerificationContext( + self._backend, self, signature, algorithm + ) + + def public_numbers(self): + return dsa.DSAPublicNumbers( + parameter_numbers=dsa.DSAParameterNumbers( + p=self._backend._bn_to_int(self._dsa_cdata.p), + q=self._backend._bn_to_int(self._dsa_cdata.q), + g=self._backend._bn_to_int(self._dsa_cdata.g) + ), + y=self._backend._bn_to_int(self._dsa_cdata.pub_key) + ) + + def parameters(self): + dsa_cdata = self._backend._lib.DSA_new() + assert dsa_cdata != self._backend._ffi.NULL + dsa_cdata = self._backend._ffi.gc( + dsa_cdata, self._backend._lib.DSA_free + ) + dsa_cdata.p = self._backend._lib.BN_dup(self._dsa_cdata.p) + dsa_cdata.q = self._backend._lib.BN_dup(self._dsa_cdata.q) + 
dsa_cdata.g = self._backend._lib.BN_dup(self._dsa_cdata.g) + return _DSAParameters(self._backend, dsa_cdata) diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/openssl/ec.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/openssl/ec.py new file mode 100644 index 0000000..b7cd980 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/openssl/ec.py @@ -0,0 +1,191 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +import six + +from cryptography import utils +from cryptography.exceptions import ( + InvalidSignature, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.primitives import hashes, interfaces +from cryptography.hazmat.primitives.asymmetric import ec + + +def _truncate_digest_for_ecdsa(ec_key_cdata, digest, backend): + _lib = backend._lib + _ffi = backend._ffi + + digest_len = len(digest) + + group = _lib.EC_KEY_get0_group(ec_key_cdata) + + bn_ctx = _lib.BN_CTX_new() + assert bn_ctx != _ffi.NULL + bn_ctx = _ffi.gc(bn_ctx, _lib.BN_CTX_free) + + order = _lib.BN_CTX_get(bn_ctx) + assert order != _ffi.NULL + + res = _lib.EC_GROUP_get_order(group, order, bn_ctx) + assert res == 1 + + order_bits = _lib.BN_num_bits(order) + + if 8 * digest_len > order_bits: + digest_len = (order_bits + 7) // 8 + digest = digest[:digest_len] + + if 8 * digest_len > order_bits: + rshift = 8 - (order_bits & 0x7) + assert rshift > 0 and rshift < 8 + + mask = 0xFF >> rshift << rshift + + # Set the bottom rshift bits to 0 + digest = digest[:-1] + six.int2byte(six.indexbytes(digest, -1) & mask) + + return digest + + +@utils.register_interface(interfaces.AsymmetricSignatureContext) +class _ECDSASignatureContext(object): + def __init__(self, backend, private_key, algorithm): + self._backend = backend + self._private_key = private_key + self._digest = hashes.Hash(algorithm, backend) + + def update(self, data): + self._digest.update(data) + + def finalize(self): + ec_key = self._private_key._ec_key + + digest = self._digest.finalize() + + digest = _truncate_digest_for_ecdsa(ec_key, digest, self._backend) + + max_size = self._backend._lib.ECDSA_size(ec_key) + assert max_size > 0 + + sigbuf = self._backend._ffi.new("char[]", max_size) + siglen_ptr = self._backend._ffi.new("unsigned int[]", 1) + res = self._backend._lib.ECDSA_sign( + 0, + digest, + len(digest), + sigbuf, + siglen_ptr, + ec_key + ) + assert res == 1 + return self._backend._ffi.buffer(sigbuf)[:siglen_ptr[0]] + + +@utils.register_interface(interfaces.AsymmetricVerificationContext) +class _ECDSAVerificationContext(object): + def __init__(self, backend, public_key, signature, algorithm): + self._backend = backend + self._public_key = public_key + self._signature = signature + self._digest = hashes.Hash(algorithm, backend) + + def update(self, data): + self._digest.update(data) + + def verify(self): + ec_key = self._public_key._ec_key + + digest = self._digest.finalize() + + digest = 
_truncate_digest_for_ecdsa(ec_key, digest, self._backend) + + res = self._backend._lib.ECDSA_verify( + 0, + digest, + len(digest), + self._signature, + len(self._signature), + ec_key + ) + if res != 1: + self._backend._consume_errors() + raise InvalidSignature + return True + + +@utils.register_interface(interfaces.EllipticCurvePrivateKey) +class _EllipticCurvePrivateKey(object): + def __init__(self, backend, ec_key_cdata, curve): + self._backend = backend + self._ec_key = ec_key_cdata + self._curve = curve + + @property + def curve(self): + return self._curve + + def signer(self, signature_algorithm): + if isinstance(signature_algorithm, ec.ECDSA): + return _ECDSASignatureContext( + self._backend, self, signature_algorithm.algorithm + ) + else: + raise UnsupportedAlgorithm( + "Unsupported elliptic curve signature algorithm.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) + + def public_key(self): + group = self._backend._lib.EC_KEY_get0_group(self._ec_key) + assert group != self._backend._ffi.NULL + + curve_nid = self._backend._lib.EC_GROUP_get_curve_name(group) + + public_ec_key = self._backend._lib.EC_KEY_new_by_curve_name(curve_nid) + assert public_ec_key != self._backend._ffi.NULL + public_ec_key = self._backend._ffi.gc( + public_ec_key, self._backend._lib.EC_KEY_free + ) + + point = self._backend._lib.EC_KEY_get0_public_key(self._ec_key) + assert point != self._backend._ffi.NULL + + res = self._backend._lib.EC_KEY_set_public_key(public_ec_key, point) + assert res == 1 + + return _EllipticCurvePublicKey( + self._backend, public_ec_key, self._curve + ) + + +@utils.register_interface(interfaces.EllipticCurvePublicKey) +class _EllipticCurvePublicKey(object): + def __init__(self, backend, ec_key_cdata, curve): + self._backend = backend + self._ec_key = ec_key_cdata + self._curve = curve + + @property + def curve(self): + return self._curve + + def verifier(self, signature, signature_algorithm): + if isinstance(signature_algorithm, ec.ECDSA): + return _ECDSAVerificationContext( + self._backend, self, signature, signature_algorithm.algorithm + ) + else: + raise UnsupportedAlgorithm( + "Unsupported elliptic curve signature algorithm.", + _Reasons.UNSUPPORTED_PUBLIC_KEY_ALGORITHM) diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/openssl/hashes.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/openssl/hashes.py new file mode 100644 index 0000000..da91eef --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/openssl/hashes.py @@ -0,0 +1,69 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
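_truncate_digest_for_ecdsa above clips the digest so it is no longer than the bit length of the curve order before handing it to ECDSA_sign/ECDSA_verify. A simplified Python 3 restatement of the same rule (the order_bits value is illustrative; the real helper reads it from the EC group):

def truncate_digest_for_ecdsa(digest, order_bits):
    # Keep only as many leading bytes as the order needs.
    if 8 * len(digest) > order_bits:
        digest = digest[:(order_bits + 7) // 8]
    if 8 * len(digest) > order_bits:
        # The order is not a whole number of bytes: zero the low bits of the
        # final byte, mirroring the mask computed in the code above.
        rshift = 8 - (order_bits & 0x7)
        mask = (0xFF >> rshift) << rshift
        digest = digest[:-1] + bytes([digest[-1] & mask])
    return digest

# A 32-byte SHA-256 digest used with a 160-bit order keeps only its first 20 bytes.
assert len(truncate_digest_for_ecdsa(b"\x11" * 32, 160)) == 20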
+ +from __future__ import absolute_import, division, print_function + + +from cryptography import utils +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.primitives import interfaces + + +@utils.register_interface(interfaces.HashContext) +class _HashContext(object): + def __init__(self, backend, algorithm, ctx=None): + self.algorithm = algorithm + + self._backend = backend + + if ctx is None: + ctx = self._backend._lib.EVP_MD_CTX_create() + ctx = self._backend._ffi.gc(ctx, + self._backend._lib.EVP_MD_CTX_destroy) + evp_md = self._backend._lib.EVP_get_digestbyname( + algorithm.name.encode("ascii")) + if evp_md == self._backend._ffi.NULL: + raise UnsupportedAlgorithm( + "{0} is not a supported hash on this backend.".format( + algorithm.name), + _Reasons.UNSUPPORTED_HASH + ) + res = self._backend._lib.EVP_DigestInit_ex(ctx, evp_md, + self._backend._ffi.NULL) + assert res != 0 + + self._ctx = ctx + + def copy(self): + copied_ctx = self._backend._lib.EVP_MD_CTX_create() + copied_ctx = self._backend._ffi.gc( + copied_ctx, self._backend._lib.EVP_MD_CTX_destroy + ) + res = self._backend._lib.EVP_MD_CTX_copy_ex(copied_ctx, self._ctx) + assert res != 0 + return _HashContext(self._backend, self.algorithm, ctx=copied_ctx) + + def update(self, data): + res = self._backend._lib.EVP_DigestUpdate(self._ctx, data, len(data)) + assert res != 0 + + def finalize(self): + buf = self._backend._ffi.new("unsigned char[]", + self._backend._lib.EVP_MAX_MD_SIZE) + outlen = self._backend._ffi.new("unsigned int *") + res = self._backend._lib.EVP_DigestFinal_ex(self._ctx, buf, outlen) + assert res != 0 + assert outlen[0] == self.algorithm.digest_size + res = self._backend._lib.EVP_MD_CTX_cleanup(self._ctx) + assert res == 1 + return self._backend._ffi.buffer(buf)[:outlen[0]] diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/openssl/hmac.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/openssl/hmac.py new file mode 100644 index 0000000..3f1576f --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/openssl/hmac.py @@ -0,0 +1,80 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
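_HashContext above wraps a single EVP_MD_CTX, and its copy() duplicates the OpenSSL context with EVP_MD_CTX_copy_ex, which is what makes incremental hashing of a shared prefix cheap at the primitives layer. A small sketch, assuming the hashes.Hash API of this era; the data is a placeholder:

import binascii

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes

backend = default_backend()

# Hash the common prefix once, then branch; each copy() owns its own
# EVP_MD_CTX, so finalizing one branch does not disturb the other.
prefix = hashes.Hash(hashes.SHA256(), backend=backend)
prefix.update(b"common header ")

first = prefix.copy()
first.update(b"record 1")
second = prefix.copy()
second.update(b"record 2")

print(binascii.hexlify(first.finalize()))   # digest of b"common header record 1"
print(binascii.hexlify(second.finalize()))  # digest of b"common header record 2"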
+ +from __future__ import absolute_import, division, print_function + + +from cryptography import utils +from cryptography.exceptions import UnsupportedAlgorithm, _Reasons +from cryptography.hazmat.primitives import interfaces + + +@utils.register_interface(interfaces.HashContext) +class _HMACContext(object): + def __init__(self, backend, key, algorithm, ctx=None): + self.algorithm = algorithm + self._backend = backend + + if ctx is None: + ctx = self._backend._ffi.new("HMAC_CTX *") + self._backend._lib.HMAC_CTX_init(ctx) + ctx = self._backend._ffi.gc( + ctx, self._backend._lib.HMAC_CTX_cleanup + ) + evp_md = self._backend._lib.EVP_get_digestbyname( + algorithm.name.encode('ascii')) + if evp_md == self._backend._ffi.NULL: + raise UnsupportedAlgorithm( + "{0} is not a supported hash on this backend.".format( + algorithm.name), + _Reasons.UNSUPPORTED_HASH + ) + res = self._backend._lib.Cryptography_HMAC_Init_ex( + ctx, key, len(key), evp_md, self._backend._ffi.NULL + ) + assert res != 0 + + self._ctx = ctx + self._key = key + + def copy(self): + copied_ctx = self._backend._ffi.new("HMAC_CTX *") + self._backend._lib.HMAC_CTX_init(copied_ctx) + copied_ctx = self._backend._ffi.gc( + copied_ctx, self._backend._lib.HMAC_CTX_cleanup + ) + res = self._backend._lib.Cryptography_HMAC_CTX_copy( + copied_ctx, self._ctx + ) + assert res != 0 + return _HMACContext( + self._backend, self._key, self.algorithm, ctx=copied_ctx + ) + + def update(self, data): + res = self._backend._lib.Cryptography_HMAC_Update( + self._ctx, data, len(data) + ) + assert res != 0 + + def finalize(self): + buf = self._backend._ffi.new("unsigned char[]", + self._backend._lib.EVP_MAX_MD_SIZE) + outlen = self._backend._ffi.new("unsigned int *") + res = self._backend._lib.Cryptography_HMAC_Final( + self._ctx, buf, outlen + ) + assert res != 0 + assert outlen[0] == self.algorithm.digest_size + self._backend._lib.HMAC_CTX_cleanup(self._ctx) + return self._backend._ffi.buffer(buf)[:outlen[0]] diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/openssl/rsa.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/openssl/rsa.py new file mode 100644 index 0000000..6f28c54 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/backends/openssl/rsa.py @@ -0,0 +1,603 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
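_HMACContext above drives HMAC_Init_ex/HMAC_Update/HMAC_Final through the Cryptography_HMAC_* wrappers. A usage sketch at the primitives layer, assuming the hmac.HMAC and constant_time APIs of this era; the key and message are placeholders:

import os

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import constant_time, hashes, hmac

backend = default_backend()
key = os.urandom(32)

# Tag a message.
h = hmac.HMAC(key, hashes.SHA256(), backend=backend)
h.update(b"message to authenticate")
tag = h.finalize()

# Verify by recomputing the tag and comparing in constant time.
h = hmac.HMAC(key, hashes.SHA256(), backend=backend)
h.update(b"message to authenticate")
assert constant_time.bytes_eq(h.finalize(), tag)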
+ +from __future__ import absolute_import, division, print_function + +import math + +from cryptography import utils +from cryptography.exceptions import ( + AlreadyFinalized, InvalidSignature, UnsupportedAlgorithm, _Reasons +) +from cryptography.hazmat.primitives import hashes, interfaces +from cryptography.hazmat.primitives.asymmetric import rsa +from cryptography.hazmat.primitives.asymmetric.padding import ( + MGF1, OAEP, PKCS1v15, PSS +) +from cryptography.hazmat.primitives.interfaces import ( + RSAPrivateKeyWithNumbers, RSAPublicKeyWithNumbers +) + + +def _get_rsa_pss_salt_length(pss, key_size, digest_size): + if pss._mgf._salt_length is not None: + salt = pss._mgf._salt_length + else: + salt = pss._salt_length + + if salt is MGF1.MAX_LENGTH or salt is PSS.MAX_LENGTH: + # bit length - 1 per RFC 3447 + emlen = int(math.ceil((key_size - 1) / 8.0)) + salt_length = emlen - digest_size - 2 + assert salt_length >= 0 + return salt_length + else: + return salt + + +def _enc_dec_rsa(backend, key, data, padding): + if isinstance(padding, PKCS1v15): + padding_enum = backend._lib.RSA_PKCS1_PADDING + elif isinstance(padding, OAEP): + padding_enum = backend._lib.RSA_PKCS1_OAEP_PADDING + if not isinstance(padding._mgf, MGF1): + raise UnsupportedAlgorithm( + "Only MGF1 is supported by this backend.", + _Reasons.UNSUPPORTED_MGF + ) + + if not isinstance(padding._mgf._algorithm, hashes.SHA1): + raise UnsupportedAlgorithm( + "This backend supports only SHA1 inside MGF1 when " + "using OAEP.", + _Reasons.UNSUPPORTED_HASH + ) + + if padding._label is not None and padding._label != b"": + raise ValueError("This backend does not support OAEP labels.") + + if not isinstance(padding._algorithm, hashes.SHA1): + raise UnsupportedAlgorithm( + "This backend only supports SHA1 when using OAEP.", + _Reasons.UNSUPPORTED_HASH + ) + else: + raise UnsupportedAlgorithm( + "{0} is not supported by this backend.".format( + padding.name + ), + _Reasons.UNSUPPORTED_PADDING + ) + + if backend._lib.Cryptography_HAS_PKEY_CTX: + return _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum) + else: + return _enc_dec_rsa_098(backend, key, data, padding_enum) + + +def _enc_dec_rsa_pkey_ctx(backend, key, data, padding_enum): + if isinstance(key, _RSAPublicKey): + init = backend._lib.EVP_PKEY_encrypt_init + crypt = backend._lib.Cryptography_EVP_PKEY_encrypt + else: + init = backend._lib.EVP_PKEY_decrypt_init + crypt = backend._lib.Cryptography_EVP_PKEY_decrypt + + pkey_ctx = backend._lib.EVP_PKEY_CTX_new( + key._evp_pkey, backend._ffi.NULL + ) + assert pkey_ctx != backend._ffi.NULL + pkey_ctx = backend._ffi.gc(pkey_ctx, backend._lib.EVP_PKEY_CTX_free) + res = init(pkey_ctx) + assert res == 1 + res = backend._lib.EVP_PKEY_CTX_set_rsa_padding( + pkey_ctx, padding_enum) + assert res > 0 + buf_size = backend._lib.EVP_PKEY_size(key._evp_pkey) + assert buf_size > 0 + outlen = backend._ffi.new("size_t *", buf_size) + buf = backend._ffi.new("char[]", buf_size) + res = crypt(pkey_ctx, buf, outlen, data, len(data)) + if res <= 0: + _handle_rsa_enc_dec_error(backend, key) + + return backend._ffi.buffer(buf)[:outlen[0]] + + +def _enc_dec_rsa_098(backend, key, data, padding_enum): + if isinstance(key, _RSAPublicKey): + crypt = backend._lib.RSA_public_encrypt + else: + crypt = backend._lib.RSA_private_decrypt + + key_size = backend._lib.RSA_size(key._rsa_cdata) + assert key_size > 0 + buf = backend._ffi.new("unsigned char[]", key_size) + res = crypt(len(data), data, buf, key._rsa_cdata, padding_enum) + if res < 0: + 
_handle_rsa_enc_dec_error(backend, key) + + return backend._ffi.buffer(buf)[:res] + + +def _handle_rsa_enc_dec_error(backend, key): + errors = backend._consume_errors() + assert errors + assert errors[0].lib == backend._lib.ERR_LIB_RSA + if isinstance(key, _RSAPublicKey): + assert (errors[0].reason == + backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE) + raise ValueError( + "Data too long for key size. Encrypt less data or use a " + "larger key size." + ) + else: + assert ( + errors[0].reason == backend._lib.RSA_R_BLOCK_TYPE_IS_NOT_01 or + errors[0].reason == backend._lib.RSA_R_BLOCK_TYPE_IS_NOT_02 + ) + raise ValueError("Decryption failed.") + + +@utils.register_interface(interfaces.AsymmetricSignatureContext) +class _RSASignatureContext(object): + def __init__(self, backend, private_key, padding, algorithm): + self._backend = backend + self._private_key = private_key + + if not isinstance(padding, interfaces.AsymmetricPadding): + raise TypeError( + "Expected provider of interfaces.AsymmetricPadding.") + + self._pkey_size = self._backend._lib.EVP_PKEY_size( + self._private_key._evp_pkey + ) + + if isinstance(padding, PKCS1v15): + if self._backend._lib.Cryptography_HAS_PKEY_CTX: + self._finalize_method = self._finalize_pkey_ctx + self._padding_enum = self._backend._lib.RSA_PKCS1_PADDING + else: + self._finalize_method = self._finalize_pkcs1 + elif isinstance(padding, PSS): + if not isinstance(padding._mgf, MGF1): + raise UnsupportedAlgorithm( + "Only MGF1 is supported by this backend.", + _Reasons.UNSUPPORTED_MGF + ) + + # Size of key in bytes - 2 is the maximum + # PSS signature length (salt length is checked later) + assert self._pkey_size > 0 + if self._pkey_size - algorithm.digest_size - 2 < 0: + raise ValueError("Digest too large for key size. Use a larger " + "key.") + + if not self._backend._mgf1_hash_supported(padding._mgf._algorithm): + raise UnsupportedAlgorithm( + "When OpenSSL is older than 1.0.1 then only SHA1 is " + "supported with MGF1.", + _Reasons.UNSUPPORTED_HASH + ) + + if self._backend._lib.Cryptography_HAS_PKEY_CTX: + self._finalize_method = self._finalize_pkey_ctx + self._padding_enum = self._backend._lib.RSA_PKCS1_PSS_PADDING + else: + self._finalize_method = self._finalize_pss + else: + raise UnsupportedAlgorithm( + "{0} is not supported by this backend.".format(padding.name), + _Reasons.UNSUPPORTED_PADDING + ) + + self._padding = padding + self._algorithm = algorithm + self._hash_ctx = hashes.Hash(self._algorithm, self._backend) + + def update(self, data): + self._hash_ctx.update(data) + + def finalize(self): + evp_md = self._backend._lib.EVP_get_digestbyname( + self._algorithm.name.encode("ascii")) + assert evp_md != self._backend._ffi.NULL + + return self._finalize_method(evp_md) + + def _finalize_pkey_ctx(self, evp_md): + pkey_ctx = self._backend._lib.EVP_PKEY_CTX_new( + self._private_key._evp_pkey, self._backend._ffi.NULL + ) + assert pkey_ctx != self._backend._ffi.NULL + pkey_ctx = self._backend._ffi.gc(pkey_ctx, + self._backend._lib.EVP_PKEY_CTX_free) + res = self._backend._lib.EVP_PKEY_sign_init(pkey_ctx) + assert res == 1 + res = self._backend._lib.EVP_PKEY_CTX_set_signature_md( + pkey_ctx, evp_md) + assert res > 0 + + res = self._backend._lib.EVP_PKEY_CTX_set_rsa_padding( + pkey_ctx, self._padding_enum) + assert res > 0 + if isinstance(self._padding, PSS): + res = self._backend._lib.EVP_PKEY_CTX_set_rsa_pss_saltlen( + pkey_ctx, + _get_rsa_pss_salt_length( + self._padding, + self._private_key.key_size, + self._hash_ctx.algorithm.digest_size + ) + ) + assert 
res > 0 + + if self._backend._lib.Cryptography_HAS_MGF1_MD: + # MGF1 MD is configurable in OpenSSL 1.0.1+ + mgf1_md = self._backend._lib.EVP_get_digestbyname( + self._padding._mgf._algorithm.name.encode("ascii")) + assert mgf1_md != self._backend._ffi.NULL + res = self._backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md( + pkey_ctx, mgf1_md + ) + assert res > 0 + data_to_sign = self._hash_ctx.finalize() + buflen = self._backend._ffi.new("size_t *") + res = self._backend._lib.EVP_PKEY_sign( + pkey_ctx, + self._backend._ffi.NULL, + buflen, + data_to_sign, + len(data_to_sign) + ) + assert res == 1 + buf = self._backend._ffi.new("unsigned char[]", buflen[0]) + res = self._backend._lib.EVP_PKEY_sign( + pkey_ctx, buf, buflen, data_to_sign, len(data_to_sign)) + if res != 1: + errors = self._backend._consume_errors() + assert errors[0].lib == self._backend._lib.ERR_LIB_RSA + reason = None + if (errors[0].reason == + self._backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE): + reason = ("Salt length too long for key size. Try using " + "MAX_LENGTH instead.") + elif (errors[0].reason == + self._backend._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY): + reason = "Digest too large for key size. Use a larger key." + assert reason is not None + raise ValueError(reason) + + return self._backend._ffi.buffer(buf)[:] + + def _finalize_pkcs1(self, evp_md): + if self._hash_ctx._ctx is None: + raise AlreadyFinalized("Context has already been finalized.") + + sig_buf = self._backend._ffi.new("char[]", self._pkey_size) + sig_len = self._backend._ffi.new("unsigned int *") + res = self._backend._lib.EVP_SignFinal( + self._hash_ctx._ctx._ctx, + sig_buf, + sig_len, + self._private_key._evp_pkey + ) + self._hash_ctx.finalize() + if res == 0: + errors = self._backend._consume_errors() + assert errors[0].lib == self._backend._lib.ERR_LIB_RSA + assert (errors[0].reason == + self._backend._lib.RSA_R_DIGEST_TOO_BIG_FOR_RSA_KEY) + raise ValueError("Digest too large for key size. Use a larger " + "key.") + + return self._backend._ffi.buffer(sig_buf)[:sig_len[0]] + + def _finalize_pss(self, evp_md): + data_to_sign = self._hash_ctx.finalize() + padded = self._backend._ffi.new("unsigned char[]", self._pkey_size) + res = self._backend._lib.RSA_padding_add_PKCS1_PSS( + self._private_key._rsa_cdata, + padded, + data_to_sign, + evp_md, + _get_rsa_pss_salt_length( + self._padding, + self._private_key.key_size, + len(data_to_sign) + ) + ) + if res != 1: + errors = self._backend._consume_errors() + assert errors[0].lib == self._backend._lib.ERR_LIB_RSA + assert (errors[0].reason == + self._backend._lib.RSA_R_DATA_TOO_LARGE_FOR_KEY_SIZE) + raise ValueError("Salt length too long for key size. 
Try using " + "MAX_LENGTH instead.") + + sig_buf = self._backend._ffi.new("char[]", self._pkey_size) + sig_len = self._backend._lib.RSA_private_encrypt( + self._pkey_size, + padded, + sig_buf, + self._private_key._rsa_cdata, + self._backend._lib.RSA_NO_PADDING + ) + assert sig_len != -1 + return self._backend._ffi.buffer(sig_buf)[:sig_len] + + +@utils.register_interface(interfaces.AsymmetricVerificationContext) +class _RSAVerificationContext(object): + def __init__(self, backend, public_key, signature, padding, algorithm): + self._backend = backend + self._public_key = public_key + self._signature = signature + + if not isinstance(padding, interfaces.AsymmetricPadding): + raise TypeError( + "Expected provider of interfaces.AsymmetricPadding.") + + self._pkey_size = self._backend._lib.EVP_PKEY_size( + self._public_key._evp_pkey + ) + + if isinstance(padding, PKCS1v15): + if self._backend._lib.Cryptography_HAS_PKEY_CTX: + self._verify_method = self._verify_pkey_ctx + self._padding_enum = self._backend._lib.RSA_PKCS1_PADDING + else: + self._verify_method = self._verify_pkcs1 + elif isinstance(padding, PSS): + if not isinstance(padding._mgf, MGF1): + raise UnsupportedAlgorithm( + "Only MGF1 is supported by this backend.", + _Reasons.UNSUPPORTED_MGF + ) + + # Size of key in bytes - 2 is the maximum + # PSS signature length (salt length is checked later) + assert self._pkey_size > 0 + if self._pkey_size - algorithm.digest_size - 2 < 0: + raise ValueError( + "Digest too large for key size. Check that you have the " + "correct key and digest algorithm." + ) + + if not self._backend._mgf1_hash_supported(padding._mgf._algorithm): + raise UnsupportedAlgorithm( + "When OpenSSL is older than 1.0.1 then only SHA1 is " + "supported with MGF1.", + _Reasons.UNSUPPORTED_HASH + ) + + if self._backend._lib.Cryptography_HAS_PKEY_CTX: + self._verify_method = self._verify_pkey_ctx + self._padding_enum = self._backend._lib.RSA_PKCS1_PSS_PADDING + else: + self._verify_method = self._verify_pss + else: + raise UnsupportedAlgorithm( + "{0} is not supported by this backend.".format(padding.name), + _Reasons.UNSUPPORTED_PADDING + ) + + self._padding = padding + self._algorithm = algorithm + self._hash_ctx = hashes.Hash(self._algorithm, self._backend) + + def update(self, data): + self._hash_ctx.update(data) + + def verify(self): + evp_md = self._backend._lib.EVP_get_digestbyname( + self._algorithm.name.encode("ascii")) + assert evp_md != self._backend._ffi.NULL + + self._verify_method(evp_md) + + def _verify_pkey_ctx(self, evp_md): + pkey_ctx = self._backend._lib.EVP_PKEY_CTX_new( + self._public_key._evp_pkey, self._backend._ffi.NULL + ) + assert pkey_ctx != self._backend._ffi.NULL + pkey_ctx = self._backend._ffi.gc(pkey_ctx, + self._backend._lib.EVP_PKEY_CTX_free) + res = self._backend._lib.EVP_PKEY_verify_init(pkey_ctx) + assert res == 1 + res = self._backend._lib.EVP_PKEY_CTX_set_signature_md( + pkey_ctx, evp_md) + assert res > 0 + + res = self._backend._lib.EVP_PKEY_CTX_set_rsa_padding( + pkey_ctx, self._padding_enum) + assert res > 0 + if isinstance(self._padding, PSS): + res = self._backend._lib.EVP_PKEY_CTX_set_rsa_pss_saltlen( + pkey_ctx, + _get_rsa_pss_salt_length( + self._padding, + self._public_key.key_size, + self._hash_ctx.algorithm.digest_size + ) + ) + assert res > 0 + if self._backend._lib.Cryptography_HAS_MGF1_MD: + # MGF1 MD is configurable in OpenSSL 1.0.1+ + mgf1_md = self._backend._lib.EVP_get_digestbyname( + self._padding._mgf._algorithm.name.encode("ascii")) + assert mgf1_md != 
self._backend._ffi.NULL + res = self._backend._lib.EVP_PKEY_CTX_set_rsa_mgf1_md( + pkey_ctx, mgf1_md + ) + assert res > 0 + + data_to_verify = self._hash_ctx.finalize() + res = self._backend._lib.EVP_PKEY_verify( + pkey_ctx, + self._signature, + len(self._signature), + data_to_verify, + len(data_to_verify) + ) + # The previous call can return negative numbers in the event of an + # error. This is not a signature failure but we need to fail if it + # occurs. + assert res >= 0 + if res == 0: + errors = self._backend._consume_errors() + assert errors + raise InvalidSignature + + def _verify_pkcs1(self, evp_md): + if self._hash_ctx._ctx is None: + raise AlreadyFinalized("Context has already been finalized.") + + res = self._backend._lib.EVP_VerifyFinal( + self._hash_ctx._ctx._ctx, + self._signature, + len(self._signature), + self._public_key._evp_pkey + ) + self._hash_ctx.finalize() + # The previous call can return negative numbers in the event of an + # error. This is not a signature failure but we need to fail if it + # occurs. + assert res >= 0 + if res == 0: + errors = self._backend._consume_errors() + assert errors + raise InvalidSignature + + def _verify_pss(self, evp_md): + buf = self._backend._ffi.new("unsigned char[]", self._pkey_size) + res = self._backend._lib.RSA_public_decrypt( + len(self._signature), + self._signature, + buf, + self._public_key._rsa_cdata, + self._backend._lib.RSA_NO_PADDING + ) + if res != self._pkey_size: + errors = self._backend._consume_errors() + assert errors + raise InvalidSignature + + data_to_verify = self._hash_ctx.finalize() + res = self._backend._lib.RSA_verify_PKCS1_PSS( + self._public_key._rsa_cdata, + data_to_verify, + evp_md, + buf, + _get_rsa_pss_salt_length( + self._padding, + self._public_key.key_size, + len(data_to_verify) + ) + ) + if res != 1: + errors = self._backend._consume_errors() + assert errors + raise InvalidSignature + + +@utils.register_interface(RSAPrivateKeyWithNumbers) +class _RSAPrivateKey(object): + def __init__(self, backend, rsa_cdata): + self._backend = backend + self._rsa_cdata = rsa_cdata + + evp_pkey = self._backend._lib.EVP_PKEY_new() + assert evp_pkey != self._backend._ffi.NULL + evp_pkey = self._backend._ffi.gc( + evp_pkey, self._backend._lib.EVP_PKEY_free + ) + res = self._backend._lib.EVP_PKEY_set1_RSA(evp_pkey, rsa_cdata) + assert res == 1 + self._evp_pkey = evp_pkey + + self._key_size = self._backend._lib.BN_num_bits(self._rsa_cdata.n) + + @property + def key_size(self): + return self._key_size + + def signer(self, padding, algorithm): + return _RSASignatureContext(self._backend, self, padding, algorithm) + + def decrypt(self, ciphertext, padding): + key_size_bytes = int(math.ceil(self.key_size / 8.0)) + if key_size_bytes != len(ciphertext): + raise ValueError("Ciphertext length must be equal to key size.") + + return _enc_dec_rsa(self._backend, self, ciphertext, padding) + + def public_key(self): + ctx = self._backend._lib.RSA_new() + assert ctx != self._backend._ffi.NULL + ctx = self._backend._ffi.gc(ctx, self._backend._lib.RSA_free) + ctx.e = self._backend._lib.BN_dup(self._rsa_cdata.e) + ctx.n = self._backend._lib.BN_dup(self._rsa_cdata.n) + res = self._backend._lib.RSA_blinding_on(ctx, self._backend._ffi.NULL) + assert res == 1 + return _RSAPublicKey(self._backend, ctx) + + def private_numbers(self): + return rsa.RSAPrivateNumbers( + p=self._backend._bn_to_int(self._rsa_cdata.p), + q=self._backend._bn_to_int(self._rsa_cdata.q), + d=self._backend._bn_to_int(self._rsa_cdata.d), + 
dmp1=self._backend._bn_to_int(self._rsa_cdata.dmp1), + dmq1=self._backend._bn_to_int(self._rsa_cdata.dmq1), + iqmp=self._backend._bn_to_int(self._rsa_cdata.iqmp), + public_numbers=rsa.RSAPublicNumbers( + e=self._backend._bn_to_int(self._rsa_cdata.e), + n=self._backend._bn_to_int(self._rsa_cdata.n), + ) + ) + + +@utils.register_interface(RSAPublicKeyWithNumbers) +class _RSAPublicKey(object): + def __init__(self, backend, rsa_cdata): + self._backend = backend + self._rsa_cdata = rsa_cdata + + evp_pkey = self._backend._lib.EVP_PKEY_new() + assert evp_pkey != self._backend._ffi.NULL + evp_pkey = self._backend._ffi.gc( + evp_pkey, self._backend._lib.EVP_PKEY_free + ) + res = self._backend._lib.EVP_PKEY_set1_RSA(evp_pkey, rsa_cdata) + assert res == 1 + self._evp_pkey = evp_pkey + + self._key_size = self._backend._lib.BN_num_bits(self._rsa_cdata.n) + + @property + def key_size(self): + return self._key_size + + def verifier(self, signature, padding, algorithm): + return _RSAVerificationContext( + self._backend, self, signature, padding, algorithm + ) + + def encrypt(self, plaintext, padding): + return _enc_dec_rsa(self._backend, self, plaintext, padding) + + def public_numbers(self): + return rsa.RSAPublicNumbers( + e=self._backend._bn_to_int(self._rsa_cdata.e), + n=self._backend._bn_to_int(self._rsa_cdata.n), + ) diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/binding.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/binding.py index 144bb09..ee7378a 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/binding.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/binding.py @@ -25,10 +25,16 @@ class Binding(object): """ _module_prefix = "cryptography.hazmat.bindings.commoncrypto." _modules = [ + "cf", "common_digest", "common_hmac", "common_key_derivation", "common_cryptor", + "secimport", + "secitem", + "seckey", + "seckeychain", + "sectransform", ] ffi = None @@ -45,6 +51,7 @@ class Binding(object): cls.ffi, cls.lib = build_ffi( module_prefix=cls._module_prefix, modules=cls._modules, + extra_link_args=["-framework", "Security"] ) @classmethod diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/cf.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/cf.py new file mode 100644 index 0000000..671963a --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/cf.py @@ -0,0 +1,114 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#include +""" + +TYPES = """ +typedef bool Boolean; +typedef signed long OSStatus; +typedef unsigned char UInt8; +typedef uint32_t UInt32; + +typedef const void * CFAllocatorRef; +const CFAllocatorRef kCFAllocatorDefault; +typedef const void * CFDataRef; +typedef signed long long CFIndex; +typedef ... 
*CFStringRef; +typedef ... *CFArrayRef; +typedef ... *CFBooleanRef; +typedef ... *CFErrorRef; +typedef ... *CFNumberRef; +typedef ... *CFTypeRef; +typedef ... *CFDictionaryRef; +typedef ... *CFMutableDictionaryRef; +typedef struct { + ...; +} CFDictionaryKeyCallBacks; +typedef struct { + ...; +} CFDictionaryValueCallBacks; +typedef struct { + ...; +} CFRange; + +typedef UInt32 CFStringEncoding; +enum { + kCFStringEncodingASCII = 0x0600 +}; + +enum { + kCFNumberSInt8Type = 1, + kCFNumberSInt16Type = 2, + kCFNumberSInt32Type = 3, + kCFNumberSInt64Type = 4, + kCFNumberFloat32Type = 5, + kCFNumberFloat64Type = 6, + kCFNumberCharType = 7, + kCFNumberShortType = 8, + kCFNumberIntType = 9, + kCFNumberLongType = 10, + kCFNumberLongLongType = 11, + kCFNumberFloatType = 12, + kCFNumberDoubleType = 13, + kCFNumberCFIndexType = 14, + kCFNumberNSIntegerType = 15, + kCFNumberCGFloatType = 16, + kCFNumberMaxType = 16 +}; +typedef int CFNumberType; + +const CFDictionaryKeyCallBacks kCFTypeDictionaryKeyCallBacks; +const CFDictionaryValueCallBacks kCFTypeDictionaryValueCallBacks; + +const CFBooleanRef kCFBooleanTrue; +const CFBooleanRef kCFBooleanFalse; +""" + +FUNCTIONS = """ +CFDataRef CFDataCreate(CFAllocatorRef, const UInt8 *, CFIndex); +CFStringRef CFStringCreateWithCString(CFAllocatorRef, const char *, + CFStringEncoding); +CFDictionaryRef CFDictionaryCreate(CFAllocatorRef, const void **, + const void **, CFIndex, + const CFDictionaryKeyCallBacks *, + const CFDictionaryValueCallBacks *); +CFMutableDictionaryRef CFDictionaryCreateMutable( + CFAllocatorRef, + CFIndex, + const CFDictionaryKeyCallBacks *, + const CFDictionaryValueCallBacks * +); +void CFDictionarySetValue(CFMutableDictionaryRef, const void *, const void *); +CFIndex CFArrayGetCount(CFArrayRef); +const void *CFArrayGetValueAtIndex(CFArrayRef, CFIndex); +CFIndex CFDataGetLength(CFDataRef); +void CFDataGetBytes(CFDataRef, CFRange, UInt8 *); +CFRange CFRangeMake(CFIndex, CFIndex); +void CFShow(CFTypeRef); +Boolean CFBooleanGetValue(CFBooleanRef); +CFNumberRef CFNumberCreate(CFAllocatorRef, CFNumberType, const void *); +void CFRelease(CFTypeRef); +CFTypeRef CFRetain(CFTypeRef); +""" + +MACROS = """ +""" + +CUSTOMIZATIONS = """ +""" + +CONDITIONAL_NAMES = {} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/common_cryptor.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/common_cryptor.py index 9bd03a7..713bc56 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/common_cryptor.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/common_cryptor.py @@ -101,7 +101,7 @@ MACROS = """ """ CUSTOMIZATIONS = """ -// Not defined in the public header +/* Not defined in the public header */ enum { kCCModeGCM = 11 }; diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/secimport.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/secimport.py new file mode 100644 index 0000000..add62c7 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/secimport.py @@ -0,0 +1,95 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#include +""" + +TYPES = """ +typedef ... *SecAccessRef; + +CFStringRef kSecImportExportPassphrase; +CFStringRef kSecImportExportKeychain; +CFStringRef kSecImportExportAccess; + +typedef uint32_t SecExternalItemType; +enum { + kSecItemTypeUnknown, + kSecItemTypePrivateKey, + kSecItemTypePublicKey, + kSecItemTypeSessionKey, + kSecItemTypeCertificate, + kSecItemTypeAggregate +}; + + +typedef uint32_t SecExternalFormat; +enum { + kSecFormatUnknown = 0, + kSecFormatOpenSSL, + kSecFormatSSH, + kSecFormatBSAFE, + kSecFormatRawKey, + kSecFormatWrappedPKCS8, + kSecFormatWrappedOpenSSL, + kSecFormatWrappedSSH, + kSecFormatWrappedLSH, + kSecFormatX509Cert, + kSecFormatPEMSequence, + kSecFormatPKCS7, + kSecFormatPKCS12, + kSecFormatNetscapeCertSequence, + kSecFormatSSHv2 +}; + +typedef uint32_t SecItemImportExportFlags; +enum { + kSecKeyImportOnlyOne = 0x00000001, + kSecKeySecurePassphrase = 0x00000002, + kSecKeyNoAccessControl = 0x00000004 +}; +typedef uint32_t SecKeyImportExportFlags; + +typedef struct { + /* for import and export */ + uint32_t version; + SecKeyImportExportFlags flags; + CFTypeRef passphrase; + CFStringRef alertTitle; + CFStringRef alertPrompt; + + /* for import only */ + SecAccessRef accessRef; + CFArrayRef keyUsage; + + CFArrayRef keyAttributes; +} SecItemImportExportKeyParameters; +""" + +FUNCTIONS = """ +OSStatus SecItemImport(CFDataRef, CFStringRef, SecExternalFormat *, + SecExternalItemType *, SecItemImportExportFlags, + const SecItemImportExportKeyParameters *, + SecKeychainRef, CFArrayRef *); +OSStatus SecPKCS12Import(CFDataRef, CFDictionaryRef, CFArrayRef *); +""" + +MACROS = """ +""" + +CUSTOMIZATIONS = """ +""" + +CONDITIONAL_NAMES = {} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/secitem.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/secitem.py new file mode 100644 index 0000000..ac3dad3 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/secitem.py @@ -0,0 +1,38 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#include <Security/SecItem.h> +""" + +TYPES = """ +const CFTypeRef kSecAttrKeyType; +const CFTypeRef kSecAttrKeySizeInBits; +const CFTypeRef kSecAttrIsPermanent; +const CFTypeRef kSecAttrKeyTypeRSA; +const CFTypeRef kSecAttrKeyTypeDSA; +const CFTypeRef kSecUseKeychain; +""" + +FUNCTIONS = """ +""" + +MACROS = """ +""" + +CUSTOMIZATIONS = """ +""" + +CONDITIONAL_NAMES = {} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/seckey.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/seckey.py new file mode 100644 index 0000000..5e4b6da --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/seckey.py @@ -0,0 +1,35 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#include <Security/SecKey.h> +""" + +TYPES = """ +typedef ... *SecKeyRef; +""" + +FUNCTIONS = """ +OSStatus SecKeyGeneratePair(CFDictionaryRef, SecKeyRef *, SecKeyRef *); +size_t SecKeyGetBlockSize(SecKeyRef); +""" + +MACROS = """ +""" + +CUSTOMIZATIONS = """ +""" + +CONDITIONAL_NAMES = {} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/seckeychain.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/seckeychain.py new file mode 100644 index 0000000..c045c34 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/seckeychain.py @@ -0,0 +1,36 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#include <Security/SecKeychain.h> +""" + +TYPES = """ +typedef ...
*SecKeychainRef; +""" + +FUNCTIONS = """ +OSStatus SecKeychainCreate(const char *, UInt32, const void *, Boolean, + SecAccessRef, SecKeychainRef *); +OSStatus SecKeychainDelete(SecKeychainRef); +""" + +MACROS = """ +""" + +CUSTOMIZATIONS = """ +""" + +CONDITIONAL_NAMES = {} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/sectransform.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/sectransform.py new file mode 100644 index 0000000..d6dbc5f --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/commoncrypto/sectransform.py @@ -0,0 +1,79 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#include +#include +#include +""" + +TYPES = """ +typedef ... *SecTransformRef; + +CFStringRef kSecImportExportPassphrase; +CFStringRef kSecImportExportKeychain; +CFStringRef kSecImportExportAccess; + +CFStringRef kSecEncryptionMode; +CFStringRef kSecEncryptKey; +CFStringRef kSecIVKey; +CFStringRef kSecModeCBCKey; +CFStringRef kSecModeCFBKey; +CFStringRef kSecModeECBKey; +CFStringRef kSecModeNoneKey; +CFStringRef kSecModeOFBKey; +CFStringRef kSecOAEPEncodingParametersAttributeName; +CFStringRef kSecPaddingKey; +CFStringRef kSecPaddingNoneKey; +CFStringRef kSecPaddingOAEPKey; +CFStringRef kSecPaddingPKCS1Key; +CFStringRef kSecPaddingPKCS5Key; +CFStringRef kSecPaddingPKCS7Key; + +const CFStringRef kSecTransformInputAttributeName; +const CFStringRef kSecTransformOutputAttributeName; +const CFStringRef kSecTransformDebugAttributeName; +const CFStringRef kSecTransformTransformName; +const CFStringRef kSecTransformAbortAttributeName; + +CFStringRef kSecInputIsAttributeName; +CFStringRef kSecInputIsPlainText; +CFStringRef kSecInputIsDigest; +CFStringRef kSecInputIsRaw; + +const CFStringRef kSecDigestTypeAttribute; +const CFStringRef kSecDigestLengthAttribute; +const CFStringRef kSecDigestMD5; +const CFStringRef kSecDigestSHA1; +const CFStringRef kSecDigestSHA2; +""" + +FUNCTIONS = """ +Boolean SecTransformSetAttribute(SecTransformRef, CFStringRef, CFTypeRef, + CFErrorRef *); +SecTransformRef SecDecryptTransformCreate(SecKeyRef, CFErrorRef *); +SecTransformRef SecEncryptTransformCreate(SecKeyRef, CFErrorRef *); +SecTransformRef SecVerifyTransformCreate(SecKeyRef, CFDataRef, CFErrorRef *); +SecTransformRef SecSignTransformCreate(SecKeyRef, CFErrorRef *) ; +CFTypeRef SecTransformExecute(SecTransformRef, CFErrorRef *); +""" + +MACROS = """ +""" + +CUSTOMIZATIONS = """ +""" + +CONDITIONAL_NAMES = {} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/aes.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/aes.py index 17c154c..e407152 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/aes.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/aes.py @@ -38,10 +38,18 @@ int 
AES_wrap_key(AES_KEY *, const unsigned char *, unsigned char *, const unsigned char *, unsigned int); int AES_unwrap_key(AES_KEY *, const unsigned char *, unsigned char *, const unsigned char *, unsigned int); + +/* The ctr128_encrypt function is only useful in 0.9.8. You should use EVP for + this in 1.0.0+. It is defined in macros because the function signature + changed after 0.9.8 */ +void AES_ctr128_encrypt(const unsigned char *, unsigned char *, + const size_t, const AES_KEY *, + unsigned char[], unsigned char[], unsigned int *); + """ CUSTOMIZATIONS = """ -// OpenSSL 0.9.8h+ +/* OpenSSL 0.9.8h+ */ #if OPENSSL_VERSION_NUMBER >= 0x0090808fL static const long Cryptography_HAS_AES_WRAP = 1; #else diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/asn1.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/asn1.py index dfdf1bf..2edfd2d 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/asn1.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/asn1.py @@ -141,6 +141,9 @@ ASN1_INTEGER *BN_to_ASN1_INTEGER(BIGNUM *, ASN1_INTEGER *); /* These isn't a macro the arg is const on openssl 1.0.2+ */ int ASN1_GENERALIZEDTIME_check(ASN1_GENERALIZEDTIME *); + +/* Not a macro, const on openssl 1.0 */ +int ASN1_STRING_set_default_mask_asc(char *); """ CUSTOMIZATIONS = """ diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/binding.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/binding.py index aa0525f..4cd1b89 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/binding.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/binding.py @@ -13,6 +13,7 @@ from __future__ import absolute_import, division, print_function +import os import sys import threading @@ -74,6 +75,7 @@ class Binding(object): "x509", "x509name", "x509v3", + "x509_vfy" ] _locks = None @@ -96,7 +98,8 @@ class Binding(object): if sys.platform != "win32": libraries = ["crypto", "ssl"] else: # pragma: no cover - libraries = ["libeay32", "ssleay32", "advapi32"] + link_type = os.environ.get("PYCA_WINDOWS_LINK_TYPE", "static") + libraries = _get_windows_libraries(link_type) cls.ffi, cls.lib = build_ffi( module_prefix=cls._module_prefix, @@ -149,7 +152,19 @@ class Binding(object): lock.release() else: raise RuntimeError( - "Unknown lock mode {0}: lock={1}, file={2}, line={3}".format( + "Unknown lock mode {0}: lock={1}, file={2}, line={3}.".format( mode, n, file, line ) ) + + +def _get_windows_libraries(link_type): + if link_type == "dynamic": + return ["libeay32", "ssleay32", "advapi32"] + elif link_type == "static" or link_type == "": + return ["libeay32mt", "ssleay32mt", "advapi32", + "crypt32", "gdi32", "user32", "ws2_32"] + else: + raise ValueError( + "PYCA_WINDOWS_LINK_TYPE must be 'static' or 'dynamic'" + ) diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/cms.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/cms.py index a3760f2..cbf4b28 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/cms.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/cms.py @@ -15,8 +15,8 @@ from __future__ import absolute_import, division, print_function INCLUDES = """ #if !defined(OPENSSL_NO_CMS) && OPENSSL_VERSION_NUMBER >= 0x0090808fL -// The next 
define should really be in the OpenSSL header, but it is missing. -// Failing to include this on Windows causes compilation failures. +/* The next define should really be in the OpenSSL header, but it is missing. + Failing to include this on Windows causes compilation failures. */ #if defined(OPENSSL_SYS_WINDOWS) #include #endif diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/conf.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/conf.py index dda35e8..001a070 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/conf.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/conf.py @@ -22,6 +22,8 @@ typedef ... CONF; """ FUNCTIONS = """ +void OPENSSL_config(const char *); +void OPENSSL_no_config(void); """ MACROS = """ diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/dh.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/dh.py index a0f9947..e2e8976 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/dh.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/dh.py @@ -19,13 +19,13 @@ INCLUDES = """ TYPES = """ typedef struct dh_st { - // prime number (shared) + /* Prime number (shared) */ BIGNUM *p; - // generator of Z_p (shared) + /* Generator of Z_p (shared) */ BIGNUM *g; - // private DH value x + /* Private DH value x */ BIGNUM *priv_key; - // public DH value g^x + /* Public DH value g^x */ BIGNUM *pub_key; ...; } DH; diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/dsa.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/dsa.py index 7db0332..c9aa888 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/dsa.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/dsa.py @@ -19,15 +19,15 @@ INCLUDES = """ TYPES = """ typedef struct dsa_st { - // prime number (public) + /* Prime number (public) */ BIGNUM *p; - // 160-bit subprime, q | p-1 (public) + /* Subprime (160-bit, q | p-1, public) */ BIGNUM *q; - // generator of subgroup (public) + /* Generator of subgroup (public) */ BIGNUM *g; - // private key x + /* Private key x */ BIGNUM *priv_key; - // public key y = g^x + /* Public key y = g^x */ BIGNUM *pub_key; ...; } DSA; diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/ec.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/ec.py index 45c17c2..26fc8ff 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/ec.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/ec.py @@ -27,6 +27,8 @@ static const int Cryptography_HAS_EC_1_0_1; static const int Cryptography_HAS_EC_NISTP_64_GCC_128; static const int Cryptography_HAS_EC2M; +static const int OPENSSL_EC_NAMED_CURVE; + typedef ... EC_KEY; typedef ... EC_GROUP; typedef ... 
EC_POINT; @@ -61,6 +63,8 @@ int EC_GROUP_set_curve_GF2m( int EC_GROUP_get_curve_GF2m( const EC_GROUP *, BIGNUM *, BIGNUM *, BIGNUM *, BN_CTX *); +int EC_GROUP_get_degree(const EC_GROUP *); + const EC_METHOD *EC_GROUP_method_of(const EC_GROUP *); const EC_POINT *EC_GROUP_get0_generator(const EC_GROUP *); int EC_GROUP_get_curve_name(const EC_GROUP *); @@ -198,6 +202,7 @@ int EC_METHOD_get_field_type(const EC_METHOD *); CUSTOMIZATIONS = """ #ifdef OPENSSL_NO_EC static const long Cryptography_HAS_EC = 0; + typedef void EC_KEY; typedef void EC_GROUP; typedef void EC_POINT; @@ -208,6 +213,8 @@ typedef struct { } EC_builtin_curve; typedef long point_conversion_form_t; +static const int OPENSSL_EC_NAMED_CURVE = 0; + void (*EC_KEY_free)(EC_KEY *) = NULL; size_t (*EC_get_builtin_curves)(EC_builtin_curve *, size_t) = NULL; EC_KEY *(*EC_KEY_new_by_curve_name)(int) = NULL; @@ -250,6 +257,8 @@ int (*EC_GROUP_set_curve_GFp)( int (*EC_GROUP_get_curve_GFp)( const EC_GROUP *, BIGNUM *, BIGNUM *, BIGNUM *, BN_CTX *); +int (*EC_GROUP_get_degree)(const EC_GROUP *) = NULL; + const EC_METHOD *(*EC_GROUP_method_of)(const EC_GROUP *) = NULL; const EC_POINT *(*EC_GROUP_get0_generator)(const EC_GROUP *) = NULL; int (*EC_GROUP_get_curve_name)(const EC_GROUP *) = NULL; @@ -389,6 +398,7 @@ static const long Cryptography_HAS_EC2M = 1; CONDITIONAL_NAMES = { "Cryptography_HAS_EC": [ + "OPENSSL_EC_NAMED_CURVE", "EC_GROUP_new", "EC_GROUP_free", "EC_GROUP_clear_free", @@ -399,6 +409,7 @@ CONDITIONAL_NAMES = { "EC_GROUP_method_of", "EC_GROUP_get0_generator", "EC_GROUP_get_curve_name", + "EC_GROUP_get_degree", "EC_KEY_free", "EC_get_builtin_curves", "EC_KEY_new_by_curve_name", diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/err.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/err.py index f6456d6..232060a 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/err.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/err.py @@ -21,6 +21,7 @@ TYPES = """ static const int Cryptography_HAS_REMOVE_THREAD_STATE; static const int Cryptography_HAS_098H_ERROR_CODES; static const int Cryptography_HAS_098C_CAMELLIA_CODES; +static const int Cryptography_HAS_EC_CODES; struct ERR_string_data_st { unsigned long error; @@ -28,8 +29,8 @@ struct ERR_string_data_st { }; typedef struct ERR_string_data_st ERR_STRING_DATA; - static const int ERR_LIB_EVP; +static const int ERR_LIB_EC; static const int ERR_LIB_PEM; static const int ERR_LIB_ASN1; static const int ERR_LIB_RSA; @@ -135,6 +136,7 @@ static const int EVP_F_PKCS5_V2_PBE_KEYIVGEN; static const int EVP_F_PKCS8_SET_BROKEN; static const int EVP_F_RC2_MAGIC_TO_METH; static const int EVP_F_RC5_CTRL; + static const int EVP_R_AES_KEY_SETUP_FAILED; static const int EVP_R_ASN1_LIB; static const int EVP_R_BAD_BLOCK_LENGTH; @@ -168,9 +170,14 @@ static const int EVP_R_UNSUPPORTED_CIPHER; static const int EVP_R_UNSUPPORTED_KEY_DERIVATION_FUNCTION; static const int EVP_R_UNSUPPORTED_KEYLENGTH; static const int EVP_R_UNSUPPORTED_SALT_TYPE; +static const int EVP_R_UNSUPPORTED_PRIVATE_KEY_ALGORITHM; static const int EVP_R_WRONG_FINAL_BLOCK_LENGTH; static const int EVP_R_WRONG_PUBLIC_KEY_TYPE; +static const int EC_F_EC_GROUP_NEW_BY_CURVE_NAME; + +static const int EC_R_UNKNOWN_GROUP; + static const int PEM_F_D2I_PKCS8PRIVATEKEY_BIO; static const int PEM_F_D2I_PKCS8PRIVATEKEY_FP; static const int PEM_F_DO_PK8PKEY; @@ -283,7 +290,7 @@ typedef uint32_t CRYPTO_THREADID; void 
(*ERR_remove_thread_state)(const CRYPTO_THREADID *) = NULL; #endif -// OpenSSL 0.9.8h+ +/* OpenSSL 0.9.8h+ */ #if OPENSSL_VERSION_NUMBER >= 0x0090808fL static const long Cryptography_HAS_098H_ERROR_CODES = 1; #else @@ -297,7 +304,7 @@ static const int ASN1_R_NO_MULTIPART_BODY_FAILURE = 0; static const int ASN1_R_NO_MULTIPART_BOUNDARY = 0; #endif -// OpenSSL 0.9.8c+ +/* OpenSSL 0.9.8c+ */ #ifdef EVP_F_CAMELLIA_INIT_KEY static const long Cryptography_HAS_098C_CAMELLIA_CODES = 1; #else @@ -306,6 +313,14 @@ static const int EVP_F_CAMELLIA_INIT_KEY = 0; static const int EVP_R_CAMELLIA_KEY_SETUP_FAILED = 0; #endif +// OpenSSL without EC. e.g. RHEL +#ifndef OPENSSL_NO_EC +static const long Cryptography_HAS_EC_CODES = 1; +#else +static const long Cryptography_HAS_EC_CODES = 0; +static const int EC_R_UNKNOWN_GROUP = 0; +static const int EC_F_EC_GROUP_NEW_BY_CURVE_NAME = 0; +#endif """ CONDITIONAL_NAMES = { @@ -324,5 +339,9 @@ CONDITIONAL_NAMES = { "Cryptography_HAS_098C_CAMELLIA_CODES": [ "EVP_F_CAMELLIA_INIT_KEY", "EVP_R_CAMELLIA_KEY_SETUP_FAILED" + ], + "Cryptography_HAS_EC_CODES": [ + "EC_R_UNKNOWN_GROUP", + "EC_F_EC_GROUP_NEW_BY_CURVE_NAME" ] } diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/evp.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/evp.py index b3d958e..1183450 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/evp.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/evp.py @@ -139,7 +139,8 @@ int PKCS5_PBKDF2_HMAC(const char *, int, const unsigned char *, int, int, int EVP_PKEY_CTX_set_signature_md(EVP_PKEY_CTX *, const EVP_MD *); -// not macros but must be in this section since they're not available in 0.9.8 +/* These aren't macros, but must be in this section because they're not + available in 0.9.8. 
*/ EVP_PKEY_CTX *EVP_PKEY_CTX_new(EVP_PKEY *, ENGINE *); EVP_PKEY_CTX *EVP_PKEY_CTX_new_id(int, ENGINE *); EVP_PKEY_CTX *EVP_PKEY_CTX_dup(EVP_PKEY_CTX *); diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/nid.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/nid.py index ea6fd4d..133d2ca 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/nid.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/nid.py @@ -42,6 +42,7 @@ static const int NID_ecdsa_with_SHA512; static const int NID_crl_reason; static const int NID_pbe_WithSHA1And3_Key_TripleDES_CBC; static const int NID_subject_alt_name; +static const int NID_issuer_alt_name; static const int NID_X9_62_c2pnb163v1; static const int NID_X9_62_c2pnb163v2; static const int NID_X9_62_c2pnb163v3; @@ -193,7 +194,7 @@ MACROS = """ """ CUSTOMIZATIONS = """ -// OpenSSL 0.9.8g+ +/* OpenSSL 0.9.8g+ */ #if OPENSSL_VERSION_NUMBER >= 0x0090807fL static const long Cryptography_HAS_ECDSA_SHA2_NIDS = 1; #else diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/opensslv.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/opensslv.py index e4aa621..ef6e057 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/opensslv.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/opensslv.py @@ -18,6 +18,8 @@ INCLUDES = """ """ TYPES = """ +/* Note that these will be resolved when cryptography is compiled and are NOT + guaranteed to be the version that it actually loads. */ static const int OPENSSL_VERSION_NUMBER; static const char *const OPENSSL_VERSION_TEXT; """ diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/pem.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/pem.py index e42fc6f..752f198 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/pem.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/pem.py @@ -41,6 +41,7 @@ int i2d_PKCS8PrivateKey_bio(BIO *, EVP_PKEY *, const EVP_CIPHER *, int i2d_PKCS8PrivateKey_nid_bio(BIO *, EVP_PKEY *, int, char *, int, pem_password_cb *, void *); +PKCS7 *d2i_PKCS7_bio(BIO *, PKCS7 **); EVP_PKEY *d2i_PKCS8PrivateKey_bio(BIO *, EVP_PKEY **, pem_password_cb *, void *); diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/rsa.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/rsa.py index c635610..cb8e701 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/rsa.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/rsa.py @@ -80,7 +80,7 @@ CUSTOMIZATIONS = """ #if OPENSSL_VERSION_NUMBER >= 0x10000000 static const long Cryptography_HAS_PSS_PADDING = 1; #else -// see evp.py for the definition of Cryptography_HAS_PKEY_CTX +/* see evp.py for the definition of Cryptography_HAS_PKEY_CTX */ static const long Cryptography_HAS_PSS_PADDING = 0; int (*EVP_PKEY_CTX_set_rsa_padding)(EVP_PKEY_CTX *, int) = NULL; int (*EVP_PKEY_CTX_set_rsa_pss_saltlen)(EVP_PKEY_CTX *, int) = NULL; diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/ssl.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/ssl.py index 0b15411..7d805e7 100644 --- 
a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/ssl.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/ssl.py @@ -15,6 +15,8 @@ from __future__ import absolute_import, division, print_function INCLUDES = """ #include + +typedef STACK_OF(SSL_CIPHER) Cryptography_STACK_OF_SSL_CIPHER; """ TYPES = """ @@ -24,6 +26,7 @@ TYPES = """ static const long Cryptography_HAS_SSL2; static const long Cryptography_HAS_TLSv1_1; static const long Cryptography_HAS_TLSv1_2; +static const long Cryptography_HAS_SECURE_RENEGOTIATION; /* Internally invented symbol to tell us if SNI is supported */ static const long Cryptography_HAS_TLSEXT_HOSTNAME; @@ -43,6 +46,7 @@ static const long Cryptography_HAS_SSL_SET_SSL_CTX; static const long Cryptography_HAS_SSL_OP_NO_TICKET; static const long Cryptography_HAS_NETBSD_D1_METH; static const long Cryptography_HAS_NEXTPROTONEG; +static const long Cryptography_HAS_ALPN; static const long SSL_FILETYPE_PEM; static const long SSL_FILETYPE_ASN1; @@ -84,6 +88,8 @@ static const long SSL_OP_COOKIE_EXCHANGE; static const long SSL_OP_NO_TICKET; static const long SSL_OP_ALL; static const long SSL_OP_SINGLE_ECDH_USE; +static const long SSL_OP_ALLOW_UNSAFE_LEGACY_RENEGOTIATION; +static const long SSL_OP_LEGACY_SERVER_CONNECT; static const long SSL_VERIFY_PEER; static const long SSL_VERIFY_FAIL_IF_NO_PEER_CERT; static const long SSL_VERIFY_CLIENT_ONCE; @@ -121,9 +127,6 @@ static const long SSL_MODE_ENABLE_PARTIAL_WRITE; static const long SSL_MODE_ACCEPT_MOVING_WRITE_BUFFER; static const long SSL_MODE_AUTO_RETRY; static const long SSL3_RANDOM_SIZE; -typedef ... X509_STORE_CTX; -static const long X509_V_OK; -static const long X509_V_ERR_APPLICATION_VERIFICATION; typedef ... SSL_METHOD; typedef struct ssl_st { int version; @@ -153,6 +156,8 @@ typedef struct { static const long TLSEXT_NAMETYPE_host_name; typedef ... SSL_CIPHER; +typedef ... Cryptography_STACK_OF_SSL_CIPHER; +typedef ... COMP_METHOD; """ FUNCTIONS = """ @@ -190,6 +195,11 @@ int SSL_get_error(const SSL *, int); int SSL_do_handshake(SSL *); int SSL_shutdown(SSL *); const char *SSL_get_cipher_list(const SSL *, int); +Cryptography_STACK_OF_SSL_CIPHER *SSL_get_ciphers(const SSL *); + +const COMP_METHOD *SSL_get_current_compression(SSL *); +const COMP_METHOD *SSL_get_current_expansion(SSL *); +const char *SSL_COMP_get_name(const COMP_METHOD *); /* context */ void SSL_CTX_free(SSL_CTX *); @@ -215,16 +225,6 @@ int SSL_CTX_add_client_CA(SSL_CTX *, X509 *); void SSL_CTX_set_client_CA_list(SSL_CTX *, Cryptography_STACK_OF_X509_NAME *); - -/* X509_STORE_CTX */ -int X509_STORE_CTX_get_error(X509_STORE_CTX *); -void X509_STORE_CTX_set_error(X509_STORE_CTX *, int); -int X509_STORE_CTX_get_error_depth(X509_STORE_CTX *); -X509 *X509_STORE_CTX_get_current_cert(X509_STORE_CTX *); -int X509_STORE_CTX_set_ex_data(X509_STORE_CTX *, int, void *); -void *X509_STORE_CTX_get_ex_data(X509_STORE_CTX *, int); - - /* SSL_SESSION */ void SSL_SESSION_free(SSL_SESSION *); @@ -248,6 +248,7 @@ int SSL_want_read(const SSL *); int SSL_want_write(const SSL *); long SSL_total_renegotiations(SSL *); +long SSL_get_secure_renegotiation_support(SSL *); /* Defined as unsigned long because SSL_OP_ALL is greater than signed 32-bit and Windows defines long as 32-bit. 
*/ @@ -351,9 +352,38 @@ int SSL_select_next_proto(unsigned char **, unsigned char *, const unsigned char *, unsigned int); void SSL_get0_next_proto_negotiated(const SSL *, const unsigned char **, unsigned *); + +int sk_SSL_CIPHER_num(Cryptography_STACK_OF_SSL_CIPHER *); +SSL_CIPHER *sk_SSL_CIPHER_value(Cryptography_STACK_OF_SSL_CIPHER *, int); + +/* ALPN APIs were introduced in OpenSSL 1.0.2. To continue to support earlier + * versions some special handling of these is necessary. + */ +int SSL_CTX_set_alpn_protos(SSL_CTX *, const unsigned char*, unsigned); +int SSL_set_alpn_protos(SSL *, const unsigned char*, unsigned); +void SSL_CTX_set_alpn_select_cb(SSL_CTX *, + int (*) (SSL *, + const unsigned char **, + unsigned char *, + const unsigned char *, + unsigned int, + void *), + void *); +void SSL_get0_alpn_selected(const SSL *, const unsigned char **, unsigned *); """ CUSTOMIZATIONS = """ +/** Secure renegotiation is supported in OpenSSL >= 0.9.8m + * But some Linux distributions have back ported some features. + */ +#ifndef SSL_OP_ALLOW_UNSAFE_LEGACY_RENEGOTIATION +static const long Cryptography_HAS_SECURE_RENEGOTIATION = 0; +long (*SSL_get_secure_renegotiation_support)(SSL *) = NULL; +const long SSL_OP_ALLOW_UNSAFE_LEGACY_RENEGOTIATION = 0; +const long SSL_OP_LEGACY_SERVER_CONNECT = 0; +#else +static const long Cryptography_HAS_SECURE_RENEGOTIATION = 1; +#endif #ifdef OPENSSL_NO_SSL2 static const long Cryptography_HAS_SSL2 = 0; SSL_METHOD* (*SSLv2_method)(void) = NULL; @@ -426,7 +456,7 @@ static const long Cryptography_HAS_SSL_OP_NO_TICKET = 0; const long SSL_OP_NO_TICKET = 0; #endif -// OpenSSL 0.9.8f+ +/* OpenSSL 0.9.8f+ */ #if OPENSSL_VERSION_NUMBER >= 0x00908070L static const long Cryptography_HAS_SSL_SET_SSL_CTX = 1; #else @@ -453,7 +483,7 @@ static const long Cryptography_HAS_NETBSD_D1_METH = 1; static const long Cryptography_HAS_NETBSD_D1_METH = 1; #endif -// Workaround for #794 caused by cffi const** bug. +/* Workaround for #794 caused by cffi const** bug. */ const SSL_METHOD* Cryptography_SSL_CTX_get_method(const SSL_CTX* ctx) { return ctx->method; } @@ -488,6 +518,28 @@ void (*SSL_get0_next_proto_negotiated)(const SSL *, #else static const long Cryptography_HAS_NEXTPROTONEG = 1; #endif + +/* ALPN was added in OpenSSL 1.0.2. 
*/ +#if OPENSSL_VERSION_NUMBER < 0x10002001L +int (*SSL_CTX_set_alpn_protos)(SSL_CTX *, + const unsigned char*, + unsigned) = NULL; +int (*SSL_set_alpn_protos)(SSL *, const unsigned char*, unsigned) = NULL; +void (*SSL_CTX_set_alpn_select_cb)(SSL_CTX *, + int (*) (SSL *, + const unsigned char **, + unsigned char *, + const unsigned char *, + unsigned int, + void *), + void *) = NULL; +void (*SSL_get0_alpn_selected)(const SSL *, + const unsigned char **, + unsigned *) = NULL; +static const long Cryptography_HAS_ALPN = 0; +#else +static const long Cryptography_HAS_ALPN = 1; +#endif """ CONDITIONAL_NAMES = { @@ -551,5 +603,18 @@ CONDITIONAL_NAMES = { "SSL_CTX_set_next_proto_select_cb", "SSL_select_next_proto", "SSL_get0_next_proto_negotiated", + ], + + "Cryptography_HAS_SECURE_RENEGOTIATION": [ + "SSL_OP_ALLOW_UNSAFE_LEGACY_RENEGOTIATION", + "SSL_OP_LEGACY_SERVER_CONNECT", + "SSL_get_secure_renegotiation_support", + ], + + "Cryptography_HAS_ALPN": [ + "SSL_CTX_set_alpn_protos", + "SSL_set_alpn_protos", + "SSL_CTX_set_alpn_select_cb", + "SSL_get0_alpn_selected", ] } diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/x509.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/x509.py index 36a15e4..b74c118 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/x509.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/x509.py @@ -24,11 +24,13 @@ INCLUDES = """ * Note that the result is an opaque type. */ typedef STACK_OF(X509) Cryptography_STACK_OF_X509; +typedef STACK_OF(X509_CRL) Cryptography_STACK_OF_X509_CRL; typedef STACK_OF(X509_REVOKED) Cryptography_STACK_OF_X509_REVOKED; """ TYPES = """ typedef ... Cryptography_STACK_OF_X509; +typedef ... Cryptography_STACK_OF_X509_CRL; typedef ... Cryptography_STACK_OF_X509_REVOKED; typedef struct { @@ -76,7 +78,6 @@ typedef struct { ...; } X509; -typedef ... X509_STORE; typedef ... NETSCAPE_SPKI; """ @@ -166,12 +167,6 @@ EVP_PKEY *d2i_PUBKEY_bio(BIO *, EVP_PKEY **); ASN1_INTEGER *X509_get_serialNumber(X509 *); int X509_set_serialNumber(X509 *, ASN1_INTEGER *); -/* X509_STORE */ -X509_STORE *X509_STORE_new(void); -void X509_STORE_free(X509_STORE *); -int X509_STORE_add_cert(X509_STORE *, X509 *); -int X509_verify_cert(X509_STORE_CTX *); - const char *X509_verify_cert_error_string(long); const char *X509_get_default_cert_area(void); @@ -190,7 +185,6 @@ DSA *d2i_DSA_PUBKEY(DSA **, const unsigned char **, long); DSA *d2i_DSAPublicKey(DSA **, const unsigned char **, long); DSA *d2i_DSAPrivateKey(DSA **, const unsigned char **, long); - RSA *d2i_RSAPrivateKey_bio(BIO *, RSA **); int i2d_RSAPrivateKey_bio(BIO *, RSA *); RSA *d2i_RSAPublicKey_bio(BIO *, RSA **); @@ -237,7 +231,7 @@ int i2d_DSAPrivateKey(DSA *, unsigned char **); int X509_CRL_set_lastUpdate(X509_CRL *, ASN1_TIME *); int X509_CRL_set_nextUpdate(X509_CRL *, ASN1_TIME *); -/* these use STACK_OF(X509_EXTENSION) in 0.9.8e. Once we drop support for +/* These use STACK_OF(X509_EXTENSION) in 0.9.8e. Once we drop support for RHEL/CentOS 5 we should move these back to FUNCTIONS. */ int X509_REQ_add_extensions(X509_REQ *, X509_EXTENSIONS *); X509_EXTENSIONS *X509_REQ_get_extensions(X509_REQ *); @@ -251,7 +245,7 @@ int i2d_ECPrivateKey_bio(BIO *, EC_KEY *); """ CUSTOMIZATIONS = """ -// OpenSSL 0.9.8e does not have this definition +/* OpenSSL 0.9.8e does not have this definition. 
*/ #if OPENSSL_VERSION_NUMBER <= 0x0090805fL typedef STACK_OF(X509_EXTENSION) X509_EXTENSIONS; #endif diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/x509_vfy.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/x509_vfy.py new file mode 100644 index 0000000..601926c --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/x509_vfy.py @@ -0,0 +1,336 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + +INCLUDES = """ +#include <openssl/x509_vfy.h> + +/* + * This is part of a work-around for the difficulty cffi has in dealing with + * `STACK_OF(foo)` as the name of a type. We invent a new, simpler name that + * will be an alias for this type and use the alias throughout. This works + * together with another opaque typedef for the same name in the TYPES section. + * Note that the result is an opaque type. + */ +typedef STACK_OF(ASN1_OBJECT) Cryptography_STACK_OF_ASN1_OBJECT; +""" + +TYPES = """ +static const long Cryptography_HAS_102_VERIFICATION_ERROR_CODES; +static const long Cryptography_HAS_102_VERIFICATION_PARAMS; +static const long Cryptography_HAS_X509_V_FLAG_TRUSTED_FIRST; +static const long Cryptography_HAS_X509_V_FLAG_PARTIAL_CHAIN; +static const long Cryptography_HAS_100_VERIFICATION_ERROR_CODES; +static const long Cryptography_HAS_100_VERIFICATION_PARAMS; +static const long Cryptography_HAS_X509_V_FLAG_CHECK_SS_SIGNATURE; + +typedef ... Cryptography_STACK_OF_ASN1_OBJECT; + +typedef ... X509_STORE; +typedef ... X509_STORE_CTX; +typedef ... X509_VERIFY_PARAM; + +/* While these are defined in the source as ints, they're tagged here + as longs, just in case they ever grow too large, such as what we saw + with OP_ALL.
*/ + +/* Verification error codes */ +static const int X509_V_OK; +static const int X509_V_ERR_UNABLE_TO_GET_ISSUER_CERT; +static const int X509_V_ERR_UNABLE_TO_GET_CRL; +static const int X509_V_ERR_UNABLE_TO_DECRYPT_CERT_SIGNATURE; +static const int X509_V_ERR_UNABLE_TO_DECRYPT_CRL_SIGNATURE; +static const int X509_V_ERR_UNABLE_TO_DECODE_ISSUER_PUBLIC_KEY; +static const int X509_V_ERR_CERT_SIGNATURE_FAILURE; +static const int X509_V_ERR_CRL_SIGNATURE_FAILURE; +static const int X509_V_ERR_CERT_NOT_YET_VALID; +static const int X509_V_ERR_CERT_HAS_EXPIRED; +static const int X509_V_ERR_CRL_NOT_YET_VALID; +static const int X509_V_ERR_CRL_HAS_EXPIRED; +static const int X509_V_ERR_ERROR_IN_CERT_NOT_BEFORE_FIELD; +static const int X509_V_ERR_ERROR_IN_CERT_NOT_AFTER_FIELD; +static const int X509_V_ERR_ERROR_IN_CRL_LAST_UPDATE_FIELD; +static const int X509_V_ERR_ERROR_IN_CRL_NEXT_UPDATE_FIELD; +static const int X509_V_ERR_OUT_OF_MEM; +static const int X509_V_ERR_DEPTH_ZERO_SELF_SIGNED_CERT; +static const int X509_V_ERR_SELF_SIGNED_CERT_IN_CHAIN; +static const int X509_V_ERR_UNABLE_TO_GET_ISSUER_CERT_LOCALLY; +static const int X509_V_ERR_UNABLE_TO_VERIFY_LEAF_SIGNATURE; +static const int X509_V_ERR_CERT_CHAIN_TOO_LONG; +static const int X509_V_ERR_CERT_REVOKED; +static const int X509_V_ERR_INVALID_CA; +static const int X509_V_ERR_PATH_LENGTH_EXCEEDED; +static const int X509_V_ERR_INVALID_PURPOSE; +static const int X509_V_ERR_CERT_UNTRUSTED; +static const int X509_V_ERR_CERT_REJECTED; +static const int X509_V_ERR_SUBJECT_ISSUER_MISMATCH; +static const int X509_V_ERR_AKID_SKID_MISMATCH; +static const int X509_V_ERR_AKID_ISSUER_SERIAL_MISMATCH; +static const int X509_V_ERR_KEYUSAGE_NO_CERTSIGN; +static const int X509_V_ERR_UNABLE_TO_GET_CRL_ISSUER; +static const int X509_V_ERR_UNHANDLED_CRITICAL_EXTENSION; +static const int X509_V_ERR_KEYUSAGE_NO_CRL_SIGN; +static const int X509_V_ERR_UNHANDLED_CRITICAL_CRL_EXTENSION; +static const int X509_V_ERR_INVALID_NON_CA; +static const int X509_V_ERR_PROXY_PATH_LENGTH_EXCEEDED; +static const int X509_V_ERR_KEYUSAGE_NO_DIGITAL_SIGNATURE; +static const int X509_V_ERR_PROXY_CERTIFICATES_NOT_ALLOWED; +static const int X509_V_ERR_INVALID_EXTENSION; +static const int X509_V_ERR_INVALID_POLICY_EXTENSION; +static const int X509_V_ERR_NO_EXPLICIT_POLICY; +static const int X509_V_ERR_DIFFERENT_CRL_SCOPE; +static const int X509_V_ERR_UNSUPPORTED_EXTENSION_FEATURE; +static const int X509_V_ERR_UNNESTED_RESOURCE; +static const int X509_V_ERR_PERMITTED_VIOLATION; +static const int X509_V_ERR_EXCLUDED_VIOLATION; +static const int X509_V_ERR_SUBTREE_MINMAX; +static const int X509_V_ERR_UNSUPPORTED_CONSTRAINT_TYPE; +static const int X509_V_ERR_UNSUPPORTED_CONSTRAINT_SYNTAX; +static const int X509_V_ERR_UNSUPPORTED_NAME_SYNTAX; +static const int X509_V_ERR_CRL_PATH_VALIDATION_ERROR; +static const int X509_V_ERR_SUITE_B_INVALID_VERSION; +static const int X509_V_ERR_SUITE_B_INVALID_ALGORITHM; +static const int X509_V_ERR_SUITE_B_INVALID_CURVE; +static const int X509_V_ERR_SUITE_B_INVALID_SIGNATURE_ALGORITHM; +static const int X509_V_ERR_SUITE_B_LOS_NOT_ALLOWED; +static const int X509_V_ERR_SUITE_B_CANNOT_SIGN_P_384_WITH_P_256; +static const int X509_V_ERR_HOSTNAME_MISMATCH; +static const int X509_V_ERR_EMAIL_MISMATCH; +static const int X509_V_ERR_IP_ADDRESS_MISMATCH; +static const int X509_V_ERR_APPLICATION_VERIFICATION; + +/* Verification parameters */ +static const long X509_V_FLAG_CB_ISSUER_CHECK; +static const long X509_V_FLAG_USE_CHECK_TIME; +static const long 
X509_V_FLAG_CRL_CHECK; +static const long X509_V_FLAG_CRL_CHECK_ALL; +static const long X509_V_FLAG_IGNORE_CRITICAL; +static const long X509_V_FLAG_X509_STRICT; +static const long X509_V_FLAG_ALLOW_PROXY_CERTS; +static const long X509_V_FLAG_POLICY_CHECK; +static const long X509_V_FLAG_EXPLICIT_POLICY; +static const long X509_V_FLAG_INHIBIT_ANY; +static const long X509_V_FLAG_INHIBIT_MAP; +static const long X509_V_FLAG_NOTIFY_POLICY; +static const long X509_V_FLAG_EXTENDED_CRL_SUPPORT; +static const long X509_V_FLAG_USE_DELTAS; +static const long X509_V_FLAG_CHECK_SS_SIGNATURE; +static const long X509_V_FLAG_TRUSTED_FIRST; +static const long X509_V_FLAG_SUITEB_128_LOS_ONLY; +static const long X509_V_FLAG_SUITEB_192_LOS; +static const long X509_V_FLAG_SUITEB_128_LOS; +static const long X509_V_FLAG_PARTIAL_CHAIN; +""" + +FUNCTIONS = """ +int X509_verify_cert(X509_STORE_CTX *); + +/* X509_STORE */ +X509_STORE *X509_STORE_new(void); +void X509_STORE_free(X509_STORE *); +int X509_STORE_add_cert(X509_STORE *, X509 *); + +/* X509_STORE_CTX */ +X509_STORE_CTX *X509_STORE_CTX_new(void); +void X509_STORE_CTX_cleanup(X509_STORE_CTX *); +void X509_STORE_CTX_free(X509_STORE_CTX *); +int X509_STORE_CTX_init(X509_STORE_CTX *, X509_STORE *, X509 *, + Cryptography_STACK_OF_X509 *); +void X509_STORE_CTX_trusted_stack(X509_STORE_CTX *, + Cryptography_STACK_OF_X509 *); +void X509_STORE_CTX_set_cert(X509_STORE_CTX *, X509 *); +void X509_STORE_CTX_set_chain(X509_STORE_CTX *,Cryptography_STACK_OF_X509 *); +X509_VERIFY_PARAM *X509_STORE_CTX_get0_param(X509_STORE_CTX *); +void X509_STORE_CTX_set0_param(X509_STORE_CTX *, X509_VERIFY_PARAM *); +int X509_STORE_CTX_set_default(X509_STORE_CTX *, const char *); +void X509_STORE_CTX_set_verify_cb(X509_STORE_CTX *, + int (*)(int, X509_STORE_CTX *)); +Cryptography_STACK_OF_X509 *X509_STORE_CTX_get_chain(X509_STORE_CTX *); +Cryptography_STACK_OF_X509 *X509_STORE_CTX_get1_chain(X509_STORE_CTX *); +int X509_STORE_CTX_get_error(X509_STORE_CTX *); +void X509_STORE_CTX_set_error(X509_STORE_CTX *, int); +int X509_STORE_CTX_get_error_depth(X509_STORE_CTX *); +X509 *X509_STORE_CTX_get_current_cert(X509_STORE_CTX *); +int X509_STORE_CTX_set_ex_data(X509_STORE_CTX *, int, void *); +void *X509_STORE_CTX_get_ex_data(X509_STORE_CTX *, int); + +/* X509_VERIFY_PARAM */ +X509_VERIFY_PARAM *X509_VERIFY_PARAM_new(void); +int X509_VERIFY_PARAM_set_flags(X509_VERIFY_PARAM *, unsigned long); +int X509_VERIFY_PARAM_clear_flags(X509_VERIFY_PARAM *, unsigned long); +unsigned long X509_VERIFY_PARAM_get_flags(X509_VERIFY_PARAM *); +int X509_VERIFY_PARAM_set_purpose(X509_VERIFY_PARAM *, int); +int X509_VERIFY_PARAM_set_trust(X509_VERIFY_PARAM *, int); +void X509_VERIFY_PARAM_set_time(X509_VERIFY_PARAM *, time_t); +int X509_VERIFY_PARAM_add0_policy(X509_VERIFY_PARAM *, ASN1_OBJECT *); +int X509_VERIFY_PARAM_set1_policies(X509_VERIFY_PARAM *, + Cryptography_STACK_OF_ASN1_OBJECT *); +void X509_VERIFY_PARAM_set_depth(X509_VERIFY_PARAM *, int); +int X509_VERIFY_PARAM_get_depth(const X509_VERIFY_PARAM *); +""" + +MACROS = """ +/* X509_STORE_CTX */ +void X509_STORE_CTX_set0_crls(X509_STORE_CTX *, + Cryptography_STACK_OF_X509_CRL *); + +/* X509_VERIFY_PARAM */ +int X509_VERIFY_PARAM_set1_host(X509_VERIFY_PARAM *, const char *, + size_t); +void X509_VERIFY_PARAM_set_hostflags(X509_VERIFY_PARAM *, unsigned int); +int X509_VERIFY_PARAM_set1_email(X509_VERIFY_PARAM *, const char *, + size_t); +int X509_VERIFY_PARAM_set1_ip(X509_VERIFY_PARAM *, const unsigned char *, + size_t); +int 
X509_VERIFY_PARAM_set1_ip_asc(X509_VERIFY_PARAM *, const char *); +""" + +CUSTOMIZATIONS = """ +/* OpenSSL 1.0.2+ verification error codes */ +#if OPENSSL_VERSION_NUMBER >= 0x10002000L +static const long Cryptography_HAS_102_VERIFICATION_ERROR_CODES = 1; +#else +static const long Cryptography_HAS_102_VERIFICATION_ERROR_CODES = 0; +static const long X509_V_ERR_SUITE_B_INVALID_VERSION = 0; +static const long X509_V_ERR_SUITE_B_INVALID_ALGORITHM = 0; +static const long X509_V_ERR_SUITE_B_INVALID_CURVE = 0; +static const long X509_V_ERR_SUITE_B_INVALID_SIGNATURE_ALGORITHM = 0; +static const long X509_V_ERR_SUITE_B_LOS_NOT_ALLOWED = 0; +static const long X509_V_ERR_SUITE_B_CANNOT_SIGN_P_384_WITH_P_256 = 0; +static const long X509_V_ERR_HOSTNAME_MISMATCH = 0; +static const long X509_V_ERR_EMAIL_MISMATCH = 0; +static const long X509_V_ERR_IP_ADDRESS_MISMATCH = 0; +#endif + +/* OpenSSL 1.0.2+ verification parameters */ +#if OPENSSL_VERSION_NUMBER >= 0x10002000L +static const long Cryptography_HAS_102_VERIFICATION_PARAMS = 1; +#else +static const long Cryptography_HAS_102_VERIFICATION_PARAMS = 0; +/* X509_V_FLAG_TRUSTED_FIRST is also new in 1.0.2+, but it is added separately + below because it shows up in some earlier 3rd party OpenSSL packages. */ +static const long X509_V_FLAG_SUITEB_128_LOS_ONLY = 0; +static const long X509_V_FLAG_SUITEB_192_LOS = 0; +static const long X509_V_FLAG_SUITEB_128_LOS = 0; + +int (*X509_VERIFY_PARAM_set1_host)(X509_VERIFY_PARAM *, const char *, + size_t) = NULL; +int (*X509_VERIFY_PARAM_set1_email)(X509_VERIFY_PARAM *, const char *, + size_t) = NULL; +int (*X509_VERIFY_PARAM_set1_ip)(X509_VERIFY_PARAM *, const unsigned char *, + size_t) = NULL; +int (*X509_VERIFY_PARAM_set1_ip_asc)(X509_VERIFY_PARAM *, const char *) = NULL; +void (*X509_VERIFY_PARAM_set_hostflags)(X509_VERIFY_PARAM *, + unsigned int) = NULL; +#endif + +/* OpenSSL 1.0.2+ or Solaris's backport */ +#ifdef X509_V_FLAG_PARTIAL_CHAIN +static const long Cryptography_HAS_X509_V_FLAG_PARTIAL_CHAIN = 1; +#else +static const long Cryptography_HAS_X509_V_FLAG_PARTIAL_CHAIN = 0; +static const long X509_V_FLAG_PARTIAL_CHAIN = 0; +#endif + +/* OpenSSL 1.0.2+, *or* Fedora 20's flavor of OpenSSL 1.0.1e... 
*/ +#ifdef X509_V_FLAG_TRUSTED_FIRST +static const long Cryptography_HAS_X509_V_FLAG_TRUSTED_FIRST = 1; +#else +static const long Cryptography_HAS_X509_V_FLAG_TRUSTED_FIRST = 0; +static const long X509_V_FLAG_TRUSTED_FIRST = 0; +#endif + +/* OpenSSL 1.0.0+ verification error codes */ +#if OPENSSL_VERSION_NUMBER >= 0x10000000L +static const long Cryptography_HAS_100_VERIFICATION_ERROR_CODES = 1; +#else +static const long Cryptography_HAS_100_VERIFICATION_ERROR_CODES = 0; +static const long X509_V_ERR_DIFFERENT_CRL_SCOPE = 0; +static const long X509_V_ERR_UNSUPPORTED_EXTENSION_FEATURE = 0; +static const long X509_V_ERR_PERMITTED_VIOLATION = 0; +static const long X509_V_ERR_EXCLUDED_VIOLATION = 0; +static const long X509_V_ERR_SUBTREE_MINMAX = 0; +static const long X509_V_ERR_UNSUPPORTED_CONSTRAINT_TYPE = 0; +static const long X509_V_ERR_UNSUPPORTED_CONSTRAINT_SYNTAX = 0; +static const long X509_V_ERR_UNSUPPORTED_NAME_SYNTAX = 0; +static const long X509_V_ERR_CRL_PATH_VALIDATION_ERROR = 0; +#endif + +/* OpenSSL 1.0.0+ verification parameters */ +#if OPENSSL_VERSION_NUMBER >= 0x10000000L +static const long Cryptography_HAS_100_VERIFICATION_PARAMS = 1; +#else +static const long Cryptography_HAS_100_VERIFICATION_PARAMS = 0; +static const long X509_V_FLAG_EXTENDED_CRL_SUPPORT = 0; +static const long X509_V_FLAG_USE_DELTAS = 0; +#endif + +/* OpenSSL 0.9.8recent+ */ +#ifdef X509_V_FLAG_CHECK_SS_SIGNATURE +static const long Cryptography_HAS_X509_V_FLAG_CHECK_SS_SIGNATURE = 1; +#else +static const long Cryptography_HAS_X509_V_FLAG_CHECK_SS_SIGNATURE = 0; +static const long X509_V_FLAG_CHECK_SS_SIGNATURE = 0; +#endif +""" + +CONDITIONAL_NAMES = { + "Cryptography_HAS_102_VERIFICATION_ERROR_CODES": [ + 'X509_V_ERR_SUITE_B_INVALID_VERSION', + 'X509_V_ERR_SUITE_B_INVALID_ALGORITHM', + 'X509_V_ERR_SUITE_B_INVALID_CURVE', + 'X509_V_ERR_SUITE_B_INVALID_SIGNATURE_ALGORITHM', + 'X509_V_ERR_SUITE_B_LOS_NOT_ALLOWED', + 'X509_V_ERR_SUITE_B_CANNOT_SIGN_P_384_WITH_P_256', + 'X509_V_ERR_HOSTNAME_MISMATCH', + 'X509_V_ERR_EMAIL_MISMATCH', + 'X509_V_ERR_IP_ADDRESS_MISMATCH' + ], + "Cryptography_HAS_102_VERIFICATION_PARAMS": [ + "X509_V_FLAG_SUITEB_128_LOS_ONLY", + "X509_V_FLAG_SUITEB_192_LOS", + "X509_V_FLAG_SUITEB_128_LOS", + "X509_VERIFY_PARAM_set1_host", + "X509_VERIFY_PARAM_set1_email", + "X509_VERIFY_PARAM_set1_ip", + "X509_VERIFY_PARAM_set1_ip_asc", + "X509_VERIFY_PARAM_set_hostflags", + ], + "Cryptography_HAS_X509_V_FLAG_TRUSTED_FIRST": [ + "X509_V_FLAG_TRUSTED_FIRST", + ], + "Cryptography_HAS_X509_V_FLAG_PARTIAL_CHAIN": [ + "X509_V_FLAG_PARTIAL_CHAIN", + ], + "Cryptography_HAS_100_VERIFICATION_ERROR_CODES": [ + 'X509_V_ERR_DIFFERENT_CRL_SCOPE', + 'X509_V_ERR_UNSUPPORTED_EXTENSION_FEATURE', + 'X509_V_ERR_UNNESTED_RESOURCE', + 'X509_V_ERR_PERMITTED_VIOLATION', + 'X509_V_ERR_EXCLUDED_VIOLATION', + 'X509_V_ERR_SUBTREE_MINMAX', + 'X509_V_ERR_UNSUPPORTED_CONSTRAINT_TYPE', + 'X509_V_ERR_UNSUPPORTED_CONSTRAINT_SYNTAX', + 'X509_V_ERR_UNSUPPORTED_NAME_SYNTAX', + 'X509_V_ERR_CRL_PATH_VALIDATION_ERROR', + ], + "Cryptography_HAS_100_VERIFICATION_PARAMS": [ + "Cryptography_HAS_100_VERIFICATION_PARAMS", + "X509_V_FLAG_EXTENDED_CRL_SUPPORT", + "X509_V_FLAG_USE_DELTAS", + ], + "Cryptography_HAS_X509_V_FLAG_CHECK_SS_SIGNATURE": [ + "X509_V_FLAG_CHECK_SS_SIGNATURE", + ] +} diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/x509v3.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/x509v3.py index 02ec250..cf4be1f 100644 --- 
a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/x509v3.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/bindings/openssl/x509v3.py @@ -82,6 +82,8 @@ FUNCTIONS = """ void X509V3_set_ctx(X509V3_CTX *, X509 *, X509 *, X509_REQ *, X509_CRL *, int); X509_EXTENSION *X509V3_EXT_nconf(CONF *, X509V3_CTX *, char *, char *); int GENERAL_NAME_print(BIO *, GENERAL_NAME *); +void GENERAL_NAMES_free(GENERAL_NAMES *); +void *X509V3_EXT_d2i(X509_EXTENSION *); """ MACROS = """ diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py index aa3cdc9..04b2272 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/asymmetric/dsa.py @@ -13,6 +13,8 @@ from __future__ import absolute_import, division, print_function +import warnings + import six from cryptography import utils @@ -21,31 +23,54 @@ from cryptography.hazmat.backends.interfaces import DSABackend from cryptography.hazmat.primitives import interfaces -def _check_dsa_parameters(modulus, subgroup_order, generator): - if ( - not isinstance(modulus, six.integer_types) or - not isinstance(subgroup_order, six.integer_types) or - not isinstance(generator, six.integer_types) - ): - raise TypeError("DSA parameters must be integers") +def generate_parameters(key_size, backend): + return backend.generate_dsa_parameters(key_size) - if (utils.bit_length(modulus), - utils.bit_length(subgroup_order)) not in ( + +def generate_private_key(key_size, backend): + return backend.generate_dsa_private_key_and_parameters(key_size) + + +def _check_dsa_parameters(parameters): + if (utils.bit_length(parameters.p), + utils.bit_length(parameters.q)) not in ( (1024, 160), (2048, 256), (3072, 256)): - raise ValueError("modulus and subgroup_order lengths must be " + raise ValueError("p and q lengths must be " "one of these pairs (1024, 160) or (2048, 256) " - "or (3072, 256)") + "or (3072, 256).") - if generator <= 1 or generator >= modulus: - raise ValueError("generator must be > 1 and < modulus") + if not (1 < parameters.g < parameters.p): + raise ValueError("g, p don't satisfy 1 < g < p.") + + +def _check_dsa_private_numbers(numbers): + parameters = numbers.public_numbers.parameter_numbers + _check_dsa_parameters(parameters) + if numbers.x <= 0 or numbers.x >= parameters.q: + raise ValueError("x must be > 0 and < q.") + + if numbers.public_numbers.y != pow(parameters.g, numbers.x, parameters.p): + raise ValueError("y must be equal to (g ** x % p).") @utils.register_interface(interfaces.DSAParameters) class DSAParameters(object): def __init__(self, modulus, subgroup_order, generator): - _check_dsa_parameters(modulus, subgroup_order, generator) + warnings.warn( + "The DSAParameters class is deprecated and will be removed in a " + "future version.", + utils.DeprecatedIn05, + stacklevel=2 + ) + _check_dsa_parameters( + DSAParameterNumbers( + p=modulus, + q=subgroup_order, + g=generator + ) + ) self._modulus = modulus self._subgroup_order = subgroup_order @@ -53,13 +78,24 @@ class DSAParameters(object): @classmethod def generate(cls, key_size, backend): + warnings.warn( + "generate is deprecated and will be removed in a future version.", + utils.DeprecatedIn05, + stacklevel=2 + ) if not isinstance(backend, DSABackend): raise UnsupportedAlgorithm( - "Backend object does not implement 
DSABackend", + "Backend object does not implement DSABackend.", _Reasons.BACKEND_MISSING_INTERFACE ) - return backend.generate_dsa_parameters(key_size) + parameters = backend.generate_dsa_parameters(key_size) + numbers = parameters.parameter_numbers() + return cls( + modulus=numbers.p, + subgroup_order=numbers.q, + generator=numbers.g + ) @property def modulus(self): @@ -89,18 +125,31 @@ class DSAParameters(object): @utils.register_interface(interfaces.DSAPrivateKey) class DSAPrivateKey(object): def __init__(self, modulus, subgroup_order, generator, x, y): - _check_dsa_parameters(modulus, subgroup_order, generator) + warnings.warn( + "The DSAPrivateKey class is deprecated and will be removed in a " + "future version.", + utils.DeprecatedIn05, + stacklevel=2 + ) if ( not isinstance(x, six.integer_types) or not isinstance(y, six.integer_types) ): - raise TypeError("DSAPrivateKey arguments must be integers") + raise TypeError("DSAPrivateKey arguments must be integers.") - if x <= 0 or x >= subgroup_order: - raise ValueError("x must be > 0 and < subgroup_order") - - if y != pow(generator, x, modulus): - raise ValueError("y must be equal to (generator ** x % modulus)") + _check_dsa_private_numbers( + DSAPrivateNumbers( + public_numbers=DSAPublicNumbers( + parameter_numbers=DSAParameterNumbers( + p=modulus, + q=subgroup_order, + g=generator + ), + y=y + ), + x=x + ) + ) self._modulus = modulus self._subgroup_order = subgroup_order @@ -110,18 +159,31 @@ class DSAPrivateKey(object): @classmethod def generate(cls, parameters, backend): + warnings.warn( + "generate is deprecated and will be removed in a future version.", + utils.DeprecatedIn05, + stacklevel=2 + ) if not isinstance(backend, DSABackend): raise UnsupportedAlgorithm( - "Backend object does not implement DSABackend", + "Backend object does not implement DSABackend.", _Reasons.BACKEND_MISSING_INTERFACE ) - return backend.generate_dsa_private_key(parameters) + key = backend.generate_dsa_private_key(parameters) + private_numbers = key.private_numbers() + return cls( + modulus=private_numbers.public_numbers.parameter_numbers.p, + subgroup_order=private_numbers.public_numbers.parameter_numbers.q, + generator=private_numbers.public_numbers.parameter_numbers.g, + x=private_numbers.x, + y=private_numbers.public_numbers.y + ) def signer(self, algorithm, backend): if not isinstance(backend, DSABackend): raise UnsupportedAlgorithm( - "Backend object does not implement DSABackend", + "Backend object does not implement DSABackend.", _Reasons.BACKEND_MISSING_INTERFACE ) @@ -151,9 +213,21 @@ class DSAPrivateKey(object): @utils.register_interface(interfaces.DSAPublicKey) class DSAPublicKey(object): def __init__(self, modulus, subgroup_order, generator, y): - _check_dsa_parameters(modulus, subgroup_order, generator) + warnings.warn( + "The DSAPublicKey class is deprecated and will be removed in a " + "future version.", + utils.DeprecatedIn05, + stacklevel=2 + ) + _check_dsa_parameters( + DSAParameterNumbers( + p=modulus, + q=subgroup_order, + g=generator + ) + ) if not isinstance(y, six.integer_types): - raise TypeError("y must be an integer") + raise TypeError("y must be an integer.") self._modulus = modulus self._subgroup_order = subgroup_order @@ -163,7 +237,7 @@ class DSAPublicKey(object): def verifier(self, signature, algorithm, backend): if not isinstance(backend, DSABackend): raise UnsupportedAlgorithm( - "Backend object does not implement DSABackend", + "Backend object does not implement DSABackend.", _Reasons.BACKEND_MISSING_INTERFACE ) @@ 
-181,3 +255,83 @@ class DSAPublicKey(object): def parameters(self): return DSAParameters(self._modulus, self._subgroup_order, self._generator) + + +class DSAParameterNumbers(object): + def __init__(self, p, q, g): + if ( + not isinstance(p, six.integer_types) or + not isinstance(q, six.integer_types) or + not isinstance(g, six.integer_types) + ): + raise TypeError( + "DSAParameterNumbers p, q, and g arguments must be integers." + ) + + self._p = p + self._q = q + self._g = g + + @property + def p(self): + return self._p + + @property + def q(self): + return self._q + + @property + def g(self): + return self._g + + def parameters(self, backend): + return backend.load_dsa_parameter_numbers(self) + + +class DSAPublicNumbers(object): + def __init__(self, y, parameter_numbers): + if not isinstance(y, six.integer_types): + raise TypeError("DSAPublicNumbers y argument must be an integer.") + + if not isinstance(parameter_numbers, DSAParameterNumbers): + raise TypeError( + "parameter_numbers must be a DSAParameterNumbers instance." + ) + + self._y = y + self._parameter_numbers = parameter_numbers + + @property + def y(self): + return self._y + + @property + def parameter_numbers(self): + return self._parameter_numbers + + def public_key(self, backend): + return backend.load_dsa_public_numbers(self) + + +class DSAPrivateNumbers(object): + def __init__(self, x, public_numbers): + if not isinstance(x, six.integer_types): + raise TypeError("DSAPrivateNumbers x argument must be an integer.") + + if not isinstance(public_numbers, DSAPublicNumbers): + raise TypeError( + "public_numbers must be a DSAPublicNumbers instance." + ) + self._public_numbers = public_numbers + self._x = x + + @property + def x(self): + return self._x + + @property + def public_numbers(self): + return self._public_numbers + + def private_key(self, backend): + return backend.load_dsa_private_numbers(self) diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py new file mode 100644 index 0000000..220a419 --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/asymmetric/ec.py @@ -0,0 +1,255 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
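(Not part of the patch itself.) The dsa.py hunk above replaces the integer-argument key classes with DSAParameterNumbers / DSAPublicNumbers / DSAPrivateNumbers plus module-level generators. A minimal sketch of how the new API might be exercised, assuming default_backend() implements DSABackend; the toy integers below only illustrate the value-object layer and are never handed to a backend:

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives.asymmetric import dsa

    backend = default_backend()

    # New module-level helper; replaces the deprecated
    # DSAParameters.generate() / DSAPrivateKey.generate() classmethods.
    private_key = dsa.generate_private_key(key_size=1024, backend=backend)

    # The *Numbers classes are plain value objects; real validation only
    # happens when they are turned into backend keys via private_key(backend).
    params = dsa.DSAParameterNumbers(p=7, q=3, g=2)   # toy integers, illustration only
    pub = dsa.DSAPublicNumbers(y=4, parameter_numbers=params)
    priv = dsa.DSAPrivateNumbers(x=2, public_numbers=pub)
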
+ +from __future__ import absolute_import, division, print_function + +import six + +from cryptography import utils +from cryptography.hazmat.primitives import interfaces + + +@utils.register_interface(interfaces.EllipticCurve) +class SECT571R1(object): + @property + def name(self): + return "sect571r1" + + @property + def key_size(self): + return 571 + + +@utils.register_interface(interfaces.EllipticCurve) +class SECT409R1(object): + @property + def name(self): + return "sect409r1" + + @property + def key_size(self): + return 409 + + +@utils.register_interface(interfaces.EllipticCurve) +class SECT283R1(object): + @property + def name(self): + return "sect283r1" + + @property + def key_size(self): + return 283 + + +@utils.register_interface(interfaces.EllipticCurve) +class SECT233R1(object): + @property + def name(self): + return "sect233r1" + + @property + def key_size(self): + return 233 + + +@utils.register_interface(interfaces.EllipticCurve) +class SECT163R2(object): + @property + def name(self): + return "sect163r2" + + @property + def key_size(self): + return 163 + + +@utils.register_interface(interfaces.EllipticCurve) +class SECT571K1(object): + @property + def name(self): + return "sect571k1" + + @property + def key_size(self): + return 571 + + +@utils.register_interface(interfaces.EllipticCurve) +class SECT409K1(object): + @property + def name(self): + return "sect409k1" + + @property + def key_size(self): + return 409 + + +@utils.register_interface(interfaces.EllipticCurve) +class SECT283K1(object): + @property + def name(self): + return "sect283k1" + + @property + def key_size(self): + return 283 + + +@utils.register_interface(interfaces.EllipticCurve) +class SECT233K1(object): + @property + def name(self): + return "sect233k1" + + @property + def key_size(self): + return 233 + + +@utils.register_interface(interfaces.EllipticCurve) +class SECT163K1(object): + @property + def name(self): + return "sect163k1" + + @property + def key_size(self): + return 163 + + +@utils.register_interface(interfaces.EllipticCurve) +class SECP521R1(object): + @property + def name(self): + return "secp521r1" + + @property + def key_size(self): + return 521 + + +@utils.register_interface(interfaces.EllipticCurve) +class SECP384R1(object): + @property + def name(self): + return "secp384r1" + + @property + def key_size(self): + return 384 + + +@utils.register_interface(interfaces.EllipticCurve) +class SECP256R1(object): + @property + def name(self): + return "secp256r1" + + @property + def key_size(self): + return 256 + + +@utils.register_interface(interfaces.EllipticCurve) +class SECP224R1(object): + @property + def name(self): + return "secp224r1" + + @property + def key_size(self): + return 224 + + +@utils.register_interface(interfaces.EllipticCurve) +class SECP192R1(object): + @property + def name(self): + return "secp192r1" + + @property + def key_size(self): + return 192 + + +@utils.register_interface(interfaces.EllipticCurveSignatureAlgorithm) +class ECDSA(object): + def __init__(self, algorithm): + self._algorithm = algorithm + + @property + def algorithm(self): + return self._algorithm + + +def generate_private_key(curve, backend): + return backend.generate_elliptic_curve_private_key(curve) + + +class EllipticCurvePublicNumbers(object): + def __init__(self, x, y, curve): + if ( + not isinstance(x, six.integer_types) or + not isinstance(y, six.integer_types) + ): + raise TypeError("x and y must be integers.") + + if not isinstance(curve, interfaces.EllipticCurve): + raise TypeError("curve must 
provide the EllipticCurve interface.") + + self._y = y + self._x = x + self._curve = curve + + def public_key(self, backend): + return backend.elliptic_curve_public_key_from_numbers(self) + + @property + def curve(self): + return self._curve + + @property + def x(self): + return self._x + + @property + def y(self): + return self._y + + +class EllipticCurvePrivateNumbers(object): + def __init__(self, private_value, public_numbers): + if not isinstance(private_value, six.integer_types): + raise TypeError("private_value must be an integer.") + + if not isinstance(public_numbers, EllipticCurvePublicNumbers): + raise TypeError( + "public_numbers must be an EllipticCurvePublicNumbers " + "instance." + ) + + self._private_value = private_value + self._public_numbers = public_numbers + + def private_key(self, backend): + return backend.elliptic_curve_private_key_from_numbers(self) + + @property + def private_value(self): + return self._private_value + + @property + def public_numbers(self): + return self._public_numbers diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py index dcc6fe0..d44bbda 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/asymmetric/padding.py @@ -38,18 +38,19 @@ class PSS(object): warnings.warn( "salt_length is deprecated on MGF1 and should be added via the" " PSS constructor.", - utils.DeprecatedIn04 + utils.DeprecatedIn04, + stacklevel=2 ) else: if (not isinstance(salt_length, six.integer_types) and salt_length is not self.MAX_LENGTH): - raise TypeError("salt_length must be an integer") + raise TypeError("salt_length must be an integer.") if salt_length is not self.MAX_LENGTH and salt_length < 0: - raise ValueError("salt_length must be zero or greater") + raise ValueError("salt_length must be zero or greater.") if salt_length is None and self._mgf._salt_length is None: - raise ValueError("You must supply salt_length") + raise ValueError("You must supply salt_length.") self._salt_length = salt_length @@ -80,13 +81,14 @@ class MGF1(object): warnings.warn( "salt_length is deprecated on MGF1 and should be passed to " "the PSS constructor instead.", - utils.DeprecatedIn04 + utils.DeprecatedIn04, + stacklevel=2 ) if (not isinstance(salt_length, six.integer_types) and salt_length is not self.MAX_LENGTH): - raise TypeError("salt_length must be an integer") + raise TypeError("salt_length must be an integer.") if salt_length is not self.MAX_LENGTH and salt_length < 0: - raise ValueError("salt_length must be zero or greater") + raise ValueError("salt_length must be zero or greater.") self._salt_length = salt_length diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py index 5d3bb36..15ec52a 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/asymmetric/rsa.py @@ -13,31 +13,102 @@ from __future__ import absolute_import, division, print_function +import warnings + import six from cryptography import utils from cryptography.exceptions import UnsupportedAlgorithm, _Reasons from cryptography.hazmat.backends.interfaces import RSABackend -from 
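(Not part of the patch itself.) The ec.py file above is entirely new: named curve classes, an ECDSA signature algorithm wrapper, a module-level generate_private_key(), and the EllipticCurve*Numbers value objects. A hedged sketch of generating a key and signing with ECDSA; whether this succeeds depends on default_backend() actually exposing elliptic-curve support for the chosen curve:

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.asymmetric import ec

    backend = default_backend()
    private_key = ec.generate_private_key(ec.SECP256R1(), backend)

    # Sign and verify using the context objects returned by signer()/verifier().
    signer = private_key.signer(ec.ECDSA(hashes.SHA256()))
    signer.update(b"some data to sign")
    signature = signer.finalize()

    verifier = private_key.public_key().verifier(signature, ec.ECDSA(hashes.SHA256()))
    verifier.update(b"some data to sign")
    verifier.verify()
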
cryptography.hazmat.primitives import interfaces -@utils.register_interface(interfaces.RSAPublicKey) +def generate_private_key(public_exponent, key_size, backend): + if not isinstance(backend, RSABackend): + raise UnsupportedAlgorithm( + "Backend object does not implement RSABackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + _verify_rsa_parameters(public_exponent, key_size) + return backend.generate_rsa_private_key(public_exponent, key_size) + + +def _verify_rsa_parameters(public_exponent, key_size): + if public_exponent < 3: + raise ValueError("public_exponent must be >= 3.") + + if public_exponent & 1 == 0: + raise ValueError("public_exponent must be odd.") + + if key_size < 512: + raise ValueError("key_size must be at least 512-bits.") + + +def _check_private_key_components(p, q, private_exponent, dmp1, dmq1, iqmp, + public_exponent, modulus): + if modulus < 3: + raise ValueError("modulus must be >= 3.") + + if p >= modulus: + raise ValueError("p must be < modulus.") + + if q >= modulus: + raise ValueError("q must be < modulus.") + + if dmp1 >= modulus: + raise ValueError("dmp1 must be < modulus.") + + if dmq1 >= modulus: + raise ValueError("dmq1 must be < modulus.") + + if iqmp >= modulus: + raise ValueError("iqmp must be < modulus.") + + if private_exponent >= modulus: + raise ValueError("private_exponent must be < modulus.") + + if public_exponent < 3 or public_exponent >= modulus: + raise ValueError("public_exponent must be >= 3 and < modulus.") + + if public_exponent & 1 == 0: + raise ValueError("public_exponent must be odd.") + + if dmp1 & 1 == 0: + raise ValueError("dmp1 must be odd.") + + if dmq1 & 1 == 0: + raise ValueError("dmq1 must be odd.") + + if p * q != modulus: + raise ValueError("p*q must equal modulus.") + + +def _check_public_key_components(e, n): + if n < 3: + raise ValueError("n must be >= 3.") + + if e < 3 or e >= n: + raise ValueError("e must be >= 3 and < n.") + + if e & 1 == 0: + raise ValueError("e must be odd.") + + class RSAPublicKey(object): def __init__(self, public_exponent, modulus): + warnings.warn( + "The RSAPublicKey class is deprecated and will be removed in a " + "future version.", + utils.DeprecatedIn05, + stacklevel=2 + ) if ( not isinstance(public_exponent, six.integer_types) or not isinstance(modulus, six.integer_types) ): - raise TypeError("RSAPublicKey arguments must be integers") + raise TypeError("RSAPublicKey arguments must be integers.") - if modulus < 3: - raise ValueError("modulus must be >= 3") - - if public_exponent < 3 or public_exponent >= modulus: - raise ValueError("public_exponent must be >= 3 and < modulus") - - if public_exponent & 1 == 0: - raise ValueError("public_exponent must be odd") + _check_public_key_components(public_exponent, modulus) self._public_exponent = public_exponent self._modulus = modulus @@ -45,7 +116,7 @@ class RSAPublicKey(object): def verifier(self, signature, padding, algorithm, backend): if not isinstance(backend, RSABackend): raise UnsupportedAlgorithm( - "Backend object does not implement RSABackend", + "Backend object does not implement RSABackend.", _Reasons.BACKEND_MISSING_INTERFACE ) @@ -55,7 +126,7 @@ class RSAPublicKey(object): def encrypt(self, plaintext, padding, backend): if not isinstance(backend, RSABackend): raise UnsupportedAlgorithm( - "Backend object does not implement RSABackend", + "Backend object does not implement RSABackend.", _Reasons.BACKEND_MISSING_INTERFACE ) @@ -118,10 +189,15 @@ def rsa_crt_dmq1(private_exponent, q): return private_exponent % (q - 1) 
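(Not part of the patch itself.) rsa.py gains a module-level generate_private_key() plus shared _verify_rsa_parameters / _check_*_components helpers, and the deprecated class constructors now delegate to them. A minimal sketch, assuming default_backend() implements RSABackend; the numbers round-trip at the end relies on the RSAPrivateNumbers/RSAPublicNumbers classes added later in this hunk and on a backend that supports the *WithNumbers interfaces:

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives.asymmetric import rsa

    backend = default_backend()

    # Replaces the deprecated RSAPrivateKey.generate() classmethod.
    private_key = rsa.generate_private_key(
        public_exponent=65537,
        key_size=2048,
        backend=backend,
    )

    # Flatten the key to integers and rebuild it (backend permitting).
    numbers = private_key.private_numbers()          # RSAPrivateNumbers
    restored = numbers.private_key(backend)
    public_key = numbers.public_numbers.public_key(backend)
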
-@utils.register_interface(interfaces.RSAPrivateKey) class RSAPrivateKey(object): def __init__(self, p, q, private_exponent, dmp1, dmq1, iqmp, public_exponent, modulus): + warnings.warn( + "The RSAPrivateKey class is deprecated and will be removed in a " + "future version.", + utils.DeprecatedIn05, + stacklevel=2 + ) if ( not isinstance(p, six.integer_types) or not isinstance(q, six.integer_types) or @@ -132,43 +208,10 @@ class RSAPrivateKey(object): not isinstance(public_exponent, six.integer_types) or not isinstance(modulus, six.integer_types) ): - raise TypeError("RSAPrivateKey arguments must be integers") + raise TypeError("RSAPrivateKey arguments must be integers.") - if modulus < 3: - raise ValueError("modulus must be >= 3") - - if p >= modulus: - raise ValueError("p must be < modulus") - - if q >= modulus: - raise ValueError("q must be < modulus") - - if dmp1 >= modulus: - raise ValueError("dmp1 must be < modulus") - - if dmq1 >= modulus: - raise ValueError("dmq1 must be < modulus") - - if iqmp >= modulus: - raise ValueError("iqmp must be < modulus") - - if private_exponent >= modulus: - raise ValueError("private_exponent must be < modulus") - - if public_exponent < 3 or public_exponent >= modulus: - raise ValueError("public_exponent must be >= 3 and < modulus") - - if public_exponent & 1 == 0: - raise ValueError("public_exponent must be odd") - - if dmp1 & 1 == 0: - raise ValueError("dmp1 must be odd") - - if dmq1 & 1 == 0: - raise ValueError("dmq1 must be odd") - - if p * q != modulus: - raise ValueError("p*q must equal modulus") + _check_private_key_components(p, q, private_exponent, dmp1, dmq1, iqmp, + public_exponent, modulus) self._p = p self._q = q @@ -181,18 +224,35 @@ class RSAPrivateKey(object): @classmethod def generate(cls, public_exponent, key_size, backend): + warnings.warn( + "generate is deprecated and will be removed in a future version.", + utils.DeprecatedIn05, + stacklevel=2 + ) if not isinstance(backend, RSABackend): raise UnsupportedAlgorithm( - "Backend object does not implement RSABackend", + "Backend object does not implement RSABackend.", _Reasons.BACKEND_MISSING_INTERFACE ) - return backend.generate_rsa_private_key(public_exponent, key_size) + _verify_rsa_parameters(public_exponent, key_size) + key = backend.generate_rsa_private_key(public_exponent, key_size) + private_numbers = key.private_numbers() + return RSAPrivateKey( + p=private_numbers.p, + q=private_numbers.q, + dmp1=private_numbers.dmp1, + dmq1=private_numbers.dmq1, + iqmp=private_numbers.iqmp, + private_exponent=private_numbers.d, + public_exponent=private_numbers.public_numbers.e, + modulus=private_numbers.public_numbers.n + ) def signer(self, padding, algorithm, backend): if not isinstance(backend, RSABackend): raise UnsupportedAlgorithm( - "Backend object does not implement RSABackend", + "Backend object does not implement RSABackend.", _Reasons.BACKEND_MISSING_INTERFACE ) @@ -201,7 +261,7 @@ class RSAPrivateKey(object): def decrypt(self, ciphertext, padding, backend): if not isinstance(backend, RSABackend): raise UnsupportedAlgorithm( - "Backend object does not implement RSABackend", + "Backend object does not implement RSABackend.", _Reasons.BACKEND_MISSING_INTERFACE ) @@ -257,3 +317,88 @@ class RSAPrivateKey(object): @property def n(self): return self.modulus + + +class RSAPrivateNumbers(object): + def __init__(self, p, q, d, dmp1, dmq1, iqmp, + public_numbers): + if ( + not isinstance(p, six.integer_types) or + not isinstance(q, six.integer_types) or + not isinstance(d, six.integer_types) 
or + not isinstance(dmp1, six.integer_types) or + not isinstance(dmq1, six.integer_types) or + not isinstance(iqmp, six.integer_types) + ): + raise TypeError( + "RSAPrivateNumbers p, q, d, dmp1, dmq1, iqmp arguments must" + " all be an integers." + ) + + if not isinstance(public_numbers, RSAPublicNumbers): + raise TypeError( + "RSAPrivateNumbers public_numbers must be an RSAPublicNumbers" + " instance." + ) + + self._p = p + self._q = q + self._d = d + self._dmp1 = dmp1 + self._dmq1 = dmq1 + self._iqmp = iqmp + self._public_numbers = public_numbers + + @property + def p(self): + return self._p + + @property + def q(self): + return self._q + + @property + def d(self): + return self._d + + @property + def dmp1(self): + return self._dmp1 + + @property + def dmq1(self): + return self._dmq1 + + @property + def iqmp(self): + return self._iqmp + + @property + def public_numbers(self): + return self._public_numbers + + def private_key(self, backend): + return backend.load_rsa_private_numbers(self) + + +class RSAPublicNumbers(object): + def __init__(self, e, n): + if ( + not isinstance(e, six.integer_types) or + not isinstance(n, six.integer_types) + ): + raise TypeError("RSAPublicNumbers arguments must be integers.") + + self._e = e + self._n = n + + @property + def e(self): + return self._e + + @property + def n(self): + return self._n + + def public_key(self, backend): + return backend.load_rsa_public_numbers(self) diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py index 52daf17..bd8437c 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/ciphers/algorithms.py @@ -20,7 +20,7 @@ from cryptography.hazmat.primitives import interfaces def _verify_key_size(algorithm, key): # Verify that the key size matches the expected key size if len(key) * 8 not in algorithm.key_sizes: - raise ValueError("Invalid key size ({0}) for {1}".format( + raise ValueError("Invalid key size ({0}) for {1}.".format( len(key) * 8, algorithm.name )) return key diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/ciphers/base.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/ciphers/base.py index 2274e94..e3fe5ad 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/ciphers/base.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/ciphers/base.py @@ -26,12 +26,14 @@ class Cipher(object): def __init__(self, algorithm, mode, backend): if not isinstance(backend, CipherBackend): raise UnsupportedAlgorithm( - "Backend object does not implement CipherBackend", + "Backend object does not implement CipherBackend.", _Reasons.BACKEND_MISSING_INTERFACE ) if not isinstance(algorithm, interfaces.CipherAlgorithm): - raise TypeError("Expected interface of interfaces.CipherAlgorithm") + raise TypeError( + "Expected interface of interfaces.CipherAlgorithm." + ) if mode is not None: mode.validate_for_algorithm(algorithm) @@ -44,7 +46,7 @@ class Cipher(object): if isinstance(self.mode, interfaces.ModeWithAuthenticationTag): if self.mode.tag is not None: raise ValueError( - "Authentication tag must be None when encrypting" + "Authentication tag must be None when encrypting." 
) ctx = self._backend.create_symmetric_encryption_ctx( self.algorithm, self.mode @@ -55,7 +57,7 @@ class Cipher(object): if isinstance(self.mode, interfaces.ModeWithAuthenticationTag): if self.mode.tag is None: raise ValueError( - "Authentication tag must be provided when decrypting" + "Authentication tag must be provided when decrypting." ) ctx = self._backend.create_symmetric_decryption_ctx( self.algorithm, self.mode @@ -79,12 +81,12 @@ class _CipherContext(object): def update(self, data): if self._ctx is None: - raise AlreadyFinalized("Context was already finalized") + raise AlreadyFinalized("Context was already finalized.") return self._ctx.update(data) def finalize(self): if self._ctx is None: - raise AlreadyFinalized("Context was already finalized") + raise AlreadyFinalized("Context was already finalized.") data = self._ctx.finalize() self._ctx = None return data @@ -100,13 +102,13 @@ class _AEADCipherContext(object): def update(self, data): if self._ctx is None: - raise AlreadyFinalized("Context was already finalized") + raise AlreadyFinalized("Context was already finalized.") self._updated = True return self._ctx.update(data) def finalize(self): if self._ctx is None: - raise AlreadyFinalized("Context was already finalized") + raise AlreadyFinalized("Context was already finalized.") data = self._ctx.finalize() self._tag = self._ctx.tag self._ctx = None @@ -114,9 +116,9 @@ class _AEADCipherContext(object): def authenticate_additional_data(self, data): if self._ctx is None: - raise AlreadyFinalized("Context was already finalized") + raise AlreadyFinalized("Context was already finalized.") if self._updated: - raise AlreadyUpdated("Update has been called on this context") + raise AlreadyUpdated("Update has been called on this context.") self._ctx.authenticate_additional_data(data) @@ -126,5 +128,5 @@ class _AEADEncryptionContext(_AEADCipherContext): def tag(self): if self._ctx is not None: raise NotYetFinalized("You must finalize encryption before " - "getting the tag") + "getting the tag.") return self._tag diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/ciphers/modes.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/ciphers/modes.py index 739f23d..509b4de 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/ciphers/modes.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/ciphers/modes.py @@ -17,6 +17,13 @@ from cryptography import utils from cryptography.hazmat.primitives import interfaces +def _check_iv_length(mode, algorithm): + if len(mode.initialization_vector) * 8 != algorithm.block_size: + raise ValueError("Invalid IV size ({0}) for {1}.".format( + len(mode.initialization_vector), mode.name + )) + + @utils.register_interface(interfaces.Mode) @utils.register_interface(interfaces.ModeWithInitializationVector) class CBC(object): @@ -25,11 +32,7 @@ class CBC(object): def __init__(self, initialization_vector): self.initialization_vector = initialization_vector - def validate_for_algorithm(self, algorithm): - if len(self.initialization_vector) * 8 != algorithm.block_size: - raise ValueError("Invalid iv size ({0}) for {1}".format( - len(self.initialization_vector), self.name - )) + validate_for_algorithm = _check_iv_length @utils.register_interface(interfaces.Mode) @@ -48,11 +51,7 @@ class OFB(object): def __init__(self, initialization_vector): self.initialization_vector = initialization_vector - def validate_for_algorithm(self, algorithm): - if 
len(self.initialization_vector) * 8 != algorithm.block_size: - raise ValueError("Invalid iv size ({0}) for {1}".format( - len(self.initialization_vector), self.name - )) + validate_for_algorithm = _check_iv_length @utils.register_interface(interfaces.Mode) @@ -63,11 +62,18 @@ class CFB(object): def __init__(self, initialization_vector): self.initialization_vector = initialization_vector - def validate_for_algorithm(self, algorithm): - if len(self.initialization_vector) * 8 != algorithm.block_size: - raise ValueError("Invalid iv size ({0}) for {1}".format( - len(self.initialization_vector), self.name - )) + validate_for_algorithm = _check_iv_length + + +@utils.register_interface(interfaces.Mode) +@utils.register_interface(interfaces.ModeWithInitializationVector) +class CFB8(object): + name = "CFB8" + + def __init__(self, initialization_vector): + self.initialization_vector = initialization_vector + + validate_for_algorithm = _check_iv_length @utils.register_interface(interfaces.Mode) @@ -80,7 +86,7 @@ class CTR(object): def validate_for_algorithm(self, algorithm): if len(self.nonce) * 8 != algorithm.block_size: - raise ValueError("Invalid nonce size ({0}) for {1}".format( + raise ValueError("Invalid nonce size ({0}) for {1}.".format( len(self.nonce), self.name )) @@ -91,13 +97,16 @@ class CTR(object): class GCM(object): name = "GCM" - def __init__(self, initialization_vector, tag=None): + def __init__(self, initialization_vector, tag=None, min_tag_length=16): # len(initialization_vector) must in [1, 2 ** 64), but it's impossible # to actually construct a bytes object that large, so we don't check # for it - if tag is not None and len(tag) < 4: + if min_tag_length < 4: + raise ValueError("min_tag_length must be >= 4") + if tag is not None and len(tag) < min_tag_length: raise ValueError( - "Authentication tag must be 4 bytes or longer" + "Authentication tag must be {0} bytes or longer.".format( + min_tag_length) ) self.initialization_vector = initialization_vector diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/cmac.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/cmac.py index 7e7f65a..fa463ae 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/cmac.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/cmac.py @@ -13,8 +13,6 @@ from __future__ import absolute_import, division, print_function -import six - from cryptography import utils from cryptography.exceptions import ( AlreadyFinalized, InvalidSignature, UnsupportedAlgorithm, _Reasons @@ -28,13 +26,13 @@ class CMAC(object): def __init__(self, algorithm, backend, ctx=None): if not isinstance(backend, CMACBackend): raise UnsupportedAlgorithm( - "Backend object does not implement CMACBackend", + "Backend object does not implement CMACBackend.", _Reasons.BACKEND_MISSING_INTERFACE ) if not isinstance(algorithm, interfaces.BlockCipherAlgorithm): raise TypeError( - "Expected instance of interfaces.BlockCipherAlgorithm" + "Expected instance of interfaces.BlockCipherAlgorithm." 
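(Not part of the patch itself.) modes.py folds the duplicated IV checks into _check_iv_length, adds the byte-wide CFB8 mode, and lets GCM accept a truncated authentication tag via the new min_tag_length argument. A sketch of both, assuming default_backend() supports AES in these modes:

    import os

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

    backend = default_backend()
    key = os.urandom(32)

    # CFB8 uses the same block-sized IV rule as CBC/CFB/OFB.
    iv = os.urandom(16)
    encryptor = Cipher(algorithms.AES(key), modes.CFB8(iv), backend).encryptor()
    ct = encryptor.update(b"secret data") + encryptor.finalize()

    # GCM: a tag shorter than 16 bytes is accepted when min_tag_length allows it.
    nonce = os.urandom(12)
    encryptor = Cipher(algorithms.AES(key), modes.GCM(nonce), backend).encryptor()
    ct = encryptor.update(b"secret data") + encryptor.finalize()
    short_tag = encryptor.tag[:8]

    decryptor = Cipher(
        algorithms.AES(key),
        modes.GCM(nonce, tag=short_tag, min_tag_length=8),
        backend,
    ).decryptor()
    pt = decryptor.update(ct) + decryptor.finalize()   # b"secret data"
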
) self._algorithm = algorithm @@ -46,28 +44,28 @@ class CMAC(object): def update(self, data): if self._ctx is None: - raise AlreadyFinalized("Context was already finalized") - if isinstance(data, six.text_type): - raise TypeError("Unicode-objects must be encoded before hashing") + raise AlreadyFinalized("Context was already finalized.") + if not isinstance(data, bytes): + raise TypeError("data must be bytes.") self._ctx.update(data) def finalize(self): if self._ctx is None: - raise AlreadyFinalized("Context was already finalized") + raise AlreadyFinalized("Context was already finalized.") digest = self._ctx.finalize() self._ctx = None return digest def verify(self, signature): - if isinstance(signature, six.text_type): - raise TypeError("Unicode-objects must be encoded before verifying") + if not isinstance(signature, bytes): + raise TypeError("signature must be bytes.") digest = self.finalize() if not constant_time.bytes_eq(digest, signature): raise InvalidSignature("Signature did not match digest.") def copy(self): if self._ctx is None: - raise AlreadyFinalized("Context was already finalized") + raise AlreadyFinalized("Context was already finalized.") return CMAC( self._algorithm, backend=self._backend, diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/constant_time.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/constant_time.py index e0e9aa3..9789851 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/constant_time.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/constant_time.py @@ -13,12 +13,11 @@ from __future__ import absolute_import, division, print_function +import hmac import sys import cffi -import six - from cryptography.hazmat.bindings.utils import _create_modulename TYPES = """ @@ -55,9 +54,18 @@ _lib = _ffi.verify( ext_package="cryptography", ) +if hasattr(hmac, "compare_digest"): + def bytes_eq(a, b): + if not isinstance(a, bytes) or not isinstance(b, bytes): + raise TypeError("a and b must be bytes.") -def bytes_eq(a, b): - if isinstance(a, six.text_type) or isinstance(b, six.text_type): - raise TypeError("Unicode-objects must be encoded before comparing") + return hmac.compare_digest(a, b) - return _lib.Cryptography_constant_time_bytes_eq(a, len(a), b, len(b)) == 1 +else: + def bytes_eq(a, b): + if not isinstance(a, bytes) or not isinstance(b, bytes): + raise TypeError("a and b must be bytes.") + + return _lib.Cryptography_constant_time_bytes_eq( + a, len(a), b, len(b) + ) == 1 diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/hashes.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/hashes.py index 35b677b..04f7620 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/hashes.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/hashes.py @@ -13,8 +13,6 @@ from __future__ import absolute_import, division, print_function -import six - from cryptography import utils from cryptography.exceptions import ( AlreadyFinalized, UnsupportedAlgorithm, _Reasons @@ -28,7 +26,7 @@ class Hash(object): def __init__(self, algorithm, backend, ctx=None): if not isinstance(backend, HashBackend): raise UnsupportedAlgorithm( - "Backend object does not implement HashBackend", + "Backend object does not implement HashBackend.", _Reasons.BACKEND_MISSING_INTERFACE ) @@ -45,21 +43,21 @@ class Hash(object): def update(self, data): if self._ctx is None: - raise 
AlreadyFinalized("Context was already finalized") - if isinstance(data, six.text_type): - raise TypeError("Unicode-objects must be encoded before hashing") + raise AlreadyFinalized("Context was already finalized.") + if not isinstance(data, bytes): + raise TypeError("data must be bytes.") self._ctx.update(data) def copy(self): if self._ctx is None: - raise AlreadyFinalized("Context was already finalized") + raise AlreadyFinalized("Context was already finalized.") return Hash( self.algorithm, backend=self._backend, ctx=self._ctx.copy() ) def finalize(self): if self._ctx is None: - raise AlreadyFinalized("Context was already finalized") + raise AlreadyFinalized("Context was already finalized.") digest = self._ctx.finalize() self._ctx = None return digest diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/hmac.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/hmac.py index afbb2f7..026ad3b 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/hmac.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/hmac.py @@ -13,8 +13,6 @@ from __future__ import absolute_import, division, print_function -import six - from cryptography import utils from cryptography.exceptions import ( AlreadyFinalized, InvalidSignature, UnsupportedAlgorithm, _Reasons @@ -28,7 +26,7 @@ class HMAC(object): def __init__(self, key, algorithm, backend, ctx=None): if not isinstance(backend, HMACBackend): raise UnsupportedAlgorithm( - "Backend object does not implement HMACBackend", + "Backend object does not implement HMACBackend.", _Reasons.BACKEND_MISSING_INTERFACE ) @@ -45,14 +43,14 @@ class HMAC(object): def update(self, msg): if self._ctx is None: - raise AlreadyFinalized("Context was already finalized") - if isinstance(msg, six.text_type): - raise TypeError("Unicode-objects must be encoded before hashing") + raise AlreadyFinalized("Context was already finalized.") + if not isinstance(msg, bytes): + raise TypeError("msg must be bytes.") self._ctx.update(msg) def copy(self): if self._ctx is None: - raise AlreadyFinalized("Context was already finalized") + raise AlreadyFinalized("Context was already finalized.") return HMAC( self._key, self.algorithm, @@ -62,14 +60,14 @@ class HMAC(object): def finalize(self): if self._ctx is None: - raise AlreadyFinalized("Context was already finalized") + raise AlreadyFinalized("Context was already finalized.") digest = self._ctx.finalize() self._ctx = None return digest def verify(self, signature): - if isinstance(signature, six.text_type): - raise TypeError("Unicode-objects must be encoded before verifying") + if not isinstance(signature, bytes): + raise TypeError("signature must be bytes.") digest = self.finalize() if not constant_time.bytes_eq(digest, signature): raise InvalidSignature("Signature did not match digest.") diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/interfaces.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/interfaces.py index 810a67a..d60f9e0 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/interfaces.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/interfaces.py @@ -186,27 +186,15 @@ class HashContext(object): @six.add_metaclass(abc.ABCMeta) class RSAPrivateKey(object): @abc.abstractmethod - def signer(self, padding, algorithm, backend): + def signer(self, padding, algorithm): """ Returns an AsymmetricSignatureContext used for signing 
data. """ - @abc.abstractproperty - def modulus(self): + @abc.abstractmethod + def decrypt(self, ciphertext, padding): """ - The public modulus of the RSA key. - """ - - @abc.abstractproperty - def public_exponent(self): - """ - The public exponent of the RSA key. - """ - - @abc.abstractproperty - def private_exponent(self): - """ - The private exponent of the RSA key. + Decrypts the provided ciphertext. """ @abc.abstractproperty @@ -221,77 +209,28 @@ class RSAPrivateKey(object): The RSAPublicKey associated with this private key. """ - @abc.abstractproperty - def n(self): - """ - The public modulus of the RSA key. Alias for modulus. - """ - @abc.abstractproperty - def p(self): +@six.add_metaclass(abc.ABCMeta) +class RSAPrivateKeyWithNumbers(RSAPrivateKey): + @abc.abstractmethod + def private_numbers(self): """ - One of the two primes used to generate d. - """ - - @abc.abstractproperty - def q(self): - """ - One of the two primes used to generate d. - """ - - @abc.abstractproperty - def d(self): - """ - The private exponent. This can be calculated using p and q. Alias for - private_exponent. - """ - - @abc.abstractproperty - def dmp1(self): - """ - A Chinese remainder theorem coefficient used to speed up RSA - calculations. Calculated as: d mod (p-1) - """ - - @abc.abstractproperty - def dmq1(self): - """ - A Chinese remainder theorem coefficient used to speed up RSA - calculations. Calculated as: d mod (q-1) - """ - - @abc.abstractproperty - def iqmp(self): - """ - A Chinese remainder theorem coefficient used to speed up RSA - calculations. The modular inverse of q modulo p - """ - - @abc.abstractproperty - def e(self): - """ - The public exponent of the RSA key. Alias for public_exponent. + Returns an RSAPrivateNumbers. """ @six.add_metaclass(abc.ABCMeta) class RSAPublicKey(object): @abc.abstractmethod - def verifier(self, signature, padding, algorithm, backend): + def verifier(self, signature, padding, algorithm): """ Returns an AsymmetricVerificationContext used for verifying signatures. """ - @abc.abstractproperty - def modulus(self): + @abc.abstractmethod + def encrypt(self, plaintext, padding): """ - The public modulus of the RSA key. - """ - - @abc.abstractproperty - def public_exponent(self): - """ - The public exponent of the RSA key. + Encrypts the given plaintext. """ @abc.abstractproperty @@ -300,63 +239,31 @@ class RSAPublicKey(object): The bit length of the public modulus. """ - @abc.abstractproperty - def n(self): - """ - The public modulus of the RSA key. Alias for modulus. - """ - @abc.abstractproperty - def e(self): +@six.add_metaclass(abc.ABCMeta) +class RSAPublicKeyWithNumbers(RSAPublicKey): + @abc.abstractmethod + def public_numbers(self): """ - The public exponent of the RSA key. Alias for public_exponent. + Returns an RSAPublicNumbers """ @six.add_metaclass(abc.ABCMeta) class DSAParameters(object): - @abc.abstractproperty - def modulus(self): + @abc.abstractmethod + def generate_private_key(self): """ - The prime modulus that's used in generating the DSA keypair and used - in the DSA signing and verification processes. + Generates and returns a DSAPrivateKey. """ - @abc.abstractproperty - def subgroup_order(self): - """ - The subgroup order that's used in generating the DSA keypair - by the generator and used in the DSA signing and verification - processes. 
- """ - @abc.abstractproperty - def generator(self): +@six.add_metaclass(abc.ABCMeta) +class DSAParametersWithNumbers(DSAParameters): + @abc.abstractmethod + def parameter_numbers(self): """ - The generator that is used in generating the DSA keypair and used - in the DSA signing and verification processes. - """ - - @abc.abstractproperty - def p(self): - """ - The prime modulus that's used in generating the DSA keypair and used - in the DSA signing and verification processes. Alias for modulus. - """ - - @abc.abstractproperty - def q(self): - """ - The subgroup order that's used in generating the DSA keypair - by the generator and used in the DSA signing and verification - processes. Alias for subgroup_order. - """ - - @abc.abstractproperty - def g(self): - """ - The generator that is used in generating the DSA keypair and used - in the DSA signing and verification processes. Alias for generator. + Returns a DSAParameterNumbers. """ @@ -374,18 +281,6 @@ class DSAPrivateKey(object): The DSAPublicKey associated with this private key. """ - @abc.abstractproperty - def x(self): - """ - The private key "x" in the DSA structure. - """ - - @abc.abstractproperty - def y(self): - """ - The public key. - """ - @abc.abstractmethod def parameters(self): """ @@ -393,6 +288,15 @@ class DSAPrivateKey(object): """ +@six.add_metaclass(abc.ABCMeta) +class DSAPrivateKeyWithNumbers(DSAPrivateKey): + @abc.abstractmethod + def private_numbers(self): + """ + Returns a DSAPrivateNumbers. + """ + + @six.add_metaclass(abc.ABCMeta) class DSAPublicKey(object): @abc.abstractproperty @@ -401,12 +305,6 @@ class DSAPublicKey(object): The bit length of the prime modulus. """ - @abc.abstractproperty - def y(self): - """ - The public key. - """ - @abc.abstractmethod def parameters(self): """ @@ -414,6 +312,15 @@ class DSAPublicKey(object): """ +@six.add_metaclass(abc.ABCMeta) +class DSAPublicKeyWithNumbers(DSAPublicKey): + @abc.abstractmethod + def public_numbers(self): + """ + Returns a DSAPublicNumbers. + """ + + @six.add_metaclass(abc.ABCMeta) class AsymmetricSignatureContext(object): @abc.abstractmethod @@ -489,3 +396,63 @@ class CMACContext(object): """ Return a CMACContext that is a copy of the current context. """ + + +@six.add_metaclass(abc.ABCMeta) +class EllipticCurve(object): + @abc.abstractproperty + def name(self): + """ + The name of the curve. e.g. secp256r1. + """ + + @abc.abstractproperty + def key_size(self): + """ + The bit length of the base point of the curve. + """ + + +@six.add_metaclass(abc.ABCMeta) +class EllipticCurveSignatureAlgorithm(object): + @abc.abstractproperty + def algorithm(self): + """ + The digest algorithm used with this signature. + """ + + +@six.add_metaclass(abc.ABCMeta) +class EllipticCurvePrivateKey(object): + @abc.abstractmethod + def signer(self, signature_algorithm): + """ + Returns an AsymmetricSignatureContext used for signing data. + """ + + @abc.abstractmethod + def public_key(self): + """ + The EllipticCurvePublicKey for this private key. + """ + + @abc.abstractproperty + def curve(self): + """ + The EllipticCurve that this key is on. + """ + + +@six.add_metaclass(abc.ABCMeta) +class EllipticCurvePublicKey(object): + @abc.abstractmethod + def verifier(self, signature, signature_algorithm): + """ + Returns an AsymmetricVerificationContext used for signing data. + """ + + @abc.abstractproperty + def curve(self): + """ + The EllipticCurve that this key is on. 
+ """ diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py index 03500aa..04d02b2 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/kdf/hkdf.py @@ -28,12 +28,53 @@ class HKDF(object): def __init__(self, algorithm, length, salt, info, backend): if not isinstance(backend, HMACBackend): raise UnsupportedAlgorithm( - "Backend object does not implement HMACBackend", + "Backend object does not implement HMACBackend.", _Reasons.BACKEND_MISSING_INTERFACE ) self._algorithm = algorithm + if not isinstance(salt, bytes) and salt is not None: + raise TypeError("salt must be bytes.") + + if salt is None: + salt = b"\x00" * (self._algorithm.digest_size // 8) + + self._salt = salt + + self._backend = backend + + self._hkdf_expand = HKDFExpand(self._algorithm, length, info, backend) + + def _extract(self, key_material): + h = hmac.HMAC(self._salt, self._algorithm, backend=self._backend) + h.update(key_material) + return h.finalize() + + def derive(self, key_material): + if not isinstance(key_material, bytes): + raise TypeError("key_material must be bytes.") + + return self._hkdf_expand.derive(self._extract(key_material)) + + def verify(self, key_material, expected_key): + if not constant_time.bytes_eq(self.derive(key_material), expected_key): + raise InvalidKey + + +@utils.register_interface(interfaces.KeyDerivationFunction) +class HKDFExpand(object): + def __init__(self, algorithm, length, info, backend): + if not isinstance(backend, HMACBackend): + raise UnsupportedAlgorithm( + "Backend object does not implement HMACBackend.", + _Reasons.BACKEND_MISSING_INTERFACE + ) + + self._algorithm = algorithm + + self._backend = backend + max_length = 255 * (algorithm.digest_size // 8) if length > max_length: @@ -44,32 +85,16 @@ class HKDF(object): self._length = length - if isinstance(salt, six.text_type): - raise TypeError( - "Unicode-objects must be encoded before using them as a salt.") - - if salt is None: - salt = b"\x00" * (self._algorithm.digest_size // 8) - - self._salt = salt - - if isinstance(info, six.text_type): - raise TypeError( - "Unicode-objects must be encoded before using them as info.") + if not isinstance(info, bytes) and info is not None: + raise TypeError("info must be bytes.") if info is None: info = b"" self._info = info - self._backend = backend self._used = False - def _extract(self, key_material): - h = hmac.HMAC(self._salt, self._algorithm, backend=self._backend) - h.update(key_material) - return h.finalize() - def _expand(self, key_material): output = [b""] counter = 1 @@ -85,17 +110,14 @@ class HKDF(object): return b"".join(output)[:self._length] def derive(self, key_material): - if isinstance(key_material, six.text_type): - raise TypeError( - "Unicode-objects must be encoded before using them as key " - "material." 
- ) + if not isinstance(key_material, bytes): + raise TypeError("key_material must be bytes.") if self._used: raise AlreadyFinalized self._used = True - return self._expand(self._extract(key_material)) + return self._expand(key_material) def verify(self, key_material, expected_key): if not constant_time.bytes_eq(self.derive(key_material), expected_key): diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py index bec35bb..97b6408 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/kdf/pbkdf2.py @@ -13,8 +13,6 @@ from __future__ import absolute_import, division, print_function -import six - from cryptography import utils from cryptography.exceptions import ( AlreadyFinalized, InvalidKey, UnsupportedAlgorithm, _Reasons @@ -28,38 +26,32 @@ class PBKDF2HMAC(object): def __init__(self, algorithm, length, salt, iterations, backend): if not isinstance(backend, PBKDF2HMACBackend): raise UnsupportedAlgorithm( - "Backend object does not implement PBKDF2HMACBackend", + "Backend object does not implement PBKDF2HMACBackend.", _Reasons.BACKEND_MISSING_INTERFACE ) if not backend.pbkdf2_hmac_supported(algorithm): raise UnsupportedAlgorithm( - "{0} is not supported for PBKDF2 by this backend".format( + "{0} is not supported for PBKDF2 by this backend.".format( algorithm.name), _Reasons.UNSUPPORTED_HASH ) self._used = False self._algorithm = algorithm self._length = length - if isinstance(salt, six.text_type): - raise TypeError( - "Unicode-objects must be encoded before using them as key " - "material." - ) + if not isinstance(salt, bytes): + raise TypeError("salt must be bytes.") self._salt = salt self._iterations = iterations self._backend = backend def derive(self, key_material): if self._used: - raise AlreadyFinalized("PBKDF2 instances can only be used once") + raise AlreadyFinalized("PBKDF2 instances can only be used once.") self._used = True - if isinstance(key_material, six.text_type): - raise TypeError( - "Unicode-objects must be encoded before using them as key " - "material." 
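(Not part of the patch itself.) hkdf.py splits the expand stage out into HKDFExpand so callers that already hold a uniformly random pseudorandom key can skip the extract step, while HKDF composes the two stages as before. A small sketch, assuming default_backend() provides HMACBackend:

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import hashes
    from cryptography.hazmat.primitives.kdf.hkdf import HKDF, HKDFExpand

    backend = default_backend()

    # Extract-then-expand, as before (salt=None still means a zeroed salt).
    hkdf = HKDF(hashes.SHA256(), 32, salt=None, info=b"app context", backend=backend)
    okm = hkdf.derive(b"input keying material")

    # Expand-only, for callers that already hold a PRK.
    prk = b"\x00" * 32   # stand-in for a real pseudorandom key, illustration only
    okm2 = HKDFExpand(hashes.SHA256(), 32, info=b"app context", backend=backend).derive(prk)
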
- ) + if not isinstance(key_material, bytes): + raise TypeError("key_material must be bytes.") return self._backend.derive_pbkdf2_hmac( self._algorithm, self._length, diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/padding.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/padding.py index c1a763b..74f1ef2 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/padding.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/padding.py @@ -79,10 +79,10 @@ _lib = _ffi.verify( class PKCS7(object): def __init__(self, block_size): if not (0 <= block_size < 256): - raise ValueError("block_size must be in range(0, 256)") + raise ValueError("block_size must be in range(0, 256).") if block_size % 8 != 0: - raise ValueError("block_size must be a multiple of 8") + raise ValueError("block_size must be a multiple of 8.") self.block_size = block_size @@ -102,10 +102,10 @@ class _PKCS7PaddingContext(object): def update(self, data): if self._buffer is None: - raise AlreadyFinalized("Context was already finalized") + raise AlreadyFinalized("Context was already finalized.") - if isinstance(data, six.text_type): - raise TypeError("Unicode-objects must be encoded before padding") + if not isinstance(data, bytes): + raise TypeError("data must be bytes.") self._buffer += data @@ -118,7 +118,7 @@ class _PKCS7PaddingContext(object): def finalize(self): if self._buffer is None: - raise AlreadyFinalized("Context was already finalized") + raise AlreadyFinalized("Context was already finalized.") pad_size = self.block_size // 8 - len(self._buffer) result = self._buffer + six.int2byte(pad_size) * pad_size @@ -135,10 +135,10 @@ class _PKCS7UnpaddingContext(object): def update(self, data): if self._buffer is None: - raise AlreadyFinalized("Context was already finalized") + raise AlreadyFinalized("Context was already finalized.") - if isinstance(data, six.text_type): - raise TypeError("Unicode-objects must be encoded before unpadding") + if not isinstance(data, bytes): + raise TypeError("data must be bytes.") self._buffer += data @@ -154,17 +154,17 @@ class _PKCS7UnpaddingContext(object): def finalize(self): if self._buffer is None: - raise AlreadyFinalized("Context was already finalized") + raise AlreadyFinalized("Context was already finalized.") if len(self._buffer) != self.block_size // 8: - raise ValueError("Invalid padding bytes") + raise ValueError("Invalid padding bytes.") valid = _lib.Cryptography_check_pkcs7_padding( self._buffer, self.block_size // 8 ) if not valid: - raise ValueError("Invalid padding bytes") + raise ValueError("Invalid padding bytes.") pad_size = six.indexbytes(self._buffer, -1) res = self._buffer[:-pad_size] diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/serialization.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/serialization.py new file mode 100644 index 0000000..ed73c4c --- /dev/null +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/serialization.py @@ -0,0 +1,26 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import, division, print_function + + +def load_pem_traditional_openssl_private_key(data, password, backend): + return backend.load_traditional_openssl_pem_private_key( + data, password + ) + + +def load_pem_pkcs8_private_key(data, password, backend): + return backend.load_pkcs8_pem_private_key( + data, password + ) diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py index 41c467c..d0b476a 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/twofactor/hotp.py @@ -29,7 +29,7 @@ class HOTP(object): def __init__(self, key, length, algorithm, backend): if not isinstance(backend, HMACBackend): raise UnsupportedAlgorithm( - "Backend object does not implement HMACBackend", + "Backend object does not implement HMACBackend.", _Reasons.BACKEND_MISSING_INTERFACE ) @@ -37,13 +37,13 @@ class HOTP(object): raise ValueError("Key length has to be at least 128 bits.") if not isinstance(length, six.integer_types): - raise TypeError("Length parameter must be an integer type") + raise TypeError("Length parameter must be an integer type.") if length < 6 or length > 8: raise ValueError("Length of HOTP has to be between 6 to 8.") if not isinstance(algorithm, (SHA1, SHA256, SHA512)): - raise TypeError("Algorithm must be SHA1, SHA256 or SHA512") + raise TypeError("Algorithm must be SHA1, SHA256 or SHA512.") self._key = key self._length = length @@ -57,15 +57,13 @@ class HOTP(object): def verify(self, hotp, counter): if not constant_time.bytes_eq(self.generate(counter), hotp): - raise InvalidToken("Supplied HOTP value does not match") + raise InvalidToken("Supplied HOTP value does not match.") def _dynamic_truncate(self, counter): ctx = hmac.HMAC(self._key, self._algorithm, self._backend) ctx.update(struct.pack(">Q", counter)) hmac_value = ctx.finalize() - offset_bits = six.indexbytes(hmac_value, len(hmac_value) - 1) & 0b1111 - - offset = int(offset_bits) + offset = six.indexbytes(hmac_value, len(hmac_value) - 1) & 0b1111 p = hmac_value[offset:offset + 4] return struct.unpack(">I", p)[0] & 0x7fffffff diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/twofactor/totp.py b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/twofactor/totp.py index e55ba00..854c516 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/twofactor/totp.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/hazmat/primitives/twofactor/totp.py @@ -25,7 +25,7 @@ class TOTP(object): def __init__(self, key, length, algorithm, time_step, backend): if not isinstance(backend, HMACBackend): raise UnsupportedAlgorithm( - "Backend object does not implement HMACBackend", + "Backend object does not implement HMACBackend.", _Reasons.BACKEND_MISSING_INTERFACE ) @@ -38,4 +38,4 @@ class TOTP(object): def verify(self, totp, time): if not 
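(Not part of the patch itself.) The new serialization.py simply dispatches to the backend's PEM loaders. A sketch of loading an unencrypted PKCS#8 key; the file name is made up for illustration and the call assumes the backend implements the corresponding PKCS#8 serialization support:

    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives import serialization

    with open("server_key.pem", "rb") as f:   # hypothetical path
        pem_data = f.read()

    private_key = serialization.load_pem_pkcs8_private_key(
        pem_data, password=None, backend=default_backend()
    )
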
constant_time.bytes_eq(self.generate(time), totp): - raise InvalidToken("Supplied TOTP value does not match") + raise InvalidToken("Supplied TOTP value does not match.") diff --git a/Linux_i686/lib/python2.7/site-packages/cryptography/utils.py b/Linux_i686/lib/python2.7/site-packages/cryptography/utils.py index 5566d12..1db1615 100644 --- a/Linux_i686/lib/python2.7/site-packages/cryptography/utils.py +++ b/Linux_i686/lib/python2.7/site-packages/cryptography/utils.py @@ -16,7 +16,8 @@ from __future__ import absolute_import, division, print_function import sys -DeprecatedIn04 = PendingDeprecationWarning +DeprecatedIn04 = DeprecationWarning +DeprecatedIn05 = PendingDeprecationWarning def register_interface(iface): diff --git a/Linux_i686/lib/python2.7/site-packages/flask/__init__.py b/Linux_i686/lib/python2.7/site-packages/flask/__init__.py deleted file mode 100644 index 3fd8908..0000000 --- a/Linux_i686/lib/python2.7/site-packages/flask/__init__.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -""" - flask - ~~~~~ - - A microframework based on Werkzeug. It's extensively documented - and follows best practice patterns. - - :copyright: (c) 2011 by Armin Ronacher. - :license: BSD, see LICENSE for more details. -""" - -__version__ = '0.10.1' - -# utilities we import from Werkzeug and Jinja2 that are unused -# in the module but are exported as public interface. -from werkzeug.exceptions import abort -from werkzeug.utils import redirect -from jinja2 import Markup, escape - -from .app import Flask, Request, Response -from .config import Config -from .helpers import url_for, flash, send_file, send_from_directory, \ - get_flashed_messages, get_template_attribute, make_response, safe_join, \ - stream_with_context -from .globals import current_app, g, request, session, _request_ctx_stack, \ - _app_ctx_stack -from .ctx import has_request_context, has_app_context, \ - after_this_request, copy_current_request_context -from .module import Module -from .blueprints import Blueprint -from .templating import render_template, render_template_string - -# the signals -from .signals import signals_available, template_rendered, request_started, \ - request_finished, got_request_exception, request_tearing_down, \ - appcontext_tearing_down, appcontext_pushed, \ - appcontext_popped, message_flashed - -# We're not exposing the actual json module but a convenient wrapper around -# it. -from . import json - -# This was the only thing that flask used to export at one point and it had -# a more generic name. -jsonify = json.jsonify - -# backwards compat, goes away in 1.0 -from .sessions import SecureCookieSession as Session -json_available = True diff --git a/Linux_i686/lib/python2.7/site-packages/flask/_compat.py b/Linux_i686/lib/python2.7/site-packages/flask/_compat.py deleted file mode 100644 index c342884..0000000 --- a/Linux_i686/lib/python2.7/site-packages/flask/_compat.py +++ /dev/null @@ -1,73 +0,0 @@ -# -*- coding: utf-8 -*- -""" - flask._compat - ~~~~~~~~~~~~~ - - Some py2/py3 compatibility support based on a stripped down - version of six so we don't have to depend on a specific version - of it. - - :copyright: (c) 2013 by Armin Ronacher. - :license: BSD, see LICENSE for more details. 
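The utils.py hunk above promotes DeprecatedIn04 to a full DeprecationWarning and introduces DeprecatedIn05 for the next release line. A sketch of how such versioned warning classes are typically raised (the helper function and message are hypothetical):

    import warnings

    # Mirrors the pattern in cryptography.utils: one warning class per release line.
    DeprecatedIn04 = DeprecationWarning
    DeprecatedIn05 = PendingDeprecationWarning

    def old_helper():
        warnings.warn(
            "old_helper is deprecated and will be removed in a future release.",
            DeprecatedIn04,
            stacklevel=2,
        )
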
-""" -import sys - -PY2 = sys.version_info[0] == 2 -_identity = lambda x: x - - -if not PY2: - text_type = str - string_types = (str,) - integer_types = (int, ) - - iterkeys = lambda d: iter(d.keys()) - itervalues = lambda d: iter(d.values()) - iteritems = lambda d: iter(d.items()) - - from io import StringIO - - def reraise(tp, value, tb=None): - if value.__traceback__ is not tb: - raise value.with_traceback(tb) - raise value - - implements_to_string = _identity - -else: - text_type = unicode - string_types = (str, unicode) - integer_types = (int, long) - - iterkeys = lambda d: d.iterkeys() - itervalues = lambda d: d.itervalues() - iteritems = lambda d: d.iteritems() - - from cStringIO import StringIO - - exec('def reraise(tp, value, tb=None):\n raise tp, value, tb') - - def implements_to_string(cls): - cls.__unicode__ = cls.__str__ - cls.__str__ = lambda x: x.__unicode__().encode('utf-8') - return cls - - -def with_metaclass(meta, *bases): - # This requires a bit of explanation: the basic idea is to make a - # dummy metaclass for one level of class instantiation that replaces - # itself with the actual metaclass. Because of internal type checks - # we also need to make sure that we downgrade the custom metaclass - # for one level to something closer to type (that's why __call__ and - # __init__ comes back from type etc.). - # - # This has the advantage over six.with_metaclass in that it does not - # introduce dummy classes into the final MRO. - class metaclass(meta): - __call__ = type.__call__ - __init__ = type.__init__ - def __new__(cls, name, this_bases, d): - if this_bases is None: - return type.__new__(cls, name, (), d) - return meta(name, bases, d) - return metaclass('temporary_class', None, {}) diff --git a/Linux_i686/lib/python2.7/site-packages/flask/app.py b/Linux_i686/lib/python2.7/site-packages/flask/app.py deleted file mode 100644 index addc40b..0000000 --- a/Linux_i686/lib/python2.7/site-packages/flask/app.py +++ /dev/null @@ -1,1842 +0,0 @@ -# -*- coding: utf-8 -*- -""" - flask.app - ~~~~~~~~~ - - This module implements the central WSGI application object. - - :copyright: (c) 2011 by Armin Ronacher. - :license: BSD, see LICENSE for more details. -""" - -import os -import sys -from threading import Lock -from datetime import timedelta -from itertools import chain -from functools import update_wrapper - -from werkzeug.datastructures import ImmutableDict -from werkzeug.routing import Map, Rule, RequestRedirect, BuildError -from werkzeug.exceptions import HTTPException, InternalServerError, \ - MethodNotAllowed, BadRequest - -from .helpers import _PackageBoundObject, url_for, get_flashed_messages, \ - locked_cached_property, _endpoint_from_view_func, find_package -from . 
import json -from .wrappers import Request, Response -from .config import ConfigAttribute, Config -from .ctx import RequestContext, AppContext, _AppCtxGlobals -from .globals import _request_ctx_stack, request, session, g -from .sessions import SecureCookieSessionInterface -from .module import blueprint_is_module -from .templating import DispatchingJinjaLoader, Environment, \ - _default_template_ctx_processor -from .signals import request_started, request_finished, got_request_exception, \ - request_tearing_down, appcontext_tearing_down -from ._compat import reraise, string_types, text_type, integer_types - -# a lock used for logger initialization -_logger_lock = Lock() - - -def _make_timedelta(value): - if not isinstance(value, timedelta): - return timedelta(seconds=value) - return value - - -def setupmethod(f): - """Wraps a method so that it performs a check in debug mode if the - first request was already handled. - """ - def wrapper_func(self, *args, **kwargs): - if self.debug and self._got_first_request: - raise AssertionError('A setup function was called after the ' - 'first request was handled. This usually indicates a bug ' - 'in the application where a module was not imported ' - 'and decorators or other functionality was called too late.\n' - 'To fix this make sure to import all your view modules, ' - 'database models and everything related at a central place ' - 'before the application starts serving requests.') - return f(self, *args, **kwargs) - return update_wrapper(wrapper_func, f) - - -class Flask(_PackageBoundObject): - """The flask object implements a WSGI application and acts as the central - object. It is passed the name of the module or package of the - application. Once it is created it will act as a central registry for - the view functions, the URL rules, template configuration and much more. - - The name of the package is used to resolve resources from inside the - package or the folder the module is contained in depending on if the - package parameter resolves to an actual python package (a folder with - an `__init__.py` file inside) or a standard module (just a `.py` file). - - For more information about resource loading, see :func:`open_resource`. - - Usually you create a :class:`Flask` instance in your main module or - in the `__init__.py` file of your package like this:: - - from flask import Flask - app = Flask(__name__) - - .. admonition:: About the First Parameter - - The idea of the first parameter is to give Flask an idea what - belongs to your application. This name is used to find resources - on the file system, can be used by extensions to improve debugging - information and a lot more. - - So it's important what you provide there. If you are using a single - module, `__name__` is always the correct value. If you however are - using a package, it's usually recommended to hardcode the name of - your package there. - - For example if your application is defined in `yourapplication/app.py` - you should create it with one of the two versions below:: - - app = Flask('yourapplication') - app = Flask(__name__.split('.')[0]) - - Why is that? The application will work even with `__name__`, thanks - to how resources are looked up. However it will make debugging more - painful. Certain extensions can make assumptions based on the - import name of your application. For example the Flask-SQLAlchemy - extension will look for the code in your application that triggered - an SQL query in debug mode. 
If the import name is not properly set - up, that debugging information is lost. (For example it would only - pick up SQL queries in `yourapplication.app` and not - `yourapplication.views.frontend`) - - .. versionadded:: 0.7 - The `static_url_path`, `static_folder`, and `template_folder` - parameters were added. - - .. versionadded:: 0.8 - The `instance_path` and `instance_relative_config` parameters were - added. - - :param import_name: the name of the application package - :param static_url_path: can be used to specify a different path for the - static files on the web. Defaults to the name - of the `static_folder` folder. - :param static_folder: the folder with static files that should be served - at `static_url_path`. Defaults to the ``'static'`` - folder in the root path of the application. - :param template_folder: the folder that contains the templates that should - be used by the application. Defaults to - ``'templates'`` folder in the root path of the - application. - :param instance_path: An alternative instance path for the application. - By default the folder ``'instance'`` next to the - package or module is assumed to be the instance - path. - :param instance_relative_config: if set to `True` relative filenames - for loading the config are assumed to - be relative to the instance path instead - of the application root. - """ - - #: The class that is used for request objects. See :class:`~flask.Request` - #: for more information. - request_class = Request - - #: The class that is used for response objects. See - #: :class:`~flask.Response` for more information. - response_class = Response - - #: The class that is used for the :data:`~flask.g` instance. - #: - #: Example use cases for a custom class: - #: - #: 1. Store arbitrary attributes on flask.g. - #: 2. Add a property for lazy per-request database connectors. - #: 3. Return None instead of AttributeError on expected attributes. - #: 4. Raise exception if an unexpected attr is set, a "controlled" flask.g. - #: - #: In Flask 0.9 this property was called `request_globals_class` but it - #: was changed in 0.10 to :attr:`app_ctx_globals_class` because the - #: flask.g object is not application context scoped. - #: - #: .. versionadded:: 0.10 - app_ctx_globals_class = _AppCtxGlobals - - # Backwards compatibility support - def _get_request_globals_class(self): - return self.app_ctx_globals_class - def _set_request_globals_class(self, value): - from warnings import warn - warn(DeprecationWarning('request_globals_class attribute is now ' - 'called app_ctx_globals_class')) - self.app_ctx_globals_class = value - request_globals_class = property(_get_request_globals_class, - _set_request_globals_class) - del _get_request_globals_class, _set_request_globals_class - - #: The debug flag. Set this to `True` to enable debugging of the - #: application. In debug mode the debugger will kick in when an unhandled - #: exception occurs and the integrated server will automatically reload - #: the application if changes in the code are detected. - #: - #: This attribute can also be configured from the config with the `DEBUG` - #: configuration key. Defaults to `False`. - debug = ConfigAttribute('DEBUG') - - #: The testing flag. Set this to `True` to enable the test mode of - #: Flask extensions (and in the future probably also Flask itself). - #: For example this might activate unittest helpers that have an - #: additional runtime cost which should not be enabled by default. 
- #: - #: If this is enabled and PROPAGATE_EXCEPTIONS is not changed from the - #: default it's implicitly enabled. - #: - #: This attribute can also be configured from the config with the - #: `TESTING` configuration key. Defaults to `False`. - testing = ConfigAttribute('TESTING') - - #: If a secret key is set, cryptographic components can use this to - #: sign cookies and other things. Set this to a complex random value - #: when you want to use the secure cookie for instance. - #: - #: This attribute can also be configured from the config with the - #: `SECRET_KEY` configuration key. Defaults to `None`. - secret_key = ConfigAttribute('SECRET_KEY') - - #: The secure cookie uses this for the name of the session cookie. - #: - #: This attribute can also be configured from the config with the - #: `SESSION_COOKIE_NAME` configuration key. Defaults to ``'session'`` - session_cookie_name = ConfigAttribute('SESSION_COOKIE_NAME') - - #: A :class:`~datetime.timedelta` which is used to set the expiration - #: date of a permanent session. The default is 31 days which makes a - #: permanent session survive for roughly one month. - #: - #: This attribute can also be configured from the config with the - #: `PERMANENT_SESSION_LIFETIME` configuration key. Defaults to - #: ``timedelta(days=31)`` - permanent_session_lifetime = ConfigAttribute('PERMANENT_SESSION_LIFETIME', - get_converter=_make_timedelta) - - #: Enable this if you want to use the X-Sendfile feature. Keep in - #: mind that the server has to support this. This only affects files - #: sent with the :func:`send_file` method. - #: - #: .. versionadded:: 0.2 - #: - #: This attribute can also be configured from the config with the - #: `USE_X_SENDFILE` configuration key. Defaults to `False`. - use_x_sendfile = ConfigAttribute('USE_X_SENDFILE') - - #: The name of the logger to use. By default the logger name is the - #: package name passed to the constructor. - #: - #: .. versionadded:: 0.4 - logger_name = ConfigAttribute('LOGGER_NAME') - - #: Enable the deprecated module support? This is active by default - #: in 0.7 but will be changed to False in 0.8. With Flask 1.0 modules - #: will be removed in favor of Blueprints - enable_modules = True - - #: The logging format used for the debug logger. This is only used when - #: the application is in debug mode, otherwise the attached logging - #: handler does the formatting. - #: - #: .. versionadded:: 0.3 - debug_log_format = ( - '-' * 80 + '\n' + - '%(levelname)s in %(module)s [%(pathname)s:%(lineno)d]:\n' + - '%(message)s\n' + - '-' * 80 - ) - - #: The JSON encoder class to use. Defaults to :class:`~flask.json.JSONEncoder`. - #: - #: .. versionadded:: 0.10 - json_encoder = json.JSONEncoder - - #: The JSON decoder class to use. Defaults to :class:`~flask.json.JSONDecoder`. - #: - #: .. versionadded:: 0.10 - json_decoder = json.JSONDecoder - - #: Options that are passed directly to the Jinja2 environment. - jinja_options = ImmutableDict( - extensions=['jinja2.ext.autoescape', 'jinja2.ext.with_'] - ) - - #: Default configuration parameters. 
- default_config = ImmutableDict({ - 'DEBUG': False, - 'TESTING': False, - 'PROPAGATE_EXCEPTIONS': None, - 'PRESERVE_CONTEXT_ON_EXCEPTION': None, - 'SECRET_KEY': None, - 'PERMANENT_SESSION_LIFETIME': timedelta(days=31), - 'USE_X_SENDFILE': False, - 'LOGGER_NAME': None, - 'SERVER_NAME': None, - 'APPLICATION_ROOT': None, - 'SESSION_COOKIE_NAME': 'session', - 'SESSION_COOKIE_DOMAIN': None, - 'SESSION_COOKIE_PATH': None, - 'SESSION_COOKIE_HTTPONLY': True, - 'SESSION_COOKIE_SECURE': False, - 'MAX_CONTENT_LENGTH': None, - 'SEND_FILE_MAX_AGE_DEFAULT': 12 * 60 * 60, # 12 hours - 'TRAP_BAD_REQUEST_ERRORS': False, - 'TRAP_HTTP_EXCEPTIONS': False, - 'PREFERRED_URL_SCHEME': 'http', - 'JSON_AS_ASCII': True, - 'JSON_SORT_KEYS': True, - 'JSONIFY_PRETTYPRINT_REGULAR': True, - }) - - #: The rule object to use for URL rules created. This is used by - #: :meth:`add_url_rule`. Defaults to :class:`werkzeug.routing.Rule`. - #: - #: .. versionadded:: 0.7 - url_rule_class = Rule - - #: the test client that is used with when `test_client` is used. - #: - #: .. versionadded:: 0.7 - test_client_class = None - - #: the session interface to use. By default an instance of - #: :class:`~flask.sessions.SecureCookieSessionInterface` is used here. - #: - #: .. versionadded:: 0.8 - session_interface = SecureCookieSessionInterface() - - def __init__(self, import_name, static_path=None, static_url_path=None, - static_folder='static', template_folder='templates', - instance_path=None, instance_relative_config=False): - _PackageBoundObject.__init__(self, import_name, - template_folder=template_folder) - if static_path is not None: - from warnings import warn - warn(DeprecationWarning('static_path is now called ' - 'static_url_path'), stacklevel=2) - static_url_path = static_path - - if static_url_path is not None: - self.static_url_path = static_url_path - if static_folder is not None: - self.static_folder = static_folder - if instance_path is None: - instance_path = self.auto_find_instance_path() - elif not os.path.isabs(instance_path): - raise ValueError('If an instance path is provided it must be ' - 'absolute. A relative path was given instead.') - - #: Holds the path to the instance folder. - #: - #: .. versionadded:: 0.8 - self.instance_path = instance_path - - #: The configuration dictionary as :class:`Config`. This behaves - #: exactly like a regular dictionary but supports additional methods - #: to load a config from files. - self.config = self.make_config(instance_relative_config) - - # Prepare the deferred setup of the logger. - self._logger = None - self.logger_name = self.import_name - - #: A dictionary of all view functions registered. The keys will - #: be function names which are also used to generate URLs and - #: the values are the function objects themselves. - #: To register a view function, use the :meth:`route` decorator. - self.view_functions = {} - - # support for the now deprecated `error_handlers` attribute. The - # :attr:`error_handler_spec` shall be used now. - self._error_handlers = {} - - #: A dictionary of all registered error handlers. The key is `None` - #: for error handlers active on the application, otherwise the key is - #: the name of the blueprint. Each key points to another dictionary - #: where they key is the status code of the http exception. The - #: special key `None` points to a list of tuples where the first item - #: is the class for the instance check and the second the error handler - #: function. 
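The default_config mapping and the ConfigAttribute descriptors above mean the same setting can be reached either as an attribute on the application or as a key in app.config. A short sketch (the key values are illustrative):

    from datetime import timedelta
    from flask import Flask

    app = Flask(__name__)

    # Override a few of the defaults listed in default_config.
    app.config.update(
        SECRET_KEY='change-me',
        SESSION_COOKIE_NAME='myapp_session',
        PERMANENT_SESSION_LIFETIME=timedelta(hours=12),
    )

    # ConfigAttribute proxies write through to the same dictionary.
    app.debug = True
    assert app.config['DEBUG'] is True
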
- #: - #: To register a error handler, use the :meth:`errorhandler` - #: decorator. - self.error_handler_spec = {None: self._error_handlers} - - #: A list of functions that are called when :meth:`url_for` raises a - #: :exc:`~werkzeug.routing.BuildError`. Each function registered here - #: is called with `error`, `endpoint` and `values`. If a function - #: returns `None` or raises a `BuildError` the next function is - #: tried. - #: - #: .. versionadded:: 0.9 - self.url_build_error_handlers = [] - - #: A dictionary with lists of functions that should be called at the - #: beginning of the request. The key of the dictionary is the name of - #: the blueprint this function is active for, `None` for all requests. - #: This can for example be used to open database connections or - #: getting hold of the currently logged in user. To register a - #: function here, use the :meth:`before_request` decorator. - self.before_request_funcs = {} - - #: A lists of functions that should be called at the beginning of the - #: first request to this instance. To register a function here, use - #: the :meth:`before_first_request` decorator. - #: - #: .. versionadded:: 0.8 - self.before_first_request_funcs = [] - - #: A dictionary with lists of functions that should be called after - #: each request. The key of the dictionary is the name of the blueprint - #: this function is active for, `None` for all requests. This can for - #: example be used to open database connections or getting hold of the - #: currently logged in user. To register a function here, use the - #: :meth:`after_request` decorator. - self.after_request_funcs = {} - - #: A dictionary with lists of functions that are called after - #: each request, even if an exception has occurred. The key of the - #: dictionary is the name of the blueprint this function is active for, - #: `None` for all requests. These functions are not allowed to modify - #: the request, and their return values are ignored. If an exception - #: occurred while processing the request, it gets passed to each - #: teardown_request function. To register a function here, use the - #: :meth:`teardown_request` decorator. - #: - #: .. versionadded:: 0.7 - self.teardown_request_funcs = {} - - #: A list of functions that are called when the application context - #: is destroyed. Since the application context is also torn down - #: if the request ends this is the place to store code that disconnects - #: from databases. - #: - #: .. versionadded:: 0.9 - self.teardown_appcontext_funcs = [] - - #: A dictionary with lists of functions that can be used as URL - #: value processor functions. Whenever a URL is built these functions - #: are called to modify the dictionary of values in place. The key - #: `None` here is used for application wide - #: callbacks, otherwise the key is the name of the blueprint. - #: Each of these functions has the chance to modify the dictionary - #: - #: .. versionadded:: 0.7 - self.url_value_preprocessors = {} - - #: A dictionary with lists of functions that can be used as URL value - #: preprocessors. The key `None` here is used for application wide - #: callbacks, otherwise the key is the name of the blueprint. - #: Each of these functions has the chance to modify the dictionary - #: of URL values before they are used as the keyword arguments of the - #: view function. For each function registered this one should also - #: provide a :meth:`url_defaults` function that adds the parameters - #: automatically again that were removed that way. - #: - #: .. 
versionadded:: 0.7 - self.url_default_functions = {} - - #: A dictionary with list of functions that are called without argument - #: to populate the template context. The key of the dictionary is the - #: name of the blueprint this function is active for, `None` for all - #: requests. Each returns a dictionary that the template context is - #: updated with. To register a function here, use the - #: :meth:`context_processor` decorator. - self.template_context_processors = { - None: [_default_template_ctx_processor] - } - - #: all the attached blueprints in a dictionary by name. Blueprints - #: can be attached multiple times so this dictionary does not tell - #: you how often they got attached. - #: - #: .. versionadded:: 0.7 - self.blueprints = {} - - #: a place where extensions can store application specific state. For - #: example this is where an extension could store database engines and - #: similar things. For backwards compatibility extensions should register - #: themselves like this:: - #: - #: if not hasattr(app, 'extensions'): - #: app.extensions = {} - #: app.extensions['extensionname'] = SomeObject() - #: - #: The key must match the name of the `flaskext` module. For example in - #: case of a "Flask-Foo" extension in `flaskext.foo`, the key would be - #: ``'foo'``. - #: - #: .. versionadded:: 0.7 - self.extensions = {} - - #: The :class:`~werkzeug.routing.Map` for this instance. You can use - #: this to change the routing converters after the class was created - #: but before any routes are connected. Example:: - #: - #: from werkzeug.routing import BaseConverter - #: - #: class ListConverter(BaseConverter): - #: def to_python(self, value): - #: return value.split(',') - #: def to_url(self, values): - #: return ','.join(BaseConverter.to_url(value) - #: for value in values) - #: - #: app = Flask(__name__) - #: app.url_map.converters['list'] = ListConverter - self.url_map = Map() - - # tracks internally if the application already handled at least one - # request. - self._got_first_request = False - self._before_request_lock = Lock() - - # register the static folder for the application. Do that even - # if the folder does not exist. First of all it might be created - # while the server is running (usually happens during development) - # but also because google appengine stores static files somewhere - # else when mapped with the .yml file. - if self.has_static_folder: - self.add_url_rule(self.static_url_path + '/', - endpoint='static', - view_func=self.send_static_file) - - def _get_error_handlers(self): - from warnings import warn - warn(DeprecationWarning('error_handlers is deprecated, use the ' - 'new error_handler_spec attribute instead.'), stacklevel=1) - return self._error_handlers - def _set_error_handlers(self, value): - self._error_handlers = value - self.error_handler_spec[None] = value - error_handlers = property(_get_error_handlers, _set_error_handlers) - del _get_error_handlers, _set_error_handlers - - @locked_cached_property - def name(self): - """The name of the application. This is usually the import name - with the difference that it's guessed from the run file if the - import name is main. This name is used as a display name when - Flask needs the name of the application. It can be set and overridden - to change the value. - - .. 
versionadded:: 0.8 - """ - if self.import_name == '__main__': - fn = getattr(sys.modules['__main__'], '__file__', None) - if fn is None: - return '__main__' - return os.path.splitext(os.path.basename(fn))[0] - return self.import_name - - @property - def propagate_exceptions(self): - """Returns the value of the `PROPAGATE_EXCEPTIONS` configuration - value in case it's set, otherwise a sensible default is returned. - - .. versionadded:: 0.7 - """ - rv = self.config['PROPAGATE_EXCEPTIONS'] - if rv is not None: - return rv - return self.testing or self.debug - - @property - def preserve_context_on_exception(self): - """Returns the value of the `PRESERVE_CONTEXT_ON_EXCEPTION` - configuration value in case it's set, otherwise a sensible default - is returned. - - .. versionadded:: 0.7 - """ - rv = self.config['PRESERVE_CONTEXT_ON_EXCEPTION'] - if rv is not None: - return rv - return self.debug - - @property - def logger(self): - """A :class:`logging.Logger` object for this application. The - default configuration is to log to stderr if the application is - in debug mode. This logger can be used to (surprise) log messages. - Here some examples:: - - app.logger.debug('A value for debugging') - app.logger.warning('A warning occurred (%d apples)', 42) - app.logger.error('An error occurred') - - .. versionadded:: 0.3 - """ - if self._logger and self._logger.name == self.logger_name: - return self._logger - with _logger_lock: - if self._logger and self._logger.name == self.logger_name: - return self._logger - from flask.logging import create_logger - self._logger = rv = create_logger(self) - return rv - - @locked_cached_property - def jinja_env(self): - """The Jinja2 environment used to load templates.""" - return self.create_jinja_environment() - - @property - def got_first_request(self): - """This attribute is set to `True` if the application started - handling the first request. - - .. versionadded:: 0.8 - """ - return self._got_first_request - - def make_config(self, instance_relative=False): - """Used to create the config attribute by the Flask constructor. - The `instance_relative` parameter is passed in from the constructor - of Flask (there named `instance_relative_config`) and indicates if - the config should be relative to the instance path or the root path - of the application. - - .. versionadded:: 0.8 - """ - root_path = self.root_path - if instance_relative: - root_path = self.instance_path - return Config(root_path, self.default_config) - - def auto_find_instance_path(self): - """Tries to locate the instance path if it was not provided to the - constructor of the application class. It will basically calculate - the path to a folder named ``instance`` next to your main file or - the package. - - .. versionadded:: 0.8 - """ - prefix, package_path = find_package(self.import_name) - if prefix is None: - return os.path.join(package_path, 'instance') - return os.path.join(prefix, 'var', self.name + '-instance') - - def open_instance_resource(self, resource, mode='rb'): - """Opens a resource from the application's instance folder - (:attr:`instance_path`). Otherwise works like - :meth:`open_resource`. Instance resources can also be opened for - writing. - - :param resource: the name of the resource. To access resources within - subfolders use forward slashes as separator. - :param mode: resource file opening mode, default is 'rb'. 
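make_config and auto_find_instance_path above are what allow configuration to live outside the package. A sketch of the instance-relative pattern (the config file name is illustrative):

    from flask import Flask

    # With instance_relative_config=True, make_config() roots the Config object
    # at the instance folder located by auto_find_instance_path().
    app = Flask(__name__, instance_relative_config=True)
    app.config.from_pyfile('application.cfg', silent=True)

    # open_instance_resource() resolves data files against the same folder, e.g.:
    # with app.open_instance_resource('motd.txt') as f:
    #     message_of_the_day = f.read()
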
- """ - return open(os.path.join(self.instance_path, resource), mode) - - def create_jinja_environment(self): - """Creates the Jinja2 environment based on :attr:`jinja_options` - and :meth:`select_jinja_autoescape`. Since 0.7 this also adds - the Jinja2 globals and filters after initialization. Override - this function to customize the behavior. - - .. versionadded:: 0.5 - """ - options = dict(self.jinja_options) - if 'autoescape' not in options: - options['autoescape'] = self.select_jinja_autoescape - rv = Environment(self, **options) - rv.globals.update( - url_for=url_for, - get_flashed_messages=get_flashed_messages, - config=self.config, - # request, session and g are normally added with the - # context processor for efficiency reasons but for imported - # templates we also want the proxies in there. - request=request, - session=session, - g=g - ) - rv.filters['tojson'] = json.tojson_filter - return rv - - def create_global_jinja_loader(self): - """Creates the loader for the Jinja2 environment. Can be used to - override just the loader and keeping the rest unchanged. It's - discouraged to override this function. Instead one should override - the :meth:`jinja_loader` function instead. - - The global loader dispatches between the loaders of the application - and the individual blueprints. - - .. versionadded:: 0.7 - """ - return DispatchingJinjaLoader(self) - - def init_jinja_globals(self): - """Deprecated. Used to initialize the Jinja2 globals. - - .. versionadded:: 0.5 - .. versionchanged:: 0.7 - This method is deprecated with 0.7. Override - :meth:`create_jinja_environment` instead. - """ - - def select_jinja_autoescape(self, filename): - """Returns `True` if autoescaping should be active for the given - template name. - - .. versionadded:: 0.5 - """ - if filename is None: - return False - return filename.endswith(('.html', '.htm', '.xml', '.xhtml')) - - def update_template_context(self, context): - """Update the template context with some commonly used variables. - This injects request, session, config and g into the template - context as well as everything template context processors want - to inject. Note that the as of Flask 0.6, the original values - in the context will not be overridden if a context processor - decides to return a value with the same key. - - :param context: the context as a dictionary that is updated in place - to add extra variables. - """ - funcs = self.template_context_processors[None] - reqctx = _request_ctx_stack.top - if reqctx is not None: - bp = reqctx.request.blueprint - if bp is not None and bp in self.template_context_processors: - funcs = chain(funcs, self.template_context_processors[bp]) - orig_ctx = context.copy() - for func in funcs: - context.update(func()) - # make sure the original values win. This makes it possible to - # easier add new variables in context processors without breaking - # existing views. - context.update(orig_ctx) - - def run(self, host=None, port=None, debug=None, **options): - """Runs the application on a local development server. If the - :attr:`debug` flag is set the server will automatically reload - for code changes and show a debugger in case an exception happened. - - If you want to run the application in debug mode, but disable the - code execution on the interactive debugger, you can pass - ``use_evalex=False`` as parameter. This will keep the debugger's - traceback screen active, but disable code execution. - - .. 
admonition:: Keep in Mind - - Flask will suppress any server error with a generic error page - unless it is in debug mode. As such to enable just the - interactive debugger without the code reloading, you have to - invoke :meth:`run` with ``debug=True`` and ``use_reloader=False``. - Setting ``use_debugger`` to `True` without being in debug mode - won't catch any exceptions because there won't be any to - catch. - - .. versionchanged:: 0.10 - The default port is now picked from the ``SERVER_NAME`` variable. - - :param host: the hostname to listen on. Set this to ``'0.0.0.0'`` to - have the server available externally as well. Defaults to - ``'127.0.0.1'``. - :param port: the port of the webserver. Defaults to ``5000`` or the - port defined in the ``SERVER_NAME`` config variable if - present. - :param debug: if given, enable or disable debug mode. - See :attr:`debug`. - :param options: the options to be forwarded to the underlying - Werkzeug server. See - :func:`werkzeug.serving.run_simple` for more - information. - """ - from werkzeug.serving import run_simple - if host is None: - host = '127.0.0.1' - if port is None: - server_name = self.config['SERVER_NAME'] - if server_name and ':' in server_name: - port = int(server_name.rsplit(':', 1)[1]) - else: - port = 5000 - if debug is not None: - self.debug = bool(debug) - options.setdefault('use_reloader', self.debug) - options.setdefault('use_debugger', self.debug) - try: - run_simple(host, port, self, **options) - finally: - # reset the first request information if the development server - # resetted normally. This makes it possible to restart the server - # without reloader and that stuff from an interactive shell. - self._got_first_request = False - - def test_client(self, use_cookies=True): - """Creates a test client for this application. For information - about unit testing head over to :ref:`testing`. - - Note that if you are testing for assertions or exceptions in your - application code, you must set ``app.testing = True`` in order for the - exceptions to propagate to the test client. Otherwise, the exception - will be handled by the application (not visible to the test client) and - the only indication of an AssertionError or other exception will be a - 500 status code response to the test client. See the :attr:`testing` - attribute. For example:: - - app.testing = True - client = app.test_client() - - The test client can be used in a `with` block to defer the closing down - of the context until the end of the `with` block. This is useful if - you want to access the context locals for testing:: - - with app.test_client() as c: - rv = c.get('/?vodka=42') - assert request.args['vodka'] == '42' - - See :class:`~flask.testing.FlaskClient` for more information. - - .. versionchanged:: 0.4 - added support for `with` block usage for the client. - - .. versionadded:: 0.7 - The `use_cookies` parameter was added as well as the ability - to override the client to be used by setting the - :attr:`test_client_class` attribute. - """ - cls = self.test_client_class - if cls is None: - from flask.testing import FlaskClient as cls - return cls(self, self.response_class, use_cookies=use_cookies) - - def open_session(self, request): - """Creates or opens a new session. Default implementation stores all - session data in a signed cookie. This requires that the - :attr:`secret_key` is set. Instead of overriding this method - we recommend replacing the :class:`session_interface`. - - :param request: an instance of :attr:`request_class`. 
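Tying the run() parameters above together, a typical development entry point looks like the following sketch (host and port are simply the documented defaults):

    from flask import Flask

    app = Flask(__name__)

    @app.route('/')
    def index():
        return 'Hello World'

    if __name__ == '__main__':
        # debug=True turns on the reloader and the interactive debugger described above.
        app.run(host='127.0.0.1', port=5000, debug=True)
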
- """ - return self.session_interface.open_session(self, request) - - def save_session(self, session, response): - """Saves the session if it needs updates. For the default - implementation, check :meth:`open_session`. Instead of overriding this - method we recommend replacing the :class:`session_interface`. - - :param session: the session to be saved (a - :class:`~werkzeug.contrib.securecookie.SecureCookie` - object) - :param response: an instance of :attr:`response_class` - """ - return self.session_interface.save_session(self, session, response) - - def make_null_session(self): - """Creates a new instance of a missing session. Instead of overriding - this method we recommend replacing the :class:`session_interface`. - - .. versionadded:: 0.7 - """ - return self.session_interface.make_null_session(self) - - def register_module(self, module, **options): - """Registers a module with this application. The keyword argument - of this function are the same as the ones for the constructor of the - :class:`Module` class and will override the values of the module if - provided. - - .. versionchanged:: 0.7 - The module system was deprecated in favor for the blueprint - system. - """ - assert blueprint_is_module(module), 'register_module requires ' \ - 'actual module objects. Please upgrade to blueprints though.' - if not self.enable_modules: - raise RuntimeError('Module support was disabled but code ' - 'attempted to register a module named %r' % module) - else: - from warnings import warn - warn(DeprecationWarning('Modules are deprecated. Upgrade to ' - 'using blueprints. Have a look into the documentation for ' - 'more information. If this module was registered by a ' - 'Flask-Extension upgrade the extension or contact the author ' - 'of that extension instead. (Registered %r)' % module), - stacklevel=2) - - self.register_blueprint(module, **options) - - @setupmethod - def register_blueprint(self, blueprint, **options): - """Registers a blueprint on the application. - - .. versionadded:: 0.7 - """ - first_registration = False - if blueprint.name in self.blueprints: - assert self.blueprints[blueprint.name] is blueprint, \ - 'A blueprint\'s name collision occurred between %r and ' \ - '%r. Both share the same name "%s". Blueprints that ' \ - 'are created on the fly need unique names.' % \ - (blueprint, self.blueprints[blueprint.name], blueprint.name) - else: - self.blueprints[blueprint.name] = blueprint - first_registration = True - blueprint.register(self, options, first_registration) - - @setupmethod - def add_url_rule(self, rule, endpoint=None, view_func=None, **options): - """Connects a URL rule. Works exactly like the :meth:`route` - decorator. If a view_func is provided it will be registered with the - endpoint. - - Basically this example:: - - @app.route('/') - def index(): - pass - - Is equivalent to the following:: - - def index(): - pass - app.add_url_rule('/', 'index', index) - - If the view_func is not provided you will need to connect the endpoint - to a view function like so:: - - app.view_functions['index'] = index - - Internally :meth:`route` invokes :meth:`add_url_rule` so if you want - to customize the behavior via subclassing you only need to change - this method. - - For more information refer to :ref:`url-route-registrations`. - - .. versionchanged:: 0.2 - `view_func` parameter added. - - .. versionchanged:: 0.6 - `OPTIONS` is added automatically as method. - - :param rule: the URL rule as string - :param endpoint: the endpoint for the registered URL rule. 
Flask - itself assumes the name of the view function as - endpoint - :param view_func: the function to call when serving a request to the - provided endpoint - :param options: the options to be forwarded to the underlying - :class:`~werkzeug.routing.Rule` object. A change - to Werkzeug is handling of method options. methods - is a list of methods this rule should be limited - to (`GET`, `POST` etc.). By default a rule - just listens for `GET` (and implicitly `HEAD`). - Starting with Flask 0.6, `OPTIONS` is implicitly - added and handled by the standard request handling. - """ - if endpoint is None: - endpoint = _endpoint_from_view_func(view_func) - options['endpoint'] = endpoint - methods = options.pop('methods', None) - - # if the methods are not given and the view_func object knows its - # methods we can use that instead. If neither exists, we go with - # a tuple of only `GET` as default. - if methods is None: - methods = getattr(view_func, 'methods', None) or ('GET',) - methods = set(methods) - - # Methods that should always be added - required_methods = set(getattr(view_func, 'required_methods', ())) - - # starting with Flask 0.8 the view_func object can disable and - # force-enable the automatic options handling. - provide_automatic_options = getattr(view_func, - 'provide_automatic_options', None) - - if provide_automatic_options is None: - if 'OPTIONS' not in methods: - provide_automatic_options = True - required_methods.add('OPTIONS') - else: - provide_automatic_options = False - - # Add the required methods now. - methods |= required_methods - - # due to a werkzeug bug we need to make sure that the defaults are - # None if they are an empty dictionary. This should not be necessary - # with Werkzeug 0.7 - options['defaults'] = options.get('defaults') or None - - rule = self.url_rule_class(rule, methods=methods, **options) - rule.provide_automatic_options = provide_automatic_options - - self.url_map.add(rule) - if view_func is not None: - old_func = self.view_functions.get(endpoint) - if old_func is not None and old_func != view_func: - raise AssertionError('View function mapping is overwriting an ' - 'existing endpoint function: %s' % endpoint) - self.view_functions[endpoint] = view_func - - def route(self, rule, **options): - """A decorator that is used to register a view function for a - given URL rule. This does the same thing as :meth:`add_url_rule` - but is intended for decorator usage:: - - @app.route('/') - def index(): - return 'Hello World' - - For more information refer to :ref:`url-route-registrations`. - - :param rule: the URL rule as string - :param endpoint: the endpoint for the registered URL rule. Flask - itself assumes the name of the view function as - endpoint - :param options: the options to be forwarded to the underlying - :class:`~werkzeug.routing.Rule` object. A change - to Werkzeug is handling of method options. methods - is a list of methods this rule should be limited - to (`GET`, `POST` etc.). By default a rule - just listens for `GET` (and implicitly `HEAD`). - Starting with Flask 0.6, `OPTIONS` is implicitly - added and handled by the standard request handling. - """ - def decorator(f): - endpoint = options.pop('endpoint', None) - self.add_url_rule(rule, endpoint, f, **options) - return f - return decorator - - @setupmethod - def endpoint(self, endpoint): - """A decorator to register a function as an endpoint. 
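As the add_url_rule docstring above notes, the route decorator is only a thin wrapper around it. Both registrations in this sketch end up in app.url_map and app.view_functions (the rule strings and view names are illustrative):

    from flask import Flask

    app = Flask(__name__)

    @app.route('/hello')
    def hello():
        return 'Hello World'

    # Equivalent registration without the decorator; HEAD and OPTIONS are added implicitly.
    def goodbye():
        return 'Goodbye'
    app.add_url_rule('/goodbye', 'goodbye', goodbye, methods=['GET', 'POST'])

    assert 'hello' in app.view_functions and 'goodbye' in app.view_functions
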
- Example:: - - @app.endpoint('example.endpoint') - def example(): - return "example" - - :param endpoint: the name of the endpoint - """ - def decorator(f): - self.view_functions[endpoint] = f - return f - return decorator - - @setupmethod - def errorhandler(self, code_or_exception): - """A decorator that is used to register a function give a given - error code. Example:: - - @app.errorhandler(404) - def page_not_found(error): - return 'This page does not exist', 404 - - You can also register handlers for arbitrary exceptions:: - - @app.errorhandler(DatabaseError) - def special_exception_handler(error): - return 'Database connection failed', 500 - - You can also register a function as error handler without using - the :meth:`errorhandler` decorator. The following example is - equivalent to the one above:: - - def page_not_found(error): - return 'This page does not exist', 404 - app.error_handler_spec[None][404] = page_not_found - - Setting error handlers via assignments to :attr:`error_handler_spec` - however is discouraged as it requires fiddling with nested dictionaries - and the special case for arbitrary exception types. - - The first `None` refers to the active blueprint. If the error - handler should be application wide `None` shall be used. - - .. versionadded:: 0.7 - One can now additionally also register custom exception types - that do not necessarily have to be a subclass of the - :class:`~werkzeug.exceptions.HTTPException` class. - - :param code: the code as integer for the handler - """ - def decorator(f): - self._register_error_handler(None, code_or_exception, f) - return f - return decorator - - def register_error_handler(self, code_or_exception, f): - """Alternative error attach function to the :meth:`errorhandler` - decorator that is more straightforward to use for non decorator - usage. - - .. versionadded:: 0.7 - """ - self._register_error_handler(None, code_or_exception, f) - - @setupmethod - def _register_error_handler(self, key, code_or_exception, f): - if isinstance(code_or_exception, HTTPException): - code_or_exception = code_or_exception.code - if isinstance(code_or_exception, integer_types): - assert code_or_exception != 500 or key is None, \ - 'It is currently not possible to register a 500 internal ' \ - 'server error on a per-blueprint level.' - self.error_handler_spec.setdefault(key, {})[code_or_exception] = f - else: - self.error_handler_spec.setdefault(key, {}).setdefault(None, []) \ - .append((code_or_exception, f)) - - @setupmethod - def template_filter(self, name=None): - """A decorator that is used to register custom template filter. - You can specify a name for the filter, otherwise the function - name will be used. Example:: - - @app.template_filter() - def reverse(s): - return s[::-1] - - :param name: the optional name of the filter, otherwise the - function name will be used. - """ - def decorator(f): - self.add_template_filter(f, name=name) - return f - return decorator - - @setupmethod - def add_template_filter(self, f, name=None): - """Register a custom template filter. Works exactly like the - :meth:`template_filter` decorator. - - :param name: the optional name of the filter, otherwise the - function name will be used. - """ - self.jinja_env.filters[name or f.__name__] = f - - @setupmethod - def template_test(self, name=None): - """A decorator that is used to register custom template test. - You can specify a name for the test, otherwise the function - name will be used. 
Example:: - - @app.template_test() - def is_prime(n): - if n == 2: - return True - for i in range(2, int(math.ceil(math.sqrt(n))) + 1): - if n % i == 0: - return False - return True - - .. versionadded:: 0.10 - - :param name: the optional name of the test, otherwise the - function name will be used. - """ - def decorator(f): - self.add_template_test(f, name=name) - return f - return decorator - - @setupmethod - def add_template_test(self, f, name=None): - """Register a custom template test. Works exactly like the - :meth:`template_test` decorator. - - .. versionadded:: 0.10 - - :param name: the optional name of the test, otherwise the - function name will be used. - """ - self.jinja_env.tests[name or f.__name__] = f - - - @setupmethod - def template_global(self, name=None): - """A decorator that is used to register a custom template global function. - You can specify a name for the global function, otherwise the function - name will be used. Example:: - - @app.template_global() - def double(n): - return 2 * n - - .. versionadded:: 0.10 - - :param name: the optional name of the global function, otherwise the - function name will be used. - """ - def decorator(f): - self.add_template_global(f, name=name) - return f - return decorator - - @setupmethod - def add_template_global(self, f, name=None): - """Register a custom template global function. Works exactly like the - :meth:`template_global` decorator. - - .. versionadded:: 0.10 - - :param name: the optional name of the global function, otherwise the - function name will be used. - """ - self.jinja_env.globals[name or f.__name__] = f - - @setupmethod - def before_request(self, f): - """Registers a function to run before each request.""" - self.before_request_funcs.setdefault(None, []).append(f) - return f - - @setupmethod - def before_first_request(self, f): - """Registers a function to be run before the first request to this - instance of the application. - - .. versionadded:: 0.8 - """ - self.before_first_request_funcs.append(f) - - @setupmethod - def after_request(self, f): - """Register a function to be run after each request. Your function - must take one parameter, a :attr:`response_class` object and return - a new response object or the same (see :meth:`process_response`). - - As of Flask 0.7 this function might not be executed at the end of the - request in case an unhandled exception occurred. - """ - self.after_request_funcs.setdefault(None, []).append(f) - return f - - @setupmethod - def teardown_request(self, f): - """Register a function to be run at the end of each request, - regardless of whether there was an exception or not. These functions - are executed when the request context is popped, even if not an - actual request was performed. - - Example:: - - ctx = app.test_request_context() - ctx.push() - ... - ctx.pop() - - When ``ctx.pop()`` is executed in the above example, the teardown - functions are called just before the request context moves from the - stack of active contexts. This becomes relevant if you are using - such constructs in tests. - - Generally teardown functions must take every necessary step to avoid - that they will fail. If they do execute code that might fail they - will have to surround the execution of these code by try/except - statements and log occurring errors. - - When a teardown function was called because of a exception it will - be passed an error object. - - .. admonition:: Debug Note - - In debug mode Flask will not tear down a request on an exception - immediately. 
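after_request above requires the hook to accept the response object and return one; a common use is attaching headers, as in this sketch (the header name and value are illustrative):

    from flask import Flask

    app = Flask(__name__)

    @app.after_request
    def add_security_headers(response):
        # Receives the response_class instance and must return one (possibly the same).
        response.headers['X-Content-Type-Options'] = 'nosniff'
        return response
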
Instead if will keep it alive so that the interactive - debugger can still access it. This behavior can be controlled - by the ``PRESERVE_CONTEXT_ON_EXCEPTION`` configuration variable. - """ - self.teardown_request_funcs.setdefault(None, []).append(f) - return f - - @setupmethod - def teardown_appcontext(self, f): - """Registers a function to be called when the application context - ends. These functions are typically also called when the request - context is popped. - - Example:: - - ctx = app.app_context() - ctx.push() - ... - ctx.pop() - - When ``ctx.pop()`` is executed in the above example, the teardown - functions are called just before the app context moves from the - stack of active contexts. This becomes relevant if you are using - such constructs in tests. - - Since a request context typically also manages an application - context it would also be called when you pop a request context. - - When a teardown function was called because of an exception it will - be passed an error object. - - .. versionadded:: 0.9 - """ - self.teardown_appcontext_funcs.append(f) - return f - - @setupmethod - def context_processor(self, f): - """Registers a template context processor function.""" - self.template_context_processors[None].append(f) - return f - - @setupmethod - def url_value_preprocessor(self, f): - """Registers a function as URL value preprocessor for all view - functions of the application. It's called before the view functions - are called and can modify the url values provided. - """ - self.url_value_preprocessors.setdefault(None, []).append(f) - return f - - @setupmethod - def url_defaults(self, f): - """Callback function for URL defaults for all view functions of the - application. It's called with the endpoint and values and should - update the values passed in place. - """ - self.url_default_functions.setdefault(None, []).append(f) - return f - - def handle_http_exception(self, e): - """Handles an HTTP exception. By default this will invoke the - registered error handlers and fall back to returning the - exception as response. - - .. versionadded:: 0.3 - """ - handlers = self.error_handler_spec.get(request.blueprint) - # Proxy exceptions don't have error codes. We want to always return - # those unchanged as errors - if e.code is None: - return e - if handlers and e.code in handlers: - handler = handlers[e.code] - else: - handler = self.error_handler_spec[None].get(e.code) - if handler is None: - return e - return handler(e) - - def trap_http_exception(self, e): - """Checks if an HTTP exception should be trapped or not. By default - this will return `False` for all exceptions except for a bad request - key error if ``TRAP_BAD_REQUEST_ERRORS`` is set to `True`. It - also returns `True` if ``TRAP_HTTP_EXCEPTIONS`` is set to `True`. - - This is called for all HTTP exceptions raised by a view function. - If it returns `True` for any exception the error handler for this - exception is not called and it shows up as regular exception in the - traceback. This is helpful for debugging implicitly raised HTTP - exceptions. - - .. versionadded:: 0.8 - """ - if self.config['TRAP_HTTP_EXCEPTIONS']: - return True - if self.config['TRAP_BAD_REQUEST_ERRORS']: - return isinstance(e, BadRequest) - return False - - def handle_user_exception(self, e): - """This method is called whenever an exception occurs that should be - handled. A special case are - :class:`~werkzeug.exception.HTTPException`\s which are forwarded by - this function to the :meth:`handle_http_exception` method. 
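The teardown hooks above are where per-context resources get released. A sketch of the database pattern the docstrings allude to (sqlite3, the attribute name, and the file name are illustrative):

    import sqlite3

    from flask import Flask, g

    app = Flask(__name__)

    def get_db():
        # Open one connection per application context and cache it on flask.g.
        if getattr(g, '_db', None) is None:
            g._db = sqlite3.connect('app.db')
        return g._db

    @app.teardown_appcontext
    def close_db(exception):
        # Runs when the app context is popped, even if the request raised.
        db = getattr(g, '_db', None)
        if db is not None:
            db.close()
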
This - function will either return a response value or reraise the - exception with the same traceback. - - .. versionadded:: 0.7 - """ - exc_type, exc_value, tb = sys.exc_info() - assert exc_value is e - - # ensure not to trash sys.exc_info() at that point in case someone - # wants the traceback preserved in handle_http_exception. Of course - # we cannot prevent users from trashing it themselves in a custom - # trap_http_exception method so that's their fault then. - if isinstance(e, HTTPException) and not self.trap_http_exception(e): - return self.handle_http_exception(e) - - blueprint_handlers = () - handlers = self.error_handler_spec.get(request.blueprint) - if handlers is not None: - blueprint_handlers = handlers.get(None, ()) - app_handlers = self.error_handler_spec[None].get(None, ()) - for typecheck, handler in chain(blueprint_handlers, app_handlers): - if isinstance(e, typecheck): - return handler(e) - - reraise(exc_type, exc_value, tb) - - def handle_exception(self, e): - """Default exception handling that kicks in when an exception - occurs that is not caught. In debug mode the exception will - be re-raised immediately, otherwise it is logged and the handler - for a 500 internal server error is used. If no such handler - exists, a default 500 internal server error message is displayed. - - .. versionadded:: 0.3 - """ - exc_type, exc_value, tb = sys.exc_info() - - got_request_exception.send(self, exception=e) - handler = self.error_handler_spec[None].get(500) - - if self.propagate_exceptions: - # if we want to repropagate the exception, we can attempt to - # raise it with the whole traceback in case we can do that - # (the function was actually called from the except part) - # otherwise, we just raise the error again - if exc_value is e: - reraise(exc_type, exc_value, tb) - else: - raise e - - self.log_exception((exc_type, exc_value, tb)) - if handler is None: - return InternalServerError() - return handler(e) - - def log_exception(self, exc_info): - """Logs an exception. This is called by :meth:`handle_exception` - if debugging is disabled and right before the handler is called. - The default implementation logs the exception as error on the - :attr:`logger`. - - .. versionadded:: 0.8 - """ - self.logger.error('Exception on %s [%s]' % ( - request.path, - request.method - ), exc_info=exc_info) - - def raise_routing_exception(self, request): - """Exceptions that are recording during routing are reraised with - this method. During debug we are not reraising redirect requests - for non ``GET``, ``HEAD``, or ``OPTIONS`` requests and we're raising - a different error instead to help debug situations. - - :internal: - """ - if not self.debug \ - or not isinstance(request.routing_exception, RequestRedirect) \ - or request.method in ('GET', 'HEAD', 'OPTIONS'): - raise request.routing_exception - - from .debughelpers import FormDataRoutingRedirect - raise FormDataRoutingRedirect(request) - - def dispatch_request(self): - """Does the request dispatching. Matches the URL and returns the - return value of the view or error handler. This does not have to - be a response object. In order to convert the return value to a - proper response object, call :func:`make_response`. - - .. versionchanged:: 0.7 - This no longer does the exception handling, this code was - moved to the new :meth:`full_dispatch_request`. 
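handle_exception above fires the got_request_exception signal before any 500 handler runs, which is the hook monitoring code usually attaches to. A sketch, assuming the optional blinker dependency is installed (the receiver and log message are illustrative):

    from flask import Flask, got_request_exception

    app = Flask(__name__)

    def log_failure(sender, exception, **extra):
        sender.logger.error('Unhandled exception: %r', exception)

    got_request_exception.connect(log_failure, app)
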
- """ - req = _request_ctx_stack.top.request - if req.routing_exception is not None: - self.raise_routing_exception(req) - rule = req.url_rule - # if we provide automatic options for this URL and the - # request came with the OPTIONS method, reply automatically - if getattr(rule, 'provide_automatic_options', False) \ - and req.method == 'OPTIONS': - return self.make_default_options_response() - # otherwise dispatch to the handler for that endpoint - return self.view_functions[rule.endpoint](**req.view_args) - - def full_dispatch_request(self): - """Dispatches the request and on top of that performs request - pre and postprocessing as well as HTTP exception catching and - error handling. - - .. versionadded:: 0.7 - """ - self.try_trigger_before_first_request_functions() - try: - request_started.send(self) - rv = self.preprocess_request() - if rv is None: - rv = self.dispatch_request() - except Exception as e: - rv = self.handle_user_exception(e) - response = self.make_response(rv) - response = self.process_response(response) - request_finished.send(self, response=response) - return response - - def try_trigger_before_first_request_functions(self): - """Called before each request and will ensure that it triggers - the :attr:`before_first_request_funcs` and only exactly once per - application instance (which means process usually). - - :internal: - """ - if self._got_first_request: - return - with self._before_request_lock: - if self._got_first_request: - return - self._got_first_request = True - for func in self.before_first_request_funcs: - func() - - def make_default_options_response(self): - """This method is called to create the default `OPTIONS` response. - This can be changed through subclassing to change the default - behavior of `OPTIONS` responses. - - .. versionadded:: 0.7 - """ - adapter = _request_ctx_stack.top.url_adapter - if hasattr(adapter, 'allowed_methods'): - methods = adapter.allowed_methods() - else: - # fallback for Werkzeug < 0.7 - methods = [] - try: - adapter.match(method='--') - except MethodNotAllowed as e: - methods = e.valid_methods - except HTTPException as e: - pass - rv = self.response_class() - rv.allow.update(methods) - return rv - - def should_ignore_error(self, error): - """This is called to figure out if an error should be ignored - or not as far as the teardown system is concerned. If this - function returns `True` then the teardown handlers will not be - passed the error. - - .. versionadded:: 0.10 - """ - return False - - def make_response(self, rv): - """Converts the return value from a view function to a real - response object that is an instance of :attr:`response_class`. - - The following types are allowed for `rv`: - - .. tabularcolumns:: |p{3.5cm}|p{9.5cm}| - - ======================= =========================================== - :attr:`response_class` the object is returned unchanged - :class:`str` a response object is created with the - string as body - :class:`unicode` a response object is created with the - string encoded to utf-8 as body - a WSGI function the function is called as WSGI application - and buffered as response object - :class:`tuple` A tuple in the form ``(response, status, - headers)`` where `response` is any of the - types defined here, `status` is a string - or an integer and `headers` is a list of - a dictionary with header values. - ======================= =========================================== - - :param rv: the return value from the view function - - .. 
versionchanged:: 0.9 - Previously a tuple was interpreted as the arguments for the - response object. - """ - status = headers = None - if isinstance(rv, tuple): - rv, status, headers = rv + (None,) * (3 - len(rv)) - - if rv is None: - raise ValueError('View function did not return a response') - - if not isinstance(rv, self.response_class): - # When we create a response object directly, we let the constructor - # set the headers and status. We do this because there can be - # some extra logic involved when creating these objects with - # specific values (like default content type selection). - if isinstance(rv, (text_type, bytes, bytearray)): - rv = self.response_class(rv, headers=headers, status=status) - headers = status = None - else: - rv = self.response_class.force_type(rv, request.environ) - - if status is not None: - if isinstance(status, string_types): - rv.status = status - else: - rv.status_code = status - if headers: - rv.headers.extend(headers) - - return rv - - def create_url_adapter(self, request): - """Creates a URL adapter for the given request. The URL adapter - is created at a point where the request context is not yet set up - so the request is passed explicitly. - - .. versionadded:: 0.6 - - .. versionchanged:: 0.9 - This can now also be called without a request object when the - URL adapter is created for the application context. - """ - if request is not None: - return self.url_map.bind_to_environ(request.environ, - server_name=self.config['SERVER_NAME']) - # We need at the very least the server name to be set for this - # to work. - if self.config['SERVER_NAME'] is not None: - return self.url_map.bind( - self.config['SERVER_NAME'], - script_name=self.config['APPLICATION_ROOT'] or '/', - url_scheme=self.config['PREFERRED_URL_SCHEME']) - - def inject_url_defaults(self, endpoint, values): - """Injects the URL defaults for the given endpoint directly into - the values dictionary passed. This is used internally and - automatically called on URL building. - - .. versionadded:: 0.7 - """ - funcs = self.url_default_functions.get(None, ()) - if '.' in endpoint: - bp = endpoint.rsplit('.', 1)[0] - funcs = chain(funcs, self.url_default_functions.get(bp, ())) - for func in funcs: - func(endpoint, values) - - def handle_url_build_error(self, error, endpoint, values): - """Handle :class:`~werkzeug.routing.BuildError` on :meth:`url_for`. - """ - exc_type, exc_value, tb = sys.exc_info() - for handler in self.url_build_error_handlers: - try: - rv = handler(error, endpoint, values) - if rv is not None: - return rv - except BuildError as error: - pass - - # At this point we want to reraise the exception. If the error is - # still the same one we can reraise it with the original traceback, - # otherwise we raise it from here. - if error is exc_value: - reraise(exc_type, exc_value, tb) - raise error - - def preprocess_request(self): - """Called before the actual request dispatching and will - call every as :meth:`before_request` decorated function. - If any of these function returns a value it's handled as - if it was the return value from the view and further - request handling is stopped. - - This also triggers the :meth:`url_value_processor` functions before - the actual :meth:`before_request` functions are called. 
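A minimal sketch of that short-circuit (assuming an existing ``app`` object; the token check is invented)::

    from flask import request

    @app.before_request
    def check_token():
        # returning anything other than None skips the view entirely and
        # the returned value is used as the response for this request
        if request.args.get('token') != 'let-me-in':
            return 'forbidden', 403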
- """ - bp = _request_ctx_stack.top.request.blueprint - - funcs = self.url_value_preprocessors.get(None, ()) - if bp is not None and bp in self.url_value_preprocessors: - funcs = chain(funcs, self.url_value_preprocessors[bp]) - for func in funcs: - func(request.endpoint, request.view_args) - - funcs = self.before_request_funcs.get(None, ()) - if bp is not None and bp in self.before_request_funcs: - funcs = chain(funcs, self.before_request_funcs[bp]) - for func in funcs: - rv = func() - if rv is not None: - return rv - - def process_response(self, response): - """Can be overridden in order to modify the response object - before it's sent to the WSGI server. By default this will - call all the :meth:`after_request` decorated functions. - - .. versionchanged:: 0.5 - As of Flask 0.5 the functions registered for after request - execution are called in reverse order of registration. - - :param response: a :attr:`response_class` object. - :return: a new response object or the same, has to be an - instance of :attr:`response_class`. - """ - ctx = _request_ctx_stack.top - bp = ctx.request.blueprint - funcs = ctx._after_request_functions - if bp is not None and bp in self.after_request_funcs: - funcs = chain(funcs, reversed(self.after_request_funcs[bp])) - if None in self.after_request_funcs: - funcs = chain(funcs, reversed(self.after_request_funcs[None])) - for handler in funcs: - response = handler(response) - if not self.session_interface.is_null_session(ctx.session): - self.save_session(ctx.session, response) - return response - - def do_teardown_request(self, exc=None): - """Called after the actual request dispatching and will - call every as :meth:`teardown_request` decorated function. This is - not actually called by the :class:`Flask` object itself but is always - triggered when the request context is popped. That way we have a - tighter control over certain resources under testing environments. - - .. versionchanged:: 0.9 - Added the `exc` argument. Previously this was always using the - current exception information. - """ - if exc is None: - exc = sys.exc_info()[1] - funcs = reversed(self.teardown_request_funcs.get(None, ())) - bp = _request_ctx_stack.top.request.blueprint - if bp is not None and bp in self.teardown_request_funcs: - funcs = chain(funcs, reversed(self.teardown_request_funcs[bp])) - for func in funcs: - rv = func(exc) - request_tearing_down.send(self, exc=exc) - - def do_teardown_appcontext(self, exc=None): - """Called when an application context is popped. This works pretty - much the same as :meth:`do_teardown_request` but for the application - context. - - .. versionadded:: 0.9 - """ - if exc is None: - exc = sys.exc_info()[1] - for func in reversed(self.teardown_appcontext_funcs): - func(exc) - appcontext_tearing_down.send(self, exc=exc) - - def app_context(self): - """Binds the application only. For as long as the application is bound - to the current context the :data:`flask.current_app` points to that - application. An application context is automatically created when a - request context is pushed if necessary. - - Example usage:: - - with app.app_context(): - ... - - .. versionadded:: 0.9 - """ - return AppContext(self) - - def request_context(self, environ): - """Creates a :class:`~flask.ctx.RequestContext` from the given - environment and binds it to the current context. This must be used in - combination with the `with` statement because the request is only bound - to the current context for the duration of the `with` block. 
- - Example usage:: - - with app.request_context(environ): - do_something_with(request) - - The object returned can also be used without the `with` statement - which is useful for working in the shell. The example above is - doing exactly the same as this code:: - - ctx = app.request_context(environ) - ctx.push() - try: - do_something_with(request) - finally: - ctx.pop() - - .. versionchanged:: 0.3 - Added support for non-with statement usage and `with` statement - is now passed the ctx object. - - :param environ: a WSGI environment - """ - return RequestContext(self, environ) - - def test_request_context(self, *args, **kwargs): - """Creates a WSGI environment from the given values (see - :func:`werkzeug.test.EnvironBuilder` for more information, this - function accepts the same arguments). - """ - from flask.testing import make_test_environ_builder - builder = make_test_environ_builder(self, *args, **kwargs) - try: - return self.request_context(builder.get_environ()) - finally: - builder.close() - - def wsgi_app(self, environ, start_response): - """The actual WSGI application. This is not implemented in - `__call__` so that middlewares can be applied without losing a - reference to the class. So instead of doing this:: - - app = MyMiddleware(app) - - It's a better idea to do this instead:: - - app.wsgi_app = MyMiddleware(app.wsgi_app) - - Then you still have the original application object around and - can continue to call methods on it. - - .. versionchanged:: 0.7 - The behavior of the before and after request callbacks was changed - under error conditions and a new callback was added that will - always execute at the end of the request, independent on if an - error occurred or not. See :ref:`callbacks-and-errors`. - - :param environ: a WSGI environment - :param start_response: a callable accepting a status code, - a list of headers and an optional - exception context to start the response - """ - ctx = self.request_context(environ) - ctx.push() - error = None - try: - try: - response = self.full_dispatch_request() - except Exception as e: - error = e - response = self.make_response(self.handle_exception(e)) - return response(environ, start_response) - finally: - if self.should_ignore_error(error): - error = None - ctx.auto_pop(error) - - @property - def modules(self): - from warnings import warn - warn(DeprecationWarning('Flask.modules is deprecated, use ' - 'Flask.blueprints instead'), stacklevel=2) - return self.blueprints - - def __call__(self, environ, start_response): - """Shortcut for :attr:`wsgi_app`.""" - return self.wsgi_app(environ, start_response) - - def __repr__(self): - return '<%s %r>' % ( - self.__class__.__name__, - self.name, - ) diff --git a/Linux_i686/lib/python2.7/site-packages/flask/blueprints.py b/Linux_i686/lib/python2.7/site-packages/flask/blueprints.py deleted file mode 100644 index 4575ec9..0000000 --- a/Linux_i686/lib/python2.7/site-packages/flask/blueprints.py +++ /dev/null @@ -1,401 +0,0 @@ -# -*- coding: utf-8 -*- -""" - flask.blueprints - ~~~~~~~~~~~~~~~~ - - Blueprints are the recommended way to implement larger or more - pluggable applications in Flask 0.7 and later. - - :copyright: (c) 2011 by Armin Ronacher. - :license: BSD, see LICENSE for more details. -""" -from functools import update_wrapper - -from .helpers import _PackageBoundObject, _endpoint_from_view_func - - -class BlueprintSetupState(object): - """Temporary holder object for registering a blueprint with the - application. 
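For example (application and blueprint names invented), the keyword arguments passed to :meth:`~flask.Flask.register_blueprint` are what end up in the :attr:`options` dictionary of this object::

    from flask import Blueprint, Flask

    app = Flask(__name__)
    admin = Blueprint('admin', __name__)

    # url_prefix is picked up from the options by the setup state and
    # prepended to every URL rule the blueprint registers
    app.register_blueprint(admin, url_prefix='/admin')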
An instance of this class is created by the - :meth:`~flask.Blueprint.make_setup_state` method and later passed - to all register callback functions. - """ - - def __init__(self, blueprint, app, options, first_registration): - #: a reference to the current application - self.app = app - - #: a reference to the blueprint that created this setup state. - self.blueprint = blueprint - - #: a dictionary with all options that were passed to the - #: :meth:`~flask.Flask.register_blueprint` method. - self.options = options - - #: as blueprints can be registered multiple times with the - #: application and not everything wants to be registered - #: multiple times on it, this attribute can be used to figure - #: out if the blueprint was registered in the past already. - self.first_registration = first_registration - - subdomain = self.options.get('subdomain') - if subdomain is None: - subdomain = self.blueprint.subdomain - - #: The subdomain that the blueprint should be active for, `None` - #: otherwise. - self.subdomain = subdomain - - url_prefix = self.options.get('url_prefix') - if url_prefix is None: - url_prefix = self.blueprint.url_prefix - - #: The prefix that should be used for all URLs defined on the - #: blueprint. - self.url_prefix = url_prefix - - #: A dictionary with URL defaults that is added to each and every - #: URL that was defined with the blueprint. - self.url_defaults = dict(self.blueprint.url_values_defaults) - self.url_defaults.update(self.options.get('url_defaults', ())) - - def add_url_rule(self, rule, endpoint=None, view_func=None, **options): - """A helper method to register a rule (and optionally a view function) - to the application. The endpoint is automatically prefixed with the - blueprint's name. - """ - if self.url_prefix: - rule = self.url_prefix + rule - options.setdefault('subdomain', self.subdomain) - if endpoint is None: - endpoint = _endpoint_from_view_func(view_func) - defaults = self.url_defaults - if 'defaults' in options: - defaults = dict(defaults, **options.pop('defaults')) - self.app.add_url_rule(rule, '%s.%s' % (self.blueprint.name, endpoint), - view_func, defaults=defaults, **options) - - -class Blueprint(_PackageBoundObject): - """Represents a blueprint. A blueprint is an object that records - functions that will be called with the - :class:`~flask.blueprint.BlueprintSetupState` later to register functions - or other things on the main application. See :ref:`blueprints` for more - information. - - .. versionadded:: 0.7 - """ - - warn_on_modifications = False - _got_registered_once = False - - def __init__(self, name, import_name, static_folder=None, - static_url_path=None, template_folder=None, - url_prefix=None, subdomain=None, url_defaults=None): - _PackageBoundObject.__init__(self, import_name, template_folder) - self.name = name - self.url_prefix = url_prefix - self.subdomain = subdomain - self.static_folder = static_folder - self.static_url_path = static_url_path - self.deferred_functions = [] - self.view_functions = {} - if url_defaults is None: - url_defaults = {} - self.url_values_defaults = url_defaults - - def record(self, func): - """Registers a function that is called when the blueprint is - registered on the application. This function is called with the - state as argument as returned by the :meth:`make_setup_state` - method. - """ - if self._got_registered_once and self.warn_on_modifications: - from warnings import warn - warn(Warning('The blueprint was already registered once ' - 'but is getting modified now. 
These changes ' - 'will not show up.')) - self.deferred_functions.append(func) - - def record_once(self, func): - """Works like :meth:`record` but wraps the function in another - function that will ensure the function is only called once. If the - blueprint is registered a second time on the application, the - function passed is not called. - """ - def wrapper(state): - if state.first_registration: - func(state) - return self.record(update_wrapper(wrapper, func)) - - def make_setup_state(self, app, options, first_registration=False): - """Creates an instance of :meth:`~flask.blueprints.BlueprintSetupState` - object that is later passed to the register callback functions. - Subclasses can override this to return a subclass of the setup state. - """ - return BlueprintSetupState(self, app, options, first_registration) - - def register(self, app, options, first_registration=False): - """Called by :meth:`Flask.register_blueprint` to register a blueprint - on the application. This can be overridden to customize the register - behavior. Keyword arguments from - :func:`~flask.Flask.register_blueprint` are directly forwarded to this - method in the `options` dictionary. - """ - self._got_registered_once = True - state = self.make_setup_state(app, options, first_registration) - if self.has_static_folder: - state.add_url_rule(self.static_url_path + '/', - view_func=self.send_static_file, - endpoint='static') - - for deferred in self.deferred_functions: - deferred(state) - - def route(self, rule, **options): - """Like :meth:`Flask.route` but for a blueprint. The endpoint for the - :func:`url_for` function is prefixed with the name of the blueprint. - """ - def decorator(f): - endpoint = options.pop("endpoint", f.__name__) - self.add_url_rule(rule, endpoint, f, **options) - return f - return decorator - - def add_url_rule(self, rule, endpoint=None, view_func=None, **options): - """Like :meth:`Flask.add_url_rule` but for a blueprint. The endpoint for - the :func:`url_for` function is prefixed with the name of the blueprint. - """ - if endpoint: - assert '.' not in endpoint, "Blueprint endpoint's should not contain dot's" - self.record(lambda s: - s.add_url_rule(rule, endpoint, view_func, **options)) - - def endpoint(self, endpoint): - """Like :meth:`Flask.endpoint` but for a blueprint. This does not - prefix the endpoint with the blueprint name, this has to be done - explicitly by the user of this method. If the endpoint is prefixed - with a `.` it will be registered to the current blueprint, otherwise - it's an application independent endpoint. - """ - def decorator(f): - def register_endpoint(state): - state.app.view_functions[endpoint] = f - self.record_once(register_endpoint) - return f - return decorator - - def app_template_filter(self, name=None): - """Register a custom template filter, available application wide. Like - :meth:`Flask.template_filter` but for a blueprint. - - :param name: the optional name of the filter, otherwise the - function name will be used. - """ - def decorator(f): - self.add_app_template_filter(f, name=name) - return f - return decorator - - def add_app_template_filter(self, f, name=None): - """Register a custom template filter, available application wide. Like - :meth:`Flask.add_template_filter` but for a blueprint. Works exactly - like the :meth:`app_template_filter` decorator. - - :param name: the optional name of the filter, otherwise the - function name will be used. 
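A short sketch (blueprint and filter names invented); once the blueprint is registered on an application the filter is usable in every template, e.g. as ``{{ name|shout }}``::

    from flask import Blueprint

    frontend = Blueprint('frontend', __name__)

    def shout(s):
        return s.upper()

    # equivalent to decorating shout() with @frontend.app_template_filter('shout')
    frontend.add_app_template_filter(shout, name='shout')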
- """ - def register_template(state): - state.app.jinja_env.filters[name or f.__name__] = f - self.record_once(register_template) - - def app_template_test(self, name=None): - """Register a custom template test, available application wide. Like - :meth:`Flask.template_test` but for a blueprint. - - .. versionadded:: 0.10 - - :param name: the optional name of the test, otherwise the - function name will be used. - """ - def decorator(f): - self.add_app_template_test(f, name=name) - return f - return decorator - - def add_app_template_test(self, f, name=None): - """Register a custom template test, available application wide. Like - :meth:`Flask.add_template_test` but for a blueprint. Works exactly - like the :meth:`app_template_test` decorator. - - .. versionadded:: 0.10 - - :param name: the optional name of the test, otherwise the - function name will be used. - """ - def register_template(state): - state.app.jinja_env.tests[name or f.__name__] = f - self.record_once(register_template) - - def app_template_global(self, name=None): - """Register a custom template global, available application wide. Like - :meth:`Flask.template_global` but for a blueprint. - - .. versionadded:: 0.10 - - :param name: the optional name of the global, otherwise the - function name will be used. - """ - def decorator(f): - self.add_app_template_global(f, name=name) - return f - return decorator - - def add_app_template_global(self, f, name=None): - """Register a custom template global, available application wide. Like - :meth:`Flask.add_template_global` but for a blueprint. Works exactly - like the :meth:`app_template_global` decorator. - - .. versionadded:: 0.10 - - :param name: the optional name of the global, otherwise the - function name will be used. - """ - def register_template(state): - state.app.jinja_env.globals[name or f.__name__] = f - self.record_once(register_template) - - def before_request(self, f): - """Like :meth:`Flask.before_request` but for a blueprint. This function - is only executed before each request that is handled by a function of - that blueprint. - """ - self.record_once(lambda s: s.app.before_request_funcs - .setdefault(self.name, []).append(f)) - return f - - def before_app_request(self, f): - """Like :meth:`Flask.before_request`. Such a function is executed - before each request, even if outside of a blueprint. - """ - self.record_once(lambda s: s.app.before_request_funcs - .setdefault(None, []).append(f)) - return f - - def before_app_first_request(self, f): - """Like :meth:`Flask.before_first_request`. Such a function is - executed before the first request to the application. - """ - self.record_once(lambda s: s.app.before_first_request_funcs.append(f)) - return f - - def after_request(self, f): - """Like :meth:`Flask.after_request` but for a blueprint. This function - is only executed after each request that is handled by a function of - that blueprint. - """ - self.record_once(lambda s: s.app.after_request_funcs - .setdefault(self.name, []).append(f)) - return f - - def after_app_request(self, f): - """Like :meth:`Flask.after_request` but for a blueprint. Such a function - is executed after each request, even if outside of the blueprint. - """ - self.record_once(lambda s: s.app.after_request_funcs - .setdefault(None, []).append(f)) - return f - - def teardown_request(self, f): - """Like :meth:`Flask.teardown_request` but for a blueprint. This - function is only executed when tearing down requests handled by a - function of that blueprint. 
Teardown request functions are executed - when the request context is popped, even when no actual request was - performed. - """ - self.record_once(lambda s: s.app.teardown_request_funcs - .setdefault(self.name, []).append(f)) - return f - - def teardown_app_request(self, f): - """Like :meth:`Flask.teardown_request` but for a blueprint. Such a - function is executed when tearing down each request, even if outside of - the blueprint. - """ - self.record_once(lambda s: s.app.teardown_request_funcs - .setdefault(None, []).append(f)) - return f - - def context_processor(self, f): - """Like :meth:`Flask.context_processor` but for a blueprint. This - function is only executed for requests handled by a blueprint. - """ - self.record_once(lambda s: s.app.template_context_processors - .setdefault(self.name, []).append(f)) - return f - - def app_context_processor(self, f): - """Like :meth:`Flask.context_processor` but for a blueprint. Such a - function is executed each request, even if outside of the blueprint. - """ - self.record_once(lambda s: s.app.template_context_processors - .setdefault(None, []).append(f)) - return f - - def app_errorhandler(self, code): - """Like :meth:`Flask.errorhandler` but for a blueprint. This - handler is used for all requests, even if outside of the blueprint. - """ - def decorator(f): - self.record_once(lambda s: s.app.errorhandler(code)(f)) - return f - return decorator - - def url_value_preprocessor(self, f): - """Registers a function as URL value preprocessor for this - blueprint. It's called before the view functions are called and - can modify the url values provided. - """ - self.record_once(lambda s: s.app.url_value_preprocessors - .setdefault(self.name, []).append(f)) - return f - - def url_defaults(self, f): - """Callback function for URL defaults for this blueprint. It's called - with the endpoint and values and should update the values passed - in place. - """ - self.record_once(lambda s: s.app.url_default_functions - .setdefault(self.name, []).append(f)) - return f - - def app_url_value_preprocessor(self, f): - """Same as :meth:`url_value_preprocessor` but application wide. - """ - self.record_once(lambda s: s.app.url_value_preprocessors - .setdefault(None, []).append(f)) - return f - - def app_url_defaults(self, f): - """Same as :meth:`url_defaults` but application wide. - """ - self.record_once(lambda s: s.app.url_default_functions - .setdefault(None, []).append(f)) - return f - - def errorhandler(self, code_or_exception): - """Registers an error handler that becomes active for this blueprint - only. Please be aware that routing does not happen local to a - blueprint so an error handler for 404 usually is not handled by - a blueprint unless it is caused inside a view function. Another - special case is the 500 internal server error which is always looked - up from the application. - - Otherwise works as the :meth:`~flask.Flask.errorhandler` decorator - of the :class:`~flask.Flask` object. - """ - def decorator(f): - self.record_once(lambda s: s.app._register_error_handler( - self.name, code_or_exception, f)) - return f - return decorator diff --git a/Linux_i686/lib/python2.7/site-packages/flask/config.py b/Linux_i686/lib/python2.7/site-packages/flask/config.py deleted file mode 100644 index 155afa2..0000000 --- a/Linux_i686/lib/python2.7/site-packages/flask/config.py +++ /dev/null @@ -1,168 +0,0 @@ -# -*- coding: utf-8 -*- -""" - flask.config - ~~~~~~~~~~~~ - - Implements the configuration related objects. - - :copyright: (c) 2011 by Armin Ronacher. 
- :license: BSD, see LICENSE for more details. -""" - -import imp -import os -import errno - -from werkzeug.utils import import_string -from ._compat import string_types - - -class ConfigAttribute(object): - """Makes an attribute forward to the config""" - - def __init__(self, name, get_converter=None): - self.__name__ = name - self.get_converter = get_converter - - def __get__(self, obj, type=None): - if obj is None: - return self - rv = obj.config[self.__name__] - if self.get_converter is not None: - rv = self.get_converter(rv) - return rv - - def __set__(self, obj, value): - obj.config[self.__name__] = value - - -class Config(dict): - """Works exactly like a dict but provides ways to fill it from files - or special dictionaries. There are two common patterns to populate the - config. - - Either you can fill the config from a config file:: - - app.config.from_pyfile('yourconfig.cfg') - - Or alternatively you can define the configuration options in the - module that calls :meth:`from_object` or provide an import path to - a module that should be loaded. It is also possible to tell it to - use the same module and with that provide the configuration values - just before the call:: - - DEBUG = True - SECRET_KEY = 'development key' - app.config.from_object(__name__) - - In both cases (loading from any Python file or loading from modules), - only uppercase keys are added to the config. This makes it possible to use - lowercase values in the config file for temporary values that are not added - to the config or to define the config keys in the same file that implements - the application. - - Probably the most interesting way to load configurations is from an - environment variable pointing to a file:: - - app.config.from_envvar('YOURAPPLICATION_SETTINGS') - - In this case before launching the application you have to set this - environment variable to the file you want to use. On Linux and OS X - use the export statement:: - - export YOURAPPLICATION_SETTINGS='/path/to/config/file' - - On windows use `set` instead. - - :param root_path: path to which files are read relative from. When the - config object is created by the application, this is - the application's :attr:`~flask.Flask.root_path`. - :param defaults: an optional dictionary of default values - """ - - def __init__(self, root_path, defaults=None): - dict.__init__(self, defaults or {}) - self.root_path = root_path - - def from_envvar(self, variable_name, silent=False): - """Loads a configuration from an environment variable pointing to - a configuration file. This is basically just a shortcut with nicer - error messages for this line of code:: - - app.config.from_pyfile(os.environ['YOURAPPLICATION_SETTINGS']) - - :param variable_name: name of the environment variable - :param silent: set to `True` if you want silent failure for missing - files. - :return: bool. `True` if able to load config, `False` otherwise. - """ - rv = os.environ.get(variable_name) - if not rv: - if silent: - return False - raise RuntimeError('The environment variable %r is not set ' - 'and as such configuration could not be ' - 'loaded. Set this variable and make it ' - 'point to a configuration file' % - variable_name) - return self.from_pyfile(rv, silent=silent) - - def from_pyfile(self, filename, silent=False): - """Updates the values in the config from a Python file. This function - behaves as if the file was imported as module with the - :meth:`from_object` function. - - :param filename: the filename of the config. 
This can either be an - absolute filename or a filename relative to the - root path. - :param silent: set to `True` if you want silent failure for missing - files. - - .. versionadded:: 0.7 - `silent` parameter. - """ - filename = os.path.join(self.root_path, filename) - d = imp.new_module('config') - d.__file__ = filename - try: - with open(filename) as config_file: - exec(compile(config_file.read(), filename, 'exec'), d.__dict__) - except IOError as e: - if silent and e.errno in (errno.ENOENT, errno.EISDIR): - return False - e.strerror = 'Unable to load configuration file (%s)' % e.strerror - raise - self.from_object(d) - return True - - def from_object(self, obj): - """Updates the values from the given object. An object can be of one - of the following two types: - - - a string: in this case the object with that name will be imported - - an actual object reference: that object is used directly - - Objects are usually either modules or classes. - - Just the uppercase variables in that object are stored in the config. - Example usage:: - - app.config.from_object('yourapplication.default_config') - from yourapplication import default_config - app.config.from_object(default_config) - - You should not use this function to load the actual configuration but - rather configuration defaults. The actual config should be loaded - with :meth:`from_pyfile` and ideally from a location not within the - package because the package might be installed system wide. - - :param obj: an import name or object - """ - if isinstance(obj, string_types): - obj = import_string(obj) - for key in dir(obj): - if key.isupper(): - self[key] = getattr(obj, key) - - def __repr__(self): - return '<%s %s>' % (self.__class__.__name__, dict.__repr__(self)) diff --git a/Linux_i686/lib/python2.7/site-packages/flask/ctx.py b/Linux_i686/lib/python2.7/site-packages/flask/ctx.py deleted file mode 100644 index f134237..0000000 --- a/Linux_i686/lib/python2.7/site-packages/flask/ctx.py +++ /dev/null @@ -1,394 +0,0 @@ -# -*- coding: utf-8 -*- -""" - flask.ctx - ~~~~~~~~~ - - Implements the objects required to keep the context. - - :copyright: (c) 2011 by Armin Ronacher. - :license: BSD, see LICENSE for more details. -""" - -from __future__ import with_statement - -import sys -from functools import update_wrapper - -from werkzeug.exceptions import HTTPException - -from .globals import _request_ctx_stack, _app_ctx_stack -from .module import blueprint_is_module -from .signals import appcontext_pushed, appcontext_popped - - -class _AppCtxGlobals(object): - """A plain object.""" - - def get(self, name, default=None): - return self.__dict__.get(name, default) - - def __contains__(self, item): - return item in self.__dict__ - - def __iter__(self): - return iter(self.__dict__) - - def __repr__(self): - top = _app_ctx_stack.top - if top is not None: - return '' % top.app.name - return object.__repr__(self) - - -def after_this_request(f): - """Executes a function after this request. This is useful to modify - response objects. The function is passed the response object and has - to return the same or a new one. - - Example:: - - @app.route('/') - def index(): - @after_this_request - def add_header(response): - response.headers['X-Foo'] = 'Parachute' - return response - return 'Hello World!' - - This is more useful if a function other than the view function wants to - modify a response. For instance think of a decorator that wants to add - some headers without converting the return value into a response object. - - .. 
versionadded:: 0.9 - """ - _request_ctx_stack.top._after_request_functions.append(f) - return f - - -def copy_current_request_context(f): - """A helper function that decorates a function to retain the current - request context. This is useful when working with greenlets. The moment - the function is decorated a copy of the request context is created and - then pushed when the function is called. - - Example:: - - import gevent - from flask import copy_current_request_context - - @app.route('/') - def index(): - @copy_current_request_context - def do_some_work(): - # do some work here, it can access flask.request like you - # would otherwise in the view function. - ... - gevent.spawn(do_some_work) - return 'Regular response' - - .. versionadded:: 0.10 - """ - top = _request_ctx_stack.top - if top is None: - raise RuntimeError('This decorator can only be used at local scopes ' - 'when a request context is on the stack. For instance within ' - 'view functions.') - reqctx = top.copy() - def wrapper(*args, **kwargs): - with reqctx: - return f(*args, **kwargs) - return update_wrapper(wrapper, f) - - -def has_request_context(): - """If you have code that wants to test if a request context is there or - not this function can be used. For instance, you may want to take advantage - of request information if the request object is available, but fail - silently if it is unavailable. - - :: - - class User(db.Model): - - def __init__(self, username, remote_addr=None): - self.username = username - if remote_addr is None and has_request_context(): - remote_addr = request.remote_addr - self.remote_addr = remote_addr - - Alternatively you can also just test any of the context bound objects - (such as :class:`request` or :class:`g` for truthness):: - - class User(db.Model): - - def __init__(self, username, remote_addr=None): - self.username = username - if remote_addr is None and request: - remote_addr = request.remote_addr - self.remote_addr = remote_addr - - .. versionadded:: 0.7 - """ - return _request_ctx_stack.top is not None - - -def has_app_context(): - """Works like :func:`has_request_context` but for the application - context. You can also just do a boolean check on the - :data:`current_app` object instead. - - .. versionadded:: 0.9 - """ - return _app_ctx_stack.top is not None - - -class AppContext(object): - """The application context binds an application object implicitly - to the current thread or greenlet, similar to how the - :class:`RequestContext` binds request information. The application - context is also implicitly created if a request context is created - but the application is not on top of the individual application - context. - """ - - def __init__(self, app): - self.app = app - self.url_adapter = app.create_url_adapter(None) - self.g = app.app_ctx_globals_class() - - # Like request context, app contexts can be pushed multiple times - # but there a basic "refcount" is enough to track them. - self._refcnt = 0 - - def push(self): - """Binds the app context to the current context.""" - self._refcnt += 1 - _app_ctx_stack.push(self) - appcontext_pushed.send(self.app) - - def pop(self, exc=None): - """Pops the app context.""" - self._refcnt -= 1 - if self._refcnt <= 0: - if exc is None: - exc = sys.exc_info()[1] - self.app.do_teardown_appcontext(exc) - rv = _app_ctx_stack.pop() - assert rv is self, 'Popped wrong app context. 
(%r instead of %r)' \ - % (rv, self) - appcontext_popped.send(self.app) - - def __enter__(self): - self.push() - return self - - def __exit__(self, exc_type, exc_value, tb): - self.pop(exc_value) - - -class RequestContext(object): - """The request context contains all request relevant information. It is - created at the beginning of the request and pushed to the - `_request_ctx_stack` and removed at the end of it. It will create the - URL adapter and request object for the WSGI environment provided. - - Do not attempt to use this class directly, instead use - :meth:`~flask.Flask.test_request_context` and - :meth:`~flask.Flask.request_context` to create this object. - - When the request context is popped, it will evaluate all the - functions registered on the application for teardown execution - (:meth:`~flask.Flask.teardown_request`). - - The request context is automatically popped at the end of the request - for you. In debug mode the request context is kept around if - exceptions happen so that interactive debuggers have a chance to - introspect the data. With 0.4 this can also be forced for requests - that did not fail and outside of `DEBUG` mode. By setting - ``'flask._preserve_context'`` to `True` on the WSGI environment the - context will not pop itself at the end of the request. This is used by - the :meth:`~flask.Flask.test_client` for example to implement the - deferred cleanup functionality. - - You might find this helpful for unittests where you need the - information from the context local around for a little longer. Make - sure to properly :meth:`~werkzeug.LocalStack.pop` the stack yourself in - that situation, otherwise your unittests will leak memory. - """ - - def __init__(self, app, environ, request=None): - self.app = app - if request is None: - request = app.request_class(environ) - self.request = request - self.url_adapter = app.create_url_adapter(self.request) - self.flashes = None - self.session = None - - # Request contexts can be pushed multiple times and interleaved with - # other request contexts. Now only if the last level is popped we - # get rid of them. Additionally if an application context is missing - # one is created implicitly so for each level we add this information - self._implicit_app_ctx_stack = [] - - # indicator if the context was preserved. Next time another context - # is pushed the preserved context is popped. - self.preserved = False - - # remembers the exception for pop if there is one in case the context - # preservation kicks in. - self._preserved_exc = None - - # Functions that should be executed after the request on the response - # object. These will be called before the regular "after_request" - # functions. - self._after_request_functions = [] - - self.match_request() - - # XXX: Support for deprecated functionality. This is going away with - # Flask 1.0 - blueprint = self.request.blueprint - if blueprint is not None: - # better safe than sorry, we don't want to break code that - # already worked - bp = app.blueprints.get(blueprint) - if bp is not None and blueprint_is_module(bp): - self.request._is_old_module = True - - def _get_g(self): - return _app_ctx_stack.top.g - def _set_g(self, value): - _app_ctx_stack.top.g = value - g = property(_get_g, _set_g) - del _get_g, _set_g - - def copy(self): - """Creates a copy of this request context with the same request object. - This can be used to move a request context to a different greenlet. 
- Because the actual request object is the same this cannot be used to - move a request context to a different thread unless access to the - request object is locked. - - .. versionadded:: 0.10 - """ - return self.__class__(self.app, - environ=self.request.environ, - request=self.request - ) - - def match_request(self): - """Can be overridden by a subclass to hook into the matching - of the request. - """ - try: - url_rule, self.request.view_args = \ - self.url_adapter.match(return_rule=True) - self.request.url_rule = url_rule - except HTTPException as e: - self.request.routing_exception = e - - def push(self): - """Binds the request context to the current context.""" - # If an exception occurs in debug mode or if context preservation is - # activated under exception situations exactly one context stays - # on the stack. The rationale is that you want to access that - # information under debug situations. However if someone forgets to - # pop that context again we want to make sure that on the next push - # it's invalidated, otherwise we run at risk that something leaks - # memory. This is usually only a problem in testsuite since this - # functionality is not active in production environments. - top = _request_ctx_stack.top - if top is not None and top.preserved: - top.pop(top._preserved_exc) - - # Before we push the request context we have to ensure that there - # is an application context. - app_ctx = _app_ctx_stack.top - if app_ctx is None or app_ctx.app != self.app: - app_ctx = self.app.app_context() - app_ctx.push() - self._implicit_app_ctx_stack.append(app_ctx) - else: - self._implicit_app_ctx_stack.append(None) - - _request_ctx_stack.push(self) - - # Open the session at the moment that the request context is - # available. This allows a custom open_session method to use the - # request context (e.g. code that access database information - # stored on `g` instead of the appcontext). - self.session = self.app.open_session(self.request) - if self.session is None: - self.session = self.app.make_null_session() - - def pop(self, exc=None): - """Pops the request context and unbinds it by doing that. This will - also trigger the execution of functions registered by the - :meth:`~flask.Flask.teardown_request` decorator. - - .. versionchanged:: 0.9 - Added the `exc` argument. - """ - app_ctx = self._implicit_app_ctx_stack.pop() - - clear_request = False - if not self._implicit_app_ctx_stack: - self.preserved = False - self._preserved_exc = None - if exc is None: - exc = sys.exc_info()[1] - self.app.do_teardown_request(exc) - - # If this interpreter supports clearing the exception information - # we do that now. This will only go into effect on Python 2.x, - # on 3.x it disappears automatically at the end of the exception - # stack. - if hasattr(sys, 'exc_clear'): - sys.exc_clear() - - request_close = getattr(self.request, 'close', None) - if request_close is not None: - request_close() - clear_request = True - - rv = _request_ctx_stack.pop() - assert rv is self, 'Popped wrong request context. (%r instead of %r)' \ - % (rv, self) - - # get rid of circular dependencies at the end of the request - # so that we don't require the GC to be active. - if clear_request: - rv.request.environ['werkzeug.request'] = None - - # Get rid of the app as well if necessary. 
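        # (a None entry here means push() above reused an application
        #  context that was already on the stack, so there is nothing
        #  left to pop for it)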
- if app_ctx is not None: - app_ctx.pop(exc) - - def auto_pop(self, exc): - if self.request.environ.get('flask._preserve_context') or \ - (exc is not None and self.app.preserve_context_on_exception): - self.preserved = True - self._preserved_exc = exc - else: - self.pop(exc) - - def __enter__(self): - self.push() - return self - - def __exit__(self, exc_type, exc_value, tb): - # do not pop the request stack if we are in debug mode and an - # exception happened. This will allow the debugger to still - # access the request object in the interactive shell. Furthermore - # the context can be force kept alive for the test client. - # See flask.testing for how this works. - self.auto_pop(exc_value) - - def __repr__(self): - return '<%s \'%s\' [%s] of %s>' % ( - self.__class__.__name__, - self.request.url, - self.request.method, - self.app.name, - ) diff --git a/Linux_i686/lib/python2.7/site-packages/flask/debughelpers.py b/Linux_i686/lib/python2.7/site-packages/flask/debughelpers.py deleted file mode 100644 index 2f8510f..0000000 --- a/Linux_i686/lib/python2.7/site-packages/flask/debughelpers.py +++ /dev/null @@ -1,87 +0,0 @@ -# -*- coding: utf-8 -*- -""" - flask.debughelpers - ~~~~~~~~~~~~~~~~~~ - - Various helpers to make the development experience better. - - :copyright: (c) 2011 by Armin Ronacher. - :license: BSD, see LICENSE for more details. -""" -from ._compat import implements_to_string - - -class UnexpectedUnicodeError(AssertionError, UnicodeError): - """Raised in places where we want some better error reporting for - unexpected unicode or binary data. - """ - - -@implements_to_string -class DebugFilesKeyError(KeyError, AssertionError): - """Raised from request.files during debugging. The idea is that it can - provide a better error message than just a generic KeyError/BadRequest. - """ - - def __init__(self, request, key): - form_matches = request.form.getlist(key) - buf = ['You tried to access the file "%s" in the request.files ' - 'dictionary but it does not exist. The mimetype for the request ' - 'is "%s" instead of "multipart/form-data" which means that no ' - 'file contents were transmitted. To fix this error you should ' - 'provide enctype="multipart/form-data" in your form.' % - (key, request.mimetype)] - if form_matches: - buf.append('\n\nThe browser instead transmitted some file names. ' - 'This was submitted: %s' % ', '.join('"%s"' % x - for x in form_matches)) - self.msg = ''.join(buf) - - def __str__(self): - return self.msg - - -class FormDataRoutingRedirect(AssertionError): - """This exception is raised by Flask in debug mode if it detects a - redirect caused by the routing system when the request method is not - GET, HEAD or OPTIONS. Reasoning: form data will be dropped. - """ - - def __init__(self, request): - exc = request.routing_exception - buf = ['A request was sent to this URL (%s) but a redirect was ' - 'issued automatically by the routing system to "%s".' - % (request.url, exc.new_url)] - - # In case just a slash was appended we can be extra helpful - if request.base_url + '/' == exc.new_url.split('?')[0]: - buf.append(' The URL was defined with a trailing slash so ' - 'Flask will automatically redirect to the URL ' - 'with the trailing slash if it was accessed ' - 'without one.') - - buf.append(' Make sure to directly send your %s-request to this URL ' - 'since we can\'t make browsers or HTTP clients redirect ' - 'with form data reliably or without user interaction.' 
% - request.method) - buf.append('\n\nNote: this exception is only raised in debug mode') - AssertionError.__init__(self, ''.join(buf).encode('utf-8')) - - -def attach_enctype_error_multidict(request): - """Since Flask 0.8 we're monkeypatching the files object in case a - request is detected that does not use multipart form data but the files - object is accessed. - """ - oldcls = request.files.__class__ - class newcls(oldcls): - def __getitem__(self, key): - try: - return oldcls.__getitem__(self, key) - except KeyError as e: - if key not in request.form: - raise - raise DebugFilesKeyError(request, key) - newcls.__name__ = oldcls.__name__ - newcls.__module__ = oldcls.__module__ - request.files.__class__ = newcls diff --git a/Linux_i686/lib/python2.7/site-packages/flask/ext/__init__.py b/Linux_i686/lib/python2.7/site-packages/flask/ext/__init__.py deleted file mode 100644 index f29958a..0000000 --- a/Linux_i686/lib/python2.7/site-packages/flask/ext/__init__.py +++ /dev/null @@ -1,29 +0,0 @@ -# -*- coding: utf-8 -*- -""" - flask.ext - ~~~~~~~~~ - - Redirect imports for extensions. This module basically makes it possible - for us to transition from flaskext.foo to flask_foo without having to - force all extensions to upgrade at the same time. - - When a user does ``from flask.ext.foo import bar`` it will attempt to - import ``from flask_foo import bar`` first and when that fails it will - try to import ``from flaskext.foo import bar``. - - We're switching from namespace packages because it was just too painful for - everybody involved. - - :copyright: (c) 2011 by Armin Ronacher. - :license: BSD, see LICENSE for more details. -""" - - -def setup(): - from ..exthook import ExtensionImporter - importer = ExtensionImporter(['flask_%s', 'flaskext.%s'], __name__) - importer.install() - - -setup() -del setup diff --git a/Linux_i686/lib/python2.7/site-packages/flask/exthook.py b/Linux_i686/lib/python2.7/site-packages/flask/exthook.py deleted file mode 100644 index d0d814c..0000000 --- a/Linux_i686/lib/python2.7/site-packages/flask/exthook.py +++ /dev/null @@ -1,120 +0,0 @@ -# -*- coding: utf-8 -*- -""" - flask.exthook - ~~~~~~~~~~~~~ - - Redirect imports for extensions. This module basically makes it possible - for us to transition from flaskext.foo to flask_foo without having to - force all extensions to upgrade at the same time. - - When a user does ``from flask.ext.foo import bar`` it will attempt to - import ``from flask_foo import bar`` first and when that fails it will - try to import ``from flaskext.foo import bar``. - - We're switching from namespace packages because it was just too painful for - everybody involved. - - This is used by `flask.ext`. - - :copyright: (c) 2011 by Armin Ronacher. - :license: BSD, see LICENSE for more details. -""" -import sys -import os -from ._compat import reraise - - -class ExtensionImporter(object): - """This importer redirects imports from this submodule to other locations. - This makes it possible to transition from the old flaskext.name to the - newer flask_name without people having a hard time. - """ - - def __init__(self, module_choices, wrapper_module): - self.module_choices = module_choices - self.wrapper_module = wrapper_module - self.prefix = wrapper_module + '.' 
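        # e.g. for a wrapper_module of 'flask.ext' the prefix is
        # 'flask.ext.' and prefix_cutoff below is 2, the index of the
        # real extension name in 'flask.ext.foo'.split('.')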
- self.prefix_cutoff = wrapper_module.count('.') + 1 - - def __eq__(self, other): - return self.__class__.__module__ == other.__class__.__module__ and \ - self.__class__.__name__ == other.__class__.__name__ and \ - self.wrapper_module == other.wrapper_module and \ - self.module_choices == other.module_choices - - def __ne__(self, other): - return not self.__eq__(other) - - def install(self): - sys.meta_path[:] = [x for x in sys.meta_path if self != x] + [self] - - def find_module(self, fullname, path=None): - if fullname.startswith(self.prefix): - return self - - def load_module(self, fullname): - if fullname in sys.modules: - return sys.modules[fullname] - modname = fullname.split('.', self.prefix_cutoff)[self.prefix_cutoff] - for path in self.module_choices: - realname = path % modname - try: - __import__(realname) - except ImportError: - exc_type, exc_value, tb = sys.exc_info() - # since we only establish the entry in sys.modules at the - # very this seems to be redundant, but if recursive imports - # happen we will call into the move import a second time. - # On the second invocation we still don't have an entry for - # fullname in sys.modules, but we will end up with the same - # fake module name and that import will succeed since this - # one already has a temporary entry in the modules dict. - # Since this one "succeeded" temporarily that second - # invocation now will have created a fullname entry in - # sys.modules which we have to kill. - sys.modules.pop(fullname, None) - - # If it's an important traceback we reraise it, otherwise - # we swallow it and try the next choice. The skipped frame - # is the one from __import__ above which we don't care about - if self.is_important_traceback(realname, tb): - reraise(exc_type, exc_value, tb.tb_next) - continue - module = sys.modules[fullname] = sys.modules[realname] - if '.' not in modname: - setattr(sys.modules[self.wrapper_module], modname, module) - return module - raise ImportError('No module named %s' % fullname) - - def is_important_traceback(self, important_module, tb): - """Walks a traceback's frames and checks if any of the frames - originated in the given important module. If that is the case then we - were able to import the module itself but apparently something went - wrong when the module was imported. (Eg: import of an import failed). - """ - while tb is not None: - if self.is_important_frame(important_module, tb): - return True - tb = tb.tb_next - return False - - def is_important_frame(self, important_module, tb): - """Checks a single frame if it's important.""" - g = tb.tb_frame.f_globals - if '__name__' not in g: - return False - - module_name = g['__name__'] - - # Python 2.7 Behavior. Modules are cleaned up late so the - # name shows up properly here. Success! - if module_name == important_module: - return True - - # Some python versions will will clean up modules so early that the - # module name at that point is no longer set. Try guessing from - # the filename then. 
- filename = os.path.abspath(tb.tb_frame.f_code.co_filename) - test_string = os.path.sep + important_module.replace('.', os.path.sep) - return test_string + '.py' in filename or \ - test_string + os.path.sep + '__init__.py' in filename diff --git a/Linux_i686/lib/python2.7/site-packages/flask/globals.py b/Linux_i686/lib/python2.7/site-packages/flask/globals.py deleted file mode 100644 index 67d41f5..0000000 --- a/Linux_i686/lib/python2.7/site-packages/flask/globals.py +++ /dev/null @@ -1,44 +0,0 @@ -# -*- coding: utf-8 -*- -""" - flask.globals - ~~~~~~~~~~~~~ - - Defines all the global objects that are proxies to the current - active context. - - :copyright: (c) 2011 by Armin Ronacher. - :license: BSD, see LICENSE for more details. -""" - -from functools import partial -from werkzeug.local import LocalStack, LocalProxy - - -def _lookup_req_object(name): - top = _request_ctx_stack.top - if top is None: - raise RuntimeError('working outside of request context') - return getattr(top, name) - - -def _lookup_app_object(name): - top = _app_ctx_stack.top - if top is None: - raise RuntimeError('working outside of application context') - return getattr(top, name) - - -def _find_app(): - top = _app_ctx_stack.top - if top is None: - raise RuntimeError('working outside of application context') - return top.app - - -# context locals -_request_ctx_stack = LocalStack() -_app_ctx_stack = LocalStack() -current_app = LocalProxy(_find_app) -request = LocalProxy(partial(_lookup_req_object, 'request')) -session = LocalProxy(partial(_lookup_req_object, 'session')) -g = LocalProxy(partial(_lookup_app_object, 'g')) diff --git a/Linux_i686/lib/python2.7/site-packages/flask/helpers.py b/Linux_i686/lib/python2.7/site-packages/flask/helpers.py deleted file mode 100644 index 1e7c87f..0000000 --- a/Linux_i686/lib/python2.7/site-packages/flask/helpers.py +++ /dev/null @@ -1,849 +0,0 @@ -# -*- coding: utf-8 -*- -""" - flask.helpers - ~~~~~~~~~~~~~ - - Implements various helpers. - - :copyright: (c) 2011 by Armin Ronacher. - :license: BSD, see LICENSE for more details. -""" - -import os -import sys -import pkgutil -import posixpath -import mimetypes -from time import time -from zlib import adler32 -from threading import RLock -from werkzeug.routing import BuildError -from functools import update_wrapper - -try: - from werkzeug.urls import url_quote -except ImportError: - from urlparse import quote as url_quote - -from werkzeug.datastructures import Headers -from werkzeug.exceptions import NotFound - -# this was moved in 0.7 -try: - from werkzeug.wsgi import wrap_file -except ImportError: - from werkzeug.utils import wrap_file - -from jinja2 import FileSystemLoader - -from .signals import message_flashed -from .globals import session, _request_ctx_stack, _app_ctx_stack, \ - current_app, request -from ._compat import string_types, text_type - - -# sentinel -_missing = object() - - -# what separators does this operating system provide that are not a slash? -# this is used by the send_from_directory function to ensure that nobody is -# able to access files from outside the filesystem. -_os_alt_seps = list(sep for sep in [os.path.sep, os.path.altsep] - if sep not in (None, '/')) - - -def _endpoint_from_view_func(view_func): - """Internal helper that returns the default endpoint for a given - function. This always is the function name. - """ - assert view_func is not None, 'expected view func if endpoint ' \ - 'is not provided.' 
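    # the endpoint name is simply the function's __name__, e.g. a view
    # function ``def index():`` gets the endpoint 'index'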
- return view_func.__name__ - - -def stream_with_context(generator_or_function): - """Request contexts disappear when the response is started on the server. - This is done for efficiency reasons and to make it less likely to encounter - memory leaks with badly written WSGI middlewares. The downside is that if - you are using streamed responses, the generator cannot access request bound - information any more. - - This function however can help you keep the context around for longer:: - - from flask import stream_with_context, request, Response - - @app.route('/stream') - def streamed_response(): - @stream_with_context - def generate(): - yield 'Hello ' - yield request.args['name'] - yield '!' - return Response(generate()) - - Alternatively it can also be used around a specific generator:: - - from flask import stream_with_context, request, Response - - @app.route('/stream') - def streamed_response(): - def generate(): - yield 'Hello ' - yield request.args['name'] - yield '!' - return Response(stream_with_context(generate())) - - .. versionadded:: 0.9 - """ - try: - gen = iter(generator_or_function) - except TypeError: - def decorator(*args, **kwargs): - gen = generator_or_function() - return stream_with_context(gen) - return update_wrapper(decorator, generator_or_function) - - def generator(): - ctx = _request_ctx_stack.top - if ctx is None: - raise RuntimeError('Attempted to stream with context but ' - 'there was no context in the first place to keep around.') - with ctx: - # Dummy sentinel. Has to be inside the context block or we're - # not actually keeping the context around. - yield None - - # The try/finally is here so that if someone passes a WSGI level - # iterator in we're still running the cleanup logic. Generators - # don't need that because they are closed on their destruction - # automatically. - try: - for item in gen: - yield item - finally: - if hasattr(gen, 'close'): - gen.close() - - # The trick is to start the generator. Then the code execution runs until - # the first dummy None is yielded at which point the context was already - # pushed. This item is discarded. Then when the iteration continues the - # real generator is executed. - wrapped_g = generator() - next(wrapped_g) - return wrapped_g - - -def make_response(*args): - """Sometimes it is necessary to set additional headers in a view. Because - views do not have to return response objects but can return a value that - is converted into a response object by Flask itself, it becomes tricky to - add headers to it. This function can be called instead of using a return - and you will get a response object which you can use to attach headers. - - If view looked like this and you want to add a new header:: - - def index(): - return render_template('index.html', foo=42) - - You can now do something like this:: - - def index(): - response = make_response(render_template('index.html', foo=42)) - response.headers['X-Parachutes'] = 'parachutes are cool' - return response - - This function accepts the very same arguments you can return from a - view function. 
This for example creates a response with a 404 error - code:: - - response = make_response(render_template('not_found.html'), 404) - - The other use case of this function is to force the return value of a - view function into a response which is helpful with view - decorators:: - - response = make_response(view_function()) - response.headers['X-Parachutes'] = 'parachutes are cool' - - Internally this function does the following things: - - - if no arguments are passed, it creates a new response argument - - if one argument is passed, :meth:`flask.Flask.make_response` - is invoked with it. - - if more than one argument is passed, the arguments are passed - to the :meth:`flask.Flask.make_response` function as tuple. - - .. versionadded:: 0.6 - """ - if not args: - return current_app.response_class() - if len(args) == 1: - args = args[0] - return current_app.make_response(args) - - -def url_for(endpoint, **values): - """Generates a URL to the given endpoint with the method provided. - - Variable arguments that are unknown to the target endpoint are appended - to the generated URL as query arguments. If the value of a query argument - is `None`, the whole pair is skipped. In case blueprints are active - you can shortcut references to the same blueprint by prefixing the - local endpoint with a dot (``.``). - - This will reference the index function local to the current blueprint:: - - url_for('.index') - - For more information, head over to the :ref:`Quickstart `. - - To integrate applications, :class:`Flask` has a hook to intercept URL build - errors through :attr:`Flask.build_error_handler`. The `url_for` function - results in a :exc:`~werkzeug.routing.BuildError` when the current app does - not have a URL for the given endpoint and values. When it does, the - :data:`~flask.current_app` calls its :attr:`~Flask.build_error_handler` if - it is not `None`, which can return a string to use as the result of - `url_for` (instead of `url_for`'s default to raise the - :exc:`~werkzeug.routing.BuildError` exception) or re-raise the exception. - An example:: - - def external_url_handler(error, endpoint, **values): - "Looks up an external URL when `url_for` cannot build a URL." - # This is an example of hooking the build_error_handler. - # Here, lookup_url is some utility function you've built - # which looks up the endpoint in some external URL registry. - url = lookup_url(endpoint, **values) - if url is None: - # External lookup did not have a URL. - # Re-raise the BuildError, in context of original traceback. - exc_type, exc_value, tb = sys.exc_info() - if exc_value is error: - raise exc_type, exc_value, tb - else: - raise error - # url_for will use this result, instead of raising BuildError. - return url - - app.build_error_handler = external_url_handler - - Here, `error` is the instance of :exc:`~werkzeug.routing.BuildError`, and - `endpoint` and `**values` are the arguments passed into `url_for`. Note - that this is for building URLs outside the current application, and not for - handling 404 NotFound errors. - - .. versionadded:: 0.10 - The `_scheme` parameter was added. - - .. versionadded:: 0.9 - The `_anchor` and `_method` parameters were added. - - .. versionadded:: 0.9 - Calls :meth:`Flask.handle_build_error` on - :exc:`~werkzeug.routing.BuildError`. - - :param endpoint: the endpoint of the URL (name of the function) - :param values: the variable arguments of the URL rule - :param _external: if set to `True`, an absolute URL is generated. 
Server - address can be changed via `SERVER_NAME` configuration variable which - defaults to `localhost`. - :param _scheme: a string specifying the desired URL scheme. The `_external` - parameter must be set to `True` or a `ValueError` is raised. - :param _anchor: if provided this is added as anchor to the URL. - :param _method: if provided this explicitly specifies an HTTP method. - """ - appctx = _app_ctx_stack.top - reqctx = _request_ctx_stack.top - if appctx is None: - raise RuntimeError('Attempted to generate a URL without the ' - 'application context being pushed. This has to be ' - 'executed when application context is available.') - - # If request specific information is available we have some extra - # features that support "relative" urls. - if reqctx is not None: - url_adapter = reqctx.url_adapter - blueprint_name = request.blueprint - if not reqctx.request._is_old_module: - if endpoint[:1] == '.': - if blueprint_name is not None: - endpoint = blueprint_name + endpoint - else: - endpoint = endpoint[1:] - else: - # TODO: get rid of this deprecated functionality in 1.0 - if '.' not in endpoint: - if blueprint_name is not None: - endpoint = blueprint_name + '.' + endpoint - elif endpoint.startswith('.'): - endpoint = endpoint[1:] - external = values.pop('_external', False) - - # Otherwise go with the url adapter from the appctx and make - # the urls external by default. - else: - url_adapter = appctx.url_adapter - if url_adapter is None: - raise RuntimeError('Application was not able to create a URL ' - 'adapter for request independent URL generation. ' - 'You might be able to fix this by setting ' - 'the SERVER_NAME config variable.') - external = values.pop('_external', True) - - anchor = values.pop('_anchor', None) - method = values.pop('_method', None) - scheme = values.pop('_scheme', None) - appctx.app.inject_url_defaults(endpoint, values) - - if scheme is not None: - if not external: - raise ValueError('When specifying _scheme, _external must be True') - url_adapter.url_scheme = scheme - - try: - rv = url_adapter.build(endpoint, values, method=method, - force_external=external) - except BuildError as error: - # We need to inject the values again so that the app callback can - # deal with that sort of stuff. - values['_external'] = external - values['_anchor'] = anchor - values['_method'] = method - return appctx.app.handle_url_build_error(error, endpoint, values) - - if anchor is not None: - rv += '#' + url_quote(anchor) - return rv - - -def get_template_attribute(template_name, attribute): - """Loads a macro (or variable) a template exports. This can be used to - invoke a macro from within Python code. If you for example have a - template named `_cider.html` with the following contents: - - .. sourcecode:: html+jinja - - {% macro hello(name) %}Hello {{ name }}!{% endmacro %} - - You can access this from Python code like this:: - - hello = get_template_attribute('_cider.html', 'hello') - return hello('World') - - .. versionadded:: 0.2 - - :param template_name: the name of the template - :param attribute: the name of the variable of macro to access - """ - return getattr(current_app.jinja_env.get_template(template_name).module, - attribute) - - -def flash(message, category='message'): - """Flashes a message to the next request. In order to remove the - flashed message from the session and to display it to the user, - the template has to call :func:`get_flashed_messages`. - - .. versionchanged:: 0.3 - `category` parameter added. - - :param message: the message to be flashed. 
- :param category: the category for the message. The following values - are recommended: ``'message'`` for any kind of message, - ``'error'`` for errors, ``'info'`` for information - messages and ``'warning'`` for warnings. However any - kind of string can be used as category. - """ - # Original implementation: - # - # session.setdefault('_flashes', []).append((category, message)) - # - # This assumed that changes made to mutable structures in the session are - # are always in sync with the sess on object, which is not true for session - # implementations that use external storage for keeping their keys/values. - flashes = session.get('_flashes', []) - flashes.append((category, message)) - session['_flashes'] = flashes - message_flashed.send(current_app._get_current_object(), - message=message, category=category) - - -def get_flashed_messages(with_categories=False, category_filter=[]): - """Pulls all flashed messages from the session and returns them. - Further calls in the same request to the function will return - the same messages. By default just the messages are returned, - but when `with_categories` is set to `True`, the return value will - be a list of tuples in the form ``(category, message)`` instead. - - Filter the flashed messages to one or more categories by providing those - categories in `category_filter`. This allows rendering categories in - separate html blocks. The `with_categories` and `category_filter` - arguments are distinct: - - * `with_categories` controls whether categories are returned with message - text (`True` gives a tuple, where `False` gives just the message text). - * `category_filter` filters the messages down to only those matching the - provided categories. - - See :ref:`message-flashing-pattern` for examples. - - .. versionchanged:: 0.3 - `with_categories` parameter added. - - .. versionchanged:: 0.9 - `category_filter` parameter added. - - :param with_categories: set to `True` to also receive categories. - :param category_filter: whitelist of categories to limit return values - """ - flashes = _request_ctx_stack.top.flashes - if flashes is None: - _request_ctx_stack.top.flashes = flashes = session.pop('_flashes') \ - if '_flashes' in session else [] - if category_filter: - flashes = list(filter(lambda f: f[0] in category_filter, flashes)) - if not with_categories: - return [x[1] for x in flashes] - return flashes - - -def send_file(filename_or_fp, mimetype=None, as_attachment=False, - attachment_filename=None, add_etags=True, - cache_timeout=None, conditional=False): - """Sends the contents of a file to the client. This will use the - most efficient method available and configured. By default it will - try to use the WSGI server's file_wrapper support. Alternatively - you can set the application's :attr:`~Flask.use_x_sendfile` attribute - to ``True`` to directly emit an `X-Sendfile` header. This however - requires support of the underlying webserver for `X-Sendfile`. - - By default it will try to guess the mimetype for you, but you can - also explicitly provide one. For extra security you probably want - to send certain files as attachment (HTML for instance). The mimetype - guessing requires a `filename` or an `attachment_filename` to be - provided. - - Please never pass filenames to this function from user sources without - checking them first. Something like this is usually sufficient to - avoid security problems:: - - if '..' in filename or filename.startswith('/'): - abort(404) - - .. versionadded:: 0.2 - - .. 
versionadded:: 0.5 - The `add_etags`, `cache_timeout` and `conditional` parameters were - added. The default behavior is now to attach etags. - - .. versionchanged:: 0.7 - mimetype guessing and etag support for file objects was - deprecated because it was unreliable. Pass a filename if you are - able to, otherwise attach an etag yourself. This functionality - will be removed in Flask 1.0 - - .. versionchanged:: 0.9 - cache_timeout pulls its default from application config, when None. - - :param filename_or_fp: the filename of the file to send. This is - relative to the :attr:`~Flask.root_path` if a - relative path is specified. - Alternatively a file object might be provided - in which case `X-Sendfile` might not work and - fall back to the traditional method. Make sure - that the file pointer is positioned at the start - of data to send before calling :func:`send_file`. - :param mimetype: the mimetype of the file if provided, otherwise - auto detection happens. - :param as_attachment: set to `True` if you want to send this file with - a ``Content-Disposition: attachment`` header. - :param attachment_filename: the filename for the attachment if it - differs from the file's filename. - :param add_etags: set to `False` to disable attaching of etags. - :param conditional: set to `True` to enable conditional responses. - - :param cache_timeout: the timeout in seconds for the headers. When `None` - (default), this value is set by - :meth:`~Flask.get_send_file_max_age` of - :data:`~flask.current_app`. - """ - mtime = None - if isinstance(filename_or_fp, string_types): - filename = filename_or_fp - file = None - else: - from warnings import warn - file = filename_or_fp - filename = getattr(file, 'name', None) - - # XXX: this behavior is now deprecated because it was unreliable. - # removed in Flask 1.0 - if not attachment_filename and not mimetype \ - and isinstance(filename, string_types): - warn(DeprecationWarning('The filename support for file objects ' - 'passed to send_file is now deprecated. Pass an ' - 'attach_filename if you want mimetypes to be guessed.'), - stacklevel=2) - if add_etags: - warn(DeprecationWarning('In future flask releases etags will no ' - 'longer be generated for file objects passed to the send_file ' - 'function because this behavior was unreliable. 
Pass ' - 'filenames instead if possible, otherwise attach an etag ' - 'yourself based on another value'), stacklevel=2) - - if filename is not None: - if not os.path.isabs(filename): - filename = os.path.join(current_app.root_path, filename) - if mimetype is None and (filename or attachment_filename): - mimetype = mimetypes.guess_type(filename or attachment_filename)[0] - if mimetype is None: - mimetype = 'application/octet-stream' - - headers = Headers() - if as_attachment: - if attachment_filename is None: - if filename is None: - raise TypeError('filename unavailable, required for ' - 'sending as attachment') - attachment_filename = os.path.basename(filename) - headers.add('Content-Disposition', 'attachment', - filename=attachment_filename) - - if current_app.use_x_sendfile and filename: - if file is not None: - file.close() - headers['X-Sendfile'] = filename - headers['Content-Length'] = os.path.getsize(filename) - data = None - else: - if file is None: - file = open(filename, 'rb') - mtime = os.path.getmtime(filename) - headers['Content-Length'] = os.path.getsize(filename) - data = wrap_file(request.environ, file) - - rv = current_app.response_class(data, mimetype=mimetype, headers=headers, - direct_passthrough=True) - - # if we know the file modification date, we can store it as the - # the time of the last modification. - if mtime is not None: - rv.last_modified = int(mtime) - - rv.cache_control.public = True - if cache_timeout is None: - cache_timeout = current_app.get_send_file_max_age(filename) - if cache_timeout is not None: - rv.cache_control.max_age = cache_timeout - rv.expires = int(time() + cache_timeout) - - if add_etags and filename is not None: - rv.set_etag('flask-%s-%s-%s' % ( - os.path.getmtime(filename), - os.path.getsize(filename), - adler32( - filename.encode('utf-8') if isinstance(filename, text_type) - else filename - ) & 0xffffffff - )) - if conditional: - rv = rv.make_conditional(request) - # make sure we don't send x-sendfile for servers that - # ignore the 304 status code for x-sendfile. - if rv.status_code == 304: - rv.headers.pop('x-sendfile', None) - return rv - - -def safe_join(directory, filename): - """Safely join `directory` and `filename`. - - Example usage:: - - @app.route('/wiki/') - def wiki_page(filename): - filename = safe_join(app.config['WIKI_FOLDER'], filename) - with open(filename, 'rb') as fd: - content = fd.read() # Read and process the file content... - - :param directory: the base directory. - :param filename: the untrusted filename relative to that directory. - :raises: :class:`~werkzeug.exceptions.NotFound` if the resulting path - would fall out of `directory`. - """ - filename = posixpath.normpath(filename) - for sep in _os_alt_seps: - if sep in filename: - raise NotFound() - if os.path.isabs(filename) or \ - filename == '..' or \ - filename.startswith('../'): - raise NotFound() - return os.path.join(directory, filename) - - -def send_from_directory(directory, filename, **options): - """Send a file from a given directory with :func:`send_file`. This - is a secure way to quickly expose static files from an upload folder - or something similar. - - Example usage:: - - @app.route('/uploads/') - def download_file(filename): - return send_from_directory(app.config['UPLOAD_FOLDER'], - filename, as_attachment=True) - - .. 
admonition:: Sending files and Performance - - It is strongly recommended to activate either `X-Sendfile` support in - your webserver or (if no authentication happens) to tell the webserver - to serve files for the given path on its own without calling into the - web application for improved performance. - - .. versionadded:: 0.5 - - :param directory: the directory where all the files are stored. - :param filename: the filename relative to that directory to - download. - :param options: optional keyword arguments that are directly - forwarded to :func:`send_file`. - """ - filename = safe_join(directory, filename) - if not os.path.isfile(filename): - raise NotFound() - options.setdefault('conditional', True) - return send_file(filename, **options) - - -def get_root_path(import_name): - """Returns the path to a package or cwd if that cannot be found. This - returns the path of a package or the folder that contains a module. - - Not to be confused with the package path returned by :func:`find_package`. - """ - # Module already imported and has a file attribute. Use that first. - mod = sys.modules.get(import_name) - if mod is not None and hasattr(mod, '__file__'): - return os.path.dirname(os.path.abspath(mod.__file__)) - - # Next attempt: check the loader. - loader = pkgutil.get_loader(import_name) - - # Loader does not exist or we're referring to an unloaded main module - # or a main module without path (interactive sessions), go with the - # current working directory. - if loader is None or import_name == '__main__': - return os.getcwd() - - # For .egg, zipimporter does not have get_filename until Python 2.7. - # Some other loaders might exhibit the same behavior. - if hasattr(loader, 'get_filename'): - filepath = loader.get_filename(import_name) - else: - # Fall back to imports. - __import__(import_name) - filepath = sys.modules[import_name].__file__ - - # filepath is import_name.py for a module, or __init__.py for a package. - return os.path.dirname(os.path.abspath(filepath)) - - -def find_package(import_name): - """Finds a package and returns the prefix (or None if the package is - not installed) as well as the folder that contains the package or - module as a tuple. The package path returned is the module that would - have to be added to the pythonpath in order to make it possible to - import the module. The prefix is the path below which a UNIX like - folder structure exists (lib, share etc.). - """ - root_mod_name = import_name.split('.')[0] - loader = pkgutil.get_loader(root_mod_name) - if loader is None or import_name == '__main__': - # import name is not found, or interactive/main module - package_path = os.getcwd() - else: - # For .egg, zipimporter does not have get_filename until Python 2.7. - if hasattr(loader, 'get_filename'): - filename = loader.get_filename(root_mod_name) - elif hasattr(loader, 'archive'): - # zipimporter's loader.archive points to the .egg or .zip - # archive filename is dropped in call to dirname below. - filename = loader.archive - else: - # At least one loader is missing both get_filename and archive: - # Google App Engine's HardenedModulesHook - # - # Fall back to imports. 
- __import__(import_name) - filename = sys.modules[import_name].__file__ - package_path = os.path.abspath(os.path.dirname(filename)) - # package_path ends with __init__.py for a package - if loader.is_package(root_mod_name): - package_path = os.path.dirname(package_path) - - site_parent, site_folder = os.path.split(package_path) - py_prefix = os.path.abspath(sys.prefix) - if package_path.startswith(py_prefix): - return py_prefix, package_path - elif site_folder.lower() == 'site-packages': - parent, folder = os.path.split(site_parent) - # Windows like installations - if folder.lower() == 'lib': - base_dir = parent - # UNIX like installations - elif os.path.basename(parent).lower() == 'lib': - base_dir = os.path.dirname(parent) - else: - base_dir = site_parent - return base_dir, package_path - return None, package_path - - -class locked_cached_property(object): - """A decorator that converts a function into a lazy property. The - function wrapped is called the first time to retrieve the result - and then that calculated result is used the next time you access - the value. Works like the one in Werkzeug but has a lock for - thread safety. - """ - - def __init__(self, func, name=None, doc=None): - self.__name__ = name or func.__name__ - self.__module__ = func.__module__ - self.__doc__ = doc or func.__doc__ - self.func = func - self.lock = RLock() - - def __get__(self, obj, type=None): - if obj is None: - return self - with self.lock: - value = obj.__dict__.get(self.__name__, _missing) - if value is _missing: - value = self.func(obj) - obj.__dict__[self.__name__] = value - return value - - -class _PackageBoundObject(object): - - def __init__(self, import_name, template_folder=None): - #: The name of the package or module. Do not change this once - #: it was set by the constructor. - self.import_name = import_name - - #: location of the templates. `None` if templates should not be - #: exposed. - self.template_folder = template_folder - - #: Where is the app root located? - self.root_path = get_root_path(self.import_name) - - self._static_folder = None - self._static_url_path = None - - def _get_static_folder(self): - if self._static_folder is not None: - return os.path.join(self.root_path, self._static_folder) - def _set_static_folder(self, value): - self._static_folder = value - static_folder = property(_get_static_folder, _set_static_folder) - del _get_static_folder, _set_static_folder - - def _get_static_url_path(self): - if self._static_url_path is None: - if self.static_folder is None: - return None - return '/' + os.path.basename(self.static_folder) - return self._static_url_path - def _set_static_url_path(self, value): - self._static_url_path = value - static_url_path = property(_get_static_url_path, _set_static_url_path) - del _get_static_url_path, _set_static_url_path - - @property - def has_static_folder(self): - """This is `True` if the package bound object's container has a - folder named ``'static'``. - - .. versionadded:: 0.5 - """ - return self.static_folder is not None - - @locked_cached_property - def jinja_loader(self): - """The Jinja loader for this package bound object. - - .. versionadded:: 0.5 - """ - if self.template_folder is not None: - return FileSystemLoader(os.path.join(self.root_path, - self.template_folder)) - - def get_send_file_max_age(self, filename): - """Provides default cache_timeout for the :func:`send_file` functions. - - By default, this function returns ``SEND_FILE_MAX_AGE_DEFAULT`` from - the configuration of :data:`~flask.current_app`. 
- - Static file functions such as :func:`send_from_directory` use this - function, and :func:`send_file` calls this function on - :data:`~flask.current_app` when the given cache_timeout is `None`. If a - cache_timeout is given in :func:`send_file`, that timeout is used; - otherwise, this method is called. - - This allows subclasses to change the behavior when sending files based - on the filename. For example, to set the cache timeout for .js files - to 60 seconds:: - - class MyFlask(flask.Flask): - def get_send_file_max_age(self, name): - if name.lower().endswith('.js'): - return 60 - return flask.Flask.get_send_file_max_age(self, name) - - .. versionadded:: 0.9 - """ - return current_app.config['SEND_FILE_MAX_AGE_DEFAULT'] - - def send_static_file(self, filename): - """Function used internally to send static files from the static - folder to the browser. - - .. versionadded:: 0.5 - """ - if not self.has_static_folder: - raise RuntimeError('No static folder for this object') - # Ensure get_send_file_max_age is called in all cases. - # Here, we ensure get_send_file_max_age is called for Blueprints. - cache_timeout = self.get_send_file_max_age(filename) - return send_from_directory(self.static_folder, filename, - cache_timeout=cache_timeout) - - def open_resource(self, resource, mode='rb'): - """Opens a resource from the application's resource folder. To see - how this works, consider the following folder structure:: - - /myapplication.py - /schema.sql - /static - /style.css - /templates - /layout.html - /index.html - - If you want to open the `schema.sql` file you would do the - following:: - - with app.open_resource('schema.sql') as f: - contents = f.read() - do_something_with(contents) - - :param resource: the name of the resource. To access resources within - subfolders use forward slashes as separator. - :param mode: resource file opening mode, default is 'rb'. - """ - if mode not in ('r', 'rb'): - raise ValueError('Resources can only be opened for reading') - return open(os.path.join(self.root_path, resource), mode) diff --git a/Linux_i686/lib/python2.7/site-packages/flask/json.py b/Linux_i686/lib/python2.7/site-packages/flask/json.py deleted file mode 100644 index 45ba324..0000000 --- a/Linux_i686/lib/python2.7/site-packages/flask/json.py +++ /dev/null @@ -1,243 +0,0 @@ -# -*- coding: utf-8 -*- -""" - flask.jsonimpl - ~~~~~~~~~~~~~~ - - Implementation helpers for the JSON support in Flask. - - :copyright: (c) 2012 by Armin Ronacher. - :license: BSD, see LICENSE for more details. -""" -import io -import uuid -from datetime import datetime -from .globals import current_app, request -from ._compat import text_type, PY2 - -from werkzeug.http import http_date -from jinja2 import Markup - -# Use the same json implementation as itsdangerous on which we -# depend anyways. -try: - from itsdangerous import simplejson as _json -except ImportError: - from itsdangerous import json as _json - - -# figure out if simplejson escapes slashes. This behavior was changed -# from one version to another without reason. 
-_slash_escape = '\\/' not in _json.dumps('/') - - -__all__ = ['dump', 'dumps', 'load', 'loads', 'htmlsafe_dump', - 'htmlsafe_dumps', 'JSONDecoder', 'JSONEncoder', - 'jsonify'] - - -def _wrap_reader_for_text(fp, encoding): - if isinstance(fp.read(0), bytes): - fp = io.TextIOWrapper(io.BufferedReader(fp), encoding) - return fp - - -def _wrap_writer_for_text(fp, encoding): - try: - fp.write('') - except TypeError: - fp = io.TextIOWrapper(fp, encoding) - return fp - - -class JSONEncoder(_json.JSONEncoder): - """The default Flask JSON encoder. This one extends the default simplejson - encoder by also supporting ``datetime`` objects, ``UUID`` as well as - ``Markup`` objects which are serialized as RFC 822 datetime strings (same - as the HTTP date format). In order to support more data types override the - :meth:`default` method. - """ - - def default(self, o): - """Implement this method in a subclass such that it returns a - serializable object for ``o``, or calls the base implementation (to - raise a ``TypeError``). - - For example, to support arbitrary iterators, you could implement - default like this:: - - def default(self, o): - try: - iterable = iter(o) - except TypeError: - pass - else: - return list(iterable) - return JSONEncoder.default(self, o) - """ - if isinstance(o, datetime): - return http_date(o) - if isinstance(o, uuid.UUID): - return str(o) - if hasattr(o, '__html__'): - return text_type(o.__html__()) - return _json.JSONEncoder.default(self, o) - - -class JSONDecoder(_json.JSONDecoder): - """The default JSON decoder. This one does not change the behavior from - the default simplejson encoder. Consult the :mod:`json` documentation - for more information. This decoder is not only used for the load - functions of this module but also :attr:`~flask.Request`. - """ - - -def _dump_arg_defaults(kwargs): - """Inject default arguments for dump functions.""" - if current_app: - kwargs.setdefault('cls', current_app.json_encoder) - if not current_app.config['JSON_AS_ASCII']: - kwargs.setdefault('ensure_ascii', False) - kwargs.setdefault('sort_keys', current_app.config['JSON_SORT_KEYS']) - else: - kwargs.setdefault('sort_keys', True) - kwargs.setdefault('cls', JSONEncoder) - - -def _load_arg_defaults(kwargs): - """Inject default arguments for load functions.""" - if current_app: - kwargs.setdefault('cls', current_app.json_decoder) - else: - kwargs.setdefault('cls', JSONDecoder) - - -def dumps(obj, **kwargs): - """Serialize ``obj`` to a JSON formatted ``str`` by using the application's - configured encoder (:attr:`~flask.Flask.json_encoder`) if there is an - application on the stack. - - This function can return ``unicode`` strings or ascii-only bytestrings by - default which coerce into unicode strings automatically. That behavior by - default is controlled by the ``JSON_AS_ASCII`` configuration variable - and can be overriden by the simplejson ``ensure_ascii`` parameter. 
- """ - _dump_arg_defaults(kwargs) - encoding = kwargs.pop('encoding', None) - rv = _json.dumps(obj, **kwargs) - if encoding is not None and isinstance(rv, text_type): - rv = rv.encode(encoding) - return rv - - -def dump(obj, fp, **kwargs): - """Like :func:`dumps` but writes into a file object.""" - _dump_arg_defaults(kwargs) - encoding = kwargs.pop('encoding', None) - if encoding is not None: - fp = _wrap_writer_for_text(fp, encoding) - _json.dump(obj, fp, **kwargs) - - -def loads(s, **kwargs): - """Unserialize a JSON object from a string ``s`` by using the application's - configured decoder (:attr:`~flask.Flask.json_decoder`) if there is an - application on the stack. - """ - _load_arg_defaults(kwargs) - if isinstance(s, bytes): - s = s.decode(kwargs.pop('encoding', None) or 'utf-8') - return _json.loads(s, **kwargs) - - -def load(fp, **kwargs): - """Like :func:`loads` but reads from a file object. - """ - _load_arg_defaults(kwargs) - if not PY2: - fp = _wrap_reader_for_text(fp, kwargs.pop('encoding', None) or 'utf-8') - return _json.load(fp, **kwargs) - - -def htmlsafe_dumps(obj, **kwargs): - """Works exactly like :func:`dumps` but is safe for use in ``') - self.assert_equal(rv, u'"\\u003c/script\\u003e"') - self.assert_equal(type(rv), text_type) - rv = render('{{ ""|tojson }}') - self.assert_equal(rv, '"\\u003c/script\\u003e"') - rv = render('{{ "<\0/script>"|tojson }}') - self.assert_equal(rv, '"\\u003c\\u0000/script\\u003e"') - rv = render('{{ "' % ( - render_traceback(self, full=full), - self.render_as_text().decode('utf-8', 'replace') - ) - - @property - def is_template_syntax_error(self): - """`True` if this is a template syntax error.""" - return isinstance(self.exc_value, TemplateSyntaxError) - - @property - def exc_info(self): - """Exception info tuple with a proxy around the frame objects.""" - return self.exc_type, self.exc_value, self.frames[0] - - @property - def standard_exc_info(self): - """Standard python exc_info for re-raising""" - tb = self.frames[0] - # the frame will be an actual traceback (or transparent proxy) if - # we are on pypy or a python implementation with support for tproxy - if type(tb) is not TracebackType: - tb = tb.tb - return self.exc_type, self.exc_value, tb - - -def make_traceback(exc_info, source_hint=None): - """Creates a processed traceback object from the exc_info.""" - exc_type, exc_value, tb = exc_info - if isinstance(exc_value, TemplateSyntaxError): - exc_info = translate_syntax_error(exc_value, source_hint) - initial_skip = 0 - else: - initial_skip = 1 - return translate_exception(exc_info, initial_skip) - - -def translate_syntax_error(error, source=None): - """Rewrites a syntax error to please traceback systems.""" - error.source = source - error.translated = True - exc_info = (error.__class__, error, None) - filename = error.filename - if filename is None: - filename = '' - return fake_exc_info(exc_info, filename, error.lineno) - - -def translate_exception(exc_info, initial_skip=0): - """If passed an exc_info it will automatically rewrite the exceptions - all the way down to the correct line numbers and frames. - """ - tb = exc_info[2] - frames = [] - - # skip some internal frames if wanted - for x in range(initial_skip): - if tb is not None: - tb = tb.tb_next - initial_tb = tb - - while tb is not None: - # skip frames decorated with @internalcode. These are internal - # calls we can't avoid and that are useless in template debugging - # output. 
- if tb.tb_frame.f_code in internal_code: - tb = tb.tb_next - continue - - # save a reference to the next frame if we override the current - # one with a faked one. - next = tb.tb_next - - # fake template exceptions - template = tb.tb_frame.f_globals.get('__jinja_template__') - if template is not None: - lineno = template.get_corresponding_lineno(tb.tb_lineno) - tb = fake_exc_info(exc_info[:2] + (tb,), template.filename, - lineno)[2] - - frames.append(make_frame_proxy(tb)) - tb = next - - # if we don't have any exceptions in the frames left, we have to - # reraise it unchanged. - # XXX: can we backup here? when could this happen? - if not frames: - reraise(exc_info[0], exc_info[1], exc_info[2]) - - return ProcessedTraceback(exc_info[0], exc_info[1], frames) - - -def fake_exc_info(exc_info, filename, lineno): - """Helper for `translate_exception`.""" - exc_type, exc_value, tb = exc_info - - # figure the real context out - if tb is not None: - real_locals = tb.tb_frame.f_locals.copy() - ctx = real_locals.get('context') - if ctx: - locals = ctx.get_all() - else: - locals = {} - for name, value in iteritems(real_locals): - if name.startswith('l_') and value is not missing: - locals[name[2:]] = value - - # if there is a local called __jinja_exception__, we get - # rid of it to not break the debug functionality. - locals.pop('__jinja_exception__', None) - else: - locals = {} - - # assamble fake globals we need - globals = { - '__name__': filename, - '__file__': filename, - '__jinja_exception__': exc_info[:2], - - # we don't want to keep the reference to the template around - # to not cause circular dependencies, but we mark it as Jinja - # frame for the ProcessedTraceback - '__jinja_template__': None - } - - # and fake the exception - code = compile('\n' * (lineno - 1) + raise_helper, filename, 'exec') - - # if it's possible, change the name of the code. This won't work - # on some python environments such as google appengine - try: - if tb is None: - location = 'template' - else: - function = tb.tb_frame.f_code.co_name - if function == 'root': - location = 'top-level template code' - elif function.startswith('block_'): - location = 'block "%s"' % function[6:] - else: - location = 'template' - code = code_type(0, code.co_nlocals, code.co_stacksize, - code.co_flags, code.co_code, code.co_consts, - code.co_names, code.co_varnames, filename, - location, code.co_firstlineno, - code.co_lnotab, (), ()) - except: - pass - - # execute the code and catch the new traceback - try: - exec(code, globals, locals) - except: - exc_info = sys.exc_info() - new_tb = exc_info[2].tb_next - - # return without this frame - return exc_info[:2] + (new_tb,) - - -def _init_ugly_crap(): - """This function implements a few ugly things so that we can patch the - traceback objects. The function returned allows resetting `tb_next` on - any python traceback object. 
Do not attempt to use this on non cpython - interpreters - """ - import ctypes - from types import TracebackType - - # figure out side of _Py_ssize_t - if hasattr(ctypes.pythonapi, 'Py_InitModule4_64'): - _Py_ssize_t = ctypes.c_int64 - else: - _Py_ssize_t = ctypes.c_int - - # regular python - class _PyObject(ctypes.Structure): - pass - _PyObject._fields_ = [ - ('ob_refcnt', _Py_ssize_t), - ('ob_type', ctypes.POINTER(_PyObject)) - ] - - # python with trace - if hasattr(sys, 'getobjects'): - class _PyObject(ctypes.Structure): - pass - _PyObject._fields_ = [ - ('_ob_next', ctypes.POINTER(_PyObject)), - ('_ob_prev', ctypes.POINTER(_PyObject)), - ('ob_refcnt', _Py_ssize_t), - ('ob_type', ctypes.POINTER(_PyObject)) - ] - - class _Traceback(_PyObject): - pass - _Traceback._fields_ = [ - ('tb_next', ctypes.POINTER(_Traceback)), - ('tb_frame', ctypes.POINTER(_PyObject)), - ('tb_lasti', ctypes.c_int), - ('tb_lineno', ctypes.c_int) - ] - - def tb_set_next(tb, next): - """Set the tb_next attribute of a traceback object.""" - if not (isinstance(tb, TracebackType) and - (next is None or isinstance(next, TracebackType))): - raise TypeError('tb_set_next arguments must be traceback objects') - obj = _Traceback.from_address(id(tb)) - if tb.tb_next is not None: - old = _Traceback.from_address(id(tb.tb_next)) - old.ob_refcnt -= 1 - if next is None: - obj.tb_next = ctypes.POINTER(_Traceback)() - else: - next = _Traceback.from_address(id(next)) - next.ob_refcnt += 1 - obj.tb_next = ctypes.pointer(next) - - return tb_set_next - - -# try to get a tb_set_next implementation if we don't have transparent -# proxies. -tb_set_next = None -if tproxy is None: - try: - tb_set_next = _init_ugly_crap() - except: - pass - del _init_ugly_crap diff --git a/Linux_i686/lib/python2.7/site-packages/jinja2/defaults.py b/Linux_i686/lib/python2.7/site-packages/jinja2/defaults.py deleted file mode 100644 index a27cb80..0000000 --- a/Linux_i686/lib/python2.7/site-packages/jinja2/defaults.py +++ /dev/null @@ -1,43 +0,0 @@ -# -*- coding: utf-8 -*- -""" - jinja2.defaults - ~~~~~~~~~~~~~~~ - - Jinja default filters and tags. - - :copyright: (c) 2010 by the Jinja Team. - :license: BSD, see LICENSE for more details. -""" -from jinja2._compat import range_type -from jinja2.utils import generate_lorem_ipsum, Cycler, Joiner - - -# defaults for the parser / lexer -BLOCK_START_STRING = '{%' -BLOCK_END_STRING = '%}' -VARIABLE_START_STRING = '{{' -VARIABLE_END_STRING = '}}' -COMMENT_START_STRING = '{#' -COMMENT_END_STRING = '#}' -LINE_STATEMENT_PREFIX = None -LINE_COMMENT_PREFIX = None -TRIM_BLOCKS = False -LSTRIP_BLOCKS = False -NEWLINE_SEQUENCE = '\n' -KEEP_TRAILING_NEWLINE = False - - -# default filters, tests and namespace -from jinja2.filters import FILTERS as DEFAULT_FILTERS -from jinja2.tests import TESTS as DEFAULT_TESTS -DEFAULT_NAMESPACE = { - 'range': range_type, - 'dict': lambda **kw: kw, - 'lipsum': generate_lorem_ipsum, - 'cycler': Cycler, - 'joiner': Joiner -} - - -# export all constants -__all__ = tuple(x for x in locals().keys() if x.isupper()) diff --git a/Linux_i686/lib/python2.7/site-packages/jinja2/environment.py b/Linux_i686/lib/python2.7/site-packages/jinja2/environment.py deleted file mode 100644 index 45fabad..0000000 --- a/Linux_i686/lib/python2.7/site-packages/jinja2/environment.py +++ /dev/null @@ -1,1191 +0,0 @@ -# -*- coding: utf-8 -*- -""" - jinja2.environment - ~~~~~~~~~~~~~~~~~~ - - Provides a class that holds runtime and parsing time options. - - :copyright: (c) 2010 by the Jinja Team. 
- :license: BSD, see LICENSE for more details. -""" -import os -import sys -from jinja2 import nodes -from jinja2.defaults import BLOCK_START_STRING, \ - BLOCK_END_STRING, VARIABLE_START_STRING, VARIABLE_END_STRING, \ - COMMENT_START_STRING, COMMENT_END_STRING, LINE_STATEMENT_PREFIX, \ - LINE_COMMENT_PREFIX, TRIM_BLOCKS, NEWLINE_SEQUENCE, \ - DEFAULT_FILTERS, DEFAULT_TESTS, DEFAULT_NAMESPACE, \ - KEEP_TRAILING_NEWLINE, LSTRIP_BLOCKS -from jinja2.lexer import get_lexer, TokenStream -from jinja2.parser import Parser -from jinja2.nodes import EvalContext -from jinja2.optimizer import optimize -from jinja2.compiler import generate -from jinja2.runtime import Undefined, new_context -from jinja2.exceptions import TemplateSyntaxError, TemplateNotFound, \ - TemplatesNotFound, TemplateRuntimeError -from jinja2.utils import import_string, LRUCache, Markup, missing, \ - concat, consume, internalcode -from jinja2._compat import imap, ifilter, string_types, iteritems, \ - text_type, reraise, implements_iterator, implements_to_string, \ - get_next, encode_filename, PY2, PYPY -from functools import reduce - - -# for direct template usage we have up to ten living environments -_spontaneous_environments = LRUCache(10) - -# the function to create jinja traceback objects. This is dynamically -# imported on the first exception in the exception handler. -_make_traceback = None - - -def get_spontaneous_environment(*args): - """Return a new spontaneous environment. A spontaneous environment is an - unnamed and unaccessible (in theory) environment that is used for - templates generated from a string and not from the file system. - """ - try: - env = _spontaneous_environments.get(args) - except TypeError: - return Environment(*args) - if env is not None: - return env - _spontaneous_environments[args] = env = Environment(*args) - env.shared = True - return env - - -def create_cache(size): - """Return the cache class for the given size.""" - if size == 0: - return None - if size < 0: - return {} - return LRUCache(size) - - -def copy_cache(cache): - """Create an empty copy of the given cache.""" - if cache is None: - return None - elif type(cache) is dict: - return {} - return LRUCache(cache.capacity) - - -def load_extensions(environment, extensions): - """Load the extensions from the list and bind it to the environment. - Returns a dict of instantiated environments. - """ - result = {} - for extension in extensions: - if isinstance(extension, string_types): - extension = import_string(extension) - result[extension.identifier] = extension(environment) - return result - - -def _environment_sanity_check(environment): - """Perform a sanity check on the environment.""" - assert issubclass(environment.undefined, Undefined), 'undefined must ' \ - 'be a subclass of undefined because filters depend on it.' - assert environment.block_start_string != \ - environment.variable_start_string != \ - environment.comment_start_string, 'block, variable and comment ' \ - 'start strings must be different' - assert environment.newline_sequence in ('\r', '\r\n', '\n'), \ - 'newline_sequence set to unknown line ending string.' - return environment - - -class Environment(object): - r"""The core component of Jinja is the `Environment`. It contains - important shared variables like configuration, filters, tests, - globals and others. Instances of this class may be modified if - they are not shared and if no template was loaded so far. 
- Modifications on environments after the first template was loaded - will lead to surprising effects and undefined behavior. - - Here the possible initialization parameters: - - `block_start_string` - The string marking the begin of a block. Defaults to ``'{%'``. - - `block_end_string` - The string marking the end of a block. Defaults to ``'%}'``. - - `variable_start_string` - The string marking the begin of a print statement. - Defaults to ``'{{'``. - - `variable_end_string` - The string marking the end of a print statement. Defaults to - ``'}}'``. - - `comment_start_string` - The string marking the begin of a comment. Defaults to ``'{#'``. - - `comment_end_string` - The string marking the end of a comment. Defaults to ``'#}'``. - - `line_statement_prefix` - If given and a string, this will be used as prefix for line based - statements. See also :ref:`line-statements`. - - `line_comment_prefix` - If given and a string, this will be used as prefix for line based - based comments. See also :ref:`line-statements`. - - .. versionadded:: 2.2 - - `trim_blocks` - If this is set to ``True`` the first newline after a block is - removed (block, not variable tag!). Defaults to `False`. - - `lstrip_blocks` - If this is set to ``True`` leading spaces and tabs are stripped - from the start of a line to a block. Defaults to `False`. - - `newline_sequence` - The sequence that starts a newline. Must be one of ``'\r'``, - ``'\n'`` or ``'\r\n'``. The default is ``'\n'`` which is a - useful default for Linux and OS X systems as well as web - applications. - - `keep_trailing_newline` - Preserve the trailing newline when rendering templates. - The default is ``False``, which causes a single newline, - if present, to be stripped from the end of the template. - - .. versionadded:: 2.7 - - `extensions` - List of Jinja extensions to use. This can either be import paths - as strings or extension classes. For more information have a - look at :ref:`the extensions documentation `. - - `optimized` - should the optimizer be enabled? Default is `True`. - - `undefined` - :class:`Undefined` or a subclass of it that is used to represent - undefined values in the template. - - `finalize` - A callable that can be used to process the result of a variable - expression before it is output. For example one can convert - `None` implicitly into an empty string here. - - `autoescape` - If set to true the XML/HTML autoescaping feature is enabled by - default. For more details about auto escaping see - :class:`~jinja2.utils.Markup`. As of Jinja 2.4 this can also - be a callable that is passed the template name and has to - return `True` or `False` depending on autoescape should be - enabled by default. - - .. versionchanged:: 2.4 - `autoescape` can now be a function - - `loader` - The template loader for this environment. - - `cache_size` - The size of the cache. Per default this is ``50`` which means - that if more than 50 templates are loaded the loader will clean - out the least recently used template. If the cache size is set to - ``0`` templates are recompiled all the time, if the cache size is - ``-1`` the cache will not be cleaned. - - `auto_reload` - Some loaders load templates from locations where the template - sources may change (ie: file system or database). If - `auto_reload` is set to `True` (default) every time a template is - requested the loader checks if the source changed and if yes, it - will reload the template. For higher performance it's possible to - disable that. 
- - `bytecode_cache` - If set to a bytecode cache object, this object will provide a - cache for the internal Jinja bytecode so that templates don't - have to be parsed if they were not changed. - - See :ref:`bytecode-cache` for more information. - """ - - #: if this environment is sandboxed. Modifying this variable won't make - #: the environment sandboxed though. For a real sandboxed environment - #: have a look at jinja2.sandbox. This flag alone controls the code - #: generation by the compiler. - sandboxed = False - - #: True if the environment is just an overlay - overlayed = False - - #: the environment this environment is linked to if it is an overlay - linked_to = None - - #: shared environments have this set to `True`. A shared environment - #: must not be modified - shared = False - - #: these are currently EXPERIMENTAL undocumented features. - exception_handler = None - exception_formatter = None - - def __init__(self, - block_start_string=BLOCK_START_STRING, - block_end_string=BLOCK_END_STRING, - variable_start_string=VARIABLE_START_STRING, - variable_end_string=VARIABLE_END_STRING, - comment_start_string=COMMENT_START_STRING, - comment_end_string=COMMENT_END_STRING, - line_statement_prefix=LINE_STATEMENT_PREFIX, - line_comment_prefix=LINE_COMMENT_PREFIX, - trim_blocks=TRIM_BLOCKS, - lstrip_blocks=LSTRIP_BLOCKS, - newline_sequence=NEWLINE_SEQUENCE, - keep_trailing_newline=KEEP_TRAILING_NEWLINE, - extensions=(), - optimized=True, - undefined=Undefined, - finalize=None, - autoescape=False, - loader=None, - cache_size=50, - auto_reload=True, - bytecode_cache=None): - # !!Important notice!! - # The constructor accepts quite a few arguments that should be - # passed by keyword rather than position. However it's important to - # not change the order of arguments because it's used at least - # internally in those cases: - # - spontaneous environments (i18n extension and Template) - # - unittests - # If parameter changes are required only add parameters at the end - # and don't change the arguments (or the defaults!) of the arguments - # existing already. - - # lexer / parser information - self.block_start_string = block_start_string - self.block_end_string = block_end_string - self.variable_start_string = variable_start_string - self.variable_end_string = variable_end_string - self.comment_start_string = comment_start_string - self.comment_end_string = comment_end_string - self.line_statement_prefix = line_statement_prefix - self.line_comment_prefix = line_comment_prefix - self.trim_blocks = trim_blocks - self.lstrip_blocks = lstrip_blocks - self.newline_sequence = newline_sequence - self.keep_trailing_newline = keep_trailing_newline - - # runtime information - self.undefined = undefined - self.optimized = optimized - self.finalize = finalize - self.autoescape = autoescape - - # defaults - self.filters = DEFAULT_FILTERS.copy() - self.tests = DEFAULT_TESTS.copy() - self.globals = DEFAULT_NAMESPACE.copy() - - # set the loader provided - self.loader = loader - self.cache = create_cache(cache_size) - self.bytecode_cache = bytecode_cache - self.auto_reload = auto_reload - - # load extensions - self.extensions = load_extensions(self, extensions) - - _environment_sanity_check(self) - - def add_extension(self, extension): - """Adds an extension after the environment was created. - - .. versionadded:: 2.5 - """ - self.extensions.update(load_extensions(self, [extension])) - - def extend(self, **attributes): - """Add the items to the instance of the environment if they do not exist - yet. 
This is used by :ref:`extensions ` to register - callbacks and configuration values without breaking inheritance. - """ - for key, value in iteritems(attributes): - if not hasattr(self, key): - setattr(self, key, value) - - def overlay(self, block_start_string=missing, block_end_string=missing, - variable_start_string=missing, variable_end_string=missing, - comment_start_string=missing, comment_end_string=missing, - line_statement_prefix=missing, line_comment_prefix=missing, - trim_blocks=missing, lstrip_blocks=missing, - extensions=missing, optimized=missing, - undefined=missing, finalize=missing, autoescape=missing, - loader=missing, cache_size=missing, auto_reload=missing, - bytecode_cache=missing): - """Create a new overlay environment that shares all the data with the - current environment except of cache and the overridden attributes. - Extensions cannot be removed for an overlayed environment. An overlayed - environment automatically gets all the extensions of the environment it - is linked to plus optional extra extensions. - - Creating overlays should happen after the initial environment was set - up completely. Not all attributes are truly linked, some are just - copied over so modifications on the original environment may not shine - through. - """ - args = dict(locals()) - del args['self'], args['cache_size'], args['extensions'] - - rv = object.__new__(self.__class__) - rv.__dict__.update(self.__dict__) - rv.overlayed = True - rv.linked_to = self - - for key, value in iteritems(args): - if value is not missing: - setattr(rv, key, value) - - if cache_size is not missing: - rv.cache = create_cache(cache_size) - else: - rv.cache = copy_cache(self.cache) - - rv.extensions = {} - for key, value in iteritems(self.extensions): - rv.extensions[key] = value.bind(rv) - if extensions is not missing: - rv.extensions.update(load_extensions(rv, extensions)) - - return _environment_sanity_check(rv) - - lexer = property(get_lexer, doc="The lexer for this environment.") - - def iter_extensions(self): - """Iterates over the extensions by priority.""" - return iter(sorted(self.extensions.values(), - key=lambda x: x.priority)) - - def getitem(self, obj, argument): - """Get an item or attribute of an object but prefer the item.""" - try: - return obj[argument] - except (TypeError, LookupError): - if isinstance(argument, string_types): - try: - attr = str(argument) - except Exception: - pass - else: - try: - return getattr(obj, attr) - except AttributeError: - pass - return self.undefined(obj=obj, name=argument) - - def getattr(self, obj, attribute): - """Get an item or attribute of an object but prefer the attribute. - Unlike :meth:`getitem` the attribute *must* be a bytestring. - """ - try: - return getattr(obj, attribute) - except AttributeError: - pass - try: - return obj[attribute] - except (TypeError, LookupError, AttributeError): - return self.undefined(obj=obj, name=attribute) - - def call_filter(self, name, value, args=None, kwargs=None, - context=None, eval_ctx=None): - """Invokes a filter on a value the same way the compiler does it. - - .. 
versionadded:: 2.7 - """ - func = self.filters.get(name) - if func is None: - raise TemplateRuntimeError('no filter named %r' % name) - args = [value] + list(args or ()) - if getattr(func, 'contextfilter', False): - if context is None: - raise TemplateRuntimeError('Attempted to invoke context ' - 'filter without context') - args.insert(0, context) - elif getattr(func, 'evalcontextfilter', False): - if eval_ctx is None: - if context is not None: - eval_ctx = context.eval_ctx - else: - eval_ctx = EvalContext(self) - args.insert(0, eval_ctx) - elif getattr(func, 'environmentfilter', False): - args.insert(0, self) - return func(*args, **(kwargs or {})) - - def call_test(self, name, value, args=None, kwargs=None): - """Invokes a test on a value the same way the compiler does it. - - .. versionadded:: 2.7 - """ - func = self.tests.get(name) - if func is None: - raise TemplateRuntimeError('no test named %r' % name) - return func(value, *(args or ()), **(kwargs or {})) - - @internalcode - def parse(self, source, name=None, filename=None): - """Parse the sourcecode and return the abstract syntax tree. This - tree of nodes is used by the compiler to convert the template into - executable source- or bytecode. This is useful for debugging or to - extract information from templates. - - If you are :ref:`developing Jinja2 extensions ` - this gives you a good overview of the node tree generated. - """ - try: - return self._parse(source, name, filename) - except TemplateSyntaxError: - exc_info = sys.exc_info() - self.handle_exception(exc_info, source_hint=source) - - def _parse(self, source, name, filename): - """Internal parsing function used by `parse` and `compile`.""" - return Parser(self, source, name, encode_filename(filename)).parse() - - def lex(self, source, name=None, filename=None): - """Lex the given sourcecode and return a generator that yields - tokens as tuples in the form ``(lineno, token_type, value)``. - This can be useful for :ref:`extension development ` - and debugging templates. - - This does not perform preprocessing. If you want the preprocessing - of the extensions to be applied you have to filter source through - the :meth:`preprocess` method. - """ - source = text_type(source) - try: - return self.lexer.tokeniter(source, name, filename) - except TemplateSyntaxError: - exc_info = sys.exc_info() - self.handle_exception(exc_info, source_hint=source) - - def preprocess(self, source, name=None, filename=None): - """Preprocesses the source with all extensions. This is automatically - called for all parsing and compiling methods but *not* for :meth:`lex` - because there you usually only want the actual source tokenized. - """ - return reduce(lambda s, e: e.preprocess(s, name, filename), - self.iter_extensions(), text_type(source)) - - def _tokenize(self, source, name, filename=None, state=None): - """Called by the parser to do the preprocessing and filtering - for all the extensions. Returns a :class:`~jinja2.lexer.TokenStream`. - """ - source = self.preprocess(source, name, filename) - stream = self.lexer.tokenize(source, name, filename, state) - for ext in self.iter_extensions(): - stream = ext.filter_stream(stream) - if not isinstance(stream, TokenStream): - stream = TokenStream(stream, name, filename) - return stream - - def _generate(self, source, name, filename, defer_init=False): - """Internal hook that can be overridden to hook a different generate - method in. - - .. 
versionadded:: 2.5 - """ - return generate(source, self, name, filename, defer_init=defer_init) - - def _compile(self, source, filename): - """Internal hook that can be overridden to hook a different compile - method in. - - .. versionadded:: 2.5 - """ - return compile(source, filename, 'exec') - - @internalcode - def compile(self, source, name=None, filename=None, raw=False, - defer_init=False): - """Compile a node or template source code. The `name` parameter is - the load name of the template after it was joined using - :meth:`join_path` if necessary, not the filename on the file system. - the `filename` parameter is the estimated filename of the template on - the file system. If the template came from a database or memory this - can be omitted. - - The return value of this method is a python code object. If the `raw` - parameter is `True` the return value will be a string with python - code equivalent to the bytecode returned otherwise. This method is - mainly used internally. - - `defer_init` is use internally to aid the module code generator. This - causes the generated code to be able to import without the global - environment variable to be set. - - .. versionadded:: 2.4 - `defer_init` parameter added. - """ - source_hint = None - try: - if isinstance(source, string_types): - source_hint = source - source = self._parse(source, name, filename) - if self.optimized: - source = optimize(source, self) - source = self._generate(source, name, filename, - defer_init=defer_init) - if raw: - return source - if filename is None: - filename = '