From 717a291e1913734f26981d7b123c39685b339b14 Mon Sep 17 00:00:00 2001 From: j Date: Wed, 23 Mar 2016 12:39:11 +0100 Subject: [PATCH] run update.sh, install netifaces --- .../SQLAlchemy-1.0.12.dist-info/RECORD | 354 +- .../ed25519-1.4.dist-info/RECORD | 6 +- .../DESCRIPTION.rst | 196 + .../INSTALLER | 0 .../netifaces-0.10.4.dist-info/METADATA | 216 + .../netifaces-0.10.4.dist-info/RECORD | 9 + .../netifaces-0.10.4.dist-info/WHEEL | 5 + .../netifaces-0.10.4.dist-info/metadata.json | 1 + .../netifaces-0.10.4.dist-info/top_level.txt | 1 + .../zip-safe | 0 .../site-packages/netifaces.cpython-34m.so | Bin 0 -> 50100 bytes .../site-packages/pip-8.0.2.dist-info/RECORD | 113 - .../DESCRIPTION.rst | 4 +- .../INSTALLER | 0 .../METADATA | 7 +- .../site-packages/pip-8.1.1.dist-info/RECORD | 485 ++ .../WHEEL | 2 +- .../entry_points.txt | 2 +- .../metadata.json | 2 +- .../top_level.txt | 0 lib/python3.4/site-packages/pip/__init__.py | 10 +- .../site-packages/pip/_vendor/__init__.py | 42 +- .../{ => pip/_vendor}/_markerlib/__init__.py | 2 +- .../{ => pip/_vendor}/_markerlib/markers.py | 0 .../pip/_vendor/cachecontrol/__init__.py | 11 + .../pip/_vendor/cachecontrol/_cmd.py | 60 + .../pip/_vendor/cachecontrol/adapter.py | 117 + .../pip/_vendor/cachecontrol/cache.py | 39 + .../_vendor/cachecontrol/caches/__init__.py | 18 + .../_vendor/cachecontrol/caches/file_cache.py | 116 + .../cachecontrol/caches/redis_cache.py | 41 + .../pip/_vendor/cachecontrol/compat.py | 20 + .../pip/_vendor/cachecontrol/controller.py | 353 ++ .../pip/_vendor/cachecontrol/filewrapper.py | 63 + .../pip/_vendor/cachecontrol/heuristics.py | 138 + .../pip/_vendor/cachecontrol/serialize.py | 190 + .../pip/_vendor/cachecontrol/wrapper.py | 21 + .../pip/_vendor/colorama/__init__.py | 7 + .../pip/_vendor/colorama/ansi.py | 102 + .../pip/_vendor/colorama/ansitowin32.py | 232 + .../pip/_vendor/colorama/initialise.py | 81 + .../pip/_vendor/colorama/win32.py | 154 + .../pip/_vendor/colorama/winterm.py | 162 + 
.../pip/_vendor/distlib/__init__.py | 23 + .../pip/_vendor/distlib/_backport/__init__.py | 6 + .../pip/_vendor/distlib/_backport/misc.py | 41 + .../pip/_vendor/distlib/_backport/shutil.py | 761 +++ .../_vendor/distlib/_backport/sysconfig.cfg | 84 + .../_vendor/distlib/_backport/sysconfig.py | 788 +++ .../pip/_vendor/distlib/_backport/tarfile.py | 2607 ++++++++ .../pip/_vendor/distlib/compat.py | 1102 ++++ .../pip/_vendor/distlib/database.py | 1312 ++++ .../pip/_vendor/distlib/index.py | 513 ++ .../pip/_vendor/distlib/locators.py | 1264 ++++ .../pip/_vendor/distlib/manifest.py | 367 ++ .../pip/_vendor/distlib/markers.py | 190 + .../pip/_vendor/distlib/metadata.py | 1066 ++++ .../pip/_vendor/distlib/resources.py | 350 + .../pip/_vendor/distlib/scripts.py | 384 ++ .../site-packages/pip/_vendor/distlib/t32.exe | Bin 0 -> 89088 bytes .../site-packages/pip/_vendor/distlib/t64.exe | Bin 0 -> 97792 bytes .../site-packages/pip/_vendor/distlib/util.py | 1593 +++++ .../pip/_vendor/distlib/version.py | 742 +++ .../site-packages/pip/_vendor/distlib/w32.exe | Bin 0 -> 85504 bytes .../site-packages/pip/_vendor/distlib/w64.exe | Bin 0 -> 94208 bytes .../pip/_vendor/distlib/wheel.py | 976 +++ .../pip/_vendor/html5lib/__init__.py | 25 + .../pip/_vendor/html5lib/constants.py | 3102 +++++++++ .../pip/_vendor/html5lib/filters/__init__.py | 0 .../pip/_vendor/html5lib/filters/_base.py | 12 + .../filters/alphabeticalattributes.py | 20 + .../html5lib/filters/inject_meta_charset.py | 65 + .../pip/_vendor/html5lib/filters/lint.py | 90 + .../_vendor/html5lib/filters/optionaltags.py | 205 + .../pip/_vendor/html5lib/filters/sanitizer.py | 12 + .../_vendor/html5lib/filters/whitespace.py | 38 + .../pip/_vendor/html5lib/html5parser.py | 2724 ++++++++ .../pip/_vendor/html5lib/ihatexml.py | 285 + .../pip/_vendor/html5lib/inputstream.py | 903 +++ .../pip/_vendor/html5lib/sanitizer.py | 300 + .../_vendor/html5lib/serializer/__init__.py | 16 + .../html5lib/serializer/htmlserializer.py | 317 + 
.../pip/_vendor/html5lib/tokenizer.py | 1731 +++++ .../_vendor/html5lib/treeadapters/__init__.py | 0 .../pip/_vendor/html5lib/treeadapters/sax.py | 44 + .../_vendor/html5lib/treebuilders/__init__.py | 76 + .../_vendor/html5lib/treebuilders/_base.py | 377 ++ .../pip/_vendor/html5lib/treebuilders/dom.py | 227 + .../_vendor/html5lib/treebuilders/etree.py | 337 + .../html5lib/treebuilders/etree_lxml.py | 369 ++ .../_vendor/html5lib/treewalkers/__init__.py | 147 + .../pip/_vendor/html5lib/treewalkers/_base.py | 200 + .../pip/_vendor/html5lib/treewalkers/dom.py | 43 + .../pip/_vendor/html5lib/treewalkers/etree.py | 136 + .../html5lib/treewalkers/genshistream.py | 69 + .../_vendor/html5lib/treewalkers/lxmletree.py | 201 + .../_vendor/html5lib/treewalkers/pulldom.py | 63 + .../pip/_vendor/html5lib/trie/__init__.py | 12 + .../pip/_vendor/html5lib/trie/_base.py | 37 + .../pip/_vendor/html5lib/trie/datrie.py | 44 + .../pip/_vendor/html5lib/trie/py.py | 67 + .../pip/_vendor/html5lib/utils.py | 103 + .../site-packages/pip/_vendor/ipaddress.py | 2417 +++++++ .../pip/_vendor/lockfile/__init__.py | 347 + .../pip/_vendor/lockfile/linklockfile.py | 73 + .../pip/_vendor/lockfile/mkdirlockfile.py | 84 + .../pip/_vendor/lockfile/pidlockfile.py | 190 + .../pip/_vendor/lockfile/sqlitelockfile.py | 156 + .../pip/_vendor/lockfile/symlinklockfile.py | 70 + .../pip/_vendor/packaging/__about__.py | 21 + .../pip/_vendor/packaging/__init__.py | 14 + .../pip/_vendor/packaging/_compat.py | 30 + .../pip/_vendor/packaging/_structures.py | 68 + .../pip/_vendor/packaging/markers.py | 275 + .../pip/_vendor/packaging/requirements.py | 129 + .../pip/_vendor/packaging/specifiers.py | 774 +++ .../pip/_vendor/packaging/utils.py | 14 + .../pip/_vendor/packaging/version.py | 393 ++ .../pip/_vendor/pkg_resources/__init__.py | 3159 ++++++++++ .../pip/_vendor/progress/__init__.py | 123 + .../site-packages/pip/_vendor/progress/bar.py | 83 + .../pip/_vendor/progress/counter.py | 47 + 
.../pip/_vendor/progress/helpers.py | 91 + .../pip/_vendor/progress/spinner.py | 40 + .../site-packages/pip/_vendor/pyparsing.py | 3835 +++++++++++ .../site-packages/pip/_vendor/re-vendor.py | 34 + .../pip/_vendor/requests/__init__.py | 83 + .../pip/_vendor/requests/adapters.py | 453 ++ .../site-packages/pip/_vendor/requests/api.py | 145 + .../pip/_vendor/requests/auth.py | 223 + .../pip/_vendor/requests/cacert.pem | 5616 +++++++++++++++++ .../pip/_vendor/requests/certs.py | 25 + .../pip/_vendor/requests/compat.py | 62 + .../pip/_vendor/requests/cookies.py | 487 ++ .../pip/_vendor/requests/exceptions.py | 114 + .../pip/_vendor/requests/hooks.py | 34 + .../pip/_vendor/requests/models.py | 851 +++ .../pip/_vendor/requests/packages/__init__.py | 36 + .../requests/packages/chardet/__init__.py | 32 + .../requests/packages/chardet/big5freq.py | 925 +++ .../requests/packages/chardet/big5prober.py | 42 + .../requests/packages/chardet/chardetect.py | 80 + .../packages/chardet/chardistribution.py | 231 + .../packages/chardet/charsetgroupprober.py | 106 + .../packages/chardet/charsetprober.py | 62 + .../packages/chardet/codingstatemachine.py | 61 + .../requests/packages/chardet/compat.py | 34 + .../requests/packages/chardet/constants.py | 39 + .../requests/packages/chardet/cp949prober.py | 44 + .../requests/packages/chardet/escprober.py | 86 + .../requests/packages/chardet/escsm.py | 242 + .../requests/packages/chardet/eucjpprober.py | 90 + .../requests/packages/chardet/euckrfreq.py | 596 ++ .../requests/packages/chardet/euckrprober.py | 42 + .../requests/packages/chardet/euctwfreq.py | 428 ++ .../requests/packages/chardet/euctwprober.py | 41 + .../requests/packages/chardet/gb2312freq.py | 472 ++ .../requests/packages/chardet/gb2312prober.py | 41 + .../requests/packages/chardet/hebrewprober.py | 283 + .../requests/packages/chardet/jisfreq.py | 569 ++ .../requests/packages/chardet/jpcntx.py | 227 + .../packages/chardet/langbulgarianmodel.py | 229 + 
.../packages/chardet/langcyrillicmodel.py | 329 + .../packages/chardet/langgreekmodel.py | 225 + .../packages/chardet/langhebrewmodel.py | 201 + .../packages/chardet/langhungarianmodel.py | 225 + .../packages/chardet/langthaimodel.py | 200 + .../requests/packages/chardet/latin1prober.py | 139 + .../packages/chardet/mbcharsetprober.py | 86 + .../packages/chardet/mbcsgroupprober.py | 54 + .../requests/packages/chardet/mbcssm.py | 572 ++ .../packages/chardet/sbcharsetprober.py | 120 + .../packages/chardet/sbcsgroupprober.py | 69 + .../requests/packages/chardet/sjisprober.py | 91 + .../packages/chardet/universaldetector.py | 170 + .../requests/packages/chardet/utf8prober.py | 76 + .../requests/packages/urllib3/__init__.py | 93 + .../requests/packages/urllib3/_collections.py | 324 + .../requests/packages/urllib3/connection.py | 288 + .../packages/urllib3/connectionpool.py | 818 +++ .../packages/urllib3/contrib/__init__.py | 0 .../packages/urllib3/contrib/appengine.py | 223 + .../packages/urllib3/contrib/ntlmpool.py | 115 + .../packages/urllib3/contrib/pyopenssl.py | 310 + .../requests/packages/urllib3/exceptions.py | 201 + .../requests/packages/urllib3/fields.py | 178 + .../requests/packages/urllib3/filepost.py | 94 + .../packages/urllib3/packages/__init__.py | 5 + .../packages/urllib3/packages/ordered_dict.py | 259 + .../requests/packages/urllib3/packages/six.py | 385 ++ .../packages/ssl_match_hostname/__init__.py | 13 + .../ssl_match_hostname/_implementation.py | 105 + .../requests/packages/urllib3/poolmanager.py | 281 + .../requests/packages/urllib3/request.py | 151 + .../requests/packages/urllib3/response.py | 514 ++ .../packages/urllib3/util/__init__.py | 44 + .../packages/urllib3/util/connection.py | 101 + .../requests/packages/urllib3/util/request.py | 72 + .../packages/urllib3/util/response.py | 74 + .../requests/packages/urllib3/util/retry.py | 286 + .../requests/packages/urllib3/util/ssl_.py | 317 + .../requests/packages/urllib3/util/timeout.py | 242 + 
.../requests/packages/urllib3/util/url.py | 217 + .../pip/_vendor/requests/sessions.py | 680 ++ .../pip/_vendor/requests/status_codes.py | 90 + .../pip/_vendor/requests/structures.py | 104 + .../pip/_vendor/requests/utils.py | 721 +++ .../site-packages/pip/_vendor/retrying.py | 267 + .../site-packages/pip/_vendor/six.py | 868 +++ lib/python3.4/site-packages/pip/cmdoptions.py | 2 +- .../site-packages/pip/commands/__init__.py | 6 +- .../site-packages/pip/commands/completion.py | 3 +- .../site-packages/pip/commands/freeze.py | 17 +- .../site-packages/pip/commands/search.py | 11 +- .../site-packages/pip/commands/show.py | 23 + .../site-packages/pip/compat/__init__.py | 12 +- .../site-packages/pip/compat/ordereddict.py | 129 + lib/python3.4/site-packages/pip/download.py | 21 +- lib/python3.4/site-packages/pip/exceptions.py | 17 +- lib/python3.4/site-packages/pip/index.py | 23 +- .../site-packages/pip/operations/freeze.py | 20 +- lib/python3.4/site-packages/pip/pep425tags.py | 158 +- .../site-packages/pip/req/req_file.py | 4 + .../site-packages/pip/req/req_install.py | 112 +- .../site-packages/pip/req/req_set.py | 3 +- .../site-packages/pip/utils/__init__.py | 102 +- .../site-packages/pip/utils/deprecation.py | 32 +- .../site-packages/pip/utils/encoding.py | 31 + lib/python3.4/site-packages/pip/utils/ui.py | 5 + lib/python3.4/site-packages/pip/vcs/git.py | 7 +- lib/python3.4/site-packages/pip/wheel.py | 9 +- .../site-packages/pkg_resources/__init__.py | 337 +- .../_vendor/packaging/__about__.py | 24 +- .../_vendor/packaging/__init__.py | 16 +- .../_vendor/packaging/_compat.py | 16 +- .../_vendor/packaging/_structures.py | 16 +- .../_vendor/packaging/markers.py | 273 + .../_vendor/packaging/requirements.py | 127 + .../_vendor/packaging/specifiers.py | 66 +- .../pkg_resources/_vendor/packaging/utils.py | 14 + .../_vendor/packaging/version.py | 16 +- .../pkg_resources/_vendor/pyparsing.py | 3805 +++++++++++ .../pkg_resources/extern/__init__.py | 2 +- 
.../setuptools-20.1.1.dist-info/metadata.json | 1 - .../DESCRIPTION.rst | 0 .../setuptools-20.3.1.dist-info/INSTALLER | 1 + .../METADATA | 2 +- .../RECORD | 144 +- .../WHEEL | 2 +- .../dependency_links.txt | 0 .../entry_points.txt | 0 .../setuptools-20.3.1.dist-info/metadata.json | 1 + .../top_level.txt | 1 - .../setuptools-20.3.1.dist-info/zip-safe | 1 + .../setuptools/command/easy_install.py | 19 +- .../site-packages/setuptools/version.py | 2 +- .../site-packages/six-1.10.0.dist-info/RECORD | 2 +- .../wheel-0.29.0.dist-info/RECORD | 56 +- 258 files changed, 71187 insertions(+), 1047 deletions(-) create mode 100644 lib/python3.4/site-packages/netifaces-0.10.4.dist-info/DESCRIPTION.rst rename lib/python3.4/site-packages/{pip-8.0.2.dist-info => netifaces-0.10.4.dist-info}/INSTALLER (100%) create mode 100644 lib/python3.4/site-packages/netifaces-0.10.4.dist-info/METADATA create mode 100644 lib/python3.4/site-packages/netifaces-0.10.4.dist-info/RECORD create mode 100644 lib/python3.4/site-packages/netifaces-0.10.4.dist-info/WHEEL create mode 100644 lib/python3.4/site-packages/netifaces-0.10.4.dist-info/metadata.json create mode 100644 lib/python3.4/site-packages/netifaces-0.10.4.dist-info/top_level.txt rename lib/python3.4/site-packages/{setuptools-20.1.1.dist-info => netifaces-0.10.4.dist-info}/zip-safe (100%) create mode 100755 lib/python3.4/site-packages/netifaces.cpython-34m.so delete mode 100644 lib/python3.4/site-packages/pip-8.0.2.dist-info/RECORD rename lib/python3.4/site-packages/{pip-8.0.2.dist-info => pip-8.1.1.dist-info}/DESCRIPTION.rst (86%) rename lib/python3.4/site-packages/{setuptools-20.1.1.dist-info => pip-8.1.1.dist-info}/INSTALLER (100%) rename lib/python3.4/site-packages/{pip-8.0.2.dist-info => pip-8.1.1.dist-info}/METADATA (90%) create mode 100644 lib/python3.4/site-packages/pip-8.1.1.dist-info/RECORD rename lib/python3.4/site-packages/{pip-8.0.2.dist-info => pip-8.1.1.dist-info}/WHEEL (70%) rename lib/python3.4/site-packages/{pip-8.0.2.dist-info 
=> pip-8.1.1.dist-info}/entry_points.txt (73%) rename lib/python3.4/site-packages/{pip-8.0.2.dist-info => pip-8.1.1.dist-info}/metadata.json (67%) rename lib/python3.4/site-packages/{pip-8.0.2.dist-info => pip-8.1.1.dist-info}/top_level.txt (100%) rename lib/python3.4/site-packages/{ => pip/_vendor}/_markerlib/__init__.py (84%) rename lib/python3.4/site-packages/{ => pip/_vendor}/_markerlib/markers.py (100%) create mode 100644 lib/python3.4/site-packages/pip/_vendor/cachecontrol/__init__.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/cachecontrol/_cmd.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/cachecontrol/adapter.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/cachecontrol/cache.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/cachecontrol/caches/__init__.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/cachecontrol/compat.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/cachecontrol/controller.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/cachecontrol/filewrapper.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/cachecontrol/heuristics.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/cachecontrol/serialize.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/cachecontrol/wrapper.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/colorama/__init__.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/colorama/ansi.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/colorama/ansitowin32.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/colorama/initialise.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/colorama/win32.py create mode 100644 
lib/python3.4/site-packages/pip/_vendor/colorama/winterm.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/distlib/__init__.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/distlib/_backport/__init__.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/distlib/_backport/misc.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/distlib/_backport/shutil.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/distlib/_backport/sysconfig.cfg create mode 100644 lib/python3.4/site-packages/pip/_vendor/distlib/_backport/sysconfig.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/distlib/_backport/tarfile.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/distlib/compat.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/distlib/database.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/distlib/index.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/distlib/locators.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/distlib/manifest.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/distlib/markers.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/distlib/metadata.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/distlib/resources.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/distlib/scripts.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/distlib/t32.exe create mode 100644 lib/python3.4/site-packages/pip/_vendor/distlib/t64.exe create mode 100644 lib/python3.4/site-packages/pip/_vendor/distlib/util.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/distlib/version.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/distlib/w32.exe create mode 100644 lib/python3.4/site-packages/pip/_vendor/distlib/w64.exe create mode 100644 lib/python3.4/site-packages/pip/_vendor/distlib/wheel.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/html5lib/__init__.py create mode 
100644 lib/python3.4/site-packages/pip/_vendor/html5lib/constants.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/html5lib/filters/__init__.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/html5lib/filters/_base.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/html5lib/filters/inject_meta_charset.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/html5lib/filters/lint.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/html5lib/filters/optionaltags.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/html5lib/filters/sanitizer.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/html5lib/filters/whitespace.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/html5lib/html5parser.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/html5lib/ihatexml.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/html5lib/inputstream.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/html5lib/sanitizer.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/html5lib/serializer/__init__.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/html5lib/serializer/htmlserializer.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/html5lib/tokenizer.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/html5lib/treeadapters/__init__.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/html5lib/treeadapters/sax.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/html5lib/treebuilders/__init__.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/html5lib/treebuilders/_base.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/html5lib/treebuilders/dom.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/html5lib/treebuilders/etree.py create mode 100644 
lib/python3.4/site-packages/pip/_vendor/html5lib/treebuilders/etree_lxml.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/html5lib/treewalkers/__init__.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/html5lib/treewalkers/_base.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/html5lib/treewalkers/dom.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/html5lib/treewalkers/etree.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/html5lib/treewalkers/genshistream.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/html5lib/treewalkers/lxmletree.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/html5lib/treewalkers/pulldom.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/html5lib/trie/__init__.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/html5lib/trie/_base.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/html5lib/trie/datrie.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/html5lib/trie/py.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/html5lib/utils.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/ipaddress.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/lockfile/__init__.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/lockfile/linklockfile.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/lockfile/mkdirlockfile.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/lockfile/pidlockfile.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/lockfile/sqlitelockfile.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/lockfile/symlinklockfile.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/packaging/__about__.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/packaging/__init__.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/packaging/_compat.py create mode 100644 
lib/python3.4/site-packages/pip/_vendor/packaging/_structures.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/packaging/markers.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/packaging/requirements.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/packaging/specifiers.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/packaging/utils.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/packaging/version.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/pkg_resources/__init__.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/progress/__init__.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/progress/bar.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/progress/counter.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/progress/helpers.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/progress/spinner.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/pyparsing.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/re-vendor.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/__init__.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/adapters.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/api.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/auth.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/cacert.pem create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/certs.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/compat.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/cookies.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/exceptions.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/hooks.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/models.py create mode 100644 
lib/python3.4/site-packages/pip/_vendor/requests/packages/__init__.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/__init__.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/big5freq.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/big5prober.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/chardetect.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/chardistribution.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/charsetgroupprober.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/charsetprober.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/codingstatemachine.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/compat.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/constants.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/cp949prober.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/escprober.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/escsm.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/eucjpprober.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/euckrfreq.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/euckrprober.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/euctwfreq.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/euctwprober.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/gb2312freq.py create mode 100644 
lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/gb2312prober.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/hebrewprober.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/jisfreq.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/jpcntx.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/langbulgarianmodel.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/langcyrillicmodel.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/langgreekmodel.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/langhebrewmodel.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/langhungarianmodel.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/langthaimodel.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/latin1prober.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/mbcharsetprober.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/mbcsgroupprober.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/mbcssm.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/sbcharsetprober.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/sbcsgroupprober.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/sjisprober.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/universaldetector.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/utf8prober.py create mode 100644 
lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/__init__.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/_collections.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/connection.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/connectionpool.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/contrib/__init__.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/contrib/appengine.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/contrib/ntlmpool.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/contrib/pyopenssl.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/exceptions.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/fields.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/filepost.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/packages/__init__.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/packages/ordered_dict.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/packages/six.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/poolmanager.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/request.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/response.py create mode 100644 
lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/util/__init__.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/util/connection.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/util/request.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/util/response.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/util/retry.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/util/ssl_.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/util/timeout.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/packages/urllib3/util/url.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/sessions.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/status_codes.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/structures.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/requests/utils.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/retrying.py create mode 100644 lib/python3.4/site-packages/pip/_vendor/six.py create mode 100644 lib/python3.4/site-packages/pip/compat/ordereddict.py create mode 100644 lib/python3.4/site-packages/pip/utils/encoding.py create mode 100644 lib/python3.4/site-packages/pkg_resources/_vendor/packaging/markers.py create mode 100644 lib/python3.4/site-packages/pkg_resources/_vendor/packaging/requirements.py create mode 100644 lib/python3.4/site-packages/pkg_resources/_vendor/packaging/utils.py create mode 100644 lib/python3.4/site-packages/pkg_resources/_vendor/pyparsing.py delete mode 100644 lib/python3.4/site-packages/setuptools-20.1.1.dist-info/metadata.json rename lib/python3.4/site-packages/{setuptools-20.1.1.dist-info => setuptools-20.3.1.dist-info}/DESCRIPTION.rst (100%) create mode 100644 
lib/python3.4/site-packages/setuptools-20.3.1.dist-info/INSTALLER rename lib/python3.4/site-packages/{setuptools-20.1.1.dist-info => setuptools-20.3.1.dist-info}/METADATA (99%) rename lib/python3.4/site-packages/{setuptools-20.1.1.dist-info => setuptools-20.3.1.dist-info}/RECORD (76%) rename lib/python3.4/site-packages/{setuptools-20.1.1.dist-info => setuptools-20.3.1.dist-info}/WHEEL (70%) rename lib/python3.4/site-packages/{setuptools-20.1.1.dist-info => setuptools-20.3.1.dist-info}/dependency_links.txt (100%) rename lib/python3.4/site-packages/{setuptools-20.1.1.dist-info => setuptools-20.3.1.dist-info}/entry_points.txt (100%) create mode 100644 lib/python3.4/site-packages/setuptools-20.3.1.dist-info/metadata.json rename lib/python3.4/site-packages/{setuptools-20.1.1.dist-info => setuptools-20.3.1.dist-info}/top_level.txt (77%) create mode 100644 lib/python3.4/site-packages/setuptools-20.3.1.dist-info/zip-safe diff --git a/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/RECORD b/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/RECORD index b75a3c2..c19eb8a 100644 --- a/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/RECORD +++ b/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/RECORD @@ -190,187 +190,187 @@ sqlalchemy/util/deprecations.py,sha256=D_LTsfb9jHokJtPEWNDRMJOc372xRGNjputAiTIys sqlalchemy/util/langhelpers.py,sha256=Nhe3Y9ieK6JaFYejjYosVOjOSSIBT2V385Hu6HGcyZk,41607 sqlalchemy/util/queue.py,sha256=rs3W0LDhKt7M_dlQEjYpI9KS-bzQmmwN38LE_-RRVvU,6548 sqlalchemy/util/topological.py,sha256=xKsYjjAat4p8cdqRHKwibLzr6WONbPTC0X8Mqg7jYno,2794 -/openmedialibrary/platform_linux32/p34/lib/python3.4/site-packages/SQLAlchemy-1.0.12.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -sqlalchemy/orm/__pycache__/identity.cpython-34.pyc,, -sqlalchemy/testing/suite/__pycache__/test_types.cpython-34.pyc,, -sqlalchemy/dialects/mysql/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/ext/__pycache__/compiler.cpython-34.pyc,, 
-sqlalchemy/dialects/oracle/__pycache__/zxjdbc.cpython-34.pyc,, -sqlalchemy/sql/__pycache__/annotation.cpython-34.pyc,, -sqlalchemy/databases/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/ext/__pycache__/hybrid.cpython-34.pyc,, -sqlalchemy/dialects/mysql/__pycache__/pyodbc.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/descriptor_props.cpython-34.pyc,, -sqlalchemy/connectors/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/util/__pycache__/topological.cpython-34.pyc,, -sqlalchemy/dialects/mssql/__pycache__/pymssql.cpython-34.pyc,, -sqlalchemy/testing/__pycache__/assertsql.cpython-34.pyc,, -sqlalchemy/__pycache__/exc.cpython-34.pyc,, -sqlalchemy/sql/__pycache__/compiler.cpython-34.pyc,, -sqlalchemy/dialects/mysql/__pycache__/mysqldb.cpython-34.pyc,, -sqlalchemy/engine/__pycache__/interfaces.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/util.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/interfaces.cpython-34.pyc,, -sqlalchemy/dialects/mssql/__pycache__/zxjdbc.cpython-34.pyc,, -sqlalchemy/ext/__pycache__/serializer.cpython-34.pyc,, -sqlalchemy/__pycache__/types.cpython-34.pyc,, -sqlalchemy/engine/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/engine/__pycache__/result.cpython-34.pyc,, -sqlalchemy/sql/__pycache__/dml.cpython-34.pyc,, -sqlalchemy/testing/__pycache__/engines.cpython-34.pyc,, -sqlalchemy/sql/__pycache__/ddl.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/strategy_options.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/evaluator.cpython-34.pyc,, -sqlalchemy/dialects/mysql/__pycache__/mysqlconnector.cpython-34.pyc,, -sqlalchemy/ext/__pycache__/orderinglist.cpython-34.pyc,, -sqlalchemy/dialects/sybase/__pycache__/base.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/path_registry.cpython-34.pyc,, -sqlalchemy/testing/suite/__pycache__/test_ddl.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/relationships.cpython-34.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/json.cpython-34.pyc,, -sqlalchemy/connectors/__pycache__/zxJDBC.cpython-34.pyc,, 
-sqlalchemy/dialects/mssql/__pycache__/adodbapi.cpython-34.pyc,, -sqlalchemy/dialects/sybase/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/connectors/__pycache__/mxodbc.cpython-34.pyc,, -sqlalchemy/sql/__pycache__/visitors.cpython-34.pyc,, -sqlalchemy/__pycache__/interfaces.cpython-34.pyc,, -sqlalchemy/engine/__pycache__/base.cpython-34.pyc,, -sqlalchemy/dialects/__pycache__/postgres.cpython-34.pyc,, -sqlalchemy/dialects/firebird/__pycache__/fdb.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/deprecated_interfaces.cpython-34.pyc,, -sqlalchemy/testing/plugin/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/engine/__pycache__/strategies.cpython-34.pyc,, -sqlalchemy/ext/declarative/__pycache__/api.cpython-34.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/base.cpython-34.pyc,, -sqlalchemy/ext/__pycache__/instrumentation.cpython-34.pyc,, -sqlalchemy/testing/__pycache__/runner.cpython-34.pyc,, -sqlalchemy/sql/__pycache__/crud.cpython-34.pyc,, -sqlalchemy/testing/suite/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/__pycache__/inspection.cpython-34.pyc,, -sqlalchemy/dialects/oracle/__pycache__/cx_oracle.cpython-34.pyc,, -sqlalchemy/testing/__pycache__/warnings.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/attributes.cpython-34.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/pypostgresql.cpython-34.pyc,, -sqlalchemy/testing/__pycache__/replay_fixture.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/dependency.cpython-34.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/ranges.cpython-34.pyc,, -sqlalchemy/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/sql/__pycache__/expression.cpython-34.pyc,, -sqlalchemy/dialects/firebird/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/dialects/mysql/__pycache__/zxjdbc.cpython-34.pyc,, -sqlalchemy/testing/__pycache__/pickleable.cpython-34.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/psycopg2.cpython-34.pyc,, -sqlalchemy/dialects/mysql/__pycache__/base.cpython-34.pyc,, 
-sqlalchemy/util/__pycache__/_collections.cpython-34.pyc,, -sqlalchemy/event/__pycache__/attr.cpython-34.pyc,, -sqlalchemy/util/__pycache__/compat.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/state.cpython-34.pyc,, -sqlalchemy/testing/plugin/__pycache__/pytestplugin.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/query.cpython-34.pyc,, -sqlalchemy/testing/plugin/__pycache__/plugin_base.cpython-34.pyc,, -sqlalchemy/dialects/sqlite/__pycache__/pysqlcipher.cpython-34.pyc,, -sqlalchemy/sql/__pycache__/naming.cpython-34.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/__pycache__/log.cpython-34.pyc,, -sqlalchemy/testing/suite/__pycache__/test_insert.cpython-34.pyc,, -sqlalchemy/dialects/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/base.cpython-34.pyc,, -sqlalchemy/testing/suite/__pycache__/test_dialect.cpython-34.pyc,, -sqlalchemy/testing/__pycache__/entities.cpython-34.pyc,, -sqlalchemy/ext/__pycache__/associationproxy.cpython-34.pyc,, -sqlalchemy/testing/suite/__pycache__/test_update_delete.cpython-34.pyc,, -sqlalchemy/dialects/sqlite/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/ext/__pycache__/horizontal_shard.cpython-34.pyc,, -sqlalchemy/engine/__pycache__/url.cpython-34.pyc,, -sqlalchemy/dialects/mssql/__pycache__/pyodbc.cpython-34.pyc,, -sqlalchemy/dialects/sqlite/__pycache__/base.cpython-34.pyc,, -sqlalchemy/engine/__pycache__/default.cpython-34.pyc,, -sqlalchemy/ext/__pycache__/baked.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/dialects/oracle/__pycache__/base.cpython-34.pyc,, -sqlalchemy/sql/__pycache__/elements.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/session.cpython-34.pyc,, -sqlalchemy/testing/__pycache__/schema.cpython-34.pyc,, -sqlalchemy/dialects/sqlite/__pycache__/pysqlite.cpython-34.pyc,, -sqlalchemy/sql/__pycache__/schema.cpython-34.pyc,, -sqlalchemy/ext/__pycache__/mutable.cpython-34.pyc,, -sqlalchemy/event/__pycache__/base.cpython-34.pyc,, 
-sqlalchemy/testing/suite/__pycache__/test_select.cpython-34.pyc,, -sqlalchemy/dialects/mysql/__pycache__/gaerdbms.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/properties.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/scoping.cpython-34.pyc,, -sqlalchemy/__pycache__/schema.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/instrumentation.cpython-34.pyc,, -sqlalchemy/testing/suite/__pycache__/test_results.cpython-34.pyc,, -sqlalchemy/dialects/mssql/__pycache__/base.cpython-34.pyc,, -sqlalchemy/sql/__pycache__/type_api.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/dynamic.cpython-34.pyc,, -sqlalchemy/sql/__pycache__/operators.cpython-34.pyc,, +SQLAlchemy-1.0.12.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 sqlalchemy/ext/declarative/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/dialects/oracle/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/persistence.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/sync.cpython-34.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/hstore.cpython-34.pyc,, -sqlalchemy/testing/__pycache__/profiling.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/mapper.cpython-34.pyc,, -sqlalchemy/dialects/mssql/__pycache__/mxodbc.cpython-34.pyc,, -sqlalchemy/__pycache__/events.cpython-34.pyc,, -sqlalchemy/engine/__pycache__/util.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/strategies.cpython-34.pyc,, -sqlalchemy/ext/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/dialects/sybase/__pycache__/pyodbc.cpython-34.pyc,, -sqlalchemy/dialects/firebird/__pycache__/base.cpython-34.pyc,, -sqlalchemy/event/__pycache__/registry.cpython-34.pyc,, -sqlalchemy/event/__pycache__/api.cpython-34.pyc,, -sqlalchemy/sql/__pycache__/functions.cpython-34.pyc,, -sqlalchemy/event/__pycache__/legacy.cpython-34.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/psycopg2cffi.cpython-34.pyc,, -sqlalchemy/testing/plugin/__pycache__/noseplugin.cpython-34.pyc,, -sqlalchemy/util/__pycache__/__init__.cpython-34.pyc,, 
-sqlalchemy/__pycache__/processors.cpython-34.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/zxjdbc.cpython-34.pyc,, -sqlalchemy/sql/__pycache__/sqltypes.cpython-34.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/constraints.cpython-34.pyc,, -sqlalchemy/testing/plugin/__pycache__/bootstrap.cpython-34.pyc,, -sqlalchemy/ext/__pycache__/automap.cpython-34.pyc,, -sqlalchemy/engine/__pycache__/reflection.cpython-34.pyc,, -sqlalchemy/testing/__pycache__/provision.cpython-34.pyc,, -sqlalchemy/util/__pycache__/langhelpers.cpython-34.pyc,, -sqlalchemy/util/__pycache__/deprecations.cpython-34.pyc,, -sqlalchemy/sql/__pycache__/base.cpython-34.pyc,, -sqlalchemy/ext/declarative/__pycache__/clsregistry.cpython-34.pyc,, -sqlalchemy/util/__pycache__/queue.cpython-34.pyc,, -sqlalchemy/ext/declarative/__pycache__/base.cpython-34.pyc,, -sqlalchemy/testing/__pycache__/util.cpython-34.pyc,, -sqlalchemy/dialects/mssql/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/dialects/firebird/__pycache__/kinterbasdb.cpython-34.pyc,, -sqlalchemy/dialects/sybase/__pycache__/pysybase.cpython-34.pyc,, -sqlalchemy/testing/__pycache__/exclusions.cpython-34.pyc,, -sqlalchemy/testing/suite/__pycache__/test_sequence.cpython-34.pyc,, -sqlalchemy/dialects/sybase/__pycache__/mxodbc.cpython-34.pyc,, -sqlalchemy/sql/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/sql/__pycache__/selectable.cpython-34.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/pg8000.cpython-34.pyc,, -sqlalchemy/connectors/__pycache__/pyodbc.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/unitofwork.cpython-34.pyc,, -sqlalchemy/testing/suite/__pycache__/test_reflection.cpython-34.pyc,, -sqlalchemy/dialects/mysql/__pycache__/pymysql.cpython-34.pyc,, -sqlalchemy/__pycache__/pool.cpython-34.pyc,, -sqlalchemy/testing/__pycache__/assertions.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/loading.cpython-34.pyc,, +sqlalchemy/databases/__pycache__/__init__.cpython-34.pyc,, 
sqlalchemy/dialects/mssql/__pycache__/information_schema.cpython-34.pyc,, -sqlalchemy/testing/__pycache__/mock.cpython-34.pyc,, -sqlalchemy/testing/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/result.cpython-34.pyc,, +sqlalchemy/dialects/oracle/__pycache__/base.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/profiling.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/assertions.cpython-34.pyc,, sqlalchemy/dialects/mysql/__pycache__/oursql.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/attributes.cpython-34.pyc,, +sqlalchemy/dialects/firebird/__pycache__/fdb.cpython-34.pyc,, +sqlalchemy/util/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/event/__pycache__/legacy.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/compiler.cpython-34.pyc,, +sqlalchemy/util/__pycache__/topological.cpython-34.pyc,, +sqlalchemy/connectors/__pycache__/mxodbc.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/state.cpython-34.pyc,, +sqlalchemy/dialects/oracle/__pycache__/zxjdbc.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/test_ddl.cpython-34.pyc,, +sqlalchemy/connectors/__pycache__/pyodbc.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/test_reflection.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/psycopg2.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/hybrid.cpython-34.pyc,, sqlalchemy/testing/__pycache__/config.cpython-34.pyc,, -sqlalchemy/sql/__pycache__/default_comparator.cpython-34.pyc,, -sqlalchemy/testing/__pycache__/distutils_run.cpython-34.pyc,, -sqlalchemy/testing/__pycache__/fixtures.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/collections.cpython-34.pyc,, -sqlalchemy/engine/__pycache__/threadlocal.cpython-34.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/base.cpython-34.pyc,, +sqlalchemy/__pycache__/exc.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/dml.cpython-34.pyc,, 
+sqlalchemy/dialects/firebird/__pycache__/kinterbasdb.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/relationships.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/type_api.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/dependency.cpython-34.pyc,, +sqlalchemy/dialects/mssql/__pycache__/base.cpython-34.pyc,, +sqlalchemy/testing/plugin/__pycache__/noseplugin.cpython-34.pyc,, +sqlalchemy/__pycache__/events.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/util/__pycache__/compat.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/runner.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/test_results.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/ranges.cpython-34.pyc,, +sqlalchemy/dialects/sybase/__pycache__/mxodbc.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/strategies.cpython-34.pyc,, +sqlalchemy/util/__pycache__/langhelpers.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/constraints.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/reflection.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/assertsql.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/pickleable.cpython-34.pyc,, sqlalchemy/orm/__pycache__/events.cpython-34.pyc,, -sqlalchemy/event/__pycache__/__init__.cpython-34.pyc,, -sqlalchemy/dialects/mysql/__pycache__/cymysql.cpython-34.pyc,, -sqlalchemy/orm/__pycache__/exc.cpython-34.pyc,, +sqlalchemy/dialects/mssql/__pycache__/adodbapi.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/util.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/session.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/collections.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/strategies.cpython-34.pyc,, +sqlalchemy/__pycache__/pool.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/automap.cpython-34.pyc,, +sqlalchemy/dialects/mssql/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/distutils_run.cpython-34.pyc,, 
+sqlalchemy/dialects/postgresql/__pycache__/base.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/crud.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/url.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/expression.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/provision.cpython-34.pyc,, +sqlalchemy/dialects/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/test_insert.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/zxjdbc.cpython-34.pyc,, +sqlalchemy/dialects/sybase/__pycache__/pyodbc.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/default.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/query.cpython-34.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/__pycache__/inspection.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/operators.cpython-34.pyc,, +sqlalchemy/event/__pycache__/base.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/pyodbc.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/evaluator.cpython-34.pyc,, +sqlalchemy/testing/plugin/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/replay_fixture.cpython-34.pyc,, +sqlalchemy/__pycache__/types.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/engines.cpython-34.pyc,, +sqlalchemy/event/__pycache__/api.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/scoping.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/base.cpython-34.pyc,, +sqlalchemy/__pycache__/__init__.cpython-34.pyc,, sqlalchemy/testing/__pycache__/requirements.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/pymysql.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/exc.cpython-34.pyc,, +sqlalchemy/util/__pycache__/deprecations.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/mapper.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/schema.cpython-34.pyc,, sqlalchemy/sql/__pycache__/util.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/horizontal_shard.cpython-34.pyc,, +sqlalchemy/util/__pycache__/queue.cpython-34.pyc,, 
+sqlalchemy/engine/__pycache__/util.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/interfaces.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/test_types.cpython-34.pyc,, +sqlalchemy/dialects/sybase/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/base.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/base.cpython-34.pyc,, +sqlalchemy/testing/plugin/__pycache__/pytestplugin.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/exclusions.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/interfaces.cpython-34.pyc,, +sqlalchemy/dialects/firebird/__pycache__/base.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/visitors.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/cymysql.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/compiler.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/pypostgresql.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/pg8000.cpython-34.pyc,, +sqlalchemy/connectors/__pycache__/zxJDBC.cpython-34.pyc,, +sqlalchemy/testing/plugin/__pycache__/plugin_base.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/warnings.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/mock.cpython-34.pyc,, +sqlalchemy/engine/__pycache__/threadlocal.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/identity.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/instrumentation.cpython-34.pyc,, +sqlalchemy/__pycache__/processors.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/unitofwork.cpython-34.pyc,, +sqlalchemy/__pycache__/schema.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/gaerdbms.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/json.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/test_select.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/dynamic.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/path_registry.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/test_update_delete.cpython-34.pyc,, +sqlalchemy/ext/declarative/__pycache__/base.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/test_sequence.cpython-34.pyc,, 
+sqlalchemy/orm/__pycache__/strategy_options.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/baked.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/properties.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/mutable.cpython-34.pyc,, +sqlalchemy/dialects/mssql/__pycache__/pyodbc.cpython-34.pyc,, +sqlalchemy/dialects/mssql/__pycache__/pymssql.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/instrumentation.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/hstore.cpython-34.pyc,, +sqlalchemy/ext/declarative/__pycache__/clsregistry.cpython-34.pyc,, +sqlalchemy/dialects/sybase/__pycache__/base.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/sqltypes.cpython-34.pyc,, +sqlalchemy/dialects/mssql/__pycache__/zxjdbc.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/elements.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/selectable.cpython-34.pyc,, +sqlalchemy/dialects/__pycache__/postgres.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/dialects/firebird/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/mysqldb.cpython-34.pyc,, +sqlalchemy/util/__pycache__/_collections.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/fixtures.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/util.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/functions.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/entities.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/serializer.cpython-34.pyc,, +sqlalchemy/event/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/testing/__pycache__/schema.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/sync.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/base.cpython-34.pyc,, +sqlalchemy/testing/suite/__pycache__/test_dialect.cpython-34.pyc,, +sqlalchemy/connectors/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/ddl.cpython-34.pyc,, +sqlalchemy/dialects/postgresql/__pycache__/psycopg2cffi.cpython-34.pyc,, 
+sqlalchemy/event/__pycache__/attr.cpython-34.pyc,, +sqlalchemy/dialects/oracle/__pycache__/cx_oracle.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/zxjdbc.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/naming.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/deprecated_interfaces.cpython-34.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/pysqlcipher.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/descriptor_props.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/annotation.cpython-34.pyc,, +sqlalchemy/dialects/sqlite/__pycache__/pysqlite.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/persistence.cpython-34.pyc,, +sqlalchemy/dialects/oracle/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/sql/__pycache__/default_comparator.cpython-34.pyc,, +sqlalchemy/dialects/sybase/__pycache__/pysybase.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/orderinglist.cpython-34.pyc,, +sqlalchemy/event/__pycache__/registry.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/associationproxy.cpython-34.pyc,, +sqlalchemy/ext/declarative/__pycache__/api.cpython-34.pyc,, +sqlalchemy/testing/plugin/__pycache__/bootstrap.cpython-34.pyc,, +sqlalchemy/dialects/mssql/__pycache__/mxodbc.cpython-34.pyc,, +sqlalchemy/__pycache__/log.cpython-34.pyc,, +sqlalchemy/ext/__pycache__/__init__.cpython-34.pyc,, +sqlalchemy/orm/__pycache__/loading.cpython-34.pyc,, +sqlalchemy/dialects/mysql/__pycache__/mysqlconnector.cpython-34.pyc,, +sqlalchemy/__pycache__/interfaces.cpython-34.pyc,, diff --git a/lib/python3.4/site-packages/ed25519-1.4.dist-info/RECORD b/lib/python3.4/site-packages/ed25519-1.4.dist-info/RECORD index 952adb8..607d8d3 100644 --- a/lib/python3.4/site-packages/ed25519-1.4.dist-info/RECORD +++ b/lib/python3.4/site-packages/ed25519-1.4.dist-info/RECORD @@ -10,8 +10,8 @@ ed25519-1.4.dist-info/RECORD,, ed25519-1.4.dist-info/WHEEL,sha256=KCRSbpx482pKJiCy208NYejONObuAgYlRBN1EilWzxM,102 ed25519-1.4.dist-info/metadata.json,sha256=LyRoPQ8zyOxjJH1CoRteHtukVr0HLA_z_rRyigiJl5c,802 
ed25519-1.4.dist-info/top_level.txt,sha256=U3-N9ZJMBO9MUuZLwoiMbsWSkxsd0TfkNSuzO6O_gYY,8 -/openmedialibrary/platform_linux32/p34/lib/python3.4/site-packages/ed25519-1.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -ed25519/__pycache__/_version.cpython-34.pyc,, -ed25519/__pycache__/__init__.cpython-34.pyc,, +ed25519-1.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 ed25519/__pycache__/keys.cpython-34.pyc,, +ed25519/__pycache__/__init__.cpython-34.pyc,, +ed25519/__pycache__/_version.cpython-34.pyc,, ed25519/__pycache__/test_ed25519.cpython-34.pyc,, diff --git a/lib/python3.4/site-packages/netifaces-0.10.4.dist-info/DESCRIPTION.rst b/lib/python3.4/site-packages/netifaces-0.10.4.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..3b79ac6 --- /dev/null +++ b/lib/python3.4/site-packages/netifaces-0.10.4.dist-info/DESCRIPTION.rst @@ -0,0 +1,196 @@ +netifaces 0.10.4 +================ + +.. image:: https://drone.io/bitbucket.org/al45tair/netifaces/status.png + :target: https://drone.io/bitbucket.org/al45tair/netifaces/latest + :alt: Build Status + +1. What is this? +---------------- + +It's been annoying me for some time that there's no easy way to get the +address(es) of the machine's network interfaces from Python. There is +a good reason for this difficulty, which is that it is virtually impossible +to do so in a portable manner. However, it seems to me that there should +be a package you can easy_install that will take care of working out the +details of doing so on the machine you're using, then you can get on with +writing Python code without concerning yourself with the nitty gritty of +system-dependent low-level networking APIs. + +This package attempts to solve that problem. + +2. How do I use it? 
+------------------- + +First you need to install it, which you can do by typing:: + + tar xvzf netifaces-0.10.4.tar.gz + cd netifaces-0.10.4 + python setup.py install + +Once that's done, you'll need to start Python and do something like the +following:: + +>>> import netifaces + +Then if you enter + +>>> netifaces.interfaces() +['lo0', 'gif0', 'stf0', 'en0', 'en1', 'fw0'] + +you'll see the list of interface identifiers for your machine. + +You can ask for the addresses of a particular interface by doing + +>>> netifaces.ifaddresses('lo0') +{18: [{'addr': ''}], 2: [{'peer': '127.0.0.1', 'netmask': '255.0.0.0', 'addr': '127.0.0.1'}], 30: [{'peer': '::1', 'netmask': 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff', 'addr': '::1'}, {'peer': '', 'netmask': 'ffff:ffff:ffff:ffff::', 'addr': 'fe80::1%lo0'}]} + +Hmmmm. That result looks a bit cryptic; let's break it apart and explain +what each piece means. It returned a dictionary, so let's look there first:: + + { 18: [...], 2: [...], 30: [...] } + +Each of the numbers refers to a particular address family. In this case, we +have three address families listed; on my system, 18 is ``AF_LINK`` (which means +the link layer interface, e.g. Ethernet), 2 is ``AF_INET`` (normal Internet +addresses), and 30 is ``AF_INET6`` (IPv6). + +But wait! Don't use these numbers in your code. The numeric values here are +system dependent; fortunately, I thought of that when writing netifaces, so +the module declares a range of values that you might need. e.g. + +>>> netifaces.AF_LINK +18 + +Again, on your system, the number may be different. + +So, what we've established is that the dictionary that's returned has one +entry for each address family for which this interface has an address. Let's +take a look at the ``AF_INET`` addresses now: + +>>> addrs = netifaces.ifaddresses('lo0') +>>> addrs[netifaces.AF_INET] +[{'peer': '127.0.0.1', 'netmask': '255.0.0.0', 'addr': '127.0.0.1'}] + +You might be wondering why this value is a list. 
The reason is that it's +possible for an interface to have more than one address, even within the +same family. I'll say that again: *you can have more than one address of +the same type associated with each interface*. + +*Asking for "the" address of a particular interface doesn't make sense.* + +Right, so, we can see that this particular interface only has one address, +and, because it's a loopback interface, it's point-to-point and therefore +has a *peer* address rather than a broadcast address. + +Let's look at a more interesting interface. + +>>> addrs = netifaces.ifaddresses('en0') +>>> addrs[netifaces.AF_INET] +[{'broadcast': '10.15.255.255', 'netmask': '255.240.0.0', 'addr': '10.0.1.4'}, {'broadcast': '192.168.0.255', 'addr': '192.168.0.47'}] + +This interface has two addresses (see, I told you...) Both of them are +regular IPv4 addresses, although in one case the netmask has been changed +from its default. The netmask *may not* appear on your system if it's set +to the default for the address range. + +Because this interface isn't point-to-point, it also has broadcast addresses. + +Now, say we want, instead of the IP addresses, to get the MAC address; that +is, the hardware address of the Ethernet adapter running this interface. We +can do + +>>> addrs[netifaces.AF_LINK] +[{'addr': '00:12:34:56:78:9a'}] + +Note that this may not be available on platforms without getifaddrs(), unless +they happen to implement ``SIOCGIFHWADDR``. Note also that you just get the +address; it's unlikely that you'll see anything else with an ``AF_LINK`` address. +Oh, and don't assume that all ``AF_LINK`` addresses are Ethernet; you might, for +instance, be on a Mac, in which case: + +>>> addrs = netifaces.ifaddresses('fw0') +>>> addrs[netifaces.AF_LINK] +[{'addr': '00:12:34:56:78:9a:bc:de'}] + +No, that isn't an exceptionally long Ethernet MAC address---it's a FireWire +address. 
+ +As of version 0.10.0, you can also obtain a list of gateways on your +machine: + +>>> netifaces.gateways() +{2: [('10.0.1.1', 'en0', True), ('10.2.1.1', 'en1', False)], 30: [('fe80::1', 'en0', True)], 'default': { 2: ('10.0.1.1', 'en0'), 30: ('fe80::1', 'en0') }} + +This dictionary is keyed on address family---in this case, ``AF_INET``---and +each entry is a list of gateways as ``(address, interface, is_default)`` tuples. +Notice that here we have two separate gateways for IPv4 (``AF_INET``); some +operating systems support configurations like this and can either route packets +based on their source, or based on administratively configured routing tables. + +For convenience, we also allow you to index the dictionary with the special +value ``'default'``, which returns a dictionary mapping address families to the +default gateway in each case. Thus you can get the default IPv4 gateway with + +>>> gws = netifaces.gateways() +>>> gws['default'][netifaces.AF_INET] +('10.0.1.1', 'en0') + +Do note that there may be no default gateway for any given address family; +this is currently very common for IPv6 and much less common for IPv4 but it +can happen even for ``AF_INET``. + +BTW, if you're trying to configure your machine to have multiple gateways for +the same address family, it's a very good idea to check the documentation for +your operating system *very* carefully, as some systems become extremely +confused or route packets in a non-obvious manner. + +I'm very interested in hearing from anyone (on any platform) for whom the +``gateways()`` method doesn't produce the expected results. It's quite +complicated extracting this information from the operating system (whichever +operating system we're talking about), and so I expect there's at least one +system out there where this just won't work. + +3. This is great! What platforms does it work on? +-------------------------------------------------- + +It gets regular testing on OS X, Linux and Windows. 
It has also been used +successfully on Solaris, and it's expected to work properly on other UNIX-like +systems as well. If you are running something that is not supported, and +wish to contribute a patch, please use BitBucket to send a pull request. + +4. What license is this under? +------------------------------ + +It's an MIT-style license. Here goes: + +Copyright (c) 2007-2014 Alastair Houghton + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +5. Why the jump to 0.10.0? +-------------------------- + +Because someone released a fork of netifaces with the version 0.9.0. +Hopefully skipping the version number should remove any confusion. In +addition starting with 0.10.0 Python 3 is now supported and other +features/bugfixes have been included as well. See the CHANGELOG for a +more complete list of changes. 
+ + diff --git a/lib/python3.4/site-packages/pip-8.0.2.dist-info/INSTALLER b/lib/python3.4/site-packages/netifaces-0.10.4.dist-info/INSTALLER similarity index 100% rename from lib/python3.4/site-packages/pip-8.0.2.dist-info/INSTALLER rename to lib/python3.4/site-packages/netifaces-0.10.4.dist-info/INSTALLER diff --git a/lib/python3.4/site-packages/netifaces-0.10.4.dist-info/METADATA b/lib/python3.4/site-packages/netifaces-0.10.4.dist-info/METADATA new file mode 100644 index 0000000..7ce084b --- /dev/null +++ b/lib/python3.4/site-packages/netifaces-0.10.4.dist-info/METADATA @@ -0,0 +1,216 @@ +Metadata-Version: 2.0 +Name: netifaces +Version: 0.10.4 +Summary: Portable network interface information. +Home-page: https://bitbucket.org/al45tair/netifaces +Author: Alastair Houghton +Author-email: alastair@alastairs-place.net +License: MIT License +Platform: UNKNOWN +Classifier: Development Status :: 4 - Beta +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Topic :: System :: Networking +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.5 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 + +netifaces 0.10.4 +================ + +.. image:: https://drone.io/bitbucket.org/al45tair/netifaces/status.png + :target: https://drone.io/bitbucket.org/al45tair/netifaces/latest + :alt: Build Status + +1. What is this? +---------------- + +It's been annoying me for some time that there's no easy way to get the +address(es) of the machine's network interfaces from Python. There is +a good reason for this difficulty, which is that it is virtually impossible +to do so in a portable manner. 
However, it seems to me that there should +be a package you can easy_install that will take care of working out the +details of doing so on the machine you're using, then you can get on with +writing Python code without concerning yourself with the nitty gritty of +system-dependent low-level networking APIs. + +This package attempts to solve that problem. + +2. How do I use it? +------------------- + +First you need to install it, which you can do by typing:: + + tar xvzf netifaces-0.10.4.tar.gz + cd netifaces-0.10.4 + python setup.py install + +Once that's done, you'll need to start Python and do something like the +following:: + +>>> import netifaces + +Then if you enter + +>>> netifaces.interfaces() +['lo0', 'gif0', 'stf0', 'en0', 'en1', 'fw0'] + +you'll see the list of interface identifiers for your machine. + +You can ask for the addresses of a particular interface by doing + +>>> netifaces.ifaddresses('lo0') +{18: [{'addr': ''}], 2: [{'peer': '127.0.0.1', 'netmask': '255.0.0.0', 'addr': '127.0.0.1'}], 30: [{'peer': '::1', 'netmask': 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff', 'addr': '::1'}, {'peer': '', 'netmask': 'ffff:ffff:ffff:ffff::', 'addr': 'fe80::1%lo0'}]} + +Hmmmm. That result looks a bit cryptic; let's break it apart and explain +what each piece means. It returned a dictionary, so let's look there first:: + + { 18: [...], 2: [...], 30: [...] } + +Each of the numbers refers to a particular address family. In this case, we +have three address families listed; on my system, 18 is ``AF_LINK`` (which means +the link layer interface, e.g. Ethernet), 2 is ``AF_INET`` (normal Internet +addresses), and 30 is ``AF_INET6`` (IPv6). + +But wait! Don't use these numbers in your code. The numeric values here are +system dependent; fortunately, I thought of that when writing netifaces, so +the module declares a range of values that you might need. e.g. + +>>> netifaces.AF_LINK +18 + +Again, on your system, the number may be different. 
+ +So, what we've established is that the dictionary that's returned has one +entry for each address family for which this interface has an address. Let's +take a look at the ``AF_INET`` addresses now: + +>>> addrs = netifaces.ifaddresses('lo0') +>>> addrs[netifaces.AF_INET] +[{'peer': '127.0.0.1', 'netmask': '255.0.0.0', 'addr': '127.0.0.1'}] + +You might be wondering why this value is a list. The reason is that it's +possible for an interface to have more than one address, even within the +same family. I'll say that again: *you can have more than one address of +the same type associated with each interface*. + +*Asking for "the" address of a particular interface doesn't make sense.* + +Right, so, we can see that this particular interface only has one address, +and, because it's a loopback interface, it's point-to-point and therefore +has a *peer* address rather than a broadcast address. + +Let's look at a more interesting interface. + +>>> addrs = netifaces.ifaddresses('en0') +>>> addrs[netifaces.AF_INET] +[{'broadcast': '10.15.255.255', 'netmask': '255.240.0.0', 'addr': '10.0.1.4'}, {'broadcast': '192.168.0.255', 'addr': '192.168.0.47'}] + +This interface has two addresses (see, I told you...) Both of them are +regular IPv4 addresses, although in one case the netmask has been changed +from its default. The netmask *may not* appear on your system if it's set +to the default for the address range. + +Because this interface isn't point-to-point, it also has broadcast addresses. + +Now, say we want, instead of the IP addresses, to get the MAC address; that +is, the hardware address of the Ethernet adapter running this interface. We +can do + +>>> addrs[netifaces.AF_LINK] +[{'addr': '00:12:34:56:78:9a'}] + +Note that this may not be available on platforms without getifaddrs(), unless +they happen to implement ``SIOCGIFHWADDR``. Note also that you just get the +address; it's unlikely that you'll see anything else with an ``AF_LINK`` address. 
+Oh, and don't assume that all ``AF_LINK`` addresses are Ethernet; you might, for +instance, be on a Mac, in which case: + +>>> addrs = netifaces.ifaddresses('fw0') +>>> addrs[netifaces.AF_LINK] +[{'addr': '00:12:34:56:78:9a:bc:de'}] + +No, that isn't an exceptionally long Ethernet MAC address---it's a FireWire +address. + +As of version 0.10.0, you can also obtain a list of gateways on your +machine: + +>>> netifaces.gateways() +{2: [('10.0.1.1', 'en0', True), ('10.2.1.1', 'en1', False)], 30: [('fe80::1', 'en0', True)], 'default': { 2: ('10.0.1.1', 'en0'), 30: ('fe80::1', 'en0') }} + +This dictionary is keyed on address family---in this case, ``AF_INET``---and +each entry is a list of gateways as ``(address, interface, is_default)`` tuples. +Notice that here we have two separate gateways for IPv4 (``AF_INET``); some +operating systems support configurations like this and can either route packets +based on their source, or based on administratively configured routing tables. + +For convenience, we also allow you to index the dictionary with the special +value ``'default'``, which returns a dictionary mapping address families to the +default gateway in each case. Thus you can get the default IPv4 gateway with + +>>> gws = netifaces.gateways() +>>> gws['default'][netifaces.AF_INET] +('10.0.1.1', 'en0') + +Do note that there may be no default gateway for any given address family; +this is currently very common for IPv6 and much less common for IPv4 but it +can happen even for ``AF_INET``. + +BTW, if you're trying to configure your machine to have multiple gateways for +the same address family, it's a very good idea to check the documentation for +your operating system *very* carefully, as some systems become extremely +confused or route packets in a non-obvious manner. + +I'm very interested in hearing from anyone (on any platform) for whom the +``gateways()`` method doesn't produce the expected results. 
It's quite +complicated extracting this information from the operating system (whichever +operating system we're talking about), and so I expect there's at least one +system out there where this just won't work. + +3. This is great! What platforms does it work on? +-------------------------------------------------- + +It gets regular testing on OS X, Linux and Windows. It has also been used +successfully on Solaris, and it's expected to work properly on other UNIX-like +systems as well. If you are running something that is not supported, and +wish to contribute a patch, please use BitBucket to send a pull request. + +4. What license is this under? +------------------------------ + +It's an MIT-style license. Here goes: + +Copyright (c) 2007-2014 Alastair Houghton + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +5. Why the jump to 0.10.0? +-------------------------- + +Because someone released a fork of netifaces with the version 0.9.0. 
+Hopefully skipping the version number should remove any confusion. In +addition starting with 0.10.0 Python 3 is now supported and other +features/bugfixes have been included as well. See the CHANGELOG for a +more complete list of changes. + + diff --git a/lib/python3.4/site-packages/netifaces-0.10.4.dist-info/RECORD b/lib/python3.4/site-packages/netifaces-0.10.4.dist-info/RECORD new file mode 100644 index 0000000..39899f3 --- /dev/null +++ b/lib/python3.4/site-packages/netifaces-0.10.4.dist-info/RECORD @@ -0,0 +1,9 @@ +netifaces.cpython-34m.so,sha256=sOwHfA5axT70ZGV6_TxyMEpnqO7znYe9Zf2qBARjkP8,50100 +netifaces-0.10.4.dist-info/DESCRIPTION.rst,sha256=EyJf6yFbUVkw5TpZ0M61ZeVbtCZdHYAqSdfTXT4lo6w,8397 +netifaces-0.10.4.dist-info/METADATA,sha256=nrFMGFClFWGJIsVo8nC9bLS6iermD9vl6cCQS56eG50,9130 +netifaces-0.10.4.dist-info/RECORD,, +netifaces-0.10.4.dist-info/WHEEL,sha256=KCRSbpx482pKJiCy208NYejONObuAgYlRBN1EilWzxM,102 +netifaces-0.10.4.dist-info/metadata.json,sha256=FHsfPfcnSFKu412WegZiPVzBB-sHxrG_wCDSiaB6SHQ,846 +netifaces-0.10.4.dist-info/top_level.txt,sha256=PqMTaIuWtSjkdQHX6lH1Lmpv2aqBUYAGqATB8z3A6TQ,10 +netifaces-0.10.4.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +netifaces-0.10.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 diff --git a/lib/python3.4/site-packages/netifaces-0.10.4.dist-info/WHEEL b/lib/python3.4/site-packages/netifaces-0.10.4.dist-info/WHEEL new file mode 100644 index 0000000..d7cd549 --- /dev/null +++ b/lib/python3.4/site-packages/netifaces-0.10.4.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.29.0) +Root-Is-Purelib: false +Tag: cp34-cp34m-linux_i686 + diff --git a/lib/python3.4/site-packages/netifaces-0.10.4.dist-info/metadata.json b/lib/python3.4/site-packages/netifaces-0.10.4.dist-info/metadata.json new file mode 100644 index 0000000..c7b0939 --- /dev/null +++ b/lib/python3.4/site-packages/netifaces-0.10.4.dist-info/metadata.json @@ -0,0 +1 @@ 
+{"classifiers": ["Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Topic :: System :: Networking", "Programming Language :: Python", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.5", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3"], "extensions": {"python.details": {"contacts": [{"email": "alastair@alastairs-place.net", "name": "Alastair Houghton", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://bitbucket.org/al45tair/netifaces"}}}, "generator": "bdist_wheel (0.29.0)", "license": "MIT License", "metadata_version": "2.0", "name": "netifaces", "summary": "Portable network interface information.", "version": "0.10.4"} \ No newline at end of file diff --git a/lib/python3.4/site-packages/netifaces-0.10.4.dist-info/top_level.txt b/lib/python3.4/site-packages/netifaces-0.10.4.dist-info/top_level.txt new file mode 100644 index 0000000..3f008fd --- /dev/null +++ b/lib/python3.4/site-packages/netifaces-0.10.4.dist-info/top_level.txt @@ -0,0 +1 @@ +netifaces diff --git a/lib/python3.4/site-packages/setuptools-20.1.1.dist-info/zip-safe b/lib/python3.4/site-packages/netifaces-0.10.4.dist-info/zip-safe similarity index 100% rename from lib/python3.4/site-packages/setuptools-20.1.1.dist-info/zip-safe rename to lib/python3.4/site-packages/netifaces-0.10.4.dist-info/zip-safe diff --git a/lib/python3.4/site-packages/netifaces.cpython-34m.so b/lib/python3.4/site-packages/netifaces.cpython-34m.so new file mode 100755 index 0000000000000000000000000000000000000000..487fecdf49cedfa58ed5073bb080767ee1ef8562 GIT binary patch literal 50100 zcmeFad3;pm^*?^^%p|!(mI)z1U;+#nRzt`Jq6Ezf1hNcS*oqDznUH~OW+p5a6iuSW z7zMZbu~ezG)>hkEwMvyLf(zCzR;^2`)z&Q$t^3ky&G-F2&%HAf^z-@r^ZUMjzt`*6 
zz%$P|&vVXs&U4OrwmWz3y`ZdOnXc=a^=H@Y8Zl3jrsX3~eNffpYaT65o1X@wh)u;#hwLi*-dRh(YqxDNeTTJ9fCV47yJJRy1daZiwvrmp(-*@L< zLeGBh`o!|vOKht*#-T13iG88mOSL&p?U6z!Kq=27fIr%B5fV(KJ=1*FUr#*qUQ%({ zpWb=#+;!={8vG#Xf!D6ujjB<9-BB3zEud27AMx+7QS*=Z*->~3@N}yZxd-X>D4g`4 zMad)mDN+1VABD3%_auB=6i)i4lknS6X8p0g#zK(KK>92SXMJ3hKE#`&aJGN=B>Lx0 z!uz7=$sdJk9^e=Z~dXoH~pM*C=(R%<-h>}PC+oN#G-*pmx^d!9P zBz!S=tUnKc0SiG+d0(DH|Jx{>?VlN?FX_91ADO0U4`I-yXbIW`l-)8LANWhr5;X$@ zb<=8vCul!w<`c>UG*Z3NDHc&uAYv4Q(uH5dcF(s6=HUxwI zpyu6G0g1jU{|;@7KNM(gXlm*TYQa!fdt+NCYU|qrjU7#X-?FZbw%SlvpnVGpYyIbT z``a7+zLJjiP(z?SET*Wl)8F2tHFx>_mMHJ8vYm~->e{ldu8uD7BKOt$LzD-wy@MKb zv~Q6H2mmh)G=_Z3{h{)Zzs*9yA}EyXY}n!tvOgvzbvHqiDy(Rk$jnMB(g+F&E^F5j|-)}SBc-Hk9m@Vc&U3k{-sp$@If-?+UkxJ3&z!%0p4 zouLk44z&A2zV=WD4OiFQ+3NRs8ydF(souQRk3N+LDJp{3vKKU#*4Ww+MB_d`P2z)s z4WU3sJ19+ag?mK2s-x21*3q?#inHB7dvgbPgPm|ssM*)p!X}HlwxIQ{pkEZBN0meA zjWrFe-F`?Chm<4Ad{7(?Ym|Vf>}cwS>Lp$NhLArOqRfJ#rl#`t5L}J`Z4ZqqSN$7R zq%;mww+1$MhFT!9DJ$5Km8}79B(#W%hhQbD?Q3obv}!(oQ$wf$<(q>+H5gC?H!ZIy zFD~)rg4u%dvhpKv7|O}Yi@;%sE1>#Q!Y)xafP*D zmc*f+|47Gj19Sf9e=nnwbBPBjIS@!BkB197{&dKG*Q_TVniRmgZD{jQPo8l?AFC}g z@!&Mp$7qEnK33`-T7`+H3U1fdn7C@s7H*IGAOmvWG4mr{g@0`38s;L_d(1q;%rnhA z+syOLyui#0&AimiE6n_*YV1j1*jJe+VEt#FggqAXanOS~HaPDyACEm4bC0Hd&U`lJ zXXe;nB|tx}&B@GRuTdP0MDE`8S_AJh7O$ z3;P=8$(mNke5|IGGRK6!f;lGn3g#0v&C49qbRF|anzoMlWKG-1+>N=PIW|2_%%^Bt z3-hViA2P?f(aC%|{LFlYrtM^Y3ihPTGq9dBpQ&lR%x7UQ$9#^aUCw;2rtM=sPt&ep zeyXNj&pcDp4lvKsw1dnSU`=A4t!cM0&(XBno_CC)2Vl4hkA1fk%V5v@BO@byM?x_} zH=?lT5#w9Ow2`}ENNwZ?^gsVbegKDQEd+;O1^LJil8Li=_$k4;Gi_nv@Dagb7_EgL z!*>eK-Cv7`_(8!5wxBoK@IJxOVXXyPYQuX4XI0B$;+=w{V_Hig@lAr0v}FZxuizZd zw6Hc@C^%VL))CJZyny&7;vT^ZiMJ3>795t)S~`hqf>U(MPU7!>g@PYc5Z_DuRl(U* z%caDh61TUKPdPn;Mn#A0nPCcrWqyiEDyiO8j%;@Bf?q zznnPcDD+?OeZ&poPYHev@nqsh1izj*k1xY_3VwjNhxkFk4-%h4e4pUA5YHyQSMb}2 zFDBk8`0G%jdEdwn-j{qJJ>yvOzR16!iJ-jj=#xd7Hh9@4z=Ibx;&bltp3h^qPaL@D zC2%3c6YKlm=y@k~^~h5_k0d|w(Xswd2M=Rr!5s8FJ}x{u0RDPQ0LApbQ9Jke!=KxF 
z-f{FFd-%jO{e?e%9!ed!>PyOdIy44D$-8>wX-FG5`^zUH^3YE0&{_~(cU?<9=eeT1z~K?!6GEkyoU4+-_r@`iX&yYT3VLiX%^IB3A=d!pMp zTndK4&@64>s?RC(>Cn5XhhEX?iQx>C4y0xrt6#gOcIa{NU3lzAkkRwbV*mG3A+ogOL3e(mPqRcx~3fn`mz9X(n9|OTaC?l=!Q&;c3fOFnGF)eJr zfii8-i}w5Lwa_^O?T&#(eV>N5_PslBz9Tdaz2BN#zjo*|OIYYtbbam67U2DluG>&Q z63R#&Ivp)bQ{8vLs;bP7GKfa8CB6G*unv?t>PNh3C_ZuZAa&6OpfW1FL$4_*z>R^5 zjC6lqBD>kWyyub3Z=K^i`&iDA6VIc<7|_wngq|V2??}JiO;?~ab)d+xS_vNL8G`9h zJd!&enx zz289vTpT}OQ@plN3)pgwC`PdZ__pdpRsS+g*?-j4dm*?63KJN?*){!j8L9BZ6xy{e zBOPjlg}M5Is>cJxjvCdlXg1t{Vj62;r32z?Mpo1CFmS@~a?~jMi!q~hhV+Et>To^u z51mR+iowe?S6{IuwVqPzM{2@SKR}p{?*C)^pmPhh1Tn))H4Sz@cHujRVKx82QVHV2 z8Fv87p_b`G4e0z*&7)%bd;_sPhuFaGjP(951|K@|{K&}A??40M#cH1m?PPP?kS*m%`@2CKx5X8?f&`CxfAk7AV&VP;2hrC??p3<{Uwf(jIqUkAzl_ zIC?%$?|K)VyF&>6t}9MpD})&#ZD4HAzg(`1Ujlmi(f&tWmpsi1OoJyfW!{bLIb!d5^~<@B z>W}sO8+>=OdV=uWMxM7_mmEaCS ziLNHQdXIrjwi6({6ND&DBXO$h-et*N;keD>aP?jb>b>WueuYK~F|#CtIw52LTS17k zl5k7_2MRpvAjl&n4wQ%rbr~6){OY`@&)#ZBWx>!TiafjLkyOy62}94Prka*fWm%}p zC~)=hdS}mE%)^ri%meTh6i0n>-xDDZ^@bvsqWRS#Y4{UsF!p7E0dxs{Pq=!2g9XE+ zRoz@IbR0>^R4yHWs@Mrt&{+>gbYzn5<(y1evVfV3h>p1%VmWCtWW$Tt@Y!Z*cqtjq z#`1&pOT|ON@CguzU(rtA)2_Y{mrpEup^OZ3wyVL6L3?Hotx%?c)fW#xjGmK^O@@~X z*(q~{fd;6sR#FrU|4yR9K^K&+?yt!3Y_%!>KqfoQKElXQKXv$e(7NuW7?_r=4PAlF z4PvYB5Ne=S!ay8&%)ZCDdjCq3?9Ny$#paqG7ur3#AD4YJGQt@;@(5xMXYE-bR5_J4+Pp5Fh_kPjvpSWEAI_UHcO{`Uv=a&Lf*!7Ru`!z-lWbgXUY z2KEzi-MzHSZmbx~9c$H6I@}HpmPV|#2)*h5_waUEfD&MiIsGr3_yOi$fEj~5$Dbb= z@v3bInuVDaGE5zONW{mD5;=)6oc0mE=SpuTj#>+nV`t4XE; zXeTVyPf_300of3Z>HvgBMMVJ^RaE4{IOulHk>Q_;kEJ8w1|0uQ1EI;5JT8&{_`s48 zFlCHt`ouHr5tgbbb0&ars2Qhrwf~U)?OU+Xg~uLWh|&2@I^v}_39fYAXEbx%*#P$p z8xL~n?fECB^gnXesmn09_&>T;$10}IH#OL#4T?fs#Cop;nd`nXO&I$h?in($KJ17a zxTnYSGPM2&=FESN9jwP(&E;y)v3)7mEV{vU$rUJpCyc&NAA^wIi`nAygQ>ZLxBnf^ z?SBl-JCy#ed*jf@p2u}>{nnAK`kJ1{>_eZ!c0G@Hmh8@ObiV>I33iBjtZ+m`?p>_8 zt~uhm_sC~2U=OHvfIJ7>GeEV5Pl2;JK3(^A#}C#g3^$qhA><<`en+KF+(QEM{3G`J z5gNSk*d_TG1@9E16Xp@2H-pEroFiDc=U`UML@zY&U{BGDh|iIGGB%-dXruBV4fY1{ 
zR^S*3SS?(=&!PaG1AD%A=p&rpWR$or-G;RdBWw;26yF0p*z+$~MRsv%gFTU|eHLyMsVZ1$+D z_X{=;QIMaCp2OHfoDAxR`u?Fhk6?(@y{qa1s;+Ou)%ywt$EVATG*{mvGM>bCp8BC> zie&g9l!nhusjmB;?;jqThrz=+mRCPG4U1t->%m~Vl@noy{Gkh!5io&Xo{@=i^N{80 z8v%0RBU~p{KyIhs>xCY!;0Awqh)tX2Ta=vs$A^{zlCJl~sOG=PxBNZ~nqd_+-5$~I zV;GDbF9A3G+jj;5Od65-cMeLp@Tv2@==B!N!4RWfTf=&xiRdNt3Am^k@fR)YD9V)m zSU&39IDEcEFQ%%qoC0^Iy850(E8k$;UqJoAo+b<^gc|1jSD`EB{E6?1w}-C=mukop zwYIm1OQY50R_yCaJ^D@UTBqJ9q0y(_avU4f#6Dih#MOa5Q=`SEd9jd3X>>YDhn~i1 zq8aBkTyI#5&g1$=Hb)Q^v{%u~mQOIyy+hwqvf=W_T)lnZ7`)x_0+<(4KIY@cw`%ok zhJJJc{>jW3It!(bJ2Jqo;lMUscoXWqgSRJvXYG(zl)||eEFqI6L9rlW$np=A_B}3#JIRa%qHRmayDsve?1$m$y}4 zf{F!JAWMCOB}-U(O_m8@xy@uLR4lmgz-1+U$fKCJ53cd99a@d*!K)X79bMd-z%5qV znxSoA21R;*YR1qA7|=7$KO*22)PdF#kSqeuG6lq#0_H^tcu)zzaZU-iakPLxpnCA? z3R6I`DPVXwqQM>|fNQU4aQ0{cSBQXmQ@~hLz->_i3X}j`jaLHFM+>MF0cW2iU`v#M zFTemBga!Nsbx}4*5CL0E0kThp5objSxL*mt`A}(a!)O6dqI&S^c2mF%vkPxvqiA_y zml80;60m->fXhU{c2mF@Q@|}z0v0O)_~t@skTzOCxd^zx6p&~NXp9o@85m##Y?_pS z=TR4B149H{W(qJ&0W+cm+^Ym|i4!+oH(J2ss2;rf8dE^3Dd2B!MQqTm1n`t90_sN# zxL5?-XbPBU3OE=gAYTcSzIFBH&h2K&&ai7bV~$Fu(@5I-&$TgSsdiI7Gmm zrhqt8z|<%KcPRn9W+Gjx|B##zQBm$l=1tggQz7-|lJutuq`07mw_yg*qY@msNKbZntrhv&& z0)D0h;JY#<;EK@#9zylt)i0R>toh)@HzGD@Qv$;CLFH%x=Zk>1OaUpT2G>Uk$W#J& z8-~~*VYGmyBH$mUfN`dPwNV02fB`nZ*N#fSlch;z`rJcb3=D)NW*;u zt%3G!o;H6lh}$$gxQU`2_cHjKw3*qtJI~N~dkKl!8iL!jPQSlP+uYUB(A3xv3~5En zeDzhe-m($}m9JyIth}O(d3jY?9rL1fxeJ-sRuwTTEh~XEsqlI$%Ibj|$alMGyzCPlR?SW#9|jY2|RueVxMT3b}*E%%Cu z;)?pRy6Wn>6=E>h5_d+l1zH2RKO;LUCmXjtZ1;Bsd51{k#tn7D4QN4x`#}6%>VA$b zu>OvQT|rG--_h-9>kfuI!A^f;pm~?4!LuDV1~qxY)E?eVf^U*Ft$H)>>p@L(M^~HN zi{fe6+|eEKgj)P2f56}5LF+p@y0%3mWQ`e9;}3OrwFe=+3HR}McC_Hm7SA^SE|hfn zJ-BnIq16-W5J8LvDD08W?ea95x0VDm$N00hWKp`hDJ2VLqN6fB4ed>)M5x~FS27zs zt+w zyT1#{Z1yAKTSEbK)Djh3g1BsMhVtzpOUf8)bitFmJS`2|p+K0%QX%LMg>d^@&@)GZ z(+o+^_MpcE7tI~R@I+7qJK8WxcHvIFHbi1*wrQl!j$kmrxIv$#k+6N|t05U^9CK2~ zgIV1EsV_;mdh)v&=KQ9Hd353~%Y(#yKQG!?bv#HBw*GXteTf$ccOsw9EonheE&Z3M{rhHhdru|}a37e^LxQKZN1Y{sTf}CyhPd~3l 
z({~%XJHg?&6#O2PQRjm%Lq5+EjpHzT=ZB{Leb+%M$w~5j-vPtNfj%Syr3&jswfsIa^LT( zQ0q(R*D+eZlCXZ>q=KTP!e#tiTqN+yr0mKlc=b0lw<5kIsi2f>+2^3Tpkdu1LzRvDoU&X0z8=B^_2tg;m1)q7OpvJN`a0GMRh%h<#Uu^{eaqylfXq_JWG7aHJEq`eP~K{v@D{U;3Nxn_E%~gkHf0i~ z+#<*=L)kJ@uC4AH1Z@0LIEeFjYHQo$4{Hh%Xs~W>%Rr%i{x4)Ks+M z8>Ez=TzO38?8wTKXf2Pj+-jYjv0%<42Ihz+oF^8J9w*V`IJ-3|zcguav4nP6QmGoT zGK{lrof5Qaa2Lx+U6qvG^bH0ni`TA48#~b+&xlpXqvAT!9wNlHG3f={@}!rLUbUfP zz+bY#>jDQ+53kq|XUJ8V4faKWLN=I{C01p-$w7%02L^*ki7u=E2Fc%!68ueWF2#sf zQZd9Z93w5*5T2qFd~T~bi3lFBu^gTW(;)oO!O`XHd@K5c(3U#l25Cg8Z_(_wZ2rT@?T@%Q)E-*r&@R-}889z%K#=}n|hk@!2Z z2}rY$79lM|T7%S#v>oYUr0bAwMYt zi~BJ=fWl{`Fm|mJ#@vY(9F7=V3)SpK%%k|a&2Gf~0VK8}jDVQa8X(Emf|g=>`b22V zZ$M*P3~$8rcBp2$AvnfiD@W5YcWgjetm7A~(PPg+Beuyb|20*zeVUBQ?B4^7`w-wY zai3H{v@sb1;yxv(X8aI8Jc#?^c7P7!kD!VBlCWVkp~bj=6HYKbC;S!RWaA~AZ{tP? zr#e&MfH+MjlIFag%(@`y&hL=JCWyy5$qB?ENQU!TwihGF9Op0Mfy4@u>D&R2#u+*v z9FXmN7ehR5jD9(gd?$?(m!MPO#m;A7kGNz(3Y;s5r0D0NrqKB&%osO8$3rf(Qs@0J zblgO}0?0Baf9xNZre6hV%bh>LXI#2|GqM%V3JRF1Uk znZTNCK{h(a5y=r`lXEhWTzw{Znw)R3+xenfi?awtaf@``DBr4$xdE<;yCJR{LnTMBE&|Kb?xZ12H*@%QIJhCRWt86+MRhW1;?AXtiGe#vp9eoCZlh4mcn-mr*g6kpb!cN} zq3C#Gl}^;zi`Yr5);9vyoFBokiC$d{L<47{Xi<5Efl>?zZ?W!XAYLQ#CE+5y{>bEiOXqHJ`sm=Rq|w`)aZoDpYC-OBPNuW25#&xMw*ZOP3UZJ0BFtim*9mgS`A5di zcO)ndI}g)YH|UJ>BhG(O(F0QR2%?q2X&XbW+AaZ@l91TJ=rbe|JF@|5#!uLta~XPu zX5$Vfv1 z`uVJSjRX$OxRK1T6F)Yz8C z5%F*_C}SNf@F~4+03ba`6^QMkIuoU~582AmE}`Fv&k2`~qSb7RFi9r9b+r)u7d}Pc zPs2p)vu0buQ808!h#s_vIN(*QVkFxzJ39Qb5WQv*eHkVanL?RzY2rI~BAgB00G;>& z<4`lYIKO;I*kP1#%zQ-HFg$GfW5Nl>6zcj3;bh|`@_s5YYW$9F_^b`(X~vx#VxJRE zH~MMHe-ic>>tVgbe=%G$jB>i_3zp9@Iw|MNLBN^DE*k6KgtLt_M*CNU^Nn#Fdn1Gw zJI`S+HJwO-(@V$bdK-{J=jZ5ZqD?03QYQ~4iFT=3;T&d7jPO)A|3;ozLA=f`_AXA4 zI_D&+YY4K=IgU1P%7neqS&S%193wTGoSQhk#0%2oypl6lf*>u<4;UPYg0wo{rnDqM zI-R)`kSs{Zc^*ZN6=bKgkbO=OWViEFPD$eg+3Wm0dB#f@dNtQG=wsppo&KCSCoxT5 z0aE8*C~1<;p{+Uji@C(f!t8KrMBF+Dwc+H`h!WEUNpRlEE>96NB|Dd~W~we@+qs%1 zpC&bF&c6}y=nU6%=XoTXA)-CbbBLTG(lRvHO8PrPKMWU5Oh}xi_ffAu(Gasuz2?yK zb48&jNG-R6dhcwRueG3qW<~qhe$kz9R 
z%ykC)o~Qo~aIE7Qd`{rR(dYw_dROc-2`kZ|}Q{O5ao#dcAF6$8v zQ=1p*sk3xWkFMp|RwWkdTqs_ye443Lu z?#Ci+X)c~F6U+5YLfUMRa-XC~W#Nc*aPGnEDZXL2#5(BrxPL%ea?%{`Jd6!!G-)ok zb(-;8!t;KL0qro}1Z&c%?37{DvV6X5{UG8hu%+?IAF+-<0gNjHZ>lSvyA|UB%x$g& z@zQuaXGn7;UJG8wc-#h~xsvV&Y>aQ}Xw+OT(j*u$u%RoNk(3N^uTtE4G@P8gzK-I! z?k8{92z51M6GSI}%ZK6w&DKcd^fOT&PeNWMk^&PU~gj`hfo){0)dm7GCh{Ppk^sE)Q%(x zm<9AGLtPNYm=D8I>>9IoRnElP~_(@V}8m48p#M^n5ATo1*jRZ zP$MOW7Ed*55H2aXgwqTghH*+B#itveL+h0MV-Vvpc)NDWLN({#1Ouil;+i$b=wkV4 zEXNv&wo(>zf0wPxIUg0GCrQL`;NXjdqM8Nb zW~1B2xKo`NNk#w$9yebk7~dmT)=aQDw8>de*pUs`lU4^p_sej;?(TtdHup5x)9#j2 zh}(wB825vi{9@g!(7iF~{V)wAYg5L7nESpzqmQZS^nct7;7lHWZ7-YN0K<&`4uxyR zH7Fl{J&SGXP<9PK36ex@s;Dvkj(OOnxC5B@boaNxYIFYp)^WId&;y5iA4&}OU2s;i zdo6~t$NeTs3f&i?8+)gu!M>V4Wfu_5Hf28guQ{fyCT2`YM$=mIlu}}zDc=Q`RygHs zVtdm+<@A6u?JFhc9B7v|B66H(L&r2tpF>kSe}y1R!=8tdoCR#bCN(tVr4&n1Y4cGL z>!6X6U&lw>HgqR>(u`7stT7+WOv*6Th3O~F;$Czq z)^P#xUVOwluEl4fgus=8%V|?wd6VG3WWBiO=Yl^yQ(2HL8-Q3BHi^KH>`mFC*ST{4BxiiC;jx8ju>w zlTYR4LH7u3sk`~ORt#l?k;7dEryFh_;*#AB2x^ae7$t@7rLfB0DNiG`Fo+vDmTgnm z5e#Ckq8P+}$ig5_L!4p|r@_h?#7Dra?N#&1>u6Et6PIR70#({nhE3eL@U0_l8so_* zW4KNyA>=QCT4~)=Kr<~l?U|*h_gumV`;&l6DB<}^mabx0y&&^NdmeyU{rh0C{Sn}lc$sZ`(O$yTQ^sJ78`-ee)C@IYtVH}yohdSm2S77*)*aw< z7y)!^>TJS>7B|Eoo;t6V%r79kr=H3R&6tkzshMobVL0H6sq+cP8bvJ65@v(D$Egbl zCuni}g;wg+rkxaYBN_ahR<8Fbpx$gN;dY=2I_{?;N5RmiTU2ssAdn$r3 z+kF7^Iqn+p=DI(H{Csx-WS-&v6!d4hzk-}X_q_aMc8Jv!{5{HCu}#KK?Bnc6LuIMAy}sUnsAK4lf$$J3C9}OLjJUe2*(*` zvf+mrm4-I`FtsB*ZM=RdT6g~el$!e?e17Tv5&HLU_iA|cEBCz!gOT(rC(3tGBii(h z9GJp5L+5JdJ_A+qJsIW<8;oRgCqrSo`(nnAdp=5H++BcU-8VvboO=?~Gu+R>cux0x zjO;P)E+`-GZUUU(J_$ULbAIF7gomNk(e22~T66NC=G zzz0`Z8Qrm(?Z2SX#BvIffq$h{6{E$y3FD+rpYT443_b{PdYbZXHhMaJk{ZD=sGL5T zW6ELN2zyU=v)nMQL;y}tr&GolZo*Rt#~Zi6Zqui7(n(Od@K$rtWkj3im@#V_P|v^7 zz>G|8J3O7xVa9xdTHN{gNS`rfFG!3BVc{9m2x`WCu+NO?w*zz-6QRS58T6fDY(l#; zGRT`?+=u9#F_Unz!7ciXEH<2KOou&ZEWQ!+T&vmoj9rK!{XzLkL0^gQ#7?;gZJ~hs z@>AwQHM25SdB!YR5-xdBm6$?aQkXVp3skmMv0L`tpq-nA_7pz%9rVCfwEFuR^C+P2 
zPHBgG^`-Ll)hYdS%b7wrvj7357YfbH4h$W=NR{^^-1K5qeu8c((YZOEJ-=Pk{qhyk zd|tb>;LcE&rZ&=|K_|TU-7|pi*>$0W8D+H}aO)iqw46)fi zrq$|1&HyWbYAabG;j&ID?jd#FF!Z@03&3XpZY2vr;&p7S26j z_%#_W2XLIgivXS>uoR4cC9ogBFoEv@_!xk;@M7qG5Ey^JpdYw%D!%^y4vOAH-J%}= zScLLLKL@ZDz-bQv_z{7}0sI9(?qU!of$Ovagh~L0(Q>``n9iaXKy--|wV^Up>vIl5&#=Wl?7RU6UOUNv<*e4k3o^XM$7*b=uSo9MWjnd;U}ca z;{~37vuBS3Fctvd2Ap=wD+Y%chE0q^m6r88Op5vgVmy91&L2PM2e!htDgabXH~@Yv zga*Pd`=3}PbcORYIkpaCGF|^=%zT8y87=q;qkeA;_a@pIT_H`sFNOZx?u9oB;M@_*g zX!;&Rh(0D(DR;}BrGbv^njRm^BdUG>zCAfS4eJT9Jfqw9-?=B7*Yx!9vFv;mq~#jm zp9?`tn(#AFJu~)3RJ5XE$(9|OK3~um+3FARp=Sx-YAgcThq)3gi0y?DUL-!2dMKRV z4lR+XBU|X#0MGTJ{;xBjuuM}ev0ORr`(N6V^IddQ55&ri3HyJ$M-AkyvA08y3&5yG z{I*z52saV`5$)9)OHhv2CIvy&ruCBR(%R4O4)DdSF64MF zd>kKnkAq_>21ii}y1==xSob>i5ABXE)@zLYkL#H66kL*S#wA>fKzYaFb z{wHJT8c|>dX4_o;JZv^AMCa?Jq8b%i{#1+xM5I<0uKh2m-Obv(_n?6cVFP3`PIC>| zwJS_~xe)Isajpk7mxpTxrH0e5miHXOeI7cJXF%RU0%rh_IhA)X2oL~J$f`X6RuOmy zZA!@+bbl>NHUVe^pp{$<=nMea=YvRJ5qlZJ>KU|^{}WU|fHoc>s(QOh6d4jl_M^6g zl1Ro`7hoge={%N=M}+lJdMS4*er$4}wx)j;(+XLOct_8_A!Lysz*vmAMH>Lj1b_(_ zJ-;~I;2&k!oxz&?ZIC$|z%AigyQotwd|vQ97_NCy_?lRgcNRwBJ}T7+-~fSk3-~sG zAF<>hfV&9%48X4m`~kod1YA)5SpxhL;S~Z80(hIi6)>0>^LhX@W}XK+enNoC%2IR- z0HOIA0HOJm4WRHG04uUssh7SDv#*CB^c9}h zKSRrl{)Mt@K&a*N=A(-t{IqSL{yBwo1NZ}h3jusiU=V;0+cgn&6+~fL#gw8KQz?qN z8kO3ahXB2g+Djo)AJn^8`wYr{joL-W0q{C}o&f-tb`g^+0Aa4yPP+l}b*vz$?E$}w zwOQxmLw`f|lnf>Q2ueS&ZLYPxsT!nS8gvCD)@` zZ?c~YR+)VN3)Qlpx(|SwfJqWH0SCb)6YzFSzTpY@kv*J%V|Qp{@Ec7`zqW2s>kxPc zsFuvJeo^ah)FAgM)I1PYxI$`@5Wb6tP@%|G;S$C)_NcJ*i7?>p zSU&uiW%oyw zoiNLOA1TZGBN~-uV%P!#=od@0+-z9Ifq|qfF0$%_S1)LtlA4|ppkp4~sAS-Y-0ItA! 
z1r|-IMN?zZG+Hzr7R?Terq80e&Z4>5qWOtMbI783!lL<;Mf0*nbHbwe(xNe@n>r^0 z5S={$MCSznMCUS#X0=7rV9|71G#6VmS6MVSTQtA0Xdbs{p0Q|NvuHlFXp9-AKB)ji zp9}z^PaXi#r`)2cw`lwpO~|6T%%Zu@qPf+g`K3kkq($?*Me~+L^QlFXaEhr<8UWE} z4gk^TGytMcrA4#eq6t_uJ1v?kESeiEnx9xSzp`llVA1@=qB&vF{L7+A#)yy@O9vqO zWC9Rnxg^Etn$3;MbmB3^jb9gESl>rnp-WJ`z)Gc7R{3u&5IVz+ZN5I7R^@{O#;>i z(Ps(((RnTa(K!cz=zNw%v(lpZ7JyZpTe<+O0ieb6;$1%GDY<57*K+=aO`HA~gHu5!0A}38 zd2!VXsLJ>y!HS(&6q#oJ-c~UZgX_ddvrctXUV_6A)7aB1FUR!FG&aAo89Fh|D6Wi! zGEB3tt~?LwGtDTg=EbAEvo&6`81%^t4`i zI?g9dS?5;X2!5ulZ&zLcOEP6$P{~_NnX-ORxg2I?%6P4s=a+9GWy?vX;%$^NE%>3W z`ZeVI{y1ABRCxs~#*{Oq@=mywDW|LQ9(aW*C%f_s^qnbZLFIJppP6!UG<~c-GihAB zJ;|LkR!@mdMmKSvggxnu_$Be>XxN^i#mAqCdaTYS2>ZmiPzpCPBURiu-JTR5PbM9x zkr%a8c>)Q@Hqm4g8ENC7PXSIuxU-J@h#`k`r+l2% zA@>}Rq(efw0Pxrp0btpw4rmaeoHj8^Ay32(FnE+uX?I2zT0sjjY}6x$J0(V$MtYn9 zgvtvtGe4?k7VHK!XY;o3Ia&HylaxJ`oP6owTvL*`XPzlZkW(?pwM?_^_(xq6^q~n5fyEalpWE_hSDpL52Vj0YVlE|?r3vJ!<0+W823;Snh`9%4BFb#_7{VzM5&8&CfTX1#g6R|gJoHy6W>uLT26tCmKAIu z9YIoV$!4!thCL>pSN%6xSDE&Mr_P$hn5@Xs2(%}uP{BYSRe(mSA_e%~GJ-I6+c9jl z)flig?O{9C0R4+3+?A?c`w9c;3%Y|{3j*zpt=&!j1%bT9i{|55a@{-UZ)xvdusIM4 zYC?cF$1T{>*tj6Da1_Bpi&IgVVn!h+%fu6!M%Q=l3bk~!{ zif)w7d|(;U*9$v$eywbDK|`=@zC5%l&^WpR;yUqmypx4Cbaggpk%zx!HNvud#~&V0 zm6s*&SqO%j0v)+w7|)dgjA=NutJ5D07Y8~3!WS^A1zH4Hkh`nbVd+M^W;2?gVKe;MU>Q{TR*9nJqAB^Dy=a^<2udv9@`$(SDtHmDR$|b8 zJKY~`LAVjjO(sRb=I%giXnvsmOokWy5$x`acpeL{jo+);uP}7OV_$rK-_MM5j7y)u zUdCqMRj>uQ9vg}^7W~Xu@Whg(g5DTwTw;Gg+h@P_f&%+H+DiL@3yhUzQe|Yt8sk2) zzomujm!5Z#v1v>Bf(>i3^X&;MZ+db4tM>K}_uI$6Zf`BHr)JqZ@3?90g4U%=7Cc}( zCC%RYobj4*&wAUbW9@Ipd}!~rJ!k)NPgXMkqhS5oRX4XZ{dE131wF(mpO^A7h)P8{6NqosxPts2s+A`5ed37X?#QjK0?y$~7AiuUK{8|&;> z>c+}$`*CgGuZ`*UyBs&#-`iDS-yf@_-2H8KNjKISYiwsu>@l_&3ykSE*{^`teif5t z|L{EH!{?W*dP7<|su}A9fO+hfU0_^f-+71q-V2QT_rEb;+Pgp2o@4AYR0J67;AYCI zxzSi^tcM*P`T=3TTQ{=oPdXsvjm|slPK4CY&NrIvZ!x&W7@=R-FFSvw{ZoCNJ=I9D z?<}x8SK4-85NmlI_&mq_ZauX0Pos91akGB0>CG1>x>DNL-tL^9jokB_uC&s zaQ#^`QsC#L*Oy&vytpB=!u~T`vvHn1`+mE(*`A%X!uU*-z6gOk8dw>7mHn071@`aT 
z3nc%@W~6|^X5X>yW`@QNd*^X`@q>H_V*B-V*riAcxu{kM)rQ==FM9l zIO)TlY20Y9Yqn=LKVX|Z!B}q}XFF9lFyH`3KI}(I$c z8J`%Fjh3aj-AC7J>WX}&wRKuos7*a7b(dCy7baJgtt~1ot20G_+K5Lra z(!9N;ahE@|08^d$)>~h9J03>al#92{lCiRAod{UYr@it~vUrIyzD(@U@X?GX+VZ8f zIX-^982zeaYgO_Ef*pKgyQd^4=cM0Qd*-)=y4(Fr&9xqALsw(VQto9u^S8+V>RiwK z<{e!Po!gPit8wRdc6EfXw03mO$1;G)9~Iv661;Br8{Tgm>h$4ueLUv0OADUs3-A@n zmMKc=%B!oi_RT&lomj1XUH;}q{OTIF+WF-5#h}Map%o7;iEm@2kCfBi*^1W#gQXL1 z1*VMFU`wDmByE^=czkOk>YzX%=-UDt1zWI7;7`&(-r2Fk4~@)q8}hVZLwl>Q8LxrX zYO70D`ASPlyjYK|7bur6!;^9Gq}}q`lCb`8WkpeOSp_p{W^L=VLedW@VI` zI(1{A&spRPp@BdM*2Mdt1MR-^{9PR=fo^!;HeYDG+#kXlq)YwHXwp|#R9sPpNGPvj zmo_)>xw<}IM{_g9GszA#&b0VXx2$Yc#JBBAyOO$^`l=GCud1#sUtU#Ip|$zj8asDIkp?vcL1&;z zQ}oY(+#VZGGj{ zz8zf%1$lKede`M|!xN413Uz4;p;cF1UbU=R+a3s7A;EFc^|d#CH-uE6u>U1RB`eB= zEy`Zz6J~eGKtjM_h_>Rf)*_61P!WsA9BVro@ILEveZkg1BVPiJmw0dXcQM*Vza!mx zTrh_yHK>h?Vp70!wBPC=_Lszxz!NpQFh(>6MN7AYRR#UVV@!P$a;u;Yu z6dyf3tS;qPLnY=IOI1z>N?o+p-q5v6#i2Q7V981!rbxvJaU3L?`5JOYj=aa4M5SdF zMQZk;6=h_Fb@vsGey_ON>B`LDvm)Lc%IaP7zQz`KQZzLux^H?Ty-X@q)m6n6m|eIP zSK3H%O=+bT2;xX2&n&kZ!5W}RxV3dkH@n+|fi3NLR5aS_(%L#@atwBF#yKX?8IsPU zUv+pwd0knJIf*T=DXJX(K6%6;hrCQ0?M*vnfx$q+0s~Xa&b_&rzp0W54Mfe8oy8@jg0yo)H2xta492I0E(II)Ggf|fCO$iU0z;To;W-?*K3fk#F% z@BV{SjH0%N&Q2V4t?5zR)!xdPuUY!RC@U{tCgXs!MNvhCGRI>0VKeMyMlzQ1Tv_6L zJ~<81Rr0iR*pNSNg8{AQsLNDZQBhf6QCF^QZssD3r47%Krn@nYxo4pBOKQ=UJPl}B7f};8-v*!B39VY5I0?f6<6f-Pb?8N(D&cSo-a@H0*iTh=t@5Hqh(=oWL z;A_X}9X#EOWVTn~Jl!|E0e@5-W@RU8fN{vD!Oy%){MHVy&PiFYr^Fr!&2v574&snwc0 zy5SYr#aig`at6P({#$nQ)p905;GvQ`P8hUg8FsuOzH9)?H>WCbY)242#Nw^=MCb(h z{s#;R&QKhQya~`3kPvPM#M&?@v)XGv@S5tiAj#pBEP`g$2fvEf@()%ZlrlTYZp|8I(u3vL78`{Y&d>%WA5< zb((Zb=03F6fqkP7tAlKZ)a6AClkhH2wjPky20@|oxH;#ANysent-;n8-D4y|$2^|# z7~c{v7_*(bd^?-`jW`VXa7P7DO~f&^Tt3ojmM9LF~d0Q6c;VzvHI zxpm;e)_?(F&C0SQhUc}?vSmeBNF+dUmS78*U~zaFeWU|gwzJW%LYcnL?F=w@9gk5 zY?I5pYR@y;Y-(JxMYNQTfvi+x$R_C_^K5RI}911kSy=U ziJsZarO#9j+Nw=uO)V_~9M$=Vh*jif$Sk8QzjW5)I zEuA&z$k!`aKzRuoJ;H*-y)BHycoZv|W2>lYy{yQxGs!`RutCXFS-#2^nrnhg!EI`` 
zgdpV*8A@u+z?>hk2PO(LKC7_AvMr1*wR$z<1u$G`%;ArD2;SsoRGrCW6J#+J7uCw~ z9BU_b^J=4lLCdRE=(#nXInrTzOd#Ck0K~N>Y}VoFEf@+ko@^3CP5%Bs#;Lj_!(9#} zVZxVX1Xn4r@5H$}vd*h(FQl=iT3&fk=VTr%)ri`xBDqtJ`qEk1N=7_@9g*7f-~hG_ z%ZUUvnu*><$S|~K7t_(!UIz-m=+J>Hm&^!i;a4w%LM?aEvJbErxs}C%*|Y+Kxe_xB z4qGOH8NwDh=U8syAWC-5Y|dPKIb_Th)NnGp&u9Ztl(9x#6+zJ9;t1ZSgpGtafU{h6 zbundG-S09lgvl;h8I=ou=eCf9TcDZoicsUB0V}W{$0`+g)<6tzJ|R>LtjT06AER~( zzQ*n@EQGDyZ8)e#OOg;)J8C)E%gHY?pIevitP3D&PmC*VqTuGhmT=d~YHC*3m(`c4 zl>!^o@|sG4E30eDdD9UPrrZ5_p=S zqZ;|bC-cBzxt$KOE*{}D5#IE7v^H@|BteQ@CeD~X&T~<%n(kI-n(}IS(O2g#u^`K; zaq9H$D(c#T9T4{WvKhjhBG+*_ZZPfwt#Zs{Z_Q&4E>Ym(Sj6kx7OIUa=b%b;*tOOp zAH+n>lImilyhSangsk?yy7hROSa>=`c&ZJRyp0WJi5w-&Qx?ZpMRm2e81G86JYin7 z!Ks}mHME7b+zNcmIPwK0-s6TEys1%+S&9#MuxU5)JUZwVtm`O!2HgTqFxu1*L&RKJc%yD63 zG=)PgvRyLUm94e-TSd3JirIk8VTa6FVHr59L}nTW4C2Tf*zg}r(%s4T-kFn=i0#~^ zW&`sWAa33s;wIk;IyJ8Z8qJNHTwdcwJt_dhX7{1NWeu%CKWu}un#^k4#&V})>K--HtRn}yF+ zNaSMvF_NyWA!#BKKxQX%u^qMn`E2HZfL>;74yE{%zbL|oQE*aiM0BJZ{L$DpY1zw+ zKa140wak8u_s70B#Y0@hePxB`tG zkapKlZE6IIShYm?D>W3cZAy)*rRu7-hP9QWCI9Y!tQ`kpGErHpH7ZPPqhe|jZtvjJ zJTMNJ*>MvEYMFh3n@{W-eqKi)5qF@VoY~{J2Sv&W#NkF1lrw8Vl5zqMsd8p~NRpIW zTAYG*=BXx`@mJjFGqbZ*DKoS6kE2>&!`4k{AMduLjfbi?BB6=nyEAb+N*$6Q1Skf= z$(b%)SW}1WV+H1;17{$CfEji$J`970A{XA(;lDA^LdXOxZCxsd;R%W8 zdq|YW-0ajN;%|l}w0cB3!oU=SDli_&!hpOUNh%1?w5*8g)Fa{8`fnOLER9W6@(=GO zpaw)Odv3OSSPb|L?H=C!-?V%9Ywg-K=ZN&O2)Ca>7Gyo(b6Nu$}$~;4Qu$rCe zQ($CLSshKdSZZrX$VbQPT1|v7yeeFbn(s@km)Sp%#8#G|LnwGs)mufRc~2EFo1&)Y zNXNfKVVX>LUS@o}pF~VA{L2UY3StR+8N_EOiiiu_O|gtEiqAwsL^Hcx(JDVZtE5Gm zCL`M;BXiTmhsCRGA8jArP2eCBshHo11o6yHAPG&x7hs6^0*IwKif2S}p{$~CTi;}?I zNazwX_HdMhS5c6Q8(9$%%&rlzp4sb2YO6}wGEVO~b!_x4TX2Z-) z(;k+F*$NwG&PIfTz!W4_FlWoqh?&{$;XRh|gdw_J{`w2XWq0}fMap2#?xL8Tz0quw z`OQ-7WyV%PVRe;^R*NWp9<|Eq;RCVNON`<>&;nn(11*^;Pu24o1NW&0n5hQpBSPz~ zPDITEe~k7SE$tJvRx=A5-Rkd)Ab!Vk@c%|k!mv|ljw~cGA2nJi&D1cXpiyT17@TqF z);AHKc``8xO&_q1!eBm2<9O7jBBAITYRN|xVGk-e3n|JpR&AW79yf54MH4QtT4B%3 zFndHI<+y0EnwdC~BikbWDTu>?&T#%%5i|Q*Wd9Px$B|%)Lwo)=ViMBnVmG$?S0$O5 
zj2jLQOO2;M{F;({c+dYvOhPkWE&dZnUL@yHP%rAXYUC1bl~ z{e8hhAofmiNZcPCM6zf+pyPwvcpl|$jK^hb6Z_xsufy4*35n;nljZQyOO(R{-~T3u zdvNlzee!YB_f34({<$4=+>u*<*3Oyyth4@_Av1u)y)!q=)?V4FzXD}!m)l~tXZ>+U z%#H5J<=p3T`+9PJtSv3|CXe;U?dB^;e?#K7)B5A)l251PzLK)6zcaBf;4bBJBmTqhp_%`0qy^3?0jOYDB?K2b%}TZDnUz(#1|En1Dh5q!iiVPpMph@ zHtNA-d9_=E3|iC!MlEq#kY(+cRHrgTbCV zO1tbIkuH{x$(?EVnUHSf**sp2$Cmk&^6F9(n~sI4CWP6X)X4ciJ zKa;Kr^XLvKlL%9i2~#GMo89~^-e$*YJhn_(ltG$_&3V;$Ca!m-%vnrS|1GP+%#doz zK{glI4{6>lh@|>MmT0sgseYIv!=cBgiBDjOi=;TwNqEN~qP>mX@!rtm(!?7+8+wwS zOStrt1@?yRx^g5M@1m9Nn{K|qzG-;ZB9gi{BC3ZK zc%_TKR$#f!N^TTbp7fGS``khK@A85DhVOQP`2u%1d@!uIQ*CV;OX}YISjMF6gc?ce zce*WNQupG%0_yy0N}y1h@Sm7cQn#sPjBDW=_i;;->o;jpyWTdv4obeiN#}B(ew%z7 zwW&=q72!;HKEm6Dxjwo~o33v_M{CbEZSeD=Fe4nHywf^b`^z;uIqy>Z2lctA;C|Wf zRe{)lC2ZSSf`1UUEgr%AUx!^0xsJAEd2&@p+bN9nWh$qp^m<6|5VkF}NWV{*ttq-) zdPlMjdtHyW?@*bgP`@rk^gZU|+O%WWR^hn)N^6#}*Ka;fd1^044(DqK_ebsVg0S_M zHf<=(*gFyWGxl!_TmNfIRweHVpHVtJk@PQwt-ovjPPlVhf1|u#gl(^B24}*72FqL&d2L2Ox*QW@UaR$9pRcX zo{cd1->={=Bh2}Jj4<|>BTV|W3clS>(Mr)0PFfQlR)0p?H9pr?(t9FI`A=5xp$a}K zTz;Rc-JYhdaD8yTH!Ak$B24gl!d%bi5hi~!G~JwOiV!G&uRPz=dm!)MJusN( zVqQ;=h!QXUfY_+w*ow;{wv7mo`Ka8-8%J_cd8cRc#_U{zThx&eaSXCek8XzPwulkV zAFLICv2dFr^&OrT;UXWN9Um8;R$ev*a^4y(rm<6+qcj|r3!vy_vCl^~)zD4= zok`)e6p_NSuI?_q1rrjLcr>{B6%IX_xW+Sh;!R~;N$2PyIu<*(`{4&Nss|lj34e42#CPh)F{k zw+9}|El4|gF3-+;RG(qd5uY& zyC<3jH(gC9(qp~8tAF>l-h6}FIo~aMOxa0vV!FtnBZ?PfnUX^Yy{{$vi1W!>;6UlK z7R*oAZFF6&w5773?wE&JMtAAz0*x{7b%a|9%q71z*Hs3Gu6$@tME&5#6cv|kq~$8E zAFnz4mQHKx;Dh4Nzi&Xb5?!pyn91|e@kywq>ci?)#kbHa?s0RZ;D7G&wC-?8 z>S^X)(RyA_X7dgdxIDzE$`tx`8M{&4J*GpIDI&4L`>i@9l?Gg9I9(t(FXWtw9R&4R zwT2ud8YjMdTxsmHToy@FyQZa+dY6GRk@GFW^eq~!XMtBmoaAeo_g2g2mmi^qfJ zlGf26R-^OzichSTgK`C5ERHTbh2uyJN1Abr8!k4ZwhR^xEu7_AjGez4ZX_+IYISLd zNw8&R?HQv-ZZpTn#pcqnDTXV@F$+z|fjiSaHW_s0;q@Y#JLEXk+3@iR^q=}f_FbW*U1BTdG%FPlj>+Ov_dFwsLDE|AaaXMCT-!p{%M zOlJ|Bbx1XsY*M^pw7E)>sUIbpzQ@OPKT({>tfR`;n2^VjLC4Ovq(g^9hqn}Mq%vY* zwT%UFUkUc1B<_*_Q&@{EmGYC0H7xpIl-GVLutxbayvv@oE!wX*`>BK-x>?E!lk)i8 
zjh>VdX$ra{_w>4yyx6f8Mh{wHio+J^lj;@kli0CFMyC`uQ{qwnhw@LbW9^KdQ5@%W z=g<)uma;;2GPXw{sCS|RS)>SF&Sdp6DF@>ost8{+Tpbsc)f*o>H zYIdy6(dzGXZ24!Z*|BDKsyGwshMba`9cv-<6&9L7Xk+A6soAjxK>do_CA;e&^m?$P zU0D00@$ayj;*qzcl!qN_By><=6T99I6k&EeD?nePL4%mxMPXIlwJU4bE7~o`(lE8~ z*9x=5qy1S6U%iFhcS`5H*s+GZb_=`jmCkpD9c#fGvb&fq)`jB( z+n*vkw>GS+{S;?{4SHE><*`PcuWK!?IFnik{H7!4r+%yr4)&CF!-O)BKcs{^0TS%i z`k?zH4ZRVLE>a(-WLGnwBQ{8?fQ7;5d$jwK6)tRPD=KzNr}cD=$_6pJH5I$xXEj!> z6U)}@ct$|jPPO$VF(sn@Od8r9>=bnJ$(tHm{*jWy*dRPZurTZ})_W95(d|;YcwFAKPc;{5ZZom+{sqj1_n80y literal 0 HcmV?d00001 diff --git a/lib/python3.4/site-packages/pip-8.0.2.dist-info/RECORD b/lib/python3.4/site-packages/pip-8.0.2.dist-info/RECORD deleted file mode 100644 index 1146918..0000000 --- a/lib/python3.4/site-packages/pip-8.0.2.dist-info/RECORD +++ /dev/null @@ -1,113 +0,0 @@ -pip/__init__.py,sha256=mPGsfFwpIvtXlWhanTBtjdVYuTc0KNDvb2Tr-gHZBvU,10431 -pip/__main__.py,sha256=V6Kh-IEDEFpt1cahRE6MajUF_14qJR_Qsvn4MjWZXzE,584 -pip/basecommand.py,sha256=Zlg6SE42TIjRyt1mct0LCkgNxcKKnss3xvASJyDqucE,11429 -pip/baseparser.py,sha256=Nlc7Un9gat27xtB24SnKL_3pZZOoh62gNNRdS6tDRZY,10465 -pip/cmdoptions.py,sha256=OJhbVR6zQ8kbbGcnv0RTZyvwvFqzKxtmO4lPYymMBKM,15877 -pip/download.py,sha256=srwSU5WnOa59_TPGaCfEWODDSZSRBJUHgU5jkC467MY,31715 -pip/exceptions.py,sha256=4KrgxMQuOpP8JlWc90S0vsJ_Ch-EBeD026knOgk9U8A,7741 -pip/index.py,sha256=VzgEo93kTlHeoPrlgPDg24h2ly0jzdg92OBjkG--gMg,36776 -pip/locations.py,sha256=MqUzS8YI2wDa7oFzTQw4zM4s0Hci05yubxfU_kTXXlU,5632 -pip/pep425tags.py,sha256=nXeMZN4d3h14oVovpI0WholWCNCR0MD2mAERl3YHs08,7240 -pip/status_codes.py,sha256=F6uDG6Gj7RNKQJUDnd87QKqI16Us-t-B0wPF_4QMpWc,156 -pip/wheel.py,sha256=e3iaG7X6Z6eQvfChzGiZHK4yw12Z_PqLSRCi6_AUf4s,32030 -pip/_vendor/__init__.py,sha256=OuNX6SjnmNk4mvSyZXarkqf7LtKke3IH7CWoAi_w-5o,4229 -pip/commands/__init__.py,sha256=BSfOTIkMIBWElsuWJ_uAuCmDQS8b-OHYiyOkk77tWSU,2215 -pip/commands/completion.py,sha256=7JkLif3DF0QGpjMaUjHvF8knJ3IumcED2gWRvMRpFy0,1991 
-pip/commands/download.py,sha256=dMRtH0JMBhNGlJWr1qC29vOeiBzG2K0OjOAfzdxSVgA,4804 -pip/commands/freeze.py,sha256=_wHnuHYXC4V0zBLD7LfDhgI_bWL6KdcCgzzQ9bXwDkU,2330 -pip/commands/hash.py,sha256=MCt4jEFyfoce0lVeNEz1x49uaTY-VDkKiBvvxrVcHkw,1597 -pip/commands/help.py,sha256=84HWkEdnGP_AEBHnn8gJP2Te0XTXRKFoXqXopbOZTNo,982 -pip/commands/install.py,sha256=8MOsH3IlL3ovZhTQtZwHhJb19pnkr8eKNE_9klVJ3PU,14971 -pip/commands/list.py,sha256=u76U5TLODQ2g53sSUA4q6WhYus7usbuWuITQJsCnP3E,7412 -pip/commands/search.py,sha256=dJe9rcam1TEfNp9-Gi36WjHc3l4mdj8gheVjqK5BrR0,4605 -pip/commands/show.py,sha256=yxghAwGYaYphL2LJdJbYXVLFr8tBMHnuH8n8s2fWMr4,4903 -pip/commands/uninstall.py,sha256=tz8cXz4WdpUdnt3RvpdQwH6_SNMB50egBIZWa1dwfcc,2884 -pip/commands/wheel.py,sha256=iT92Uo8qpVILl_Yk8L7AtkFVYGmY0ep5oDeyQSpwkLs,7528 -pip/compat/__init__.py,sha256=-k3m7JYe8ztMz2GGCPMc-XK7Uo-RiLdV00dSxWKMjfg,4536 -pip/compat/dictconfig.py,sha256=dRrelPDWrceDSzFT51RTEVY2GuM7UDyc5Igh_tn4Fvk,23096 -pip/models/__init__.py,sha256=0Rs7_RA4DxeOkWT5Cq4CQzDrSEhvYcN3TH2cazr72PE,71 -pip/models/index.py,sha256=pUfbO__v3mD9j-2n_ClwPS8pVyx4l2wIwyvWt8GMCRA,487 -pip/operations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -pip/operations/freeze.py,sha256=Px8en5guEfc6mhYh0cATtT6tlFzqTzLj4ad8gqAkIqw,3925 -pip/req/__init__.py,sha256=vFwZY8_Vc1WU1zFAespg1My_r_AT3n7cN0W9eX0EFqk,276 -pip/req/req_file.py,sha256=h1YmFfD7Opb_ulTTyEp7Osv2d8gbQJ1KGWMptEl_S08,11764 -pip/req/req_install.py,sha256=DYLV95E3U81nRJy4q8qs0fozLiCQZbG8Yg-CbUTwA2w,46670 -pip/req/req_set.py,sha256=0ncBet1v7gbsKeTUPpBj_-6Kowxx-iskw0_kLMGObi4,32236 -pip/req/req_uninstall.py,sha256=fdH2VgCjEC8NRYDS7fRu3ZJaBBUEy-N5muwxDX5MBNM,6897 -pip/utils/__init__.py,sha256=4Tz09B6nsZm6bQ3mR7K-AeDjlMLMFjnehaXH4vG_E-0,26759 -pip/utils/appdirs.py,sha256=KTpZANfjYw5K2tZ0_jNNdP_kMxQAns79qZWelwaJo0c,7896 -pip/utils/build.py,sha256=4smLRrfSCmXmjEnVnMFh2tBEpNcSLRe6J0ejZJ-wWJE,1312 -pip/utils/deprecation.py,sha256=0y-RdGVpnt-_byop0WJOOb509f8jjOzSmKghHorTclU,2282 
-pip/utils/filesystem.py,sha256=ZEVBuYM3fqr2_lgOESh4Y7fPFszGD474zVm_M3Mb5Tk,899 -pip/utils/hashes.py,sha256=oMk7cd3PbJgzpSQyXq1MytMud5f6H5Oa2YY5hYuCq6I,2866 -pip/utils/logging.py,sha256=7yWu4gZw-Qclj7X80QVdpGWkdTWGKT4LiUVKcE04pro,3327 -pip/utils/outdated.py,sha256=fNwOCL5r2EftPGhgCYGMKu032HC8cV-JAr9lp0HmToM,5455 -pip/utils/setuptools_build.py,sha256=8IGop-SZ6lxUl5HMOjLRaDlORPugIH_b_b2Y67x4jQc,240 -pip/utils/ui.py,sha256=fY7lHmQg3Pdnsgkge2mpZMNU9e1jg6unYYs2Ryfulhk,11320 -pip/vcs/__init__.py,sha256=lnea41zMq9HqB1Qo7hxy2IjUzk5WtBvnoloCCMR6Vk4,12349 -pip/vcs/bazaar.py,sha256=tYTwc4b4off8mr0O2o8SiGejqBDJxcbDBMSMd9-ISYc,3803 -pip/vcs/git.py,sha256=jN3vZCn1DqE-RdKGfMqlDhObZ3WFjC21dEav29M62xI,10054 -pip/vcs/mercurial.py,sha256=xG6rDiwHCRytJEs23SIHBXl_SwQo2jkkdD_6rVVP5h4,3472 -pip/vcs/subversion.py,sha256=mGT7sAzuVc1u-9MPoXJNyShnRzhdJpDdGNuhhzUPv6w,8687 -pip-8.0.2.dist-info/DESCRIPTION.rst,sha256=_rptqJIyCNNmh7m8q-4qZfQDc9gqAjMxVITAEfItc08,1060 -pip-8.0.2.dist-info/METADATA,sha256=0KwLFgIzCAQ506gjLJ_VyrUxbw2NC8b-kUbTM8Uo42Y,2212 -pip-8.0.2.dist-info/RECORD,, -pip-8.0.2.dist-info/WHEEL,sha256=GrqQvamwgBV4nLoJe0vhYRSWzWsx7xjlt74FT0SWYfE,110 -pip-8.0.2.dist-info/entry_points.txt,sha256=1-e4WB_Fe8mWHrMi1YQo_s5knbh0lu_uRmd8Wb6MJfY,68 -pip-8.0.2.dist-info/metadata.json,sha256=Sl2y0vogC_Ro8peVqn3OBZJQ_kV0PdQ9QfraUV3XPj0,1393 -pip-8.0.2.dist-info/top_level.txt,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -/openmedialibrary/platform_linux32/p34/bin/pip,sha256=6W1H8ra-gP5cHQEOiq5OH0mgwj2M-TyEDGSW1ao8LN4,241 -/openmedialibrary/platform_linux32/p34/bin/pip3,sha256=6W1H8ra-gP5cHQEOiq5OH0mgwj2M-TyEDGSW1ao8LN4,241 -/openmedialibrary/platform_linux32/p34/bin/pip3.4,sha256=6W1H8ra-gP5cHQEOiq5OH0mgwj2M-TyEDGSW1ao8LN4,241 -/openmedialibrary/platform_linux32/p34/lib/python3.4/site-packages/pip-8.0.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -pip/__pycache__/exceptions.cpython-34.pyc,, -pip/utils/__pycache__/ui.cpython-34.pyc,, 
-pip/__pycache__/locations.cpython-34.pyc,, -pip/commands/__pycache__/__init__.cpython-34.pyc,, -pip/__pycache__/__main__.cpython-34.pyc,, -pip/__pycache__/cmdoptions.cpython-34.pyc,, -pip/operations/__pycache__/__init__.cpython-34.pyc,, -pip/req/__pycache__/req_file.cpython-34.pyc,, -pip/commands/__pycache__/show.cpython-34.pyc,, -pip/commands/__pycache__/install.cpython-34.pyc,, -pip/vcs/__pycache__/bazaar.cpython-34.pyc,, -pip/utils/__pycache__/filesystem.cpython-34.pyc,, -pip/utils/__pycache__/logging.cpython-34.pyc,, -pip/req/__pycache__/req_uninstall.cpython-34.pyc,, -pip/vcs/__pycache__/__init__.cpython-34.pyc,, -pip/operations/__pycache__/freeze.cpython-34.pyc,, -pip/__pycache__/pep425tags.cpython-34.pyc,, -pip/vcs/__pycache__/git.cpython-34.pyc,, -pip/__pycache__/basecommand.cpython-34.pyc,, -pip/commands/__pycache__/download.cpython-34.pyc,, -pip/utils/__pycache__/appdirs.cpython-34.pyc,, -pip/compat/__pycache__/dictconfig.cpython-34.pyc,, -pip/commands/__pycache__/wheel.cpython-34.pyc,, -pip/commands/__pycache__/list.cpython-34.pyc,, -pip/utils/__pycache__/build.cpython-34.pyc,, -pip/req/__pycache__/req_install.cpython-34.pyc,, -pip/compat/__pycache__/__init__.cpython-34.pyc,, -pip/utils/__pycache__/hashes.cpython-34.pyc,, -pip/__pycache__/download.cpython-34.pyc,, -pip/utils/__pycache__/setuptools_build.cpython-34.pyc,, -pip/models/__pycache__/index.cpython-34.pyc,, -pip/__pycache__/status_codes.cpython-34.pyc,, -pip/req/__pycache__/__init__.cpython-34.pyc,, -pip/utils/__pycache__/outdated.cpython-34.pyc,, -pip/models/__pycache__/__init__.cpython-34.pyc,, -pip/vcs/__pycache__/subversion.cpython-34.pyc,, -pip/commands/__pycache__/freeze.cpython-34.pyc,, -pip/__pycache__/index.cpython-34.pyc,, -pip/commands/__pycache__/hash.cpython-34.pyc,, -pip/__pycache__/__init__.cpython-34.pyc,, -pip/__pycache__/baseparser.cpython-34.pyc,, -pip/req/__pycache__/req_set.cpython-34.pyc,, -pip/vcs/__pycache__/mercurial.cpython-34.pyc,, 
-pip/utils/__pycache__/deprecation.cpython-34.pyc,, -pip/commands/__pycache__/help.cpython-34.pyc,, -pip/__pycache__/wheel.cpython-34.pyc,, -pip/_vendor/__pycache__/__init__.cpython-34.pyc,, -pip/commands/__pycache__/completion.cpython-34.pyc,, -pip/utils/__pycache__/__init__.cpython-34.pyc,, -pip/commands/__pycache__/search.cpython-34.pyc,, -pip/commands/__pycache__/uninstall.cpython-34.pyc,, diff --git a/lib/python3.4/site-packages/pip-8.0.2.dist-info/DESCRIPTION.rst b/lib/python3.4/site-packages/pip-8.1.1.dist-info/DESCRIPTION.rst similarity index 86% rename from lib/python3.4/site-packages/pip-8.0.2.dist-info/DESCRIPTION.rst rename to lib/python3.4/site-packages/pip-8.1.1.dist-info/DESCRIPTION.rst index 2c149b7..39586d2 100644 --- a/lib/python3.4/site-packages/pip-8.0.2.dist-info/DESCRIPTION.rst +++ b/lib/python3.4/site-packages/pip-8.1.1.dist-info/DESCRIPTION.rst @@ -17,11 +17,13 @@ tool for installing Python packages. .. image:: https://img.shields.io/pypi/v/pip.svg - :target: https://pypi.python.org/pypi/pip + :target: https://pypi.python.org/pypi/pip .. image:: https://img.shields.io/travis/pypa/pip/develop.svg :target: http://travis-ci.org/pypa/pip +.. 
image:: https://readthedocs.org/projects/pip/badge/?version=stable + :target: https://pip.pypa.io/en/stable Code of Conduct --------------- diff --git a/lib/python3.4/site-packages/setuptools-20.1.1.dist-info/INSTALLER b/lib/python3.4/site-packages/pip-8.1.1.dist-info/INSTALLER similarity index 100% rename from lib/python3.4/site-packages/setuptools-20.1.1.dist-info/INSTALLER rename to lib/python3.4/site-packages/pip-8.1.1.dist-info/INSTALLER diff --git a/lib/python3.4/site-packages/pip-8.0.2.dist-info/METADATA b/lib/python3.4/site-packages/pip-8.1.1.dist-info/METADATA similarity index 90% rename from lib/python3.4/site-packages/pip-8.0.2.dist-info/METADATA rename to lib/python3.4/site-packages/pip-8.1.1.dist-info/METADATA index a49cee5..79657d0 100644 --- a/lib/python3.4/site-packages/pip-8.0.2.dist-info/METADATA +++ b/lib/python3.4/site-packages/pip-8.1.1.dist-info/METADATA @@ -1,6 +1,6 @@ Metadata-Version: 2.0 Name: pip -Version: 8.0.2 +Version: 8.1.1 Summary: The PyPA recommended tool for installing Python packages. Home-page: https://pip.pypa.io/ Author: The pip developers @@ -22,6 +22,7 @@ Classifier: Programming Language :: Python :: 3.5 Classifier: Programming Language :: Python :: Implementation :: PyPy Provides-Extra: testing Requires-Dist: mock; extra == 'testing' +Requires-Dist: pretend; extra == 'testing' Requires-Dist: pytest; extra == 'testing' Requires-Dist: scripttest (>=1.3); extra == 'testing' Requires-Dist: virtualenv (>=1.10); extra == 'testing' @@ -45,11 +46,13 @@ tool for installing Python packages. .. image:: https://img.shields.io/pypi/v/pip.svg - :target: https://pypi.python.org/pypi/pip + :target: https://pypi.python.org/pypi/pip .. image:: https://img.shields.io/travis/pypa/pip/develop.svg :target: http://travis-ci.org/pypa/pip +.. 
image:: https://readthedocs.org/projects/pip/badge/?version=stable + :target: https://pip.pypa.io/en/stable Code of Conduct --------------- diff --git a/lib/python3.4/site-packages/pip-8.1.1.dist-info/RECORD b/lib/python3.4/site-packages/pip-8.1.1.dist-info/RECORD new file mode 100644 index 0000000..b0e6f8c --- /dev/null +++ b/lib/python3.4/site-packages/pip-8.1.1.dist-info/RECORD @@ -0,0 +1,485 @@ +pip/__init__.py,sha256=fFs-ytm2H4V2evGESaozmF7U0BaGIMM0drFJZ5Ifj4s,10427 +pip/__main__.py,sha256=V6Kh-IEDEFpt1cahRE6MajUF_14qJR_Qsvn4MjWZXzE,584 +pip/basecommand.py,sha256=Zlg6SE42TIjRyt1mct0LCkgNxcKKnss3xvASJyDqucE,11429 +pip/baseparser.py,sha256=Nlc7Un9gat27xtB24SnKL_3pZZOoh62gNNRdS6tDRZY,10465 +pip/cmdoptions.py,sha256=pf24iszA39rhcJ5DjFA4oD_z5vTI0NG98qUahHs3qPM,15878 +pip/download.py,sha256=oJ3sZ8I6ct9X3eoXQ9xm_Ne0e6N85G_rWaERmMCVF2k,31722 +pip/exceptions.py,sha256=GdDhHOROBj-kW2rgerLJYXsxN8ENy1BX5RUb_Vs9TXM,7980 +pip/index.py,sha256=kpyj_O5c0VVlvhg5VuVm4oAGGh6RvD7Xr0syPN-eGa0,37191 +pip/locations.py,sha256=MqUzS8YI2wDa7oFzTQw4zM4s0Hci05yubxfU_kTXXlU,5632 +pip/pep425tags.py,sha256=4PNr9hd8OsXnKYR2q2oLzfDDhF5bFBwUZA-ZQxAClSI,11318 +pip/status_codes.py,sha256=F6uDG6Gj7RNKQJUDnd87QKqI16Us-t-B0wPF_4QMpWc,156 +pip/wheel.py,sha256=qg1DgjXtiQCnY-IJY5HC5VgpeQm9WCjDKYmefSfOjq0,32088 +pip/_vendor/__init__.py,sha256=yzwJqv89TDqeqqWRgBlZZmiAc5sTNOrdIMgF0MT8TWI,4647 +pip/_vendor/ipaddress.py,sha256=FngG_V6As8JnDJVR1g1i0D6wVa5ycSKJYOh7IrI4fq0,79904 +pip/_vendor/pyparsing.py,sha256=qgvvVK_6xQoazMbOWZEoRsFFblQcrlse4ZnE1ZfvTrk,157947 +pip/_vendor/re-vendor.py,sha256=PcdZ40d0ohMsdJmA4t0AeAWbPXi1tFsvAwA5KE5FGeY,773 +pip/_vendor/retrying.py,sha256=k3fflf5_Mm0XcIJYhB7Tj34bqCCPhUDkYbx1NvW2FPE,9972 +pip/_vendor/six.py,sha256=A6hdJZVjI3t_geebZ9BzUvwRrIXo0lfwzQlM2LcKyas,30098 +pip/_vendor/_markerlib/__init__.py,sha256=2hgtRuYDOrimZF9-ENCkrP8gnJ59HZEtlk-zoTEvn1Y,564 +pip/_vendor/_markerlib/markers.py,sha256=YuFp0-osufFIoqnzG3L0Z2fDCx4Vln3VUDeXJ2DA_1I,3979 
+pip/_vendor/cachecontrol/__init__.py,sha256=uz7Oxs9hQmUxtxod1WyVoUj6Jab7NfiLECPHFuc-22U,302 +pip/_vendor/cachecontrol/_cmd.py,sha256=MPxZfZd2LKDzVrs55X3wA1rsI2YuP8evLZSwQj0dIk0,1320 +pip/_vendor/cachecontrol/adapter.py,sha256=eizWWJwOnG5TToxL-XiEywPEf2k20--e-5C6u6wAEts,4196 +pip/_vendor/cachecontrol/cache.py,sha256=xtl-V-pr9KSt9VvFDRCB9yrHPEvqvbk-5M1vAInZb5k,790 +pip/_vendor/cachecontrol/compat.py,sha256=uyovOpd1ehI3J1XeBqJvcsIp6fvkjBpoQmu_0J2st8c,416 +pip/_vendor/cachecontrol/controller.py,sha256=Aky7U9UZ1VqutfvilljlWi1uk40yis6lmaz8c_wQLw8,12996 +pip/_vendor/cachecontrol/filewrapper.py,sha256=jkC0GOorbWIpy9CzP3PwxSHx5J2CXBAxNbGH68HmP1M,2168 +pip/_vendor/cachecontrol/heuristics.py,sha256=WtJrVsyWjpP9WoUiDVdTZZRNBCz5ZVptaQpYnqofDQU,4141 +pip/_vendor/cachecontrol/serialize.py,sha256=vZQ-Rsn4618ATipqqZJ-Hife2qVzpIepNjxTCUM-eAw,6305 +pip/_vendor/cachecontrol/wrapper.py,sha256=Kqyu_3TW_54XDudha4-HF21vyEOAJ4ZnRXFysTiLmXA,498 +pip/_vendor/cachecontrol/caches/__init__.py,sha256=uWnUtyMvHY_LULaL_4_IR1F_xPgK5zHfJyRnBq4DnPE,369 +pip/_vendor/cachecontrol/caches/file_cache.py,sha256=FsDug3bwUAQ3okjjfGzxlDaBf2fwVSn1iBKMTL6SyGU,3532 +pip/_vendor/cachecontrol/caches/redis_cache.py,sha256=XywqxkS9MkCaflTOY_wjrE02neKdywB9YwlOBbP7Ywc,973 +pip/_vendor/colorama/__init__.py,sha256=KHtwPTmdX3-XyE18JUOqRxxnoliw3fXU_12BIAS0zLk,240 +pip/_vendor/colorama/ansi.py,sha256=Fi0un-QLqRm-v7o_nKiOqyC8PapBJK7DLV_q9LKtTO0,2524 +pip/_vendor/colorama/ansitowin32.py,sha256=YOemHWTdCzwAuzZ-S5AdPMtMSC0opO75l8yoTu7Nrkg,9545 +pip/_vendor/colorama/initialise.py,sha256=QTPJjlrrFn_XzRiuDgVUgWiVhDgqEeBrQLLvEv5XGV4,1831 +pip/_vendor/colorama/win32.py,sha256=_SCEoTK_GA2tU1nhbayKKac-v9Jn98lCPIFOeFMGCHQ,5365 +pip/_vendor/colorama/winterm.py,sha256=V7U7ojwG1q4n6PKripjEvW_htYQi5ueXSM3LUUoqqDY,6290 +pip/_vendor/distlib/__init__.py,sha256=97krSLlP12JZ0MsSySC2t_h_dDyaRBRJuYwJGmVPr_U,581 +pip/_vendor/distlib/compat.py,sha256=13653e8hB0lLj87tSxctbi4I9zC3VtCjJxjwQ3yxwAI,40660 
+pip/_vendor/distlib/database.py,sha256=rZHwz4NcZZ7HtXH4SNca6ITyYBSvalvPXZmIj1BjNP8,49671 +pip/_vendor/distlib/index.py,sha256=qU38JCRI5F_1Z-QmydiocDE58d4KvHzKjS_T0dBsPlg,20976 +pip/_vendor/distlib/locators.py,sha256=LhEPbiagqlvOA6PWGuN-AGVDeakS3ykStoe_OYBkAUE,50493 +pip/_vendor/distlib/manifest.py,sha256=JF5EstaCOPnsW2tUdXCdjIFn-Zkf48Dqw0TcxKjuni0,13598 +pip/_vendor/distlib/markers.py,sha256=iRrVWwpyVwjkKJSX8NEQ92_MRMwpROcfNGKCD-Ch1QM,6282 +pip/_vendor/distlib/metadata.py,sha256=UJdzZIbC6Bkp1H9BtiOLXzph6HVujHKcUW1yDgO9oJ0,38702 +pip/_vendor/distlib/resources.py,sha256=7hQ2OgeRRwMrU81hztflAy3b0no-JOtUGCAIC8GImtk,10620 +pip/_vendor/distlib/scripts.py,sha256=ZVGGXYJwW06rIT5gOhO0fvc5CO2Q89LVVNYNF2XVcIY,15223 +pip/_vendor/distlib/t32.exe,sha256=rOJD6eDYk88TJ5lJtyt58El-nYNip4UvnYIDJ2y6QNs,89088 +pip/_vendor/distlib/t64.exe,sha256=qDBQu9uPHCVdBFM6ANg-Xp9nc5Wz_iFnSmsCTvdEQec,97792 +pip/_vendor/distlib/util.py,sha256=28BUzIX-KTJVdGJcPAoC_QKkLI8EVwu2NGggXhZnq04,51868 +pip/_vendor/distlib/version.py,sha256=_XjbARzhJfjH7MyFyBr3X5NWzSt7pY73la5KCWbD4Sc,23711 +pip/_vendor/distlib/w32.exe,sha256=LrnXXqK-Yb1tzS0lxymvQPiMlkQZWAB0eHM5jnL0mAk,85504 +pip/_vendor/distlib/w64.exe,sha256=GbKq4oBmzHZXdcpaLupKLenmQD7_DXsYX8PDPWo_U3M,94208 +pip/_vendor/distlib/wheel.py,sha256=cUwvre7CT0Mq8atw5nIXNeu1drCnh83wir_TaMyTI9Q,39043 +pip/_vendor/distlib/_backport/__init__.py,sha256=bqS_dTOH6uW9iGgd0uzfpPjo6vZ4xpPZ7kyfZJ2vNaw,274 +pip/_vendor/distlib/_backport/misc.py,sha256=KWecINdbFNOxSOP1fGF680CJnaC6S4fBRgEtaYTw0ig,971 +pip/_vendor/distlib/_backport/shutil.py,sha256=AUi8718iRoJ9K26mRi-rywtt8Gx7ykvrvbUbZszjfYE,25650 +pip/_vendor/distlib/_backport/sysconfig.cfg,sha256=swZKxq9RY5e9r3PXCrlvQPMsvOdiWZBTHLEbqS8LJLU,2617 +pip/_vendor/distlib/_backport/sysconfig.py,sha256=7WdYP0wbw8izH1eAEGNA-HXUyJrhzIAGK_LniUs4UNI,26958 +pip/_vendor/distlib/_backport/tarfile.py,sha256=bjyTNONZb-YEXrHFLExOSuagtSOoPaONP2UUoxwkAqE,92627 
+pip/_vendor/html5lib/__init__.py,sha256=emLYTm9_7OPnOInA9dPc4IIVF7GWA5tpl2H_bLqVoWk,779 +pip/_vendor/html5lib/constants.py,sha256=B5LN2DMP-6lEp9wpON4ecX3Kx01n_cbMjuGd6AteixE,86873 +pip/_vendor/html5lib/html5parser.py,sha256=o9FOrhdLGYOtUhsueAsuXwduydagASvwxJ0lUpGYrYg,117347 +pip/_vendor/html5lib/ihatexml.py,sha256=MT12cVXAKaW-ALUkUeN175HpUP73xK8wAIpPzQ8cgfI,16581 +pip/_vendor/html5lib/inputstream.py,sha256=ss3wjtlObOVoVGWFsBztYdpnUqRaezyJ0sTXfdb4Ly4,31665 +pip/_vendor/html5lib/sanitizer.py,sha256=sbyGySzFzCD_v0JYYSr6sLYVLpO6bpVmRiDMKbFRcCw,17804 +pip/_vendor/html5lib/tokenizer.py,sha256=6Uf8sDUkvNn661bcBSBYUCTfXzSs9EyCTiPcj5PAjYI,76929 +pip/_vendor/html5lib/utils.py,sha256=PSVv1ig9oAZa-DU16DT4cbbggnG7K3qQIkPm6uJKxFg,3267 +pip/_vendor/html5lib/filters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/html5lib/filters/_base.py,sha256=z-IU9ZAYjpsVsqmVt7kuWC63jR11hDMr6CVrvuao8W0,286 +pip/_vendor/html5lib/filters/alphabeticalattributes.py,sha256=fpRLbz6TCe5yXEkGmyMlJ80FekWsTR-sHk3Ano0U9LQ,624 +pip/_vendor/html5lib/filters/inject_meta_charset.py,sha256=xllv1I7unxhcyZTf3LTsv30wh2mAkT7wmTZx7zIhpuY,2746 +pip/_vendor/html5lib/filters/lint.py,sha256=8eJo0SXDcY40OhsNd0Cft36kUXCZ5t-30mNFSUf4LnE,4208 +pip/_vendor/html5lib/filters/optionaltags.py,sha256=4ozLwBgMRaxe7iqxefLQpDhp3irK7YHo9LgSGsvZYMw,10500 +pip/_vendor/html5lib/filters/sanitizer.py,sha256=MvGUs_v2taWPgGhjxswRSUiHfxrqMUhsNPz-eSeUYUQ,352 +pip/_vendor/html5lib/filters/whitespace.py,sha256=LbOUcC0zQ9z703KNZrArOr0kVBO7OMXjKjucDW32LU4,1142 +pip/_vendor/html5lib/serializer/__init__.py,sha256=xFXFP-inaTNlbnau5c5DGrH_O8yPm-C6HWbJxpiSqFE,490 +pip/_vendor/html5lib/serializer/htmlserializer.py,sha256=G-aVHmlR7uMR011jO0ev7sZvkVHpLr3OrLSYMZ7liVs,12855 +pip/_vendor/html5lib/treeadapters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/html5lib/treeadapters/sax.py,sha256=3of4vvaUYIAic7pngebwJV24hpOS7Zg9ggJa_WQegy4,1661 
+pip/_vendor/html5lib/treebuilders/__init__.py,sha256=Xz4X6B5DA1R-5GyRa44j0sJwfl6dUNyb0NBu9-7sK3U,3405 +pip/_vendor/html5lib/treebuilders/_base.py,sha256=Xf0FZVcVwIQS6tEseJdj5wKbYucbNCnbAsnsG4lONis,13711 +pip/_vendor/html5lib/treebuilders/dom.py,sha256=jvmtvnERtpxXpHvBgiq1FpzAUYAAzoolOTx_DoXwGEI,8469 +pip/_vendor/html5lib/treebuilders/etree.py,sha256=etbO6yQlyV46rWlj9mSyVqQOWrgoHgyJ01Tut4lWZkk,12621 +pip/_vendor/html5lib/treebuilders/etree_lxml.py,sha256=z3Bnfm2MstEEb_lbaAeicl5l-ab6MSQa5Q1ZZreK7Pc,14031 +pip/_vendor/html5lib/treewalkers/__init__.py,sha256=m2-4a5P4dMNlQb26MNIhgj69p6ms1i-JD2HPDr7iTfw,5766 +pip/_vendor/html5lib/treewalkers/_base.py,sha256=9nXtXtgubdWKFlKxhVzWarE0Hiv3T4VC7_Wt9ulVzB0,7014 +pip/_vendor/html5lib/treewalkers/dom.py,sha256=Lb63Nuz8HtgvkuuvSmU5LOyUkEtstH5saPPAg5xN4r8,1421 +pip/_vendor/html5lib/treewalkers/etree.py,sha256=966h5cOYPwQIcHjxJmVP5cDOnWKiyqpyt9QKQo9W-uo,4597 +pip/_vendor/html5lib/treewalkers/genshistream.py,sha256=IbBFrlgi-59-K7P1zm0d7ZFIknBN4c5E57PHJDkx39s,2278 +pip/_vendor/html5lib/treewalkers/lxmletree.py,sha256=am6t_JHh_Fpm10CaW-zDaGGsDwTPK3Pas7TRBezFs4w,5992 +pip/_vendor/html5lib/treewalkers/pulldom.py,sha256=9W6i8yWtUzayV6EwX-okVacttHaqpQZwdBCc2S3XeQ4,2302 +pip/_vendor/html5lib/trie/__init__.py,sha256=mec5zyJ5wIKRM8819gIcIsYQwncg91rEmPwGH1dG3Ho,212 +pip/_vendor/html5lib/trie/_base.py,sha256=WGY8SGptFmx4O0aKLJ54zrIQOoyuvhS0ngA36vAcIcc,927 +pip/_vendor/html5lib/trie/datrie.py,sha256=EQpqSfkZRuTbE-DuhW7xMdVDxdZNZ0CfmnYfHA_3zxM,1178 +pip/_vendor/html5lib/trie/py.py,sha256=wXmQLrZRf4MyWNyg0m3h81m9InhLR7GJ002mIIZh-8o,1775 +pip/_vendor/lockfile/__init__.py,sha256=Tqpz90DwKYfhPsfzVOJl84TL87pdFE5ePNHdXAxs4Tk,9371 +pip/_vendor/lockfile/linklockfile.py,sha256=C7OH3H4GdK68u4FQgp8fkP2kO4fyUTSyj3X6blgfobc,2652 +pip/_vendor/lockfile/mkdirlockfile.py,sha256=e3qgIL-etZMLsS-3ft19iW_8IQ360HNkGOqE3yBKsUw,3096 +pip/_vendor/lockfile/pidlockfile.py,sha256=ukH9uk6NFuxyVmG5QiWw4iKq3fT7MjqUguX95avYPIY,6090 
+pip/_vendor/lockfile/sqlitelockfile.py,sha256=o2TMkMRY0iwn-iL1XMRRIFStMUkS4i3ajceeYNntKFg,5506 +pip/_vendor/lockfile/symlinklockfile.py,sha256=ABwXXmvTHvCl5viPblShL3PG-gGsLiT1roAMfDRwhi8,2616 +pip/_vendor/packaging/__about__.py,sha256=AEwkfVSNgMMAAugtYao7b7wah9XryokeoXBuIw4h6d8,720 +pip/_vendor/packaging/__init__.py,sha256=_vNac5TrzwsrzbOFIbF-5cHqc_Y2aPT2D7zrIR06BOo,513 +pip/_vendor/packaging/_compat.py,sha256=Vi_A0rAQeHbU-a9X0tt1yQm9RqkgQbDSxzRw8WlU9kA,860 +pip/_vendor/packaging/_structures.py,sha256=RImECJ4c_wTlaTYYwZYLHEiebDMaAJmK1oPARhw1T5o,1416 +pip/_vendor/packaging/markers.py,sha256=pmVQ8Si5HNFjTCxRljmOTSt6IiNBvAB2UZhp2AnASvg,7341 +pip/_vendor/packaging/requirements.py,sha256=SD7dVJGjdPUqtoHb47qwK6wWJTQd-ZXWjxpJg83UcBA,4327 +pip/_vendor/packaging/specifiers.py,sha256=SAMRerzO3fK2IkFZCaZkuwZaL_EGqHNOz4pni4vhnN0,28025 +pip/_vendor/packaging/utils.py,sha256=3m6WvPm6NNxE8rkTGmn0r75B_GZSGg7ikafxHsBN1WA,421 +pip/_vendor/packaging/version.py,sha256=OwGnxYfr2ghNzYx59qWIBkrK3SnB6n-Zfd1XaLpnnM0,11556 +pip/_vendor/pkg_resources/__init__.py,sha256=wK-1CRMHd_d02MR35ZeK2chkEEgd2dr4YU3CBEQRE-o,108219 +pip/_vendor/progress/__init__.py,sha256=Wn1074LUDZovd4zfoVYojnPBgOc6ctHbQX7rp_p8lRA,3023 +pip/_vendor/progress/bar.py,sha256=YNPJeRrwYVKFO2nyaEwsQjYByamMWTgJMvQO1NpD-AY,2685 +pip/_vendor/progress/counter.py,sha256=kEqA8jWEdwrc6P_9VaRx7bjOHwk9gxl-Q9oVbQ08v5c,1502 +pip/_vendor/progress/helpers.py,sha256=FehfwZTv-5cCfsbcMlvlUkm3xZ0cRhsev6XVpmeTF4c,2854 +pip/_vendor/progress/spinner.py,sha256=iCVtUQbaJUFHTjn1ZLPQLPYeao4lC9aXAa_HxIeUK6k,1314 +pip/_vendor/requests/__init__.py,sha256=GzCmm6OIsjKVbUEMAxbtw4iBIcFj8GRsRi8GX84y6OY,2007 +pip/_vendor/requests/adapters.py,sha256=RqmOfpR96Lfv-GqSa8QEYIn8ZgfKb05nECh94rTQoJQ,17495 +pip/_vendor/requests/api.py,sha256=mZZtHywR0qme1BStj7fKAkHdpg_3FMdDawBsvWV4eh0,5419 +pip/_vendor/requests/auth.py,sha256=iez9OrPDCyE1zFxJMLL8MNw7CLj3id77gJkwNH4OlbU,7550 +pip/_vendor/requests/cacert.pem,sha256=5xzWFRrSP0ZsXiW6emg8UQ_w497lT4qWCv32OO8R1ME,344712 
+pip/_vendor/requests/certs.py,sha256=RX5H1cSiB52Hbjh_qv3eMW8hqHEF_r4Qiv_4AwfziuU,613 +pip/_vendor/requests/compat.py,sha256=hq7CKHoykNs8yzKPAJiOkHQJPoNp9A89MufTdhlCniY,1469 +pip/_vendor/requests/cookies.py,sha256=mrrSrRYhxytuLDKrI5cyH5NL4zvpA373YvO7Ant9rxc,17387 +pip/_vendor/requests/exceptions.py,sha256=lyzK5I-zkNCN9zfYGJgkDMvtt3akjw0QUq4q8pYI4wA,2776 +pip/_vendor/requests/hooks.py,sha256=jSdmZjB5oRJ6xmKM3VtqkYkq8oFTwx6gqG9AaUWpAlw,767 +pip/_vendor/requests/models.py,sha256=QQKNAR4bXpt0q2RwKTAraQlyrFJrZN__lvI0bpjPJvw,29277 +pip/_vendor/requests/sessions.py,sha256=PB_4RAr5Mr-CKStD5haAgDVJl7wBQEuShzDAP0JG-Ho,24544 +pip/_vendor/requests/status_codes.py,sha256=2RTAbhP2u3h-26-iyV0SuN1R0LknUTvga3RXwftdLtc,3280 +pip/_vendor/requests/structures.py,sha256=i3yMaaDbl4_gNJKdcK3kDmeSLoo0r59XEIWoc_qtNyo,2977 +pip/_vendor/requests/utils.py,sha256=pH5DwoyZZjwzDW_2OLrm0asDs2b_9mjzafwfm5SfzLQ,21845 +pip/_vendor/requests/packages/__init__.py,sha256=CVheqNRcXIkAi5037RhxeqbAqd0QhrK1o9R9kS2xvuI,1384 +pip/_vendor/requests/packages/chardet/__init__.py,sha256=XuTKCYOR7JwsoHxqZTYH86LVyMDbDI3s1s0W_qoGEBM,1295 +pip/_vendor/requests/packages/chardet/big5freq.py,sha256=D8oTdz-GM7Jg8TsaWJDm65vM_OLHC3xub6qUJ3rOgsQ,82594 +pip/_vendor/requests/packages/chardet/big5prober.py,sha256=XX96C--6WKYW36mL-z7pJSAtc169Z8ZImByCP4pEN9A,1684 +pip/_vendor/requests/packages/chardet/chardetect.py,sha256=f4299UZG6uWd3i3r_N0OdrFj2sA9JFI54PAmDLAFmWA,2504 +pip/_vendor/requests/packages/chardet/chardistribution.py,sha256=cUARQFr1oTLXeJCDQrDRkUP778AvSMzhSCnG8VLCV58,9226 +pip/_vendor/requests/packages/chardet/charsetgroupprober.py,sha256=0lKk7VE516fgMw119tNefFqLOxKfIE9WfdkpIT69OKU,3791 +pip/_vendor/requests/packages/chardet/charsetprober.py,sha256=Z48o2KiOj23FNqYH8FqzhH5m1qdm3rI8DcTm2Yqtklg,1902 +pip/_vendor/requests/packages/chardet/codingstatemachine.py,sha256=E85rYhHVMw9xDEJVgiQhp0OnLGr6i2r8_7QOWMKTH08,2318 +pip/_vendor/requests/packages/chardet/compat.py,sha256=5mm6yrHwef1JEG5OxkPJlSq5lkjLVpEGh3iPgFBkpkM,1157 
+pip/_vendor/requests/packages/chardet/constants.py,sha256=-UnY8U7EP7z9fTyd09yq35BEkSFEAUAiv9ohd1DW1s4,1335 +pip/_vendor/requests/packages/chardet/cp949prober.py,sha256=FMvdLyB7fejPXRsTbca7LK1P3RUvvssmjUNyaEfz8zY,1782 +pip/_vendor/requests/packages/chardet/escprober.py,sha256=q5TcQKeVq31WxrW7Sv8yjpZkjEoaHO8S92EJZ9hodys,3187 +pip/_vendor/requests/packages/chardet/escsm.py,sha256=7iljEKN8lXTh8JFXPUSwlibMno6R6ksq4evLxbkzfro,7839 +pip/_vendor/requests/packages/chardet/eucjpprober.py,sha256=5IpfSEjAb7h3hcGMd6dkU80O900C2N6xku28rdYFKuc,3678 +pip/_vendor/requests/packages/chardet/euckrfreq.py,sha256=T5saK5mImySG5ygQPtsp6o2uKulouCwYm2ElOyFkJqU,45978 +pip/_vendor/requests/packages/chardet/euckrprober.py,sha256=Wo7dnZ5Erw_nB4H-m5alMiOxOuJUmGHlwCSaGqExDZA,1675 +pip/_vendor/requests/packages/chardet/euctwfreq.py,sha256=G_I0BW9i1w0ONeeUwIYqV7_U09buIHdqh-wNHVaql7I,34872 +pip/_vendor/requests/packages/chardet/euctwprober.py,sha256=upS2P6GuT5ujOxXYw-RJLcT7A4PTuo27KGUKU4UZpIQ,1676 +pip/_vendor/requests/packages/chardet/gb2312freq.py,sha256=M2gFdo_qQ_BslStEchrPW5CrPEZEacC0uyDLw4ok-kY,36011 +pip/_vendor/requests/packages/chardet/gb2312prober.py,sha256=VWnjoRa83Y6V6oczMaxyUr0uy48iCnC2nzk9zfEIRHc,1681 +pip/_vendor/requests/packages/chardet/hebrewprober.py,sha256=8pdoUfsVXf_L4BnJde_BewS6H2yInV5688eu0nFhLHY,13359 +pip/_vendor/requests/packages/chardet/jisfreq.py,sha256=ZcL4R5ekHHbP2KCYGakVMBsiKqZZZAABzhwi-uRkOps,47315 +pip/_vendor/requests/packages/chardet/jpcntx.py,sha256=yftmp0QaF6RJO5SJs8I7LU5AF4rwP23ebeCQL4BM1OY,19348 +pip/_vendor/requests/packages/chardet/langbulgarianmodel.py,sha256=ZyPsA796MSVhYdfWhMCgKWckupAKAnKqWcE3Cl3ej6o,12784 +pip/_vendor/requests/packages/chardet/langcyrillicmodel.py,sha256=fkcd5OvogUp-GrNDWAZPgkYsSRCD2omotAEvqjlmLKE,17725 +pip/_vendor/requests/packages/chardet/langgreekmodel.py,sha256=QHMy31CH_ot67UCtmurCEKqKx2WwoaKrw2YCYYBK2Lw,12628 +pip/_vendor/requests/packages/chardet/langhebrewmodel.py,sha256=4ASl5vzKJPng4H278VHKtRYC03TpQpenlHTcsmZH1rE,11318 
+pip/_vendor/requests/packages/chardet/langhungarianmodel.py,sha256=SXwuUzh49_cBeMXhshRHdrhlkz0T8_pZWV_pdqBKNFk,12536 +pip/_vendor/requests/packages/chardet/langthaimodel.py,sha256=-k7djh3dGKngAGnt3WfuoJN7acDcWcmHAPojhaUd7q4,11275 +pip/_vendor/requests/packages/chardet/latin1prober.py,sha256=238JHOxH8aRudJY2NmeSv5s7i0Qe3GuklIU3HlYybvg,5232 +pip/_vendor/requests/packages/chardet/mbcharsetprober.py,sha256=9rOCjDVsmSMp6e7q2syqak22j7lrbUZhJhMee2gbVL0,3268 +pip/_vendor/requests/packages/chardet/mbcsgroupprober.py,sha256=SHRzNPLpDXfMJLA8phCHVU0WgqbgDCNxDQMolGX_7yk,1967 +pip/_vendor/requests/packages/chardet/mbcssm.py,sha256=IKwJXyxu34n6NojmxVxC60MLFtJKm-hIfxaFEnb3uBA,19590 +pip/_vendor/requests/packages/chardet/sbcharsetprober.py,sha256=Xq0lODqJnDgxglBiQI4BqTFiPbn63-0a5XNA5-hVu7U,4793 +pip/_vendor/requests/packages/chardet/sbcsgroupprober.py,sha256=8hLyH8RAG-aohBo7o_KciWVgRo42ZE_zEtuNG1JMRYI,3291 +pip/_vendor/requests/packages/chardet/sjisprober.py,sha256=UYOmiMDzttYIkSDoOB08UEagivJpUXz4tuWiWzTiOr8,3764 +pip/_vendor/requests/packages/chardet/universaldetector.py,sha256=h-E2x6XSCzlNjycYWG0Fe4Cf1SGdaIzUNu2HCphpMZA,6840 +pip/_vendor/requests/packages/chardet/utf8prober.py,sha256=7tdNZGrJY7jZUBD483GGMkiP0Tx8Fp-cGvWHoAsilHg,2652 +pip/_vendor/requests/packages/urllib3/__init__.py,sha256=416Z27e3riIyAXRLwsOS5dMrU2EQB361eLxksZFWnrU,2648 +pip/_vendor/requests/packages/urllib3/_collections.py,sha256=8G9PhO4XdkNDQATNL1uy86tSlH3EvIJHXebiOJnfFok,10542 +pip/_vendor/requests/packages/urllib3/connection.py,sha256=XREoqqZh54Lgag5CLdVlC27bwCpOq0aYrMmNEMtSJWk,10286 +pip/_vendor/requests/packages/urllib3/connectionpool.py,sha256=Zzn84qmJhMaSWXqDjhA1WBzt9E_wg3XXi0fsJ80aVPE,31221 +pip/_vendor/requests/packages/urllib3/exceptions.py,sha256=O_rlqjhV5PJFr28ZFW4Y0kgf_Q_l84sRx7ufgBsVEG8,5440 +pip/_vendor/requests/packages/urllib3/fields.py,sha256=WVUvPfSzNBniw9zKVDoLl9y5ko2qKBjbzkH-bTQMSgQ,5872 
+pip/_vendor/requests/packages/urllib3/filepost.py,sha256=NvLlFsdt8ih_Q4S2ekQF3CJG0nOXs32YI-G04_AdT2g,2320 +pip/_vendor/requests/packages/urllib3/poolmanager.py,sha256=3KaeL_tJpPPRmnSZY9x8qHcf9SNT8QucwPUNBnqCUx4,9470 +pip/_vendor/requests/packages/urllib3/request.py,sha256=jET7OvA3FSjxABBRGhCyMdPvM9XuJA6df9gRhkJiJiY,5988 +pip/_vendor/requests/packages/urllib3/response.py,sha256=6Bs5LNzhW1YEEd6stBFJtruDVFMlWNxo0MFPmtJhvDU,18103 +pip/_vendor/requests/packages/urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/_vendor/requests/packages/urllib3/contrib/appengine.py,sha256=ewIhezCrySj0r1SCTm8MtpnlE6EFJpEQ-AZhNjXe6dE,7531 +pip/_vendor/requests/packages/urllib3/contrib/ntlmpool.py,sha256=EDJwycyalpMD89DiGF5pFNCZOGTBQBZDtLN8oOAialc,4546 +pip/_vendor/requests/packages/urllib3/contrib/pyopenssl.py,sha256=JBL3GO8YucHXkdpU7uxUGd9UgShsIhAU8oCMJDOo47s,10094 +pip/_vendor/requests/packages/urllib3/packages/__init__.py,sha256=nlChrGzkjCkmhCX9HrF_qHPUgosfsPQkVIJxiiLhk9g,109 +pip/_vendor/requests/packages/urllib3/packages/ordered_dict.py,sha256=VQaPONfhVMsb8B63Xg7ZOydJqIE_jzeMhVN3Pec6ogw,8935 +pip/_vendor/requests/packages/urllib3/packages/six.py,sha256=U-rO-WBrFS8PxHeamSl6okKCjqPF18NhiZb0qPZ67XM,11628 +pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py,sha256=cOWMIn1orgJoA35p6pSzO_-Dc6iOX9Dhl6D2sL9b_2o,460 +pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py,sha256=fK28k37hL7-D79v9iM2fHgNK9Q1Pw0M7qVRL4rkfFjQ,3778 +pip/_vendor/requests/packages/urllib3/util/__init__.py,sha256=7LnyUDyddbD9VVmsbPP0ckT2paVTmgLPs5E_BUoHVu8,854 +pip/_vendor/requests/packages/urllib3/util/connection.py,sha256=6PvDBlK_6QDLHzEDT-uEMhqKcDoSuRO43Vtb4IXfkzQ,3380 +pip/_vendor/requests/packages/urllib3/util/request.py,sha256=ZMDewRK-mjlK72szGIIjzYnLIn-zPP0WgJUMjKeZ6Tg,2128 +pip/_vendor/requests/packages/urllib3/util/response.py,sha256=-vdS4K9hXQAkcvFLgnZqNmah_yyH7NZ2I6gRaKX2EwU,2167 
+pip/_vendor/requests/packages/urllib3/util/retry.py,sha256=Q4IdYYD3JwjNvc49r7tGsZt7jB6nVkRBohHpvSlONY4,9981 +pip/_vendor/requests/packages/urllib3/util/ssl_.py,sha256=Rq7M8Y04fwHjA9EkewYXE2SE5ZK7UGZhnhZ5JledWh0,11401 +pip/_vendor/requests/packages/urllib3/util/timeout.py,sha256=ioAIYptFyBG7eU_r8_ZmO45hpj1dJE6WCvrGR9dNFjs,9596 +pip/_vendor/requests/packages/urllib3/util/url.py,sha256=EcX4ZfmgKWcqM4sY9FlC-yN4y_snuURPV0TpUPHNjnc,5879 +pip/commands/__init__.py,sha256=naZ1iIWRutNznOVpLj8qyn1GPE0B5rhCWCrSUOZSt4M,2145 +pip/commands/completion.py,sha256=2BEUY3jowgemiIGgUP3rpk6A9My4Eu8rTPosFxlESOE,1967 +pip/commands/download.py,sha256=dMRtH0JMBhNGlJWr1qC29vOeiBzG2K0OjOAfzdxSVgA,4804 +pip/commands/freeze.py,sha256=KmQoLf-HruqBDzc-F2-ganGVn2lboNQqppfyrMsx3SU,2774 +pip/commands/hash.py,sha256=MCt4jEFyfoce0lVeNEz1x49uaTY-VDkKiBvvxrVcHkw,1597 +pip/commands/help.py,sha256=84HWkEdnGP_AEBHnn8gJP2Te0XTXRKFoXqXopbOZTNo,982 +pip/commands/install.py,sha256=8MOsH3IlL3ovZhTQtZwHhJb19pnkr8eKNE_9klVJ3PU,14971 +pip/commands/list.py,sha256=u76U5TLODQ2g53sSUA4q6WhYus7usbuWuITQJsCnP3E,7412 +pip/commands/search.py,sha256=9ClAcFzkJ_7AksTkNrUed5qzsplpBtMlJByJLqiZFqw,4777 +pip/commands/show.py,sha256=dytBbI9XV-ChpV51tsuBygZJJO-QaO2Gtz5kbLkBCZE,5815 +pip/commands/uninstall.py,sha256=tz8cXz4WdpUdnt3RvpdQwH6_SNMB50egBIZWa1dwfcc,2884 +pip/commands/wheel.py,sha256=iT92Uo8qpVILl_Yk8L7AtkFVYGmY0ep5oDeyQSpwkLs,7528 +pip/compat/__init__.py,sha256=7WN0B0XMYIldfminnT679VoEJLxNQPi9MFwCIt1_llU,4669 +pip/compat/dictconfig.py,sha256=dRrelPDWrceDSzFT51RTEVY2GuM7UDyc5Igh_tn4Fvk,23096 +pip/compat/ordereddict.py,sha256=6RQCd4PyTE4tvLUoAnsygvrreOSTV4BRDbc_4gCSkTs,4110 +pip/models/__init__.py,sha256=0Rs7_RA4DxeOkWT5Cq4CQzDrSEhvYcN3TH2cazr72PE,71 +pip/models/index.py,sha256=pUfbO__v3mD9j-2n_ClwPS8pVyx4l2wIwyvWt8GMCRA,487 +pip/operations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/operations/freeze.py,sha256=H6xpxe1XgoNm5f3UXK47kNy0OQfM5jzo4UUwQu7G-Lo,4048 
+pip/req/__init__.py,sha256=vFwZY8_Vc1WU1zFAespg1My_r_AT3n7cN0W9eX0EFqk,276 +pip/req/req_file.py,sha256=3eaVnPMUAjikLdC5i8hZUAf8aAOby2UxmAVFf94FOXY,11928 +pip/req/req_install.py,sha256=aG0_hj8WqLLUH5tO40OFIncIxU50Vm4rFqYcx5hmoYk,45589 +pip/req/req_set.py,sha256=Xwia1h7o2Z3Qogae3RHIDCGlXS3w2AeQPG8LBz7GmFM,32312 +pip/req/req_uninstall.py,sha256=fdH2VgCjEC8NRYDS7fRu3ZJaBBUEy-N5muwxDX5MBNM,6897 +pip/utils/__init__.py,sha256=WdAVVUl5j0OQ3u3eR3Qtu3Um3OmeVflR7W1BmMmvmDU,27531 +pip/utils/appdirs.py,sha256=KTpZANfjYw5K2tZ0_jNNdP_kMxQAns79qZWelwaJo0c,7896 +pip/utils/build.py,sha256=4smLRrfSCmXmjEnVnMFh2tBEpNcSLRe6J0ejZJ-wWJE,1312 +pip/utils/deprecation.py,sha256=DR3cKqzovYu9Pif7c9bT2KmwekfW95N3BsI45_5u38I,2239 +pip/utils/encoding.py,sha256=NQxGiFS5GbeAveLZTnx92t5r0PYqvt0iRnP2u9SGG1w,971 +pip/utils/filesystem.py,sha256=ZEVBuYM3fqr2_lgOESh4Y7fPFszGD474zVm_M3Mb5Tk,899 +pip/utils/hashes.py,sha256=oMk7cd3PbJgzpSQyXq1MytMud5f6H5Oa2YY5hYuCq6I,2866 +pip/utils/logging.py,sha256=7yWu4gZw-Qclj7X80QVdpGWkdTWGKT4LiUVKcE04pro,3327 +pip/utils/outdated.py,sha256=fNwOCL5r2EftPGhgCYGMKu032HC8cV-JAr9lp0HmToM,5455 +pip/utils/setuptools_build.py,sha256=8IGop-SZ6lxUl5HMOjLRaDlORPugIH_b_b2Y67x4jQc,240 +pip/utils/ui.py,sha256=pbDkSAeumZ6jdZcOJ2yAbx8iBgeP2zfpqNnLJK1gskQ,11597 +pip/vcs/__init__.py,sha256=lnea41zMq9HqB1Qo7hxy2IjUzk5WtBvnoloCCMR6Vk4,12349 +pip/vcs/bazaar.py,sha256=tYTwc4b4off8mr0O2o8SiGejqBDJxcbDBMSMd9-ISYc,3803 +pip/vcs/git.py,sha256=u16VCiNW_a9AaYqLri2b8-f4lOZlOYwsGpHHV3uv_dQ,10218 +pip/vcs/mercurial.py,sha256=xG6rDiwHCRytJEs23SIHBXl_SwQo2jkkdD_6rVVP5h4,3472 +pip/vcs/subversion.py,sha256=mGT7sAzuVc1u-9MPoXJNyShnRzhdJpDdGNuhhzUPv6w,8687 +pip-8.1.1.dist-info/DESCRIPTION.rst,sha256=jSvW1qOjwzndvm_p_DexGCVJfwgg3rWPMJWzf6Rmsfc,1167 +pip-8.1.1.dist-info/METADATA,sha256=p_9D2tGGDX-wd8S14XVVx0K-qOjDrrwu-CmYn9Dndlc,2362 +pip-8.1.1.dist-info/RECORD,, +pip-8.1.1.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 
+pip-8.1.1.dist-info/entry_points.txt,sha256=GWc-Wb9WUKZ1EuVWNz-G0l3BeIpbNJLx0OJbZ61AAV0,68 +pip-8.1.1.dist-info/metadata.json,sha256=wAnzudgBGV69N0kQOAgeAXIjQSbkBZhZEs98ULrfRUE,1513 +pip-8.1.1.dist-info/top_level.txt,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +/openmedialibrary/platform_linux32/p34/bin/pip,sha256=6W1H8ra-gP5cHQEOiq5OH0mgwj2M-TyEDGSW1ao8LN4,241 +/openmedialibrary/platform_linux32/p34/bin/pip3,sha256=6W1H8ra-gP5cHQEOiq5OH0mgwj2M-TyEDGSW1ao8LN4,241 +/openmedialibrary/platform_linux32/p34/bin/pip3.4,sha256=6W1H8ra-gP5cHQEOiq5OH0mgwj2M-TyEDGSW1ao8LN4,241 +/openmedialibrary/platform_linux32/p34/lib/python3.4/site-packages/pip-8.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pip/_vendor/requests/packages/chardet/__pycache__/euckrfreq.cpython-34.pyc,, +pip/_vendor/html5lib/treebuilders/__pycache__/dom.cpython-34.pyc,, +pip/_vendor/packaging/__pycache__/markers.cpython-34.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/sjisprober.cpython-34.pyc,, +pip/utils/__pycache__/logging.cpython-34.pyc,, +pip/utils/__pycache__/outdated.cpython-34.pyc,, +pip/_vendor/requests/__pycache__/cookies.cpython-34.pyc,, +pip/_vendor/requests/packages/urllib3/util/__pycache__/retry.cpython-34.pyc,, +pip/_vendor/html5lib/__pycache__/sanitizer.cpython-34.pyc,, +pip/_vendor/packaging/__pycache__/__about__.cpython-34.pyc,, +pip/models/__pycache__/index.cpython-34.pyc,, +pip/commands/__pycache__/list.cpython-34.pyc,, +pip/_vendor/html5lib/serializer/__pycache__/htmlserializer.cpython-34.pyc,, +pip/utils/__pycache__/encoding.cpython-34.pyc,, +pip/_vendor/distlib/__pycache__/database.cpython-34.pyc,, +pip/_vendor/html5lib/__pycache__/html5parser.cpython-34.pyc,, +pip/_vendor/packaging/__pycache__/_compat.cpython-34.pyc,, +pip/vcs/__pycache__/git.cpython-34.pyc,, +pip/operations/__pycache__/__init__.cpython-34.pyc,, +pip/_vendor/html5lib/trie/__pycache__/datrie.cpython-34.pyc,, +pip/utils/__pycache__/ui.cpython-34.pyc,, 
+pip/compat/__pycache__/__init__.cpython-34.pyc,, +pip/_vendor/html5lib/filters/__pycache__/optionaltags.cpython-34.pyc,, +pip/_vendor/distlib/__pycache__/locators.cpython-34.pyc,, +pip/_vendor/html5lib/__pycache__/ihatexml.cpython-34.pyc,, +pip/_vendor/distlib/_backport/__pycache__/misc.cpython-34.pyc,, +pip/utils/__pycache__/__init__.cpython-34.pyc,, +pip/req/__pycache__/req_set.cpython-34.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/__init__.cpython-34.pyc,, +pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-34.pyc,, +pip/_vendor/requests/packages/urllib3/__pycache__/request.cpython-34.pyc,, +pip/_vendor/requests/packages/__pycache__/__init__.cpython-34.pyc,, +pip/_vendor/lockfile/__pycache__/symlinklockfile.cpython-34.pyc,, +pip/_vendor/requests/__pycache__/auth.cpython-34.pyc,, +pip/_vendor/requests/packages/urllib3/__pycache__/connection.cpython-34.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/chardistribution.cpython-34.pyc,, +pip/_vendor/requests/packages/urllib3/util/__pycache__/response.cpython-34.pyc,, +pip/_vendor/distlib/__pycache__/scripts.cpython-34.pyc,, +pip/utils/__pycache__/deprecation.cpython-34.pyc,, +pip/_vendor/html5lib/filters/__pycache__/lint.cpython-34.pyc,, +pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-34.pyc,, +pip/commands/__pycache__/uninstall.cpython-34.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/constants.cpython-34.pyc,, +pip/_vendor/html5lib/filters/__pycache__/inject_meta_charset.cpython-34.pyc,, +pip/_vendor/requests/__pycache__/adapters.cpython-34.pyc,, +pip/_vendor/cachecontrol/__pycache__/adapter.cpython-34.pyc,, +pip/_vendor/requests/packages/urllib3/__pycache__/connectionpool.cpython-34.pyc,, +pip/_vendor/requests/__pycache__/models.cpython-34.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/lxmletree.cpython-34.pyc,, +pip/commands/__pycache__/freeze.cpython-34.pyc,, +pip/_vendor/requests/packages/urllib3/__pycache__/_collections.cpython-34.pyc,, 
+pip/_vendor/requests/packages/urllib3/packages/__pycache__/__init__.cpython-34.pyc,, +pip/_vendor/requests/packages/urllib3/packages/__pycache__/six.cpython-34.pyc,, +pip/_vendor/packaging/__pycache__/requirements.cpython-34.pyc,, +pip/_vendor/html5lib/__pycache__/utils.cpython-34.pyc,, +pip/__pycache__/download.cpython-34.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/mbcssm.cpython-34.pyc,, +pip/_vendor/pkg_resources/__pycache__/__init__.cpython-34.pyc,, +pip/utils/__pycache__/appdirs.cpython-34.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/escsm.cpython-34.pyc,, +pip/commands/__pycache__/help.cpython-34.pyc,, +pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-34.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/gb2312freq.cpython-34.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/pulldom.cpython-34.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/compat.cpython-34.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/escprober.cpython-34.pyc,, +pip/_vendor/lockfile/__pycache__/__init__.cpython-34.pyc,, +pip/operations/__pycache__/freeze.cpython-34.pyc,, +pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname/__pycache__/__init__.cpython-34.pyc,, +pip/_vendor/html5lib/filters/__pycache__/whitespace.cpython-34.pyc,, +pip/_vendor/packaging/__pycache__/specifiers.cpython-34.pyc,, +pip/_vendor/cachecontrol/__pycache__/cache.cpython-34.pyc,, +pip/_vendor/packaging/__pycache__/__init__.cpython-34.pyc,, +pip/vcs/__pycache__/bazaar.cpython-34.pyc,, +pip/commands/__pycache__/install.cpython-34.pyc,, +pip/_vendor/cachecontrol/__pycache__/serialize.cpython-34.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/big5prober.cpython-34.pyc,, +pip/_vendor/colorama/__pycache__/winterm.cpython-34.pyc,, +pip/_vendor/cachecontrol/__pycache__/compat.cpython-34.pyc,, +pip/req/__pycache__/req_uninstall.cpython-34.pyc,, +pip/_vendor/distlib/__pycache__/__init__.cpython-34.pyc,, 
+pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname/__pycache__/_implementation.cpython-34.pyc,, +pip/_vendor/html5lib/treeadapters/__pycache__/__init__.cpython-34.pyc,, +pip/_vendor/html5lib/treebuilders/__pycache__/_base.cpython-34.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/langhebrewmodel.cpython-34.pyc,, +pip/_vendor/__pycache__/pyparsing.cpython-34.pyc,, +pip/commands/__pycache__/wheel.cpython-34.pyc,, +pip/req/__pycache__/req_install.cpython-34.pyc,, +pip/_vendor/html5lib/filters/__pycache__/alphabeticalattributes.cpython-34.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/big5freq.cpython-34.pyc,, +pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-34.pyc,, +pip/_vendor/distlib/__pycache__/manifest.cpython-34.pyc,, +pip/_vendor/distlib/_backport/__pycache__/shutil.cpython-34.pyc,, +pip/_vendor/html5lib/filters/__pycache__/sanitizer.cpython-34.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/euckrprober.cpython-34.pyc,, +pip/_vendor/requests/__pycache__/compat.cpython-34.pyc,, +pip/__pycache__/status_codes.cpython-34.pyc,, +pip/_vendor/lockfile/__pycache__/sqlitelockfile.cpython-34.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/sbcharsetprober.cpython-34.pyc,, +pip/_vendor/html5lib/filters/__pycache__/_base.cpython-34.pyc,, +pip/_vendor/html5lib/trie/__pycache__/_base.cpython-34.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/etree.cpython-34.pyc,, +pip/_vendor/__pycache__/six.cpython-34.pyc,, +pip/_vendor/requests/packages/urllib3/util/__pycache__/connection.cpython-34.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/charsetprober.cpython-34.pyc,, +pip/_vendor/distlib/__pycache__/index.cpython-34.pyc,, +pip/commands/__pycache__/show.cpython-34.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/langcyrillicmodel.cpython-34.pyc,, +pip/_vendor/distlib/__pycache__/markers.cpython-34.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/chardetect.cpython-34.pyc,, 
+pip/_vendor/distlib/__pycache__/wheel.cpython-34.pyc,, +pip/_vendor/requests/__pycache__/utils.cpython-34.pyc,, +pip/_vendor/requests/__pycache__/api.cpython-34.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/hebrewprober.cpython-34.pyc,, +pip/_vendor/html5lib/treebuilders/__pycache__/__init__.cpython-34.pyc,, +pip/_vendor/_markerlib/__pycache__/__init__.cpython-34.pyc,, +pip/_vendor/requests/__pycache__/sessions.cpython-34.pyc,, +pip/__pycache__/index.cpython-34.pyc,, +pip/__pycache__/__main__.cpython-34.pyc,, +pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-34.pyc,, +pip/_vendor/requests/packages/urllib3/util/__pycache__/ssl_.cpython-34.pyc,, +pip/_vendor/progress/__pycache__/counter.cpython-34.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/_base.cpython-34.pyc,, +pip/_vendor/requests/__pycache__/status_codes.cpython-34.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/jisfreq.cpython-34.pyc,, +pip/_vendor/__pycache__/retrying.cpython-34.pyc,, +pip/_vendor/packaging/__pycache__/utils.cpython-34.pyc,, +pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-34.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/codingstatemachine.cpython-34.pyc,, +pip/_vendor/distlib/__pycache__/compat.cpython-34.pyc,, +pip/_vendor/requests/packages/urllib3/__pycache__/fields.cpython-34.pyc,, +pip/_vendor/html5lib/__pycache__/__init__.cpython-34.pyc,, +pip/utils/__pycache__/filesystem.cpython-34.pyc,, +pip/_vendor/colorama/__pycache__/__init__.cpython-34.pyc,, +pip/__pycache__/locations.cpython-34.pyc,, +pip/_vendor/requests/packages/urllib3/packages/__pycache__/ordered_dict.cpython-34.pyc,, +pip/__pycache__/basecommand.cpython-34.pyc,, +pip/__pycache__/wheel.cpython-34.pyc,, +pip/_vendor/html5lib/treebuilders/__pycache__/etree_lxml.cpython-34.pyc,, +pip/_vendor/html5lib/__pycache__/inputstream.cpython-34.pyc,, +pip/__pycache__/pep425tags.cpython-34.pyc,, +pip/_vendor/requests/__pycache__/structures.cpython-34.pyc,, 
+pip/_vendor/colorama/__pycache__/ansi.cpython-34.pyc,, +pip/compat/__pycache__/dictconfig.cpython-34.pyc,, +pip/vcs/__pycache__/mercurial.cpython-34.pyc,, +pip/_vendor/requests/packages/urllib3/__pycache__/poolmanager.cpython-34.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/mbcsgroupprober.cpython-34.pyc,, +pip/_vendor/colorama/__pycache__/win32.cpython-34.pyc,, +pip/_vendor/colorama/__pycache__/ansitowin32.cpython-34.pyc,, +pip/_vendor/requests/packages/urllib3/contrib/__pycache__/ntlmpool.cpython-34.pyc,, +pip/_vendor/lockfile/__pycache__/linklockfile.cpython-34.pyc,, +pip/_vendor/requests/__pycache__/hooks.cpython-34.pyc,, +pip/_vendor/colorama/__pycache__/initialise.cpython-34.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/mbcharsetprober.cpython-34.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/universaldetector.cpython-34.pyc,, +pip/_vendor/lockfile/__pycache__/mkdirlockfile.cpython-34.pyc,, +pip/__pycache__/baseparser.cpython-34.pyc,, +pip/_vendor/progress/__pycache__/__init__.cpython-34.pyc,, +pip/req/__pycache__/__init__.cpython-34.pyc,, +pip/__pycache__/__init__.cpython-34.pyc,, +pip/_vendor/requests/packages/urllib3/util/__pycache__/request.cpython-34.pyc,, +pip/_vendor/packaging/__pycache__/version.cpython-34.pyc,, +pip/_vendor/html5lib/trie/__pycache__/__init__.cpython-34.pyc,, +pip/_vendor/requests/packages/urllib3/__pycache__/filepost.cpython-34.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/sbcsgroupprober.cpython-34.pyc,, +pip/_vendor/html5lib/treeadapters/__pycache__/sax.cpython-34.pyc,, +pip/_vendor/lockfile/__pycache__/pidlockfile.cpython-34.pyc,, +pip/utils/__pycache__/hashes.cpython-34.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/langthaimodel.cpython-34.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/euctwprober.cpython-34.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/gb2312prober.cpython-34.pyc,, +pip/_vendor/cachecontrol/__pycache__/__init__.cpython-34.pyc,, 
+pip/_vendor/requests/packages/chardet/__pycache__/charsetgroupprober.cpython-34.pyc,, +pip/_vendor/distlib/_backport/__pycache__/sysconfig.cpython-34.pyc,, +pip/_vendor/requests/packages/urllib3/util/__pycache__/url.cpython-34.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/latin1prober.cpython-34.pyc,, +pip/utils/__pycache__/build.cpython-34.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/langgreekmodel.cpython-34.pyc,, +pip/_vendor/__pycache__/ipaddress.cpython-34.pyc,, +pip/_vendor/requests/__pycache__/exceptions.cpython-34.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/__init__.cpython-34.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/cp949prober.cpython-34.pyc,, +pip/commands/__pycache__/__init__.cpython-34.pyc,, +pip/_vendor/requests/packages/urllib3/util/__pycache__/timeout.cpython-34.pyc,, +pip/_vendor/requests/packages/urllib3/__pycache__/response.cpython-34.pyc,, +pip/__pycache__/exceptions.cpython-34.pyc,, +pip/_vendor/distlib/_backport/__pycache__/__init__.cpython-34.pyc,, +pip/compat/__pycache__/ordereddict.cpython-34.pyc,, +pip/_vendor/html5lib/treebuilders/__pycache__/etree.cpython-34.pyc,, +pip/_vendor/distlib/__pycache__/metadata.cpython-34.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/dom.cpython-34.pyc,, +pip/_vendor/__pycache__/__init__.cpython-34.pyc,, +pip/commands/__pycache__/search.cpython-34.pyc,, +pip/__pycache__/cmdoptions.cpython-34.pyc,, +pip/_vendor/html5lib/trie/__pycache__/py.cpython-34.pyc,, +pip/_vendor/html5lib/filters/__pycache__/__init__.cpython-34.pyc,, +pip/_vendor/html5lib/__pycache__/constants.cpython-34.pyc,, +pip/commands/__pycache__/completion.cpython-34.pyc,, +pip/_vendor/progress/__pycache__/spinner.cpython-34.pyc,, +pip/_vendor/packaging/__pycache__/_structures.cpython-34.pyc,, +pip/commands/__pycache__/download.cpython-34.pyc,, +pip/_vendor/requests/packages/urllib3/util/__pycache__/__init__.cpython-34.pyc,, 
+pip/_vendor/requests/packages/chardet/__pycache__/eucjpprober.cpython-34.pyc,, +pip/_vendor/progress/__pycache__/helpers.cpython-34.pyc,, +pip/_vendor/__pycache__/re-vendor.cpython-34.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/utf8prober.cpython-34.pyc,, +pip/_vendor/cachecontrol/__pycache__/controller.cpython-34.pyc,, +pip/_vendor/distlib/__pycache__/util.cpython-34.pyc,, +pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-34.pyc,, +pip/models/__pycache__/__init__.cpython-34.pyc,, +pip/_vendor/html5lib/serializer/__pycache__/__init__.cpython-34.pyc,, +pip/_vendor/distlib/__pycache__/resources.cpython-34.pyc,, +pip/vcs/__pycache__/__init__.cpython-34.pyc,, +pip/_vendor/requests/__pycache__/certs.cpython-34.pyc,, +pip/_vendor/html5lib/__pycache__/tokenizer.cpython-34.pyc,, +pip/_vendor/progress/__pycache__/bar.cpython-34.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/langhungarianmodel.cpython-34.pyc,, +pip/_vendor/distlib/_backport/__pycache__/tarfile.cpython-34.pyc,, +pip/_vendor/requests/packages/urllib3/__pycache__/exceptions.cpython-34.pyc,, +pip/_vendor/requests/__pycache__/__init__.cpython-34.pyc,, +pip/_vendor/_markerlib/__pycache__/markers.cpython-34.pyc,, +pip/_vendor/requests/packages/urllib3/__pycache__/__init__.cpython-34.pyc,, +pip/_vendor/html5lib/treewalkers/__pycache__/genshistream.cpython-34.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/euctwfreq.cpython-34.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/langbulgarianmodel.cpython-34.pyc,, +pip/vcs/__pycache__/subversion.cpython-34.pyc,, +pip/_vendor/distlib/__pycache__/version.cpython-34.pyc,, +pip/commands/__pycache__/hash.cpython-34.pyc,, +pip/req/__pycache__/req_file.cpython-34.pyc,, +pip/_vendor/requests/packages/urllib3/contrib/__pycache__/pyopenssl.cpython-34.pyc,, +pip/_vendor/requests/packages/urllib3/contrib/__pycache__/__init__.cpython-34.pyc,, +pip/_vendor/requests/packages/chardet/__pycache__/jpcntx.cpython-34.pyc,, 
+pip/utils/__pycache__/setuptools_build.cpython-34.pyc,, +pip/_vendor/requests/packages/urllib3/contrib/__pycache__/appengine.cpython-34.pyc,, diff --git a/lib/python3.4/site-packages/pip-8.0.2.dist-info/WHEEL b/lib/python3.4/site-packages/pip-8.1.1.dist-info/WHEEL similarity index 70% rename from lib/python3.4/site-packages/pip-8.0.2.dist-info/WHEEL rename to lib/python3.4/site-packages/pip-8.1.1.dist-info/WHEEL index 0de529b..8b6dd1b 100644 --- a/lib/python3.4/site-packages/pip-8.0.2.dist-info/WHEEL +++ b/lib/python3.4/site-packages/pip-8.1.1.dist-info/WHEEL @@ -1,5 +1,5 @@ Wheel-Version: 1.0 -Generator: bdist_wheel (0.26.0) +Generator: bdist_wheel (0.29.0) Root-Is-Purelib: true Tag: py2-none-any Tag: py3-none-any diff --git a/lib/python3.4/site-packages/pip-8.0.2.dist-info/entry_points.txt b/lib/python3.4/site-packages/pip-8.1.1.dist-info/entry_points.txt similarity index 73% rename from lib/python3.4/site-packages/pip-8.0.2.dist-info/entry_points.txt rename to lib/python3.4/site-packages/pip-8.1.1.dist-info/entry_points.txt index a237b5e..c02a8d5 100644 --- a/lib/python3.4/site-packages/pip-8.0.2.dist-info/entry_points.txt +++ b/lib/python3.4/site-packages/pip-8.1.1.dist-info/entry_points.txt @@ -1,5 +1,5 @@ [console_scripts] pip = pip:main pip3 = pip:main -pip3.4 = pip:main +pip3.5 = pip:main diff --git a/lib/python3.4/site-packages/pip-8.0.2.dist-info/metadata.json b/lib/python3.4/site-packages/pip-8.1.1.dist-info/metadata.json similarity index 67% rename from lib/python3.4/site-packages/pip-8.0.2.dist-info/metadata.json rename to lib/python3.4/site-packages/pip-8.1.1.dist-info/metadata.json index ccc2ef2..91434c5 100644 --- a/lib/python3.4/site-packages/pip-8.0.2.dist-info/metadata.json +++ b/lib/python3.4/site-packages/pip-8.1.1.dist-info/metadata.json @@ -1 +1 @@ -{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Topic :: Software Development :: Build Tools", 
"Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: Implementation :: PyPy"], "extensions": {"python.commands": {"wrap_console": {"pip": "pip:main", "pip3": "pip:main", "pip3.4": "pip:main"}}, "python.details": {"contacts": [{"email": "python-virtualenv@groups.google.com", "name": "The pip developers", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://pip.pypa.io/"}}, "python.exports": {"console_scripts": {"pip": "pip:main", "pip3": "pip:main", "pip3.4": "pip:main"}}}, "extras": ["testing"], "generator": "bdist_wheel (0.26.0)", "keywords": ["easy_install", "distutils", "setuptools", "egg", "virtualenv"], "license": "MIT", "metadata_version": "2.0", "name": "pip", "run_requires": [{"extra": "testing", "requires": ["mock", "pytest", "scripttest (>=1.3)", "virtualenv (>=1.10)"]}], "summary": "The PyPA recommended tool for installing Python packages.", "version": "8.0.2"} \ No newline at end of file +{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Topic :: Software Development :: Build Tools", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: Implementation :: PyPy"], "extensions": {"python.commands": {"wrap_console": {"pip": "pip:main", "pip3": "pip:main", "pip3.5": "pip:main"}}, "python.details": {"contacts": [{"email": "python-virtualenv@groups.google.com", "name": "The pip developers", 
"role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://pip.pypa.io/"}}, "python.exports": {"console_scripts": {"pip": "pip:main", "pip3": "pip:main", "pip3.5": "pip:main"}}}, "extras": ["testing"], "generator": "bdist_wheel (0.29.0)", "keywords": ["easy_install", "distutils", "setuptools", "egg", "virtualenv"], "license": "MIT", "metadata_version": "2.0", "name": "pip", "run_requires": [{"extra": "testing", "requires": ["mock", "pretend", "pytest", "scripttest (>=1.3)", "virtualenv (>=1.10)"]}], "summary": "The PyPA recommended tool for installing Python packages.", "test_requires": [{"requires": ["mock", "pretend", "pytest", "scripttest (>=1.3)", "virtualenv (>=1.10)"]}], "version": "8.1.1"} \ No newline at end of file diff --git a/lib/python3.4/site-packages/pip-8.0.2.dist-info/top_level.txt b/lib/python3.4/site-packages/pip-8.1.1.dist-info/top_level.txt similarity index 100% rename from lib/python3.4/site-packages/pip-8.0.2.dist-info/top_level.txt rename to lib/python3.4/site-packages/pip-8.1.1.dist-info/top_level.txt diff --git a/lib/python3.4/site-packages/pip/__init__.py b/lib/python3.4/site-packages/pip/__init__.py index 0603ca1..51e7eaf 100644 --- a/lib/python3.4/site-packages/pip/__init__.py +++ b/lib/python3.4/site-packages/pip/__init__.py @@ -1,6 +1,7 @@ #!/usr/bin/env python from __future__ import absolute_import +import locale import logging import os import optparse @@ -30,7 +31,7 @@ import pip.cmdoptions cmdoptions = pip.cmdoptions # The version as used in the setup.py and the docs conf.py -__version__ = "8.0.2" +__version__ = "8.1.1" logger = logging.getLogger(__name__) @@ -197,10 +198,6 @@ def main(args=None): if args is None: args = sys.argv[1:] - # Enable our Deprecation Warnings - for deprecation_warning in deprecation.DEPRECATIONS: - warnings.simplefilter("default", deprecation_warning) - # Configure our deprecation warnings to be sent through loggers deprecation.install_warning_logger() @@ 
-213,6 +210,9 @@ def main(args=None): sys.stderr.write(os.linesep) sys.exit(1) + # Needed for locale.getpreferredencoding(False) to work + # in pip.utils.encoding.auto_decode + locale.setlocale(locale.LC_ALL, '') command = commands_dict[cmd_name](isolated=check_isolated(cmd_args)) return command.main(cmd_args) diff --git a/lib/python3.4/site-packages/pip/_vendor/__init__.py b/lib/python3.4/site-packages/pip/_vendor/__init__.py index c64896a..d0e7b34 100644 --- a/lib/python3.4/site-packages/pip/_vendor/__init__.py +++ b/lib/python3.4/site-packages/pip/_vendor/__init__.py @@ -14,13 +14,13 @@ import sys # Downstream redistributors which have debundled our dependencies should also # patch this value to be true. This will trigger the additional patching # to cause things like "six" to be available as pip. -DEBUNDLED = True +DEBUNDLED = False # By default, look in this directory for a bunch of .whl files which we will # add to the beginning of sys.path before attempting to import anything. This # is done to support downstream re-distributors like Debian and Fedora who # wish to create their own Wheels for our dependencies to aid in debundling. -WHEEL_DIR = os.path.abspath(os.path.join(sys.prefix, 'share', 'python-wheels')) +WHEEL_DIR = os.path.abspath(os.path.dirname(__file__)) # Define a small helper function to alias our vendored modules to the real ones @@ -32,10 +32,22 @@ def vendored(modulename): try: __import__(vendored_name, globals(), locals(), level=0) except ImportError: - __import__(modulename, globals(), locals(), level=0) - sys.modules[vendored_name] = sys.modules[modulename] - base, head = vendored_name.rsplit(".", 1) - setattr(sys.modules[base], head, sys.modules[modulename]) + try: + __import__(modulename, globals(), locals(), level=0) + except ImportError: + # We can just silently allow import failures to pass here. If we + # got to this point it means that ``import pip._vendor.whatever`` + # failed and so did ``import whatever``. 
Since we're importing this + # upfront in an attempt to alias imports, not erroring here will + # just mean we get a regular import error whenever pip *actually* + # tries to import one of these modules to use it, which actually + # gives us a better error message than we would have otherwise + # gotten. + pass + else: + sys.modules[vendored_name] = sys.modules[modulename] + base, head = vendored_name.rsplit(".", 1) + setattr(sys.modules[base], head, sys.modules[modulename]) # If we're operating in a debundled setup, then we want to go ahead and trigger @@ -70,24 +82,14 @@ if DEBUNDLED: vendored("requests.packages.urllib3.connection") vendored("requests.packages.urllib3.connectionpool") vendored("requests.packages.urllib3.contrib") - try: - vendored("requests.packages.urllib3.contrib.ntlmpool") - except ImportError: - pass - try: - vendored("requests.packages.urllib3.contrib.pyopenssl") - except ImportError: - pass + vendored("requests.packages.urllib3.contrib.ntlmpool") + vendored("requests.packages.urllib3.contrib.pyopenssl") vendored("requests.packages.urllib3.exceptions") vendored("requests.packages.urllib3.fields") vendored("requests.packages.urllib3.filepost") vendored("requests.packages.urllib3.packages") - try: - vendored("requests.packages.urllib3.packages.ordered_dict") - vendored("requests.packages.urllib3.packages.six") - except ImportError: - # Debian already unbundles these from requests. - pass + vendored("requests.packages.urllib3.packages.ordered_dict") + vendored("requests.packages.urllib3.packages.six") vendored("requests.packages.urllib3.packages.ssl_match_hostname") vendored("requests.packages.urllib3.packages.ssl_match_hostname." 
"_implementation") diff --git a/lib/python3.4/site-packages/_markerlib/__init__.py b/lib/python3.4/site-packages/pip/_vendor/_markerlib/__init__.py similarity index 84% rename from lib/python3.4/site-packages/_markerlib/__init__.py rename to lib/python3.4/site-packages/pip/_vendor/_markerlib/__init__.py index e2b237b..197781a 100644 --- a/lib/python3.4/site-packages/_markerlib/__init__.py +++ b/lib/python3.4/site-packages/pip/_vendor/_markerlib/__init__.py @@ -1,6 +1,6 @@ try: import ast - from _markerlib.markers import default_environment, compile, interpret + from pip._vendor._markerlib.markers import default_environment, compile, interpret except ImportError: if 'ast' in globals(): raise diff --git a/lib/python3.4/site-packages/_markerlib/markers.py b/lib/python3.4/site-packages/pip/_vendor/_markerlib/markers.py similarity index 100% rename from lib/python3.4/site-packages/_markerlib/markers.py rename to lib/python3.4/site-packages/pip/_vendor/_markerlib/markers.py diff --git a/lib/python3.4/site-packages/pip/_vendor/cachecontrol/__init__.py b/lib/python3.4/site-packages/pip/_vendor/cachecontrol/__init__.py new file mode 100644 index 0000000..724e220 --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/cachecontrol/__init__.py @@ -0,0 +1,11 @@ +"""CacheControl import Interface. + +Make it easy to import from cachecontrol without long namespaces. 
+""" +__author__ = 'Eric Larson' +__email__ = 'eric@ionrock.org' +__version__ = '0.11.6' + +from .wrapper import CacheControl +from .adapter import CacheControlAdapter +from .controller import CacheController diff --git a/lib/python3.4/site-packages/pip/_vendor/cachecontrol/_cmd.py b/lib/python3.4/site-packages/pip/_vendor/cachecontrol/_cmd.py new file mode 100644 index 0000000..afdcc88 --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/cachecontrol/_cmd.py @@ -0,0 +1,60 @@ +import logging + +from pip._vendor import requests + +from pip._vendor.cachecontrol.adapter import CacheControlAdapter +from pip._vendor.cachecontrol.cache import DictCache +from pip._vendor.cachecontrol.controller import logger + +from argparse import ArgumentParser + + +def setup_logging(): + logger.setLevel(logging.DEBUG) + handler = logging.StreamHandler() + logger.addHandler(handler) + + +def get_session(): + adapter = CacheControlAdapter( + DictCache(), + cache_etags=True, + serializer=None, + heuristic=None, + ) + sess = requests.Session() + sess.mount('http://', adapter) + sess.mount('https://', adapter) + + sess.cache_controller = adapter.controller + return sess + + +def get_args(): + parser = ArgumentParser() + parser.add_argument('url', help='The URL to try and cache') + return parser.parse_args() + + +def main(args=None): + args = get_args() + sess = get_session() + + # Make a request to get a response + resp = sess.get(args.url) + + # Turn on logging + setup_logging() + + # try setting the cache + sess.cache_controller.cache_response(resp.request, resp.raw) + + # Now try to get it + if sess.cache_controller.cached_request(resp.request): + print('Cached!') + else: + print('Not cached :(') + + +if __name__ == '__main__': + main() diff --git a/lib/python3.4/site-packages/pip/_vendor/cachecontrol/adapter.py b/lib/python3.4/site-packages/pip/_vendor/cachecontrol/adapter.py new file mode 100644 index 0000000..74589e0 --- /dev/null +++ 
b/lib/python3.4/site-packages/pip/_vendor/cachecontrol/adapter.py @@ -0,0 +1,117 @@ +import functools + +from pip._vendor.requests.adapters import HTTPAdapter + +from .controller import CacheController +from .cache import DictCache +from .filewrapper import CallbackFileWrapper + + +class CacheControlAdapter(HTTPAdapter): + invalidating_methods = set(['PUT', 'DELETE']) + + def __init__(self, cache=None, + cache_etags=True, + controller_class=None, + serializer=None, + heuristic=None, + *args, **kw): + super(CacheControlAdapter, self).__init__(*args, **kw) + self.cache = cache or DictCache() + self.heuristic = heuristic + + controller_factory = controller_class or CacheController + self.controller = controller_factory( + self.cache, + cache_etags=cache_etags, + serializer=serializer, + ) + + def send(self, request, **kw): + """ + Send a request. Use the request information to see if it + exists in the cache and cache the response if we need to and can. + """ + if request.method == 'GET': + cached_response = self.controller.cached_request(request) + if cached_response: + return self.build_response(request, cached_response, + from_cache=True) + + # check for etags and add headers if appropriate + request.headers.update( + self.controller.conditional_headers(request) + ) + + resp = super(CacheControlAdapter, self).send(request, **kw) + + return resp + + def build_response(self, request, response, from_cache=False): + """ + Build a response by making a request or using the cache. + + This will end up calling send and returning a potentially + cached response + """ + if not from_cache and request.method == 'GET': + + # apply any expiration heuristics + if response.status == 304: + # We must have sent an ETag request. This could mean + # that we've been expired already or that we simply + # have an etag. In either case, we want to try and + # update the cache if that is the case. 
+ cached_response = self.controller.update_cached_response( + request, response + ) + + if cached_response is not response: + from_cache = True + + # We are done with the server response, read a + # possible response body (compliant servers will + # not return one, but we cannot be 100% sure) and + # release the connection back to the pool. + response.read(decode_content=False) + response.release_conn() + + response = cached_response + + # We always cache the 301 responses + elif response.status == 301: + self.controller.cache_response(request, response) + else: + # Check for any heuristics that might update headers + # before trying to cache. + if self.heuristic: + response = self.heuristic.apply(response) + + # Wrap the response file with a wrapper that will cache the + # response when the stream has been consumed. + response._fp = CallbackFileWrapper( + response._fp, + functools.partial( + self.controller.cache_response, + request, + response, + ) + ) + + resp = super(CacheControlAdapter, self).build_response( + request, response + ) + + # See if we should invalidate the cache. + if request.method in self.invalidating_methods and resp.ok: + cache_url = self.controller.cache_url(request.url) + self.cache.delete(cache_url) + + # Give the request a from_cache attr to let people use it + resp.from_cache = from_cache + + return resp + + def close(self): + self.cache.close() + super(CacheControlAdapter, self).close() diff --git a/lib/python3.4/site-packages/pip/_vendor/cachecontrol/cache.py b/lib/python3.4/site-packages/pip/_vendor/cachecontrol/cache.py new file mode 100644 index 0000000..7389a73 --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/cachecontrol/cache.py @@ -0,0 +1,39 @@ +""" +The cache object API for implementing caches. The default is a thread +safe in-memory dictionary. 
+""" +from threading import Lock + + +class BaseCache(object): + + def get(self, key): + raise NotImplemented() + + def set(self, key, value): + raise NotImplemented() + + def delete(self, key): + raise NotImplemented() + + def close(self): + pass + + +class DictCache(BaseCache): + + def __init__(self, init_dict=None): + self.lock = Lock() + self.data = init_dict or {} + + def get(self, key): + return self.data.get(key, None) + + def set(self, key, value): + with self.lock: + self.data.update({key: value}) + + def delete(self, key): + with self.lock: + if key in self.data: + self.data.pop(key) diff --git a/lib/python3.4/site-packages/pip/_vendor/cachecontrol/caches/__init__.py b/lib/python3.4/site-packages/pip/_vendor/cachecontrol/caches/__init__.py new file mode 100644 index 0000000..f9e66a1 --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/cachecontrol/caches/__init__.py @@ -0,0 +1,18 @@ +from textwrap import dedent + +try: + from .file_cache import FileCache +except ImportError: + notice = dedent(''' + NOTE: In order to use the FileCache you must have + lockfile installed. 
You can install it via pip: + pip install lockfile + ''') + print(notice) + + +try: + import redis + from .redis_cache import RedisCache +except ImportError: + pass diff --git a/lib/python3.4/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py b/lib/python3.4/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py new file mode 100644 index 0000000..b77728f --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py @@ -0,0 +1,116 @@ +import hashlib +import os + +from pip._vendor.lockfile import LockFile +from pip._vendor.lockfile.mkdirlockfile import MkdirLockFile + +from ..cache import BaseCache +from ..controller import CacheController + + +def _secure_open_write(filename, fmode): + # We only want to write to this file, so open it in write only mode + flags = os.O_WRONLY + + # os.O_CREAT | os.O_EXCL will fail if the file already exists, so we only + # will open *new* files. + # We specify this because we want to ensure that the mode we pass is the + # mode of the file. + flags |= os.O_CREAT | os.O_EXCL + + # Do not follow symlinks to prevent someone from making a symlink that + # we follow and insecurely open a cache file. + if hasattr(os, "O_NOFOLLOW"): + flags |= os.O_NOFOLLOW + + # On Windows we'll mark this file as binary + if hasattr(os, "O_BINARY"): + flags |= os.O_BINARY + + # Before we open our file, we want to delete any existing file that is + # there + try: + os.remove(filename) + except (IOError, OSError): + # The file must not exist already, so we can just skip ahead to opening + pass + + # Open our file, the use of os.O_CREAT | os.O_EXCL will ensure that if a + # race condition happens between the os.remove and this line, that an + # error will be raised. Because we utilize a lockfile this should only + # happen if someone is attempting to attack us. 
+ fd = os.open(filename, flags, fmode) + try: + return os.fdopen(fd, "wb") + except: + # An error occurred wrapping our FD in a file object + os.close(fd) + raise + + +class FileCache(BaseCache): + def __init__(self, directory, forever=False, filemode=0o0600, + dirmode=0o0700, use_dir_lock=None, lock_class=None): + + if use_dir_lock is not None and lock_class is not None: + raise ValueError("Cannot use use_dir_lock and lock_class together") + + if use_dir_lock: + lock_class = MkdirLockFile + + if lock_class is None: + lock_class = LockFile + + self.directory = directory + self.forever = forever + self.filemode = filemode + self.dirmode = dirmode + self.lock_class = lock_class + + + @staticmethod + def encode(x): + return hashlib.sha224(x.encode()).hexdigest() + + def _fn(self, name): + # NOTE: This method should not change as some may depend on it. + # See: https://github.com/ionrock/cachecontrol/issues/63 + hashed = self.encode(name) + parts = list(hashed[:5]) + [hashed] + return os.path.join(self.directory, *parts) + + def get(self, key): + name = self._fn(key) + if not os.path.exists(name): + return None + + with open(name, 'rb') as fh: + return fh.read() + + def set(self, key, value): + name = self._fn(key) + + # Make sure the directory exists + try: + os.makedirs(os.path.dirname(name), self.dirmode) + except (IOError, OSError): + pass + + with self.lock_class(name) as lock: + # Write our actual file + with _secure_open_write(lock.path, self.filemode) as fh: + fh.write(value) + + def delete(self, key): + name = self._fn(key) + if not self.forever: + os.remove(name) + + +def url_to_file_path(url, filecache): + """Return the file cache path based on the URL. + + This does not ensure the file exists! 
+ """ + key = CacheController.cache_url(url) + return filecache._fn(key) diff --git a/lib/python3.4/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py b/lib/python3.4/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py new file mode 100644 index 0000000..9f5d55f --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py @@ -0,0 +1,41 @@ +from __future__ import division + +from datetime import datetime + + +def total_seconds(td): + """Python 2.6 compatability""" + if hasattr(td, 'total_seconds'): + return td.total_seconds() + + ms = td.microseconds + secs = (td.seconds + td.days * 24 * 3600) + return (ms + secs * 10**6) / 10**6 + + +class RedisCache(object): + + def __init__(self, conn): + self.conn = conn + + def get(self, key): + return self.conn.get(key) + + def set(self, key, value, expires=None): + if not expires: + self.conn.set(key, value) + else: + expires = expires - datetime.now() + self.conn.setex(key, total_seconds(expires), value) + + def delete(self, key): + self.conn.delete(key) + + def clear(self): + """Helper for clearing all the keys in a database. 
Use with + caution!""" + for key in self.conn.keys(): + self.conn.delete(key) + + def close(self): + self.conn.disconnect() diff --git a/lib/python3.4/site-packages/pip/_vendor/cachecontrol/compat.py b/lib/python3.4/site-packages/pip/_vendor/cachecontrol/compat.py new file mode 100644 index 0000000..018e6ac --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/cachecontrol/compat.py @@ -0,0 +1,20 @@ +try: + from urllib.parse import urljoin +except ImportError: + from urlparse import urljoin + + +try: + import cPickle as pickle +except ImportError: + import pickle + + +from pip._vendor.requests.packages.urllib3.response import HTTPResponse +from pip._vendor.requests.packages.urllib3.util import is_fp_closed + +# Replicate some six behaviour +try: + text_type = (unicode,) +except NameError: + text_type = (str,) diff --git a/lib/python3.4/site-packages/pip/_vendor/cachecontrol/controller.py b/lib/python3.4/site-packages/pip/_vendor/cachecontrol/controller.py new file mode 100644 index 0000000..6e591f8 --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/cachecontrol/controller.py @@ -0,0 +1,353 @@ +""" +The httplib2 algorithms ported for use with requests. +""" +import logging +import re +import calendar +import time +from email.utils import parsedate_tz + +from pip._vendor.requests.structures import CaseInsensitiveDict + +from .cache import DictCache +from .serialize import Serializer + + +logger = logging.getLogger(__name__) + +URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?") + + +def parse_uri(uri): + """Parses a URI using the regex given in Appendix B of RFC 3986. + + (scheme, authority, path, query, fragment) = parse_uri(uri) + """ + groups = URI.match(uri).groups() + return (groups[1], groups[3], groups[4], groups[6], groups[8]) + + +class CacheController(object): + """An interface to see if request should cached or not. 
+ """ + def __init__(self, cache=None, cache_etags=True, serializer=None): + self.cache = cache or DictCache() + self.cache_etags = cache_etags + self.serializer = serializer or Serializer() + + @classmethod + def _urlnorm(cls, uri): + """Normalize the URL to create a safe key for the cache""" + (scheme, authority, path, query, fragment) = parse_uri(uri) + if not scheme or not authority: + raise Exception("Only absolute URIs are allowed. uri = %s" % uri) + + scheme = scheme.lower() + authority = authority.lower() + + if not path: + path = "/" + + # Could do syntax based normalization of the URI before + # computing the digest. See Section 6.2.2 of Std 66. + request_uri = query and "?".join([path, query]) or path + defrag_uri = scheme + "://" + authority + request_uri + + return defrag_uri + + @classmethod + def cache_url(cls, uri): + return cls._urlnorm(uri) + + def parse_cache_control(self, headers): + """ + Parse the cache control headers returning a dictionary with values + for the different directives. + """ + retval = {} + + cc_header = 'cache-control' + if 'Cache-Control' in headers: + cc_header = 'Cache-Control' + + if cc_header in headers: + parts = headers[cc_header].split(',') + parts_with_args = [ + tuple([x.strip().lower() for x in part.split("=", 1)]) + for part in parts if -1 != part.find("=") + ] + parts_wo_args = [ + (name.strip().lower(), 1) + for name in parts if -1 == name.find("=") + ] + retval = dict(parts_with_args + parts_wo_args) + return retval + + def cached_request(self, request): + """ + Return a cached response if it exists in the cache, otherwise + return False. 
+ """ + cache_url = self.cache_url(request.url) + logger.debug('Looking up "%s" in the cache', cache_url) + cc = self.parse_cache_control(request.headers) + + # Bail out if the request insists on fresh data + if 'no-cache' in cc: + logger.debug('Request header has "no-cache", cache bypassed') + return False + + if 'max-age' in cc and cc['max-age'] == 0: + logger.debug('Request header has "max_age" as 0, cache bypassed') + return False + + # Request allows serving from the cache, let's see if we find something + cache_data = self.cache.get(cache_url) + if cache_data is None: + logger.debug('No cache entry available') + return False + + # Check whether it can be deserialized + resp = self.serializer.loads(request, cache_data) + if not resp: + logger.warning('Cache entry deserialization failed, entry ignored') + return False + + # If we have a cached 301, return it immediately. We don't + # need to test our response for other headers b/c it is + # intrinsically "cacheable" as it is Permanent. + # See: + # https://tools.ietf.org/html/rfc7231#section-6.4.2 + # + # Client can try to refresh the value by repeating the request + # with cache busting headers as usual (ie no-cache). + if resp.status == 301: + msg = ('Returning cached "301 Moved Permanently" response ' + '(ignoring date and etag information)') + logger.debug(msg) + return resp + + headers = CaseInsensitiveDict(resp.headers) + if not headers or 'date' not in headers: + if 'etag' not in headers: + # Without date or etag, the cached response can never be used + # and should be deleted. + logger.debug('Purging cached response: no date or etag') + self.cache.delete(cache_url) + logger.debug('Ignoring cached response: no date') + return False + + now = time.time() + date = calendar.timegm( + parsedate_tz(headers['date']) + ) + current_age = max(0, now - date) + logger.debug('Current age based on date: %i', current_age) + + # TODO: There is an assumption that the result will be a + # urllib3 response object. 
This may not be best since we + # could probably avoid instantiating or constructing the + # response until we know we need it. + resp_cc = self.parse_cache_control(headers) + + # determine freshness + freshness_lifetime = 0 + + # Check the max-age pragma in the cache control header + if 'max-age' in resp_cc and resp_cc['max-age'].isdigit(): + freshness_lifetime = int(resp_cc['max-age']) + logger.debug('Freshness lifetime from max-age: %i', + freshness_lifetime) + + # If there isn't a max-age, check for an expires header + elif 'expires' in headers: + expires = parsedate_tz(headers['expires']) + if expires is not None: + expire_time = calendar.timegm(expires) - date + freshness_lifetime = max(0, expire_time) + logger.debug("Freshness lifetime from expires: %i", + freshness_lifetime) + + # Determine if we are setting freshness limit in the + # request. Note, this overrides what was in the response. + if 'max-age' in cc: + try: + freshness_lifetime = int(cc['max-age']) + logger.debug('Freshness lifetime from request max-age: %i', + freshness_lifetime) + except ValueError: + freshness_lifetime = 0 + + if 'min-fresh' in cc: + try: + min_fresh = int(cc['min-fresh']) + except ValueError: + min_fresh = 0 + # adjust our current age by our min fresh + current_age += min_fresh + logger.debug('Adjusted current age from min-fresh: %i', + current_age) + + # Return entry if it is fresh enough + if freshness_lifetime > current_age: + logger.debug('The response is "fresh", returning cached response') + logger.debug('%i > %i', freshness_lifetime, current_age) + return resp + + # we're not fresh. 
If we don't have an Etag, clear it out + if 'etag' not in headers: + logger.debug( + 'The cached response is "stale" with no etag, purging' + ) + self.cache.delete(cache_url) + + # return the original handler + return False + + def conditional_headers(self, request): + cache_url = self.cache_url(request.url) + resp = self.serializer.loads(request, self.cache.get(cache_url)) + new_headers = {} + + if resp: + headers = CaseInsensitiveDict(resp.headers) + + if 'etag' in headers: + new_headers['If-None-Match'] = headers['ETag'] + + if 'last-modified' in headers: + new_headers['If-Modified-Since'] = headers['Last-Modified'] + + return new_headers + + def cache_response(self, request, response, body=None): + """ + Algorithm for caching requests. + + This assumes a requests Response object. + """ + # From httplib2: Don't cache 206's since we aren't going to + # handle byte range requests + cacheable_status_codes = [200, 203, 300, 301] + if response.status not in cacheable_status_codes: + logger.debug( + 'Status code %s not in %s', + response.status, + cacheable_status_codes + ) + return + + response_headers = CaseInsensitiveDict(response.headers) + + # If we've been given a body, our response has a Content-Length, that + # Content-Length is valid then we can check to see if the body we've + # been given matches the expected size, and if it doesn't we'll just + # skip trying to cache it. 
+ if (body is not None and + "content-length" in response_headers and + response_headers["content-length"].isdigit() and + int(response_headers["content-length"]) != len(body)): + return + + cc_req = self.parse_cache_control(request.headers) + cc = self.parse_cache_control(response_headers) + + cache_url = self.cache_url(request.url) + logger.debug('Updating cache with response from "%s"', cache_url) + + # Delete it from the cache if we happen to have it stored there + no_store = False + if cc.get('no-store'): + no_store = True + logger.debug('Response header has "no-store"') + if cc_req.get('no-store'): + no_store = True + logger.debug('Request header has "no-store"') + if no_store and self.cache.get(cache_url): + logger.debug('Purging existing cache entry to honor "no-store"') + self.cache.delete(cache_url) + + # If we've been given an etag, then keep the response + if self.cache_etags and 'etag' in response_headers: + logger.debug('Caching due to etag') + self.cache.set( + cache_url, + self.serializer.dumps(request, response, body=body), + ) + + # Add to the cache any 301s. We do this before looking that + # the Date headers. + elif response.status == 301: + logger.debug('Caching permanant redirect') + self.cache.set( + cache_url, + self.serializer.dumps(request, response) + ) + + # Add to the cache if the response headers demand it. If there + # is no date header then we can't do anything about expiring + # the cache. + elif 'date' in response_headers: + # cache when there is a max-age > 0 + if cc and cc.get('max-age'): + if int(cc['max-age']) > 0: + logger.debug('Caching b/c date exists and max-age > 0') + self.cache.set( + cache_url, + self.serializer.dumps(request, response, body=body), + ) + + # If the request can expire, it means we should cache it + # in the meantime. 
+ elif 'expires' in response_headers: + if response_headers['expires']: + logger.debug('Caching b/c of expires header') + self.cache.set( + cache_url, + self.serializer.dumps(request, response, body=body), + ) + + def update_cached_response(self, request, response): + """On a 304 we will get a new set of headers that we want to + update our cached value with, assuming we have one. + + This should only ever be called when we've sent an ETag and + gotten a 304 as the response. + """ + cache_url = self.cache_url(request.url) + + cached_response = self.serializer.loads( + request, + self.cache.get(cache_url) + ) + + if not cached_response: + # we didn't have a cached response + return response + + # Lets update our headers with the headers from the new request: + # http://tools.ietf.org/html/draft-ietf-httpbis-p4-conditional-26#section-4.1 + # + # The server isn't supposed to send headers that would make + # the cached body invalid. But... just in case, we'll be sure + # to strip out ones we know that might be problmatic due to + # typical assumptions. 
+ excluded_headers = [ + "content-length", + ] + + cached_response.headers.update( + dict((k, v) for k, v in response.headers.items() + if k.lower() not in excluded_headers) + ) + + # we want a 200 b/c we have content via the cache + cached_response.status = 200 + + # update our cache + self.cache.set( + cache_url, + self.serializer.dumps(request, cached_response), + ) + + return cached_response diff --git a/lib/python3.4/site-packages/pip/_vendor/cachecontrol/filewrapper.py b/lib/python3.4/site-packages/pip/_vendor/cachecontrol/filewrapper.py new file mode 100644 index 0000000..4b91bce --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/cachecontrol/filewrapper.py @@ -0,0 +1,63 @@ +from io import BytesIO + + +class CallbackFileWrapper(object): + """ + Small wrapper around a fp object which will tee everything read into a + buffer, and when that file is closed it will execute a callback with the + contents of that buffer. + + All attributes are proxied to the underlying file object. + + This class uses members with a double underscore (__) leading prefix so as + not to accidentally shadow an attribute. + """ + + def __init__(self, fp, callback): + self.__buf = BytesIO() + self.__fp = fp + self.__callback = callback + + def __getattr__(self, name): + # The vaguaries of garbage collection means that self.__fp is + # not always set. By using __getattribute__ and the private + # name[0] allows looking up the attribute value and raising an + # AttributeError when it doesn't exist. This stop thigns from + # infinitely recursing calls to getattr in the case where + # self.__fp hasn't been set. + # + # [0] https://docs.python.org/2/reference/expressions.html#atom-identifiers + fp = self.__getattribute__('_CallbackFileWrapper__fp') + return getattr(fp, name) + + def __is_fp_closed(self): + try: + return self.__fp.fp is None + except AttributeError: + pass + + try: + return self.__fp.closed + except AttributeError: + pass + + # We just don't cache it then. 
+ # TODO: Add some logging here... + return False + + def read(self, amt=None): + data = self.__fp.read(amt) + self.__buf.write(data) + + if self.__is_fp_closed(): + if self.__callback: + self.__callback(self.__buf.getvalue()) + + # We assign this to None here, because otherwise we can get into + # really tricky problems where the CPython interpreter dead locks + # because the callback is holding a reference to something which + # has a __del__ method. Setting this to None breaks the cycle + # and allows the garbage collector to do it's thing normally. + self.__callback = None + + return data diff --git a/lib/python3.4/site-packages/pip/_vendor/cachecontrol/heuristics.py b/lib/python3.4/site-packages/pip/_vendor/cachecontrol/heuristics.py new file mode 100644 index 0000000..94715a4 --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/cachecontrol/heuristics.py @@ -0,0 +1,138 @@ +import calendar +import time + +from email.utils import formatdate, parsedate, parsedate_tz + +from datetime import datetime, timedelta + +TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT" + + +def expire_after(delta, date=None): + date = date or datetime.now() + return date + delta + + +def datetime_to_header(dt): + return formatdate(calendar.timegm(dt.timetuple())) + + +class BaseHeuristic(object): + + def warning(self, response): + """ + Return a valid 1xx warning header value describing the cache + adjustments. + + The response is provided too allow warnings like 113 + http://tools.ietf.org/html/rfc7234#section-5.5.4 where we need + to explicitly say response is over 24 hours old. + """ + return '110 - "Response is Stale"' + + def update_headers(self, response): + """Update the response headers with any new headers. + + NOTE: This SHOULD always include some Warning header to + signify that the response was cached by the client, not + by way of the provided headers. 
+ """ + return {} + + def apply(self, response): + updated_headers = self.update_headers(response) + + if updated_headers: + response.headers.update(updated_headers) + warning_header_value = self.warning(response) + if warning_header_value is not None: + response.headers.update({'Warning': warning_header_value}) + + return response + + +class OneDayCache(BaseHeuristic): + """ + Cache the response by providing an expires 1 day in the + future. + """ + def update_headers(self, response): + headers = {} + + if 'expires' not in response.headers: + date = parsedate(response.headers['date']) + expires = expire_after(timedelta(days=1), + date=datetime(*date[:6])) + headers['expires'] = datetime_to_header(expires) + headers['cache-control'] = 'public' + return headers + + +class ExpiresAfter(BaseHeuristic): + """ + Cache **all** requests for a defined time period. + """ + + def __init__(self, **kw): + self.delta = timedelta(**kw) + + def update_headers(self, response): + expires = expire_after(self.delta) + return { + 'expires': datetime_to_header(expires), + 'cache-control': 'public', + } + + def warning(self, response): + tmpl = '110 - Automatically cached for %s. Response might be stale' + return tmpl % self.delta + + +class LastModified(BaseHeuristic): + """ + If there is no Expires header already, fall back on Last-Modified + using the heuristic from + http://tools.ietf.org/html/rfc7234#section-4.2.2 + to calculate a reasonable value. + + Firefox also does something like this per + https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ + http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397 + Unlike mozilla we limit this to 24-hr. 
+ """ + cacheable_by_default_statuses = set([ + 200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501 + ]) + + def update_headers(self, resp): + headers = resp.headers + + if 'expires' in headers: + return {} + + if 'cache-control' in headers and headers['cache-control'] != 'public': + return {} + + if resp.status not in self.cacheable_by_default_statuses: + return {} + + if 'date' not in headers or 'last-modified' not in headers: + return {} + + date = calendar.timegm(parsedate_tz(headers['date'])) + last_modified = parsedate(headers['last-modified']) + if date is None or last_modified is None: + return {} + + now = time.time() + current_age = max(0, now - date) + delta = date - calendar.timegm(last_modified) + freshness_lifetime = max(0, min(delta / 10, 24 * 3600)) + if freshness_lifetime <= current_age: + return {} + + expires = date + freshness_lifetime + return {'expires': time.strftime(TIME_FMT, time.gmtime(expires))} + + def warning(self, resp): + return None diff --git a/lib/python3.4/site-packages/pip/_vendor/cachecontrol/serialize.py b/lib/python3.4/site-packages/pip/_vendor/cachecontrol/serialize.py new file mode 100644 index 0000000..ffbfbf6 --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/cachecontrol/serialize.py @@ -0,0 +1,190 @@ +import base64 +import io +import json +import zlib + +from pip._vendor.requests.structures import CaseInsensitiveDict + +from .compat import HTTPResponse, pickle, text_type + + +def _b64_encode_bytes(b): + return base64.b64encode(b).decode("ascii") + + +def _b64_encode_str(s): + return _b64_encode_bytes(s.encode("utf8")) + + +def _b64_encode(s): + if isinstance(s, text_type): + return _b64_encode_str(s) + return _b64_encode_bytes(s) + + +def _b64_decode_bytes(b): + return base64.b64decode(b.encode("ascii")) + + +def _b64_decode_str(s): + return _b64_decode_bytes(s).decode("utf8") + + +class Serializer(object): + + def dumps(self, request, response, body=None): + response_headers = 
CaseInsensitiveDict(response.headers) + + if body is None: + body = response.read(decode_content=False) + + # NOTE: 99% sure this is dead code. I'm only leaving it + # here b/c I don't have a test yet to prove + # it. Basically, before using + # `cachecontrol.filewrapper.CallbackFileWrapper`, + # this made an effort to reset the file handle. The + # `CallbackFileWrapper` short circuits this code by + # setting the body as the content is consumed, the + # result being a `body` argument is *always* passed + # into cache_response, and in turn, + # `Serializer.dump`. + response._fp = io.BytesIO(body) + + data = { + "response": { + "body": _b64_encode_bytes(body), + "headers": dict( + (_b64_encode(k), _b64_encode(v)) + for k, v in response.headers.items() + ), + "status": response.status, + "version": response.version, + "reason": _b64_encode_str(response.reason), + "strict": response.strict, + "decode_content": response.decode_content, + }, + } + + # Construct our vary headers + data["vary"] = {} + if "vary" in response_headers: + varied_headers = response_headers['vary'].split(',') + for header in varied_headers: + header = header.strip() + data["vary"][header] = request.headers.get(header, None) + + # Encode our Vary headers to ensure they can be serialized as JSON + data["vary"] = dict( + (_b64_encode(k), _b64_encode(v) if v is not None else v) + for k, v in data["vary"].items() + ) + + return b",".join([ + b"cc=2", + zlib.compress( + json.dumps( + data, separators=(",", ":"), sort_keys=True, + ).encode("utf8"), + ), + ]) + + def loads(self, request, data): + # Short circuit if we've been given an empty set of data + if not data: + return + + # Determine what version of the serializer the data was serialized + # with + try: + ver, data = data.split(b",", 1) + except ValueError: + ver = b"cc=0" + + # Make sure that our "ver" is actually a version and isn't a false + # positive from a , being in the data stream. 
+ if ver[:3] != b"cc=": + data = ver + data + ver = b"cc=0" + + # Get the version number out of the cc=N + ver = ver.split(b"=", 1)[-1].decode("ascii") + + # Dispatch to the actual load method for the given version + try: + return getattr(self, "_loads_v{0}".format(ver))(request, data) + except AttributeError: + # This is a version we don't have a loads function for, so we'll + # just treat it as a miss and return None + return + + def prepare_response(self, request, cached): + """Verify our vary headers match and construct a real urllib3 + HTTPResponse object. + """ + # Special case the '*' Vary value as it means we cannot actually + # determine if the cached response is suitable for this request. + if "*" in cached.get("vary", {}): + return + + # Ensure that the Vary headers for the cached response match our + # request + for header, value in cached.get("vary", {}).items(): + if request.headers.get(header, None) != value: + return + + body_raw = cached["response"].pop("body") + + try: + body = io.BytesIO(body_raw) + except TypeError: + # This can happen if cachecontrol serialized to v1 format (pickle) + # using Python 2. A Python 2 str(byte string) will be unpickled as + # a Python 3 str (unicode string), which will cause the above to + # fail with: + # + # TypeError: 'str' does not support the buffer interface + body = io.BytesIO(body_raw.encode('utf8')) + + return HTTPResponse( + body=body, + preload_content=False, + **cached["response"] + ) + + def _loads_v0(self, request, data): + # The original legacy cache data. This doesn't contain enough + # information to construct everything we need, so we'll treat this as + # a miss. 
+ return + + def _loads_v1(self, request, data): + try: + cached = pickle.loads(data) + except ValueError: + return + + return self.prepare_response(request, cached) + + def _loads_v2(self, request, data): + try: + cached = json.loads(zlib.decompress(data).decode("utf8")) + except ValueError: + return + + # We need to decode the items that we've base64 encoded + cached["response"]["body"] = _b64_decode_bytes( + cached["response"]["body"] + ) + cached["response"]["headers"] = dict( + (_b64_decode_str(k), _b64_decode_str(v)) + for k, v in cached["response"]["headers"].items() + ) + cached["response"]["reason"] = _b64_decode_str( + cached["response"]["reason"], + ) + cached["vary"] = dict( + (_b64_decode_str(k), _b64_decode_str(v) if v is not None else v) + for k, v in cached["vary"].items() + ) + + return self.prepare_response(request, cached) diff --git a/lib/python3.4/site-packages/pip/_vendor/cachecontrol/wrapper.py b/lib/python3.4/site-packages/pip/_vendor/cachecontrol/wrapper.py new file mode 100644 index 0000000..ea421aa --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/cachecontrol/wrapper.py @@ -0,0 +1,21 @@ +from .adapter import CacheControlAdapter +from .cache import DictCache + + +def CacheControl(sess, + cache=None, + cache_etags=True, + serializer=None, + heuristic=None): + + cache = cache or DictCache() + adapter = CacheControlAdapter( + cache, + cache_etags=cache_etags, + serializer=serializer, + heuristic=heuristic, + ) + sess.mount('http://', adapter) + sess.mount('https://', adapter) + + return sess diff --git a/lib/python3.4/site-packages/pip/_vendor/colorama/__init__.py b/lib/python3.4/site-packages/pip/_vendor/colorama/__init__.py new file mode 100644 index 0000000..8fc3f01 --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/colorama/__init__.py @@ -0,0 +1,7 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
+from .initialise import init, deinit, reinit, colorama_text +from .ansi import Fore, Back, Style, Cursor +from .ansitowin32 import AnsiToWin32 + +__version__ = '0.3.6' + diff --git a/lib/python3.4/site-packages/pip/_vendor/colorama/ansi.py b/lib/python3.4/site-packages/pip/_vendor/colorama/ansi.py new file mode 100644 index 0000000..7877658 --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/colorama/ansi.py @@ -0,0 +1,102 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. +''' +This module generates ANSI character codes to printing colors to terminals. +See: http://en.wikipedia.org/wiki/ANSI_escape_code +''' + +CSI = '\033[' +OSC = '\033]' +BEL = '\007' + + +def code_to_chars(code): + return CSI + str(code) + 'm' + +def set_title(title): + return OSC + '2;' + title + BEL + +def clear_screen(mode=2): + return CSI + str(mode) + 'J' + +def clear_line(mode=2): + return CSI + str(mode) + 'K' + + +class AnsiCodes(object): + def __init__(self): + # the subclasses declare class attributes which are numbers. + # Upon instantiation we define instance attributes, which are the same + # as the class attributes but wrapped with the ANSI escape sequence + for name in dir(self): + if not name.startswith('_'): + value = getattr(self, name) + setattr(self, name, code_to_chars(value)) + + +class AnsiCursor(object): + def UP(self, n=1): + return CSI + str(n) + 'A' + def DOWN(self, n=1): + return CSI + str(n) + 'B' + def FORWARD(self, n=1): + return CSI + str(n) + 'C' + def BACK(self, n=1): + return CSI + str(n) + 'D' + def POS(self, x=1, y=1): + return CSI + str(y) + ';' + str(x) + 'H' + + +class AnsiFore(AnsiCodes): + BLACK = 30 + RED = 31 + GREEN = 32 + YELLOW = 33 + BLUE = 34 + MAGENTA = 35 + CYAN = 36 + WHITE = 37 + RESET = 39 + + # These are fairly well supported, but not part of the standard. 
+ LIGHTBLACK_EX = 90 + LIGHTRED_EX = 91 + LIGHTGREEN_EX = 92 + LIGHTYELLOW_EX = 93 + LIGHTBLUE_EX = 94 + LIGHTMAGENTA_EX = 95 + LIGHTCYAN_EX = 96 + LIGHTWHITE_EX = 97 + + +class AnsiBack(AnsiCodes): + BLACK = 40 + RED = 41 + GREEN = 42 + YELLOW = 43 + BLUE = 44 + MAGENTA = 45 + CYAN = 46 + WHITE = 47 + RESET = 49 + + # These are fairly well supported, but not part of the standard. + LIGHTBLACK_EX = 100 + LIGHTRED_EX = 101 + LIGHTGREEN_EX = 102 + LIGHTYELLOW_EX = 103 + LIGHTBLUE_EX = 104 + LIGHTMAGENTA_EX = 105 + LIGHTCYAN_EX = 106 + LIGHTWHITE_EX = 107 + + +class AnsiStyle(AnsiCodes): + BRIGHT = 1 + DIM = 2 + NORMAL = 22 + RESET_ALL = 0 + +Fore = AnsiFore() +Back = AnsiBack() +Style = AnsiStyle() +Cursor = AnsiCursor() diff --git a/lib/python3.4/site-packages/pip/_vendor/colorama/ansitowin32.py b/lib/python3.4/site-packages/pip/_vendor/colorama/ansitowin32.py new file mode 100644 index 0000000..a750d2a --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/colorama/ansitowin32.py @@ -0,0 +1,232 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. +import re +import sys +import os + +from .ansi import AnsiFore, AnsiBack, AnsiStyle, Style +from .winterm import WinTerm, WinColor, WinStyle +from .win32 import windll, winapi_test + + +winterm = None +if windll is not None: + winterm = WinTerm() + + +def is_a_tty(stream): + return hasattr(stream, 'isatty') and stream.isatty() + + +class StreamWrapper(object): + ''' + Wraps a stream (such as stdout), acting as a transparent proxy for all + attribute access apart from method 'write()', which is delegated to our + Converter instance. + ''' + def __init__(self, wrapped, converter): + # double-underscore everything to prevent clashes with names of + # attributes on the wrapped stream object. 
+ self.__wrapped = wrapped + self.__convertor = converter + + def __getattr__(self, name): + return getattr(self.__wrapped, name) + + def write(self, text): + self.__convertor.write(text) + + +class AnsiToWin32(object): + ''' + Implements a 'write()' method which, on Windows, will strip ANSI character + sequences from the text, and if outputting to a tty, will convert them into + win32 function calls. + ''' + ANSI_CSI_RE = re.compile('\001?\033\[((?:\d|;)*)([a-zA-Z])\002?') # Control Sequence Introducer + ANSI_OSC_RE = re.compile('\001?\033\]((?:.|;)*?)(\x07)\002?') # Operating System Command + + def __init__(self, wrapped, convert=None, strip=None, autoreset=False): + # The wrapped stream (normally sys.stdout or sys.stderr) + self.wrapped = wrapped + + # should we reset colors to defaults after every .write() + self.autoreset = autoreset + + # create the proxy wrapping our output stream + self.stream = StreamWrapper(wrapped, self) + + on_windows = os.name == 'nt' + # We test if the WinAPI works, because even if we are on Windows + # we may be using a terminal that doesn't support the WinAPI + # (e.g. Cygwin Terminal). In this case it's up to the terminal + # to support the ANSI codes. + conversion_supported = on_windows and winapi_test() + + # should we strip ANSI sequences from our output? + if strip is None: + strip = conversion_supported or (not wrapped.closed and not is_a_tty(wrapped)) + self.strip = strip + + # should we should convert ANSI sequences into win32 calls? + if convert is None: + convert = conversion_supported and not wrapped.closed and is_a_tty(wrapped) + self.convert = convert + + # dict of ansi codes to win32 functions and parameters + self.win32_calls = self.get_win32_calls() + + # are we wrapping stderr? + self.on_stderr = self.wrapped is sys.stderr + + def should_wrap(self): + ''' + True if this class is actually needed. 
If false, then the output + stream will not be affected, nor will win32 calls be issued, so + wrapping stdout is not actually required. This will generally be + False on non-Windows platforms, unless optional functionality like + autoreset has been requested using kwargs to init() + ''' + return self.convert or self.strip or self.autoreset + + def get_win32_calls(self): + if self.convert and winterm: + return { + AnsiStyle.RESET_ALL: (winterm.reset_all, ), + AnsiStyle.BRIGHT: (winterm.style, WinStyle.BRIGHT), + AnsiStyle.DIM: (winterm.style, WinStyle.NORMAL), + AnsiStyle.NORMAL: (winterm.style, WinStyle.NORMAL), + AnsiFore.BLACK: (winterm.fore, WinColor.BLACK), + AnsiFore.RED: (winterm.fore, WinColor.RED), + AnsiFore.GREEN: (winterm.fore, WinColor.GREEN), + AnsiFore.YELLOW: (winterm.fore, WinColor.YELLOW), + AnsiFore.BLUE: (winterm.fore, WinColor.BLUE), + AnsiFore.MAGENTA: (winterm.fore, WinColor.MAGENTA), + AnsiFore.CYAN: (winterm.fore, WinColor.CYAN), + AnsiFore.WHITE: (winterm.fore, WinColor.GREY), + AnsiFore.RESET: (winterm.fore, ), + AnsiFore.LIGHTBLACK_EX: (winterm.fore, WinColor.BLACK, True), + AnsiFore.LIGHTRED_EX: (winterm.fore, WinColor.RED, True), + AnsiFore.LIGHTGREEN_EX: (winterm.fore, WinColor.GREEN, True), + AnsiFore.LIGHTYELLOW_EX: (winterm.fore, WinColor.YELLOW, True), + AnsiFore.LIGHTBLUE_EX: (winterm.fore, WinColor.BLUE, True), + AnsiFore.LIGHTMAGENTA_EX: (winterm.fore, WinColor.MAGENTA, True), + AnsiFore.LIGHTCYAN_EX: (winterm.fore, WinColor.CYAN, True), + AnsiFore.LIGHTWHITE_EX: (winterm.fore, WinColor.GREY, True), + AnsiBack.BLACK: (winterm.back, WinColor.BLACK), + AnsiBack.RED: (winterm.back, WinColor.RED), + AnsiBack.GREEN: (winterm.back, WinColor.GREEN), + AnsiBack.YELLOW: (winterm.back, WinColor.YELLOW), + AnsiBack.BLUE: (winterm.back, WinColor.BLUE), + AnsiBack.MAGENTA: (winterm.back, WinColor.MAGENTA), + AnsiBack.CYAN: (winterm.back, WinColor.CYAN), + AnsiBack.WHITE: (winterm.back, WinColor.GREY), + AnsiBack.RESET: (winterm.back, ), + 
AnsiBack.LIGHTBLACK_EX: (winterm.back, WinColor.BLACK, True), + AnsiBack.LIGHTRED_EX: (winterm.back, WinColor.RED, True), + AnsiBack.LIGHTGREEN_EX: (winterm.back, WinColor.GREEN, True), + AnsiBack.LIGHTYELLOW_EX: (winterm.back, WinColor.YELLOW, True), + AnsiBack.LIGHTBLUE_EX: (winterm.back, WinColor.BLUE, True), + AnsiBack.LIGHTMAGENTA_EX: (winterm.back, WinColor.MAGENTA, True), + AnsiBack.LIGHTCYAN_EX: (winterm.back, WinColor.CYAN, True), + AnsiBack.LIGHTWHITE_EX: (winterm.back, WinColor.GREY, True), + } + return dict() + + def write(self, text): + if self.strip or self.convert: + self.write_and_convert(text) + else: + self.wrapped.write(text) + self.wrapped.flush() + if self.autoreset: + self.reset_all() + + + def reset_all(self): + if self.convert: + self.call_win32('m', (0,)) + elif not self.strip and not self.wrapped.closed: + self.wrapped.write(Style.RESET_ALL) + + + def write_and_convert(self, text): + ''' + Write the given text to our wrapped stream, stripping any ANSI + sequences from the text, and optionally converting them into win32 + calls. 
+ ''' + cursor = 0 + text = self.convert_osc(text) + for match in self.ANSI_CSI_RE.finditer(text): + start, end = match.span() + self.write_plain_text(text, cursor, start) + self.convert_ansi(*match.groups()) + cursor = end + self.write_plain_text(text, cursor, len(text)) + + + def write_plain_text(self, text, start, end): + if start < end: + self.wrapped.write(text[start:end]) + self.wrapped.flush() + + + def convert_ansi(self, paramstring, command): + if self.convert: + params = self.extract_params(command, paramstring) + self.call_win32(command, params) + + + def extract_params(self, command, paramstring): + if command in 'Hf': + params = tuple(int(p) if len(p) != 0 else 1 for p in paramstring.split(';')) + while len(params) < 2: + # defaults: + params = params + (1,) + else: + params = tuple(int(p) for p in paramstring.split(';') if len(p) != 0) + if len(params) == 0: + # defaults: + if command in 'JKm': + params = (0,) + elif command in 'ABCD': + params = (1,) + + return params + + + def call_win32(self, command, params): + if command == 'm': + for param in params: + if param in self.win32_calls: + func_args = self.win32_calls[param] + func = func_args[0] + args = func_args[1:] + kwargs = dict(on_stderr=self.on_stderr) + func(*args, **kwargs) + elif command in 'J': + winterm.erase_screen(params[0], on_stderr=self.on_stderr) + elif command in 'K': + winterm.erase_line(params[0], on_stderr=self.on_stderr) + elif command in 'Hf': # cursor position - absolute + winterm.set_cursor_position(params, on_stderr=self.on_stderr) + elif command in 'ABCD': # cursor position - relative + n = params[0] + # A - up, B - down, C - forward, D - back + x, y = {'A': (0, -n), 'B': (0, n), 'C': (n, 0), 'D': (-n, 0)}[command] + winterm.cursor_adjust(x, y, on_stderr=self.on_stderr) + + + def convert_osc(self, text): + for match in self.ANSI_OSC_RE.finditer(text): + start, end = match.span() + text = text[:start] + text[end:] + paramstring, command = match.groups() + if command in 
'\x07': # \x07 = BEL + params = paramstring.split(";") + # 0 - change title and icon (we will only change title) + # 1 - change icon (we don't support this) + # 2 - change title + if params[0] in '02': + winterm.set_title(params[1]) + return text diff --git a/lib/python3.4/site-packages/pip/_vendor/colorama/initialise.py b/lib/python3.4/site-packages/pip/_vendor/colorama/initialise.py new file mode 100644 index 0000000..4bce9f2 --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/colorama/initialise.py @@ -0,0 +1,81 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. +import atexit +import contextlib +import sys + +from .ansitowin32 import AnsiToWin32 + + +orig_stdout = None +orig_stderr = None + +wrapped_stdout = None +wrapped_stderr = None + +atexit_done = False + + +def reset_all(): + AnsiToWin32(orig_stdout).reset_all() + + +def init(autoreset=False, convert=None, strip=None, wrap=True): + + if not wrap and any([autoreset, convert, strip]): + raise ValueError('wrap=False conflicts with any other arg=True') + + global wrapped_stdout, wrapped_stderr + global orig_stdout, orig_stderr + + orig_stdout = sys.stdout + orig_stderr = sys.stderr + + if sys.stdout is None: + wrapped_stdout = None + else: + sys.stdout = wrapped_stdout = \ + wrap_stream(orig_stdout, convert, strip, autoreset, wrap) + if sys.stderr is None: + wrapped_stderr = None + else: + sys.stderr = wrapped_stderr = \ + wrap_stream(orig_stderr, convert, strip, autoreset, wrap) + + global atexit_done + if not atexit_done: + atexit.register(reset_all) + atexit_done = True + + +def deinit(): + if orig_stdout is not None: + sys.stdout = orig_stdout + if orig_stderr is not None: + sys.stderr = orig_stderr + + +@contextlib.contextmanager +def colorama_text(*args, **kwargs): + init(*args, **kwargs) + try: + yield + finally: + deinit() + + +def reinit(): + if wrapped_stdout is not None: + sys.stdout = wrapped_stdout + if wrapped_stderr is not None: + sys.stderr = wrapped_stderr 
+ + +def wrap_stream(stream, convert, strip, autoreset, wrap): + if wrap: + wrapper = AnsiToWin32(stream, + convert=convert, strip=strip, autoreset=autoreset) + if wrapper.should_wrap(): + stream = wrapper.stream + return stream + + diff --git a/lib/python3.4/site-packages/pip/_vendor/colorama/win32.py b/lib/python3.4/site-packages/pip/_vendor/colorama/win32.py new file mode 100644 index 0000000..3d1d2f2 --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/colorama/win32.py @@ -0,0 +1,154 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. + +# from winbase.h +STDOUT = -11 +STDERR = -12 + +try: + import ctypes + from ctypes import LibraryLoader + windll = LibraryLoader(ctypes.WinDLL) + from ctypes import wintypes +except (AttributeError, ImportError): + windll = None + SetConsoleTextAttribute = lambda *_: None + winapi_test = lambda *_: None +else: + from ctypes import byref, Structure, c_char, POINTER + + COORD = wintypes._COORD + + class CONSOLE_SCREEN_BUFFER_INFO(Structure): + """struct in wincon.h.""" + _fields_ = [ + ("dwSize", COORD), + ("dwCursorPosition", COORD), + ("wAttributes", wintypes.WORD), + ("srWindow", wintypes.SMALL_RECT), + ("dwMaximumWindowSize", COORD), + ] + def __str__(self): + return '(%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d)' % ( + self.dwSize.Y, self.dwSize.X + , self.dwCursorPosition.Y, self.dwCursorPosition.X + , self.wAttributes + , self.srWindow.Top, self.srWindow.Left, self.srWindow.Bottom, self.srWindow.Right + , self.dwMaximumWindowSize.Y, self.dwMaximumWindowSize.X + ) + + _GetStdHandle = windll.kernel32.GetStdHandle + _GetStdHandle.argtypes = [ + wintypes.DWORD, + ] + _GetStdHandle.restype = wintypes.HANDLE + + _GetConsoleScreenBufferInfo = windll.kernel32.GetConsoleScreenBufferInfo + _GetConsoleScreenBufferInfo.argtypes = [ + wintypes.HANDLE, + POINTER(CONSOLE_SCREEN_BUFFER_INFO), + ] + _GetConsoleScreenBufferInfo.restype = wintypes.BOOL + + _SetConsoleTextAttribute = 
windll.kernel32.SetConsoleTextAttribute + _SetConsoleTextAttribute.argtypes = [ + wintypes.HANDLE, + wintypes.WORD, + ] + _SetConsoleTextAttribute.restype = wintypes.BOOL + + _SetConsoleCursorPosition = windll.kernel32.SetConsoleCursorPosition + _SetConsoleCursorPosition.argtypes = [ + wintypes.HANDLE, + COORD, + ] + _SetConsoleCursorPosition.restype = wintypes.BOOL + + _FillConsoleOutputCharacterA = windll.kernel32.FillConsoleOutputCharacterA + _FillConsoleOutputCharacterA.argtypes = [ + wintypes.HANDLE, + c_char, + wintypes.DWORD, + COORD, + POINTER(wintypes.DWORD), + ] + _FillConsoleOutputCharacterA.restype = wintypes.BOOL + + _FillConsoleOutputAttribute = windll.kernel32.FillConsoleOutputAttribute + _FillConsoleOutputAttribute.argtypes = [ + wintypes.HANDLE, + wintypes.WORD, + wintypes.DWORD, + COORD, + POINTER(wintypes.DWORD), + ] + _FillConsoleOutputAttribute.restype = wintypes.BOOL + + _SetConsoleTitleW = windll.kernel32.SetConsoleTitleA + _SetConsoleTitleW.argtypes = [ + wintypes.LPCSTR + ] + _SetConsoleTitleW.restype = wintypes.BOOL + + handles = { + STDOUT: _GetStdHandle(STDOUT), + STDERR: _GetStdHandle(STDERR), + } + + def winapi_test(): + handle = handles[STDOUT] + csbi = CONSOLE_SCREEN_BUFFER_INFO() + success = _GetConsoleScreenBufferInfo( + handle, byref(csbi)) + return bool(success) + + def GetConsoleScreenBufferInfo(stream_id=STDOUT): + handle = handles[stream_id] + csbi = CONSOLE_SCREEN_BUFFER_INFO() + success = _GetConsoleScreenBufferInfo( + handle, byref(csbi)) + return csbi + + def SetConsoleTextAttribute(stream_id, attrs): + handle = handles[stream_id] + return _SetConsoleTextAttribute(handle, attrs) + + def SetConsoleCursorPosition(stream_id, position, adjust=True): + position = COORD(*position) + # If the position is out of range, do nothing. + if position.Y <= 0 or position.X <= 0: + return + # Adjust for Windows' SetConsoleCursorPosition: + # 1. being 0-based, while ANSI is 1-based. + # 2. expecting (x,y), while ANSI uses (y,x). 
+ adjusted_position = COORD(position.Y - 1, position.X - 1) + if adjust: + # Adjust for viewport's scroll position + sr = GetConsoleScreenBufferInfo(STDOUT).srWindow + adjusted_position.Y += sr.Top + adjusted_position.X += sr.Left + # Resume normal processing + handle = handles[stream_id] + return _SetConsoleCursorPosition(handle, adjusted_position) + + def FillConsoleOutputCharacter(stream_id, char, length, start): + handle = handles[stream_id] + char = c_char(char.encode()) + length = wintypes.DWORD(length) + num_written = wintypes.DWORD(0) + # Note that this is hard-coded for ANSI (vs wide) bytes. + success = _FillConsoleOutputCharacterA( + handle, char, length, start, byref(num_written)) + return num_written.value + + def FillConsoleOutputAttribute(stream_id, attr, length, start): + ''' FillConsoleOutputAttribute( hConsole, csbi.wAttributes, dwConSize, coordScreen, &cCharsWritten )''' + handle = handles[stream_id] + attribute = wintypes.WORD(attr) + length = wintypes.DWORD(length) + num_written = wintypes.DWORD(0) + # Note that this is hard-coded for ANSI (vs wide) bytes. + return _FillConsoleOutputAttribute( + handle, attribute, length, start, byref(num_written)) + + def SetConsoleTitle(title): + return _SetConsoleTitleW(title) diff --git a/lib/python3.4/site-packages/pip/_vendor/colorama/winterm.py b/lib/python3.4/site-packages/pip/_vendor/colorama/winterm.py new file mode 100644 index 0000000..60309d3 --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/colorama/winterm.py @@ -0,0 +1,162 @@ +# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. +from . 
import win32 + + +# from wincon.h +class WinColor(object): + BLACK = 0 + BLUE = 1 + GREEN = 2 + CYAN = 3 + RED = 4 + MAGENTA = 5 + YELLOW = 6 + GREY = 7 + +# from wincon.h +class WinStyle(object): + NORMAL = 0x00 # dim text, dim background + BRIGHT = 0x08 # bright text, dim background + BRIGHT_BACKGROUND = 0x80 # dim text, bright background + +class WinTerm(object): + + def __init__(self): + self._default = win32.GetConsoleScreenBufferInfo(win32.STDOUT).wAttributes + self.set_attrs(self._default) + self._default_fore = self._fore + self._default_back = self._back + self._default_style = self._style + # In order to emulate LIGHT_EX in windows, we borrow the BRIGHT style. + # So that LIGHT_EX colors and BRIGHT style do not clobber each other, + # we track them separately, since LIGHT_EX is overwritten by Fore/Back + # and BRIGHT is overwritten by Style codes. + self._light = 0 + + def get_attrs(self): + return self._fore + self._back * 16 + (self._style | self._light) + + def set_attrs(self, value): + self._fore = value & 7 + self._back = (value >> 4) & 7 + self._style = value & (WinStyle.BRIGHT | WinStyle.BRIGHT_BACKGROUND) + + def reset_all(self, on_stderr=None): + self.set_attrs(self._default) + self.set_console(attrs=self._default) + + def fore(self, fore=None, light=False, on_stderr=False): + if fore is None: + fore = self._default_fore + self._fore = fore + # Emulate LIGHT_EX with BRIGHT Style + if light: + self._light |= WinStyle.BRIGHT + else: + self._light &= ~WinStyle.BRIGHT + self.set_console(on_stderr=on_stderr) + + def back(self, back=None, light=False, on_stderr=False): + if back is None: + back = self._default_back + self._back = back + # Emulate LIGHT_EX with BRIGHT_BACKGROUND Style + if light: + self._light |= WinStyle.BRIGHT_BACKGROUND + else: + self._light &= ~WinStyle.BRIGHT_BACKGROUND + self.set_console(on_stderr=on_stderr) + + def style(self, style=None, on_stderr=False): + if style is None: + style = self._default_style + self._style = style + 
self.set_console(on_stderr=on_stderr) + + def set_console(self, attrs=None, on_stderr=False): + if attrs is None: + attrs = self.get_attrs() + handle = win32.STDOUT + if on_stderr: + handle = win32.STDERR + win32.SetConsoleTextAttribute(handle, attrs) + + def get_position(self, handle): + position = win32.GetConsoleScreenBufferInfo(handle).dwCursorPosition + # Because Windows coordinates are 0-based, + # and win32.SetConsoleCursorPosition expects 1-based. + position.X += 1 + position.Y += 1 + return position + + def set_cursor_position(self, position=None, on_stderr=False): + if position is None: + # I'm not currently tracking the position, so there is no default. + # position = self.get_position() + return + handle = win32.STDOUT + if on_stderr: + handle = win32.STDERR + win32.SetConsoleCursorPosition(handle, position) + + def cursor_adjust(self, x, y, on_stderr=False): + handle = win32.STDOUT + if on_stderr: + handle = win32.STDERR + position = self.get_position(handle) + adjusted_position = (position.Y + y, position.X + x) + win32.SetConsoleCursorPosition(handle, adjusted_position, adjust=False) + + def erase_screen(self, mode=0, on_stderr=False): + # 0 should clear from the cursor to the end of the screen. + # 1 should clear from the cursor to the beginning of the screen. 
+ # 2 should clear the entire screen, and move cursor to (1,1) + handle = win32.STDOUT + if on_stderr: + handle = win32.STDERR + csbi = win32.GetConsoleScreenBufferInfo(handle) + # get the number of character cells in the current buffer + cells_in_screen = csbi.dwSize.X * csbi.dwSize.Y + # get number of character cells before current cursor position + cells_before_cursor = csbi.dwSize.X * csbi.dwCursorPosition.Y + csbi.dwCursorPosition.X + if mode == 0: + from_coord = csbi.dwCursorPosition + cells_to_erase = cells_in_screen - cells_before_cursor + if mode == 1: + from_coord = win32.COORD(0, 0) + cells_to_erase = cells_before_cursor + elif mode == 2: + from_coord = win32.COORD(0, 0) + cells_to_erase = cells_in_screen + # fill the entire screen with blanks + win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord) + # now set the buffer's attributes accordingly + win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord) + if mode == 2: + # put the cursor where needed + win32.SetConsoleCursorPosition(handle, (1, 1)) + + def erase_line(self, mode=0, on_stderr=False): + # 0 should clear from the cursor to the end of the line. + # 1 should clear from the cursor to the beginning of the line. + # 2 should clear the entire line. 
+ handle = win32.STDOUT + if on_stderr: + handle = win32.STDERR + csbi = win32.GetConsoleScreenBufferInfo(handle) + if mode == 0: + from_coord = csbi.dwCursorPosition + cells_to_erase = csbi.dwSize.X - csbi.dwCursorPosition.X + if mode == 1: + from_coord = win32.COORD(0, csbi.dwCursorPosition.Y) + cells_to_erase = csbi.dwCursorPosition.X + elif mode == 2: + from_coord = win32.COORD(0, csbi.dwCursorPosition.Y) + cells_to_erase = csbi.dwSize.X + # fill the entire screen with blanks + win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord) + # now set the buffer's attributes accordingly + win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord) + + def set_title(self, title): + win32.SetConsoleTitle(title) diff --git a/lib/python3.4/site-packages/pip/_vendor/distlib/__init__.py b/lib/python3.4/site-packages/pip/_vendor/distlib/__init__.py new file mode 100644 index 0000000..c2421d8 --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/distlib/__init__.py @@ -0,0 +1,23 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2016 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +import logging + +__version__ = '0.2.2' + +class DistlibException(Exception): + pass + +try: + from logging import NullHandler +except ImportError: # pragma: no cover + class NullHandler(logging.Handler): + def handle(self, record): pass + def emit(self, record): pass + def createLock(self): self.lock = None + +logger = logging.getLogger(__name__) +logger.addHandler(NullHandler()) diff --git a/lib/python3.4/site-packages/pip/_vendor/distlib/_backport/__init__.py b/lib/python3.4/site-packages/pip/_vendor/distlib/_backport/__init__.py new file mode 100644 index 0000000..f7dbf4c --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/distlib/_backport/__init__.py @@ -0,0 +1,6 @@ +"""Modules copied from Python 3 standard libraries, for internal use only. 
+ +Individual classes and functions are found in d2._backport.misc. Intended +usage is to always import things missing from 3.1 from that module: the +built-in/stdlib objects will be used if found. +""" diff --git a/lib/python3.4/site-packages/pip/_vendor/distlib/_backport/misc.py b/lib/python3.4/site-packages/pip/_vendor/distlib/_backport/misc.py new file mode 100644 index 0000000..cfb318d --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/distlib/_backport/misc.py @@ -0,0 +1,41 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +"""Backports for individual classes and functions.""" + +import os +import sys + +__all__ = ['cache_from_source', 'callable', 'fsencode'] + + +try: + from imp import cache_from_source +except ImportError: + def cache_from_source(py_file, debug=__debug__): + ext = debug and 'c' or 'o' + return py_file + ext + + +try: + callable = callable +except NameError: + from collections import Callable + + def callable(obj): + return isinstance(obj, Callable) + + +try: + fsencode = os.fsencode +except AttributeError: + def fsencode(filename): + if isinstance(filename, bytes): + return filename + elif isinstance(filename, str): + return filename.encode(sys.getfilesystemencoding()) + else: + raise TypeError("expect bytes or str, not %s" % + type(filename).__name__) diff --git a/lib/python3.4/site-packages/pip/_vendor/distlib/_backport/shutil.py b/lib/python3.4/site-packages/pip/_vendor/distlib/_backport/shutil.py new file mode 100644 index 0000000..9e2e234 --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/distlib/_backport/shutil.py @@ -0,0 +1,761 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +"""Utility functions for copying and archiving files and directory trees. + +XXX The functions here don't copy the resource fork or other metadata on Mac. 
+ +""" + +import os +import sys +import stat +from os.path import abspath +import fnmatch +import collections +import errno +from . import tarfile + +try: + import bz2 + _BZ2_SUPPORTED = True +except ImportError: + _BZ2_SUPPORTED = False + +try: + from pwd import getpwnam +except ImportError: + getpwnam = None + +try: + from grp import getgrnam +except ImportError: + getgrnam = None + +__all__ = ["copyfileobj", "copyfile", "copymode", "copystat", "copy", "copy2", + "copytree", "move", "rmtree", "Error", "SpecialFileError", + "ExecError", "make_archive", "get_archive_formats", + "register_archive_format", "unregister_archive_format", + "get_unpack_formats", "register_unpack_format", + "unregister_unpack_format", "unpack_archive", "ignore_patterns"] + +class Error(EnvironmentError): + pass + +class SpecialFileError(EnvironmentError): + """Raised when trying to do a kind of operation (e.g. copying) which is + not supported on a special file (e.g. a named pipe)""" + +class ExecError(EnvironmentError): + """Raised when a command could not be executed""" + +class ReadError(EnvironmentError): + """Raised when an archive cannot be read""" + +class RegistryError(Exception): + """Raised when a registery operation with the archiving + and unpacking registeries fails""" + + +try: + WindowsError +except NameError: + WindowsError = None + +def copyfileobj(fsrc, fdst, length=16*1024): + """copy data from file-like object fsrc to file-like object fdst""" + while 1: + buf = fsrc.read(length) + if not buf: + break + fdst.write(buf) + +def _samefile(src, dst): + # Macintosh, Unix. + if hasattr(os.path, 'samefile'): + try: + return os.path.samefile(src, dst) + except OSError: + return False + + # All other platforms: check for same pathname. 
+ return (os.path.normcase(os.path.abspath(src)) == + os.path.normcase(os.path.abspath(dst))) + +def copyfile(src, dst): + """Copy data from src to dst""" + if _samefile(src, dst): + raise Error("`%s` and `%s` are the same file" % (src, dst)) + + for fn in [src, dst]: + try: + st = os.stat(fn) + except OSError: + # File most likely does not exist + pass + else: + # XXX What about other special files? (sockets, devices...) + if stat.S_ISFIFO(st.st_mode): + raise SpecialFileError("`%s` is a named pipe" % fn) + + with open(src, 'rb') as fsrc: + with open(dst, 'wb') as fdst: + copyfileobj(fsrc, fdst) + +def copymode(src, dst): + """Copy mode bits from src to dst""" + if hasattr(os, 'chmod'): + st = os.stat(src) + mode = stat.S_IMODE(st.st_mode) + os.chmod(dst, mode) + +def copystat(src, dst): + """Copy all stat info (mode bits, atime, mtime, flags) from src to dst""" + st = os.stat(src) + mode = stat.S_IMODE(st.st_mode) + if hasattr(os, 'utime'): + os.utime(dst, (st.st_atime, st.st_mtime)) + if hasattr(os, 'chmod'): + os.chmod(dst, mode) + if hasattr(os, 'chflags') and hasattr(st, 'st_flags'): + try: + os.chflags(dst, st.st_flags) + except OSError as why: + if (not hasattr(errno, 'EOPNOTSUPP') or + why.errno != errno.EOPNOTSUPP): + raise + +def copy(src, dst): + """Copy data and mode bits ("cp src dst"). + + The destination may be a directory. + + """ + if os.path.isdir(dst): + dst = os.path.join(dst, os.path.basename(src)) + copyfile(src, dst) + copymode(src, dst) + +def copy2(src, dst): + """Copy data and all stat info ("cp -p src dst"). + + The destination may be a directory. + + """ + if os.path.isdir(dst): + dst = os.path.join(dst, os.path.basename(src)) + copyfile(src, dst) + copystat(src, dst) + +def ignore_patterns(*patterns): + """Function that can be used as copytree() ignore parameter. 
+ + Patterns is a sequence of glob-style patterns + that are used to exclude files""" + def _ignore_patterns(path, names): + ignored_names = [] + for pattern in patterns: + ignored_names.extend(fnmatch.filter(names, pattern)) + return set(ignored_names) + return _ignore_patterns + +def copytree(src, dst, symlinks=False, ignore=None, copy_function=copy2, + ignore_dangling_symlinks=False): + """Recursively copy a directory tree. + + The destination directory must not already exist. + If exception(s) occur, an Error is raised with a list of reasons. + + If the optional symlinks flag is true, symbolic links in the + source tree result in symbolic links in the destination tree; if + it is false, the contents of the files pointed to by symbolic + links are copied. If the file pointed by the symlink doesn't + exist, an exception will be added in the list of errors raised in + an Error exception at the end of the copy process. + + You can set the optional ignore_dangling_symlinks flag to true if you + want to silence this exception. Notice that this has no effect on + platforms that don't support os.symlink. + + The optional ignore argument is a callable. If given, it + is called with the `src` parameter, which is the directory + being visited by copytree(), and `names` which is the list of + `src` contents, as returned by os.listdir(): + + callable(src, names) -> ignored_names + + Since copytree() is called recursively, the callable will be + called once for each directory that is copied. It returns a + list of names relative to the `src` directory that should + not be copied. + + The optional copy_function argument is a callable that will be used + to copy each file. It will be called with the source path and the + destination path as arguments. By default, copy2() is used, but any + function that supports the same signature (like copy()) can be used. 
+ + """ + names = os.listdir(src) + if ignore is not None: + ignored_names = ignore(src, names) + else: + ignored_names = set() + + os.makedirs(dst) + errors = [] + for name in names: + if name in ignored_names: + continue + srcname = os.path.join(src, name) + dstname = os.path.join(dst, name) + try: + if os.path.islink(srcname): + linkto = os.readlink(srcname) + if symlinks: + os.symlink(linkto, dstname) + else: + # ignore dangling symlink if the flag is on + if not os.path.exists(linkto) and ignore_dangling_symlinks: + continue + # otherwise let the copy occurs. copy2 will raise an error + copy_function(srcname, dstname) + elif os.path.isdir(srcname): + copytree(srcname, dstname, symlinks, ignore, copy_function) + else: + # Will raise a SpecialFileError for unsupported file types + copy_function(srcname, dstname) + # catch the Error from the recursive copytree so that we can + # continue with other files + except Error as err: + errors.extend(err.args[0]) + except EnvironmentError as why: + errors.append((srcname, dstname, str(why))) + try: + copystat(src, dst) + except OSError as why: + if WindowsError is not None and isinstance(why, WindowsError): + # Copying file access times may fail on Windows + pass + else: + errors.extend((src, dst, str(why))) + if errors: + raise Error(errors) + +def rmtree(path, ignore_errors=False, onerror=None): + """Recursively delete a directory tree. + + If ignore_errors is set, errors are ignored; otherwise, if onerror + is set, it is called to handle the error with arguments (func, + path, exc_info) where func is os.listdir, os.remove, or os.rmdir; + path is the argument to that function that caused it to fail; and + exc_info is a tuple returned by sys.exc_info(). If ignore_errors + is false and onerror is None, an exception is raised. 
+ + """ + if ignore_errors: + def onerror(*args): + pass + elif onerror is None: + def onerror(*args): + raise + try: + if os.path.islink(path): + # symlinks to directories are forbidden, see bug #1669 + raise OSError("Cannot call rmtree on a symbolic link") + except OSError: + onerror(os.path.islink, path, sys.exc_info()) + # can't continue even if onerror hook returns + return + names = [] + try: + names = os.listdir(path) + except os.error: + onerror(os.listdir, path, sys.exc_info()) + for name in names: + fullname = os.path.join(path, name) + try: + mode = os.lstat(fullname).st_mode + except os.error: + mode = 0 + if stat.S_ISDIR(mode): + rmtree(fullname, ignore_errors, onerror) + else: + try: + os.remove(fullname) + except os.error: + onerror(os.remove, fullname, sys.exc_info()) + try: + os.rmdir(path) + except os.error: + onerror(os.rmdir, path, sys.exc_info()) + + +def _basename(path): + # A basename() variant which first strips the trailing slash, if present. + # Thus we always get the last component of the path, even for directories. + return os.path.basename(path.rstrip(os.path.sep)) + +def move(src, dst): + """Recursively move a file or directory to another location. This is + similar to the Unix "mv" command. + + If the destination is a directory or a symlink to a directory, the source + is moved inside the directory. The destination path must not already + exist. + + If the destination already exists but is not a directory, it may be + overwritten depending on os.rename() semantics. + + If the destination is on our current filesystem, then rename() is used. + Otherwise, src is copied to the destination and then removed. + A lot more could be done here... A look at a mv.c shows a lot of + the issues this implementation glosses over. + + """ + real_dst = dst + if os.path.isdir(dst): + if _samefile(src, dst): + # We might be on a case insensitive filesystem, + # perform the rename anyway. 
+ os.rename(src, dst) + return + + real_dst = os.path.join(dst, _basename(src)) + if os.path.exists(real_dst): + raise Error("Destination path '%s' already exists" % real_dst) + try: + os.rename(src, real_dst) + except OSError: + if os.path.isdir(src): + if _destinsrc(src, dst): + raise Error("Cannot move a directory '%s' into itself '%s'." % (src, dst)) + copytree(src, real_dst, symlinks=True) + rmtree(src) + else: + copy2(src, real_dst) + os.unlink(src) + +def _destinsrc(src, dst): + src = abspath(src) + dst = abspath(dst) + if not src.endswith(os.path.sep): + src += os.path.sep + if not dst.endswith(os.path.sep): + dst += os.path.sep + return dst.startswith(src) + +def _get_gid(name): + """Returns a gid, given a group name.""" + if getgrnam is None or name is None: + return None + try: + result = getgrnam(name) + except KeyError: + result = None + if result is not None: + return result[2] + return None + +def _get_uid(name): + """Returns an uid, given a user name.""" + if getpwnam is None or name is None: + return None + try: + result = getpwnam(name) + except KeyError: + result = None + if result is not None: + return result[2] + return None + +def _make_tarball(base_name, base_dir, compress="gzip", verbose=0, dry_run=0, + owner=None, group=None, logger=None): + """Create a (possibly compressed) tar file from all the files under + 'base_dir'. + + 'compress' must be "gzip" (the default), "bzip2", or None. + + 'owner' and 'group' can be used to define an owner and a group for the + archive that is being built. If not provided, the current owner and group + will be used. + + The output tar file will be named 'base_name' + ".tar", possibly plus + the appropriate compression extension (".gz", or ".bz2"). + + Returns the output filename. 
+ """ + tar_compression = {'gzip': 'gz', None: ''} + compress_ext = {'gzip': '.gz'} + + if _BZ2_SUPPORTED: + tar_compression['bzip2'] = 'bz2' + compress_ext['bzip2'] = '.bz2' + + # flags for compression program, each element of list will be an argument + if compress is not None and compress not in compress_ext: + raise ValueError("bad value for 'compress', or compression format not " + "supported : {0}".format(compress)) + + archive_name = base_name + '.tar' + compress_ext.get(compress, '') + archive_dir = os.path.dirname(archive_name) + + if not os.path.exists(archive_dir): + if logger is not None: + logger.info("creating %s", archive_dir) + if not dry_run: + os.makedirs(archive_dir) + + # creating the tarball + if logger is not None: + logger.info('Creating tar archive') + + uid = _get_uid(owner) + gid = _get_gid(group) + + def _set_uid_gid(tarinfo): + if gid is not None: + tarinfo.gid = gid + tarinfo.gname = group + if uid is not None: + tarinfo.uid = uid + tarinfo.uname = owner + return tarinfo + + if not dry_run: + tar = tarfile.open(archive_name, 'w|%s' % tar_compression[compress]) + try: + tar.add(base_dir, filter=_set_uid_gid) + finally: + tar.close() + + return archive_name + +def _call_external_zip(base_dir, zip_filename, verbose=False, dry_run=False): + # XXX see if we want to keep an external call here + if verbose: + zipoptions = "-r" + else: + zipoptions = "-rq" + from distutils.errors import DistutilsExecError + from distutils.spawn import spawn + try: + spawn(["zip", zipoptions, zip_filename, base_dir], dry_run=dry_run) + except DistutilsExecError: + # XXX really should distinguish between "couldn't find + # external 'zip' command" and "zip failed". + raise ExecError("unable to create zip file '%s': " + "could neither import the 'zipfile' module nor " + "find a standalone zip utility") % zip_filename + +def _make_zipfile(base_name, base_dir, verbose=0, dry_run=0, logger=None): + """Create a zip file from all the files under 'base_dir'. 
+ + The output zip file will be named 'base_name' + ".zip". Uses either the + "zipfile" Python module (if available) or the InfoZIP "zip" utility + (if installed and found on the default search path). If neither tool is + available, raises ExecError. Returns the name of the output zip + file. + """ + zip_filename = base_name + ".zip" + archive_dir = os.path.dirname(base_name) + + if not os.path.exists(archive_dir): + if logger is not None: + logger.info("creating %s", archive_dir) + if not dry_run: + os.makedirs(archive_dir) + + # If zipfile module is not available, try spawning an external 'zip' + # command. + try: + import zipfile + except ImportError: + zipfile = None + + if zipfile is None: + _call_external_zip(base_dir, zip_filename, verbose, dry_run) + else: + if logger is not None: + logger.info("creating '%s' and adding '%s' to it", + zip_filename, base_dir) + + if not dry_run: + zip = zipfile.ZipFile(zip_filename, "w", + compression=zipfile.ZIP_DEFLATED) + + for dirpath, dirnames, filenames in os.walk(base_dir): + for name in filenames: + path = os.path.normpath(os.path.join(dirpath, name)) + if os.path.isfile(path): + zip.write(path, path) + if logger is not None: + logger.info("adding '%s'", path) + zip.close() + + return zip_filename + +_ARCHIVE_FORMATS = { + 'gztar': (_make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"), + 'bztar': (_make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"), + 'tar': (_make_tarball, [('compress', None)], "uncompressed tar file"), + 'zip': (_make_zipfile, [], "ZIP file"), + } + +if _BZ2_SUPPORTED: + _ARCHIVE_FORMATS['bztar'] = (_make_tarball, [('compress', 'bzip2')], + "bzip2'ed tar-file") + +def get_archive_formats(): + """Returns a list of supported formats for archiving and unarchiving. 
+ + Each element of the returned sequence is a tuple (name, description) + """ + formats = [(name, registry[2]) for name, registry in + _ARCHIVE_FORMATS.items()] + formats.sort() + return formats + +def register_archive_format(name, function, extra_args=None, description=''): + """Registers an archive format. + + name is the name of the format. function is the callable that will be + used to create archives. If provided, extra_args is a sequence of + (name, value) tuples that will be passed as arguments to the callable. + description can be provided to describe the format, and will be returned + by the get_archive_formats() function. + """ + if extra_args is None: + extra_args = [] + if not isinstance(function, collections.Callable): + raise TypeError('The %s object is not callable' % function) + if not isinstance(extra_args, (tuple, list)): + raise TypeError('extra_args needs to be a sequence') + for element in extra_args: + if not isinstance(element, (tuple, list)) or len(element) !=2: + raise TypeError('extra_args elements are : (arg_name, value)') + + _ARCHIVE_FORMATS[name] = (function, extra_args, description) + +def unregister_archive_format(name): + del _ARCHIVE_FORMATS[name] + +def make_archive(base_name, format, root_dir=None, base_dir=None, verbose=0, + dry_run=0, owner=None, group=None, logger=None): + """Create an archive file (eg. zip or tar). + + 'base_name' is the name of the file to create, minus any format-specific + extension; 'format' is the archive format: one of "zip", "tar", "bztar" + or "gztar". + + 'root_dir' is a directory that will be the root directory of the + archive; ie. we typically chdir into 'root_dir' before creating the + archive. 'base_dir' is the directory where we start archiving from; + ie. 'base_dir' will be the common prefix of all files and + directories in the archive. 'root_dir' and 'base_dir' both default + to the current directory. Returns the name of the archive file. 
+ + 'owner' and 'group' are used when creating a tar archive. By default, + uses the current owner and group. + """ + save_cwd = os.getcwd() + if root_dir is not None: + if logger is not None: + logger.debug("changing into '%s'", root_dir) + base_name = os.path.abspath(base_name) + if not dry_run: + os.chdir(root_dir) + + if base_dir is None: + base_dir = os.curdir + + kwargs = {'dry_run': dry_run, 'logger': logger} + + try: + format_info = _ARCHIVE_FORMATS[format] + except KeyError: + raise ValueError("unknown archive format '%s'" % format) + + func = format_info[0] + for arg, val in format_info[1]: + kwargs[arg] = val + + if format != 'zip': + kwargs['owner'] = owner + kwargs['group'] = group + + try: + filename = func(base_name, base_dir, **kwargs) + finally: + if root_dir is not None: + if logger is not None: + logger.debug("changing back to '%s'", save_cwd) + os.chdir(save_cwd) + + return filename + + +def get_unpack_formats(): + """Returns a list of supported formats for unpacking. + + Each element of the returned sequence is a tuple + (name, extensions, description) + """ + formats = [(name, info[0], info[3]) for name, info in + _UNPACK_FORMATS.items()] + formats.sort() + return formats + +def _check_unpack_options(extensions, function, extra_args): + """Checks what gets registered as an unpacker.""" + # first make sure no other unpacker is registered for this extension + existing_extensions = {} + for name, info in _UNPACK_FORMATS.items(): + for ext in info[0]: + existing_extensions[ext] = name + + for extension in extensions: + if extension in existing_extensions: + msg = '%s is already registered for "%s"' + raise RegistryError(msg % (extension, + existing_extensions[extension])) + + if not isinstance(function, collections.Callable): + raise TypeError('The registered function must be a callable') + + +def register_unpack_format(name, extensions, function, extra_args=None, + description=''): + """Registers an unpack format. 
+ + `name` is the name of the format. `extensions` is a list of extensions + corresponding to the format. + + `function` is the callable that will be + used to unpack archives. The callable will receive archives to unpack. + If it's unable to handle an archive, it needs to raise a ReadError + exception. + + If provided, `extra_args` is a sequence of + (name, value) tuples that will be passed as arguments to the callable. + description can be provided to describe the format, and will be returned + by the get_unpack_formats() function. + """ + if extra_args is None: + extra_args = [] + _check_unpack_options(extensions, function, extra_args) + _UNPACK_FORMATS[name] = extensions, function, extra_args, description + +def unregister_unpack_format(name): + """Removes the pack format from the registery.""" + del _UNPACK_FORMATS[name] + +def _ensure_directory(path): + """Ensure that the parent directory of `path` exists""" + dirname = os.path.dirname(path) + if not os.path.isdir(dirname): + os.makedirs(dirname) + +def _unpack_zipfile(filename, extract_dir): + """Unpack zip `filename` to `extract_dir` + """ + try: + import zipfile + except ImportError: + raise ReadError('zlib not supported, cannot unpack this archive.') + + if not zipfile.is_zipfile(filename): + raise ReadError("%s is not a zip file" % filename) + + zip = zipfile.ZipFile(filename) + try: + for info in zip.infolist(): + name = info.filename + + # don't extract absolute paths or ones with .. in them + if name.startswith('/') or '..' 
in name: + continue + + target = os.path.join(extract_dir, *name.split('/')) + if not target: + continue + + _ensure_directory(target) + if not name.endswith('/'): + # file + data = zip.read(info.filename) + f = open(target, 'wb') + try: + f.write(data) + finally: + f.close() + del data + finally: + zip.close() + +def _unpack_tarfile(filename, extract_dir): + """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir` + """ + try: + tarobj = tarfile.open(filename) + except tarfile.TarError: + raise ReadError( + "%s is not a compressed or uncompressed tar file" % filename) + try: + tarobj.extractall(extract_dir) + finally: + tarobj.close() + +_UNPACK_FORMATS = { + 'gztar': (['.tar.gz', '.tgz'], _unpack_tarfile, [], "gzip'ed tar-file"), + 'tar': (['.tar'], _unpack_tarfile, [], "uncompressed tar file"), + 'zip': (['.zip'], _unpack_zipfile, [], "ZIP file") + } + +if _BZ2_SUPPORTED: + _UNPACK_FORMATS['bztar'] = (['.bz2'], _unpack_tarfile, [], + "bzip2'ed tar-file") + +def _find_unpack_format(filename): + for name, info in _UNPACK_FORMATS.items(): + for extension in info[0]: + if filename.endswith(extension): + return name + return None + +def unpack_archive(filename, extract_dir=None, format=None): + """Unpack an archive. + + `filename` is the name of the archive. + + `extract_dir` is the name of the target directory, where the archive + is unpacked. If not provided, the current working directory is used. + + `format` is the archive format: one of "zip", "tar", or "gztar". Or any + other registered format. If not provided, unpack_archive will use the + filename extension and see if an unpacker was registered for that + extension. + + In case none is found, a ValueError is raised. 
+ """ + if extract_dir is None: + extract_dir = os.getcwd() + + if format is not None: + try: + format_info = _UNPACK_FORMATS[format] + except KeyError: + raise ValueError("Unknown unpack format '{0}'".format(format)) + + func = format_info[1] + func(filename, extract_dir, **dict(format_info[2])) + else: + # we need to look at the registered unpackers supported extensions + format = _find_unpack_format(filename) + if format is None: + raise ReadError("Unknown archive format '{0}'".format(filename)) + + func = _UNPACK_FORMATS[format][1] + kwargs = dict(_UNPACK_FORMATS[format][2]) + func(filename, extract_dir, **kwargs) diff --git a/lib/python3.4/site-packages/pip/_vendor/distlib/_backport/sysconfig.cfg b/lib/python3.4/site-packages/pip/_vendor/distlib/_backport/sysconfig.cfg new file mode 100644 index 0000000..1746bd0 --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/distlib/_backport/sysconfig.cfg @@ -0,0 +1,84 @@ +[posix_prefix] +# Configuration directories. Some of these come straight out of the +# configure script. They are for implementing the other variables, not to +# be used directly in [resource_locations]. 
+confdir = /etc +datadir = /usr/share +libdir = /usr/lib +statedir = /var +# User resource directory +local = ~/.local/{distribution.name} + +stdlib = {base}/lib/python{py_version_short} +platstdlib = {platbase}/lib/python{py_version_short} +purelib = {base}/lib/python{py_version_short}/site-packages +platlib = {platbase}/lib/python{py_version_short}/site-packages +include = {base}/include/python{py_version_short}{abiflags} +platinclude = {platbase}/include/python{py_version_short}{abiflags} +data = {base} + +[posix_home] +stdlib = {base}/lib/python +platstdlib = {base}/lib/python +purelib = {base}/lib/python +platlib = {base}/lib/python +include = {base}/include/python +platinclude = {base}/include/python +scripts = {base}/bin +data = {base} + +[nt] +stdlib = {base}/Lib +platstdlib = {base}/Lib +purelib = {base}/Lib/site-packages +platlib = {base}/Lib/site-packages +include = {base}/Include +platinclude = {base}/Include +scripts = {base}/Scripts +data = {base} + +[os2] +stdlib = {base}/Lib +platstdlib = {base}/Lib +purelib = {base}/Lib/site-packages +platlib = {base}/Lib/site-packages +include = {base}/Include +platinclude = {base}/Include +scripts = {base}/Scripts +data = {base} + +[os2_home] +stdlib = {userbase}/lib/python{py_version_short} +platstdlib = {userbase}/lib/python{py_version_short} +purelib = {userbase}/lib/python{py_version_short}/site-packages +platlib = {userbase}/lib/python{py_version_short}/site-packages +include = {userbase}/include/python{py_version_short} +scripts = {userbase}/bin +data = {userbase} + +[nt_user] +stdlib = {userbase}/Python{py_version_nodot} +platstdlib = {userbase}/Python{py_version_nodot} +purelib = {userbase}/Python{py_version_nodot}/site-packages +platlib = {userbase}/Python{py_version_nodot}/site-packages +include = {userbase}/Python{py_version_nodot}/Include +scripts = {userbase}/Scripts +data = {userbase} + +[posix_user] +stdlib = {userbase}/lib/python{py_version_short} +platstdlib = 
{userbase}/lib/python{py_version_short} +purelib = {userbase}/lib/python{py_version_short}/site-packages +platlib = {userbase}/lib/python{py_version_short}/site-packages +include = {userbase}/include/python{py_version_short} +scripts = {userbase}/bin +data = {userbase} + +[osx_framework_user] +stdlib = {userbase}/lib/python +platstdlib = {userbase}/lib/python +purelib = {userbase}/lib/python/site-packages +platlib = {userbase}/lib/python/site-packages +include = {userbase}/include +scripts = {userbase}/bin +data = {userbase} diff --git a/lib/python3.4/site-packages/pip/_vendor/distlib/_backport/sysconfig.py b/lib/python3.4/site-packages/pip/_vendor/distlib/_backport/sysconfig.py new file mode 100644 index 0000000..1d31326 --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/distlib/_backport/sysconfig.py @@ -0,0 +1,788 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +"""Access to Python's configuration information.""" + +import codecs +import os +import re +import sys +from os.path import pardir, realpath +try: + import configparser +except ImportError: + import ConfigParser as configparser + + +__all__ = [ + 'get_config_h_filename', + 'get_config_var', + 'get_config_vars', + 'get_makefile_filename', + 'get_path', + 'get_path_names', + 'get_paths', + 'get_platform', + 'get_python_version', + 'get_scheme_names', + 'parse_config_h', +] + + +def _safe_realpath(path): + try: + return realpath(path) + except OSError: + return path + + +if sys.executable: + _PROJECT_BASE = os.path.dirname(_safe_realpath(sys.executable)) +else: + # sys.executable can be empty if argv[0] has been changed and Python is + # unable to retrieve the real program name + _PROJECT_BASE = _safe_realpath(os.getcwd()) + +if os.name == "nt" and "pcbuild" in _PROJECT_BASE[-8:].lower(): + _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir)) +# PC/VS7.1 +if os.name == "nt" and "\\pc\\v" in 
_PROJECT_BASE[-10:].lower(): + _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir)) +# PC/AMD64 +if os.name == "nt" and "\\pcbuild\\amd64" in _PROJECT_BASE[-14:].lower(): + _PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir)) + + +def is_python_build(): + for fn in ("Setup.dist", "Setup.local"): + if os.path.isfile(os.path.join(_PROJECT_BASE, "Modules", fn)): + return True + return False + +_PYTHON_BUILD = is_python_build() + +_cfg_read = False + +def _ensure_cfg_read(): + global _cfg_read + if not _cfg_read: + from ..resources import finder + backport_package = __name__.rsplit('.', 1)[0] + _finder = finder(backport_package) + _cfgfile = _finder.find('sysconfig.cfg') + assert _cfgfile, 'sysconfig.cfg exists' + with _cfgfile.as_stream() as s: + _SCHEMES.readfp(s) + if _PYTHON_BUILD: + for scheme in ('posix_prefix', 'posix_home'): + _SCHEMES.set(scheme, 'include', '{srcdir}/Include') + _SCHEMES.set(scheme, 'platinclude', '{projectbase}/.') + + _cfg_read = True + + +_SCHEMES = configparser.RawConfigParser() +_VAR_REPL = re.compile(r'\{([^{]*?)\}') + +def _expand_globals(config): + _ensure_cfg_read() + if config.has_section('globals'): + globals = config.items('globals') + else: + globals = tuple() + + sections = config.sections() + for section in sections: + if section == 'globals': + continue + for option, value in globals: + if config.has_option(section, option): + continue + config.set(section, option, value) + config.remove_section('globals') + + # now expanding local variables defined in the cfg file + # + for section in config.sections(): + variables = dict(config.items(section)) + + def _replacer(matchobj): + name = matchobj.group(1) + if name in variables: + return variables[name] + return matchobj.group(0) + + for option, value in config.items(section): + config.set(section, option, _VAR_REPL.sub(_replacer, value)) + +#_expand_globals(_SCHEMES) + + # FIXME don't rely on sys.version here, its format is an 
implementation detail + # of CPython, use sys.version_info or sys.hexversion +_PY_VERSION = sys.version.split()[0] +_PY_VERSION_SHORT = sys.version[:3] +_PY_VERSION_SHORT_NO_DOT = _PY_VERSION[0] + _PY_VERSION[2] +_PREFIX = os.path.normpath(sys.prefix) +_EXEC_PREFIX = os.path.normpath(sys.exec_prefix) +_CONFIG_VARS = None +_USER_BASE = None + + +def _subst_vars(path, local_vars): + """In the string `path`, replace tokens like {some.thing} with the + corresponding value from the map `local_vars`. + + If there is no corresponding value, leave the token unchanged. + """ + def _replacer(matchobj): + name = matchobj.group(1) + if name in local_vars: + return local_vars[name] + elif name in os.environ: + return os.environ[name] + return matchobj.group(0) + return _VAR_REPL.sub(_replacer, path) + + +def _extend_dict(target_dict, other_dict): + target_keys = target_dict.keys() + for key, value in other_dict.items(): + if key in target_keys: + continue + target_dict[key] = value + + +def _expand_vars(scheme, vars): + res = {} + if vars is None: + vars = {} + _extend_dict(vars, get_config_vars()) + + for key, value in _SCHEMES.items(scheme): + if os.name in ('posix', 'nt'): + value = os.path.expanduser(value) + res[key] = os.path.normpath(_subst_vars(value, vars)) + return res + + +def format_value(value, vars): + def _replacer(matchobj): + name = matchobj.group(1) + if name in vars: + return vars[name] + return matchobj.group(0) + return _VAR_REPL.sub(_replacer, value) + + +def _get_default_scheme(): + if os.name == 'posix': + # the default scheme for posix is posix_prefix + return 'posix_prefix' + return os.name + + +def _getuserbase(): + env_base = os.environ.get("PYTHONUSERBASE", None) + + def joinuser(*args): + return os.path.expanduser(os.path.join(*args)) + + # what about 'os2emx', 'riscos' ? 
+ if os.name == "nt": + base = os.environ.get("APPDATA") or "~" + if env_base: + return env_base + else: + return joinuser(base, "Python") + + if sys.platform == "darwin": + framework = get_config_var("PYTHONFRAMEWORK") + if framework: + if env_base: + return env_base + else: + return joinuser("~", "Library", framework, "%d.%d" % + sys.version_info[:2]) + + if env_base: + return env_base + else: + return joinuser("~", ".local") + + +def _parse_makefile(filename, vars=None): + """Parse a Makefile-style file. + + A dictionary containing name/value pairs is returned. If an + optional dictionary is passed in as the second argument, it is + used instead of a new dictionary. + """ + # Regexes needed for parsing Makefile (and similar syntaxes, + # like old-style Setup files). + _variable_rx = re.compile("([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)") + _findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)") + _findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}") + + if vars is None: + vars = {} + done = {} + notdone = {} + + with codecs.open(filename, encoding='utf-8', errors="surrogateescape") as f: + lines = f.readlines() + + for line in lines: + if line.startswith('#') or line.strip() == '': + continue + m = _variable_rx.match(line) + if m: + n, v = m.group(1, 2) + v = v.strip() + # `$$' is a literal `$' in make + tmpv = v.replace('$$', '') + + if "$" in tmpv: + notdone[n] = v + else: + try: + v = int(v) + except ValueError: + # insert literal `$' + done[n] = v.replace('$$', '$') + else: + done[n] = v + + # do variable interpolation here + variables = list(notdone.keys()) + + # Variables with a 'PY_' prefix in the makefile. These need to + # be made available without that prefix through sysconfig. + # Special care is needed to ensure that variable expansion works, even + # if the expansion uses the name without a prefix. 
+ renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS') + + while len(variables) > 0: + for name in tuple(variables): + value = notdone[name] + m = _findvar1_rx.search(value) or _findvar2_rx.search(value) + if m is not None: + n = m.group(1) + found = True + if n in done: + item = str(done[n]) + elif n in notdone: + # get it on a subsequent round + found = False + elif n in os.environ: + # do it like make: fall back to environment + item = os.environ[n] + + elif n in renamed_variables: + if (name.startswith('PY_') and + name[3:] in renamed_variables): + item = "" + + elif 'PY_' + n in notdone: + found = False + + else: + item = str(done['PY_' + n]) + + else: + done[n] = item = "" + + if found: + after = value[m.end():] + value = value[:m.start()] + item + after + if "$" in after: + notdone[name] = value + else: + try: + value = int(value) + except ValueError: + done[name] = value.strip() + else: + done[name] = value + variables.remove(name) + + if (name.startswith('PY_') and + name[3:] in renamed_variables): + + name = name[3:] + if name not in done: + done[name] = value + + else: + # bogus variable reference (e.g. 
"prefix=$/opt/python"); + # just drop it since we can't deal + done[name] = value + variables.remove(name) + + # strip spurious spaces + for k, v in done.items(): + if isinstance(v, str): + done[k] = v.strip() + + # save the results in the global dictionary + vars.update(done) + return vars + + +def get_makefile_filename(): + """Return the path of the Makefile.""" + if _PYTHON_BUILD: + return os.path.join(_PROJECT_BASE, "Makefile") + if hasattr(sys, 'abiflags'): + config_dir_name = 'config-%s%s' % (_PY_VERSION_SHORT, sys.abiflags) + else: + config_dir_name = 'config' + return os.path.join(get_path('stdlib'), config_dir_name, 'Makefile') + + +def _init_posix(vars): + """Initialize the module as appropriate for POSIX systems.""" + # load the installed Makefile: + makefile = get_makefile_filename() + try: + _parse_makefile(makefile, vars) + except IOError as e: + msg = "invalid Python installation: unable to open %s" % makefile + if hasattr(e, "strerror"): + msg = msg + " (%s)" % e.strerror + raise IOError(msg) + # load the installed pyconfig.h: + config_h = get_config_h_filename() + try: + with open(config_h) as f: + parse_config_h(f, vars) + except IOError as e: + msg = "invalid Python installation: unable to open %s" % config_h + if hasattr(e, "strerror"): + msg = msg + " (%s)" % e.strerror + raise IOError(msg) + # On AIX, there are wrong paths to the linker scripts in the Makefile + # -- these paths are relative to the Python source, but when installed + # the scripts are in another directory. 
+ if _PYTHON_BUILD: + vars['LDSHARED'] = vars['BLDSHARED'] + + +def _init_non_posix(vars): + """Initialize the module as appropriate for NT""" + # set basic install directories + vars['LIBDEST'] = get_path('stdlib') + vars['BINLIBDEST'] = get_path('platstdlib') + vars['INCLUDEPY'] = get_path('include') + vars['SO'] = '.pyd' + vars['EXE'] = '.exe' + vars['VERSION'] = _PY_VERSION_SHORT_NO_DOT + vars['BINDIR'] = os.path.dirname(_safe_realpath(sys.executable)) + +# +# public APIs +# + + +def parse_config_h(fp, vars=None): + """Parse a config.h-style file. + + A dictionary containing name/value pairs is returned. If an + optional dictionary is passed in as the second argument, it is + used instead of a new dictionary. + """ + if vars is None: + vars = {} + define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n") + undef_rx = re.compile("/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n") + + while True: + line = fp.readline() + if not line: + break + m = define_rx.match(line) + if m: + n, v = m.group(1, 2) + try: + v = int(v) + except ValueError: + pass + vars[n] = v + else: + m = undef_rx.match(line) + if m: + vars[m.group(1)] = 0 + return vars + + +def get_config_h_filename(): + """Return the path of pyconfig.h.""" + if _PYTHON_BUILD: + if os.name == "nt": + inc_dir = os.path.join(_PROJECT_BASE, "PC") + else: + inc_dir = _PROJECT_BASE + else: + inc_dir = get_path('platinclude') + return os.path.join(inc_dir, 'pyconfig.h') + + +def get_scheme_names(): + """Return a tuple containing the schemes names.""" + return tuple(sorted(_SCHEMES.sections())) + + +def get_path_names(): + """Return a tuple containing the paths names.""" + # xxx see if we want a static list + return _SCHEMES.options('posix_prefix') + + +def get_paths(scheme=_get_default_scheme(), vars=None, expand=True): + """Return a mapping containing an install scheme. + + ``scheme`` is the install scheme name. If not provided, it will + return the default scheme for the current platform. 
+ """ + _ensure_cfg_read() + if expand: + return _expand_vars(scheme, vars) + else: + return dict(_SCHEMES.items(scheme)) + + +def get_path(name, scheme=_get_default_scheme(), vars=None, expand=True): + """Return a path corresponding to the scheme. + + ``scheme`` is the install scheme name. + """ + return get_paths(scheme, vars, expand)[name] + + +def get_config_vars(*args): + """With no arguments, return a dictionary of all configuration + variables relevant for the current platform. + + On Unix, this means every variable defined in Python's installed Makefile; + On Windows and Mac OS it's a much smaller set. + + With arguments, return a list of values that result from looking up + each argument in the configuration variable dictionary. + """ + global _CONFIG_VARS + if _CONFIG_VARS is None: + _CONFIG_VARS = {} + # Normalized versions of prefix and exec_prefix are handy to have; + # in fact, these are the standard versions used most places in the + # distutils2 module. + _CONFIG_VARS['prefix'] = _PREFIX + _CONFIG_VARS['exec_prefix'] = _EXEC_PREFIX + _CONFIG_VARS['py_version'] = _PY_VERSION + _CONFIG_VARS['py_version_short'] = _PY_VERSION_SHORT + _CONFIG_VARS['py_version_nodot'] = _PY_VERSION[0] + _PY_VERSION[2] + _CONFIG_VARS['base'] = _PREFIX + _CONFIG_VARS['platbase'] = _EXEC_PREFIX + _CONFIG_VARS['projectbase'] = _PROJECT_BASE + try: + _CONFIG_VARS['abiflags'] = sys.abiflags + except AttributeError: + # sys.abiflags may not be defined on all platforms. + _CONFIG_VARS['abiflags'] = '' + + if os.name in ('nt', 'os2'): + _init_non_posix(_CONFIG_VARS) + if os.name == 'posix': + _init_posix(_CONFIG_VARS) + # Setting 'userbase' is done below the call to the + # init function to enable using 'get_config_var' in + # the init-function. 
+ if sys.version >= '2.6': + _CONFIG_VARS['userbase'] = _getuserbase() + + if 'srcdir' not in _CONFIG_VARS: + _CONFIG_VARS['srcdir'] = _PROJECT_BASE + else: + _CONFIG_VARS['srcdir'] = _safe_realpath(_CONFIG_VARS['srcdir']) + + # Convert srcdir into an absolute path if it appears necessary. + # Normally it is relative to the build directory. However, during + # testing, for example, we might be running a non-installed python + # from a different directory. + if _PYTHON_BUILD and os.name == "posix": + base = _PROJECT_BASE + try: + cwd = os.getcwd() + except OSError: + cwd = None + if (not os.path.isabs(_CONFIG_VARS['srcdir']) and + base != cwd): + # srcdir is relative and we are not in the same directory + # as the executable. Assume executable is in the build + # directory and make srcdir absolute. + srcdir = os.path.join(base, _CONFIG_VARS['srcdir']) + _CONFIG_VARS['srcdir'] = os.path.normpath(srcdir) + + if sys.platform == 'darwin': + kernel_version = os.uname()[2] # Kernel version (8.4.3) + major_version = int(kernel_version.split('.')[0]) + + if major_version < 8: + # On Mac OS X before 10.4, check if -arch and -isysroot + # are in CFLAGS or LDFLAGS and remove them if they are. + # This is needed when building extensions on a 10.3 system + # using a universal build of python. + for key in ('LDFLAGS', 'BASECFLAGS', + # a number of derived variables. These need to be + # patched up as well. + 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): + flags = _CONFIG_VARS[key] + flags = re.sub('-arch\s+\w+\s', ' ', flags) + flags = re.sub('-isysroot [^ \t]*', ' ', flags) + _CONFIG_VARS[key] = flags + else: + # Allow the user to override the architecture flags using + # an environment variable. + # NOTE: This name was introduced by Apple in OSX 10.5 and + # is used by several scripting languages distributed with + # that OS release. + if 'ARCHFLAGS' in os.environ: + arch = os.environ['ARCHFLAGS'] + for key in ('LDFLAGS', 'BASECFLAGS', + # a number of derived variables. 
These need to be + # patched up as well. + 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): + + flags = _CONFIG_VARS[key] + flags = re.sub('-arch\s+\w+\s', ' ', flags) + flags = flags + ' ' + arch + _CONFIG_VARS[key] = flags + + # If we're on OSX 10.5 or later and the user tries to + # compiles an extension using an SDK that is not present + # on the current machine it is better to not use an SDK + # than to fail. + # + # The major usecase for this is users using a Python.org + # binary installer on OSX 10.6: that installer uses + # the 10.4u SDK, but that SDK is not installed by default + # when you install Xcode. + # + CFLAGS = _CONFIG_VARS.get('CFLAGS', '') + m = re.search('-isysroot\s+(\S+)', CFLAGS) + if m is not None: + sdk = m.group(1) + if not os.path.exists(sdk): + for key in ('LDFLAGS', 'BASECFLAGS', + # a number of derived variables. These need to be + # patched up as well. + 'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'): + + flags = _CONFIG_VARS[key] + flags = re.sub('-isysroot\s+\S+(\s|$)', ' ', flags) + _CONFIG_VARS[key] = flags + + if args: + vals = [] + for name in args: + vals.append(_CONFIG_VARS.get(name)) + return vals + else: + return _CONFIG_VARS + + +def get_config_var(name): + """Return the value of a single variable using the dictionary returned by + 'get_config_vars()'. + + Equivalent to get_config_vars().get(name) + """ + return get_config_vars().get(name) + + +def get_platform(): + """Return a string that identifies the current platform. + + This is used mainly to distinguish platform-specific build directories and + platform-specific built distributions. Typically includes the OS name + and version and the architecture (as supplied by 'os.uname()'), + although the exact information included depends on the OS; eg. for IRIX + the architecture isn't particularly important (IRIX only runs on SGI + hardware), but for Linux the kernel version isn't particularly + important. + + Examples of returned values: + linux-i586 + linux-alpha (?) 
+ solaris-2.6-sun4u + irix-5.3 + irix64-6.2 + + Windows will return one of: + win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc) + win-ia64 (64bit Windows on Itanium) + win32 (all others - specifically, sys.platform is returned) + + For other non-POSIX platforms, currently just returns 'sys.platform'. + """ + if os.name == 'nt': + # sniff sys.version for architecture. + prefix = " bit (" + i = sys.version.find(prefix) + if i == -1: + return sys.platform + j = sys.version.find(")", i) + look = sys.version[i+len(prefix):j].lower() + if look == 'amd64': + return 'win-amd64' + if look == 'itanium': + return 'win-ia64' + return sys.platform + + if os.name != "posix" or not hasattr(os, 'uname'): + # XXX what about the architecture? NT is Intel or Alpha, + # Mac OS is M68k or PPC, etc. + return sys.platform + + # Try to distinguish various flavours of Unix + osname, host, release, version, machine = os.uname() + + # Convert the OS name to lowercase, remove '/' characters + # (to accommodate BSD/OS), and translate spaces (for "Power Macintosh") + osname = osname.lower().replace('/', '') + machine = machine.replace(' ', '_') + machine = machine.replace('/', '-') + + if osname[:5] == "linux": + # At least on Linux/Intel, 'machine' is the processor -- + # i386, etc. + # XXX what about Alpha, SPARC, etc? + return "%s-%s" % (osname, machine) + elif osname[:5] == "sunos": + if release[0] >= "5": # SunOS 5 == Solaris 2 + osname = "solaris" + release = "%d.%s" % (int(release[0]) - 3, release[2:]) + # fall through to standard osname-release-machine representation + elif osname[:4] == "irix": # could be "irix64"! 
+ return "%s-%s" % (osname, release) + elif osname[:3] == "aix": + return "%s-%s.%s" % (osname, version, release) + elif osname[:6] == "cygwin": + osname = "cygwin" + rel_re = re.compile(r'[\d.]+') + m = rel_re.match(release) + if m: + release = m.group() + elif osname[:6] == "darwin": + # + # For our purposes, we'll assume that the system version from + # distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set + # to. This makes the compatibility story a bit more sane because the + # machine is going to compile and link as if it were + # MACOSX_DEPLOYMENT_TARGET. + cfgvars = get_config_vars() + macver = cfgvars.get('MACOSX_DEPLOYMENT_TARGET') + + if True: + # Always calculate the release of the running machine, + # needed to determine if we can build fat binaries or not. + + macrelease = macver + # Get the system version. Reading this plist is a documented + # way to get the system version (see the documentation for + # the Gestalt Manager) + try: + f = open('/System/Library/CoreServices/SystemVersion.plist') + except IOError: + # We're on a plain darwin box, fall back to the default + # behaviour. + pass + else: + try: + m = re.search(r'ProductUserVisibleVersion\s*' + r'(.*?)', f.read()) + finally: + f.close() + if m is not None: + macrelease = '.'.join(m.group(1).split('.')[:2]) + # else: fall back to the default behaviour + + if not macver: + macver = macrelease + + if macver: + release = macver + osname = "macosx" + + if ((macrelease + '.') >= '10.4.' and + '-arch' in get_config_vars().get('CFLAGS', '').strip()): + # The universal build will build fat binaries, but not on + # systems before 10.4 + # + # Try to detect 4-way universal builds, those have machine-type + # 'universal' instead of 'fat'. 
+ + machine = 'fat' + cflags = get_config_vars().get('CFLAGS') + + archs = re.findall('-arch\s+(\S+)', cflags) + archs = tuple(sorted(set(archs))) + + if len(archs) == 1: + machine = archs[0] + elif archs == ('i386', 'ppc'): + machine = 'fat' + elif archs == ('i386', 'x86_64'): + machine = 'intel' + elif archs == ('i386', 'ppc', 'x86_64'): + machine = 'fat3' + elif archs == ('ppc64', 'x86_64'): + machine = 'fat64' + elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'): + machine = 'universal' + else: + raise ValueError( + "Don't know machine value for archs=%r" % (archs,)) + + elif machine == 'i386': + # On OSX the machine type returned by uname is always the + # 32-bit variant, even if the executable architecture is + # the 64-bit variant + if sys.maxsize >= 2**32: + machine = 'x86_64' + + elif machine in ('PowerPC', 'Power_Macintosh'): + # Pick a sane name for the PPC architecture. + # See 'i386' case + if sys.maxsize >= 2**32: + machine = 'ppc64' + else: + machine = 'ppc' + + return "%s-%s-%s" % (osname, release, machine) + + +def get_python_version(): + return _PY_VERSION_SHORT + + +def _print_dict(title, data): + for index, (key, value) in enumerate(sorted(data.items())): + if index == 0: + print('%s: ' % (title)) + print('\t%s = "%s"' % (key, value)) + + +def _main(): + """Display all information sysconfig detains.""" + print('Platform: "%s"' % get_platform()) + print('Python version: "%s"' % get_python_version()) + print('Current installation scheme: "%s"' % _get_default_scheme()) + print() + _print_dict('Paths', get_paths()) + print() + _print_dict('Variables', get_config_vars()) + + +if __name__ == '__main__': + _main() diff --git a/lib/python3.4/site-packages/pip/_vendor/distlib/_backport/tarfile.py b/lib/python3.4/site-packages/pip/_vendor/distlib/_backport/tarfile.py new file mode 100644 index 0000000..0580fb7 --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/distlib/_backport/tarfile.py @@ -0,0 +1,2607 @@ 
+#------------------------------------------------------------------- +# tarfile.py +#------------------------------------------------------------------- +# Copyright (C) 2002 Lars Gustaebel +# All rights reserved. +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation +# files (the "Software"), to deal in the Software without +# restriction, including without limitation the rights to use, +# copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following +# conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. +# +from __future__ import print_function + +"""Read from and write to tar format archives. +""" + +__version__ = "$Revision$" + +version = "0.9.0" +__author__ = "Lars Gust\u00e4bel (lars@gustaebel.de)" +__date__ = "$Date: 2011-02-25 17:42:01 +0200 (Fri, 25 Feb 2011) $" +__cvsid__ = "$Id: tarfile.py 88586 2011-02-25 15:42:01Z marc-andre.lemburg $" +__credits__ = "Gustavo Niemeyer, Niels Gust\u00e4bel, Richard Townsend." 
+ +#--------- +# Imports +#--------- +import sys +import os +import stat +import errno +import time +import struct +import copy +import re + +try: + import grp, pwd +except ImportError: + grp = pwd = None + +# os.symlink on Windows prior to 6.0 raises NotImplementedError +symlink_exception = (AttributeError, NotImplementedError) +try: + # WindowsError (1314) will be raised if the caller does not hold the + # SeCreateSymbolicLinkPrivilege privilege + symlink_exception += (WindowsError,) +except NameError: + pass + +# from tarfile import * +__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError"] + +if sys.version_info[0] < 3: + import __builtin__ as builtins +else: + import builtins + +_open = builtins.open # Since 'open' is TarFile.open + +#--------------------------------------------------------- +# tar constants +#--------------------------------------------------------- +NUL = b"\0" # the null character +BLOCKSIZE = 512 # length of processing blocks +RECORDSIZE = BLOCKSIZE * 20 # length of records +GNU_MAGIC = b"ustar \0" # magic gnu tar string +POSIX_MAGIC = b"ustar\x0000" # magic posix tar string + +LENGTH_NAME = 100 # maximum length of a filename +LENGTH_LINK = 100 # maximum length of a linkname +LENGTH_PREFIX = 155 # maximum length of the prefix field + +REGTYPE = b"0" # regular file +AREGTYPE = b"\0" # regular file +LNKTYPE = b"1" # link (inside tarfile) +SYMTYPE = b"2" # symbolic link +CHRTYPE = b"3" # character special device +BLKTYPE = b"4" # block special device +DIRTYPE = b"5" # directory +FIFOTYPE = b"6" # fifo special device +CONTTYPE = b"7" # contiguous file + +GNUTYPE_LONGNAME = b"L" # GNU tar longname +GNUTYPE_LONGLINK = b"K" # GNU tar longlink +GNUTYPE_SPARSE = b"S" # GNU tar sparse file + +XHDTYPE = b"x" # POSIX.1-2001 extended header +XGLTYPE = b"g" # POSIX.1-2001 global header +SOLARIS_XHDTYPE = b"X" # Solaris extended header + +USTAR_FORMAT = 0 # POSIX.1-1988 (ustar) format +GNU_FORMAT = 1 # GNU tar format +PAX_FORMAT = 2 # POSIX.1-2001 
(pax) format +DEFAULT_FORMAT = GNU_FORMAT + +#--------------------------------------------------------- +# tarfile constants +#--------------------------------------------------------- +# File types that tarfile supports: +SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE, + SYMTYPE, DIRTYPE, FIFOTYPE, + CONTTYPE, CHRTYPE, BLKTYPE, + GNUTYPE_LONGNAME, GNUTYPE_LONGLINK, + GNUTYPE_SPARSE) + +# File types that will be treated as a regular file. +REGULAR_TYPES = (REGTYPE, AREGTYPE, + CONTTYPE, GNUTYPE_SPARSE) + +# File types that are part of the GNU tar format. +GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK, + GNUTYPE_SPARSE) + +# Fields from a pax header that override a TarInfo attribute. +PAX_FIELDS = ("path", "linkpath", "size", "mtime", + "uid", "gid", "uname", "gname") + +# Fields from a pax header that are affected by hdrcharset. +PAX_NAME_FIELDS = set(("path", "linkpath", "uname", "gname")) + +# Fields in a pax header that are numbers, all other fields +# are treated as strings. +PAX_NUMBER_FIELDS = { + "atime": float, + "ctime": float, + "mtime": float, + "uid": int, + "gid": int, + "size": int +} + +#--------------------------------------------------------- +# Bits used in the mode field, values in octal. 
+#--------------------------------------------------------- +S_IFLNK = 0o120000 # symbolic link +S_IFREG = 0o100000 # regular file +S_IFBLK = 0o060000 # block device +S_IFDIR = 0o040000 # directory +S_IFCHR = 0o020000 # character device +S_IFIFO = 0o010000 # fifo + +TSUID = 0o4000 # set UID on execution +TSGID = 0o2000 # set GID on execution +TSVTX = 0o1000 # reserved + +TUREAD = 0o400 # read by owner +TUWRITE = 0o200 # write by owner +TUEXEC = 0o100 # execute/search by owner +TGREAD = 0o040 # read by group +TGWRITE = 0o020 # write by group +TGEXEC = 0o010 # execute/search by group +TOREAD = 0o004 # read by other +TOWRITE = 0o002 # write by other +TOEXEC = 0o001 # execute/search by other + +#--------------------------------------------------------- +# initialization +#--------------------------------------------------------- +if os.name in ("nt", "ce"): + ENCODING = "utf-8" +else: + ENCODING = sys.getfilesystemencoding() + +#--------------------------------------------------------- +# Some useful functions +#--------------------------------------------------------- + +def stn(s, length, encoding, errors): + """Convert a string to a null-terminated bytes object. + """ + s = s.encode(encoding, errors) + return s[:length] + (length - len(s)) * NUL + +def nts(s, encoding, errors): + """Convert a null-terminated bytes object to a string. + """ + p = s.find(b"\0") + if p != -1: + s = s[:p] + return s.decode(encoding, errors) + +def nti(s): + """Convert a number field to a python number. + """ + # There are two possible encodings for a number field, see + # itn() below. + if s[0] != chr(0o200): + try: + n = int(nts(s, "ascii", "strict") or "0", 8) + except ValueError: + raise InvalidHeaderError("invalid header") + else: + n = 0 + for i in range(len(s) - 1): + n <<= 8 + n += ord(s[i + 1]) + return n + +def itn(n, digits=8, format=DEFAULT_FORMAT): + """Convert a python number to a number field. 
+ """ + # POSIX 1003.1-1988 requires numbers to be encoded as a string of + # octal digits followed by a null-byte, this allows values up to + # (8**(digits-1))-1. GNU tar allows storing numbers greater than + # that if necessary. A leading 0o200 byte indicates this particular + # encoding, the following digits-1 bytes are a big-endian + # representation. This allows values up to (256**(digits-1))-1. + if 0 <= n < 8 ** (digits - 1): + s = ("%0*o" % (digits - 1, n)).encode("ascii") + NUL + else: + if format != GNU_FORMAT or n >= 256 ** (digits - 1): + raise ValueError("overflow in number field") + + if n < 0: + # XXX We mimic GNU tar's behaviour with negative numbers, + # this could raise OverflowError. + n = struct.unpack("L", struct.pack("l", n))[0] + + s = bytearray() + for i in range(digits - 1): + s.insert(0, n & 0o377) + n >>= 8 + s.insert(0, 0o200) + return s + +def calc_chksums(buf): + """Calculate the checksum for a member's header by summing up all + characters except for the chksum field which is treated as if + it was filled with spaces. According to the GNU tar sources, + some tars (Sun and NeXT) calculate chksum with signed char, + which will be different if there are chars in the buffer with + the high bit set. So we calculate two checksums, unsigned and + signed. + """ + unsigned_chksum = 256 + sum(struct.unpack("148B", buf[:148]) + struct.unpack("356B", buf[156:512])) + signed_chksum = 256 + sum(struct.unpack("148b", buf[:148]) + struct.unpack("356b", buf[156:512])) + return unsigned_chksum, signed_chksum + +def copyfileobj(src, dst, length=None): + """Copy length bytes from fileobj src to fileobj dst. + If length is None, copy the entire content. 
+ """ + if length == 0: + return + if length is None: + while True: + buf = src.read(16*1024) + if not buf: + break + dst.write(buf) + return + + BUFSIZE = 16 * 1024 + blocks, remainder = divmod(length, BUFSIZE) + for b in range(blocks): + buf = src.read(BUFSIZE) + if len(buf) < BUFSIZE: + raise IOError("end of file reached") + dst.write(buf) + + if remainder != 0: + buf = src.read(remainder) + if len(buf) < remainder: + raise IOError("end of file reached") + dst.write(buf) + return + +filemode_table = ( + ((S_IFLNK, "l"), + (S_IFREG, "-"), + (S_IFBLK, "b"), + (S_IFDIR, "d"), + (S_IFCHR, "c"), + (S_IFIFO, "p")), + + ((TUREAD, "r"),), + ((TUWRITE, "w"),), + ((TUEXEC|TSUID, "s"), + (TSUID, "S"), + (TUEXEC, "x")), + + ((TGREAD, "r"),), + ((TGWRITE, "w"),), + ((TGEXEC|TSGID, "s"), + (TSGID, "S"), + (TGEXEC, "x")), + + ((TOREAD, "r"),), + ((TOWRITE, "w"),), + ((TOEXEC|TSVTX, "t"), + (TSVTX, "T"), + (TOEXEC, "x")) +) + +def filemode(mode): + """Convert a file's mode to a string of the form + -rwxrwxrwx. 
+ Used by TarFile.list() + """ + perm = [] + for table in filemode_table: + for bit, char in table: + if mode & bit == bit: + perm.append(char) + break + else: + perm.append("-") + return "".join(perm) + +class TarError(Exception): + """Base exception.""" + pass +class ExtractError(TarError): + """General exception for extract errors.""" + pass +class ReadError(TarError): + """Exception for unreadble tar archives.""" + pass +class CompressionError(TarError): + """Exception for unavailable compression methods.""" + pass +class StreamError(TarError): + """Exception for unsupported operations on stream-like TarFiles.""" + pass +class HeaderError(TarError): + """Base exception for header errors.""" + pass +class EmptyHeaderError(HeaderError): + """Exception for empty headers.""" + pass +class TruncatedHeaderError(HeaderError): + """Exception for truncated headers.""" + pass +class EOFHeaderError(HeaderError): + """Exception for end of file headers.""" + pass +class InvalidHeaderError(HeaderError): + """Exception for invalid headers.""" + pass +class SubsequentHeaderError(HeaderError): + """Exception for missing and invalid extended headers.""" + pass + +#--------------------------- +# internal stream interface +#--------------------------- +class _LowLevelFile(object): + """Low-level file object. Supports reading and writing. + It is used instead of a regular file object for streaming + access. + """ + + def __init__(self, name, mode): + mode = { + "r": os.O_RDONLY, + "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC, + }[mode] + if hasattr(os, "O_BINARY"): + mode |= os.O_BINARY + self.fd = os.open(name, mode, 0o666) + + def close(self): + os.close(self.fd) + + def read(self, size): + return os.read(self.fd, size) + + def write(self, s): + os.write(self.fd, s) + +class _Stream(object): + """Class that serves as an adapter between TarFile and + a stream-like object. The stream-like object only + needs to have a read() or write() method and is accessed + blockwise. 
Use of gzip or bzip2 compression is possible. + A stream-like object could be for example: sys.stdin, + sys.stdout, a socket, a tape device etc. + + _Stream is intended to be used only internally. + """ + + def __init__(self, name, mode, comptype, fileobj, bufsize): + """Construct a _Stream object. + """ + self._extfileobj = True + if fileobj is None: + fileobj = _LowLevelFile(name, mode) + self._extfileobj = False + + if comptype == '*': + # Enable transparent compression detection for the + # stream interface + fileobj = _StreamProxy(fileobj) + comptype = fileobj.getcomptype() + + self.name = name or "" + self.mode = mode + self.comptype = comptype + self.fileobj = fileobj + self.bufsize = bufsize + self.buf = b"" + self.pos = 0 + self.closed = False + + try: + if comptype == "gz": + try: + import zlib + except ImportError: + raise CompressionError("zlib module is not available") + self.zlib = zlib + self.crc = zlib.crc32(b"") + if mode == "r": + self._init_read_gz() + else: + self._init_write_gz() + + if comptype == "bz2": + try: + import bz2 + except ImportError: + raise CompressionError("bz2 module is not available") + if mode == "r": + self.dbuf = b"" + self.cmp = bz2.BZ2Decompressor() + else: + self.cmp = bz2.BZ2Compressor() + except: + if not self._extfileobj: + self.fileobj.close() + self.closed = True + raise + + def __del__(self): + if hasattr(self, "closed") and not self.closed: + self.close() + + def _init_write_gz(self): + """Initialize for writing with gzip compression. + """ + self.cmp = self.zlib.compressobj(9, self.zlib.DEFLATED, + -self.zlib.MAX_WBITS, + self.zlib.DEF_MEM_LEVEL, + 0) + timestamp = struct.pack(" self.bufsize: + self.fileobj.write(self.buf[:self.bufsize]) + self.buf = self.buf[self.bufsize:] + + def close(self): + """Close the _Stream object. No operation should be + done on it afterwards. 
+ """ + if self.closed: + return + + if self.mode == "w" and self.comptype != "tar": + self.buf += self.cmp.flush() + + if self.mode == "w" and self.buf: + self.fileobj.write(self.buf) + self.buf = b"" + if self.comptype == "gz": + # The native zlib crc is an unsigned 32-bit integer, but + # the Python wrapper implicitly casts that to a signed C + # long. So, on a 32-bit box self.crc may "look negative", + # while the same crc on a 64-bit box may "look positive". + # To avoid irksome warnings from the `struct` module, force + # it to look positive on all boxes. + self.fileobj.write(struct.pack("= 0: + blocks, remainder = divmod(pos - self.pos, self.bufsize) + for i in range(blocks): + self.read(self.bufsize) + self.read(remainder) + else: + raise StreamError("seeking backwards is not allowed") + return self.pos + + def read(self, size=None): + """Return the next size number of bytes from the stream. + If size is not defined, return all bytes of the stream + up to EOF. + """ + if size is None: + t = [] + while True: + buf = self._read(self.bufsize) + if not buf: + break + t.append(buf) + buf = "".join(t) + else: + buf = self._read(size) + self.pos += len(buf) + return buf + + def _read(self, size): + """Return size bytes from the stream. + """ + if self.comptype == "tar": + return self.__read(size) + + c = len(self.dbuf) + while c < size: + buf = self.__read(self.bufsize) + if not buf: + break + try: + buf = self.cmp.decompress(buf) + except IOError: + raise ReadError("invalid compressed data") + self.dbuf += buf + c += len(buf) + buf = self.dbuf[:size] + self.dbuf = self.dbuf[size:] + return buf + + def __read(self, size): + """Return size bytes from stream. If internal buffer is empty, + read another block from the stream. 
+ """ + c = len(self.buf) + while c < size: + buf = self.fileobj.read(self.bufsize) + if not buf: + break + self.buf += buf + c += len(buf) + buf = self.buf[:size] + self.buf = self.buf[size:] + return buf +# class _Stream + +class _StreamProxy(object): + """Small proxy class that enables transparent compression + detection for the Stream interface (mode 'r|*'). + """ + + def __init__(self, fileobj): + self.fileobj = fileobj + self.buf = self.fileobj.read(BLOCKSIZE) + + def read(self, size): + self.read = self.fileobj.read + return self.buf + + def getcomptype(self): + if self.buf.startswith(b"\037\213\010"): + return "gz" + if self.buf.startswith(b"BZh91"): + return "bz2" + return "tar" + + def close(self): + self.fileobj.close() +# class StreamProxy + +class _BZ2Proxy(object): + """Small proxy class that enables external file object + support for "r:bz2" and "w:bz2" modes. This is actually + a workaround for a limitation in bz2 module's BZ2File + class which (unlike gzip.GzipFile) has no support for + a file object argument. 
+ """ + + blocksize = 16 * 1024 + + def __init__(self, fileobj, mode): + self.fileobj = fileobj + self.mode = mode + self.name = getattr(self.fileobj, "name", None) + self.init() + + def init(self): + import bz2 + self.pos = 0 + if self.mode == "r": + self.bz2obj = bz2.BZ2Decompressor() + self.fileobj.seek(0) + self.buf = b"" + else: + self.bz2obj = bz2.BZ2Compressor() + + def read(self, size): + x = len(self.buf) + while x < size: + raw = self.fileobj.read(self.blocksize) + if not raw: + break + data = self.bz2obj.decompress(raw) + self.buf += data + x += len(data) + + buf = self.buf[:size] + self.buf = self.buf[size:] + self.pos += len(buf) + return buf + + def seek(self, pos): + if pos < self.pos: + self.init() + self.read(pos - self.pos) + + def tell(self): + return self.pos + + def write(self, data): + self.pos += len(data) + raw = self.bz2obj.compress(data) + self.fileobj.write(raw) + + def close(self): + if self.mode == "w": + raw = self.bz2obj.flush() + self.fileobj.write(raw) +# class _BZ2Proxy + +#------------------------ +# Extraction file object +#------------------------ +class _FileInFile(object): + """A thin wrapper around an existing file object that + provides a part of its data as an individual file + object. + """ + + def __init__(self, fileobj, offset, size, blockinfo=None): + self.fileobj = fileobj + self.offset = offset + self.size = size + self.position = 0 + + if blockinfo is None: + blockinfo = [(0, size)] + + # Construct a map with data and zero blocks. 
+ self.map_index = 0 + self.map = [] + lastpos = 0 + realpos = self.offset + for offset, size in blockinfo: + if offset > lastpos: + self.map.append((False, lastpos, offset, None)) + self.map.append((True, offset, offset + size, realpos)) + realpos += size + lastpos = offset + size + if lastpos < self.size: + self.map.append((False, lastpos, self.size, None)) + + def seekable(self): + if not hasattr(self.fileobj, "seekable"): + # XXX gzip.GzipFile and bz2.BZ2File + return True + return self.fileobj.seekable() + + def tell(self): + """Return the current file position. + """ + return self.position + + def seek(self, position): + """Seek to a position in the file. + """ + self.position = position + + def read(self, size=None): + """Read data from the file. + """ + if size is None: + size = self.size - self.position + else: + size = min(size, self.size - self.position) + + buf = b"" + while size > 0: + while True: + data, start, stop, offset = self.map[self.map_index] + if start <= self.position < stop: + break + else: + self.map_index += 1 + if self.map_index == len(self.map): + self.map_index = 0 + length = min(size, stop - self.position) + if data: + self.fileobj.seek(offset + (self.position - start)) + buf += self.fileobj.read(length) + else: + buf += NUL * length + size -= length + self.position += length + return buf +#class _FileInFile + + +class ExFileObject(object): + """File-like object for reading an archive member. + Is returned by TarFile.extractfile(). 
+ """ + blocksize = 1024 + + def __init__(self, tarfile, tarinfo): + self.fileobj = _FileInFile(tarfile.fileobj, + tarinfo.offset_data, + tarinfo.size, + tarinfo.sparse) + self.name = tarinfo.name + self.mode = "r" + self.closed = False + self.size = tarinfo.size + + self.position = 0 + self.buffer = b"" + + def readable(self): + return True + + def writable(self): + return False + + def seekable(self): + return self.fileobj.seekable() + + def read(self, size=None): + """Read at most size bytes from the file. If size is not + present or None, read all data until EOF is reached. + """ + if self.closed: + raise ValueError("I/O operation on closed file") + + buf = b"" + if self.buffer: + if size is None: + buf = self.buffer + self.buffer = b"" + else: + buf = self.buffer[:size] + self.buffer = self.buffer[size:] + + if size is None: + buf += self.fileobj.read() + else: + buf += self.fileobj.read(size - len(buf)) + + self.position += len(buf) + return buf + + # XXX TextIOWrapper uses the read1() method. + read1 = read + + def readline(self, size=-1): + """Read one entire line from the file. If size is present + and non-negative, return a string with at most that + size, which may be an incomplete line. + """ + if self.closed: + raise ValueError("I/O operation on closed file") + + pos = self.buffer.find(b"\n") + 1 + if pos == 0: + # no newline found. + while True: + buf = self.fileobj.read(self.blocksize) + self.buffer += buf + if not buf or b"\n" in buf: + pos = self.buffer.find(b"\n") + 1 + if pos == 0: + # no newline found. + pos = len(self.buffer) + break + + if size != -1: + pos = min(size, pos) + + buf = self.buffer[:pos] + self.buffer = self.buffer[pos:] + self.position += len(buf) + return buf + + def readlines(self): + """Return a list with all remaining lines. + """ + result = [] + while True: + line = self.readline() + if not line: break + result.append(line) + return result + + def tell(self): + """Return the current file position. 
+ """ + if self.closed: + raise ValueError("I/O operation on closed file") + + return self.position + + def seek(self, pos, whence=os.SEEK_SET): + """Seek to a position in the file. + """ + if self.closed: + raise ValueError("I/O operation on closed file") + + if whence == os.SEEK_SET: + self.position = min(max(pos, 0), self.size) + elif whence == os.SEEK_CUR: + if pos < 0: + self.position = max(self.position + pos, 0) + else: + self.position = min(self.position + pos, self.size) + elif whence == os.SEEK_END: + self.position = max(min(self.size + pos, self.size), 0) + else: + raise ValueError("Invalid argument") + + self.buffer = b"" + self.fileobj.seek(self.position) + + def close(self): + """Close the file object. + """ + self.closed = True + + def __iter__(self): + """Get an iterator over the file's lines. + """ + while True: + line = self.readline() + if not line: + break + yield line +#class ExFileObject + +#------------------ +# Exported Classes +#------------------ +class TarInfo(object): + """Informational class which holds the details about an + archive member given by a tar header block. + TarInfo objects are returned by TarFile.getmember(), + TarFile.getmembers() and TarFile.gettarinfo() and are + usually created internally. + """ + + __slots__ = ("name", "mode", "uid", "gid", "size", "mtime", + "chksum", "type", "linkname", "uname", "gname", + "devmajor", "devminor", + "offset", "offset_data", "pax_headers", "sparse", + "tarfile", "_sparse_structs", "_link_target") + + def __init__(self, name=""): + """Construct a TarInfo object. name is the optional name + of the member. 
+ """ + self.name = name # member name + self.mode = 0o644 # file permissions + self.uid = 0 # user id + self.gid = 0 # group id + self.size = 0 # file size + self.mtime = 0 # modification time + self.chksum = 0 # header checksum + self.type = REGTYPE # member type + self.linkname = "" # link name + self.uname = "" # user name + self.gname = "" # group name + self.devmajor = 0 # device major number + self.devminor = 0 # device minor number + + self.offset = 0 # the tar header starts here + self.offset_data = 0 # the file's data starts here + + self.sparse = None # sparse member information + self.pax_headers = {} # pax header information + + # In pax headers the "name" and "linkname" field are called + # "path" and "linkpath". + def _getpath(self): + return self.name + def _setpath(self, name): + self.name = name + path = property(_getpath, _setpath) + + def _getlinkpath(self): + return self.linkname + def _setlinkpath(self, linkname): + self.linkname = linkname + linkpath = property(_getlinkpath, _setlinkpath) + + def __repr__(self): + return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self)) + + def get_info(self): + """Return the TarInfo's attributes as a dictionary. + """ + info = { + "name": self.name, + "mode": self.mode & 0o7777, + "uid": self.uid, + "gid": self.gid, + "size": self.size, + "mtime": self.mtime, + "chksum": self.chksum, + "type": self.type, + "linkname": self.linkname, + "uname": self.uname, + "gname": self.gname, + "devmajor": self.devmajor, + "devminor": self.devminor + } + + if info["type"] == DIRTYPE and not info["name"].endswith("/"): + info["name"] += "/" + + return info + + def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="surrogateescape"): + """Return a tar header as a string of 512 byte blocks. 
+ """ + info = self.get_info() + + if format == USTAR_FORMAT: + return self.create_ustar_header(info, encoding, errors) + elif format == GNU_FORMAT: + return self.create_gnu_header(info, encoding, errors) + elif format == PAX_FORMAT: + return self.create_pax_header(info, encoding) + else: + raise ValueError("invalid format") + + def create_ustar_header(self, info, encoding, errors): + """Return the object as a ustar header block. + """ + info["magic"] = POSIX_MAGIC + + if len(info["linkname"]) > LENGTH_LINK: + raise ValueError("linkname is too long") + + if len(info["name"]) > LENGTH_NAME: + info["prefix"], info["name"] = self._posix_split_name(info["name"]) + + return self._create_header(info, USTAR_FORMAT, encoding, errors) + + def create_gnu_header(self, info, encoding, errors): + """Return the object as a GNU header block sequence. + """ + info["magic"] = GNU_MAGIC + + buf = b"" + if len(info["linkname"]) > LENGTH_LINK: + buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK, encoding, errors) + + if len(info["name"]) > LENGTH_NAME: + buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME, encoding, errors) + + return buf + self._create_header(info, GNU_FORMAT, encoding, errors) + + def create_pax_header(self, info, encoding): + """Return the object as a ustar header block. If it cannot be + represented this way, prepend a pax extended header sequence + with supplement information. + """ + info["magic"] = POSIX_MAGIC + pax_headers = self.pax_headers.copy() + + # Test string fields for values that exceed the field length or cannot + # be represented in ASCII encoding. + for name, hname, length in ( + ("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK), + ("uname", "uname", 32), ("gname", "gname", 32)): + + if hname in pax_headers: + # The pax header has priority. + continue + + # Try to encode the string as ASCII. 
+ try: + info[name].encode("ascii", "strict") + except UnicodeEncodeError: + pax_headers[hname] = info[name] + continue + + if len(info[name]) > length: + pax_headers[hname] = info[name] + + # Test number fields for values that exceed the field limit or values + # that like to be stored as float. + for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)): + if name in pax_headers: + # The pax header has priority. Avoid overflow. + info[name] = 0 + continue + + val = info[name] + if not 0 <= val < 8 ** (digits - 1) or isinstance(val, float): + pax_headers[name] = str(val) + info[name] = 0 + + # Create a pax extended header if necessary. + if pax_headers: + buf = self._create_pax_generic_header(pax_headers, XHDTYPE, encoding) + else: + buf = b"" + + return buf + self._create_header(info, USTAR_FORMAT, "ascii", "replace") + + @classmethod + def create_pax_global_header(cls, pax_headers): + """Return the object as a pax global header block sequence. + """ + return cls._create_pax_generic_header(pax_headers, XGLTYPE, "utf8") + + def _posix_split_name(self, name): + """Split a name longer than 100 chars into a prefix + and a name part. + """ + prefix = name[:LENGTH_PREFIX + 1] + while prefix and prefix[-1] != "/": + prefix = prefix[:-1] + + name = name[len(prefix):] + prefix = prefix[:-1] + + if not prefix or len(name) > LENGTH_NAME: + raise ValueError("name is too long") + return prefix, name + + @staticmethod + def _create_header(info, format, encoding, errors): + """Return a header block. info is a dictionary with file + information, format must be one of the *_FORMAT constants. 
+ """ + parts = [ + stn(info.get("name", ""), 100, encoding, errors), + itn(info.get("mode", 0) & 0o7777, 8, format), + itn(info.get("uid", 0), 8, format), + itn(info.get("gid", 0), 8, format), + itn(info.get("size", 0), 12, format), + itn(info.get("mtime", 0), 12, format), + b" ", # checksum field + info.get("type", REGTYPE), + stn(info.get("linkname", ""), 100, encoding, errors), + info.get("magic", POSIX_MAGIC), + stn(info.get("uname", ""), 32, encoding, errors), + stn(info.get("gname", ""), 32, encoding, errors), + itn(info.get("devmajor", 0), 8, format), + itn(info.get("devminor", 0), 8, format), + stn(info.get("prefix", ""), 155, encoding, errors) + ] + + buf = struct.pack("%ds" % BLOCKSIZE, b"".join(parts)) + chksum = calc_chksums(buf[-BLOCKSIZE:])[0] + buf = buf[:-364] + ("%06o\0" % chksum).encode("ascii") + buf[-357:] + return buf + + @staticmethod + def _create_payload(payload): + """Return the string payload filled with zero bytes + up to the next 512 byte border. + """ + blocks, remainder = divmod(len(payload), BLOCKSIZE) + if remainder > 0: + payload += (BLOCKSIZE - remainder) * NUL + return payload + + @classmethod + def _create_gnu_long_header(cls, name, type, encoding, errors): + """Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence + for name. + """ + name = name.encode(encoding, errors) + NUL + + info = {} + info["name"] = "././@LongLink" + info["type"] = type + info["size"] = len(name) + info["magic"] = GNU_MAGIC + + # create extended header + name blocks. + return cls._create_header(info, USTAR_FORMAT, encoding, errors) + \ + cls._create_payload(name) + + @classmethod + def _create_pax_generic_header(cls, pax_headers, type, encoding): + """Return a POSIX.1-2008 extended or global header sequence + that contains a list of keyword, value pairs. The values + must be strings. + """ + # Check if one of the fields contains surrogate characters and thereby + # forces hdrcharset=BINARY, see _proc_pax() for more information. 
+ binary = False + for keyword, value in pax_headers.items(): + try: + value.encode("utf8", "strict") + except UnicodeEncodeError: + binary = True + break + + records = b"" + if binary: + # Put the hdrcharset field at the beginning of the header. + records += b"21 hdrcharset=BINARY\n" + + for keyword, value in pax_headers.items(): + keyword = keyword.encode("utf8") + if binary: + # Try to restore the original byte representation of `value'. + # Needless to say, that the encoding must match the string. + value = value.encode(encoding, "surrogateescape") + else: + value = value.encode("utf8") + + l = len(keyword) + len(value) + 3 # ' ' + '=' + '\n' + n = p = 0 + while True: + n = l + len(str(p)) + if n == p: + break + p = n + records += bytes(str(p), "ascii") + b" " + keyword + b"=" + value + b"\n" + + # We use a hardcoded "././@PaxHeader" name like star does + # instead of the one that POSIX recommends. + info = {} + info["name"] = "././@PaxHeader" + info["type"] = type + info["size"] = len(records) + info["magic"] = POSIX_MAGIC + + # Create pax header + record blocks. + return cls._create_header(info, USTAR_FORMAT, "ascii", "replace") + \ + cls._create_payload(records) + + @classmethod + def frombuf(cls, buf, encoding, errors): + """Construct a TarInfo object from a 512 byte bytes object. 
+ """ + if len(buf) == 0: + raise EmptyHeaderError("empty header") + if len(buf) != BLOCKSIZE: + raise TruncatedHeaderError("truncated header") + if buf.count(NUL) == BLOCKSIZE: + raise EOFHeaderError("end of file header") + + chksum = nti(buf[148:156]) + if chksum not in calc_chksums(buf): + raise InvalidHeaderError("bad checksum") + + obj = cls() + obj.name = nts(buf[0:100], encoding, errors) + obj.mode = nti(buf[100:108]) + obj.uid = nti(buf[108:116]) + obj.gid = nti(buf[116:124]) + obj.size = nti(buf[124:136]) + obj.mtime = nti(buf[136:148]) + obj.chksum = chksum + obj.type = buf[156:157] + obj.linkname = nts(buf[157:257], encoding, errors) + obj.uname = nts(buf[265:297], encoding, errors) + obj.gname = nts(buf[297:329], encoding, errors) + obj.devmajor = nti(buf[329:337]) + obj.devminor = nti(buf[337:345]) + prefix = nts(buf[345:500], encoding, errors) + + # Old V7 tar format represents a directory as a regular + # file with a trailing slash. + if obj.type == AREGTYPE and obj.name.endswith("/"): + obj.type = DIRTYPE + + # The old GNU sparse format occupies some of the unused + # space in the buffer for up to 4 sparse structures. + # Save the them for later processing in _proc_sparse(). + if obj.type == GNUTYPE_SPARSE: + pos = 386 + structs = [] + for i in range(4): + try: + offset = nti(buf[pos:pos + 12]) + numbytes = nti(buf[pos + 12:pos + 24]) + except ValueError: + break + structs.append((offset, numbytes)) + pos += 24 + isextended = bool(buf[482]) + origsize = nti(buf[483:495]) + obj._sparse_structs = (structs, isextended, origsize) + + # Remove redundant slashes from directories. + if obj.isdir(): + obj.name = obj.name.rstrip("/") + + # Reconstruct a ustar longname. + if prefix and obj.type not in GNU_TYPES: + obj.name = prefix + "/" + obj.name + return obj + + @classmethod + def fromtarfile(cls, tarfile): + """Return the next TarInfo object from TarFile object + tarfile. 
+ """ + buf = tarfile.fileobj.read(BLOCKSIZE) + obj = cls.frombuf(buf, tarfile.encoding, tarfile.errors) + obj.offset = tarfile.fileobj.tell() - BLOCKSIZE + return obj._proc_member(tarfile) + + #-------------------------------------------------------------------------- + # The following are methods that are called depending on the type of a + # member. The entry point is _proc_member() which can be overridden in a + # subclass to add custom _proc_*() methods. A _proc_*() method MUST + # implement the following + # operations: + # 1. Set self.offset_data to the position where the data blocks begin, + # if there is data that follows. + # 2. Set tarfile.offset to the position where the next member's header will + # begin. + # 3. Return self or another valid TarInfo object. + def _proc_member(self, tarfile): + """Choose the right processing method depending on + the type and call it. + """ + if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK): + return self._proc_gnulong(tarfile) + elif self.type == GNUTYPE_SPARSE: + return self._proc_sparse(tarfile) + elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE): + return self._proc_pax(tarfile) + else: + return self._proc_builtin(tarfile) + + def _proc_builtin(self, tarfile): + """Process a builtin type or an unknown type which + will be treated as a regular file. + """ + self.offset_data = tarfile.fileobj.tell() + offset = self.offset_data + if self.isreg() or self.type not in SUPPORTED_TYPES: + # Skip the following data blocks. + offset += self._block(self.size) + tarfile.offset = offset + + # Patch the TarInfo object with saved global + # header information. + self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors) + + return self + + def _proc_gnulong(self, tarfile): + """Process the blocks that hold a GNU longname + or longlink member. + """ + buf = tarfile.fileobj.read(self._block(self.size)) + + # Fetch the next header and process it. 
+ try: + next = self.fromtarfile(tarfile) + except HeaderError: + raise SubsequentHeaderError("missing or bad subsequent header") + + # Patch the TarInfo object from the next header with + # the longname information. + next.offset = self.offset + if self.type == GNUTYPE_LONGNAME: + next.name = nts(buf, tarfile.encoding, tarfile.errors) + elif self.type == GNUTYPE_LONGLINK: + next.linkname = nts(buf, tarfile.encoding, tarfile.errors) + + return next + + def _proc_sparse(self, tarfile): + """Process a GNU sparse header plus extra headers. + """ + # We already collected some sparse structures in frombuf(). + structs, isextended, origsize = self._sparse_structs + del self._sparse_structs + + # Collect sparse structures from extended header blocks. + while isextended: + buf = tarfile.fileobj.read(BLOCKSIZE) + pos = 0 + for i in range(21): + try: + offset = nti(buf[pos:pos + 12]) + numbytes = nti(buf[pos + 12:pos + 24]) + except ValueError: + break + if offset and numbytes: + structs.append((offset, numbytes)) + pos += 24 + isextended = bool(buf[504]) + self.sparse = structs + + self.offset_data = tarfile.fileobj.tell() + tarfile.offset = self.offset_data + self._block(self.size) + self.size = origsize + return self + + def _proc_pax(self, tarfile): + """Process an extended or global header as described in + POSIX.1-2008. + """ + # Read the header information. + buf = tarfile.fileobj.read(self._block(self.size)) + + # A pax header stores supplemental information for either + # the following file (extended) or all following files + # (global). + if self.type == XGLTYPE: + pax_headers = tarfile.pax_headers + else: + pax_headers = tarfile.pax_headers.copy() + + # Check if the pax header contains a hdrcharset field. This tells us + # the encoding of the path, linkpath, uname and gname fields. Normally, + # these fields are UTF-8 encoded but since POSIX.1-2008 tar + # implementations are allowed to store them as raw binary strings if + # the translation to UTF-8 fails. 
+ match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf) + if match is not None: + pax_headers["hdrcharset"] = match.group(1).decode("utf8") + + # For the time being, we don't care about anything other than "BINARY". + # The only other value that is currently allowed by the standard is + # "ISO-IR 10646 2000 UTF-8" in other words UTF-8. + hdrcharset = pax_headers.get("hdrcharset") + if hdrcharset == "BINARY": + encoding = tarfile.encoding + else: + encoding = "utf8" + + # Parse pax header information. A record looks like that: + # "%d %s=%s\n" % (length, keyword, value). length is the size + # of the complete record including the length field itself and + # the newline. keyword and value are both UTF-8 encoded strings. + regex = re.compile(br"(\d+) ([^=]+)=") + pos = 0 + while True: + match = regex.match(buf, pos) + if not match: + break + + length, keyword = match.groups() + length = int(length) + value = buf[match.end(2) + 1:match.start(1) + length - 1] + + # Normally, we could just use "utf8" as the encoding and "strict" + # as the error handler, but we better not take the risk. For + # example, GNU tar <= 1.23 is known to store filenames it cannot + # translate to UTF-8 as raw strings (unfortunately without a + # hdrcharset=BINARY header). + # We first try the strict standard encoding, and if that fails we + # fall back on the user's encoding and error handler. + keyword = self._decode_pax_field(keyword, "utf8", "utf8", + tarfile.errors) + if keyword in PAX_NAME_FIELDS: + value = self._decode_pax_field(value, encoding, tarfile.encoding, + tarfile.errors) + else: + value = self._decode_pax_field(value, "utf8", "utf8", + tarfile.errors) + + pax_headers[keyword] = value + pos += length + + # Fetch the next header. + try: + next = self.fromtarfile(tarfile) + except HeaderError: + raise SubsequentHeaderError("missing or bad subsequent header") + + # Process GNU sparse information. + if "GNU.sparse.map" in pax_headers: + # GNU extended sparse format version 0.1. 
+ self._proc_gnusparse_01(next, pax_headers) + + elif "GNU.sparse.size" in pax_headers: + # GNU extended sparse format version 0.0. + self._proc_gnusparse_00(next, pax_headers, buf) + + elif pax_headers.get("GNU.sparse.major") == "1" and pax_headers.get("GNU.sparse.minor") == "0": + # GNU extended sparse format version 1.0. + self._proc_gnusparse_10(next, pax_headers, tarfile) + + if self.type in (XHDTYPE, SOLARIS_XHDTYPE): + # Patch the TarInfo object with the extended header info. + next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors) + next.offset = self.offset + + if "size" in pax_headers: + # If the extended header replaces the size field, + # we need to recalculate the offset where the next + # header starts. + offset = next.offset_data + if next.isreg() or next.type not in SUPPORTED_TYPES: + offset += next._block(next.size) + tarfile.offset = offset + + return next + + def _proc_gnusparse_00(self, next, pax_headers, buf): + """Process a GNU tar extended sparse header, version 0.0. + """ + offsets = [] + for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf): + offsets.append(int(match.group(1))) + numbytes = [] + for match in re.finditer(br"\d+ GNU.sparse.numbytes=(\d+)\n", buf): + numbytes.append(int(match.group(1))) + next.sparse = list(zip(offsets, numbytes)) + + def _proc_gnusparse_01(self, next, pax_headers): + """Process a GNU tar extended sparse header, version 0.1. + """ + sparse = [int(x) for x in pax_headers["GNU.sparse.map"].split(",")] + next.sparse = list(zip(sparse[::2], sparse[1::2])) + + def _proc_gnusparse_10(self, next, pax_headers, tarfile): + """Process a GNU tar extended sparse header, version 1.0. 
+ """ + fields = None + sparse = [] + buf = tarfile.fileobj.read(BLOCKSIZE) + fields, buf = buf.split(b"\n", 1) + fields = int(fields) + while len(sparse) < fields * 2: + if b"\n" not in buf: + buf += tarfile.fileobj.read(BLOCKSIZE) + number, buf = buf.split(b"\n", 1) + sparse.append(int(number)) + next.offset_data = tarfile.fileobj.tell() + next.sparse = list(zip(sparse[::2], sparse[1::2])) + + def _apply_pax_info(self, pax_headers, encoding, errors): + """Replace fields with supplemental information from a previous + pax extended or global header. + """ + for keyword, value in pax_headers.items(): + if keyword == "GNU.sparse.name": + setattr(self, "path", value) + elif keyword == "GNU.sparse.size": + setattr(self, "size", int(value)) + elif keyword == "GNU.sparse.realsize": + setattr(self, "size", int(value)) + elif keyword in PAX_FIELDS: + if keyword in PAX_NUMBER_FIELDS: + try: + value = PAX_NUMBER_FIELDS[keyword](value) + except ValueError: + value = 0 + if keyword == "path": + value = value.rstrip("/") + setattr(self, keyword, value) + + self.pax_headers = pax_headers.copy() + + def _decode_pax_field(self, value, encoding, fallback_encoding, fallback_errors): + """Decode a single field from a pax record. + """ + try: + return value.decode(encoding, "strict") + except UnicodeDecodeError: + return value.decode(fallback_encoding, fallback_errors) + + def _block(self, count): + """Round up a byte count by BLOCKSIZE and return it, + e.g. _block(834) => 1024. 
+ """ + blocks, remainder = divmod(count, BLOCKSIZE) + if remainder: + blocks += 1 + return blocks * BLOCKSIZE + + def isreg(self): + return self.type in REGULAR_TYPES + def isfile(self): + return self.isreg() + def isdir(self): + return self.type == DIRTYPE + def issym(self): + return self.type == SYMTYPE + def islnk(self): + return self.type == LNKTYPE + def ischr(self): + return self.type == CHRTYPE + def isblk(self): + return self.type == BLKTYPE + def isfifo(self): + return self.type == FIFOTYPE + def issparse(self): + return self.sparse is not None + def isdev(self): + return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE) +# class TarInfo + +class TarFile(object): + """The TarFile Class provides an interface to tar archives. + """ + + debug = 0 # May be set from 0 (no msgs) to 3 (all msgs) + + dereference = False # If true, add content of linked file to the + # tar file, else the link. + + ignore_zeros = False # If true, skips empty or invalid blocks and + # continues processing. + + errorlevel = 1 # If 0, fatal errors only appear in debug + # messages (if debug >= 0). If > 0, errors + # are passed to the caller as exceptions. + + format = DEFAULT_FORMAT # The format to use when creating an archive. + + encoding = ENCODING # Encoding for 8-bit character strings. + + errors = None # Error handler for unicode conversion. + + tarinfo = TarInfo # The default TarInfo class to use. + + fileobject = ExFileObject # The default ExFileObject class to use. + + def __init__(self, name=None, mode="r", fileobj=None, format=None, + tarinfo=None, dereference=None, ignore_zeros=None, encoding=None, + errors="surrogateescape", pax_headers=None, debug=None, errorlevel=None): + """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to + read from an existing archive, 'a' to append data to an existing + file or 'w' to create a new file overwriting an existing one. `mode' + defaults to 'r'. + If `fileobj' is given, it is used for reading or writing data. 
If it + can be determined, `mode' is overridden by `fileobj's mode. + `fileobj' is not closed, when TarFile is closed. + """ + if len(mode) > 1 or mode not in "raw": + raise ValueError("mode must be 'r', 'a' or 'w'") + self.mode = mode + self._mode = {"r": "rb", "a": "r+b", "w": "wb"}[mode] + + if not fileobj: + if self.mode == "a" and not os.path.exists(name): + # Create nonexistent files in append mode. + self.mode = "w" + self._mode = "wb" + fileobj = bltn_open(name, self._mode) + self._extfileobj = False + else: + if name is None and hasattr(fileobj, "name"): + name = fileobj.name + if hasattr(fileobj, "mode"): + self._mode = fileobj.mode + self._extfileobj = True + self.name = os.path.abspath(name) if name else None + self.fileobj = fileobj + + # Init attributes. + if format is not None: + self.format = format + if tarinfo is not None: + self.tarinfo = tarinfo + if dereference is not None: + self.dereference = dereference + if ignore_zeros is not None: + self.ignore_zeros = ignore_zeros + if encoding is not None: + self.encoding = encoding + self.errors = errors + + if pax_headers is not None and self.format == PAX_FORMAT: + self.pax_headers = pax_headers + else: + self.pax_headers = {} + + if debug is not None: + self.debug = debug + if errorlevel is not None: + self.errorlevel = errorlevel + + # Init datastructures. + self.closed = False + self.members = [] # list of members as TarInfo objects + self._loaded = False # flag if all members have been read + self.offset = self.fileobj.tell() + # current position in the archive file + self.inodes = {} # dictionary caching the inodes of + # archive members already added + + try: + if self.mode == "r": + self.firstmember = None + self.firstmember = self.next() + + if self.mode == "a": + # Move to the end of the archive, + # before the first empty block. 
+ while True: + self.fileobj.seek(self.offset) + try: + tarinfo = self.tarinfo.fromtarfile(self) + self.members.append(tarinfo) + except EOFHeaderError: + self.fileobj.seek(self.offset) + break + except HeaderError as e: + raise ReadError(str(e)) + + if self.mode in "aw": + self._loaded = True + + if self.pax_headers: + buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy()) + self.fileobj.write(buf) + self.offset += len(buf) + except: + if not self._extfileobj: + self.fileobj.close() + self.closed = True + raise + + #-------------------------------------------------------------------------- + # Below are the classmethods which act as alternate constructors to the + # TarFile class. The open() method is the only one that is needed for + # public use; it is the "super"-constructor and is able to select an + # adequate "sub"-constructor for a particular compression using the mapping + # from OPEN_METH. + # + # This concept allows one to subclass TarFile without losing the comfort of + # the super-constructor. A sub-constructor is registered and made available + # by adding it to the mapping in OPEN_METH. + + @classmethod + def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs): + """Open a tar archive for reading, writing or appending. Return + an appropriate TarFile class. 
+ + mode: + 'r' or 'r:*' open for reading with transparent compression + 'r:' open for reading exclusively uncompressed + 'r:gz' open for reading with gzip compression + 'r:bz2' open for reading with bzip2 compression + 'a' or 'a:' open for appending, creating the file if necessary + 'w' or 'w:' open for writing without compression + 'w:gz' open for writing with gzip compression + 'w:bz2' open for writing with bzip2 compression + + 'r|*' open a stream of tar blocks with transparent compression + 'r|' open an uncompressed stream of tar blocks for reading + 'r|gz' open a gzip compressed stream of tar blocks + 'r|bz2' open a bzip2 compressed stream of tar blocks + 'w|' open an uncompressed stream for writing + 'w|gz' open a gzip compressed stream for writing + 'w|bz2' open a bzip2 compressed stream for writing + """ + + if not name and not fileobj: + raise ValueError("nothing to open") + + if mode in ("r", "r:*"): + # Find out which *open() is appropriate for opening the file. + for comptype in cls.OPEN_METH: + func = getattr(cls, cls.OPEN_METH[comptype]) + if fileobj is not None: + saved_pos = fileobj.tell() + try: + return func(name, "r", fileobj, **kwargs) + except (ReadError, CompressionError) as e: + if fileobj is not None: + fileobj.seek(saved_pos) + continue + raise ReadError("file could not be opened successfully") + + elif ":" in mode: + filemode, comptype = mode.split(":", 1) + filemode = filemode or "r" + comptype = comptype or "tar" + + # Select the *open() function according to + # given compression. 
+ if comptype in cls.OPEN_METH: + func = getattr(cls, cls.OPEN_METH[comptype]) + else: + raise CompressionError("unknown compression type %r" % comptype) + return func(name, filemode, fileobj, **kwargs) + + elif "|" in mode: + filemode, comptype = mode.split("|", 1) + filemode = filemode or "r" + comptype = comptype or "tar" + + if filemode not in "rw": + raise ValueError("mode must be 'r' or 'w'") + + stream = _Stream(name, filemode, comptype, fileobj, bufsize) + try: + t = cls(name, filemode, stream, **kwargs) + except: + stream.close() + raise + t._extfileobj = False + return t + + elif mode in "aw": + return cls.taropen(name, mode, fileobj, **kwargs) + + raise ValueError("undiscernible mode") + + @classmethod + def taropen(cls, name, mode="r", fileobj=None, **kwargs): + """Open uncompressed tar archive name for reading or writing. + """ + if len(mode) > 1 or mode not in "raw": + raise ValueError("mode must be 'r', 'a' or 'w'") + return cls(name, mode, fileobj, **kwargs) + + @classmethod + def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs): + """Open gzip compressed tar archive name for reading or writing. + Appending is not allowed. + """ + if len(mode) > 1 or mode not in "rw": + raise ValueError("mode must be 'r' or 'w'") + + try: + import gzip + gzip.GzipFile + except (ImportError, AttributeError): + raise CompressionError("gzip module is not available") + + extfileobj = fileobj is not None + try: + fileobj = gzip.GzipFile(name, mode + "b", compresslevel, fileobj) + t = cls.taropen(name, mode, fileobj, **kwargs) + except IOError: + if not extfileobj and fileobj is not None: + fileobj.close() + if fileobj is None: + raise + raise ReadError("not a gzip file") + except: + if not extfileobj and fileobj is not None: + fileobj.close() + raise + t._extfileobj = extfileobj + return t + + @classmethod + def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs): + """Open bzip2 compressed tar archive name for reading or writing. 
+ Appending is not allowed. + """ + if len(mode) > 1 or mode not in "rw": + raise ValueError("mode must be 'r' or 'w'.") + + try: + import bz2 + except ImportError: + raise CompressionError("bz2 module is not available") + + if fileobj is not None: + fileobj = _BZ2Proxy(fileobj, mode) + else: + fileobj = bz2.BZ2File(name, mode, compresslevel=compresslevel) + + try: + t = cls.taropen(name, mode, fileobj, **kwargs) + except (IOError, EOFError): + fileobj.close() + raise ReadError("not a bzip2 file") + t._extfileobj = False + return t + + # All *open() methods are registered here. + OPEN_METH = { + "tar": "taropen", # uncompressed tar + "gz": "gzopen", # gzip compressed tar + "bz2": "bz2open" # bzip2 compressed tar + } + + #-------------------------------------------------------------------------- + # The public methods which TarFile provides: + + def close(self): + """Close the TarFile. In write-mode, two finishing zero blocks are + appended to the archive. + """ + if self.closed: + return + + if self.mode in "aw": + self.fileobj.write(NUL * (BLOCKSIZE * 2)) + self.offset += (BLOCKSIZE * 2) + # fill up the end with zero-blocks + # (like option -b20 for tar does) + blocks, remainder = divmod(self.offset, RECORDSIZE) + if remainder > 0: + self.fileobj.write(NUL * (RECORDSIZE - remainder)) + + if not self._extfileobj: + self.fileobj.close() + self.closed = True + + def getmember(self, name): + """Return a TarInfo object for member `name'. If `name' can not be + found in the archive, KeyError is raised. If a member occurs more + than once in the archive, its last occurrence is assumed to be the + most up-to-date version. + """ + tarinfo = self._getmember(name) + if tarinfo is None: + raise KeyError("filename %r not found" % name) + return tarinfo + + def getmembers(self): + """Return the members of the archive as a list of TarInfo objects. The + list has the same order as the members in the archive. 
+ """ + self._check() + if not self._loaded: # if we want to obtain a list of + self._load() # all members, we first have to + # scan the whole archive. + return self.members + + def getnames(self): + """Return the members of the archive as a list of their names. It has + the same order as the list returned by getmembers(). + """ + return [tarinfo.name for tarinfo in self.getmembers()] + + def gettarinfo(self, name=None, arcname=None, fileobj=None): + """Create a TarInfo object for either the file `name' or the file + object `fileobj' (using os.fstat on its file descriptor). You can + modify some of the TarInfo's attributes before you add it using + addfile(). If given, `arcname' specifies an alternative name for the + file in the archive. + """ + self._check("aw") + + # When fileobj is given, replace name by + # fileobj's real name. + if fileobj is not None: + name = fileobj.name + + # Building the name of the member in the archive. + # Backward slashes are converted to forward slashes, + # Absolute paths are turned to relative paths. + if arcname is None: + arcname = name + drv, arcname = os.path.splitdrive(arcname) + arcname = arcname.replace(os.sep, "/") + arcname = arcname.lstrip("/") + + # Now, fill the TarInfo object with + # information specific for the file. + tarinfo = self.tarinfo() + tarinfo.tarfile = self + + # Use os.stat or os.lstat, depending on platform + # and if symlinks shall be resolved. + if fileobj is None: + if hasattr(os, "lstat") and not self.dereference: + statres = os.lstat(name) + else: + statres = os.stat(name) + else: + statres = os.fstat(fileobj.fileno()) + linkname = "" + + stmd = statres.st_mode + if stat.S_ISREG(stmd): + inode = (statres.st_ino, statres.st_dev) + if not self.dereference and statres.st_nlink > 1 and \ + inode in self.inodes and arcname != self.inodes[inode]: + # Is it a hardlink to an already + # archived file? + type = LNKTYPE + linkname = self.inodes[inode] + else: + # The inode is added only if its valid. 
+ # For win32 it is always 0. + type = REGTYPE + if inode[0]: + self.inodes[inode] = arcname + elif stat.S_ISDIR(stmd): + type = DIRTYPE + elif stat.S_ISFIFO(stmd): + type = FIFOTYPE + elif stat.S_ISLNK(stmd): + type = SYMTYPE + linkname = os.readlink(name) + elif stat.S_ISCHR(stmd): + type = CHRTYPE + elif stat.S_ISBLK(stmd): + type = BLKTYPE + else: + return None + + # Fill the TarInfo object with all + # information we can get. + tarinfo.name = arcname + tarinfo.mode = stmd + tarinfo.uid = statres.st_uid + tarinfo.gid = statres.st_gid + if type == REGTYPE: + tarinfo.size = statres.st_size + else: + tarinfo.size = 0 + tarinfo.mtime = statres.st_mtime + tarinfo.type = type + tarinfo.linkname = linkname + if pwd: + try: + tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0] + except KeyError: + pass + if grp: + try: + tarinfo.gname = grp.getgrgid(tarinfo.gid)[0] + except KeyError: + pass + + if type in (CHRTYPE, BLKTYPE): + if hasattr(os, "major") and hasattr(os, "minor"): + tarinfo.devmajor = os.major(statres.st_rdev) + tarinfo.devminor = os.minor(statres.st_rdev) + return tarinfo + + def list(self, verbose=True): + """Print a table of contents to sys.stdout. If `verbose' is False, only + the names of the members are printed. If it is True, an `ls -l'-like + output is produced. 
+ """ + self._check() + + for tarinfo in self: + if verbose: + print(filemode(tarinfo.mode), end=' ') + print("%s/%s" % (tarinfo.uname or tarinfo.uid, + tarinfo.gname or tarinfo.gid), end=' ') + if tarinfo.ischr() or tarinfo.isblk(): + print("%10s" % ("%d,%d" \ + % (tarinfo.devmajor, tarinfo.devminor)), end=' ') + else: + print("%10d" % tarinfo.size, end=' ') + print("%d-%02d-%02d %02d:%02d:%02d" \ + % time.localtime(tarinfo.mtime)[:6], end=' ') + + print(tarinfo.name + ("/" if tarinfo.isdir() else ""), end=' ') + + if verbose: + if tarinfo.issym(): + print("->", tarinfo.linkname, end=' ') + if tarinfo.islnk(): + print("link to", tarinfo.linkname, end=' ') + print() + + def add(self, name, arcname=None, recursive=True, exclude=None, filter=None): + """Add the file `name' to the archive. `name' may be any type of file + (directory, fifo, symbolic link, etc.). If given, `arcname' + specifies an alternative name for the file in the archive. + Directories are added recursively by default. This can be avoided by + setting `recursive' to False. `exclude' is a function that should + return True for each filename to be excluded. `filter' is a function + that expects a TarInfo object argument and returns the changed + TarInfo object, if it returns None the TarInfo object will be + excluded from the archive. + """ + self._check("aw") + + if arcname is None: + arcname = name + + # Exclude pathnames. + if exclude is not None: + import warnings + warnings.warn("use the filter argument instead", + DeprecationWarning, 2) + if exclude(name): + self._dbg(2, "tarfile: Excluded %r" % name) + return + + # Skip if somebody tries to archive the archive... + if self.name is not None and os.path.abspath(name) == self.name: + self._dbg(2, "tarfile: Skipped %r" % name) + return + + self._dbg(1, name) + + # Create a TarInfo object from the file. 
+ tarinfo = self.gettarinfo(name, arcname) + + if tarinfo is None: + self._dbg(1, "tarfile: Unsupported type %r" % name) + return + + # Change or exclude the TarInfo object. + if filter is not None: + tarinfo = filter(tarinfo) + if tarinfo is None: + self._dbg(2, "tarfile: Excluded %r" % name) + return + + # Append the tar header and data to the archive. + if tarinfo.isreg(): + f = bltn_open(name, "rb") + self.addfile(tarinfo, f) + f.close() + + elif tarinfo.isdir(): + self.addfile(tarinfo) + if recursive: + for f in os.listdir(name): + self.add(os.path.join(name, f), os.path.join(arcname, f), + recursive, exclude, filter=filter) + + else: + self.addfile(tarinfo) + + def addfile(self, tarinfo, fileobj=None): + """Add the TarInfo object `tarinfo' to the archive. If `fileobj' is + given, tarinfo.size bytes are read from it and added to the archive. + You can create TarInfo objects using gettarinfo(). + On Windows platforms, `fileobj' should always be opened with mode + 'rb' to avoid irritation about the file size. + """ + self._check("aw") + + tarinfo = copy.copy(tarinfo) + + buf = tarinfo.tobuf(self.format, self.encoding, self.errors) + self.fileobj.write(buf) + self.offset += len(buf) + + # If there's data to follow, append it. + if fileobj is not None: + copyfileobj(fileobj, self.fileobj, tarinfo.size) + blocks, remainder = divmod(tarinfo.size, BLOCKSIZE) + if remainder > 0: + self.fileobj.write(NUL * (BLOCKSIZE - remainder)) + blocks += 1 + self.offset += blocks * BLOCKSIZE + + self.members.append(tarinfo) + + def extractall(self, path=".", members=None): + """Extract all members from the archive to the current working + directory and set owner, modification time and permissions on + directories afterwards. `path' specifies a different directory + to extract to. `members' is optional and must be a subset of the + list returned by getmembers(). 
+ """ + directories = [] + + if members is None: + members = self + + for tarinfo in members: + if tarinfo.isdir(): + # Extract directories with a safe mode. + directories.append(tarinfo) + tarinfo = copy.copy(tarinfo) + tarinfo.mode = 0o700 + # Do not set_attrs directories, as we will do that further down + self.extract(tarinfo, path, set_attrs=not tarinfo.isdir()) + + # Reverse sort directories. + directories.sort(key=lambda a: a.name) + directories.reverse() + + # Set correct owner, mtime and filemode on directories. + for tarinfo in directories: + dirpath = os.path.join(path, tarinfo.name) + try: + self.chown(tarinfo, dirpath) + self.utime(tarinfo, dirpath) + self.chmod(tarinfo, dirpath) + except ExtractError as e: + if self.errorlevel > 1: + raise + else: + self._dbg(1, "tarfile: %s" % e) + + def extract(self, member, path="", set_attrs=True): + """Extract a member from the archive to the current working directory, + using its full name. Its file information is extracted as accurately + as possible. `member' may be a filename or a TarInfo object. You can + specify a different directory using `path'. File attributes (owner, + mtime, mode) are set unless `set_attrs' is False. + """ + self._check("r") + + if isinstance(member, str): + tarinfo = self.getmember(member) + else: + tarinfo = member + + # Prepare the link target for makelink(). + if tarinfo.islnk(): + tarinfo._link_target = os.path.join(path, tarinfo.linkname) + + try: + self._extract_member(tarinfo, os.path.join(path, tarinfo.name), + set_attrs=set_attrs) + except EnvironmentError as e: + if self.errorlevel > 0: + raise + else: + if e.filename is None: + self._dbg(1, "tarfile: %s" % e.strerror) + else: + self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename)) + except ExtractError as e: + if self.errorlevel > 1: + raise + else: + self._dbg(1, "tarfile: %s" % e) + + def extractfile(self, member): + """Extract a member from the archive as a file object. 
`member' may be + a filename or a TarInfo object. If `member' is a regular file, a + file-like object is returned. If `member' is a link, a file-like + object is constructed from the link's target. If `member' is none of + the above, None is returned. + The file-like object is read-only and provides the following + methods: read(), readline(), readlines(), seek() and tell() + """ + self._check("r") + + if isinstance(member, str): + tarinfo = self.getmember(member) + else: + tarinfo = member + + if tarinfo.isreg(): + return self.fileobject(self, tarinfo) + + elif tarinfo.type not in SUPPORTED_TYPES: + # If a member's type is unknown, it is treated as a + # regular file. + return self.fileobject(self, tarinfo) + + elif tarinfo.islnk() or tarinfo.issym(): + if isinstance(self.fileobj, _Stream): + # A small but ugly workaround for the case that someone tries + # to extract a (sym)link as a file-object from a non-seekable + # stream of tar blocks. + raise StreamError("cannot extract (sym)link as file object") + else: + # A (sym)link's file object is its target's file object. + return self.extractfile(self._find_link_target(tarinfo)) + else: + # If there's no data associated with the member (directory, chrdev, + # blkdev, etc.), return None instead of a file object. + return None + + def _extract_member(self, tarinfo, targetpath, set_attrs=True): + """Extract the TarInfo object tarinfo to a physical + file called targetpath. + """ + # Fetch the TarInfo object for the given name + # and build the destination pathname, replacing + # forward slashes to platform specific separators. + targetpath = targetpath.rstrip("/") + targetpath = targetpath.replace("/", os.sep) + + # Create all upper directories. + upperdirs = os.path.dirname(targetpath) + if upperdirs and not os.path.exists(upperdirs): + # Create directories that are not part of the archive with + # default permissions. 
+ os.makedirs(upperdirs) + + if tarinfo.islnk() or tarinfo.issym(): + self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname)) + else: + self._dbg(1, tarinfo.name) + + if tarinfo.isreg(): + self.makefile(tarinfo, targetpath) + elif tarinfo.isdir(): + self.makedir(tarinfo, targetpath) + elif tarinfo.isfifo(): + self.makefifo(tarinfo, targetpath) + elif tarinfo.ischr() or tarinfo.isblk(): + self.makedev(tarinfo, targetpath) + elif tarinfo.islnk() or tarinfo.issym(): + self.makelink(tarinfo, targetpath) + elif tarinfo.type not in SUPPORTED_TYPES: + self.makeunknown(tarinfo, targetpath) + else: + self.makefile(tarinfo, targetpath) + + if set_attrs: + self.chown(tarinfo, targetpath) + if not tarinfo.issym(): + self.chmod(tarinfo, targetpath) + self.utime(tarinfo, targetpath) + + #-------------------------------------------------------------------------- + # Below are the different file methods. They are called via + # _extract_member() when extract() is called. They can be replaced in a + # subclass to implement other functionality. + + def makedir(self, tarinfo, targetpath): + """Make a directory called targetpath. + """ + try: + # Use a safe mode for the directory, the real mode is set + # later in _extract_member(). + os.mkdir(targetpath, 0o700) + except EnvironmentError as e: + if e.errno != errno.EEXIST: + raise + + def makefile(self, tarinfo, targetpath): + """Make a file called targetpath. + """ + source = self.fileobj + source.seek(tarinfo.offset_data) + target = bltn_open(targetpath, "wb") + if tarinfo.sparse is not None: + for offset, size in tarinfo.sparse: + target.seek(offset) + copyfileobj(source, target, size) + else: + copyfileobj(source, target, tarinfo.size) + target.seek(tarinfo.size) + target.truncate() + target.close() + + def makeunknown(self, tarinfo, targetpath): + """Make a file from a TarInfo object with an unknown type + at targetpath. 
+ """ + self.makefile(tarinfo, targetpath) + self._dbg(1, "tarfile: Unknown file type %r, " \ + "extracted as regular file." % tarinfo.type) + + def makefifo(self, tarinfo, targetpath): + """Make a fifo called targetpath. + """ + if hasattr(os, "mkfifo"): + os.mkfifo(targetpath) + else: + raise ExtractError("fifo not supported by system") + + def makedev(self, tarinfo, targetpath): + """Make a character or block device called targetpath. + """ + if not hasattr(os, "mknod") or not hasattr(os, "makedev"): + raise ExtractError("special devices not supported by system") + + mode = tarinfo.mode + if tarinfo.isblk(): + mode |= stat.S_IFBLK + else: + mode |= stat.S_IFCHR + + os.mknod(targetpath, mode, + os.makedev(tarinfo.devmajor, tarinfo.devminor)) + + def makelink(self, tarinfo, targetpath): + """Make a (symbolic) link called targetpath. If it cannot be created + (platform limitation), we try to make a copy of the referenced file + instead of a link. + """ + try: + # For systems that support symbolic and hard links. + if tarinfo.issym(): + os.symlink(tarinfo.linkname, targetpath) + else: + # See extract(). + if os.path.exists(tarinfo._link_target): + os.link(tarinfo._link_target, targetpath) + else: + self._extract_member(self._find_link_target(tarinfo), + targetpath) + except symlink_exception: + if tarinfo.issym(): + linkpath = os.path.join(os.path.dirname(tarinfo.name), + tarinfo.linkname) + else: + linkpath = tarinfo.linkname + else: + try: + self._extract_member(self._find_link_target(tarinfo), + targetpath) + except KeyError: + raise ExtractError("unable to resolve link inside archive") + + def chown(self, tarinfo, targetpath): + """Set owner of targetpath according to tarinfo. + """ + if pwd and hasattr(os, "geteuid") and os.geteuid() == 0: + # We have to be root to do so. 
+ try: + g = grp.getgrnam(tarinfo.gname)[2] + except KeyError: + g = tarinfo.gid + try: + u = pwd.getpwnam(tarinfo.uname)[2] + except KeyError: + u = tarinfo.uid + try: + if tarinfo.issym() and hasattr(os, "lchown"): + os.lchown(targetpath, u, g) + else: + if sys.platform != "os2emx": + os.chown(targetpath, u, g) + except EnvironmentError as e: + raise ExtractError("could not change owner") + + def chmod(self, tarinfo, targetpath): + """Set file permissions of targetpath according to tarinfo. + """ + if hasattr(os, 'chmod'): + try: + os.chmod(targetpath, tarinfo.mode) + except EnvironmentError as e: + raise ExtractError("could not change mode") + + def utime(self, tarinfo, targetpath): + """Set modification time of targetpath according to tarinfo. + """ + if not hasattr(os, 'utime'): + return + try: + os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime)) + except EnvironmentError as e: + raise ExtractError("could not change modification time") + + #-------------------------------------------------------------------------- + def next(self): + """Return the next member of the archive as a TarInfo object, when + TarFile is opened for reading. Return None if there is no more + available. + """ + self._check("ra") + if self.firstmember is not None: + m = self.firstmember + self.firstmember = None + return m + + # Read the next block. 
+ self.fileobj.seek(self.offset) + tarinfo = None + while True: + try: + tarinfo = self.tarinfo.fromtarfile(self) + except EOFHeaderError as e: + if self.ignore_zeros: + self._dbg(2, "0x%X: %s" % (self.offset, e)) + self.offset += BLOCKSIZE + continue + except InvalidHeaderError as e: + if self.ignore_zeros: + self._dbg(2, "0x%X: %s" % (self.offset, e)) + self.offset += BLOCKSIZE + continue + elif self.offset == 0: + raise ReadError(str(e)) + except EmptyHeaderError: + if self.offset == 0: + raise ReadError("empty file") + except TruncatedHeaderError as e: + if self.offset == 0: + raise ReadError(str(e)) + except SubsequentHeaderError as e: + raise ReadError(str(e)) + break + + if tarinfo is not None: + self.members.append(tarinfo) + else: + self._loaded = True + + return tarinfo + + #-------------------------------------------------------------------------- + # Little helper methods: + + def _getmember(self, name, tarinfo=None, normalize=False): + """Find an archive member by name from bottom to top. + If tarinfo is given, it is used as the starting point. + """ + # Ensure that all members have been loaded. + members = self.getmembers() + + # Limit the member search list up to tarinfo. + if tarinfo is not None: + members = members[:members.index(tarinfo)] + + if normalize: + name = os.path.normpath(name) + + for member in reversed(members): + if normalize: + member_name = os.path.normpath(member.name) + else: + member_name = member.name + + if name == member_name: + return member + + def _load(self): + """Read through the entire archive file and look for readable + members. + """ + while True: + tarinfo = self.next() + if tarinfo is None: + break + self._loaded = True + + def _check(self, mode=None): + """Check if TarFile is still open, and if the operation's mode + corresponds to TarFile's mode. 
+ """ + if self.closed: + raise IOError("%s is closed" % self.__class__.__name__) + if mode is not None and self.mode not in mode: + raise IOError("bad operation for mode %r" % self.mode) + + def _find_link_target(self, tarinfo): + """Find the target member of a symlink or hardlink member in the + archive. + """ + if tarinfo.issym(): + # Always search the entire archive. + linkname = os.path.dirname(tarinfo.name) + "/" + tarinfo.linkname + limit = None + else: + # Search the archive before the link, because a hard link is + # just a reference to an already archived file. + linkname = tarinfo.linkname + limit = tarinfo + + member = self._getmember(linkname, tarinfo=limit, normalize=True) + if member is None: + raise KeyError("linkname %r not found" % linkname) + return member + + def __iter__(self): + """Provide an iterator object. + """ + if self._loaded: + return iter(self.members) + else: + return TarIter(self) + + def _dbg(self, level, msg): + """Write debugging output to sys.stderr. + """ + if level <= self.debug: + print(msg, file=sys.stderr) + + def __enter__(self): + self._check() + return self + + def __exit__(self, type, value, traceback): + if type is None: + self.close() + else: + # An exception occurred. We must not call close() because + # it would try to write end-of-archive blocks and padding. + if not self._extfileobj: + self.fileobj.close() + self.closed = True +# class TarFile + +class TarIter(object): + """Iterator Class. + + for tarinfo in TarFile(...): + suite... + """ + + def __init__(self, tarfile): + """Construct a TarIter object. + """ + self.tarfile = tarfile + self.index = 0 + def __iter__(self): + """Return iterator object. + """ + return self + + def __next__(self): + """Return the next item using TarFile's next() method. + When all members have been read, set TarFile as _loaded. 
+ """ + # Fix for SF #1100429: Under rare circumstances it can + # happen that getmembers() is called during iteration, + # which will cause TarIter to stop prematurely. + if not self.tarfile._loaded: + tarinfo = self.tarfile.next() + if not tarinfo: + self.tarfile._loaded = True + raise StopIteration + else: + try: + tarinfo = self.tarfile.members[self.index] + except IndexError: + raise StopIteration + self.index += 1 + return tarinfo + + next = __next__ # for Python 2.x + +#-------------------- +# exported functions +#-------------------- +def is_tarfile(name): + """Return True if name points to a tar archive that we + are able to handle, else return False. + """ + try: + t = open(name) + t.close() + return True + except TarError: + return False + +bltn_open = open +open = TarFile.open diff --git a/lib/python3.4/site-packages/pip/_vendor/distlib/compat.py b/lib/python3.4/site-packages/pip/_vendor/distlib/compat.py new file mode 100644 index 0000000..069ec77 --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/distlib/compat.py @@ -0,0 +1,1102 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2016 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. 
+# +from __future__ import absolute_import + +import os +import re +import sys + +if sys.version_info[0] < 3: # pragma: no cover + from StringIO import StringIO + string_types = basestring, + text_type = unicode + from types import FileType as file_type + import __builtin__ as builtins + import ConfigParser as configparser + from ._backport import shutil + from urlparse import urlparse, urlunparse, urljoin, urlsplit, urlunsplit + from urllib import (urlretrieve, quote as _quote, unquote, url2pathname, + pathname2url, ContentTooShortError, splittype) + + def quote(s): + if isinstance(s, unicode): + s = s.encode('utf-8') + return _quote(s) + + import urllib2 + from urllib2 import (Request, urlopen, URLError, HTTPError, + HTTPBasicAuthHandler, HTTPPasswordMgr, + HTTPSHandler, HTTPHandler, HTTPRedirectHandler, + build_opener) + import httplib + import xmlrpclib + import Queue as queue + from HTMLParser import HTMLParser + import htmlentitydefs + raw_input = raw_input + from itertools import ifilter as filter + from itertools import ifilterfalse as filterfalse + + _userprog = None + def splituser(host): + """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'.""" + global _userprog + if _userprog is None: + import re + _userprog = re.compile('^(.*)@(.*)$') + + match = _userprog.match(host) + if match: return match.group(1, 2) + return None, host + +else: # pragma: no cover + from io import StringIO + string_types = str, + text_type = str + from io import TextIOWrapper as file_type + import builtins + import configparser + import shutil + from urllib.parse import (urlparse, urlunparse, urljoin, splituser, quote, + unquote, urlsplit, urlunsplit, splittype) + from urllib.request import (urlopen, urlretrieve, Request, url2pathname, + pathname2url, + HTTPBasicAuthHandler, HTTPPasswordMgr, + HTTPSHandler, HTTPHandler, HTTPRedirectHandler, + build_opener) + from urllib.error import HTTPError, URLError, ContentTooShortError + import http.client as httplib 
+ import urllib.request as urllib2 + import xmlrpc.client as xmlrpclib + import queue + from html.parser import HTMLParser + import html.entities as htmlentitydefs + raw_input = input + from itertools import filterfalse + filter = filter + +try: + from ssl import match_hostname, CertificateError +except ImportError: # pragma: no cover + class CertificateError(ValueError): + pass + + + def _dnsname_match(dn, hostname, max_wildcards=1): + """Matching according to RFC 6125, section 6.4.3 + + http://tools.ietf.org/html/rfc6125#section-6.4.3 + """ + pats = [] + if not dn: + return False + + parts = dn.split('.') + leftmost, remainder = parts[0], parts[1:] + + wildcards = leftmost.count('*') + if wildcards > max_wildcards: + # Issue #17980: avoid denials of service by refusing more + # than one wildcard per fragment. A survery of established + # policy among SSL implementations showed it to be a + # reasonable choice. + raise CertificateError( + "too many wildcards in certificate DNS name: " + repr(dn)) + + # speed up common case w/o wildcards + if not wildcards: + return dn.lower() == hostname.lower() + + # RFC 6125, section 6.4.3, subitem 1. + # The client SHOULD NOT attempt to match a presented identifier in which + # the wildcard character comprises a label other than the left-most label. + if leftmost == '*': + # When '*' is a fragment by itself, it matches a non-empty dotless + # fragment. + pats.append('[^.]+') + elif leftmost.startswith('xn--') or hostname.startswith('xn--'): + # RFC 6125, section 6.4.3, subitem 3. + # The client SHOULD NOT attempt to match a presented identifier + # where the wildcard character is embedded within an A-label or + # U-label of an internationalized domain name. + pats.append(re.escape(leftmost)) + else: + # Otherwise, '*' matches any dotless string, e.g. 
www* + pats.append(re.escape(leftmost).replace(r'\*', '[^.]*')) + + # add the remaining fragments, ignore any wildcards + for frag in remainder: + pats.append(re.escape(frag)) + + pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) + return pat.match(hostname) + + + def match_hostname(cert, hostname): + """Verify that *cert* (in decoded format as returned by + SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 + rules are followed, but IP addresses are not accepted for *hostname*. + + CertificateError is raised on failure. On success, the function + returns nothing. + """ + if not cert: + raise ValueError("empty or no certificate, match_hostname needs a " + "SSL socket or SSL context with either " + "CERT_OPTIONAL or CERT_REQUIRED") + dnsnames = [] + san = cert.get('subjectAltName', ()) + for key, value in san: + if key == 'DNS': + if _dnsname_match(value, hostname): + return + dnsnames.append(value) + if not dnsnames: + # The subject is only checked when there is no dNSName entry + # in subjectAltName + for sub in cert.get('subject', ()): + for key, value in sub: + # XXX according to RFC 2818, the most specific Common Name + # must be used. 
+ if key == 'commonName': + if _dnsname_match(value, hostname): + return + dnsnames.append(value) + if len(dnsnames) > 1: + raise CertificateError("hostname %r " + "doesn't match either of %s" + % (hostname, ', '.join(map(repr, dnsnames)))) + elif len(dnsnames) == 1: + raise CertificateError("hostname %r " + "doesn't match %r" + % (hostname, dnsnames[0])) + else: + raise CertificateError("no appropriate commonName or " + "subjectAltName fields were found") + + +try: + from types import SimpleNamespace as Container +except ImportError: # pragma: no cover + class Container(object): + """ + A generic container for when multiple values need to be returned + """ + def __init__(self, **kwargs): + self.__dict__.update(kwargs) + + +try: + from shutil import which +except ImportError: # pragma: no cover + # Implementation from Python 3.3 + def which(cmd, mode=os.F_OK | os.X_OK, path=None): + """Given a command, mode, and a PATH string, return the path which + conforms to the given mode on the PATH, or None if there is no such + file. + + `mode` defaults to os.F_OK | os.X_OK. `path` defaults to the result + of os.environ.get("PATH"), or can be overridden with a custom search + path. + + """ + # Check that a given file can be accessed with the correct mode. + # Additionally check that `file` is not a directory, as on Windows + # directories pass the os.access check. + def _access_check(fn, mode): + return (os.path.exists(fn) and os.access(fn, mode) + and not os.path.isdir(fn)) + + # If we're given a path with a directory part, look it up directly rather + # than referring to PATH directories. This includes checking relative to the + # current directory, e.g. ./script + if os.path.dirname(cmd): + if _access_check(cmd, mode): + return cmd + return None + + if path is None: + path = os.environ.get("PATH", os.defpath) + if not path: + return None + path = path.split(os.pathsep) + + if sys.platform == "win32": + # The current directory takes precedence on Windows. 
+ if not os.curdir in path: + path.insert(0, os.curdir) + + # PATHEXT is necessary to check on Windows. + pathext = os.environ.get("PATHEXT", "").split(os.pathsep) + # See if the given file matches any of the expected path extensions. + # This will allow us to short circuit when given "python.exe". + # If it does match, only test that one, otherwise we have to try + # others. + if any(cmd.lower().endswith(ext.lower()) for ext in pathext): + files = [cmd] + else: + files = [cmd + ext for ext in pathext] + else: + # On other platforms you don't have things like PATHEXT to tell you + # what file suffixes are executable, so just pass on cmd as-is. + files = [cmd] + + seen = set() + for dir in path: + normdir = os.path.normcase(dir) + if not normdir in seen: + seen.add(normdir) + for thefile in files: + name = os.path.join(dir, thefile) + if _access_check(name, mode): + return name + return None + + +# ZipFile is a context manager in 2.7, but not in 2.6 + +from zipfile import ZipFile as BaseZipFile + +if hasattr(BaseZipFile, '__enter__'): # pragma: no cover + ZipFile = BaseZipFile +else: + from zipfile import ZipExtFile as BaseZipExtFile + + class ZipExtFile(BaseZipExtFile): + def __init__(self, base): + self.__dict__.update(base.__dict__) + + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.close() + # return None, so if an exception occurred, it will propagate + + class ZipFile(BaseZipFile): + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.close() + # return None, so if an exception occurred, it will propagate + + def open(self, *args, **kwargs): + base = BaseZipFile.open(self, *args, **kwargs) + return ZipExtFile(base) + +try: + from platform import python_implementation +except ImportError: # pragma: no cover + def python_implementation(): + """Return a string identifying the Python implementation.""" + if 'PyPy' in sys.version: + return 'PyPy' + if os.name == 'java': + return 'Jython' + if 
sys.version.startswith('IronPython'): + return 'IronPython' + return 'CPython' + +try: + import sysconfig +except ImportError: # pragma: no cover + from ._backport import sysconfig + +try: + callable = callable +except NameError: # pragma: no cover + from collections import Callable + + def callable(obj): + return isinstance(obj, Callable) + + +try: + fsencode = os.fsencode + fsdecode = os.fsdecode +except AttributeError: # pragma: no cover + _fsencoding = sys.getfilesystemencoding() + if _fsencoding == 'mbcs': + _fserrors = 'strict' + else: + _fserrors = 'surrogateescape' + + def fsencode(filename): + if isinstance(filename, bytes): + return filename + elif isinstance(filename, text_type): + return filename.encode(_fsencoding, _fserrors) + else: + raise TypeError("expect bytes or str, not %s" % + type(filename).__name__) + + def fsdecode(filename): + if isinstance(filename, text_type): + return filename + elif isinstance(filename, bytes): + return filename.decode(_fsencoding, _fserrors) + else: + raise TypeError("expect bytes or str, not %s" % + type(filename).__name__) + +try: + from tokenize import detect_encoding +except ImportError: # pragma: no cover + from codecs import BOM_UTF8, lookup + import re + + cookie_re = re.compile("coding[:=]\s*([-\w.]+)") + + def _get_normal_name(orig_enc): + """Imitates get_normal_name in tokenizer.c.""" + # Only care about the first 12 characters. + enc = orig_enc[:12].lower().replace("_", "-") + if enc == "utf-8" or enc.startswith("utf-8-"): + return "utf-8" + if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \ + enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")): + return "iso-8859-1" + return orig_enc + + def detect_encoding(readline): + """ + The detect_encoding() function is used to detect the encoding that should + be used to decode a Python source file. It requires one argment, readline, + in the same way as the tokenize() generator. 
+ + It will call readline a maximum of twice, and return the encoding used + (as a string) and a list of any lines (left as bytes) it has read in. + + It detects the encoding from the presence of a utf-8 bom or an encoding + cookie as specified in pep-0263. If both a bom and a cookie are present, + but disagree, a SyntaxError will be raised. If the encoding cookie is an + invalid charset, raise a SyntaxError. Note that if a utf-8 bom is found, + 'utf-8-sig' is returned. + + If no encoding is specified, then the default of 'utf-8' will be returned. + """ + try: + filename = readline.__self__.name + except AttributeError: + filename = None + bom_found = False + encoding = None + default = 'utf-8' + def read_or_stop(): + try: + return readline() + except StopIteration: + return b'' + + def find_cookie(line): + try: + # Decode as UTF-8. Either the line is an encoding declaration, + # in which case it should be pure ASCII, or it must be UTF-8 + # per default encoding. + line_string = line.decode('utf-8') + except UnicodeDecodeError: + msg = "invalid or missing encoding declaration" + if filename is not None: + msg = '{} for {!r}'.format(msg, filename) + raise SyntaxError(msg) + + matches = cookie_re.findall(line_string) + if not matches: + return None + encoding = _get_normal_name(matches[0]) + try: + codec = lookup(encoding) + except LookupError: + # This behaviour mimics the Python interpreter + if filename is None: + msg = "unknown encoding: " + encoding + else: + msg = "unknown encoding for {!r}: {}".format(filename, + encoding) + raise SyntaxError(msg) + + if bom_found: + if codec.name != 'utf-8': + # This behaviour mimics the Python interpreter + if filename is None: + msg = 'encoding problem: utf-8' + else: + msg = 'encoding problem for {!r}: utf-8'.format(filename) + raise SyntaxError(msg) + encoding += '-sig' + return encoding + + first = read_or_stop() + if first.startswith(BOM_UTF8): + bom_found = True + first = first[3:] + default = 'utf-8-sig' + if not 
first: + return default, [] + + encoding = find_cookie(first) + if encoding: + return encoding, [first] + + second = read_or_stop() + if not second: + return default, [first] + + encoding = find_cookie(second) + if encoding: + return encoding, [first, second] + + return default, [first, second] + +# For converting & <-> & etc. +try: + from html import escape +except ImportError: + from cgi import escape +if sys.version_info[:2] < (3, 4): + unescape = HTMLParser().unescape +else: + from html import unescape + +try: + from collections import ChainMap +except ImportError: # pragma: no cover + from collections import MutableMapping + + try: + from reprlib import recursive_repr as _recursive_repr + except ImportError: + def _recursive_repr(fillvalue='...'): + ''' + Decorator to make a repr function return fillvalue for a recursive + call + ''' + + def decorating_function(user_function): + repr_running = set() + + def wrapper(self): + key = id(self), get_ident() + if key in repr_running: + return fillvalue + repr_running.add(key) + try: + result = user_function(self) + finally: + repr_running.discard(key) + return result + + # Can't use functools.wraps() here because of bootstrap issues + wrapper.__module__ = getattr(user_function, '__module__') + wrapper.__doc__ = getattr(user_function, '__doc__') + wrapper.__name__ = getattr(user_function, '__name__') + wrapper.__annotations__ = getattr(user_function, '__annotations__', {}) + return wrapper + + return decorating_function + + class ChainMap(MutableMapping): + ''' A ChainMap groups multiple dicts (or other mappings) together + to create a single, updateable view. + + The underlying mappings are stored in a list. That list is public and can + accessed or updated using the *maps* attribute. There is no other state. + + Lookups search the underlying mappings successively until a key is found. + In contrast, writes, updates, and deletions only operate on the first + mapping. 
+ + ''' + + def __init__(self, *maps): + '''Initialize a ChainMap by setting *maps* to the given mappings. + If no mappings are provided, a single empty dictionary is used. + + ''' + self.maps = list(maps) or [{}] # always at least one map + + def __missing__(self, key): + raise KeyError(key) + + def __getitem__(self, key): + for mapping in self.maps: + try: + return mapping[key] # can't use 'key in mapping' with defaultdict + except KeyError: + pass + return self.__missing__(key) # support subclasses that define __missing__ + + def get(self, key, default=None): + return self[key] if key in self else default + + def __len__(self): + return len(set().union(*self.maps)) # reuses stored hash values if possible + + def __iter__(self): + return iter(set().union(*self.maps)) + + def __contains__(self, key): + return any(key in m for m in self.maps) + + def __bool__(self): + return any(self.maps) + + @_recursive_repr() + def __repr__(self): + return '{0.__class__.__name__}({1})'.format( + self, ', '.join(map(repr, self.maps))) + + @classmethod + def fromkeys(cls, iterable, *args): + 'Create a ChainMap with a single dict created from the iterable.' + return cls(dict.fromkeys(iterable, *args)) + + def copy(self): + 'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]' + return self.__class__(self.maps[0].copy(), *self.maps[1:]) + + __copy__ = copy + + def new_child(self): # like Django's Context.push() + 'New ChainMap with a new dict followed by all previous maps.' + return self.__class__({}, *self.maps) + + @property + def parents(self): # like Django's Context.pop() + 'New ChainMap from maps[1:].' + return self.__class__(*self.maps[1:]) + + def __setitem__(self, key, value): + self.maps[0][key] = value + + def __delitem__(self, key): + try: + del self.maps[0][key] + except KeyError: + raise KeyError('Key not found in the first mapping: {!r}'.format(key)) + + def popitem(self): + 'Remove and return an item pair from maps[0]. 
Raise KeyError is maps[0] is empty.' + try: + return self.maps[0].popitem() + except KeyError: + raise KeyError('No keys found in the first mapping.') + + def pop(self, key, *args): + 'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].' + try: + return self.maps[0].pop(key, *args) + except KeyError: + raise KeyError('Key not found in the first mapping: {!r}'.format(key)) + + def clear(self): + 'Clear maps[0], leaving maps[1:] intact.' + self.maps[0].clear() + +try: + from imp import cache_from_source +except ImportError: # pragma: no cover + def cache_from_source(path, debug_override=None): + assert path.endswith('.py') + if debug_override is None: + debug_override = __debug__ + if debug_override: + suffix = 'c' + else: + suffix = 'o' + return path + suffix + +try: + from collections import OrderedDict +except ImportError: # pragma: no cover +## {{{ http://code.activestate.com/recipes/576693/ (r9) +# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy. +# Passes Python2.7's test suite and incorporates all the latest updates. + try: + from thread import get_ident as _get_ident + except ImportError: + from dummy_thread import get_ident as _get_ident + + try: + from _abcoll import KeysView, ValuesView, ItemsView + except ImportError: + pass + + + class OrderedDict(dict): + 'Dictionary that remembers insertion order' + # An inherited dict maps keys to values. + # The inherited dict provides __getitem__, __len__, __contains__, and get. + # The remaining methods are order-aware. + # Big-O running times for all methods are the same as for regular dictionaries. + + # The internal self.__map dictionary maps keys to links in a doubly linked list. + # The circular doubly linked list starts and ends with a sentinel element. + # The sentinel element never gets deleted (this simplifies the algorithm). + # Each link is stored as a list of length three: [PREV, NEXT, KEY]. 
+ + def __init__(self, *args, **kwds): + '''Initialize an ordered dictionary. Signature is the same as for + regular dictionaries, but keyword arguments are not recommended + because their insertion order is arbitrary. + + ''' + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + try: + self.__root + except AttributeError: + self.__root = root = [] # sentinel node + root[:] = [root, root, None] + self.__map = {} + self.__update(*args, **kwds) + + def __setitem__(self, key, value, dict_setitem=dict.__setitem__): + 'od.__setitem__(i, y) <==> od[i]=y' + # Setting a new item creates a new link which goes at the end of the linked + # list, and the inherited dictionary is updated with the new key/value pair. + if key not in self: + root = self.__root + last = root[0] + last[1] = root[0] = self.__map[key] = [last, root, key] + dict_setitem(self, key, value) + + def __delitem__(self, key, dict_delitem=dict.__delitem__): + 'od.__delitem__(y) <==> del od[y]' + # Deleting an existing item uses self.__map to find the link which is + # then removed by updating the links in the predecessor and successor nodes. + dict_delitem(self, key) + link_prev, link_next, key = self.__map.pop(key) + link_prev[1] = link_next + link_next[0] = link_prev + + def __iter__(self): + 'od.__iter__() <==> iter(od)' + root = self.__root + curr = root[1] + while curr is not root: + yield curr[2] + curr = curr[1] + + def __reversed__(self): + 'od.__reversed__() <==> reversed(od)' + root = self.__root + curr = root[0] + while curr is not root: + yield curr[2] + curr = curr[0] + + def clear(self): + 'od.clear() -> None. Remove all items from od.' + try: + for node in self.__map.itervalues(): + del node[:] + root = self.__root + root[:] = [root, root, None] + self.__map.clear() + except AttributeError: + pass + dict.clear(self) + + def popitem(self, last=True): + '''od.popitem() -> (k, v), return and remove a (key, value) pair. 
+ Pairs are returned in LIFO order if last is true or FIFO order if false. + + ''' + if not self: + raise KeyError('dictionary is empty') + root = self.__root + if last: + link = root[0] + link_prev = link[0] + link_prev[1] = root + root[0] = link_prev + else: + link = root[1] + link_next = link[1] + root[1] = link_next + link_next[0] = root + key = link[2] + del self.__map[key] + value = dict.pop(self, key) + return key, value + + # -- the following methods do not depend on the internal structure -- + + def keys(self): + 'od.keys() -> list of keys in od' + return list(self) + + def values(self): + 'od.values() -> list of values in od' + return [self[key] for key in self] + + def items(self): + 'od.items() -> list of (key, value) pairs in od' + return [(key, self[key]) for key in self] + + def iterkeys(self): + 'od.iterkeys() -> an iterator over the keys in od' + return iter(self) + + def itervalues(self): + 'od.itervalues -> an iterator over the values in od' + for k in self: + yield self[k] + + def iteritems(self): + 'od.iteritems -> an iterator over the (key, value) items in od' + for k in self: + yield (k, self[k]) + + def update(*args, **kwds): + '''od.update(E, **F) -> None. Update od from dict/iterable E and F. 
+ + If E is a dict instance, does: for k in E: od[k] = E[k] + If E has a .keys() method, does: for k in E.keys(): od[k] = E[k] + Or if E is an iterable of items, does: for k, v in E: od[k] = v + In either case, this is followed by: for k, v in F.items(): od[k] = v + + ''' + if len(args) > 2: + raise TypeError('update() takes at most 2 positional ' + 'arguments (%d given)' % (len(args),)) + elif not args: + raise TypeError('update() takes at least 1 argument (0 given)') + self = args[0] + # Make progressively weaker assumptions about "other" + other = () + if len(args) == 2: + other = args[1] + if isinstance(other, dict): + for key in other: + self[key] = other[key] + elif hasattr(other, 'keys'): + for key in other.keys(): + self[key] = other[key] + else: + for key, value in other: + self[key] = value + for key, value in kwds.items(): + self[key] = value + + __update = update # let subclasses override update without breaking __init__ + + __marker = object() + + def pop(self, key, default=__marker): + '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value. + If key is not found, d is returned if given, otherwise KeyError is raised. + + ''' + if key in self: + result = self[key] + del self[key] + return result + if default is self.__marker: + raise KeyError(key) + return default + + def setdefault(self, key, default=None): + 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od' + if key in self: + return self[key] + self[key] = default + return default + + def __repr__(self, _repr_running=None): + 'od.__repr__() <==> repr(od)' + if not _repr_running: _repr_running = {} + call_key = id(self), _get_ident() + if call_key in _repr_running: + return '...' 
+ _repr_running[call_key] = 1 + try: + if not self: + return '%s()' % (self.__class__.__name__,) + return '%s(%r)' % (self.__class__.__name__, self.items()) + finally: + del _repr_running[call_key] + + def __reduce__(self): + 'Return state information for pickling' + items = [[k, self[k]] for k in self] + inst_dict = vars(self).copy() + for k in vars(OrderedDict()): + inst_dict.pop(k, None) + if inst_dict: + return (self.__class__, (items,), inst_dict) + return self.__class__, (items,) + + def copy(self): + 'od.copy() -> a shallow copy of od' + return self.__class__(self) + + @classmethod + def fromkeys(cls, iterable, value=None): + '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S + and values equal to v (which defaults to None). + + ''' + d = cls() + for key in iterable: + d[key] = value + return d + + def __eq__(self, other): + '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive + while comparison to a regular mapping is order-insensitive. + + ''' + if isinstance(other, OrderedDict): + return len(self)==len(other) and self.items() == other.items() + return dict.__eq__(self, other) + + def __ne__(self, other): + return not self == other + + # -- the following methods are only used in Python 2.7 -- + + def viewkeys(self): + "od.viewkeys() -> a set-like object providing a view on od's keys" + return KeysView(self) + + def viewvalues(self): + "od.viewvalues() -> an object providing a view on od's values" + return ValuesView(self) + + def viewitems(self): + "od.viewitems() -> a set-like object providing a view on od's items" + return ItemsView(self) + +try: + from logging.config import BaseConfigurator, valid_ident +except ImportError: # pragma: no cover + IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I) + + + def valid_ident(s): + m = IDENTIFIER.match(s) + if not m: + raise ValueError('Not a valid Python identifier: %r' % s) + return True + + + # The ConvertingXXX classes are wrappers around standard Python containers, + # and 
they serve to convert any suitable values in the container. The + # conversion converts base dicts, lists and tuples to their wrapped + # equivalents, whereas strings which match a conversion format are converted + # appropriately. + # + # Each wrapper should have a configurator attribute holding the actual + # configurator to use for conversion. + + class ConvertingDict(dict): + """A converting dictionary wrapper.""" + + def __getitem__(self, key): + value = dict.__getitem__(self, key) + result = self.configurator.convert(value) + #If the converted value is different, save for next time + if value is not result: + self[key] = result + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + def get(self, key, default=None): + value = dict.get(self, key, default) + result = self.configurator.convert(value) + #If the converted value is different, save for next time + if value is not result: + self[key] = result + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + def pop(self, key, default=None): + value = dict.pop(self, key, default) + result = self.configurator.convert(value) + if value is not result: + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + class ConvertingList(list): + """A converting list wrapper.""" + def __getitem__(self, key): + value = list.__getitem__(self, key) + result = self.configurator.convert(value) + #If the converted value is different, save for next time + if value is not result: + self[key] = result + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + def pop(self, idx=-1): + value = list.pop(self, idx) + result = self.configurator.convert(value) + if value is not result: + if type(result) in (ConvertingDict, 
ConvertingList, + ConvertingTuple): + result.parent = self + return result + + class ConvertingTuple(tuple): + """A converting tuple wrapper.""" + def __getitem__(self, key): + value = tuple.__getitem__(self, key) + result = self.configurator.convert(value) + if value is not result: + if type(result) in (ConvertingDict, ConvertingList, + ConvertingTuple): + result.parent = self + result.key = key + return result + + class BaseConfigurator(object): + """ + The configurator base class which defines some useful defaults. + """ + + CONVERT_PATTERN = re.compile(r'^(?P[a-z]+)://(?P.*)$') + + WORD_PATTERN = re.compile(r'^\s*(\w+)\s*') + DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*') + INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*') + DIGIT_PATTERN = re.compile(r'^\d+$') + + value_converters = { + 'ext' : 'ext_convert', + 'cfg' : 'cfg_convert', + } + + # We might want to use a different one, e.g. importlib + importer = staticmethod(__import__) + + def __init__(self, config): + self.config = ConvertingDict(config) + self.config.configurator = self + + def resolve(self, s): + """ + Resolve strings to objects using standard import and attribute + syntax. + """ + name = s.split('.') + used = name.pop(0) + try: + found = self.importer(used) + for frag in name: + used += '.' 
+ frag + try: + found = getattr(found, frag) + except AttributeError: + self.importer(used) + found = getattr(found, frag) + return found + except ImportError: + e, tb = sys.exc_info()[1:] + v = ValueError('Cannot resolve %r: %s' % (s, e)) + v.__cause__, v.__traceback__ = e, tb + raise v + + def ext_convert(self, value): + """Default converter for the ext:// protocol.""" + return self.resolve(value) + + def cfg_convert(self, value): + """Default converter for the cfg:// protocol.""" + rest = value + m = self.WORD_PATTERN.match(rest) + if m is None: + raise ValueError("Unable to convert %r" % value) + else: + rest = rest[m.end():] + d = self.config[m.groups()[0]] + #print d, rest + while rest: + m = self.DOT_PATTERN.match(rest) + if m: + d = d[m.groups()[0]] + else: + m = self.INDEX_PATTERN.match(rest) + if m: + idx = m.groups()[0] + if not self.DIGIT_PATTERN.match(idx): + d = d[idx] + else: + try: + n = int(idx) # try as number first (most likely) + d = d[n] + except TypeError: + d = d[idx] + if m: + rest = rest[m.end():] + else: + raise ValueError('Unable to convert ' + '%r at %r' % (value, rest)) + #rest should be empty + return d + + def convert(self, value): + """ + Convert values to an appropriate type. dicts, lists and tuples are + replaced by their converting alternatives. Strings are checked to + see if they have a conversion format and are converted if they do. 
+ """ + if not isinstance(value, ConvertingDict) and isinstance(value, dict): + value = ConvertingDict(value) + value.configurator = self + elif not isinstance(value, ConvertingList) and isinstance(value, list): + value = ConvertingList(value) + value.configurator = self + elif not isinstance(value, ConvertingTuple) and\ + isinstance(value, tuple): + value = ConvertingTuple(value) + value.configurator = self + elif isinstance(value, string_types): + m = self.CONVERT_PATTERN.match(value) + if m: + d = m.groupdict() + prefix = d['prefix'] + converter = self.value_converters.get(prefix, None) + if converter: + suffix = d['suffix'] + converter = getattr(self, converter) + value = converter(suffix) + return value + + def configure_custom(self, config): + """Configure an object with a user-supplied factory.""" + c = config.pop('()') + if not callable(c): + c = self.resolve(c) + props = config.pop('.', None) + # Check for valid identifiers + kwargs = dict([(k, config[k]) for k in config if valid_ident(k)]) + result = c(**kwargs) + if props: + for name, value in props.items(): + setattr(result, name, value) + return result + + def as_tuple(self, value): + """Utility function which converts lists to tuples.""" + if isinstance(value, list): + value = tuple(value) + return value diff --git a/lib/python3.4/site-packages/pip/_vendor/distlib/database.py b/lib/python3.4/site-packages/pip/_vendor/distlib/database.py new file mode 100644 index 0000000..7bc1914 --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/distlib/database.py @@ -0,0 +1,1312 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2016 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +"""PEP 376 implementation.""" + +from __future__ import unicode_literals + +import base64 +import codecs +import contextlib +import hashlib +import logging +import os +import posixpath +import sys +import zipimport + +from . 
import DistlibException, resources +from .compat import StringIO +from .version import get_scheme, UnsupportedVersionError +from .metadata import Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME +from .util import (parse_requirement, cached_property, parse_name_and_version, + read_exports, write_exports, CSVReader, CSVWriter) + + +__all__ = ['Distribution', 'BaseInstalledDistribution', + 'InstalledDistribution', 'EggInfoDistribution', + 'DistributionPath'] + + +logger = logging.getLogger(__name__) + +EXPORTS_FILENAME = 'pydist-exports.json' +COMMANDS_FILENAME = 'pydist-commands.json' + +DIST_FILES = ('INSTALLER', METADATA_FILENAME, 'RECORD', 'REQUESTED', + 'RESOURCES', EXPORTS_FILENAME, 'SHARED') + +DISTINFO_EXT = '.dist-info' + + +class _Cache(object): + """ + A simple cache mapping names and .dist-info paths to distributions + """ + def __init__(self): + """ + Initialise an instance. There is normally one for each DistributionPath. + """ + self.name = {} + self.path = {} + self.generated = False + + def clear(self): + """ + Clear the cache, setting it to its initial state. + """ + self.name.clear() + self.path.clear() + self.generated = False + + def add(self, dist): + """ + Add a distribution to the cache. + :param dist: The distribution to add. + """ + if dist.path not in self.path: + self.path[dist.path] = dist + self.name.setdefault(dist.key, []).append(dist) + + +class DistributionPath(object): + """ + Represents a set of distributions installed on a path (typically sys.path). + """ + def __init__(self, path=None, include_egg=False): + """ + Create an instance from a path, optionally including legacy (distutils/ + setuptools/distribute) distributions. + :param path: The path to use, as a list of directories. If not specified, + sys.path is used. + :param include_egg: If True, this instance will look for and return legacy + distributions as well as those based on PEP 376. 
+ """ + if path is None: + path = sys.path + self.path = path + self._include_dist = True + self._include_egg = include_egg + + self._cache = _Cache() + self._cache_egg = _Cache() + self._cache_enabled = True + self._scheme = get_scheme('default') + + def _get_cache_enabled(self): + return self._cache_enabled + + def _set_cache_enabled(self, value): + self._cache_enabled = value + + cache_enabled = property(_get_cache_enabled, _set_cache_enabled) + + def clear_cache(self): + """ + Clears the internal cache. + """ + self._cache.clear() + self._cache_egg.clear() + + + def _yield_distributions(self): + """ + Yield .dist-info and/or .egg(-info) distributions. + """ + # We need to check if we've seen some resources already, because on + # some Linux systems (e.g. some Debian/Ubuntu variants) there are + # symlinks which alias other files in the environment. + seen = set() + for path in self.path: + finder = resources.finder_for_path(path) + if finder is None: + continue + r = finder.find('') + if not r or not r.is_container: + continue + rset = sorted(r.resources) + for entry in rset: + r = finder.find(entry) + if not r or r.path in seen: + continue + if self._include_dist and entry.endswith(DISTINFO_EXT): + possible_filenames = [METADATA_FILENAME, WHEEL_METADATA_FILENAME] + for metadata_filename in possible_filenames: + metadata_path = posixpath.join(entry, metadata_filename) + pydist = finder.find(metadata_path) + if pydist: + break + else: + continue + + with contextlib.closing(pydist.as_stream()) as stream: + metadata = Metadata(fileobj=stream, scheme='legacy') + logger.debug('Found %s', r.path) + seen.add(r.path) + yield new_dist_class(r.path, metadata=metadata, + env=self) + elif self._include_egg and entry.endswith(('.egg-info', + '.egg')): + logger.debug('Found %s', r.path) + seen.add(r.path) + yield old_dist_class(r.path, self) + + def _generate_cache(self): + """ + Scan the path for distributions and populate the cache with + those that are found. 
+ """ + gen_dist = not self._cache.generated + gen_egg = self._include_egg and not self._cache_egg.generated + if gen_dist or gen_egg: + for dist in self._yield_distributions(): + if isinstance(dist, InstalledDistribution): + self._cache.add(dist) + else: + self._cache_egg.add(dist) + + if gen_dist: + self._cache.generated = True + if gen_egg: + self._cache_egg.generated = True + + @classmethod + def distinfo_dirname(cls, name, version): + """ + The *name* and *version* parameters are converted into their + filename-escaped form, i.e. any ``'-'`` characters are replaced + with ``'_'`` other than the one in ``'dist-info'`` and the one + separating the name from the version number. + + :parameter name: is converted to a standard distribution name by replacing + any runs of non- alphanumeric characters with a single + ``'-'``. + :type name: string + :parameter version: is converted to a standard version string. Spaces + become dots, and all other non-alphanumeric characters + (except dots) become dashes, with runs of multiple + dashes condensed to a single dash. + :type version: string + :returns: directory name + :rtype: string""" + name = name.replace('-', '_') + return '-'.join([name, version]) + DISTINFO_EXT + + def get_distributions(self): + """ + Provides an iterator that looks for distributions and returns + :class:`InstalledDistribution` or + :class:`EggInfoDistribution` instances for each one of them. + + :rtype: iterator of :class:`InstalledDistribution` and + :class:`EggInfoDistribution` instances + """ + if not self._cache_enabled: + for dist in self._yield_distributions(): + yield dist + else: + self._generate_cache() + + for dist in self._cache.path.values(): + yield dist + + if self._include_egg: + for dist in self._cache_egg.path.values(): + yield dist + + def get_distribution(self, name): + """ + Looks for a named distribution on the path. + + This function only returns the first result found, as no more than one + value is expected. 
If nothing is found, ``None`` is returned. + + :rtype: :class:`InstalledDistribution`, :class:`EggInfoDistribution` + or ``None`` + """ + result = None + name = name.lower() + if not self._cache_enabled: + for dist in self._yield_distributions(): + if dist.key == name: + result = dist + break + else: + self._generate_cache() + + if name in self._cache.name: + result = self._cache.name[name][0] + elif self._include_egg and name in self._cache_egg.name: + result = self._cache_egg.name[name][0] + return result + + def provides_distribution(self, name, version=None): + """ + Iterates over all distributions to find which distributions provide *name*. + If a *version* is provided, it will be used to filter the results. + + This function only returns the first result found, since no more than + one values are expected. If the directory is not found, returns ``None``. + + :parameter version: a version specifier that indicates the version + required, conforming to the format in ``PEP-345`` + + :type name: string + :type version: string + """ + matcher = None + if not version is None: + try: + matcher = self._scheme.matcher('%s (%s)' % (name, version)) + except ValueError: + raise DistlibException('invalid name or version: %r, %r' % + (name, version)) + + for dist in self.get_distributions(): + provided = dist.provides + + for p in provided: + p_name, p_ver = parse_name_and_version(p) + if matcher is None: + if p_name == name: + yield dist + break + else: + if p_name == name and matcher.match(p_ver): + yield dist + break + + def get_file_path(self, name, relative_path): + """ + Return the path to a resource file. + """ + dist = self.get_distribution(name) + if dist is None: + raise LookupError('no distribution named %r found' % name) + return dist.get_resource_path(relative_path) + + def get_exported_entries(self, category, name=None): + """ + Return all of the exported entries in a particular category. + + :param category: The category to search for entries. 
+ :param name: If specified, only entries with that name are returned. + """ + for dist in self.get_distributions(): + r = dist.exports + if category in r: + d = r[category] + if name is not None: + if name in d: + yield d[name] + else: + for v in d.values(): + yield v + + +class Distribution(object): + """ + A base class for distributions, whether installed or from indexes. + Either way, it must have some metadata, so that's all that's needed + for construction. + """ + + build_time_dependency = False + """ + Set to True if it's known to be only a build-time dependency (i.e. + not needed after installation). + """ + + requested = False + """A boolean that indicates whether the ``REQUESTED`` metadata file is + present (in other words, whether the package was installed by user + request or it was installed as a dependency).""" + + def __init__(self, metadata): + """ + Initialise an instance. + :param metadata: The instance of :class:`Metadata` describing this + distribution. + """ + self.metadata = metadata + self.name = metadata.name + self.key = self.name.lower() # for case-insensitive comparisons + self.version = metadata.version + self.locator = None + self.digest = None + self.extras = None # additional features requested + self.context = None # environment marker overrides + self.download_urls = set() + self.digests = {} + + @property + def source_url(self): + """ + The source archive download URL for this distribution. + """ + return self.metadata.source_url + + download_url = source_url # Backward compatibility + + @property + def name_and_version(self): + """ + A utility property which displays the name and version in parentheses. + """ + return '%s (%s)' % (self.name, self.version) + + @property + def provides(self): + """ + A set of distribution names and versions provided by this distribution. + :return: A set of "name (version)" strings. 
+ """ + plist = self.metadata.provides + s = '%s (%s)' % (self.name, self.version) + if s not in plist: + plist.append(s) + return plist + + def _get_requirements(self, req_attr): + md = self.metadata + logger.debug('Getting requirements from metadata %r', md.todict()) + reqts = getattr(md, req_attr) + return set(md.get_requirements(reqts, extras=self.extras, + env=self.context)) + + @property + def run_requires(self): + return self._get_requirements('run_requires') + + @property + def meta_requires(self): + return self._get_requirements('meta_requires') + + @property + def build_requires(self): + return self._get_requirements('build_requires') + + @property + def test_requires(self): + return self._get_requirements('test_requires') + + @property + def dev_requires(self): + return self._get_requirements('dev_requires') + + def matches_requirement(self, req): + """ + Say if this instance matches (fulfills) a requirement. + :param req: The requirement to match. + :rtype req: str + :return: True if it matches, else False. 
+ """ + # Requirement may contain extras - parse to lose those + # from what's passed to the matcher + r = parse_requirement(req) + scheme = get_scheme(self.metadata.scheme) + try: + matcher = scheme.matcher(r.requirement) + except UnsupportedVersionError: + # XXX compat-mode if cannot read the version + logger.warning('could not read version %r - using name only', + req) + name = req.split()[0] + matcher = scheme.matcher(name) + + name = matcher.key # case-insensitive + + result = False + for p in self.provides: + p_name, p_ver = parse_name_and_version(p) + if p_name != name: + continue + try: + result = matcher.match(p_ver) + break + except UnsupportedVersionError: + pass + return result + + def __repr__(self): + """ + Return a textual representation of this instance, + """ + if self.source_url: + suffix = ' [%s]' % self.source_url + else: + suffix = '' + return '' % (self.name, self.version, suffix) + + def __eq__(self, other): + """ + See if this distribution is the same as another. + :param other: The distribution to compare with. To be equal to one + another. distributions must have the same type, name, + version and source_url. + :return: True if it is the same, else False. + """ + if type(other) is not type(self): + result = False + else: + result = (self.name == other.name and + self.version == other.version and + self.source_url == other.source_url) + return result + + def __hash__(self): + """ + Compute hash in a way which matches the equality test. + """ + return hash(self.name) + hash(self.version) + hash(self.source_url) + + +class BaseInstalledDistribution(Distribution): + """ + This is the base class for installed distributions (whether PEP 376 or + legacy). + """ + + hasher = None + + def __init__(self, metadata, path, env=None): + """ + Initialise an instance. + :param metadata: An instance of :class:`Metadata` which describes the + distribution. This will normally have been initialised + from a metadata file in the ``path``. 
+ :param path: The path of the ``.dist-info`` or ``.egg-info`` + directory for the distribution. + :param env: This is normally the :class:`DistributionPath` + instance where this distribution was found. + """ + super(BaseInstalledDistribution, self).__init__(metadata) + self.path = path + self.dist_path = env + + def get_hash(self, data, hasher=None): + """ + Get the hash of some data, using a particular hash algorithm, if + specified. + + :param data: The data to be hashed. + :type data: bytes + :param hasher: The name of a hash implementation, supported by hashlib, + or ``None``. Examples of valid values are ``'sha1'``, + ``'sha224'``, ``'sha384'``, '``sha256'``, ``'md5'`` and + ``'sha512'``. If no hasher is specified, the ``hasher`` + attribute of the :class:`InstalledDistribution` instance + is used. If the hasher is determined to be ``None``, MD5 + is used as the hashing algorithm. + :returns: The hash of the data. If a hasher was explicitly specified, + the returned hash will be prefixed with the specified hasher + followed by '='. + :rtype: str + """ + if hasher is None: + hasher = self.hasher + if hasher is None: + hasher = hashlib.md5 + prefix = '' + else: + hasher = getattr(hashlib, hasher) + prefix = '%s=' % self.hasher + digest = hasher(data).digest() + digest = base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii') + return '%s%s' % (prefix, digest) + + +class InstalledDistribution(BaseInstalledDistribution): + """ + Created with the *path* of the ``.dist-info`` directory provided to the + constructor. It reads the metadata contained in ``pydist.json`` when it is + instantiated., or uses a passed in Metadata instance (useful for when + dry-run mode is being used). 
+ """ + + hasher = 'sha256' + + def __init__(self, path, metadata=None, env=None): + self.finder = finder = resources.finder_for_path(path) + if finder is None: + import pdb; pdb.set_trace () + if env and env._cache_enabled and path in env._cache.path: + metadata = env._cache.path[path].metadata + elif metadata is None: + r = finder.find(METADATA_FILENAME) + # Temporary - for Wheel 0.23 support + if r is None: + r = finder.find(WHEEL_METADATA_FILENAME) + # Temporary - for legacy support + if r is None: + r = finder.find('METADATA') + if r is None: + raise ValueError('no %s found in %s' % (METADATA_FILENAME, + path)) + with contextlib.closing(r.as_stream()) as stream: + metadata = Metadata(fileobj=stream, scheme='legacy') + + super(InstalledDistribution, self).__init__(metadata, path, env) + + if env and env._cache_enabled: + env._cache.add(self) + + try: + r = finder.find('REQUESTED') + except AttributeError: + import pdb; pdb.set_trace () + self.requested = r is not None + + def __repr__(self): + return '' % ( + self.name, self.version, self.path) + + def __str__(self): + return "%s %s" % (self.name, self.version) + + def _get_records(self): + """ + Get the list of installed files for the distribution + :return: A list of tuples of path, hash and size. Note that hash and + size might be ``None`` for some entries. The path is exactly + as stored in the file (which is as in PEP 376). 
+ """ + results = [] + r = self.get_distinfo_resource('RECORD') + with contextlib.closing(r.as_stream()) as stream: + with CSVReader(stream=stream) as record_reader: + # Base location is parent dir of .dist-info dir + #base_location = os.path.dirname(self.path) + #base_location = os.path.abspath(base_location) + for row in record_reader: + missing = [None for i in range(len(row), 3)] + path, checksum, size = row + missing + #if not os.path.isabs(path): + # path = path.replace('/', os.sep) + # path = os.path.join(base_location, path) + results.append((path, checksum, size)) + return results + + @cached_property + def exports(self): + """ + Return the information exported by this distribution. + :return: A dictionary of exports, mapping an export category to a dict + of :class:`ExportEntry` instances describing the individual + export entries, and keyed by name. + """ + result = {} + r = self.get_distinfo_resource(EXPORTS_FILENAME) + if r: + result = self.read_exports() + return result + + def read_exports(self): + """ + Read exports data from a file in .ini format. + + :return: A dictionary of exports, mapping an export category to a list + of :class:`ExportEntry` instances describing the individual + export entries. + """ + result = {} + r = self.get_distinfo_resource(EXPORTS_FILENAME) + if r: + with contextlib.closing(r.as_stream()) as stream: + result = read_exports(stream) + return result + + def write_exports(self, exports): + """ + Write a dictionary of exports to a file in .ini format. + :param exports: A dictionary of exports, mapping an export category to + a list of :class:`ExportEntry` instances describing the + individual export entries. + """ + rf = self.get_distinfo_file(EXPORTS_FILENAME) + with open(rf, 'w') as f: + write_exports(exports, f) + + def get_resource_path(self, relative_path): + """ + NOTE: This API may change in the future. + + Return the absolute path to a resource file with the given relative + path. 
+ + :param relative_path: The path, relative to .dist-info, of the resource + of interest. + :return: The absolute path where the resource is to be found. + """ + r = self.get_distinfo_resource('RESOURCES') + with contextlib.closing(r.as_stream()) as stream: + with CSVReader(stream=stream) as resources_reader: + for relative, destination in resources_reader: + if relative == relative_path: + return destination + raise KeyError('no resource file with relative path %r ' + 'is installed' % relative_path) + + def list_installed_files(self): + """ + Iterates over the ``RECORD`` entries and returns a tuple + ``(path, hash, size)`` for each line. + + :returns: iterator of (path, hash, size) + """ + for result in self._get_records(): + yield result + + def write_installed_files(self, paths, prefix, dry_run=False): + """ + Writes the ``RECORD`` file, using the ``paths`` iterable passed in. Any + existing ``RECORD`` file is silently overwritten. + + prefix is used to determine when to write absolute paths. 
+ """ + prefix = os.path.join(prefix, '') + base = os.path.dirname(self.path) + base_under_prefix = base.startswith(prefix) + base = os.path.join(base, '') + record_path = self.get_distinfo_file('RECORD') + logger.info('creating %s', record_path) + if dry_run: + return None + with CSVWriter(record_path) as writer: + for path in paths: + if os.path.isdir(path) or path.endswith(('.pyc', '.pyo')): + # do not put size and hash, as in PEP-376 + hash_value = size = '' + else: + size = '%d' % os.path.getsize(path) + with open(path, 'rb') as fp: + hash_value = self.get_hash(fp.read()) + if path.startswith(base) or (base_under_prefix and + path.startswith(prefix)): + path = os.path.relpath(path, base) + writer.writerow((path, hash_value, size)) + + # add the RECORD file itself + if record_path.startswith(base): + record_path = os.path.relpath(record_path, base) + writer.writerow((record_path, '', '')) + return record_path + + def check_installed_files(self): + """ + Checks that the hashes and sizes of the files in ``RECORD`` are + matched by the files themselves. Returns a (possibly empty) list of + mismatches. Each entry in the mismatch list will be a tuple consisting + of the path, 'exists', 'size' or 'hash' according to what didn't match + (existence is checked first, then size, then hash), the expected + value and the actual value. 
+ """ + mismatches = [] + base = os.path.dirname(self.path) + record_path = self.get_distinfo_file('RECORD') + for path, hash_value, size in self.list_installed_files(): + if not os.path.isabs(path): + path = os.path.join(base, path) + if path == record_path: + continue + if not os.path.exists(path): + mismatches.append((path, 'exists', True, False)) + elif os.path.isfile(path): + actual_size = str(os.path.getsize(path)) + if size and actual_size != size: + mismatches.append((path, 'size', size, actual_size)) + elif hash_value: + if '=' in hash_value: + hasher = hash_value.split('=', 1)[0] + else: + hasher = None + + with open(path, 'rb') as f: + actual_hash = self.get_hash(f.read(), hasher) + if actual_hash != hash_value: + mismatches.append((path, 'hash', hash_value, actual_hash)) + return mismatches + + @cached_property + def shared_locations(self): + """ + A dictionary of shared locations whose keys are in the set 'prefix', + 'purelib', 'platlib', 'scripts', 'headers', 'data' and 'namespace'. + The corresponding value is the absolute path of that category for + this distribution, and takes into account any paths selected by the + user at installation time (e.g. via command-line arguments). In the + case of the 'namespace' key, this would be a list of absolute paths + for the roots of namespace packages in this distribution. + + The first time this property is accessed, the relevant information is + read from the SHARED file in the .dist-info directory. + """ + result = {} + shared_path = os.path.join(self.path, 'SHARED') + if os.path.isfile(shared_path): + with codecs.open(shared_path, 'r', encoding='utf-8') as f: + lines = f.read().splitlines() + for line in lines: + key, value = line.split('=', 1) + if key == 'namespace': + result.setdefault(key, []).append(value) + else: + result[key] = value + return result + + def write_shared_locations(self, paths, dry_run=False): + """ + Write shared location information to the SHARED file in .dist-info. 
+ :param paths: A dictionary as described in the documentation for + :meth:`shared_locations`. + :param dry_run: If True, the action is logged but no file is actually + written. + :return: The path of the file written to. + """ + shared_path = os.path.join(self.path, 'SHARED') + logger.info('creating %s', shared_path) + if dry_run: + return None + lines = [] + for key in ('prefix', 'lib', 'headers', 'scripts', 'data'): + path = paths[key] + if os.path.isdir(paths[key]): + lines.append('%s=%s' % (key, path)) + for ns in paths.get('namespace', ()): + lines.append('namespace=%s' % ns) + + with codecs.open(shared_path, 'w', encoding='utf-8') as f: + f.write('\n'.join(lines)) + return shared_path + + def get_distinfo_resource(self, path): + if path not in DIST_FILES: + raise DistlibException('invalid path for a dist-info file: ' + '%r at %r' % (path, self.path)) + finder = resources.finder_for_path(self.path) + if finder is None: + raise DistlibException('Unable to get a finder for %s' % self.path) + return finder.find(path) + + def get_distinfo_file(self, path): + """ + Returns a path located under the ``.dist-info`` directory. Returns a + string representing the path. + + :parameter path: a ``'/'``-separated path relative to the + ``.dist-info`` directory or an absolute path; + If *path* is an absolute path and doesn't start + with the ``.dist-info`` directory path, + a :class:`DistlibException` is raised + :type path: str + :rtype: str + """ + # Check if it is an absolute path # XXX use relpath, add tests + if path.find(os.sep) >= 0: + # it's an absolute path? 
+ distinfo_dirname, path = path.split(os.sep)[-2:] + if distinfo_dirname != self.path.split(os.sep)[-1]: + raise DistlibException( + 'dist-info file %r does not belong to the %r %s ' + 'distribution' % (path, self.name, self.version)) + + # The file must be relative + if path not in DIST_FILES: + raise DistlibException('invalid path for a dist-info file: ' + '%r at %r' % (path, self.path)) + + return os.path.join(self.path, path) + + def list_distinfo_files(self): + """ + Iterates over the ``RECORD`` entries and returns paths for each line if + the path is pointing to a file located in the ``.dist-info`` directory + or one of its subdirectories. + + :returns: iterator of paths + """ + base = os.path.dirname(self.path) + for path, checksum, size in self._get_records(): + # XXX add separator or use real relpath algo + if not os.path.isabs(path): + path = os.path.join(base, path) + if path.startswith(self.path): + yield path + + def __eq__(self, other): + return (isinstance(other, InstalledDistribution) and + self.path == other.path) + + # See http://docs.python.org/reference/datamodel#object.__hash__ + __hash__ = object.__hash__ + + +class EggInfoDistribution(BaseInstalledDistribution): + """Created with the *path* of the ``.egg-info`` directory or file provided + to the constructor. 
It reads the metadata contained in the file itself, or + if the given path happens to be a directory, the metadata is read from the + file ``PKG-INFO`` under that directory.""" + + requested = True # as we have no way of knowing, assume it was + shared_locations = {} + + def __init__(self, path, env=None): + def set_name_and_version(s, n, v): + s.name = n + s.key = n.lower() # for case-insensitive comparisons + s.version = v + + self.path = path + self.dist_path = env + if env and env._cache_enabled and path in env._cache_egg.path: + metadata = env._cache_egg.path[path].metadata + set_name_and_version(self, metadata.name, metadata.version) + else: + metadata = self._get_metadata(path) + + # Need to be set before caching + set_name_and_version(self, metadata.name, metadata.version) + + if env and env._cache_enabled: + env._cache_egg.add(self) + super(EggInfoDistribution, self).__init__(metadata, path, env) + + def _get_metadata(self, path): + requires = None + + def parse_requires_data(data): + """Create a list of dependencies from a requires.txt file. + + *data*: the contents of a setuptools-produced requires.txt file. + """ + reqs = [] + lines = data.splitlines() + for line in lines: + line = line.strip() + if line.startswith('['): + logger.warning('Unexpected line: quitting requirement scan: %r', + line) + break + r = parse_requirement(line) + if not r: + logger.warning('Not recognised as a requirement: %r', line) + continue + if r.extras: + logger.warning('extra requirements in requires.txt are ' + 'not supported') + if not r.constraints: + reqs.append(r.name) + else: + cons = ', '.join('%s%s' % c for c in r.constraints) + reqs.append('%s (%s)' % (r.name, cons)) + return reqs + + def parse_requires_path(req_path): + """Create a list of dependencies from a requires.txt file. + + *req_path*: the path to a setuptools-produced requires.txt file. 
+ """ + + reqs = [] + try: + with codecs.open(req_path, 'r', 'utf-8') as fp: + reqs = parse_requires_data(fp.read()) + except IOError: + pass + return reqs + + if path.endswith('.egg'): + if os.path.isdir(path): + meta_path = os.path.join(path, 'EGG-INFO', 'PKG-INFO') + metadata = Metadata(path=meta_path, scheme='legacy') + req_path = os.path.join(path, 'EGG-INFO', 'requires.txt') + requires = parse_requires_path(req_path) + else: + # FIXME handle the case where zipfile is not available + zipf = zipimport.zipimporter(path) + fileobj = StringIO( + zipf.get_data('EGG-INFO/PKG-INFO').decode('utf8')) + metadata = Metadata(fileobj=fileobj, scheme='legacy') + try: + data = zipf.get_data('EGG-INFO/requires.txt') + requires = parse_requires_data(data.decode('utf-8')) + except IOError: + requires = None + elif path.endswith('.egg-info'): + if os.path.isdir(path): + req_path = os.path.join(path, 'requires.txt') + requires = parse_requires_path(req_path) + path = os.path.join(path, 'PKG-INFO') + metadata = Metadata(path=path, scheme='legacy') + else: + raise DistlibException('path must end with .egg-info or .egg, ' + 'got %r' % path) + + if requires: + metadata.add_requirements(requires) + return metadata + + def __repr__(self): + return '' % ( + self.name, self.version, self.path) + + def __str__(self): + return "%s %s" % (self.name, self.version) + + def check_installed_files(self): + """ + Checks that the hashes and sizes of the files in ``RECORD`` are + matched by the files themselves. Returns a (possibly empty) list of + mismatches. Each entry in the mismatch list will be a tuple consisting + of the path, 'exists', 'size' or 'hash' according to what didn't match + (existence is checked first, then size, then hash), the expected + value and the actual value. 
+ """ + mismatches = [] + record_path = os.path.join(self.path, 'installed-files.txt') + if os.path.exists(record_path): + for path, _, _ in self.list_installed_files(): + if path == record_path: + continue + if not os.path.exists(path): + mismatches.append((path, 'exists', True, False)) + return mismatches + + def list_installed_files(self): + """ + Iterates over the ``installed-files.txt`` entries and returns a tuple + ``(path, hash, size)`` for each line. + + :returns: a list of (path, hash, size) + """ + + def _md5(path): + f = open(path, 'rb') + try: + content = f.read() + finally: + f.close() + return hashlib.md5(content).hexdigest() + + def _size(path): + return os.stat(path).st_size + + record_path = os.path.join(self.path, 'installed-files.txt') + result = [] + if os.path.exists(record_path): + with codecs.open(record_path, 'r', encoding='utf-8') as f: + for line in f: + line = line.strip() + p = os.path.normpath(os.path.join(self.path, line)) + # "./" is present as a marker between installed files + # and installation metadata files + if not os.path.exists(p): + logger.warning('Non-existent file: %s', p) + if p.endswith(('.pyc', '.pyo')): + continue + #otherwise fall through and fail + if not os.path.isdir(p): + result.append((p, _md5(p), _size(p))) + result.append((record_path, None, None)) + return result + + def list_distinfo_files(self, absolute=False): + """ + Iterates over the ``installed-files.txt`` entries and returns paths for + each line if the path is pointing to a file located in the + ``.egg-info`` directory or one of its subdirectories. + + :parameter absolute: If *absolute* is ``True``, each returned path is + transformed into a local absolute path. Otherwise the + raw value from ``installed-files.txt`` is returned. 
+ :type absolute: boolean + :returns: iterator of paths + """ + record_path = os.path.join(self.path, 'installed-files.txt') + skip = True + with codecs.open(record_path, 'r', encoding='utf-8') as f: + for line in f: + line = line.strip() + if line == './': + skip = False + continue + if not skip: + p = os.path.normpath(os.path.join(self.path, line)) + if p.startswith(self.path): + if absolute: + yield p + else: + yield line + + def __eq__(self, other): + return (isinstance(other, EggInfoDistribution) and + self.path == other.path) + + # See http://docs.python.org/reference/datamodel#object.__hash__ + __hash__ = object.__hash__ + +new_dist_class = InstalledDistribution +old_dist_class = EggInfoDistribution + + +class DependencyGraph(object): + """ + Represents a dependency graph between distributions. + + The dependency relationships are stored in an ``adjacency_list`` that maps + distributions to a list of ``(other, label)`` tuples where ``other`` + is a distribution and the edge is labeled with ``label`` (i.e. the version + specifier, if such was provided). Also, for more efficient traversal, for + every distribution ``x``, a list of predecessors is kept in + ``reverse_list[x]``. An edge from distribution ``a`` to + distribution ``b`` means that ``a`` depends on ``b``. If any missing + dependencies are found, they are stored in ``missing``, which is a + dictionary that maps distributions to a list of requirements that were not + provided by any other distributions. + """ + + def __init__(self): + self.adjacency_list = {} + self.reverse_list = {} + self.missing = {} + + def add_distribution(self, distribution): + """Add the *distribution* to the graph. 
+ + :type distribution: :class:`distutils2.database.InstalledDistribution` + or :class:`distutils2.database.EggInfoDistribution` + """ + self.adjacency_list[distribution] = [] + self.reverse_list[distribution] = [] + #self.missing[distribution] = [] + + def add_edge(self, x, y, label=None): + """Add an edge from distribution *x* to distribution *y* with the given + *label*. + + :type x: :class:`distutils2.database.InstalledDistribution` or + :class:`distutils2.database.EggInfoDistribution` + :type y: :class:`distutils2.database.InstalledDistribution` or + :class:`distutils2.database.EggInfoDistribution` + :type label: ``str`` or ``None`` + """ + self.adjacency_list[x].append((y, label)) + # multiple edges are allowed, so be careful + if x not in self.reverse_list[y]: + self.reverse_list[y].append(x) + + def add_missing(self, distribution, requirement): + """ + Add a missing *requirement* for the given *distribution*. + + :type distribution: :class:`distutils2.database.InstalledDistribution` + or :class:`distutils2.database.EggInfoDistribution` + :type requirement: ``str`` + """ + logger.debug('%s missing %r', distribution, requirement) + self.missing.setdefault(distribution, []).append(requirement) + + def _repr_dist(self, dist): + return '%s %s' % (dist.name, dist.version) + + def repr_node(self, dist, level=1): + """Prints only a subgraph""" + output = [self._repr_dist(dist)] + for other, label in self.adjacency_list[dist]: + dist = self._repr_dist(other) + if label is not None: + dist = '%s [%s]' % (dist, label) + output.append(' ' * level + str(dist)) + suboutput = self.repr_node(other, level + 1) + subs = suboutput.split('\n') + output.extend(subs[1:]) + return '\n'.join(output) + + def to_dot(self, f, skip_disconnected=True): + """Writes a DOT output for the graph to the provided file *f*. + + If *skip_disconnected* is set to ``True``, then all distributions + that are not dependent on any other distribution are skipped. 
+ + :type f: has to support ``file``-like operations + :type skip_disconnected: ``bool`` + """ + disconnected = [] + + f.write("digraph dependencies {\n") + for dist, adjs in self.adjacency_list.items(): + if len(adjs) == 0 and not skip_disconnected: + disconnected.append(dist) + for other, label in adjs: + if not label is None: + f.write('"%s" -> "%s" [label="%s"]\n' % + (dist.name, other.name, label)) + else: + f.write('"%s" -> "%s"\n' % (dist.name, other.name)) + if not skip_disconnected and len(disconnected) > 0: + f.write('subgraph disconnected {\n') + f.write('label = "Disconnected"\n') + f.write('bgcolor = red\n') + + for dist in disconnected: + f.write('"%s"' % dist.name) + f.write('\n') + f.write('}\n') + f.write('}\n') + + def topological_sort(self): + """ + Perform a topological sort of the graph. + :return: A tuple, the first element of which is a topologically sorted + list of distributions, and the second element of which is a + list of distributions that cannot be sorted because they have + circular dependencies and so form a cycle. + """ + result = [] + # Make a shallow copy of the adjacency list + alist = {} + for k, v in self.adjacency_list.items(): + alist[k] = v[:] + while True: + # See what we can remove in this run + to_remove = [] + for k, v in list(alist.items())[:]: + if not v: + to_remove.append(k) + del alist[k] + if not to_remove: + # What's left in alist (if anything) is a cycle. 
+ break + # Remove from the adjacency list of others + for k, v in alist.items(): + alist[k] = [(d, r) for d, r in v if d not in to_remove] + logger.debug('Moving to result: %s', + ['%s (%s)' % (d.name, d.version) for d in to_remove]) + result.extend(to_remove) + return result, list(alist.keys()) + + def __repr__(self): + """Representation of the graph""" + output = [] + for dist, adjs in self.adjacency_list.items(): + output.append(self.repr_node(dist)) + return '\n'.join(output) + + +def make_graph(dists, scheme='default'): + """Makes a dependency graph from the given distributions. + + :parameter dists: a list of distributions + :type dists: list of :class:`distutils2.database.InstalledDistribution` and + :class:`distutils2.database.EggInfoDistribution` instances + :rtype: a :class:`DependencyGraph` instance + """ + scheme = get_scheme(scheme) + graph = DependencyGraph() + provided = {} # maps names to lists of (version, dist) tuples + + # first, build the graph and find out what's provided + for dist in dists: + graph.add_distribution(dist) + + for p in dist.provides: + name, version = parse_name_and_version(p) + logger.debug('Add to provided: %s, %s, %s', name, version, dist) + provided.setdefault(name, []).append((version, dist)) + + # now make the edges + for dist in dists: + requires = (dist.run_requires | dist.meta_requires | + dist.build_requires | dist.dev_requires) + for req in requires: + try: + matcher = scheme.matcher(req) + except UnsupportedVersionError: + # XXX compat-mode if cannot read the version + logger.warning('could not read version %r - using name only', + req) + name = req.split()[0] + matcher = scheme.matcher(name) + + name = matcher.key # case-insensitive + + matched = False + if name in provided: + for version, provider in provided[name]: + try: + match = matcher.match(version) + except UnsupportedVersionError: + match = False + + if match: + graph.add_edge(dist, provider, req) + matched = True + break + if not matched: + 
graph.add_missing(dist, req) + return graph + + +def get_dependent_dists(dists, dist): + """Recursively generate a list of distributions from *dists* that are + dependent on *dist*. + + :param dists: a list of distributions + :param dist: a distribution, member of *dists* for which we are interested + """ + if dist not in dists: + raise DistlibException('given distribution %r is not a member ' + 'of the list' % dist.name) + graph = make_graph(dists) + + dep = [dist] # dependent distributions + todo = graph.reverse_list[dist] # list of nodes we should inspect + + while todo: + d = todo.pop() + dep.append(d) + for succ in graph.reverse_list[d]: + if succ not in dep: + todo.append(succ) + + dep.pop(0) # remove dist from dep, was there to prevent infinite loops + return dep + + +def get_required_dists(dists, dist): + """Recursively generate a list of distributions from *dists* that are + required by *dist*. + + :param dists: a list of distributions + :param dist: a distribution, member of *dists* for which we are interested + """ + if dist not in dists: + raise DistlibException('given distribution %r is not a member ' + 'of the list' % dist.name) + graph = make_graph(dists) + + req = [] # required distributions + todo = graph.adjacency_list[dist] # list of nodes we should inspect + + while todo: + d = todo.pop()[0] + req.append(d) + for pred in graph.adjacency_list[d]: + if pred not in req: + todo.append(pred) + + return req + + +def make_dist(name, version, **kwargs): + """ + A convenience method for making a dist given just a name and version. 
+ """ + summary = kwargs.pop('summary', 'Placeholder for summary') + md = Metadata(**kwargs) + md.name = name + md.version = version + md.summary = summary or 'Plaeholder for summary' + return Distribution(md) diff --git a/lib/python3.4/site-packages/pip/_vendor/distlib/index.py b/lib/python3.4/site-packages/pip/_vendor/distlib/index.py new file mode 100644 index 0000000..73037c9 --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/distlib/index.py @@ -0,0 +1,513 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +import hashlib +import logging +import os +import shutil +import subprocess +import tempfile +try: + from threading import Thread +except ImportError: + from dummy_threading import Thread + +from . import DistlibException +from .compat import (HTTPBasicAuthHandler, Request, HTTPPasswordMgr, + urlparse, build_opener, string_types) +from .util import cached_property, zip_dir, ServerProxy + +logger = logging.getLogger(__name__) + +DEFAULT_INDEX = 'https://pypi.python.org/pypi' +DEFAULT_REALM = 'pypi' + +class PackageIndex(object): + """ + This class represents a package index compatible with PyPI, the Python + Package Index. + """ + + boundary = b'----------ThIs_Is_tHe_distlib_index_bouNdaRY_$' + + def __init__(self, url=None): + """ + Initialise an instance. + + :param url: The URL of the index. If not specified, the URL for PyPI is + used. 
+ """ + self.url = url or DEFAULT_INDEX + self.read_configuration() + scheme, netloc, path, params, query, frag = urlparse(self.url) + if params or query or frag or scheme not in ('http', 'https'): + raise DistlibException('invalid repository: %s' % self.url) + self.password_handler = None + self.ssl_verifier = None + self.gpg = None + self.gpg_home = None + self.rpc_proxy = None + with open(os.devnull, 'w') as sink: + for s in ('gpg2', 'gpg'): + try: + rc = subprocess.check_call([s, '--version'], stdout=sink, + stderr=sink) + if rc == 0: + self.gpg = s + break + except OSError: + pass + + def _get_pypirc_command(self): + """ + Get the distutils command for interacting with PyPI configurations. + :return: the command. + """ + from distutils.core import Distribution + from distutils.config import PyPIRCCommand + d = Distribution() + return PyPIRCCommand(d) + + def read_configuration(self): + """ + Read the PyPI access configuration as supported by distutils, getting + PyPI to do the acutal work. This populates ``username``, ``password``, + ``realm`` and ``url`` attributes from the configuration. + """ + # get distutils to do the work + c = self._get_pypirc_command() + c.repository = self.url + cfg = c._read_pypirc() + self.username = cfg.get('username') + self.password = cfg.get('password') + self.realm = cfg.get('realm', 'pypi') + self.url = cfg.get('repository', self.url) + + def save_configuration(self): + """ + Save the PyPI access configuration. You must have set ``username`` and + ``password`` attributes before calling this method. + + Again, distutils is used to do the actual work. + """ + self.check_credentials() + # get distutils to do the work + c = self._get_pypirc_command() + c._store_pypirc(self.username, self.password) + + def check_credentials(self): + """ + Check that ``username`` and ``password`` have been set, and raise an + exception if not. 
+ """ + if self.username is None or self.password is None: + raise DistlibException('username and password must be set') + pm = HTTPPasswordMgr() + _, netloc, _, _, _, _ = urlparse(self.url) + pm.add_password(self.realm, netloc, self.username, self.password) + self.password_handler = HTTPBasicAuthHandler(pm) + + def register(self, metadata): + """ + Register a distribution on PyPI, using the provided metadata. + + :param metadata: A :class:`Metadata` instance defining at least a name + and version number for the distribution to be + registered. + :return: The HTTP response received from PyPI upon submission of the + request. + """ + self.check_credentials() + metadata.validate() + d = metadata.todict() + d[':action'] = 'verify' + request = self.encode_request(d.items(), []) + response = self.send_request(request) + d[':action'] = 'submit' + request = self.encode_request(d.items(), []) + return self.send_request(request) + + def _reader(self, name, stream, outbuf): + """ + Thread runner for reading lines of from a subprocess into a buffer. + + :param name: The logical name of the stream (used for logging only). + :param stream: The stream to read from. This will typically a pipe + connected to the output stream of a subprocess. + :param outbuf: The list to append the read lines to. + """ + while True: + s = stream.readline() + if not s: + break + s = s.decode('utf-8').rstrip() + outbuf.append(s) + logger.debug('%s: %s' % (name, s)) + stream.close() + + def get_sign_command(self, filename, signer, sign_password, + keystore=None): + """ + Return a suitable command for signing a file. + + :param filename: The pathname to the file to be signed. + :param signer: The identifier of the signer of the file. + :param sign_password: The passphrase for the signer's + private key used for signing. + :param keystore: The path to a directory which contains the keys + used in verification. If not specified, the + instance's ``gpg_home`` attribute is used instead. 
+ :return: The signing command as a list suitable to be + passed to :class:`subprocess.Popen`. + """ + cmd = [self.gpg, '--status-fd', '2', '--no-tty'] + if keystore is None: + keystore = self.gpg_home + if keystore: + cmd.extend(['--homedir', keystore]) + if sign_password is not None: + cmd.extend(['--batch', '--passphrase-fd', '0']) + td = tempfile.mkdtemp() + sf = os.path.join(td, os.path.basename(filename) + '.asc') + cmd.extend(['--detach-sign', '--armor', '--local-user', + signer, '--output', sf, filename]) + logger.debug('invoking: %s', ' '.join(cmd)) + return cmd, sf + + def run_command(self, cmd, input_data=None): + """ + Run a command in a child process , passing it any input data specified. + + :param cmd: The command to run. + :param input_data: If specified, this must be a byte string containing + data to be sent to the child process. + :return: A tuple consisting of the subprocess' exit code, a list of + lines read from the subprocess' ``stdout``, and a list of + lines read from the subprocess' ``stderr``. + """ + kwargs = { + 'stdout': subprocess.PIPE, + 'stderr': subprocess.PIPE, + } + if input_data is not None: + kwargs['stdin'] = subprocess.PIPE + stdout = [] + stderr = [] + p = subprocess.Popen(cmd, **kwargs) + # We don't use communicate() here because we may need to + # get clever with interacting with the command + t1 = Thread(target=self._reader, args=('stdout', p.stdout, stdout)) + t1.start() + t2 = Thread(target=self._reader, args=('stderr', p.stderr, stderr)) + t2.start() + if input_data is not None: + p.stdin.write(input_data) + p.stdin.close() + + p.wait() + t1.join() + t2.join() + return p.returncode, stdout, stderr + + def sign_file(self, filename, signer, sign_password, keystore=None): + """ + Sign a file. + + :param filename: The pathname to the file to be signed. + :param signer: The identifier of the signer of the file. + :param sign_password: The passphrase for the signer's + private key used for signing. 
+ :param keystore: The path to a directory which contains the keys + used in signing. If not specified, the instance's + ``gpg_home`` attribute is used instead. + :return: The absolute pathname of the file where the signature is + stored. + """ + cmd, sig_file = self.get_sign_command(filename, signer, sign_password, + keystore) + rc, stdout, stderr = self.run_command(cmd, + sign_password.encode('utf-8')) + if rc != 0: + raise DistlibException('sign command failed with error ' + 'code %s' % rc) + return sig_file + + def upload_file(self, metadata, filename, signer=None, sign_password=None, + filetype='sdist', pyversion='source', keystore=None): + """ + Upload a release file to the index. + + :param metadata: A :class:`Metadata` instance defining at least a name + and version number for the file to be uploaded. + :param filename: The pathname of the file to be uploaded. + :param signer: The identifier of the signer of the file. + :param sign_password: The passphrase for the signer's + private key used for signing. + :param filetype: The type of the file being uploaded. This is the + distutils command which produced that file, e.g. + ``sdist`` or ``bdist_wheel``. + :param pyversion: The version of Python which the release relates + to. For code compatible with any Python, this would + be ``source``, otherwise it would be e.g. ``3.2``. + :param keystore: The path to a directory which contains the keys + used in signing. If not specified, the instance's + ``gpg_home`` attribute is used instead. + :return: The HTTP response received from PyPI upon submission of the + request. 
+ """ + self.check_credentials() + if not os.path.exists(filename): + raise DistlibException('not found: %s' % filename) + metadata.validate() + d = metadata.todict() + sig_file = None + if signer: + if not self.gpg: + logger.warning('no signing program available - not signed') + else: + sig_file = self.sign_file(filename, signer, sign_password, + keystore) + with open(filename, 'rb') as f: + file_data = f.read() + md5_digest = hashlib.md5(file_data).hexdigest() + sha256_digest = hashlib.sha256(file_data).hexdigest() + d.update({ + ':action': 'file_upload', + 'protcol_version': '1', + 'filetype': filetype, + 'pyversion': pyversion, + 'md5_digest': md5_digest, + 'sha256_digest': sha256_digest, + }) + files = [('content', os.path.basename(filename), file_data)] + if sig_file: + with open(sig_file, 'rb') as f: + sig_data = f.read() + files.append(('gpg_signature', os.path.basename(sig_file), + sig_data)) + shutil.rmtree(os.path.dirname(sig_file)) + request = self.encode_request(d.items(), files) + return self.send_request(request) + + def upload_documentation(self, metadata, doc_dir): + """ + Upload documentation to the index. + + :param metadata: A :class:`Metadata` instance defining at least a name + and version number for the documentation to be + uploaded. + :param doc_dir: The pathname of the directory which contains the + documentation. This should be the directory that + contains the ``index.html`` for the documentation. + :return: The HTTP response received from PyPI upon submission of the + request. 
+ """ + self.check_credentials() + if not os.path.isdir(doc_dir): + raise DistlibException('not a directory: %r' % doc_dir) + fn = os.path.join(doc_dir, 'index.html') + if not os.path.exists(fn): + raise DistlibException('not found: %r' % fn) + metadata.validate() + name, version = metadata.name, metadata.version + zip_data = zip_dir(doc_dir).getvalue() + fields = [(':action', 'doc_upload'), + ('name', name), ('version', version)] + files = [('content', name, zip_data)] + request = self.encode_request(fields, files) + return self.send_request(request) + + def get_verify_command(self, signature_filename, data_filename, + keystore=None): + """ + Return a suitable command for verifying a file. + + :param signature_filename: The pathname to the file containing the + signature. + :param data_filename: The pathname to the file containing the + signed data. + :param keystore: The path to a directory which contains the keys + used in verification. If not specified, the + instance's ``gpg_home`` attribute is used instead. + :return: The verifying command as a list suitable to be + passed to :class:`subprocess.Popen`. + """ + cmd = [self.gpg, '--status-fd', '2', '--no-tty'] + if keystore is None: + keystore = self.gpg_home + if keystore: + cmd.extend(['--homedir', keystore]) + cmd.extend(['--verify', signature_filename, data_filename]) + logger.debug('invoking: %s', ' '.join(cmd)) + return cmd + + def verify_signature(self, signature_filename, data_filename, + keystore=None): + """ + Verify a signature for a file. + + :param signature_filename: The pathname to the file containing the + signature. + :param data_filename: The pathname to the file containing the + signed data. + :param keystore: The path to a directory which contains the keys + used in verification. If not specified, the + instance's ``gpg_home`` attribute is used instead. + :return: True if the signature was verified, else False. 
+ """ + if not self.gpg: + raise DistlibException('verification unavailable because gpg ' + 'unavailable') + cmd = self.get_verify_command(signature_filename, data_filename, + keystore) + rc, stdout, stderr = self.run_command(cmd) + if rc not in (0, 1): + raise DistlibException('verify command failed with error ' + 'code %s' % rc) + return rc == 0 + + def download_file(self, url, destfile, digest=None, reporthook=None): + """ + This is a convenience method for downloading a file from an URL. + Normally, this will be a file from the index, though currently + no check is made for this (i.e. a file can be downloaded from + anywhere). + + The method is just like the :func:`urlretrieve` function in the + standard library, except that it allows digest computation to be + done during download and checking that the downloaded data + matched any expected value. + + :param url: The URL of the file to be downloaded (assumed to be + available via an HTTP GET request). + :param destfile: The pathname where the downloaded file is to be + saved. + :param digest: If specified, this must be a (hasher, value) + tuple, where hasher is the algorithm used (e.g. + ``'md5'``) and ``value`` is the expected value. + :param reporthook: The same as for :func:`urlretrieve` in the + standard library. + """ + if digest is None: + digester = None + logger.debug('No digest specified') + else: + if isinstance(digest, (list, tuple)): + hasher, digest = digest + else: + hasher = 'md5' + digester = getattr(hashlib, hasher)() + logger.debug('Digest specified: %s' % digest) + # The following code is equivalent to urlretrieve. + # We need to do it this way so that we can compute the + # digest of the file as we go. 
+ with open(destfile, 'wb') as dfp: + # addinfourl is not a context manager on 2.x + # so we have to use try/finally + sfp = self.send_request(Request(url)) + try: + headers = sfp.info() + blocksize = 8192 + size = -1 + read = 0 + blocknum = 0 + if "content-length" in headers: + size = int(headers["Content-Length"]) + if reporthook: + reporthook(blocknum, blocksize, size) + while True: + block = sfp.read(blocksize) + if not block: + break + read += len(block) + dfp.write(block) + if digester: + digester.update(block) + blocknum += 1 + if reporthook: + reporthook(blocknum, blocksize, size) + finally: + sfp.close() + + # check that we got the whole file, if we can + if size >= 0 and read < size: + raise DistlibException( + 'retrieval incomplete: got only %d out of %d bytes' + % (read, size)) + # if we have a digest, it must match. + if digester: + actual = digester.hexdigest() + if digest != actual: + raise DistlibException('%s digest mismatch for %s: expected ' + '%s, got %s' % (hasher, destfile, + digest, actual)) + logger.debug('Digest verified: %s', digest) + + def send_request(self, req): + """ + Send a standard library :class:`Request` to PyPI and return its + response. + + :param req: The request to send. + :return: The HTTP response from PyPI (a standard library HTTPResponse). + """ + handlers = [] + if self.password_handler: + handlers.append(self.password_handler) + if self.ssl_verifier: + handlers.append(self.ssl_verifier) + opener = build_opener(*handlers) + return opener.open(req) + + def encode_request(self, fields, files): + """ + Encode fields and files for posting to an HTTP server. + + :param fields: The fields to send as a list of (fieldname, value) + tuples. + :param files: The files to send as a list of (fieldname, filename, + file_bytes) tuple. 
+ """ + # Adapted from packaging, which in turn was adapted from + # http://code.activestate.com/recipes/146306 + + parts = [] + boundary = self.boundary + for k, values in fields: + if not isinstance(values, (list, tuple)): + values = [values] + + for v in values: + parts.extend(( + b'--' + boundary, + ('Content-Disposition: form-data; name="%s"' % + k).encode('utf-8'), + b'', + v.encode('utf-8'))) + for key, filename, value in files: + parts.extend(( + b'--' + boundary, + ('Content-Disposition: form-data; name="%s"; filename="%s"' % + (key, filename)).encode('utf-8'), + b'', + value)) + + parts.extend((b'--' + boundary + b'--', b'')) + + body = b'\r\n'.join(parts) + ct = b'multipart/form-data; boundary=' + boundary + headers = { + 'Content-type': ct, + 'Content-length': str(len(body)) + } + return Request(self.url, body, headers) + + def search(self, terms, operator=None): + if isinstance(terms, string_types): + terms = {'name': terms} + if self.rpc_proxy is None: + self.rpc_proxy = ServerProxy(self.url, timeout=3.0) + return self.rpc_proxy.search(terms, operator or 'and') diff --git a/lib/python3.4/site-packages/pip/_vendor/distlib/locators.py b/lib/python3.4/site-packages/pip/_vendor/distlib/locators.py new file mode 100644 index 0000000..1e14638 --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/distlib/locators.py @@ -0,0 +1,1264 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2015 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# + +import gzip +from io import BytesIO +import json +import logging +import os +import posixpath +import re +try: + import threading +except ImportError: # pragma: no cover + import dummy_threading as threading +import zlib + +from . 
import DistlibException +from .compat import (urljoin, urlparse, urlunparse, url2pathname, pathname2url, + queue, quote, unescape, string_types, build_opener, + HTTPRedirectHandler as BaseRedirectHandler, + Request, HTTPError, URLError) +from .database import Distribution, DistributionPath, make_dist +from .metadata import Metadata +from .util import (cached_property, parse_credentials, ensure_slash, + split_filename, get_project_data, parse_requirement, + parse_name_and_version, ServerProxy) +from .version import get_scheme, UnsupportedVersionError +from .wheel import Wheel, is_compatible + +logger = logging.getLogger(__name__) + +HASHER_HASH = re.compile('^(\w+)=([a-f0-9]+)') +CHARSET = re.compile(r';\s*charset\s*=\s*(.*)\s*$', re.I) +HTML_CONTENT_TYPE = re.compile('text/html|application/x(ht)?ml') +DEFAULT_INDEX = 'https://pypi.python.org/pypi' + +def get_all_distribution_names(url=None): + """ + Return all distribution names known by an index. + :param url: The URL of the index. + :return: A list of all known distribution names. + """ + if url is None: + url = DEFAULT_INDEX + client = ServerProxy(url, timeout=3.0) + return client.list_packages() + +class RedirectHandler(BaseRedirectHandler): + """ + A class to work around a bug in some Python 3.2.x releases. + """ + # There's a bug in the base version for some 3.2.x + # (e.g. 3.2.2 on Ubuntu Oneiric). If a Location header + # returns e.g. /abc, it bails because it says the scheme '' + # is bogus, when actually it should use the request's + # URL for the scheme. See Python issue #13696. + def http_error_302(self, req, fp, code, msg, headers): + # Some servers (incorrectly) return multiple Location headers + # (so probably same goes for URI). Use first header. 
+ newurl = None + for key in ('location', 'uri'): + if key in headers: + newurl = headers[key] + break + if newurl is None: + return + urlparts = urlparse(newurl) + if urlparts.scheme == '': + newurl = urljoin(req.get_full_url(), newurl) + if hasattr(headers, 'replace_header'): + headers.replace_header(key, newurl) + else: + headers[key] = newurl + return BaseRedirectHandler.http_error_302(self, req, fp, code, msg, + headers) + + http_error_301 = http_error_303 = http_error_307 = http_error_302 + +class Locator(object): + """ + A base class for locators - things that locate distributions. + """ + source_extensions = ('.tar.gz', '.tar.bz2', '.tar', '.zip', '.tgz', '.tbz') + binary_extensions = ('.egg', '.exe', '.whl') + excluded_extensions = ('.pdf',) + + # A list of tags indicating which wheels you want to match. The default + # value of None matches against the tags compatible with the running + # Python. If you want to match other values, set wheel_tags on a locator + # instance to a list of tuples (pyver, abi, arch) which you want to match. + wheel_tags = None + + downloadable_extensions = source_extensions + ('.whl',) + + def __init__(self, scheme='default'): + """ + Initialise an instance. + :param scheme: Because locators look for most recent versions, they + need to know the version scheme to use. This specifies + the current PEP-recommended scheme - use ``'legacy'`` + if you need to support existing distributions on PyPI. + """ + self._cache = {} + self.scheme = scheme + # Because of bugs in some of the handlers on some of the platforms, + # we use our own opener rather than just using urlopen. + self.opener = build_opener(RedirectHandler()) + # If get_project() is called from locate(), the matcher instance + # is set from the requirement passed to locate(). See issue #18 for + # why this can be useful to know. 
+ self.matcher = None + + def clear_cache(self): + self._cache.clear() + + def _get_scheme(self): + return self._scheme + + def _set_scheme(self, value): + self._scheme = value + + scheme = property(_get_scheme, _set_scheme) + + def _get_project(self, name): + """ + For a given project, get a dictionary mapping available versions to Distribution + instances. + + This should be implemented in subclasses. + + If called from a locate() request, self.matcher will be set to a + matcher for the requirement to satisfy, otherwise it will be None. + """ + raise NotImplementedError('Please implement in the subclass') + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + raise NotImplementedError('Please implement in the subclass') + + def get_project(self, name): + """ + For a given project, get a dictionary mapping available versions to Distribution + instances. + + This calls _get_project to do all the work, and just implements a caching layer on top. + """ + if self._cache is None: + result = self._get_project(name) + elif name in self._cache: + result = self._cache[name] + else: + result = self._get_project(name) + self._cache[name] = result + return result + + def score_url(self, url): + """ + Give an url a score which can be used to choose preferred URLs + for a given project release. + """ + t = urlparse(url) + basename = posixpath.basename(t.path) + compatible = True + is_wheel = basename.endswith('.whl') + if is_wheel: + compatible = is_compatible(Wheel(basename), self.wheel_tags) + return (t.scheme != 'https', 'pypi.python.org' in t.netloc, + is_wheel, compatible, basename) + + def prefer_url(self, url1, url2): + """ + Choose one of two URLs where both are candidates for distribution + archives for the same version of a distribution (for example, + .tar.gz vs. zip). 
+ + The current implementation favours https:// URLs over http://, archives + from PyPI over those from other locations, wheel compatibility (if a + wheel) and then the archive name. + """ + result = url2 + if url1: + s1 = self.score_url(url1) + s2 = self.score_url(url2) + if s1 > s2: + result = url1 + if result != url2: + logger.debug('Not replacing %r with %r', url1, url2) + else: + logger.debug('Replacing %r with %r', url1, url2) + return result + + def split_filename(self, filename, project_name): + """ + Attempt to split a filename in project name, version and Python version. + """ + return split_filename(filename, project_name) + + def convert_url_to_download_info(self, url, project_name): + """ + See if a URL is a candidate for a download URL for a project (the URL + has typically been scraped from an HTML page). + + If it is, a dictionary is returned with keys "name", "version", + "filename" and "url"; otherwise, None is returned. + """ + def same_project(name1, name2): + name1, name2 = name1.lower(), name2.lower() + if name1 == name2: + result = True + else: + # distribute replaces '-' by '_' in project names, so it + # can tell where the version starts in a filename. 
+ result = name1.replace('_', '-') == name2.replace('_', '-') + return result + + result = None + scheme, netloc, path, params, query, frag = urlparse(url) + if frag.lower().startswith('egg='): + logger.debug('%s: version hint in fragment: %r', + project_name, frag) + m = HASHER_HASH.match(frag) + if m: + algo, digest = m.groups() + else: + algo, digest = None, None + origpath = path + if path and path[-1] == '/': + path = path[:-1] + if path.endswith('.whl'): + try: + wheel = Wheel(path) + if is_compatible(wheel, self.wheel_tags): + if project_name is None: + include = True + else: + include = same_project(wheel.name, project_name) + if include: + result = { + 'name': wheel.name, + 'version': wheel.version, + 'filename': wheel.filename, + 'url': urlunparse((scheme, netloc, origpath, + params, query, '')), + 'python-version': ', '.join( + ['.'.join(list(v[2:])) for v in wheel.pyver]), + } + except Exception as e: + logger.warning('invalid path for wheel: %s', path) + elif path.endswith(self.downloadable_extensions): + path = filename = posixpath.basename(path) + for ext in self.downloadable_extensions: + if path.endswith(ext): + path = path[:-len(ext)] + t = self.split_filename(path, project_name) + if not t: + logger.debug('No match for project/version: %s', path) + else: + name, version, pyver = t + if not project_name or same_project(project_name, name): + result = { + 'name': name, + 'version': version, + 'filename': filename, + 'url': urlunparse((scheme, netloc, origpath, + params, query, '')), + #'packagetype': 'sdist', + } + if pyver: + result['python-version'] = pyver + break + if result and algo: + result['%s_digest' % algo] = digest + return result + + def _get_digest(self, info): + """ + Get a digest from a dictionary by looking at keys of the form + 'algo_digest'. + + Returns a 2-tuple (algo, digest) if found, else None. Currently + looks only for SHA256, then MD5. 
+ """ + result = None + for algo in ('sha256', 'md5'): + key = '%s_digest' % algo + if key in info: + result = (algo, info[key]) + break + return result + + def _update_version_data(self, result, info): + """ + Update a result dictionary (the final result from _get_project) with a + dictionary for a specific version, which typically holds information + gleaned from a filename or URL for an archive for the distribution. + """ + name = info.pop('name') + version = info.pop('version') + if version in result: + dist = result[version] + md = dist.metadata + else: + dist = make_dist(name, version, scheme=self.scheme) + md = dist.metadata + dist.digest = digest = self._get_digest(info) + url = info['url'] + result['digests'][url] = digest + if md.source_url != info['url']: + md.source_url = self.prefer_url(md.source_url, url) + result['urls'].setdefault(version, set()).add(url) + dist.locator = self + result[version] = dist + + def locate(self, requirement, prereleases=False): + """ + Find the most recent distribution which matches the given + requirement. + + :param requirement: A requirement of the form 'foo (1.0)' or perhaps + 'foo (>= 1.0, < 2.0, != 1.3)' + :param prereleases: If ``True``, allow pre-release versions + to be located. Otherwise, pre-release versions + are not returned. + :return: A :class:`Distribution` instance, or ``None`` if no such + distribution could be located. 
+ """ + result = None + r = parse_requirement(requirement) + if r is None: + raise DistlibException('Not a valid requirement: %r' % requirement) + scheme = get_scheme(self.scheme) + self.matcher = matcher = scheme.matcher(r.requirement) + logger.debug('matcher: %s (%s)', matcher, type(matcher).__name__) + versions = self.get_project(r.name) + if len(versions) > 2: # urls and digests keys are present + # sometimes, versions are invalid + slist = [] + vcls = matcher.version_class + for k in versions: + if k in ('urls', 'digests'): + continue + try: + if not matcher.match(k): + logger.debug('%s did not match %r', matcher, k) + else: + if prereleases or not vcls(k).is_prerelease: + slist.append(k) + else: + logger.debug('skipping pre-release ' + 'version %s of %s', k, matcher.name) + except Exception: # pragma: no cover + logger.warning('error matching %s with %r', matcher, k) + pass # slist.append(k) + if len(slist) > 1: + slist = sorted(slist, key=scheme.key) + if slist: + logger.debug('sorted list: %s', slist) + version = slist[-1] + result = versions[version] + if result: + if r.extras: + result.extras = r.extras + result.download_urls = versions.get('urls', {}).get(version, set()) + d = {} + sd = versions.get('digests', {}) + for url in result.download_urls: + if url in sd: + d[url] = sd[url] + result.digests = d + self.matcher = None + return result + + +class PyPIRPCLocator(Locator): + """ + This locator uses XML-RPC to locate distributions. It therefore + cannot be used with simple mirrors (that only mirror file content). + """ + def __init__(self, url, **kwargs): + """ + Initialise an instance. + + :param url: The URL to use for XML-RPC. + :param kwargs: Passed to the superclass constructor. + """ + super(PyPIRPCLocator, self).__init__(**kwargs) + self.base_url = url + self.client = ServerProxy(url, timeout=3.0) + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. 
+ """ + return set(self.client.list_packages()) + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + versions = self.client.package_releases(name, True) + for v in versions: + urls = self.client.release_urls(name, v) + data = self.client.release_data(name, v) + metadata = Metadata(scheme=self.scheme) + metadata.name = data['name'] + metadata.version = data['version'] + metadata.license = data.get('license') + metadata.keywords = data.get('keywords', []) + metadata.summary = data.get('summary') + dist = Distribution(metadata) + if urls: + info = urls[0] + metadata.source_url = info['url'] + dist.digest = self._get_digest(info) + dist.locator = self + result[v] = dist + for info in urls: + url = info['url'] + digest = self._get_digest(info) + result['urls'].setdefault(v, set()).add(url) + result['digests'][url] = digest + return result + +class PyPIJSONLocator(Locator): + """ + This locator uses PyPI's JSON interface. It's very limited in functionality + and probably not worth using. + """ + def __init__(self, url, **kwargs): + super(PyPIJSONLocator, self).__init__(**kwargs) + self.base_url = ensure_slash(url) + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. 
+ """ + raise NotImplementedError('Not available from this locator') + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + url = urljoin(self.base_url, '%s/json' % quote(name)) + try: + resp = self.opener.open(url) + data = resp.read().decode() # for now + d = json.loads(data) + md = Metadata(scheme=self.scheme) + data = d['info'] + md.name = data['name'] + md.version = data['version'] + md.license = data.get('license') + md.keywords = data.get('keywords', []) + md.summary = data.get('summary') + dist = Distribution(md) + dist.locator = self + urls = d['urls'] + result[md.version] = dist + for info in d['urls']: + url = info['url'] + dist.download_urls.add(url) + dist.digests[url] = self._get_digest(info) + result['urls'].setdefault(md.version, set()).add(url) + result['digests'][url] = self._get_digest(info) + # Now get other releases + for version, infos in d['releases'].items(): + if version == md.version: + continue # already done + omd = Metadata(scheme=self.scheme) + omd.name = md.name + omd.version = version + odist = Distribution(omd) + odist.locator = self + result[version] = odist + for info in infos: + url = info['url'] + odist.download_urls.add(url) + odist.digests[url] = self._get_digest(info) + result['urls'].setdefault(version, set()).add(url) + result['digests'][url] = self._get_digest(info) +# for info in urls: +# md.source_url = info['url'] +# dist.digest = self._get_digest(info) +# dist.locator = self +# for info in urls: +# url = info['url'] +# result['urls'].setdefault(md.version, set()).add(url) +# result['digests'][url] = self._get_digest(info) + except Exception as e: + logger.exception('JSON fetch failed: %s', e) + return result + + +class Page(object): + """ + This class represents a scraped HTML page. + """ + # The following slightly hairy-looking regex just looks for the contents of + # an anchor link, which has an attribute "href" either immediately preceded + # or immediately followed by a "rel" attribute. 
The attribute values can be + # declared with double quotes, single quotes or no quotes - which leads to + # the length of the expression. + _href = re.compile(""" +(rel\s*=\s*(?:"(?P[^"]*)"|'(?P[^']*)'|(?P[^>\s\n]*))\s+)? +href\s*=\s*(?:"(?P[^"]*)"|'(?P[^']*)'|(?P[^>\s\n]*)) +(\s+rel\s*=\s*(?:"(?P[^"]*)"|'(?P[^']*)'|(?P[^>\s\n]*)))? +""", re.I | re.S | re.X) + _base = re.compile(r"""]+)""", re.I | re.S) + + def __init__(self, data, url): + """ + Initialise an instance with the Unicode page contents and the URL they + came from. + """ + self.data = data + self.base_url = self.url = url + m = self._base.search(self.data) + if m: + self.base_url = m.group(1) + + _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I) + + @cached_property + def links(self): + """ + Return the URLs of all the links on a page together with information + about their "rel" attribute, for determining which ones to treat as + downloads and which ones to queue for further scraping. + """ + def clean(url): + "Tidy up an URL." + scheme, netloc, path, params, query, frag = urlparse(url) + return urlunparse((scheme, netloc, quote(path), + params, query, frag)) + + result = set() + for match in self._href.finditer(self.data): + d = match.groupdict('') + rel = (d['rel1'] or d['rel2'] or d['rel3'] or + d['rel4'] or d['rel5'] or d['rel6']) + url = d['url1'] or d['url2'] or d['url3'] + url = urljoin(self.base_url, url) + url = unescape(url) + url = self._clean_re.sub(lambda m: '%%%2x' % ord(m.group(0)), url) + result.add((url, rel)) + # We sort the result, hoping to bring the most recent versions + # to the front + result = sorted(result, key=lambda t: t[0], reverse=True) + return result + + +class SimpleScrapingLocator(Locator): + """ + A locator which scrapes HTML pages to locate downloads for a distribution. + This runs multiple threads to do the I/O; performance is at least as good + as pip's PackageFinder, which works in an analogous fashion. 
+ """ + + # These are used to deal with various Content-Encoding schemes. + decoders = { + 'deflate': zlib.decompress, + 'gzip': lambda b: gzip.GzipFile(fileobj=BytesIO(d)).read(), + 'none': lambda b: b, + } + + def __init__(self, url, timeout=None, num_workers=10, **kwargs): + """ + Initialise an instance. + :param url: The root URL to use for scraping. + :param timeout: The timeout, in seconds, to be applied to requests. + This defaults to ``None`` (no timeout specified). + :param num_workers: The number of worker threads you want to do I/O, + This defaults to 10. + :param kwargs: Passed to the superclass. + """ + super(SimpleScrapingLocator, self).__init__(**kwargs) + self.base_url = ensure_slash(url) + self.timeout = timeout + self._page_cache = {} + self._seen = set() + self._to_fetch = queue.Queue() + self._bad_hosts = set() + self.skip_externals = False + self.num_workers = num_workers + self._lock = threading.RLock() + # See issue #45: we need to be resilient when the locator is used + # in a thread, e.g. with concurrent.futures. We can't use self._lock + # as it is for coordinating our internal threads - the ones created + # in _prepare_threads. + self._gplock = threading.RLock() + + def _prepare_threads(self): + """ + Threads are created only when get_project is called, and terminate + before it returns. They are there primarily to parallelise I/O (i.e. + fetching web pages). + """ + self._threads = [] + for i in range(self.num_workers): + t = threading.Thread(target=self._fetch) + t.setDaemon(True) + t.start() + self._threads.append(t) + + def _wait_threads(self): + """ + Tell all the threads to terminate (by sending a sentinel value) and + wait for them to do so. 
+ """ + # Note that you need two loops, since you can't say which + # thread will get each sentinel + for t in self._threads: + self._to_fetch.put(None) # sentinel + for t in self._threads: + t.join() + self._threads = [] + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + with self._gplock: + self.result = result + self.project_name = name + url = urljoin(self.base_url, '%s/' % quote(name)) + self._seen.clear() + self._page_cache.clear() + self._prepare_threads() + try: + logger.debug('Queueing %s', url) + self._to_fetch.put(url) + self._to_fetch.join() + finally: + self._wait_threads() + del self.result + return result + + platform_dependent = re.compile(r'\b(linux-(i\d86|x86_64|arm\w+)|' + r'win(32|-amd64)|macosx-?\d+)\b', re.I) + + def _is_platform_dependent(self, url): + """ + Does an URL refer to a platform-specific download? + """ + return self.platform_dependent.search(url) + + def _process_download(self, url): + """ + See if an URL is a suitable download for a project. + + If it is, register information in the result dictionary (for + _get_project) about the specific version it's for. + + Note that the return value isn't actually used other than as a boolean + value. + """ + if self._is_platform_dependent(url): + info = None + else: + info = self.convert_url_to_download_info(url, self.project_name) + logger.debug('process_download: %s -> %s', url, info) + if info: + with self._lock: # needed because self.result is shared + self._update_version_data(self.result, info) + return info + + def _should_queue(self, link, referrer, rel): + """ + Determine whether a link URL from a referring page and with a + particular "rel" attribute should be queued for scraping. 
+ """ + scheme, netloc, path, _, _, _ = urlparse(link) + if path.endswith(self.source_extensions + self.binary_extensions + + self.excluded_extensions): + result = False + elif self.skip_externals and not link.startswith(self.base_url): + result = False + elif not referrer.startswith(self.base_url): + result = False + elif rel not in ('homepage', 'download'): + result = False + elif scheme not in ('http', 'https', 'ftp'): + result = False + elif self._is_platform_dependent(link): + result = False + else: + host = netloc.split(':', 1)[0] + if host.lower() == 'localhost': + result = False + else: + result = True + logger.debug('should_queue: %s (%s) from %s -> %s', link, rel, + referrer, result) + return result + + def _fetch(self): + """ + Get a URL to fetch from the work queue, get the HTML page, examine its + links for download candidates and candidates for further scraping. + + This is a handy method to run in a thread. + """ + while True: + url = self._to_fetch.get() + try: + if url: + page = self.get_page(url) + if page is None: # e.g. after an error + continue + for link, rel in page.links: + if link not in self._seen: + self._seen.add(link) + if (not self._process_download(link) and + self._should_queue(link, url, rel)): + logger.debug('Queueing %s from %s', link, url) + self._to_fetch.put(link) + finally: + # always do this, to avoid hangs :-) + self._to_fetch.task_done() + if not url: + #logger.debug('Sentinel seen, quitting.') + break + + def get_page(self, url): + """ + Get the HTML for an URL, possibly from an in-memory cache. + + XXX TODO Note: this cache is never actually cleared. It's assumed that + the data won't get stale over the lifetime of a locator instance (not + necessarily true for the default_locator). 
+ """ + # http://peak.telecommunity.com/DevCenter/EasyInstall#package-index-api + scheme, netloc, path, _, _, _ = urlparse(url) + if scheme == 'file' and os.path.isdir(url2pathname(path)): + url = urljoin(ensure_slash(url), 'index.html') + + if url in self._page_cache: + result = self._page_cache[url] + logger.debug('Returning %s from cache: %s', url, result) + else: + host = netloc.split(':', 1)[0] + result = None + if host in self._bad_hosts: + logger.debug('Skipping %s due to bad host %s', url, host) + else: + req = Request(url, headers={'Accept-encoding': 'identity'}) + try: + logger.debug('Fetching %s', url) + resp = self.opener.open(req, timeout=self.timeout) + logger.debug('Fetched %s', url) + headers = resp.info() + content_type = headers.get('Content-Type', '') + if HTML_CONTENT_TYPE.match(content_type): + final_url = resp.geturl() + data = resp.read() + encoding = headers.get('Content-Encoding') + if encoding: + decoder = self.decoders[encoding] # fail if not found + data = decoder(data) + encoding = 'utf-8' + m = CHARSET.search(content_type) + if m: + encoding = m.group(1) + try: + data = data.decode(encoding) + except UnicodeError: # pragma: no cover + data = data.decode('latin-1') # fallback + result = Page(data, final_url) + self._page_cache[final_url] = result + except HTTPError as e: + if e.code != 404: + logger.exception('Fetch failed: %s: %s', url, e) + except URLError as e: # pragma: no cover + logger.exception('Fetch failed: %s: %s', url, e) + with self._lock: + self._bad_hosts.add(host) + except Exception as e: # pragma: no cover + logger.exception('Fetch failed: %s: %s', url, e) + finally: + self._page_cache[url] = result # even if None (failure) + return result + + _distname_re = re.compile(']*>([^<]+)<') + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. 
+ """ + result = set() + page = self.get_page(self.base_url) + if not page: + raise DistlibException('Unable to get %s' % self.base_url) + for match in self._distname_re.finditer(page.data): + result.add(match.group(1)) + return result + +class DirectoryLocator(Locator): + """ + This class locates distributions in a directory tree. + """ + + def __init__(self, path, **kwargs): + """ + Initialise an instance. + :param path: The root of the directory tree to search. + :param kwargs: Passed to the superclass constructor, + except for: + * recursive - if True (the default), subdirectories are + recursed into. If False, only the top-level directory + is searched, + """ + self.recursive = kwargs.pop('recursive', True) + super(DirectoryLocator, self).__init__(**kwargs) + path = os.path.abspath(path) + if not os.path.isdir(path): # pragma: no cover + raise DistlibException('Not a directory: %r' % path) + self.base_dir = path + + def should_include(self, filename, parent): + """ + Should a filename be considered as a candidate for a distribution + archive? As well as the filename, the directory which contains it + is provided, though not used by the current implementation. + """ + return filename.endswith(self.downloadable_extensions) + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + for root, dirs, files in os.walk(self.base_dir): + for fn in files: + if self.should_include(fn, root): + fn = os.path.join(root, fn) + url = urlunparse(('file', '', + pathname2url(os.path.abspath(fn)), + '', '', '')) + info = self.convert_url_to_download_info(url, name) + if info: + self._update_version_data(result, info) + if not self.recursive: + break + return result + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. 
+ """ + result = set() + for root, dirs, files in os.walk(self.base_dir): + for fn in files: + if self.should_include(fn, root): + fn = os.path.join(root, fn) + url = urlunparse(('file', '', + pathname2url(os.path.abspath(fn)), + '', '', '')) + info = self.convert_url_to_download_info(url, None) + if info: + result.add(info['name']) + if not self.recursive: + break + return result + +class JSONLocator(Locator): + """ + This locator uses special extended metadata (not available on PyPI) and is + the basis of performant dependency resolution in distlib. Other locators + require archive downloads before dependencies can be determined! As you + might imagine, that can be slow. + """ + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + raise NotImplementedError('Not available from this locator') + + def _get_project(self, name): + result = {'urls': {}, 'digests': {}} + data = get_project_data(name) + if data: + for info in data.get('files', []): + if info['ptype'] != 'sdist' or info['pyversion'] != 'source': + continue + # We don't store summary in project metadata as it makes + # the data bigger for no benefit during dependency + # resolution + dist = make_dist(data['name'], info['version'], + summary=data.get('summary', + 'Placeholder for summary'), + scheme=self.scheme) + md = dist.metadata + md.source_url = info['url'] + # TODO SHA256 digest + if 'digest' in info and info['digest']: + dist.digest = ('md5', info['digest']) + md.dependencies = info.get('requirements', {}) + dist.exports = info.get('exports', {}) + result[dist.version] = dist + result['urls'].setdefault(dist.version, set()).add(info['url']) + return result + +class DistPathLocator(Locator): + """ + This locator finds installed distributions in a path. It can be useful for + adding to an :class:`AggregatingLocator`. + """ + def __init__(self, distpath, **kwargs): + """ + Initialise an instance. 
+ + :param distpath: A :class:`DistributionPath` instance to search. + """ + super(DistPathLocator, self).__init__(**kwargs) + assert isinstance(distpath, DistributionPath) + self.distpath = distpath + + def _get_project(self, name): + dist = self.distpath.get_distribution(name) + if dist is None: + result = {'urls': {}, 'digests': {}} + else: + result = { + dist.version: dist, + 'urls': {dist.version: set([dist.source_url])}, + 'digests': {dist.version: set([None])} + } + return result + + +class AggregatingLocator(Locator): + """ + This class allows you to chain and/or merge a list of locators. + """ + def __init__(self, *locators, **kwargs): + """ + Initialise an instance. + + :param locators: The list of locators to search. + :param kwargs: Passed to the superclass constructor, + except for: + * merge - if False (the default), the first successful + search from any of the locators is returned. If True, + the results from all locators are merged (this can be + slow). + """ + self.merge = kwargs.pop('merge', False) + self.locators = locators + super(AggregatingLocator, self).__init__(**kwargs) + + def clear_cache(self): + super(AggregatingLocator, self).clear_cache() + for locator in self.locators: + locator.clear_cache() + + def _set_scheme(self, value): + self._scheme = value + for locator in self.locators: + locator.scheme = value + + scheme = property(Locator.scheme.fget, _set_scheme) + + def _get_project(self, name): + result = {} + for locator in self.locators: + d = locator.get_project(name) + if d: + if self.merge: + files = result.get('urls', {}) + digests = result.get('digests', {}) + # next line could overwrite result['urls'], result['digests'] + result.update(d) + df = result.get('urls') + if files and df: + for k, v in files.items(): + if k in df: + df[k] |= v + else: + df[k] = v + dd = result.get('digests') + if digests and dd: + dd.update(digests) + else: + # See issue #18. 
If any dists are found and we're looking + # for specific constraints, we only return something if + # a match is found. For example, if a DirectoryLocator + # returns just foo (1.0) while we're looking for + # foo (>= 2.0), we'll pretend there was nothing there so + # that subsequent locators can be queried. Otherwise we + # would just return foo (1.0) which would then lead to a + # failure to find foo (>= 2.0), because other locators + # weren't searched. Note that this only matters when + # merge=False. + if self.matcher is None: + found = True + else: + found = False + for k in d: + if self.matcher.match(k): + found = True + break + if found: + result = d + break + return result + + def get_distribution_names(self): + """ + Return all the distribution names known to this locator. + """ + result = set() + for locator in self.locators: + try: + result |= locator.get_distribution_names() + except NotImplementedError: + pass + return result + + +# We use a legacy scheme simply because most of the dists on PyPI use legacy +# versions which don't conform to PEP 426 / PEP 440. +default_locator = AggregatingLocator( + JSONLocator(), + SimpleScrapingLocator('https://pypi.python.org/simple/', + timeout=3.0), + scheme='legacy') + +locate = default_locator.locate + +NAME_VERSION_RE = re.compile(r'(?P[\w-]+)\s*' + r'\(\s*(==\s*)?(?P[^)]+)\)$') + +class DependencyFinder(object): + """ + Locate dependencies for distributions. + """ + + def __init__(self, locator=None): + """ + Initialise an instance, using the specified locator + to locate distributions. + """ + self.locator = locator or default_locator + self.scheme = get_scheme(self.locator.scheme) + + def add_distribution(self, dist): + """ + Add a distribution to the finder. This will update internal information + about who provides what. + :param dist: The distribution to add. 
+ """ + logger.debug('adding distribution %s', dist) + name = dist.key + self.dists_by_name[name] = dist + self.dists[(name, dist.version)] = dist + for p in dist.provides: + name, version = parse_name_and_version(p) + logger.debug('Add to provided: %s, %s, %s', name, version, dist) + self.provided.setdefault(name, set()).add((version, dist)) + + def remove_distribution(self, dist): + """ + Remove a distribution from the finder. This will update internal + information about who provides what. + :param dist: The distribution to remove. + """ + logger.debug('removing distribution %s', dist) + name = dist.key + del self.dists_by_name[name] + del self.dists[(name, dist.version)] + for p in dist.provides: + name, version = parse_name_and_version(p) + logger.debug('Remove from provided: %s, %s, %s', name, version, dist) + s = self.provided[name] + s.remove((version, dist)) + if not s: + del self.provided[name] + + def get_matcher(self, reqt): + """ + Get a version matcher for a requirement. + :param reqt: The requirement + :type reqt: str + :return: A version matcher (an instance of + :class:`distlib.version.Matcher`). + """ + try: + matcher = self.scheme.matcher(reqt) + except UnsupportedVersionError: # pragma: no cover + # XXX compat-mode if cannot read the version + name = reqt.split()[0] + matcher = self.scheme.matcher(name) + return matcher + + def find_providers(self, reqt): + """ + Find the distributions which can fulfill a requirement. + + :param reqt: The requirement. + :type reqt: str + :return: A set of distribution which can fulfill the requirement. 
+ """ + matcher = self.get_matcher(reqt) + name = matcher.key # case-insensitive + result = set() + provided = self.provided + if name in provided: + for version, provider in provided[name]: + try: + match = matcher.match(version) + except UnsupportedVersionError: + match = False + + if match: + result.add(provider) + break + return result + + def try_to_replace(self, provider, other, problems): + """ + Attempt to replace one provider with another. This is typically used + when resolving dependencies from multiple sources, e.g. A requires + (B >= 1.0) while C requires (B >= 1.1). + + For successful replacement, ``provider`` must meet all the requirements + which ``other`` fulfills. + + :param provider: The provider we are trying to replace with. + :param other: The provider we're trying to replace. + :param problems: If False is returned, this will contain what + problems prevented replacement. This is currently + a tuple of the literal string 'cantreplace', + ``provider``, ``other`` and the set of requirements + that ``provider`` couldn't fulfill. + :return: True if we can replace ``other`` with ``provider``, else + False. + """ + rlist = self.reqts[other] + unmatched = set() + for s in rlist: + matcher = self.get_matcher(s) + if not matcher.match(provider.version): + unmatched.add(s) + if unmatched: + # can't replace other with provider + problems.add(('cantreplace', provider, other, + frozenset(unmatched))) + result = False + else: + # can replace other with provider + self.remove_distribution(other) + del self.reqts[other] + for s in rlist: + self.reqts.setdefault(provider, set()).add(s) + self.add_distribution(provider) + result = True + return result + + def find(self, requirement, meta_extras=None, prereleases=False): + """ + Find a distribution and all distributions it depends on. + + :param requirement: The requirement specifying the distribution to + find, or a Distribution instance. 
+ :param meta_extras: A list of meta extras such as :test:, :build: and + so on. + :param prereleases: If ``True``, allow pre-release versions to be + returned - otherwise, don't return prereleases + unless they're all that's available. + + Return a set of :class:`Distribution` instances and a set of + problems. + + The distributions returned should be such that they have the + :attr:`required` attribute set to ``True`` if they were + from the ``requirement`` passed to ``find()``, and they have the + :attr:`build_time_dependency` attribute set to ``True`` unless they + are post-installation dependencies of the ``requirement``. + + The problems should be a tuple consisting of the string + ``'unsatisfied'`` and the requirement which couldn't be satisfied + by any distribution known to the locator. + """ + + self.provided = {} + self.dists = {} + self.dists_by_name = {} + self.reqts = {} + + meta_extras = set(meta_extras or []) + if ':*:' in meta_extras: + meta_extras.remove(':*:') + # :meta: and :run: are implicitly included + meta_extras |= set([':test:', ':build:', ':dev:']) + + if isinstance(requirement, Distribution): + dist = odist = requirement + logger.debug('passed %s as requirement', odist) + else: + dist = odist = self.locator.locate(requirement, + prereleases=prereleases) + if dist is None: + raise DistlibException('Unable to locate %r' % requirement) + logger.debug('located %s', odist) + dist.requested = True + problems = set() + todo = set([dist]) + install_dists = set([odist]) + while todo: + dist = todo.pop() + name = dist.key # case-insensitive + if name not in self.dists_by_name: + self.add_distribution(dist) + else: + #import pdb; pdb.set_trace() + other = self.dists_by_name[name] + if other != dist: + self.try_to_replace(dist, other, problems) + + ireqts = dist.run_requires | dist.meta_requires + sreqts = dist.build_requires + ereqts = set() + if dist in install_dists: + for key in ('test', 'build', 'dev'): + e = ':%s:' % key + if e in meta_extras: 
+ ereqts |= getattr(dist, '%s_requires' % key) + all_reqts = ireqts | sreqts | ereqts + for r in all_reqts: + providers = self.find_providers(r) + if not providers: + logger.debug('No providers found for %r', r) + provider = self.locator.locate(r, prereleases=prereleases) + # If no provider is found and we didn't consider + # prereleases, consider them now. + if provider is None and not prereleases: + provider = self.locator.locate(r, prereleases=True) + if provider is None: + logger.debug('Cannot satisfy %r', r) + problems.add(('unsatisfied', r)) + else: + n, v = provider.key, provider.version + if (n, v) not in self.dists: + todo.add(provider) + providers.add(provider) + if r in ireqts and dist in install_dists: + install_dists.add(provider) + logger.debug('Adding %s to install_dists', + provider.name_and_version) + for p in providers: + name = p.key + if name not in self.dists_by_name: + self.reqts.setdefault(p, set()).add(r) + else: + other = self.dists_by_name[name] + if other != p: + # see if other can be replaced by p + self.try_to_replace(p, other, problems) + + dists = set(self.dists.values()) + for dist in dists: + dist.build_time_dependency = dist not in install_dists + if dist.build_time_dependency: + logger.debug('%s is a build-time dependency only.', + dist.name_and_version) + logger.debug('find done for %s', odist) + return dists, problems diff --git a/lib/python3.4/site-packages/pip/_vendor/distlib/manifest.py b/lib/python3.4/site-packages/pip/_vendor/distlib/manifest.py new file mode 100644 index 0000000..21cff45 --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/distlib/manifest.py @@ -0,0 +1,367 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2013 Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +""" +Class representing the list of files in a distribution. + +Equivalent to distutils.filelist, but fixes some problems. +""" +import fnmatch +import logging +import os +import re + +from . 
import DistlibException +from .compat import fsdecode +from .util import convert_path + + +__all__ = ['Manifest'] + +logger = logging.getLogger(__name__) + +# a \ followed by some spaces + EOL +_COLLAPSE_PATTERN = re.compile('\\\w*\n', re.M) +_COMMENTED_LINE = re.compile('#.*?(?=\n)|\n(?=$)', re.M | re.S) + + +class Manifest(object): + """A list of files built by on exploring the filesystem and filtered by + applying various patterns to what we find there. + """ + + def __init__(self, base=None): + """ + Initialise an instance. + + :param base: The base directory to explore under. + """ + self.base = os.path.abspath(os.path.normpath(base or os.getcwd())) + self.prefix = self.base + os.sep + self.allfiles = None + self.files = set() + + # + # Public API + # + + def findall(self): + """Find all files under the base and set ``allfiles`` to the absolute + pathnames of files found. + """ + from stat import S_ISREG, S_ISDIR, S_ISLNK + + self.allfiles = allfiles = [] + root = self.base + stack = [root] + pop = stack.pop + push = stack.append + + while stack: + root = pop() + names = os.listdir(root) + + for name in names: + fullname = os.path.join(root, name) + + # Avoid excess stat calls -- just one will do, thank you! + stat = os.stat(fullname) + mode = stat.st_mode + if S_ISREG(mode): + allfiles.append(fsdecode(fullname)) + elif S_ISDIR(mode) and not S_ISLNK(mode): + push(fullname) + + def add(self, item): + """ + Add a file to the manifest. + + :param item: The pathname to add. This can be relative to the base. + """ + if not item.startswith(self.prefix): + item = os.path.join(self.base, item) + self.files.add(os.path.normpath(item)) + + def add_many(self, items): + """ + Add a list of files to the manifest. + + :param items: The pathnames to add. These can be relative to the base. 
+ """ + for item in items: + self.add(item) + + def sorted(self, wantdirs=False): + """ + Return sorted files in directory order + """ + + def add_dir(dirs, d): + dirs.add(d) + logger.debug('add_dir added %s', d) + if d != self.base: + parent, _ = os.path.split(d) + assert parent not in ('', '/') + add_dir(dirs, parent) + + result = set(self.files) # make a copy! + if wantdirs: + dirs = set() + for f in result: + add_dir(dirs, os.path.dirname(f)) + result |= dirs + return [os.path.join(*path_tuple) for path_tuple in + sorted(os.path.split(path) for path in result)] + + def clear(self): + """Clear all collected files.""" + self.files = set() + self.allfiles = [] + + def process_directive(self, directive): + """ + Process a directive which either adds some files from ``allfiles`` to + ``files``, or removes some files from ``files``. + + :param directive: The directive to process. This should be in a format + compatible with distutils ``MANIFEST.in`` files: + + http://docs.python.org/distutils/sourcedist.html#commands + """ + # Parse the line: split it up, make sure the right number of words + # is there, and return the relevant words. 'action' is always + # defined: it's the first word of the line. Which of the other + # three are defined depends on the action; it'll be either + # patterns, (dir and patterns), or (dirpattern). + action, patterns, thedir, dirpattern = self._parse_directive(directive) + + # OK, now we know that the action is valid and we have the + # right number of words on the line for that action -- so we + # can proceed with minimal error-checking. 
+ if action == 'include': + for pattern in patterns: + if not self._include_pattern(pattern, anchor=True): + logger.warning('no files found matching %r', pattern) + + elif action == 'exclude': + for pattern in patterns: + found = self._exclude_pattern(pattern, anchor=True) + #if not found: + # logger.warning('no previously-included files ' + # 'found matching %r', pattern) + + elif action == 'global-include': + for pattern in patterns: + if not self._include_pattern(pattern, anchor=False): + logger.warning('no files found matching %r ' + 'anywhere in distribution', pattern) + + elif action == 'global-exclude': + for pattern in patterns: + found = self._exclude_pattern(pattern, anchor=False) + #if not found: + # logger.warning('no previously-included files ' + # 'matching %r found anywhere in ' + # 'distribution', pattern) + + elif action == 'recursive-include': + for pattern in patterns: + if not self._include_pattern(pattern, prefix=thedir): + logger.warning('no files found matching %r ' + 'under directory %r', pattern, thedir) + + elif action == 'recursive-exclude': + for pattern in patterns: + found = self._exclude_pattern(pattern, prefix=thedir) + #if not found: + # logger.warning('no previously-included files ' + # 'matching %r found under directory %r', + # pattern, thedir) + + elif action == 'graft': + if not self._include_pattern(None, prefix=dirpattern): + logger.warning('no directories found matching %r', + dirpattern) + + elif action == 'prune': + if not self._exclude_pattern(None, prefix=dirpattern): + logger.warning('no previously-included directories found ' + 'matching %r', dirpattern) + else: # pragma: no cover + # This should never happen, as it should be caught in + # _parse_template_line + raise DistlibException( + 'invalid action %r' % action) + + # + # Private API + # + + def _parse_directive(self, directive): + """ + Validate a directive. + :param directive: The directive to validate. 
+ :return: A tuple of action, patterns, thedir, dir_patterns + """ + words = directive.split() + if len(words) == 1 and words[0] not in ('include', 'exclude', + 'global-include', + 'global-exclude', + 'recursive-include', + 'recursive-exclude', + 'graft', 'prune'): + # no action given, let's use the default 'include' + words.insert(0, 'include') + + action = words[0] + patterns = thedir = dir_pattern = None + + if action in ('include', 'exclude', + 'global-include', 'global-exclude'): + if len(words) < 2: + raise DistlibException( + '%r expects ...' % action) + + patterns = [convert_path(word) for word in words[1:]] + + elif action in ('recursive-include', 'recursive-exclude'): + if len(words) < 3: + raise DistlibException( + '%r expects ...' % action) + + thedir = convert_path(words[1]) + patterns = [convert_path(word) for word in words[2:]] + + elif action in ('graft', 'prune'): + if len(words) != 2: + raise DistlibException( + '%r expects a single ' % action) + + dir_pattern = convert_path(words[1]) + + else: + raise DistlibException('unknown action %r' % action) + + return action, patterns, thedir, dir_pattern + + def _include_pattern(self, pattern, anchor=True, prefix=None, + is_regex=False): + """Select strings (presumably filenames) from 'self.files' that + match 'pattern', a Unix-style wildcard (glob) pattern. + + Patterns are not quite the same as implemented by the 'fnmatch' + module: '*' and '?' match non-special characters, where "special" + is platform-dependent: slash on Unix; colon, slash, and backslash on + DOS/Windows; and colon on Mac OS. + + If 'anchor' is true (the default), then the pattern match is more + stringent: "*.py" will match "foo.py" but not "foo/bar.py". If + 'anchor' is false, both of these will match. + + If 'prefix' is supplied, then only filenames starting with 'prefix' + (itself a pattern) and ending with 'pattern', with anything in between + them, will match. 'anchor' is ignored in this case. 
+ + If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and + 'pattern' is assumed to be either a string containing a regex or a + regex object -- no translation is done, the regex is just compiled + and used as-is. + + Selected strings will be added to self.files. + + Return True if files are found. + """ + # XXX docstring lying about what the special chars are? + found = False + pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex) + + # delayed loading of allfiles list + if self.allfiles is None: + self.findall() + + for name in self.allfiles: + if pattern_re.search(name): + self.files.add(name) + found = True + return found + + def _exclude_pattern(self, pattern, anchor=True, prefix=None, + is_regex=False): + """Remove strings (presumably filenames) from 'files' that match + 'pattern'. + + Other parameters are the same as for 'include_pattern()', above. + The list 'self.files' is modified in place. Return True if files are + found. + + This API is public to allow e.g. exclusion of SCM subdirs, e.g. when + packaging source distributions + """ + found = False + pattern_re = self._translate_pattern(pattern, anchor, prefix, is_regex) + for f in list(self.files): + if pattern_re.search(f): + self.files.remove(f) + found = True + return found + + def _translate_pattern(self, pattern, anchor=True, prefix=None, + is_regex=False): + """Translate a shell-like wildcard pattern to a compiled regular + expression. + + Return the compiled regex. If 'is_regex' true, + then 'pattern' is directly compiled to a regex (if it's a string) + or just returned as-is (assumes it's a regex object). 
+ """ + if is_regex: + if isinstance(pattern, str): + return re.compile(pattern) + else: + return pattern + + if pattern: + pattern_re = self._glob_to_re(pattern) + else: + pattern_re = '' + + base = re.escape(os.path.join(self.base, '')) + if prefix is not None: + # ditch end of pattern character + empty_pattern = self._glob_to_re('') + prefix_re = self._glob_to_re(prefix)[:-len(empty_pattern)] + sep = os.sep + if os.sep == '\\': + sep = r'\\' + pattern_re = '^' + base + sep.join((prefix_re, + '.*' + pattern_re)) + else: # no prefix -- respect anchor flag + if anchor: + pattern_re = '^' + base + pattern_re + + return re.compile(pattern_re) + + def _glob_to_re(self, pattern): + """Translate a shell-like glob pattern to a regular expression. + + Return a string containing the regex. Differs from + 'fnmatch.translate()' in that '*' does not match "special characters" + (which are platform-specific). + """ + pattern_re = fnmatch.translate(pattern) + + # '?' and '*' in the glob pattern become '.' and '.*' in the RE, which + # IMHO is wrong -- '?' and '*' aren't supposed to match slash in Unix, + # and by extension they shouldn't match such "special characters" under + # any OS. So change all non-escaped dots in the RE to match any + # character except the special characters (currently: just os.sep). + sep = os.sep + if os.sep == '\\': + # we're using a regex to manipulate a regex, so we need + # to escape the backslash twice + sep = r'\\\\' + escaped = r'\1[^%s]' % sep + pattern_re = re.sub(r'((? y, + 'gte': lambda x, y: x >= y, + 'in': lambda x, y: x in y, + 'lt': lambda x, y: x < y, + 'lte': lambda x, y: x <= y, + 'not': lambda x: not x, + 'noteq': lambda x, y: x != y, + 'notin': lambda x, y: x not in y, + } + + allowed_values = { + 'sys_platform': sys.platform, + 'python_version': '%s.%s' % sys.version_info[:2], + # parsing sys.platform is not reliable, but there is no other + # way to get e.g. 
2.7.2+, and the PEP is defined with sys.version + 'python_full_version': sys.version.split(' ', 1)[0], + 'os_name': os.name, + 'platform_in_venv': str(in_venv()), + 'platform_release': platform.release(), + 'platform_version': platform.version(), + 'platform_machine': platform.machine(), + 'platform_python_implementation': python_implementation(), + } + + def __init__(self, context=None): + """ + Initialise an instance. + + :param context: If specified, names are looked up in this mapping. + """ + self.context = context or {} + self.source = None + + def get_fragment(self, offset): + """ + Get the part of the source which is causing a problem. + """ + fragment_len = 10 + s = '%r' % (self.source[offset:offset + fragment_len]) + if offset + fragment_len < len(self.source): + s += '...' + return s + + def get_handler(self, node_type): + """ + Get a handler for the specified AST node type. + """ + return getattr(self, 'do_%s' % node_type, None) + + def evaluate(self, node, filename=None): + """ + Evaluate a source string or node, using ``filename`` when + displaying errors. 
+ """ + if isinstance(node, string_types): + self.source = node + kwargs = {'mode': 'eval'} + if filename: + kwargs['filename'] = filename + try: + node = ast.parse(node, **kwargs) + except SyntaxError as e: + s = self.get_fragment(e.offset) + raise SyntaxError('syntax error %s' % s) + node_type = node.__class__.__name__.lower() + handler = self.get_handler(node_type) + if handler is None: + if self.source is None: + s = '(source not available)' + else: + s = self.get_fragment(node.col_offset) + raise SyntaxError("don't know how to evaluate %r %s" % ( + node_type, s)) + return handler(node) + + def get_attr_key(self, node): + assert isinstance(node, ast.Attribute), 'attribute node expected' + return '%s.%s' % (node.value.id, node.attr) + + def do_attribute(self, node): + if not isinstance(node.value, ast.Name): + valid = False + else: + key = self.get_attr_key(node) + valid = key in self.context or key in self.allowed_values + if not valid: + raise SyntaxError('invalid expression: %s' % key) + if key in self.context: + result = self.context[key] + else: + result = self.allowed_values[key] + return result + + def do_boolop(self, node): + result = self.evaluate(node.values[0]) + is_or = node.op.__class__ is ast.Or + is_and = node.op.__class__ is ast.And + assert is_or or is_and + if (is_and and result) or (is_or and not result): + for n in node.values[1:]: + result = self.evaluate(n) + if (is_or and result) or (is_and and not result): + break + return result + + def do_compare(self, node): + def sanity_check(lhsnode, rhsnode): + valid = True + if isinstance(lhsnode, ast.Str) and isinstance(rhsnode, ast.Str): + valid = False + #elif (isinstance(lhsnode, ast.Attribute) + # and isinstance(rhsnode, ast.Attribute)): + # klhs = self.get_attr_key(lhsnode) + # krhs = self.get_attr_key(rhsnode) + # valid = klhs != krhs + if not valid: + s = self.get_fragment(node.col_offset) + raise SyntaxError('Invalid comparison: %s' % s) + + lhsnode = node.left + lhs = 
self.evaluate(lhsnode) + result = True + for op, rhsnode in zip(node.ops, node.comparators): + sanity_check(lhsnode, rhsnode) + op = op.__class__.__name__.lower() + if op not in self.operators: + raise SyntaxError('unsupported operation: %r' % op) + rhs = self.evaluate(rhsnode) + result = self.operators[op](lhs, rhs) + if not result: + break + lhs = rhs + lhsnode = rhsnode + return result + + def do_expression(self, node): + return self.evaluate(node.body) + + def do_name(self, node): + valid = False + if node.id in self.context: + valid = True + result = self.context[node.id] + elif node.id in self.allowed_values: + valid = True + result = self.allowed_values[node.id] + if not valid: + raise SyntaxError('invalid expression: %s' % node.id) + return result + + def do_str(self, node): + return node.s + + +def interpret(marker, execution_context=None): + """ + Interpret a marker and return a result depending on environment. + + :param marker: The marker to interpret. + :type marker: str + :param execution_context: The context used for name lookup. + :type execution_context: mapping + """ + return Evaluator(execution_context).evaluate(marker.strip()) diff --git a/lib/python3.4/site-packages/pip/_vendor/distlib/metadata.py b/lib/python3.4/site-packages/pip/_vendor/distlib/metadata.py new file mode 100644 index 0000000..71525dd --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/distlib/metadata.py @@ -0,0 +1,1066 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +"""Implementation of the Metadata for Python packages PEPs. + +Supports all metadata formats (1.0, 1.1, 1.2, and 2.0 experimental). +""" +from __future__ import unicode_literals + +import codecs +from email import message_from_file +import json +import logging +import re + + +from . 
import DistlibException, __version__ +from .compat import StringIO, string_types, text_type +from .markers import interpret +from .util import extract_by_key, get_extras +from .version import get_scheme, PEP440_VERSION_RE + +logger = logging.getLogger(__name__) + + +class MetadataMissingError(DistlibException): + """A required metadata is missing""" + + +class MetadataConflictError(DistlibException): + """Attempt to read or write metadata fields that are conflictual.""" + + +class MetadataUnrecognizedVersionError(DistlibException): + """Unknown metadata version number.""" + + +class MetadataInvalidError(DistlibException): + """A metadata value is invalid""" + +# public API of this module +__all__ = ['Metadata', 'PKG_INFO_ENCODING', 'PKG_INFO_PREFERRED_VERSION'] + +# Encoding used for the PKG-INFO files +PKG_INFO_ENCODING = 'utf-8' + +# preferred version. Hopefully will be changed +# to 1.2 once PEP 345 is supported everywhere +PKG_INFO_PREFERRED_VERSION = '1.1' + +_LINE_PREFIX_1_2 = re.compile('\n \|') +_LINE_PREFIX_PRE_1_2 = re.compile('\n ') +_241_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', + 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', + 'License') + +_314_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', + 'Supported-Platform', 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', + 'License', 'Classifier', 'Download-URL', 'Obsoletes', + 'Provides', 'Requires') + +_314_MARKERS = ('Obsoletes', 'Provides', 'Requires', 'Classifier', + 'Download-URL') + +_345_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', + 'Supported-Platform', 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', + 'Maintainer', 'Maintainer-email', 'License', + 'Classifier', 'Download-URL', 'Obsoletes-Dist', + 'Project-URL', 'Provides-Dist', 'Requires-Dist', + 'Requires-Python', 'Requires-External') + +_345_MARKERS = ('Provides-Dist', 'Requires-Dist', 'Requires-Python', + 
'Obsoletes-Dist', 'Requires-External', 'Maintainer', + 'Maintainer-email', 'Project-URL') + +_426_FIELDS = ('Metadata-Version', 'Name', 'Version', 'Platform', + 'Supported-Platform', 'Summary', 'Description', + 'Keywords', 'Home-page', 'Author', 'Author-email', + 'Maintainer', 'Maintainer-email', 'License', + 'Classifier', 'Download-URL', 'Obsoletes-Dist', + 'Project-URL', 'Provides-Dist', 'Requires-Dist', + 'Requires-Python', 'Requires-External', 'Private-Version', + 'Obsoleted-By', 'Setup-Requires-Dist', 'Extension', + 'Provides-Extra') + +_426_MARKERS = ('Private-Version', 'Provides-Extra', 'Obsoleted-By', + 'Setup-Requires-Dist', 'Extension') + +_ALL_FIELDS = set() +_ALL_FIELDS.update(_241_FIELDS) +_ALL_FIELDS.update(_314_FIELDS) +_ALL_FIELDS.update(_345_FIELDS) +_ALL_FIELDS.update(_426_FIELDS) + +EXTRA_RE = re.compile(r'''extra\s*==\s*("([^"]+)"|'([^']+)')''') + + +def _version2fieldlist(version): + if version == '1.0': + return _241_FIELDS + elif version == '1.1': + return _314_FIELDS + elif version == '1.2': + return _345_FIELDS + elif version == '2.0': + return _426_FIELDS + raise MetadataUnrecognizedVersionError(version) + + +def _best_version(fields): + """Detect the best version depending on the fields used.""" + def _has_marker(keys, markers): + for marker in markers: + if marker in keys: + return True + return False + + keys = [] + for key, value in fields.items(): + if value in ([], 'UNKNOWN', None): + continue + keys.append(key) + + possible_versions = ['1.0', '1.1', '1.2', '2.0'] + + # first let's try to see if a field is not part of one of the version + for key in keys: + if key not in _241_FIELDS and '1.0' in possible_versions: + possible_versions.remove('1.0') + if key not in _314_FIELDS and '1.1' in possible_versions: + possible_versions.remove('1.1') + if key not in _345_FIELDS and '1.2' in possible_versions: + possible_versions.remove('1.2') + if key not in _426_FIELDS and '2.0' in possible_versions: + possible_versions.remove('2.0') + + # 
possible_version contains qualified versions + if len(possible_versions) == 1: + return possible_versions[0] # found ! + elif len(possible_versions) == 0: + raise MetadataConflictError('Unknown metadata set') + + # let's see if one unique marker is found + is_1_1 = '1.1' in possible_versions and _has_marker(keys, _314_MARKERS) + is_1_2 = '1.2' in possible_versions and _has_marker(keys, _345_MARKERS) + is_2_0 = '2.0' in possible_versions and _has_marker(keys, _426_MARKERS) + if int(is_1_1) + int(is_1_2) + int(is_2_0) > 1: + raise MetadataConflictError('You used incompatible 1.1/1.2/2.0 fields') + + # we have the choice, 1.0, or 1.2, or 2.0 + # - 1.0 has a broken Summary field but works with all tools + # - 1.1 is to avoid + # - 1.2 fixes Summary but has little adoption + # - 2.0 adds more features and is very new + if not is_1_1 and not is_1_2 and not is_2_0: + # we couldn't find any specific marker + if PKG_INFO_PREFERRED_VERSION in possible_versions: + return PKG_INFO_PREFERRED_VERSION + if is_1_1: + return '1.1' + if is_1_2: + return '1.2' + + return '2.0' + +_ATTR2FIELD = { + 'metadata_version': 'Metadata-Version', + 'name': 'Name', + 'version': 'Version', + 'platform': 'Platform', + 'supported_platform': 'Supported-Platform', + 'summary': 'Summary', + 'description': 'Description', + 'keywords': 'Keywords', + 'home_page': 'Home-page', + 'author': 'Author', + 'author_email': 'Author-email', + 'maintainer': 'Maintainer', + 'maintainer_email': 'Maintainer-email', + 'license': 'License', + 'classifier': 'Classifier', + 'download_url': 'Download-URL', + 'obsoletes_dist': 'Obsoletes-Dist', + 'provides_dist': 'Provides-Dist', + 'requires_dist': 'Requires-Dist', + 'setup_requires_dist': 'Setup-Requires-Dist', + 'requires_python': 'Requires-Python', + 'requires_external': 'Requires-External', + 'requires': 'Requires', + 'provides': 'Provides', + 'obsoletes': 'Obsoletes', + 'project_url': 'Project-URL', + 'private_version': 'Private-Version', + 'obsoleted_by': 
'Obsoleted-By', + 'extension': 'Extension', + 'provides_extra': 'Provides-Extra', +} + +_PREDICATE_FIELDS = ('Requires-Dist', 'Obsoletes-Dist', 'Provides-Dist') +_VERSIONS_FIELDS = ('Requires-Python',) +_VERSION_FIELDS = ('Version',) +_LISTFIELDS = ('Platform', 'Classifier', 'Obsoletes', + 'Requires', 'Provides', 'Obsoletes-Dist', + 'Provides-Dist', 'Requires-Dist', 'Requires-External', + 'Project-URL', 'Supported-Platform', 'Setup-Requires-Dist', + 'Provides-Extra', 'Extension') +_LISTTUPLEFIELDS = ('Project-URL',) + +_ELEMENTSFIELD = ('Keywords',) + +_UNICODEFIELDS = ('Author', 'Maintainer', 'Summary', 'Description') + +_MISSING = object() + +_FILESAFE = re.compile('[^A-Za-z0-9.]+') + + +def _get_name_and_version(name, version, for_filename=False): + """Return the distribution name with version. + + If for_filename is true, return a filename-escaped form.""" + if for_filename: + # For both name and version any runs of non-alphanumeric or '.' + # characters are replaced with a single '-'. Additionally any + # spaces in the version string become '.' + name = _FILESAFE.sub('-', name) + version = _FILESAFE.sub('-', version.replace(' ', '.')) + return '%s-%s' % (name, version) + + +class LegacyMetadata(object): + """The legacy metadata of a release. + + Supports versions 1.0, 1.1 and 1.2 (auto-detected). 
You can + instantiate the class with one of these arguments (or none): + - *path*, the path to a metadata file + - *fileobj* give a file-like object with metadata as content + - *mapping* is a dict-like object + - *scheme* is a version scheme name + """ + # TODO document the mapping API and UNKNOWN default key + + def __init__(self, path=None, fileobj=None, mapping=None, + scheme='default'): + if [path, fileobj, mapping].count(None) < 2: + raise TypeError('path, fileobj and mapping are exclusive') + self._fields = {} + self.requires_files = [] + self._dependencies = None + self.scheme = scheme + if path is not None: + self.read(path) + elif fileobj is not None: + self.read_file(fileobj) + elif mapping is not None: + self.update(mapping) + self.set_metadata_version() + + def set_metadata_version(self): + self._fields['Metadata-Version'] = _best_version(self._fields) + + def _write_field(self, fileobj, name, value): + fileobj.write('%s: %s\n' % (name, value)) + + def __getitem__(self, name): + return self.get(name) + + def __setitem__(self, name, value): + return self.set(name, value) + + def __delitem__(self, name): + field_name = self._convert_name(name) + try: + del self._fields[field_name] + except KeyError: + raise KeyError(name) + + def __contains__(self, name): + return (name in self._fields or + self._convert_name(name) in self._fields) + + def _convert_name(self, name): + if name in _ALL_FIELDS: + return name + name = name.replace('-', '_').lower() + return _ATTR2FIELD.get(name, name) + + def _default_value(self, name): + if name in _LISTFIELDS or name in _ELEMENTSFIELD: + return [] + return 'UNKNOWN' + + def _remove_line_prefix(self, value): + if self.metadata_version in ('1.0', '1.1'): + return _LINE_PREFIX_PRE_1_2.sub('\n', value) + else: + return _LINE_PREFIX_1_2.sub('\n', value) + + def __getattr__(self, name): + if name in _ATTR2FIELD: + return self[name] + raise AttributeError(name) + + # + # Public API + # + +# dependencies = 
property(_get_dependencies, _set_dependencies) + + def get_fullname(self, filesafe=False): + """Return the distribution name with version. + + If filesafe is true, return a filename-escaped form.""" + return _get_name_and_version(self['Name'], self['Version'], filesafe) + + def is_field(self, name): + """return True if name is a valid metadata key""" + name = self._convert_name(name) + return name in _ALL_FIELDS + + def is_multi_field(self, name): + name = self._convert_name(name) + return name in _LISTFIELDS + + def read(self, filepath): + """Read the metadata values from a file path.""" + fp = codecs.open(filepath, 'r', encoding='utf-8') + try: + self.read_file(fp) + finally: + fp.close() + + def read_file(self, fileob): + """Read the metadata values from a file object.""" + msg = message_from_file(fileob) + self._fields['Metadata-Version'] = msg['metadata-version'] + + # When reading, get all the fields we can + for field in _ALL_FIELDS: + if field not in msg: + continue + if field in _LISTFIELDS: + # we can have multiple lines + values = msg.get_all(field) + if field in _LISTTUPLEFIELDS and values is not None: + values = [tuple(value.split(',')) for value in values] + self.set(field, values) + else: + # single line + value = msg[field] + if value is not None and value != 'UNKNOWN': + self.set(field, value) + self.set_metadata_version() + + def write(self, filepath, skip_unknown=False): + """Write the metadata fields to filepath.""" + fp = codecs.open(filepath, 'w', encoding='utf-8') + try: + self.write_file(fp, skip_unknown) + finally: + fp.close() + + def write_file(self, fileobject, skip_unknown=False): + """Write the PKG-INFO format data to a file object.""" + self.set_metadata_version() + + for field in _version2fieldlist(self['Metadata-Version']): + values = self.get(field) + if skip_unknown and values in ('UNKNOWN', [], ['UNKNOWN']): + continue + if field in _ELEMENTSFIELD: + self._write_field(fileobject, field, ','.join(values)) + continue + if field not 
in _LISTFIELDS: + if field == 'Description': + if self.metadata_version in ('1.0', '1.1'): + values = values.replace('\n', '\n ') + else: + values = values.replace('\n', '\n |') + values = [values] + + if field in _LISTTUPLEFIELDS: + values = [','.join(value) for value in values] + + for value in values: + self._write_field(fileobject, field, value) + + def update(self, other=None, **kwargs): + """Set metadata values from the given iterable `other` and kwargs. + + Behavior is like `dict.update`: If `other` has a ``keys`` method, + they are looped over and ``self[key]`` is assigned ``other[key]``. + Else, ``other`` is an iterable of ``(key, value)`` iterables. + + Keys that don't match a metadata field or that have an empty value are + dropped. + """ + def _set(key, value): + if key in _ATTR2FIELD and value: + self.set(self._convert_name(key), value) + + if not other: + # other is None or empty container + pass + elif hasattr(other, 'keys'): + for k in other.keys(): + _set(k, other[k]) + else: + for k, v in other: + _set(k, v) + + if kwargs: + for k, v in kwargs.items(): + _set(k, v) + + def set(self, name, value): + """Control then set a metadata field.""" + name = self._convert_name(name) + + if ((name in _ELEMENTSFIELD or name == 'Platform') and + not isinstance(value, (list, tuple))): + if isinstance(value, string_types): + value = [v.strip() for v in value.split(',')] + else: + value = [] + elif (name in _LISTFIELDS and + not isinstance(value, (list, tuple))): + if isinstance(value, string_types): + value = [value] + else: + value = [] + + if logger.isEnabledFor(logging.WARNING): + project_name = self['Name'] + + scheme = get_scheme(self.scheme) + if name in _PREDICATE_FIELDS and value is not None: + for v in value: + # check that the values are valid + if not scheme.is_valid_matcher(v.split(';')[0]): + logger.warning( + '%r: %r is not valid (field %r)', + project_name, v, name) + # FIXME this rejects UNKNOWN, is that right? 
+ elif name in _VERSIONS_FIELDS and value is not None: + if not scheme.is_valid_constraint_list(value): + logger.warning('%r: %r is not a valid version (field %r)', + project_name, value, name) + elif name in _VERSION_FIELDS and value is not None: + if not scheme.is_valid_version(value): + logger.warning('%r: %r is not a valid version (field %r)', + project_name, value, name) + + if name in _UNICODEFIELDS: + if name == 'Description': + value = self._remove_line_prefix(value) + + self._fields[name] = value + + def get(self, name, default=_MISSING): + """Get a metadata field.""" + name = self._convert_name(name) + if name not in self._fields: + if default is _MISSING: + default = self._default_value(name) + return default + if name in _UNICODEFIELDS: + value = self._fields[name] + return value + elif name in _LISTFIELDS: + value = self._fields[name] + if value is None: + return [] + res = [] + for val in value: + if name not in _LISTTUPLEFIELDS: + res.append(val) + else: + # That's for Project-URL + res.append((val[0], val[1])) + return res + + elif name in _ELEMENTSFIELD: + value = self._fields[name] + if isinstance(value, string_types): + return value.split(',') + return self._fields[name] + + def check(self, strict=False): + """Check if the metadata is compliant. 
If strict is True then raise if + no Name or Version are provided""" + self.set_metadata_version() + + # XXX should check the versions (if the file was loaded) + missing, warnings = [], [] + + for attr in ('Name', 'Version'): # required by PEP 345 + if attr not in self: + missing.append(attr) + + if strict and missing != []: + msg = 'missing required metadata: %s' % ', '.join(missing) + raise MetadataMissingError(msg) + + for attr in ('Home-page', 'Author'): + if attr not in self: + missing.append(attr) + + # checking metadata 1.2 (XXX needs to check 1.1, 1.0) + if self['Metadata-Version'] != '1.2': + return missing, warnings + + scheme = get_scheme(self.scheme) + + def are_valid_constraints(value): + for v in value: + if not scheme.is_valid_matcher(v.split(';')[0]): + return False + return True + + for fields, controller in ((_PREDICATE_FIELDS, are_valid_constraints), + (_VERSIONS_FIELDS, + scheme.is_valid_constraint_list), + (_VERSION_FIELDS, + scheme.is_valid_version)): + for field in fields: + value = self.get(field, None) + if value is not None and not controller(value): + warnings.append('Wrong value for %r: %s' % (field, value)) + + return missing, warnings + + def todict(self, skip_missing=False): + """Return fields as a dict. + + Field names will be converted to use the underscore-lowercase style + instead of hyphen-mixed case (i.e. home_page instead of Home-page). 
+ """ + self.set_metadata_version() + + mapping_1_0 = ( + ('metadata_version', 'Metadata-Version'), + ('name', 'Name'), + ('version', 'Version'), + ('summary', 'Summary'), + ('home_page', 'Home-page'), + ('author', 'Author'), + ('author_email', 'Author-email'), + ('license', 'License'), + ('description', 'Description'), + ('keywords', 'Keywords'), + ('platform', 'Platform'), + ('classifiers', 'Classifier'), + ('download_url', 'Download-URL'), + ) + + data = {} + for key, field_name in mapping_1_0: + if not skip_missing or field_name in self._fields: + data[key] = self[field_name] + + if self['Metadata-Version'] == '1.2': + mapping_1_2 = ( + ('requires_dist', 'Requires-Dist'), + ('requires_python', 'Requires-Python'), + ('requires_external', 'Requires-External'), + ('provides_dist', 'Provides-Dist'), + ('obsoletes_dist', 'Obsoletes-Dist'), + ('project_url', 'Project-URL'), + ('maintainer', 'Maintainer'), + ('maintainer_email', 'Maintainer-email'), + ) + for key, field_name in mapping_1_2: + if not skip_missing or field_name in self._fields: + if key != 'project_url': + data[key] = self[field_name] + else: + data[key] = [','.join(u) for u in self[field_name]] + + elif self['Metadata-Version'] == '1.1': + mapping_1_1 = ( + ('provides', 'Provides'), + ('requires', 'Requires'), + ('obsoletes', 'Obsoletes'), + ) + for key, field_name in mapping_1_1: + if not skip_missing or field_name in self._fields: + data[key] = self[field_name] + + return data + + def add_requirements(self, requirements): + if self['Metadata-Version'] == '1.1': + # we can't have 1.1 metadata *and* Setuptools requires + for field in ('Obsoletes', 'Requires', 'Provides'): + if field in self: + del self[field] + self['Requires-Dist'] += requirements + + # Mapping API + # TODO could add iter* variants + + def keys(self): + return list(_version2fieldlist(self['Metadata-Version'])) + + def __iter__(self): + for key in self.keys(): + yield key + + def values(self): + return [self[key] for key in 
self.keys()] + + def items(self): + return [(key, self[key]) for key in self.keys()] + + def __repr__(self): + return '<%s %s %s>' % (self.__class__.__name__, self.name, + self.version) + + +METADATA_FILENAME = 'pydist.json' +WHEEL_METADATA_FILENAME = 'metadata.json' + + +class Metadata(object): + """ + The metadata of a release. This implementation uses 2.0 (JSON) + metadata where possible. If not possible, it wraps a LegacyMetadata + instance which handles the key-value metadata format. + """ + + METADATA_VERSION_MATCHER = re.compile('^\d+(\.\d+)*$') + + NAME_MATCHER = re.compile('^[0-9A-Z]([0-9A-Z_.-]*[0-9A-Z])?$', re.I) + + VERSION_MATCHER = PEP440_VERSION_RE + + SUMMARY_MATCHER = re.compile('.{1,2047}') + + METADATA_VERSION = '2.0' + + GENERATOR = 'distlib (%s)' % __version__ + + MANDATORY_KEYS = { + 'name': (), + 'version': (), + 'summary': ('legacy',), + } + + INDEX_KEYS = ('name version license summary description author ' + 'author_email keywords platform home_page classifiers ' + 'download_url') + + DEPENDENCY_KEYS = ('extras run_requires test_requires build_requires ' + 'dev_requires provides meta_requires obsoleted_by ' + 'supports_environments') + + SYNTAX_VALIDATORS = { + 'metadata_version': (METADATA_VERSION_MATCHER, ()), + 'name': (NAME_MATCHER, ('legacy',)), + 'version': (VERSION_MATCHER, ('legacy',)), + 'summary': (SUMMARY_MATCHER, ('legacy',)), + } + + __slots__ = ('_legacy', '_data', 'scheme') + + def __init__(self, path=None, fileobj=None, mapping=None, + scheme='default'): + if [path, fileobj, mapping].count(None) < 2: + raise TypeError('path, fileobj and mapping are exclusive') + self._legacy = None + self._data = None + self.scheme = scheme + #import pdb; pdb.set_trace() + if mapping is not None: + try: + self._validate_mapping(mapping, scheme) + self._data = mapping + except MetadataUnrecognizedVersionError: + self._legacy = LegacyMetadata(mapping=mapping, scheme=scheme) + self.validate() + else: + data = None + if path: + with open(path, 
'rb') as f: + data = f.read() + elif fileobj: + data = fileobj.read() + if data is None: + # Initialised with no args - to be added + self._data = { + 'metadata_version': self.METADATA_VERSION, + 'generator': self.GENERATOR, + } + else: + if not isinstance(data, text_type): + data = data.decode('utf-8') + try: + self._data = json.loads(data) + self._validate_mapping(self._data, scheme) + except ValueError: + # Note: MetadataUnrecognizedVersionError does not + # inherit from ValueError (it's a DistlibException, + # which should not inherit from ValueError). + # The ValueError comes from the json.load - if that + # succeeds and we get a validation error, we want + # that to propagate + self._legacy = LegacyMetadata(fileobj=StringIO(data), + scheme=scheme) + self.validate() + + common_keys = set(('name', 'version', 'license', 'keywords', 'summary')) + + none_list = (None, list) + none_dict = (None, dict) + + mapped_keys = { + 'run_requires': ('Requires-Dist', list), + 'build_requires': ('Setup-Requires-Dist', list), + 'dev_requires': none_list, + 'test_requires': none_list, + 'meta_requires': none_list, + 'extras': ('Provides-Extra', list), + 'modules': none_list, + 'namespaces': none_list, + 'exports': none_dict, + 'commands': none_dict, + 'classifiers': ('Classifier', list), + 'source_url': ('Download-URL', None), + 'metadata_version': ('Metadata-Version', None), + } + + del none_list, none_dict + + def __getattribute__(self, key): + common = object.__getattribute__(self, 'common_keys') + mapped = object.__getattribute__(self, 'mapped_keys') + if key in mapped: + lk, maker = mapped[key] + if self._legacy: + if lk is None: + result = None if maker is None else maker() + else: + result = self._legacy.get(lk) + else: + value = None if maker is None else maker() + if key not in ('commands', 'exports', 'modules', 'namespaces', + 'classifiers'): + result = self._data.get(key, value) + else: + # special cases for PEP 459 + sentinel = object() + result = sentinel + d = 
self._data.get('extensions') + if d: + if key == 'commands': + result = d.get('python.commands', value) + elif key == 'classifiers': + d = d.get('python.details') + if d: + result = d.get(key, value) + else: + d = d.get('python.exports') + if d: + result = d.get(key, value) + if result is sentinel: + result = value + elif key not in common: + result = object.__getattribute__(self, key) + elif self._legacy: + result = self._legacy.get(key) + else: + result = self._data.get(key) + return result + + def _validate_value(self, key, value, scheme=None): + if key in self.SYNTAX_VALIDATORS: + pattern, exclusions = self.SYNTAX_VALIDATORS[key] + if (scheme or self.scheme) not in exclusions: + m = pattern.match(value) + if not m: + raise MetadataInvalidError('%r is an invalid value for ' + 'the %r property' % (value, + key)) + + def __setattr__(self, key, value): + self._validate_value(key, value) + common = object.__getattribute__(self, 'common_keys') + mapped = object.__getattribute__(self, 'mapped_keys') + if key in mapped: + lk, _ = mapped[key] + if self._legacy: + if lk is None: + raise NotImplementedError + self._legacy[lk] = value + elif key not in ('commands', 'exports', 'modules', 'namespaces', + 'classifiers'): + self._data[key] = value + else: + # special cases for PEP 459 + d = self._data.setdefault('extensions', {}) + if key == 'commands': + d['python.commands'] = value + elif key == 'classifiers': + d = d.setdefault('python.details', {}) + d[key] = value + else: + d = d.setdefault('python.exports', {}) + d[key] = value + elif key not in common: + object.__setattr__(self, key, value) + else: + if key == 'keywords': + if isinstance(value, string_types): + value = value.strip() + if value: + value = value.split() + else: + value = [] + if self._legacy: + self._legacy[key] = value + else: + self._data[key] = value + + @property + def name_and_version(self): + return _get_name_and_version(self.name, self.version, True) + + @property + def provides(self): + if 
self._legacy: + result = self._legacy['Provides-Dist'] + else: + result = self._data.setdefault('provides', []) + s = '%s (%s)' % (self.name, self.version) + if s not in result: + result.append(s) + return result + + @provides.setter + def provides(self, value): + if self._legacy: + self._legacy['Provides-Dist'] = value + else: + self._data['provides'] = value + + def get_requirements(self, reqts, extras=None, env=None): + """ + Base method to get dependencies, given a set of extras + to satisfy and an optional environment context. + :param reqts: A list of sometimes-wanted dependencies, + perhaps dependent on extras and environment. + :param extras: A list of optional components being requested. + :param env: An optional environment for marker evaluation. + """ + if self._legacy: + result = reqts + else: + result = [] + extras = get_extras(extras or [], self.extras) + for d in reqts: + if 'extra' not in d and 'environment' not in d: + # unconditional + include = True + else: + if 'extra' not in d: + # Not extra-dependent - only environment-dependent + include = True + else: + include = d.get('extra') in extras + if include: + # Not excluded because of extras, check environment + marker = d.get('environment') + if marker: + include = interpret(marker, env) + if include: + result.extend(d['requires']) + for key in ('build', 'dev', 'test'): + e = ':%s:' % key + if e in extras: + extras.remove(e) + # A recursive call, but it should terminate since 'test' + # has been removed from the extras + reqts = self._data.get('%s_requires' % key, []) + result.extend(self.get_requirements(reqts, extras=extras, + env=env)) + return result + + @property + def dictionary(self): + if self._legacy: + return self._from_legacy() + return self._data + + @property + def dependencies(self): + if self._legacy: + raise NotImplementedError + else: + return extract_by_key(self._data, self.DEPENDENCY_KEYS) + + @dependencies.setter + def dependencies(self, value): + if self._legacy: + raise 
NotImplementedError + else: + self._data.update(value) + + def _validate_mapping(self, mapping, scheme): + if mapping.get('metadata_version') != self.METADATA_VERSION: + raise MetadataUnrecognizedVersionError() + missing = [] + for key, exclusions in self.MANDATORY_KEYS.items(): + if key not in mapping: + if scheme not in exclusions: + missing.append(key) + if missing: + msg = 'Missing metadata items: %s' % ', '.join(missing) + raise MetadataMissingError(msg) + for k, v in mapping.items(): + self._validate_value(k, v, scheme) + + def validate(self): + if self._legacy: + missing, warnings = self._legacy.check(True) + if missing or warnings: + logger.warning('Metadata: missing: %s, warnings: %s', + missing, warnings) + else: + self._validate_mapping(self._data, self.scheme) + + def todict(self): + if self._legacy: + return self._legacy.todict(True) + else: + result = extract_by_key(self._data, self.INDEX_KEYS) + return result + + def _from_legacy(self): + assert self._legacy and not self._data + result = { + 'metadata_version': self.METADATA_VERSION, + 'generator': self.GENERATOR, + } + lmd = self._legacy.todict(True) # skip missing ones + for k in ('name', 'version', 'license', 'summary', 'description', + 'classifier'): + if k in lmd: + if k == 'classifier': + nk = 'classifiers' + else: + nk = k + result[nk] = lmd[k] + kw = lmd.get('Keywords', []) + if kw == ['']: + kw = [] + result['keywords'] = kw + keys = (('requires_dist', 'run_requires'), + ('setup_requires_dist', 'build_requires')) + for ok, nk in keys: + if ok in lmd and lmd[ok]: + result[nk] = [{'requires': lmd[ok]}] + result['provides'] = self.provides + author = {} + maintainer = {} + return result + + LEGACY_MAPPING = { + 'name': 'Name', + 'version': 'Version', + 'license': 'License', + 'summary': 'Summary', + 'description': 'Description', + 'classifiers': 'Classifier', + } + + def _to_legacy(self): + def process_entries(entries): + reqts = set() + for e in entries: + extra = e.get('extra') + env = 
e.get('environment') + rlist = e['requires'] + for r in rlist: + if not env and not extra: + reqts.add(r) + else: + marker = '' + if extra: + marker = 'extra == "%s"' % extra + if env: + if marker: + marker = '(%s) and %s' % (env, marker) + else: + marker = env + reqts.add(';'.join((r, marker))) + return reqts + + assert self._data and not self._legacy + result = LegacyMetadata() + nmd = self._data + for nk, ok in self.LEGACY_MAPPING.items(): + if nk in nmd: + result[ok] = nmd[nk] + r1 = process_entries(self.run_requires + self.meta_requires) + r2 = process_entries(self.build_requires + self.dev_requires) + if self.extras: + result['Provides-Extra'] = sorted(self.extras) + result['Requires-Dist'] = sorted(r1) + result['Setup-Requires-Dist'] = sorted(r2) + # TODO: other fields such as contacts + return result + + def write(self, path=None, fileobj=None, legacy=False, skip_unknown=True): + if [path, fileobj].count(None) != 1: + raise ValueError('Exactly one of path and fileobj is needed') + self.validate() + if legacy: + if self._legacy: + legacy_md = self._legacy + else: + legacy_md = self._to_legacy() + if path: + legacy_md.write(path, skip_unknown=skip_unknown) + else: + legacy_md.write_file(fileobj, skip_unknown=skip_unknown) + else: + if self._legacy: + d = self._from_legacy() + else: + d = self._data + if fileobj: + json.dump(d, fileobj, ensure_ascii=True, indent=2, + sort_keys=True) + else: + with codecs.open(path, 'w', 'utf-8') as f: + json.dump(d, f, ensure_ascii=True, indent=2, + sort_keys=True) + + def add_requirements(self, requirements): + if self._legacy: + self._legacy.add_requirements(requirements) + else: + run_requires = self._data.setdefault('run_requires', []) + always = None + for entry in run_requires: + if 'environment' not in entry and 'extra' not in entry: + always = entry + break + if always is None: + always = { 'requires': requirements } + run_requires.insert(0, always) + else: + rset = set(always['requires']) | set(requirements) + 
always['requires'] = sorted(rset) + + def __repr__(self): + name = self.name or '(no name)' + version = self.version or 'no version' + return '<%s %s %s (%s)>' % (self.__class__.__name__, + self.metadata_version, name, version) diff --git a/lib/python3.4/site-packages/pip/_vendor/distlib/resources.py b/lib/python3.4/site-packages/pip/_vendor/distlib/resources.py new file mode 100644 index 0000000..9dd8ca0 --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/distlib/resources.py @@ -0,0 +1,350 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2016 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +from __future__ import unicode_literals + +import bisect +import io +import logging +import os +import pkgutil +import shutil +import sys +import types +import zipimport + +from . import DistlibException +from .util import cached_property, get_cache_base, path_to_cache_dir, Cache + +logger = logging.getLogger(__name__) + + +cache = None # created when needed + + +class ResourceCache(Cache): + def __init__(self, base=None): + if base is None: + # Use native string to avoid issues on 2.x: see Python #20140. + base = os.path.join(get_cache_base(), str('resource-cache')) + super(ResourceCache, self).__init__(base) + + def is_stale(self, resource, path): + """ + Is the cache stale for the given resource? + + :param resource: The :class:`Resource` being cached. + :param path: The path of the resource in the cache. + :return: True if the cache is stale. + """ + # Cache invalidation is a hard problem :-) + return True + + def get(self, resource): + """ + Get a resource into the cache, + + :param resource: A :class:`Resource` instance. + :return: The pathname of the resource in the cache. 
+ """ + prefix, path = resource.finder.get_cache_info(resource) + if prefix is None: + result = path + else: + result = os.path.join(self.base, self.prefix_to_dir(prefix), path) + dirname = os.path.dirname(result) + if not os.path.isdir(dirname): + os.makedirs(dirname) + if not os.path.exists(result): + stale = True + else: + stale = self.is_stale(resource, path) + if stale: + # write the bytes of the resource to the cache location + with open(result, 'wb') as f: + f.write(resource.bytes) + return result + + +class ResourceBase(object): + def __init__(self, finder, name): + self.finder = finder + self.name = name + + +class Resource(ResourceBase): + """ + A class representing an in-package resource, such as a data file. This is + not normally instantiated by user code, but rather by a + :class:`ResourceFinder` which manages the resource. + """ + is_container = False # Backwards compatibility + + def as_stream(self): + """ + Get the resource as a stream. + + This is not a property to make it obvious that it returns a new stream + each time. + """ + return self.finder.get_stream(self) + + @cached_property + def file_path(self): + global cache + if cache is None: + cache = ResourceCache() + return cache.get(self) + + @cached_property + def bytes(self): + return self.finder.get_bytes(self) + + @cached_property + def size(self): + return self.finder.get_size(self) + + +class ResourceContainer(ResourceBase): + is_container = True # Backwards compatibility + + @cached_property + def resources(self): + return self.finder.get_resources(self) + + +class ResourceFinder(object): + """ + Resource finder for file system resources. 
+ """ + + if sys.platform.startswith('java'): + skipped_extensions = ('.pyc', '.pyo', '.class') + else: + skipped_extensions = ('.pyc', '.pyo') + + def __init__(self, module): + self.module = module + self.loader = getattr(module, '__loader__', None) + self.base = os.path.dirname(getattr(module, '__file__', '')) + + def _adjust_path(self, path): + return os.path.realpath(path) + + def _make_path(self, resource_name): + # Issue #50: need to preserve type of path on Python 2.x + # like os.path._get_sep + if isinstance(resource_name, bytes): # should only happen on 2.x + sep = b'/' + else: + sep = '/' + parts = resource_name.split(sep) + parts.insert(0, self.base) + result = os.path.join(*parts) + return self._adjust_path(result) + + def _find(self, path): + return os.path.exists(path) + + def get_cache_info(self, resource): + return None, resource.path + + def find(self, resource_name): + path = self._make_path(resource_name) + if not self._find(path): + result = None + else: + if self._is_directory(path): + result = ResourceContainer(self, resource_name) + else: + result = Resource(self, resource_name) + result.path = path + return result + + def get_stream(self, resource): + return open(resource.path, 'rb') + + def get_bytes(self, resource): + with open(resource.path, 'rb') as f: + return f.read() + + def get_size(self, resource): + return os.path.getsize(resource.path) + + def get_resources(self, resource): + def allowed(f): + return (f != '__pycache__' and not + f.endswith(self.skipped_extensions)) + return set([f for f in os.listdir(resource.path) if allowed(f)]) + + def is_container(self, resource): + return self._is_directory(resource.path) + + _is_directory = staticmethod(os.path.isdir) + + def iterator(self, resource_name): + resource = self.find(resource_name) + if resource is not None: + todo = [resource] + while todo: + resource = todo.pop(0) + yield resource + if resource.is_container: + rname = resource.name + for name in resource.resources: + if not 
rname: + new_name = name + else: + new_name = '/'.join([rname, name]) + child = self.find(new_name) + if child.is_container: + todo.append(child) + else: + yield child + + +class ZipResourceFinder(ResourceFinder): + """ + Resource finder for resources in .zip files. + """ + def __init__(self, module): + super(ZipResourceFinder, self).__init__(module) + archive = self.loader.archive + self.prefix_len = 1 + len(archive) + # PyPy doesn't have a _files attr on zipimporter, and you can't set one + if hasattr(self.loader, '_files'): + self._files = self.loader._files + else: + self._files = zipimport._zip_directory_cache[archive] + self.index = sorted(self._files) + + def _adjust_path(self, path): + return path + + def _find(self, path): + path = path[self.prefix_len:] + if path in self._files: + result = True + else: + if path and path[-1] != os.sep: + path = path + os.sep + i = bisect.bisect(self.index, path) + try: + result = self.index[i].startswith(path) + except IndexError: + result = False + if not result: + logger.debug('_find failed: %r %r', path, self.loader.prefix) + else: + logger.debug('_find worked: %r %r', path, self.loader.prefix) + return result + + def get_cache_info(self, resource): + prefix = self.loader.archive + path = resource.path[1 + len(prefix):] + return prefix, path + + def get_bytes(self, resource): + return self.loader.get_data(resource.path) + + def get_stream(self, resource): + return io.BytesIO(self.get_bytes(resource)) + + def get_size(self, resource): + path = resource.path[self.prefix_len:] + return self._files[path][3] + + def get_resources(self, resource): + path = resource.path[self.prefix_len:] + if path and path[-1] != os.sep: + path += os.sep + plen = len(path) + result = set() + i = bisect.bisect(self.index, path) + while i < len(self.index): + if not self.index[i].startswith(path): + break + s = self.index[i][plen:] + result.add(s.split(os.sep, 1)[0]) # only immediate children + i += 1 + return result + + def 
_is_directory(self, path): + path = path[self.prefix_len:] + if path and path[-1] != os.sep: + path += os.sep + i = bisect.bisect(self.index, path) + try: + result = self.index[i].startswith(path) + except IndexError: + result = False + return result + +_finder_registry = { + type(None): ResourceFinder, + zipimport.zipimporter: ZipResourceFinder +} + +try: + import _frozen_importlib + _finder_registry[_frozen_importlib.SourceFileLoader] = ResourceFinder + _finder_registry[_frozen_importlib.FileFinder] = ResourceFinder +except (ImportError, AttributeError): + pass + + +def register_finder(loader, finder_maker): + _finder_registry[type(loader)] = finder_maker + +_finder_cache = {} + + +def finder(package): + """ + Return a resource finder for a package. + :param package: The name of the package. + :return: A :class:`ResourceFinder` instance for the package. + """ + if package in _finder_cache: + result = _finder_cache[package] + else: + if package not in sys.modules: + __import__(package) + module = sys.modules[package] + path = getattr(module, '__path__', None) + if path is None: + raise DistlibException('You cannot get a finder for a module, ' + 'only for a package') + loader = getattr(module, '__loader__', None) + finder_maker = _finder_registry.get(type(loader)) + if finder_maker is None: + raise DistlibException('Unable to locate finder for %r' % package) + result = finder_maker(module) + _finder_cache[package] = result + return result + + +_dummy_module = types.ModuleType(str('__dummy__')) + + +def finder_for_path(path): + """ + Return a resource finder for a path, which should represent a container. + + :param path: The path. + :return: A :class:`ResourceFinder` instance for the path. 
+ """ + result = None + # calls any path hooks, gets importer into cache + pkgutil.get_importer(path) + loader = sys.path_importer_cache.get(path) + finder = _finder_registry.get(type(loader)) + if finder: + module = _dummy_module + module.__file__ = os.path.join(path, '') + module.__loader__ = loader + result = finder(module) + return result diff --git a/lib/python3.4/site-packages/pip/_vendor/distlib/scripts.py b/lib/python3.4/site-packages/pip/_vendor/distlib/scripts.py new file mode 100644 index 0000000..c9996d5 --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/distlib/scripts.py @@ -0,0 +1,384 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2013-2015 Vinay Sajip. +# Licensed to the Python Software Foundation under a contributor agreement. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +from io import BytesIO +import logging +import os +import re +import struct +import sys + +from .compat import sysconfig, detect_encoding, ZipFile +from .resources import finder +from .util import (FileOperator, get_export_entry, convert_path, + get_executable, in_venv) + +logger = logging.getLogger(__name__) + +_DEFAULT_MANIFEST = ''' + + + + + + + + + + + + +'''.strip() + +# check if Python is called on the first line with this expression +FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$') +SCRIPT_TEMPLATE = '''# -*- coding: utf-8 -*- +if __name__ == '__main__': + import sys, re + + def _resolve(module, func): + __import__(module) + mod = sys.modules[module] + parts = func.split('.') + result = getattr(mod, parts.pop(0)) + for p in parts: + result = getattr(result, p) + return result + + try: + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + + func = _resolve('%(module)s', '%(func)s') + rc = func() # None interpreted as 0 + except Exception as e: # only supporting Python >= 2.6 + sys.stderr.write('%%s\\n' %% e) + rc = 1 + sys.exit(rc) +''' + + +def _enquote_executable(executable): + if ' ' in executable: + # make sure we quote only the 
executable in case of env + # for example /usr/bin/env "/dir with spaces/bin/jython" + # instead of "/usr/bin/env /dir with spaces/bin/jython" + # otherwise whole + if executable.startswith('/usr/bin/env '): + env, _executable = executable.split(' ', 1) + if ' ' in _executable and not _executable.startswith('"'): + executable = '%s "%s"' % (env, _executable) + else: + if not executable.startswith('"'): + executable = '"%s"' % executable + return executable + + +class ScriptMaker(object): + """ + A class to copy or create scripts from source scripts or callable + specifications. + """ + script_template = SCRIPT_TEMPLATE + + executable = None # for shebangs + + def __init__(self, source_dir, target_dir, add_launchers=True, + dry_run=False, fileop=None): + self.source_dir = source_dir + self.target_dir = target_dir + self.add_launchers = add_launchers + self.force = False + self.clobber = False + # It only makes sense to set mode bits on POSIX. + self.set_mode = (os.name == 'posix') or (os.name == 'java' and + os._name == 'posix') + self.variants = set(('', 'X.Y')) + self._fileop = fileop or FileOperator(dry_run) + + self._is_nt = os.name == 'nt' or ( + os.name == 'java' and os._name == 'nt') + + def _get_alternate_executable(self, executable, options): + if options.get('gui', False) and self._is_nt: # pragma: no cover + dn, fn = os.path.split(executable) + fn = fn.replace('python', 'pythonw') + executable = os.path.join(dn, fn) + return executable + + if sys.platform.startswith('java'): # pragma: no cover + def _is_shell(self, executable): + """ + Determine if the specified executable is a script + (contains a #! line) + """ + try: + with open(executable) as fp: + return fp.read(2) == '#!' + except (OSError, IOError): + logger.warning('Failed to open %s', executable) + return False + + def _fix_jython_executable(self, executable): + if self._is_shell(executable): + # Workaround for Jython is not needed on Linux systems. 
+ import java + + if java.lang.System.getProperty('os.name') == 'Linux': + return executable + elif executable.lower().endswith('jython.exe'): + # Use wrapper exe for Jython on Windows + return executable + return '/usr/bin/env %s' % executable + + def _get_shebang(self, encoding, post_interp=b'', options=None): + enquote = True + if self.executable: + executable = self.executable + enquote = False # assume this will be taken care of + elif not sysconfig.is_python_build(): + executable = get_executable() + elif in_venv(): # pragma: no cover + executable = os.path.join(sysconfig.get_path('scripts'), + 'python%s' % sysconfig.get_config_var('EXE')) + else: # pragma: no cover + executable = os.path.join( + sysconfig.get_config_var('BINDIR'), + 'python%s%s' % (sysconfig.get_config_var('VERSION'), + sysconfig.get_config_var('EXE'))) + if options: + executable = self._get_alternate_executable(executable, options) + + if sys.platform.startswith('java'): # pragma: no cover + executable = self._fix_jython_executable(executable) + # Normalise case for Windows + executable = os.path.normcase(executable) + # If the user didn't specify an executable, it may be necessary to + # cater for executable paths with spaces (not uncommon on Windows) + if enquote: + executable = _enquote_executable(executable) + # Issue #51: don't use fsencode, since we later try to + # check that the shebang is decodable using utf-8. + executable = executable.encode('utf-8') + # in case of IronPython, play safe and enable frames support + if (sys.platform == 'cli' and '-X:Frames' not in post_interp + and '-X:FullFrames' not in post_interp): # pragma: no cover + post_interp += b' -X:Frames' + shebang = b'#!' + executable + post_interp + b'\n' + # Python parser starts to read a script using UTF-8 until + # it gets a #coding:xxx cookie. The shebang has to be the + # first line of a file, the #coding:xxx cookie cannot be + # written before. So the shebang has to be decodable from + # UTF-8. 
+ try: + shebang.decode('utf-8') + except UnicodeDecodeError: # pragma: no cover + raise ValueError( + 'The shebang (%r) is not decodable from utf-8' % shebang) + # If the script is encoded to a custom encoding (use a + # #coding:xxx cookie), the shebang has to be decodable from + # the script encoding too. + if encoding != 'utf-8': + try: + shebang.decode(encoding) + except UnicodeDecodeError: # pragma: no cover + raise ValueError( + 'The shebang (%r) is not decodable ' + 'from the script encoding (%r)' % (shebang, encoding)) + return shebang + + def _get_script_text(self, entry): + return self.script_template % dict(module=entry.prefix, + func=entry.suffix) + + manifest = _DEFAULT_MANIFEST + + def get_manifest(self, exename): + base = os.path.basename(exename) + return self.manifest % base + + def _write_script(self, names, shebang, script_bytes, filenames, ext): + use_launcher = self.add_launchers and self._is_nt + linesep = os.linesep.encode('utf-8') + if not use_launcher: + script_bytes = shebang + linesep + script_bytes + else: # pragma: no cover + if ext == 'py': + launcher = self._get_launcher('t') + else: + launcher = self._get_launcher('w') + stream = BytesIO() + with ZipFile(stream, 'w') as zf: + zf.writestr('__main__.py', script_bytes) + zip_data = stream.getvalue() + script_bytes = launcher + shebang + linesep + zip_data + for name in names: + outname = os.path.join(self.target_dir, name) + if use_launcher: # pragma: no cover + n, e = os.path.splitext(outname) + if e.startswith('.py'): + outname = n + outname = '%s.exe' % outname + try: + self._fileop.write_binary_file(outname, script_bytes) + except Exception: + # Failed writing an executable - it might be in use. 
+ logger.warning('Failed to write executable - trying to ' + 'use .deleteme logic') + dfname = '%s.deleteme' % outname + if os.path.exists(dfname): + os.remove(dfname) # Not allowed to fail here + os.rename(outname, dfname) # nor here + self._fileop.write_binary_file(outname, script_bytes) + logger.debug('Able to replace executable using ' + '.deleteme logic') + try: + os.remove(dfname) + except Exception: + pass # still in use - ignore error + else: + if self._is_nt and not outname.endswith('.' + ext): # pragma: no cover + outname = '%s.%s' % (outname, ext) + if os.path.exists(outname) and not self.clobber: + logger.warning('Skipping existing file %s', outname) + continue + self._fileop.write_binary_file(outname, script_bytes) + if self.set_mode: + self._fileop.set_executable_mode([outname]) + filenames.append(outname) + + def _make_script(self, entry, filenames, options=None): + post_interp = b'' + if options: + args = options.get('interpreter_args', []) + if args: + args = ' %s' % ' '.join(args) + post_interp = args.encode('utf-8') + shebang = self._get_shebang('utf-8', post_interp, options=options) + script = self._get_script_text(entry).encode('utf-8') + name = entry.name + scriptnames = set() + if '' in self.variants: + scriptnames.add(name) + if 'X' in self.variants: + scriptnames.add('%s%s' % (name, sys.version[0])) + if 'X.Y' in self.variants: + scriptnames.add('%s-%s' % (name, sys.version[:3])) + if options and options.get('gui', False): + ext = 'pyw' + else: + ext = 'py' + self._write_script(scriptnames, shebang, script, filenames, ext) + + def _copy_script(self, script, filenames): + adjust = False + script = os.path.join(self.source_dir, convert_path(script)) + outname = os.path.join(self.target_dir, os.path.basename(script)) + if not self.force and not self._fileop.newer(script, outname): + logger.debug('not copying %s (up-to-date)', script) + return + + # Always open the file, but ignore failures in dry-run mode -- + # that way, we'll get accurate 
feedback if we can read the + # script. + try: + f = open(script, 'rb') + except IOError: # pragma: no cover + if not self.dry_run: + raise + f = None + else: + first_line = f.readline() + if not first_line: # pragma: no cover + logger.warning('%s: %s is an empty file (skipping)', + self.get_command_name(), script) + return + + match = FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n')) + if match: + adjust = True + post_interp = match.group(1) or b'' + + if not adjust: + if f: + f.close() + self._fileop.copy_file(script, outname) + if self.set_mode: + self._fileop.set_executable_mode([outname]) + filenames.append(outname) + else: + logger.info('copying and adjusting %s -> %s', script, + self.target_dir) + if not self._fileop.dry_run: + encoding, lines = detect_encoding(f.readline) + f.seek(0) + shebang = self._get_shebang(encoding, post_interp) + if b'pythonw' in first_line: # pragma: no cover + ext = 'pyw' + else: + ext = 'py' + n = os.path.basename(outname) + self._write_script([n], shebang, f.read(), filenames, ext) + if f: + f.close() + + @property + def dry_run(self): + return self._fileop.dry_run + + @dry_run.setter + def dry_run(self, value): + self._fileop.dry_run = value + + if os.name == 'nt' or (os.name == 'java' and os._name == 'nt'): # pragma: no cover + # Executable launcher support. + # Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/ + + def _get_launcher(self, kind): + if struct.calcsize('P') == 8: # 64-bit + bits = '64' + else: + bits = '32' + name = '%s%s.exe' % (kind, bits) + # Issue 31: don't hardcode an absolute package name, but + # determine it relative to the current package + distlib_package = __name__.rsplit('.', 1)[0] + result = finder(distlib_package).find(name).bytes + return result + + # Public API follows + + def make(self, specification, options=None): + """ + Make a script. 
+ + :param specification: The specification, which is either a valid export + entry specification (to make a script from a + callable) or a filename (to make a script by + copying from a source location). + :param options: A dictionary of options controlling script generation. + :return: A list of all absolute pathnames written to. + """ + filenames = [] + entry = get_export_entry(specification) + if entry is None: + self._copy_script(specification, filenames) + else: + self._make_script(entry, filenames, options=options) + return filenames + + def make_multiple(self, specifications, options=None): + """ + Take a list of specifications and make scripts from them, + :param specifications: A list of specifications. + :return: A list of all absolute pathnames written to, + """ + filenames = [] + for specification in specifications: + filenames.extend(self.make(specification, options)) + return filenames diff --git a/lib/python3.4/site-packages/pip/_vendor/distlib/t32.exe b/lib/python3.4/site-packages/pip/_vendor/distlib/t32.exe new file mode 100644 index 0000000000000000000000000000000000000000..e0168c22c57f32e59dc7f64f805c586e959c3ec2 GIT binary patch literal 89088 zcmeFae|!|xxj#PpBgrOgG7BUaeg_N|32Jn~k}iQovLOn=g~)~#19-)_u2eD10Ja7a zCu=hqw%T63x89GoQnVjz?Y*>$U@f>IGyz2gw2FmlY*Rh!q#6r`1!Cs=e$MPBL9y-q zyzc$)i)QA`InVj^oaa2}InQ~Xvz0eLEZ77=u;b4#1Yr-}^v}zmfB(UdoHOw^Il}Xq zuT9!xS@hbZn?iT4FIZQ1&mDEQe!JkdTkpR69;x78ZZD{l?=HCW?t-!#s|vn-&zjq> z%+Ag#Ocw3bKFF{5_@RZVzY|YgvhaF*S8Xg@7~t>W3m5SBl!deKp7+CX3oG%i`1tz^ zT|9nL5L#B@4PKUWu|)I_X)xxOPcVn?+ss;it7}tmSL7GLHG?~er(1bnS!?q ze@E!sYlf|YkS16tV&E-2iEkk7PuncOpSO_%Qa;Me-vcj-6n6ei5N1=L*djdTr@)u~ zJ!}!)atK1hQ9&5@rKAf!Kl#$w9KpBQvOfHH;Y_}hoSA^*In>Piz#IB+E;pkgZXV+s%c^9n-j%DVM+w;_u66@3<# z;^W*P428T@Ufu0$?*ZWC97F&Iyu3wt5BcX6gzJWU{{O-MK@2R@x*NOgAc$>E)_H>a zcdD;XsdosnFY0so4cQru^qn`1kO3YMBD(RjA7bQ{1G#2`KUDCsS73)T5f}6N*{?B- z$^mECV&n-&yn8N%a(eggOxGyc1Gf(g@VGD=x9Fdqb_C<=) zGCdC2swJWo7R~1 zJRRaAt(f^nh_4e)^0C&2&#Q0o~kWmI@N)__eo&Wq|XCtQ{i!Zfiy5$Zlk(dEh`*n 
zk%qk;qDxy)WQ@Ov;wN6e$p}N%_3q8xbYC%^Uw-z?jY$_In zV36mO%$yG73?1fh@AiR=%2ozY?2KwZJNHc@*j!aAoT|;70kCkd@wOj{YuIUwOozR& zn`iqevZ2h+&BPFU$4qqSgyx8uc-l-{%YI`f7V^Z9nP>^skMIf|Wrep8pEdGkqb^jF zPjyZAS65eK5UULdjWY6_NJB6;>0$>(zraqjiEKOD^4Fj;aI~@&@Khk_o zwHPd9I`rx{KbmB~Q>x8Vytv`uAlf@!%i$+sJ zIhYpKReBC(h^8`R+0M{#dD>*gvrGy(|C-FQoM$oNYc2vGjdT;L0_qd=X4Pnr8tEcT z#!8-`cgRac5vgSJUX*aa(Are&nFNr_b0`P$qZL+gJzGbu=S{h{sP$+<>(O`xUrA6S zCsDY@NAN3&+ChyIOw^C!@=;vm1d7{#G+{evi&DPosC2a>!1iM71_m~?15NU?JJ243 z6OqBm#Mnh0Gj{R5oCHW9P|aS(YOEY6u4ZpzG!MrM(Q8jOE#k8ER9n0st4P=xI`Rot z=lnd7hoRPYxno+5!}C5c;pvcb*^XR6=nzJM{jr4zx?}ox0U2d+RByDNvj zU8=9E!$*s-!7!rW=Uwatl=ywj9RG{vnLVK+V#Rv z1M;Cv2q|m?Uy_|_I%c9B{m5X42}-~k!;8UkQL8O;>W*5^E7Axh2F~E9jU1ADYD-)a z_(>9sBp>MtGy(H5HvcD*5{Y_`lOMp1X<9&ELKb6;yMHf1fYTyHEq}|%oEzS#jYgYmAex8Gxi7= z6+PY4UZ0`0Z>N;79lUu0wP^cO6KMICCxq<*bta`T?*$2I;W5iswz-eJNagBj+X!BD zT9wuUWxu`gYy$}8{*S}`D*@pGM76fRHIb&-BMn%$v~$Ww6I(w@*X$We!jK%EcFB&q ztm@UseqH0)b;xl4>#N+cm2Ir006VDCGQyvf#sX-AF_Y?I<3VXdNQ(E*(^EIOv41t{ z-Z(~kz35=;*)*lqwS*m##M(MGU;(X!mHzHIdOZ?>IP zJ~P~J&lq!s)3iIq3f%MuAT$Xim1*UmN2ye7{u^So^Pb~%*uHq``^4|P zgotY6T9SR_1e<$>AY8uLZkc1LPZYfm9(Z|upH|-!j{wS3bGq&prT(-aO?(l&Y>WGt z=iLpNTI(74wAQL;Xl)c|E84FQ+e68o;~TA7o5$C;aRnGo0;|Y$P>-L*ip)O6r0>#P zS!3}fijH6I>(dJZ?A1bmQ0!8rzE6-QYn2JZmuR-2dkYP4Bj4%B2{CdRir0L(@!w*? 
zDD{vYiNDhDIG6?P2Sa1`f`fMmlhNw-_35G2=RW}M z?p5Wk64@nPRU+p~(@Nxg`3hdTh|;k!aj>V*0V+I~7tYtD-B`lhG4>P0U|Gm#bFc;m zRl;Td1=R8mRz4EQ2UnCd8VFyP&a-`ovYDhdkPV_Q>bx0!MaG&oG}ZMfn))f#ctWkA zg|gn^IZsPp8*1Ky_~b%489|Us{CB7V;zf;F9dPg;ToCHx2XYT^1SH7p3eFMY6 zQ9f2l^h=|(BdzDsw1dhA6I(w>w>cjr?xv@`DA?x-z4HpU$d}XTj3&@rjdP>UiLIUK zHha_r+`|FAg$+_J`zu&=vPBOgs8n_tB{g00rPb{Bs8j2q-8*D%XEi$r!~!+d0andX zJnqZO?p*YUFS|>q^(La0fXW1+w#4UldVuLbpX*)XZl6QTUYkBo5O>?>>Emh5 z3tOT-2cAaWaxgA1XWJy&4bWeu^U~Jj8mDwSt1yAVl|1Kx)MVM|qjeRmco5o|O(ZQ|DRG{)6nZVqIU9YqW5@hJ#t8QP5w?Re3< zYR&21{WfW~_BJ6smm^#GaANC+>Ahvw*c&h!M?FW@yR@E$Z)M4r`PX^guKOH$Xl}@3 zwYKt8IVl`83oN z9rpx{8$>3}#%nD`t@mMXjLDQ>|H*fePE29BKwIDdgR>pfGaF`Q$u`61@M5Jnb!LH6 zD8QQQr5v1t+-GKr#O_N24eGP_nv7A|=+bU<_O?1C$C*}A(nvOK0eBWF?ky{}+a)Rz zs}>l6<~wck^{rfmjSz^w_)`%5Ic#A-$vy`t8AF%Het?K1ZOuTCYMM&ZWgn!{AjvuO zF^v|~AyY0T#C?VwoSJMTw42mKLY!9a+@Z)WMkShsQ@M92ou4Z8E{j_3lyd^?_n65R zEOxiMgkXTVNrWnQ2Ag8{Ih4*m!04lPSfmj<98%6R)J-{~qdcF)5_9DUu-)I_TyW{N z=+Pl?6U-;^(bniZ14}o$>!75D0VG6@!%?aR|b&4!x|m+^I;j0&F&z zUqDN+7zR7Qx-JtSZT6r?STOj&zhUSjwDNISa)$Yt?{l!Q+Hv4AwN8OVI^NNB)X~Y* z8Sn)X=^;a>M)x^Xm+Wo;LF#Tcs`g6Rya>qQS5o5x?2X=3ciC)obm(9cf7RbYvCwV} zc2Ftp&Y34zA$VQS6fYFq?WyL(G!J_+GW9u|P=3qsav zFpoWzHxs8p4PJ`*&nh6fmx76zY?lF&7ozgl_#McosEGHf`D#kn(rjQ zmHIA$;9~^eiAR!~Yraml0gAsib`rgeKZ@DOg{#KysSy6QCqyu!?9=qj>!IiPZhFpS z^epYdvvzBq%SEqjJAaMufY2Cv5M-r{=S77x0Z0V7rsz z`L4GktpaHl{I`t~xAy^q9cevyX4-{VN7`wGFo>oSMvWZD8ojOAipKCQwUM`u2EdCV zfYhhaqab)--daS49zW_uFod8|N&w*ffN3O{5S49eAt8US0m#700U8*EDN|&$)L5Io7ayf$TSo~J2AiQJ& zPW5c}38k{nqG!I8iJl;6H8oD|L-VR@J^RI`w-Ft^Z_95$l0xK=*fb8`;_jG|s1hIg zSM*)lqObz7sTV{H)8R|tQ&3)Y#oO>6 z_I~`gc=fh<#mFytyf^*}ygPh71QvgcCtAhG!w6{5>i@YPOjLQ*I;;czk2fGzsZR*9 zG*E7Vez(GIl5SR5j^wKV9?+;fg{6y;(J1jun-BxCeZ&5sFCalx_B7t!XS@uBr|&dX zs9()vr(`bcOT|n~<{94jDs858aSxZDk7J8|NVg6MV7WL)|n^t#I+Z zOV}0K`6+t60!6w{aP%b2kE6|l!YLHYb!9k){- zi#kj|+9jJ=XYWJ}>MEz_yfmuu4!)g+m37;O#->X;Rh-;YCrd0K@?V%P+vh zd^nBtidhR+x&%KB7tlj!(g1@44pi;Z_iG%zV zIlx~}$N1})zs2kEhax?AfsIBh?P`>?sU5sNVxDS*QQGx=YP5?2pp+;zSF3a(1_708 
zCQW`BhW^K7l2Oi3!2^j+VhwabE#ypgx*E$^5QMkN{n`rls%{v>BeDMOy9CTL5Vo@4 z(NsKuT^T>lNix!vrVL}uB-9#t_w#ZkTaD5A<=xnSVQD?=j``^y>%a#H-^`4W!TFs1 zwTHpyrIGWLvscu87OnWJa(4K}oN)GX44D13N03(2mR5#*;hdo7;MmxutxC*M)5bbx z(rPQG?@8F8nnGL~dp;G_XGS@bf)@J|vO}U?qB__E2nKPM_FKHwVu(jvOEUNft-iu5 ztT9j$XbQ(i>e@nA6n{mXI2GVzb2UyHf9F?kXAs-LN0UT|Q#|N6*KDVJZCYi|8WoE% zFuMwqSNluNl2ko>6;`_Qx%xs|?{Lo~t!TBD-tIHNt2yh8e%D2tzHDu=-S&ptIg8Es z80}x|)XE*^cOiZI9KBw9t;vir{?TPjt?JWOU^QcEWw&y^Us@HtUWnRPXp0`qjg~zK z6>E=%?MEVV2>Z!*7*&NzEs@wJ@0JCcFQFYrt@7&)^@bo7YFPV*HsM!svk3v-gm1BJ z^zA!s_O6XoA^4{s0+t&~Zd?EDPHbLFe*}R^kP1TdX185OJdZRZqP7f6^im^_;*|r~ z>o^g^9zlDc*(zD?ltyq>akciQc98v>84c|OvBod8YV4O%wM+Dzhp_M;i>@|NF6hl6*|_aQ>c>w zE!LJh06pD3T3hE-`uimh%9x>?>zC}FziqfOI%7lhmgijBFtk|tu3eB1hb+{K`3`o? zd_i~-Vg6Bu4}IsFhagE1Zt{W5HWX_=GtZumm^}n1xlx}39daLJS1)2=mInbR=I+iZM zabPanWipAH{SF}v!vLtoNI+`>TW}x3;__(gc2Z(%t;v~R=>QRq4R^YaEkE;_$LCxx zvLbM=l*YNHP5iG)XF&T5dm~G^JErt^ZX6j1J5ow`(rshzU!#>EI*CuPwr2A?4XdHd1cDqM*)^*dM(3yu)`Y@kJqeJv&V@R|`7qfVKC@RRF zfh(c`V9J40q`?#7Sk7DU?BwP?WLbAvkIB5b3{x6|`6ahGaC`ns{ffk!c zU9!Cb!4{wEr=);~dQT7|Pa@iw*^F|unY3saZ)?ZZnj<*E2pKb*5RC{(7H_FF(4ZYx zKD8t4IbXMsZMzbpZ-(?&e9SSvN!i%<5v67rZ<^&%RH}%Qnv-%`iSM|SL*>*QQGLgG zsanku{TCg+t~J0xV|~{;yw(qgVo(D$aYXpK?D~ftzD|h)bZ$xlbPfV^asX5AC_rbR ztq|nPJ9w`x_y#&&#$J|)h3&->P3e*YvCmL!8~XrcVGb*h$~7ndm7F>QvE01mu~aOX zPJ{)fn%`~sL-CdoWE=SgIAEUyh>tJ-6#Bn z%l^Bqh)NP!V|O1486p=EWkN~#B1j3xktc%cUOVc94zy`40K+ym8qzS7nNWUZm=O+& z0MTVDrF4LUgip|(#{ND$#inZk6gqGMB$^8r^|Bu%QO)XnG#R)EzXwnuI=J5V0FnY# zwcA^ANwlD2@|Vs-v_r`gLyw>UsQ$i{$|pHfqQwFz9s(>K30|~n_J(Nzb{0e*wfmeX zPdixjdf0aKY-lSAD0;Wb&qO408C>2QYC_~bY8I4@4`8nHCdp45yQs^EyL%w+2O(F1 zMF`D8hH9qItvneRkq=m5vk$^0bOg zHVWqBEDBpzTiZT@B=f2F?zd|1m5itA>hF5F2`_3PI}B)UjIl&XqqKr(GtZIXXggJ<%xy$YY_22G z306#obPI`0)fldp%-&XuR0$2NJPauN5l{w{mTG692G%-o!Uz4YZH9T8NtxAj2$C&A zbGe;woucJVw>g!?{YmWI^Q^Rd~8gKeU36YX(kjHXk3Q_vpFzxNw&>B#XvRc93^3usT8lC zsU&Q&9YM!e+9XX!d-N;<>qrM}8S|C%HgRhRA%l;(k@pC?j6=5-IL{`HVlSsX7-;np zn;P>0@suDoA>SxTYXJRimXoFPqU5b%=|5S4moZ1#Y3FPn?+B>g) 
zAHe(zd*=oEw$wftdRk1BE>@3LV4+~w()={1J?Xhf=dK-W_`v2q(Qwu(d!M0`5~&2c zifV065ljU(Pp`%n{{aYZI6Azvf-^UmcZkRbox7-XenC3-3_9RbUNu%Bqf%=J@vhX` zAP$rbLH*HLF+TXKP>LqR1R5v1vuC9Tv1uBiy18@F~6({mrYPF>cQ z!lhP$QxOG+j>{bKa6h|&T1H5p0Ho`qOX?kRZf~nq!gySjOpo58TyJt4 zn%tQhhyV-oO0fMg1%eR(g3SmFRl9KH;ycMsL0v*4@hlP`En3(_Di^4tsqk_$NMD)# zc^S_8Eb{oM{c|ru>MTCrjAErDKg9$jCzg`J`(Y{PUA3}JDdd2dU;1a7QKgM1;t1@c z%os?_>yZjhm8I-z!L0*~VlNq-^sWZw{t^NV|~a zQen*a9TjWj-GnOmQNk=5ns<6=XcmwK(TS4rB-!KqZVe_7N{vNoGOR=KZ7B8fB<9UEFc39+rpyuKksx+T#2V@d4`K7$7x#;$K-V&VA2y7IBv<`9&YrTt9%TQj8 z_ME~Y+M+?QPoesvxo{Uhy2ojeC$SOc#$ARyF6txwNr$idI+TagX;o%^DE5qHFyF=g zJr_d7Oi$bWla0!*O&QSfH|Oiak;h-frt`Iqh&c`FL|vq_=&t5l<{07=F<27v6Fxqw zWF&$1{1%N!Dk)c)?Wt*lPJ$ZksK(-f^Dfy{B0J>~P;&nqt-<+MhE`9E*2T6AWadYp z>>hlh^``2_bAH1#ZPxs%Kxl3gXk{-J2<&%AwXlGdQ&qDj%Rx39A$6AS*Yc~AfRC7f z?XWb)YT7T4hUjd-lw(9P&lKjMJo!3~y6zhbBSivou~UDgj!Y)}{~LlpEZ>~+zM?_2 zR1C2sIcrvu*Ngf*b2a3D#wo4Uhg`qoU2r;J!+qe`8_bPL5lvN zb2)&yd>zf@EIyYFUuSAM*I?@yboBM}O_e+zxzQIOQu6%t0ru%rZfzB}}ycN;O3I8-ZW6m&G*$ka=0BD`R-h)oY5fWz|#xP`{>Qwv_Z}TXZe!F1LPCql4$nz zi~C{~VS7*-T_NXa`_CL9{;T|{UtQ*cHK1D4qkqSYl}EErKH|-M3$G+0Y(!AMR@;9R z`f(QwYT&qn1;)izLtNI5L*nScx;KISl;yBiHMwK24bDPM@rifUQT zR9c#E%9*Z-w%2>-*()4p?R1lM(10SO8)k;oU|;- zuZb;-a;6Zlj|0zbc4(s4%t4@5R1VN7S@IMPdKrPj(h=q6ra%FPy#|3@mK7GFm23&R z<<~Nd8R5#~mHKfa!3Dh2@2 zcEHa^gGziZ@qu!Xot~#nNXt1D@V&n*VK^Q!U_rjK8O@}t6 z&yPQd4~XO93x{-3N!YiHMCElk3&j_Pc^h6NIa`oe%SiEsSHyiPeZnd=n=SYY5#uyu z)xQH*4B2jjQ;R0{mR*f5O_|?()75F%fZw0lGEx(pZ_1f6pZ4IXugm5a9oI*kIsO>V zPOt_$*a7UE&@*f{ifVd}GFq2$yxar*`=H+jS#ieX87V9EZmdf|rLr4GH$70hlU>Tl zqlkiy37D%G(J=hLPh2tI+9go#v_X4si=iNO(Wqj*9W*hlR{8rx!B0DYCI6Ri0K_`=O4RK>iX+Nfwlcq}*|Zv*O=JIuTJ5<@UK? 
zC&#J88ucZd(jnl+h)kc((AWEUYSp=qN8SQ21mu2bGh;lMa z;0Pz3enRkV3pj$!_)N19v_C~aV~Bu8V@^vs_TF?Npd727S*r=C3t$)&u(bFOvC^67 z52vm4v_rg{_vaLgW!EF97RST1Y$m=fMsB9S?cGBB7Fr)o{Rs~Km(!-}UjWO!0{X$O zUHBk&{W(;EgY5bRpek}=lf%QcQ*~ol9LWUs`T!Wa7eW1))&@46V$r9-uEC|X3~?XK z1C>sT)NWc8`&?sVUaY1+O&0S#UJUJZ^xajg`!-tOp!O$p7h}hnl(VbY53v>G!f2O% z30pc?_swQGWQCU6rV*bdoy8#}wV``eY{nWCJFo)<_2w~gpkDTC=#IdQA;5cGgF1&? zEc#?haVvRjz&qqUK$+el4(H{&z?*Wppiv5pQN7L$GbO@&T6vx^*)?u4cnXLU_6c~s-M1{777 zqm-_N-Vugt@kj`AymnSOXRo{Z%)815Hpy9Zv^hiRuqbhh+zb2OPKT|PUM{>|QT}9W zja#MkQ@=pLSti`a1cwS%wc%+W)VIJi57gpkYf1l3nwI<7qSt{X+GB96h-^0hrhUe0 zFfG&ABx$cMFRrQvAsinKUBq1BB|5s33#!<{Zm~l73@y|MnPQlR6>5ZRHPi^%VW`|T zgUy0)z#e*^z8^s>=&Xn};4>*xT#8`B-knIn*@_l<76E1Tx-Upv_~tM!3wmW}%CAIx z@HYZfDI^dkt-s0eTkC1?52Tfm~<6`SrrsAXi*2qnsmP@;QKYxAR@;@NaeZ2CS<>E=Uw_5>N5l;l* zh>Tv>IeYm=QQWs*+}F^9lMSU)q;VAYHMP1E2f~*4&k%0pFf_g5|2~jpjcW)uoSP<3hnim9OM|R!=~MSoTRi<@ zbnO7B{S3FgiQkMEjgS3Zu`F@lE73B?5tA0;bCCppOltIuWU-fBSS+Pcv3a~$`ZD5~ zyg&*MEs&S;2}H<-bJt7D8qS5JvW9bOrTm6-8^y@mh&TH~=MwP_0`shuN6k-0V$PVu z6@L~@kbDDvCY0#p3v}J%M3Xm>rOH+FqF)@4luLb9O4=i;%P!Nxf>-R z78Ar8v8kNicZyAO=zR~)?C33vO;^%;lh||_y_btk6Yy4d5j{-eV;hao4D*XG?DwuRp}#J7W6f9zzz;ak!D7-ZSni5IxIfPse%@mj(Ppt}3Ln1P z#jQ3Te~0+}-Dr}MxKp}cN!%@6sU+^fh8%e5;3f{W6c4KDgE)%Mq0Op^an4b^W^LeYg2p4@ zI?+IF?*#VXMN(~7IvjB)8gN0bDED{fju+|^ge^7lB%URlebhVQ&W$f93j{qZ!x8%q z`8G3(k5hFCPu`qvEfX4P30K%OMa(fHstTM->dzcVJkJw~Zs( zUg8(ZfpLu=0V9GmUf~TDWL)~eCw`SN^Z6ufFxaq{X8s*;wS&5A5MbvZfaeN%cyc_^ zJ8%QSTUERjPqPm+)J%@Oh{>EJ6p22+UV00v`?3}Pgg%I|=#qCmVBw$*1OIeANlr~KMeLTIj0^*lh0|#Wvly3 zJuhuz+?7o$>ww$$7plTVtsGacdDqpJlQT_0d}n@6899~Dg6tGD?@v|p`=)< zHaG{+__N(o?A7O9UyJw*?BTGZJypb++|h(Sm2&X@S&)9cfc@NrHEUA# zlBfWLlZ~4>-3X^MF%F23GR?8NLLWot=*h1l9fx;TX}FmNR!CbZR2KFD^3U{36>gJk z=wr+bnBX+P$xeM*MB4=u55oP|Bs(Qtp6t|DuyzZM^De-evUd+vC5<$8QGgwa0pMvK z7u2|v*O*<{oM7B8Nc$>QbVtNdQ!%Ej=w#@}!8 z=eW@;jKp6Me=5@Wc7tU80Q=-7^a{c=^m^~UNn$#rGVat=iQ9FcX~JF)b`Z}(DF(X% zQV^MYbCP#K_D(-Lgj_+$@32WHRc6xZ+}t7K?jd_a!50H({+noD{t7Y-?8`UdL&1i- 
zW@z1UIw-hcU%n&>cI^Ndd-vEsvoEh=8+ZpVw(IUC=0!(9wCg^J;>^yoE(Fo0i|x9r zP`|nB-bcj_?kS zTSs|c%z8UcY_;#3mV}FGfYhbLo%IE@oe}H5fmi7TKb|-@WSPy~=6F3@%;M-Zneh*BO z|C=r{X`sZ5;rA3$P5iE;Vg~U`N_<6{07d>794oNz@8!GVGZ21pz^tazZoAx`(%Ius zO%obtakJ`a$|jdhF4Vlar3GpKI$FvrokXvk(#iD7C>6YqS>!a<4J8eXw{ZFudT50g z2l#6-3hr340jvcQ30=N}6_Ob6;#**Vt+iI;@(bwSoY5DtR0*Drx=$Nhd8l>0-Mklr zuMB<%++1-G9{-%C*10se60CNhaBsYeM_5ejPaHi~X)9N&i&sOmRL(lY2&t#Sg+Xoe zEy|h_2UaL&<5?>I{=dRQW%HU+X#8SwW=HuN1(EvZQly#Rjr4tYDb&g4HxZ$}vh-h5 z-x2y=TpCS%KS1AeN+I2w@T9EOuPpsx>br%$CzS58Cex8o&~r+kr*HU>t|8TgTUyeg9QrtHUr^18Pk@SHZ={xzOCTuzRkMUD-5p>V$j~sSG#FiOktV~} znOOl1ShClh8-VfRde}%{s$*I$fVg5i)c`Ywg9dK`yY3~>`V?x7pnL;Pb`5AI1{nhs zgrRqvF_S7}<2V!rId_DIyXm1JKvQ&>^V20z>sPsB0X0MW8}3T!Af+z6&m2-F%|4&Q zu8zcMbHkj;G95tr`ktGN`+Umj0D88O0j-szCB4}S>lq!+Mw5DKq>(|! zz|faeI$P2WpR*a)e=URCldI{3bPad7ex~7kft1#8K2!b)O0u_K2a^CyL3C4WilZ4V z>9Y$Y`|M0Py<9)FSpPFmID!OzV@oC`3~!*zu((gmC+D*Ac@1-Jk*;Ci0gUf)?2PahYrb~QC~2E(lXvg2b!>OcBZrd z#>=HpQ27CTDbuRWPG+~HqkCrOjEupa>e(!m1hJJH8Aw{@ERP2uiXeMNyHy~JQ3h8$ z2a)K>$`!zw4cyDH47e{kaVQt4E*VcG2nzguNy>J%6KcFbRrC0+F=Vx~6O(=$xvi=* zXJe${o0WmS&B~W;aC6m`^^q#Qsw!H4nq`5b!Hy9$Rf<~YuD^ja^Jk6q*J+8?kE~I< zi1V^*{cPrOfb|4nhH`l_yRE-EKm{D4#hn*Wv7KC6Rqoyjl`L8D;ed8X(_!oPTQp2N z?(X%x#}$+tYfIb`&MR?4RI!{$zI$-3*dD+I0)e8oz|n-)Ulj=87{YjmOg`JCS$_7j zcQ_yJ!`HL`)Vc|rft&`E@eXDN3ZE(%C`_vgZ&qMW>=W4k z(xdvKhVhs+u-Mvjkf7-CwNMZ*@;0I9HX|>F7q>kKz$B8kK6wYIe!BN=tTD5j4-Xl@ z!(ku=aF9ORSGH|i2YAx|jlv%6^Z+F6^s_0&*hb}1dJb0C!^os$^BUrxz6)J7d>MPN zO3ru{W?1=uA=bZ@%9R}*<+p6=0x;CAWV^LFSKYj>{lSBnH1pdZT!_g%zoR@9L($=E z_Iuc+n6CXBLPp*Z6p+%_&jEbc$(|LXbEBdc?+)(6u;3N0G3d`{MVU_<}G_ z_!f}HaWS)J2!1e6VDA%P30j`vwSAm$v)FDx1Ax=~X)hqw-HV#uLi{SiGc42@C_WkeitZf)~g!{i)S;>^YE3Dr!av{WGz zkSoAWJ&!WjG~j{AnGfH4H0|0b>}1Exuu(mf*y!4+Of~~70h6o^YhyE-8@dV9SZ%x= zs-X9FMg#YDXg2@g^Wp8#4F17~^lhb-AmAoK(_%YK104025+LreQc4L(;1@p?ZljsC zol>^ZZ0Ja%jODjtYrv_4=QStf_Ti&Bukb3_%NbcfE6-kh>CRLEJ^C+rdCku&z)h>~?z8Z3NoAha zEpBNl7Un_>(yr5U9HFoLZ6rOR3W zfm9DaK*;Q21R<@RFLTI>ECStH#i&VW4Q>W#Kq%4yyA1?ME<&eZ)1>ePONt4(b 
zv_dHm)Cp_>LLEXr3S_gXX+nH8abIk2N_#+Mv^($VAhaOBqQ1je&bzUqW6w^DekV#> z2f1Vq(qoq%g#APjhRh;OX}le(31bV-j$Fw5)r{ZNuKntE)7;PZr>rAhZzERsQ#`q! zY#f$TXh>(uNkT6Bhh>oaUo(}$vz4JwSXh3^ZbVdvkd1chPnsSUmBxUFMWx{&=xf>Z z`52aT?Fb$B4O~$~R$)O{xHKHHG%vE+zmksRXqmOJp`f9;@{5Mszo=-gG~_M4tqo$+ z4p0veWR=#zt`p_IjWbv7ECI9Y(d_m8*mBNN&?d=`L@Ugs;|3N^<@V!PY@@#86nh8@ zh`G-g11pH*W8{%q2ULtmd1(=b_<$3XyPrFlKXVvX^3dzYUHa!a>avNxe7lzeh>T}GWaNd`qImSyqtQ?EfmQQ3JrV>i2xNJyc2|vMvC97Mj%=HvxUjp#ikn&YqsFu z&B#mlqXiM#-^Mdg9V{=VK!qoi_TwRsL3_^Od_Ks23{@UyA-HoZz6u>HrB2FYtHV~n zL}43{Z1aAs@f`hgct6*hDf)i^E!6U`Z3S$pI&edADevv;5C)MG!Yd4>k=2&)yuam- zU=^Tiy5UT7^1IDw1`u6a3R@i4RQ5M~u%+1Xbr2wh78h<8jD3*N@TVqGjqL9LzLlA!U#g?XJR(T`+ym?WbZ8?Bks(FREGxIVF@zDt@~Qr{_Y%%UgeP$iLz z5wkb&9^d0dkIjXjUza}gRtPo;xP<9%(tJnhhW?N8F9;Lg&@NPwwW8={!1ytD|9PDV5bi0-IY4gZwsq7Rfx~ZX1N}8!+K_cG;J&z(I6kRND7UV|&6cTsZ>fLmJc!fs zeWWWvPaCPH+*tV%R*dc?d-~>YlRedMgU$dth(8s5h8}?tUC^T==+O~zx8*B6YUMpb zyN*%2j!131UHgk#^^ruQFzNO5;oQR{#e6c;~{C3#Pmg>JM#G4p2IS{rBa>$yI~C9dWLT^0G_Bm1@e z19{1i%FGW5pklTS7@U_|6%cpdo9;eV4TB82t#d4f8-81!vvrxcb;0&79Kf@#Q({x$ zA&QHkx#{;%gDhTNxIZ)%bH}{PV;e9n_?2$DAf5}!SNjsUz+>RH4&3AcvL~PrmIZ-L zOZ)x7bRyOer)@m5(a_6t1cYg3IKXCIaF79$4YoyD| zV!Y>cZ&wnWys`%`2{AKJKg`<}meHCT`q91EKwz{E2Lz!8J;gmDU%emV8W@A+Y88F% zgDm@J{C)_R=AXJ$^A|2~8g8 z$p8v*stXhz@PF zcL?&C2fG#)0Bl=mcO8(-wKm)Pu<4 z*w60jb=ojTVopzcUH4swoVH5GN*J}KQ8G?1YM^wbGE6sXx;-I8ZT-QENzRU&4)}q_ zuq_LvJgqHkrJcrP{Nd*=B!uPp>|HLq0gw*WNxgg%Ftm@FxQ!nAGGpT{+MDLZcF}cM zEcGQCuICk@Pi5)SqzYV1^$Iv>lFNJ>LH)&j)TV9p&`0f~j&7rep0$s@IUvx408uKJc395_#lJ_H|bB6^A22>czzhJOFVDK z{h2qeRx9697pz9SJV(26jaqr6W5JrOD28IDYuOH<6Ia}6*-iv({IjI;Wc_HZ@+1(d zRo|kP4R;v6llmXozd^`=DM!-hl9nneW1_KfO_i~%GYFI18i$6=FpVs#4St^m=?drq3%bvu z50_Ek0s!%WMl??AFFGGxV26}eCs(mtA7YN4=qVc+++Rb&S!p;%t~;5ygcMc=%QjBI!pujNK1oTAS6Y-=i=NF%)BJ-G|V$ z4rS&D6XQ733Q-^X+!l+{YJvWJu^AzM3dRbNf4zb=;jot2B+g3V=6U98Xc`Xxg_j_aJ3|Cfp0LP(tB~+pp4x< zlp_U#?%qai@%Wf@ISNAhWCNG1bW`4K)GA5=EmI0Mx7|F^n92J?aS%iBKm6i4c_NYM zz>$B&Ps}UGun^LTelC%YZJ@lzD&g9Lc==-Xgl*hmIr3(Cd%d+qKHK1eg); 
zx85k9Mz209I!c0!YR`fQL-LI6Mj6+ifQ{})2DS_3q|Cd2ZptFd=xXuYfawH#3{J4i zxDzbqP)!@1(U?xKBiVN-EB^(83A{1-A0f8#c3AZz*p;MOWyQ^?`*GQtoUi?jggI(V zPO>rIKx6dD+Gj<76L&k)YgMcvXe4ecx>2$%lywxCMxJjD+%X=x4>Jk;7X{(^b7$BO zhrb*8m&ey2x>mI^{nrCMiNo{7yzUbOTB{`sg$0h!aWuha;l+^qb1H!MXE8!a_j9Et+42vA#LD772E(a=B3Y)NPk^hegWW<=I9{r(){7Bm zDaUpI%szEPRDz}(si9Lpec%nSVPIi&{x}}jNo{yFz|MnF1=(|`Gv$6x^u&IOh{4}= zSc)$k$m@S%Ef>Up%Z!IV`zyS!d%DIzq}Ydz-ca_y4bj2z)X0p#VA0w;i$9C;EP*h|T;BgqB6K3ay8ZoK>GF zM48Rexp?Ak2OOQAwaDK95uQ0pr=8`BUy-hOB8~I6 zAvI!efUA5*nR?fz$E;`ou8z*IL#wkDAFS;|;K;icH|$ox#IFFg;#ao(n$4Jzp?!u+ zvW`NXjB8=UrqzJ~wSG=gR6?P{fcemA+8i z*Xn1lPUanf)E=cei(a)=V$13E926gX8U+J3Qigb(tKSI7lz!re(EiOgctiBDID|KW z{H$mQ47mi-$$2;}T6r4GG8(0VmcY=8aFWbzwai8$O4$gtJb1KEw)xQLbOw|i_j!)?Ztwq?4|w-ViPHY2FfJW-C&{ZzSG$k z6#OfmkqWVhTX*8ah*0?kFFmfHq}kZQ&9=&u#NBQ<==KBCt+eOmjj^h`sDEmdYEA+! zHFzA`2rGzSt%qY$j9gFk@ItN0c1oASfy%G)baul9z~gv3sDEZ_pcX9Q^@$Ihg_0=l zK)ROGGSX~i^G)ImEk`XAXT?7SHMtKFY0e_j+=kDFy+kvx9;JqgksaufNogf;czo~+ zKxnVJdy6H3bbsWyZap7qfw|Xe9t%x%O59s}Ywdi)745ZDb!}FB}bV#Es zkfuCM69R2FTTUcPZC^=^pD*qnX?xH1y4ijO?H^;=MWY(dUQw&&YrI>wcFQg*!hF`M zE?ax!UW(|ap6&VX_Uuj)V!qU~TPbd6&lXV6=ww+l*`GPbX&hvVj1n0pJ2s?SXOrFX zXpeGY-#x;@sowTFbqzOzQm4MQJ3GM8>SFHRKp?aX+skOQ2hRFSqR|QzThz)8+5K9p zKalj1IE01IdNh63SUD6d9AXS!$?=a zt+0eoLAY0c_;X_KB#*OC5LAoxxykAIH+%;5t5e*Dn7J2QS3WQev&L{Uj6(J^o_`$W z|4#&!0&X1S)ObUR_@r1tlA8OqmFy>v^4rK+)HMzFO5$xK90|HIdMSf<;_!}9ei_UyDFeMpD@ zxW{_|2~YG_w(?}B*~_Ozqf2x?em7p}AYd`GH-7web=IJ0B# zQ@Q4@Y)S8Yrx$0i8rcHjDE+&TvT&D-(~_>wfx#G=W^FEp;MgJY5Ekq^KeFRh^JoF!w|<}DPKMA$ER|~jTENIl0Jod zGFFd&ggKk^)vF+1y%|OA+}A3XJ6{|^Y9@E>`ycky!!}3#Z+q$~Pm?=@5O7s*K!#2pBmUKC$()%zs=3s_)8fkt&p?cZmjEK(=rlR=vNKC##wI%Bz*3y zM;-ylk#g3nLO#vNS>8v+1>A-^_{yWZkGpH-((8s4De1WVRmw z{mQeFfVbXrXefE>Z6a^IY1~`y&qzypT$#8qdE7tn)Z0H2 zxR2pr`rJSC)Jq}gpZC=JBk*bK*A^tS>b`g;I*}YC)4wn2w004Py&qvH{>NPPM*MP! zs~&NB(s!j?^{A$Q-c^qlNo*x)mVDJ!kBa*5x$5!a{+_E|3ScK`XW{n;U%sz#)gwk? 
zdPu8LTDX&LdQ#F&Zv~bhzEbSNh?s-W5h1v5fR2(xGt^CQCg58*z}LI!4F^ohP45ya z|NqTRZ!FyOF5zx^!_OR7jtPI?PcKO`DdOPs^&dOxaUZmQ%299e2+S*T)H4ZVz)_EI z^^YC(ZUbp09rc!f#Zk|DJ$}6yb~Dh`K@<76|94M4nuq_3o_f^tul3Ypdoj=#^VB2B z?_!>MV?Iyul`nhhQ9T!W>S8Px)dg@Vz3qAE<^Ntgni+SqtPJP8w@7FLQxBzPk_fzDadc={~ zL=^x}QxI3df7w${G$*XP7hZa6fX<|^CUy$kX){BNMA4p$h}d89-P`g#+8^R4C{BFO zD;$q_g{%j?LM{G&jlX^Pvm^f6X0PeJw`2*OqCnvf<9_IWs$uEh$0m&jTHi3>P$my& zS&yf-I$DSJj(!6-|LDU$nv7YWg@%pX0y^;PGJMN~sgdbtegB7cy>EP-<=+={b@MXZ zu8mu@b3=^we3XMs7wO)39PHqiUHj#6Y#YTE2<$<;&Hbd-roCzI0%N#uhhHy^`*!Y2 z!mS*DV-^JUqujbFI-6TJ;Z9@oz8ze-vFK{M@&WGK!PzNx-52a=ah>Lni!15Z;z405 zZlhh+sgGuT|B-45vzwKgO->z)I>BjSZKo~kWOpN-|M=V=sEXv#9RIx`{zDCHHOvO^ z3os6K9+@^|E7v<69rN91w*RFqI8vbzQvLM>yBy|3D2;pQQWUFr1Jv|aHO1Pf_0e(y@UL}9p0tU|&s8@KJT+hGx~ zX9R!RA@Q{BA*qF*C~7_2?!awC%g0sw*_}AS#qA}#4{ELIW;wkJvF4^UIwC^GT#E-D}2V@#>vJ8XEf4>OsLO~x(O6_ zrr~_M8GC#8i7Lb+7IV6KAh{Zus+i|;VoqgA04&ji0Q^b`4m64b6%yXI`luT6yv}J@ zls?|ovNes0)rXV&bsj|(P!tMk-kMGb(=(bM=K<>L%lc9ijUACL6!Q}_*nQ}79@ZYE zm}pZDWwITK-dv!ZAH6ZFVNuR_S9D2%o~@i4z0paZ1<@s)&9D(|$L~XX-l%Igbxtc- zk-~LO&XD78n5+IDftSsC9b7iCjo%!)U>y#3wyYJTOSFw4CeAf*(iQ5MxM}-MnRwfry(xT&a9d~+y`zN zjp?(M{x9SaLqVJX(N8G-eewvHx@lhfV4{A2@Y41_h~)-xJ0py{RRVxgQ#t23_1q$v^_oOtk^^jjC2`!thlrX!Mcwcp?0C|fdt4# zgYK9Un?eYSyF0*vbJ0X1{s3a=qBc0G(4?TKZz77%(CGYe0eD#B6c_h$lalVcc6Gh8 zWEm45q&d@2{|SnG6QQX5X-iHMej^VAAc!xNW2PL6KZEd)Vje&-P=cYDyABnra%c2c z!Y~XHej&1`rJT$JE*D>zL$^l(Wt}Yz-RP8~%rhRPSr&K_8$QM_&%()`JSUG};0*kh zV!H#@Eq0Vp7uPz_wvPD@v|M0!p2w_ASLZunjuBN)qoA8>wX18@VzP>hS#OX((@t@R zjQI1Ilnsk){AQA!ih=a#{0T3@hE`~BIK0=(Xa7I;-UP17;`<*T1Qb^= zEpaKYyG80{UoHs9q7W#Gg62XfASeRDy)LDK;(o>S)4nfitCf~Zn~6(mt6PhirIpns zEGbQ}%=y3H^V|n6h-UTue1E^+>)!+C&OEc8bLPx?=9#fZx!BqwU8K4!*$p2E;3_Ds z&C;5xJJ?iJJ{{~&IdNY!5(#77u~tjoy+l=n_St<|5e8!^wlwjCw0XV9(#3)4Hr}9U zP0L1yYMYMAi_2e4Q7-NaQ&|)SG~~7^kF#E=9SN~;d~GRkL#?s?er8>159~@nM6R9T ztJ9bhD8xl9Agkq=F_cpK%iCIa-~W~4RI%51e}2uyAvI*D!9b3L-G2~i$UImh_HIieCf z{@@eFMtg4Src&311{_h&O$+ly~aJJ33OJ8^7`s@kxxF>89Zna+r 
zUo+(oTMEi<%wAW1s|l!oX2QKw)0Ws6xcX`M}Hw%*f2;p6sBr;OF8k7Q?Xz(iY zQg{$Gmht~Fw6NE4Z<>9Cpwq^L>28{bO1FivY)dfh#Ez%Mq`59Yr@8=t^kx9Lh3VOv)VWvBMadyzxfU@RDMwfr z@5Wm{R^=qyCkS9&%w5adOYno8?zUaZ>45!hGv4Hua=h7|gB2q<>tgah%*N8{9DF}< zBD&5!(AsXHmu)@*ju}%nueNN9wX$t%Z3gUH0tsxj8^=JwX%}#6_F$Jf=NLMd?o~y- zH^mmf*Lr1)1Ke@eQw&FXekoR?soLB(726J`{DFYnZHp5$V8yZcWAJg|MfGtYI>91} zuyyYQwV6!a12E*52;Bd)3LWs)J>Wr=)&a*BVB|Ir9dH-h<8&^_wfDj!?_&acY+oY= zKm0Va^-=rD{P`hb>7Lva+ZeEl&CSVrb?_~Op7t1HTTa&Yst!FHH-&IfKh=Fv0IC2*)Bt%Ke1?B#p#jxFr`$_8gIV^{UAhTpP+F z#b(wbV?vi=%XUa;T!VxXE|U;8OFpI&@^VP1t3yKV8YJX=NkX{huf#ka4Kg(QQ(Tj{ zBi%O2(>~!eH^XQbP1SfDDm1rho)O1D&$`Bn6PLrz;&|XT_9LO&V0;~R7JaYl6XY>J z9^NErJ*#|<>IW?R(ywm{XI^lO`BA<=DqHS5Z6o99q9bj6-3X?X1}A@ zkHfOD|HCx_4Kwx#=vTXO$ipJxq0#1RgU5TAJ%VoFsKpIJs%>V%-a-sg?7R1m03i%% zf=9Qv+@J+&>b|87wmdnGYWT_Hduk{C7Tk5r`lrshoXfG$()D;PJ?}KH7c~5H48Dhg z$Kwl#og=`n;VDy^%L0tfZ$?|Ts2riWb;`q}q7)i}COBbtc?shqGxvpAkU#qntm0PK zZTFY4b;syh4bSu;7S(?ycw?l*oXW8|6UW|H;l}EExw$V2L7>XlRpW}`d+pwP9v*^r zz}~unU)bTq5#ooa67E~DwK%cLx@;dZ#lHW@UZRBKA~fatCk6Ho3Yemsx}A&HXxD9; zOyPF$mgSbd*tE!74O{shs{Yw}1U5IARn}eA8F1fc-9yv)VC7)g$H9fim?F%xNelD~`AtKS;+dqn-7Rlmp7?*{dI0Kc~19>vda zE8{$jccrTwgjc(+^4sNHNGXSpIz9AFSaeK+`7}fzAkYy)6#T_ zMn_#DovBM4MwZqzb6?UL>EO&y>-q?#NhG3$f?7HRZqe9$FM^Wmo7b-ZX1=Z z;y^_cl_KqqDe}GId#q8A@R~UD@Y0%Wb3|zkj{U?n;<&vVUK&Z<{#~&RKUt;Wxt6xh z=k3lOaDEET8bdrP9;~$+Dpta-K4uEAbj&I(ZsKRT25aH=h~QDhGzPBm)qH(4-*q*> zNfraIVanHj*e7&4nhKFBorQkN3ra*UWq}s4yRtxy*pZfX6bo?hBN!vfl!WbF#8T$4 z*4Ba5xuS~NK#~n@pf?85?hCi$S7`&gAQO90yl?iP!2yq*l`*z5_=s5kti` z*z5c(&Ft>hZ7L@m2Ijh*OoA+(Qz;lUk+P1xub~$i+6rvSZE4q`(Xn(LF%j0y ze8B*EWn0k0V`6YGB5YdofoUktFK&s|57luw#l}iSK=3w3o^zn$7!p?c#^b=B2ZP>2 zylu_g7oJ6T0f$AeBLqH719HCSVs-pBA3$_n@Ylj_9<+4PKA2^THsKdrmuTS3cKiyt zfn8mh@=wB25d+u;*MJ1BT+eo2ws`}bDw}03Q+`?qYzu&+*7Q^a00)OUfiYx=rit{` z*h+C$nn=bmUxVFNb@Ea1GuHJ=*AZb+!J|GMc$j3n_5g+F0E18u`7J~R8yoRkS_3`g zTgn}J$nQBeg&s1GddQwi5BVhgG_8xc;PG*gq?-z zoKUuvIJBMTgwndqd9P~JDApC#EE(0{;Dk`7kCxCbMWTdQK7e4(Us|JTtjQNDKE{jX 
z8njB2EvcGRGLgz#O#845sif4pZ~pl;aGMucwsAZ>Q*0+NCbp`U9EW>8gB-wDXbkK4 zjx*D6xF>~_5v|Vl!##H)#Z_>$M&WSJyJ4{xy4=i~q%?+%qlpJJ3KTV*E{NY$$HP4z zVP)arp5K5Rb+{*Eslz=t!GeVxhr>OyO1F^*Jl*qip6;oKa8MbBtJ6K1D5txFm#O-i zTG0`5ylgX*-mCggv=gGTr#jv94zwJU`G*lisSQ6wL@g9%ZhGlz+(*j@p3gzZDMlRX zzvz7=h;z5`TUtIG?;pa3Q?T({k%~ZexkN1YMSQQT+vYBs1Vm{3;US+<>X6T<3lCIw zu=|0v`az$n>$f|b8`C1{6x*|0g_5$L_F1YL7(p!}=~of~Eg~c6$9>YB$H3CGANRS4 zM3im*9U^a4?ozdgjwgPyD6|tlUC)6wDiT*Pfg`nwL^4KGBrak?>d;SxC@P26hAvYL zmw5t&u9$@No`A&Sgr!$~L&l#bt_DWm8TvOpp~?XrMFe(QrT&WQA^vpYnbikhN*vDy6pgB3d>aM zBd1`&wV>%ZH5aye4YA$=v!zZ=y2bYHFDNxAbr!MIa4=|3xtU%XFU1fBwjd(4hI|ooeC-68Z?Y3~3CmIuUJI?WtaRHm zjLQQ)X|dU`Rk?JXavOtFLg8dpwF09NEGVnSF$+g=HM1V5m<|pFtI;L(h8#CQZ|DTa zMuxjuqg%VQIe4mFZ`gfpgy{Zcgcu2U3Gh3h*_sHE2#AKC_KgWjo z1gK)u)ot#$+oCkIjkNgNZ^*^>J*_M^l;S=a4;~+GX_q|ZY^7WBlwT{Ilc$`M*Lq^z z8oMLAH1#bXTX~&rbukC*80iVy1F)7FxSLgddZcbiOmtI+v6i~1?il+BqN|S)F{nH{j%Qcz1}l>#F^dVE`vpBtEQM~R$gmsS%*bCSaFz* zI#;c}8qU*JJx^h1I|frj8sX~c<2u<6+rzwC8^$&k*2<4BJ%iy~v>P_2eqeQRQ>#AN zzhKqpGla!rp}NRcU1_1e5^*_Ip5mFklytO38r(I@(-n?QX=%HG$v}m#?lMkCn*+D^>4sAicp15ja4$2# z+3L5A2cq(G4jw21)q~YVxwxvYIjh`D?}hWZSlp~RwsanL*9HN{QYM7M2*C^G*7;Iq zxI%+l>CCHs@tviuFVZNTdkh~{mrcOEK8kXtY3_^{l2W=SqOug;% zU|uM5DFf-kxyrdl=Y}NFZh-cHz5rC9328WTnG`$PHR|myz_A`jzz#iZnc4e7*__}D z_A=;MR|c*BIJ52o@?G%?a*@@afzzR|A}zg4&TD1j#WGn1To8hRS3M5u5{ z2bT_ZF8xVy@3i9H%MM8sH+z`6vihk>>1kC8OC3V4atH?sK%u?NesV57&60Irkb-LB z5CdsP0HGm(B*19EZGdb*0iYBBUUvbnyTJ5>=?Swv%=R!nU}8ybWhtz$`3=TMyg?b0I4&?ya5Jq#WS9V$Mr^|<M_3P|w!+UTGT!prj!MQsN-nopYHqC<#r6tENOw7iqyBiOlCDLoTW^K} z%L6xcw#EIkt-%{CH#b@4iLUbw?Zp|5*L(iT%nv1fF*5rB;PVMImPCTV~mdJbH7D0SKj`<7r2}LxzR_>x!%du7FK~RVRcWqXl zI0*%}Iy)s(-;E0(abcTn+GiMD#n{wwUCTz{UOZ_Y!;5jbWIY^y)_pzRMH;+THy)3K496uv!IF zbnooS=6V8ixfcB#`(%8-Jf}smi|I_Ei**-tQ+u#%j`N$`Y|C*J1Z0n>1w6pqXEiN8 zjK$^sR!bH3!!gJ~Bo39Pj!i9-{C zp5;bca&7Xqv9Ri2R^x78OH~icS)425zK|!^BP*QsXJew3pE_ps89cE+g@zJNj^4RK zuQv6Swb#biEwn9l&L1w1?AfF|)qTrs7aYl~-B)t3X8!LY0`trym|(Ga5KK%w&An+h z5#}3c;`o}dk~o(VYm2f3@1D!smV71G~D(fzLLTjJz79B 
zZau+?(c^2Ve>z)=BdsW@wws}~L4|Xha-FR|tYgl(`az3}%Q>bvW#d4W2pkKuvOviw zY&~b|M~VYa#JUC-f6Wj20-Hgo$i8a=;_DT1E=J4O+6>@{ojtpzBNa|yKma7!-xJ4F z%4_>V+Atq!V*6tjB5PXV;iX|4fWt)`hkZ9{))Rc81Zw(GZ1BXzs2BK&cA^`;&%X9% zBmrE$rvCFC2mRUbG9Cr850ZSK$3E-sUZM(Z<-Woe)*WN+#;K4lJ$Lov&d5cgvO-BC z`ky-Gk#&@(HWxfRuPZtZ>$`D5o@TcQ6w$fTccc}^a1Al-oa=;S`PYSQuCq9qtweK^8OH*)mXy*4^k7y6hG2qoga_CfS}G4{pJ!`CdWpmzHC*?OC2p zdNz3cS@U`*yl~W}C-UTxZO9EGuk1afNBsv;bvQ$Ww-<0>NwLyI5O-; zYm9-8>){w2Q{#Romn(sI7srU|UQg@X8eCmI$N~8PB5&F;^E%seSYWkYF#T@M-rJ0r z;}WCFPD!?J$2CM=yGjB9!_np;iz{PpV=PAaeqh^nD_0`)Vul2=SL%cPfYQ zT6JMH_(~NEwi>fe)iP_Ls{3->Z+{I(E8hG z=B74KomSRzx}H?l-L!tDtovyFP+3t~aWn|53grWQ+pIjeGt2gzvL2!JF=ahQ>)pzF zg4SiqdXm<8%4(yvNLf$OnysvAkJNZ&t)lA)Wj#ylU}deLHB?zI(CV$M>_wTH6aMyn^R=;`&5w^r@UW2{Y0u4;!JdgotL&}>*^T}-YC=udgZ z+c%mEzIjNpyByS9j3(Dvc9-qSWrm;0e~xlVvAsZ-`Wd+|`~jtdi*QV?Bkl2)XeoJ^ zTu0koESgJulj}IU%OuUk)8u-a-DSAu(#7POVRs4FTnr}HY`aT8&84TwHP`OaS#t?8 z;o4@6vgVpgsL6Gq-KBbzqCUdpy43FSt>zMKa$R9}Ijp%1F}bd?yX;UdGZIYxcPp0^ z+e>uOsUObetrS~ld>*q5lbhI9W1d-n^O(JIIOyTRSn1GKhF7ufE-J!MN>G!uwzW~7 z-QijJ6%-{MN^vyxGdM+*>p9k;ZlxzKE^D(amWk1a>xaoxnv|Y6J+|d8I6vD8=~!;I z1%N<|4sl#*E89)V&KrSooKh>B5xeNQN{79E$3B1+H}q-y&?t+8&_fw-Z2SsUw6 zPp=xU_m&NXPG#Qp&?6%5`E3*<+&gula%ky312U`#Lj}MgRi&6CbuK-B5eJN8y`ii1 zAkMYsPCJ)-)MdA>uX2|jwznC~*CzAuq2V^m`*lNq+-Zk=$74sDXXW)g(B}fPv*HqZ zIE}wxn_ru6@Z1aLmbGlFY-Zini~LI)iw}EJt$Wl`hx?)(WY)pbuZnm4(9E1!*|h8- zIAFaZzbRlS);sc34EUMA^^RtMVQ?x=g;Q}Fobt0G@ROH>pBcB|XI=q3Cc~{bA8y4n z;Z|H+P^Mbyvh5{EIo`4chh)Wp>K|M%U+&z-R% zuUpok4Bv4SyTsw1KU@&(Y;Mk#w%EaOmg>#eO#sE(pT$ zuv}kj(gI&smF{$Mf8M2M<BT~Hk!F$ zYnog2PA z>8@+xs!ob>PMYGJ4fGws}W@2vba}!k-%y7tfDPO-aV;dCPaqiJT?RZM8M>m0PzMSZ=*roLt__+y~s~ z0ZOp+9P)i((~xgwHfi!*$X?iX!L}#l1g>|(o#Y37-w8QkZvGksNESA!{>-{#ELVng z#@S754RqsH!b5nu+a0Nf3D9Plz=AK+_14d5D# z{%!!=341I7Vz0Sf>R0yY751C9dD0-BshJb(dU1PlVC044%*0kZ(h0S^MU0QLZm z08RqV0{FZP8XG=cIz@=1mIyKUfe7)?rU-FxY(#C4JN8D158&U}bJv$`N70?F*aX0k zp8yywSD}>{;vOj`;jakpqr^xNEBc8b2fMcj5D6j%F7zKN;zX1fCZd%~RDNO1tQ>Pv 
zVg97dA|x;ksZ0|2qEKYwufNC;x%h*2O;|*pFpC_7$P@-ZA^sT7z+^>h!97qY6XY~Z zB;(I9jOLq+oQI0RjrgEU5Gzqcst}jPR7r~wpu;G9@yGP~Asr0T@Rtdk3Sb%l`S`Vf z2I80qEaHHVK~a>U+_Dtb(^S}OP*Vh3I(`{rnwSV$XTv1}X%xaYPfW&J9zu}v87c*G zM@x(TIm#y;@p8oN_Hg-dZvjr|=`&mg{3j{@OnaUN0o0I64eM!eEmH3cITg5RxSNqLsb>imA;nx3CSPR1jp?yGb;{CJ&N`p^DL#>x zIS7*poDHI{7y#E?q;@0TvXEXL-pLK(L>Ec{`I!#5lv8;MM@kQ4Y4W1|*2RogI3*$l z;?=)amh|%WsV(n3q(Ba2A{@Cj8MGRZLjmGWQdDQz^T>s%QA=JhUCn12Lh9J-GOOp~ zU7Jtia>Z2Ymp9Ya`5guM9Eo?93wf{kXdEW5h+qHzid;0L!&pm)j+4%}#wf^Wz;7S4 z9eVv@X{W1Nq0z(I)mRz^C3BQp9T(PEjjui;ur@s%kE<)cnaHd094{?H$NU2RMt-bG ztfPM?KmXeN3LukO+0Q^)g-Fu^{^f#em)0D;F4l2O=a*k?dRNb{G+>?w37ds{&8V~F zkp4zV)Jw50H@$qkMR09?jg@ykN|oiMwW6%g`KonbE18Klk*!pwN(Z?+N5qCH zu^rIb7PeEAnJc&V-VX9Z{4Pyr8o10>j+AFB{->+9OKStB{%QJuM_%gZl%e>YZs&ql zj^602^m;_@vsEyoJ!LzZjqvPwCW0EBw_3|g&gf;NrDJp`pR1P#rmD#x<$`@Sdv7ge zJ>9zUx%za|LB(Y7TTij57F)gCjiRo-&O*85E8eixU|(8*ciP#Cvv*=or*Ve8ht|4l zaVTANyr67x?9=xs_NKr*@L7af7?}l=BM&1?PJ@pei4bjJ&VqRj-C?$)`==4=lCf-< z9r5l7vlGl`K8X;WVFtnM0@LskPWlYpUYrO#BX1@f+q4gLvBbt(!_asa|R!isrtE z&6<@9ndQ?r&yt%9Yg7I^VLUFJn~J7QJX~U8VqE$78%2@|Xb4Ig-ONn{8jYz**wk$> zu*ihlZP~Zs0W%9`;c-IhfnbfH}3tpPnP%>g*o49^|h)4Z~j@wRNF9M4er;IiYpMH@@>1lrCJf zc*)Xb%dIQQR^GX4^?&ZV`<{F6yZ?a)AA0zaM<09qi6_^rUH8<}&pi9w^Xp%D@udwL zUw&oN<}F)aeeLx(-rV-q_VOJ&ckO%bByko~y1o|J&~u{=oHrPEDL$T$?s?Yu=(| ztJZDYJ=$K=uKl&wb@1%isdJa>Z|G|1*1bp1UcLMDz40cmoBQ=2VD$F!_45x13q&-Jjo z*zNZ^*rDwz*T)Sj9qe~H*jG8&S3B7M<6ytb!M>?UU7DL6?9@(B8He=LM!j3Gf;Ivx zS}|B-^2MmPKUT34@W&@MwZzNk~uCfE5C4>@<`R74THL*q@{>v=v%V11hE8$b=le3Zf?zH6%@=YD5y7{8zP2?5R6Nw;g_py zd@}5RH>00RwEuB7nIY&h+x{n%&j95EC!v2vnZuu> zc8>1#aeF-M97XFc8ooM?0;`^!594?pg%8sv4tn02nP<<3m}q(H`RaKx9P?p1ocVJV zf^$+0_gK8;IJ}PpwhU3^;FgN~GZck&t|JkOYlwPEiQp?&GWB$m;a~rqc@rn*$F*SQ z1C6mto9|4IxHBE%%J`T=4GEbxErhGjkPSRH8xrvtXqk;iBqk2X$QS?%o+K21Bp^C- zQs%UYnX15t=Fh0L51VApr*m>Xu1X``lBWtXvY;?Wm0^0}BvpQy z6ALZrg<6>OJY_{b0_h8+%QOYjVmg34k(fSPAPty>A~tiPh)>TG$?0ZGVFo<##1`f# z+sJH7VUgV}H8UeG)BZBjqS+Gj^OSG0#crmX8CM{lKzM<00$~LFne2bPTlkZ1S!oZ3 
zLUO5V=t(-$&FQ&@%)-L_LPJiTAvfKUHz_-_FvQTSNKIM`Q7ibwrsw2lW*E%*hWvue zJVWNJ%t;n=`ovro#;816=$C~y(2$;INH0L341{OtpponE5T-CQJ;RVym_H4nRdms# zo617O=T8I_wm(WOu=vus)Ip%D#s>qyPs8YS^z%qef7d-WGXf~`rFw)YGiD`pn73dj(vLcF{IluC-)s=f^V1L(WhG%mCDH@joA)Gx9V<8TBipCY?Oe@IEOw-$waU(KwGt-MQ z$3-Q?B`dE4drQ;5AVX_mGz4$!hq==L=z^y=jxctaa*0lSWHtEi$KgqYg zf5Z6i1lRX>%%ABdn>2sgyTY#H(U6ayglge*|AzU@jlZ)0nK4)PPaa#}|40Km$4#um zZK{L4VYnMI>cZXRU~fowr@YJOcgyq$F#*6wr`Iu`PYWZ&=Kx3lhUK!`T;IPTKO<+< z_ixD0^9!!*f2g#+e?$6?tgi3hF#phd>iaj0-{yh({te^*=iw{+uYa+=KQA8=cyxXi zZP5Jb_R>rECiEBDv}f(FFb`l|{;ckTJu7evI#~DaX#01s%Rk%e($UFrJIN9!%weOn zaY+Jtom4U11=C@Nxh%$O+Wb~LBcjykNS_L$y;89)g@x$;?5^UxOKN(KIX1sA8QpDe zrakUtgv`qk4Cd7Nnt}R0RBx3R~po%KY zF=N=9n~d(43lJjMIUzHBM&>2%IT{s(D1(u+3&@?t4N5vv99HSqFKv{mi#}ScRg(#tg!>{JpY@=1l}qh&76kYAK8@=r|C_=&-?rGNsKDh>jh+)T)G zgCcp^B{??}^U!kV5tsup0U@%OI;}b>qFo(6bf*ME_R^hL@-cZcatibFIHrVzpahFj zMO#;0Xh-JPO0qbrYDXi%#oI0;bBpZjS9@s(G1?l}KE44R?QIX=l(?NnsesEEiB8ST zh|9x7hzMX#Mcf_a|7|5lTAd%6&3Rs226nhR#zryev7grJWKw3Kn(^gLLj6bCqBQR! z2S(;hnri3w4u^E@6&(rBR(zjbgr$O!Ip9%|DiPSko%Xs*hg1>noRFWMk&rX75X%E7 z+e1noMr3L%cu=KnZ+Mb~Snu!-p+E_xI>qK%in5v5AWK#j#=6*1r$QyMxN(Fzu#JpD z+(2S9EGCim7Lac6F(*T{PFTqkOUNyU*24uM?9}8J#$s-1DMYo(RP`X4VPo<#hGj8Z ztbQKjGi!Uk+R?2PjWNzl(TdW5?~$jKlZq0JDb! 
zDoS629Vkj)<7PLh!)XQJ;w1g3W77SYF5}X42QXcVIo;XqBO|C)cF1An$xqDh0t+5YG`x*dxUHJ{_Wq%R^w;c@Jr zvp>4O!S$|=x}E0puSZ-JIL7(!**z}%Xn7hg`~3HC`rl0X^4cN|i_32^_}|3xznj-( zMZi8nx~vEo{=0bnFQ@+hxG0o2#rU>~i`!ZZC~jLYdqFol`<^FR~bIi9)1hL?|<^-lbyu3PuiXwd-?R|J)vR@ z#{g*f(FsQlBJJpuqdu30KYZlN6X%8So~L2Pb<`+?T3Qhy76M8Da{#jdW05^aOKmab-BE(sM4R8W*1i}6m;b>Z-uC49{L^>h*$`JA0--RXqqw59qcX5j=htU5%|BwSND!#MQ<@UvNqz&FN zS1I9~Ybi|SjUO#sGgT&!!=1A5DoiwERK=9g-v-kaCIXePaBzxB3Fl%s3#9~xdbD($ zL?+&F!byqds;AFg=|bGrh&#j0au5hT9l9^QjQjnUaewAA?yp|Pecxr=k6gyx)`&aX zwi>v*`8eU|8GKW$5g@AkoJ3!^>+hbp-0B%j(De86fleYDOx54}M#H~X-TQ$YykkV7 z`_H-^;gPofeqtWp!6W^BNU@W+)giyW@G}4+08YY=6zq*X{rmS9g9i^5si~8HhOue~P9%ge>5pMEMXUcAV$zY3)S&J#rw>ee7Eh#$pq>hu2Q(+ z=d`?#rTk~ksd77o$a0VUedd0*8phAcLi&7)8UFqKSy}s$zj{~cXJzHW9bt-@{ss8+ zK^PUD@%iLdw-p(znVizwzPs_k)Bi^2hDS3-jBN0eEWo{>LJpwh8a=M~D>& zj`W{>&^t&Apx$df*uS>>`u9~>`Xk<{$WU2>=xPlB_|4hQBc{98^8RCfMt$81)c%yKS8<_hdGQJ7lJF92BX9s` z*qs1MG%jP(pWz(iF`dSs81Aw}I02ji%>eFzu99-@0WbjmOwRrM{IEX}Ur;9}i`#F% zUE!w7_^MT_#BAi*{*Q@X@f2^+FAe0o}fs+Eec9kB@x=P_}2ha}I zH4b_hh9|gdd?Jq`6ra!#UE%4~tC#Td^1^;%qX-KN6Oobl);}&z@oxC=;UXm^1;@{f z5#z^?7xOY=#pKD8#gr*i#I$MCL}4NJ)S~^EJ9n;l!V)f?zdcO6GS4JRC&!2tQ=`P) z`H|wG!U*xk{1EX%Nt}3a{zS3${sOV*&RfKRHB&|3x1_jvrxf0MqzK_Os0}qIY zAAVRo{`li!-MV$Ed^c>^AU1E_tlF|S-+WUX-nBt2_*sh0RZ_hD_S@o}cis^neDHxd zc<`Y3=%bIs(W6Ik*y0!B^tVUE;a{Zq>Z`AmTvSw4h_k1E5Z|1aqN=J&R99CkSs`1S zU_X;f64o>!G;XV~4!8lrgUa5f3UVs15n<9>jFrR1Y&l)rFIS7rawGOP9n{-X(H`-; zC!v2Wz(td*aNfoStUn_Dnj2 zV;-mek!XQQ(t`Tyew~*t83wir= zA@5s_dp9=<`O(3;_@WK=o_D}_ryKVFd!gC}i2acKDxAq6+ukaqcY%<@7YjN4VIfz) zEab+0LLNNs5Z{3K0f?V~_!AL-7UHi&{I!Vx3gW+o_6G(J8iKCKfI z=;+y@!*#7%wd#^U9O=&u^@+$Oty8@KX=DTaqhb={;^Pxz61&z50RKKWsq_aWM#sk_ zMkmJJ(CJ!x7RVv(#=iX$KtI!uQd#I+OAGY`3VZ|qRQlegM6ft9CO%OSp>Wjs5B$@j zy(5T1d}3l+r`Xtb?b?BeTK}|wrfCraVvxm9_}>s4tElW!>mTLr>K@d_7!x0z2rkEV zRa9OdYZs63kjO|MXLna)cv@mwVq8q3h9#O1?TL%>8x%Fb#i_+my9ap{7meo1?w=Ms zz@vFnC#SYUfFV&#j1Pw()%iyT`WtW)bv{3WvDH>(RkK@&^JI(XwruVQEo`;U>shtvCGjXNX7J 
z;KWGqG0wr8{Et>4;2!83WkOYP^j7JkeB+}bh;cFgAq~78(nmqZMTXRmjrvjlNvxMR z+E-a#T;pC6egcDwazKd&|71K@j5Hd8Y-}u7BElPt=dL_X(Z_QmMvPG7k$IVeYsVf> z&l)I}=ER8m3!}uQ1uoU+-Me>-4?p}+jSWtIeO!F~_1EI$ z$&;e8vQnHnbxQp5^Jz6U`1RLc#UFqCAy!vQu@Pg1gTFhpvje)JqYFewhkChc5<0rs z=;-c5NB5LyD>n&m`IZfVK$GrgiHZx^(N+cR0oxH%I&Z4k7dn{`I_Fnd-s4zUU7N(Y97zAW5H_Eh;1(I;!g6h42inPF`f=+GK2H}NuvXz)Q&aOB?C3bJ8vl;q_kPqtKc*2F z7}%Tm0WXFByYId$u|_I^pM3SzR~6n^Lz3Tp_nka&;DE%Mf(pkt7$eaAB;q`_apT6h zTeohVi(y|(X=&-Of`WqRl`B^!VO&kzZUlIB=+MCzYa6%v`1p`_p3%|KeVH!oe+mCH zXU<5BWmfm-(Ze4+T=~NfKS-?U*7AA({{8Z^&puOlqI?wYESnQ2PRP$c|6IM3CLJ*5 zQFh4O*`I&@xdw9gJJPiM_Su<+%=gvuVLrVUGf`WQejtMLSteeR<-grZz%q7Om zst&UZSSDCkQ$XDJ?b|0=SB@MxqU@xBZ2;ws^@L?a+_wBAW#&;S!*RH|-#bzcc$@bO zOWD61_m{sb<#XRk`RloJhf!Bp|F7mz_`mhmTdRBa?CFm&{7ST!66K+2V4K4-AYVxj z%YwMm&hr2A%P$qsPM(wB#Fw%_T8KCKPZ@pkl$6n*NEvYm_s?JnJ`g$s-+iEAx0E-5 zhTb^*y!-1?t~>_az=aDBUk&mIqwq&tFLfaPI_PDgn}4UCupUreQoodPFg|{Z`WXBt z4Jdw_P3N+Rb4IqSSO_{ukiKt?G9O3NIFmsiXNQ~n&c(r zi2QGi4)UEmXBkm$DF1ACF=tZ<4FmqSe6GrfNS7VLeALX!;^^9#xV{}m79m_sSC~5dY$~e#v0~$8`=osR%_MhSN;AM#Ec>er(iGHRYkOm!e zIvOhzofbzre*gV5I^_H9~{JALYFQYZu+ot}i4lY!7MbG|;5nQ?47*q01L# zow9WN{Y~=enW1uxB}_hAG*Hn%0>)S176>~HNynv308T?dLv4K~4fgunpKTJ_)Lv+l zx`BqSuWD^8l9}It28Dn3?%iwKfAsO}=PxM_)?fAqdU>!u@LjJb#2@XREWa;9K0iBD zJ~cB;J_#Bg0S$c8(SD8t4WrSCo$nBOSDp20mMhgXPO}Lgb6Lo8+^zK*NkM`D@h=@>5i2(vSukpktG%4)vLB66H^!OAhb#BpY=AW&vsk_qT%1LUFt~uF}`2n@9*CoW0ZxYgY6J zeOyLA_O~1}@Oix?SZEo9wIDxAA#vL6Ce6G;E$1A~(!2 z$@QS2VST;_ZPMy#k#fb{TrT3$38|qv>)-X`p;_K4?H)cm*`PWUtR`lb)~)l#dh+l=l~e%e(U; zLk({PM20d)hftEsGZ~mh0EASM=yK(7YTS zq=~Y{xVD3@$ejxVWjSbg12m}m3>w%b>Gk<>v`G&ahRgd3BII45VI^o-4jL9gj^<}Z zD@55QMLLWzUO~Obb4mM8{B`*^nM}Qj1M$SJkXjm8E{=3uiIzY9xFEmU@t}NrMU>oL z8YJJa*Jrj#dVOAtHivNgCJ~xPW z#qkgEXW0)KGKA$IpMCaOHTL3|2I~WI#*7)YG+c>}E0qcHVVksxZPK6BXVAc>5neau z)RBC`UYi|gJ7u3fePmi%n#A4=$#Gp#QIRy8&2q(x6{=oi$w#$291Ar@3wh7Jfo&DX zLL85BKEQ|ddH+)udEqzAH`dHh{j=j3;{khpUWK~Ar!i!8H>_2-W2`?C?+X8Z{rUx9 zYfm1IKdZzzT9w0-Pd+J!4I3u0w@8+il}YH7_4)m+k8vFMkk0EXnZd!qcVkbFS_fg?_PQ*wM^R!=lEj*pyz#~x 
zCC1hY7vjP3h9i&$9dtTqGCcxiN>7_@>j#faly5GIK>w_bG1xypKQ~Hly=$`k{Jkym z+^@eLMtNWL`UCb-%7qITp1~d^xnRM9UvItjR+*cdtN4z2rIIDmzF2dybJwB=)YUa$ui9ndjI?r-S$sf66@RjUcu>w{hGDN)j;T22MqBb9tUU!_pW&UHL;M~5Dfa}*64#qa6LI63Gxpx8`WqD$ zB_kptBzC8$a-cus=rqvO|A9C6M-x#BpGWv8!i0Dr36S^=)+QSB$_ zUI7(Ackpl_>+`m4+Y}At57!M*KNJn=>FF{dAwkI&bn*&+@|rv+EySDkfb9?6IY*%U zaIC!@^?-GNb%N=#4F>KPku|Ce8WS-$weQN>7|!cJgyOd zPd2n|J59dS#;!e6Ad+af;F+PsZWdADe zq*39okN?5%g``1m7j;^68fcQ=Y@5(`t9b%tihZpke~ByKi9gFmA45>DL%F9ck>{*K zXfy5y9)Ab-TKpa5pKXL=c{Elgq>F3p$#9e<$~xWI2M~8%KY}^Z zBmV$*g+KGq=fCKCR#Gn62ayieLGp?12irRKM~-De-fr2lMLzi8gG!#sZyiWCWruZ# z<0igSW|$uP0<@V=<5TLt%Xi|=rz7!k>=%i@4x~YcE0qb;z@BK84rPpSNE`8`Y;8x| zNm_K=F+P3jZ_2xdb=|L8|3Upy^I!De3rP$4PgAFZrcMV<@|5ih>jHsgLpPQK+fLR~ zmKob3>Jq4fA}!=Q^7uRLOdZDcZ3=%~{xMeQO+FE>R3;qXbNoZPsFx#9wrK9zvq!bN zGm9zY(~BKvp!G{eJ95+Z2u?+?2p)YlE<8LlJ_hJ zjs^5`;5*xVwzX`>FsD#@4bc2QjXUX3^6w}c4dq;a*XymLNm=JR!>}wU^W-yWq^v`Z zE@F&eLs_w3;7jKkXSC%-6^u7m?=?Sk8_0x`Fnjyhe#_6%drU(w!Hpf5OucIha_=TBmtYzoG} z(FpGi=#KHm>B-P>P*29S&qlp#+>cc`e2>KECxpM;b3NMdHORAu?I+tv z&XEJ>r9wBQ|cS3x2ArDYvR-oeSB8R1EBpXbiG~KPGemm z9cxc1#DQZc&OO;bvR!9i#I~AaS&mcaM?0Uf&{a|ANnIOtJlwxP9ToLE)caG%#^-Xx z!@Vo#0GNa3VZJbh{V)49&U2YB$I;}8oqjcT(o)oZsV}<~Yq!+*QCGI9%F(w`JC#_D z@}TnPe2`-V0&yU)eJ7A7?7x2e_NaWiG)3tpxE{`D;*ZdG{h;YhlYs;EzT5-BwSDS* zb$O(|mwLufm><`d$*$~EFn>+QI5&lO=(w=_IR+*!?0?>PV47Ofrp}pb@_PQnVFGZV z-jn)sC6B;?x;E-~D38=pQP)X5;TY5rJp1fr(v|%<)?M=O{oNGy%>*3>jycE^j;&vu z7o~K4)X#7okq`4GQ1?pxF?D{9^2og`)X`AaM;#O8(QBKEdQQ)u;~-tX$bOaMG=2Q0 z1SLP3WN;=jRQXKJM z-js8ie5h}uj)eL`>Z!KeSMaAg!ykUbP3p&?^8^mu&%-)Foh=`oevT_}4@O>baq$$6 zK?yn@TsI*v_;7ulx+dy4s4JtMiTVxd$*5zZZi6~9>X+tB(%{|&Ig&Uid4%3o*L^cR z$Nbp_5ID9WIO0KESpGYfgiGqXUIq?a)8yJXbu5%e>ba<2p{`Nsxa|4Q&4^`Is!r(g z7zRGzsV#qw&7g-!$9I=0YzsKw=lGm-6BqJ;=Akt?N^eMg6?M1N8B+KBm|b7UIzjyi zb%iC+QRzHjW{zz%b+>qG<)3v6-}dIA{hz|R$aaVG0?zHo6Sh6%$64EF^4+_Vl%9a= z)09W*@VPdv*9q#GsOzJitk{kNGfO(I>G8)Q?_XP33cjIJDGW4$5|>n+pBhdb}QQ?+|+ciI23{iiIDACyJr zOOZ%=s1SqMC29ZW1TGd>qS$ 
zW%2gvBw2OxBYFCpPwM0mb%Oe9%Hu=x#xVOv`+u~p9B+4J{?J8y6&}t}-AtvM#+eFo z`SRszen*_xF0vjF5AuNJL*AUR9hDzGF;%`+8maaI=<@jF>{$8oJq5DzBnBsSvR01` z0|yTD#d_~-(g8hY{=|tB_u~6)Id$q(wGNCul4|WjpZk#K-KTwxQy#&59m0W)%4%4SkSA1uCzoUN+*e>XG75&5o=w&8D zmWwb(;#p1H^YmwPs;=Y}c)tplZ0(_rhimls;9s&Vs9!_Btn%S_fP82E1dho#M$=97 zRlk9scOb3Dps)B5d2=f8XOEIU)ciajt+II&`SQjW`e(hIN49 zC^sA@vAy8FM688Bfpj@nxSB_`zifZ7kA4c%JB~GZ_W5jcs9V691xm-vdO`g7kPrGA zFUQN&sc_s58XiWbSDQESQ~B%iOuiC`2Z8yqKf)Oas%_w2RnktGV|kI+q?>Ir)?S|a zi+QX3nGf0;)&4hzhdu_zHwO~mJF2x}jtjZwM0xxNd8_5eFhrdZT^XAi)^f%#YUI3f`;wpgQ^f+A+g%A!m9g1Nm$dX3RU@5ue zyIq60+=IPZOuNws@)-5lV~=fuj^-TNsGqPeoju4f7_>nYe~<2*Vmb?CZp%kB2aVcW!dL|i#`plly_bFEyRuZ;~S+2?PZKXU%cxdP`Z zM$DBS0AAF!a!yX%DgVs(*?DnljB^Hau!n#%)8Jg3b1$w%aNfsxXAtI{T%+cgig>f# z;=?%%=TNWQGhH$b$EOlaEZ20nj=(t^`Nwi%AH;DGpEnlAsPPu(ADnw|?!&Pz=Ns!E zUM86a=ix(u2g?s<&nP`N+c5Is#O^2LmXa_vpXAt`<95y)ICq+xIauY9d|b+BFt5dT z-mjowd9Xco#D#YD<$7G246m0zd8d!Nj-#Js{_Kx9Phi??m(lkZ zJH}@_O}v>7c>#Y_pV((}J&AES&t~5ZT}-{WOlMxR^^qO`;cN#1^KzhvVR(W&!#ZIh z(_p|o&S)%o;+m7diLUC6=dCq?AKl#4X=~2#)82U2nhQ=e;+sL8wB~9Li8q94Y7dFq zyU_+W6QSuvMYxkPceY{HwA{R+u`VlQC!}P{Hd9R z-3^wU$VroMB}!QLtn}QX%Ag19W~4Oo~j3 zjE;^QI(YPGyXD{a|2cke^uZ5LS{NgZw;FFZmKir0-!gt;e9pUtPY<6&pNT$sK1+P= z_Ib`{ug{l0zxsIkUhf<3JJNT$?@HfCe4qE->wCcWJKrCDt9_mQuJP;OccWjRU#Q<5 zeh>P+;rF{=bAO|My8jga>HdrTpY(s#|FnO*fF1$a0S^Uy7jPlq&cK%g_Xi#i92e9n z*bwX!e0T7!;CF(L1a}CzJ|r+CHY7PDEhIl=M#!R&)gh0D>q;$fUtm= zfW&|?0TTlz2TTi?8Bh{n4Y)7h;eho48w0imYzsIXa5SJI;HjV^LDfOm1;<6Xu_jlq;W-$lNi{Sy2R`Y#Ds9S{R--U~bu_(IUupw!@dg7*fW4o)(CXR0y1 z7CJ8M_OQpp#6b6fH{tv+9AzakV1#@7`@G}Z$}h-oj9*y5JwXoyJreXp(7K>!gI);Q z81!AxnV@#T{eshimjquMG9qMLh&kkuko}ObL#87pudoo5%bc)nVf(@kg?$tDQ`n2) z+r#&Ue-M5s{0Q;AO)H~lW4tlTxYBsk_?_`r<8Q{!-UjbT?;+kJy>Io-^3L_Pc;Df@ z(0i5lz21*{uSL0S@!sYAf%jqWFTAC9d!G=W+kHwJ1}@p<3pq)!Xqg}zVwZuG72 z{n--WzZ|~;zZrgWRoVT=?>>~?Gk%-=Uh{hkvb@jlLzLxbeqZ|i=;!R; z+`p~=b^bT__we^Z%?tF8^}oe`xc{yGxBHj)-|zpr{}=v0_@9HEH-(g&0-^$j2Gj)3 z3i>AKT+p?_eS$-R6M|O+uMeIdx+e5ksB>74uzq3DVL4$d!h#2mAM=dm;K-Hf7ky<)Wd53HUU`yxdDX%Jp%^^jtCqb 
zm>oDRa9-fzz!ib_20k44RN(r+ErHtu-$mPTB=GCN9|QXa4GJ0;G$AB2WNOHcki8*? zLt2=+o1#olnqopTLq~cHl`ZK8q1>js<-i^h413AjALH&KbN! z5yf$Qqm?#T!h@BFbTII+gw&EI78XJVLJ~_EEVKtD z!98p$C!f1mF#!@T%e?z67= ztN3AcK|NI|rr!}n5ywr8V*y(**{yXXXoXId)|%TttK*mPP5c(M@>?k2a(d1w;N+{>+^O{BP+ORE@ESv8{aYEn&e?RmAx zxwNJ>)V3_?UZj~EyBW}GLb9sVwUn!P+pac~-tD;_`m-5nH-kb0iyn@F^?3K{x z3m8!&W;7e!M#h*jmRZkT!_O>%NO0|>=;z9FVqN%TSSDnWsgaYDr9bic`LyZ)54;aa ztDUp8)5=&`Ylu@SXXUL4&Z;SE+A64myY23{Mdsz6>w_Nw1W}19R3pSpjNIhkT` z_!Vpdr_qQeG~*&#(S{`2kwPcB(Su&}p&tWCV-OkSkjDfjF@0+jXE!cl86UCA z6j{dx3fRUDirB>-eA=%AI;bmkm9Ewy9o7*Y)%DuYG3{vR1QVxGPv{xu?y4^6AXQU{ z!W5w>8PrHk)XZ7gN^O*+c1m$-_EI19(*UJukTUGoq0;V`ud)JV1GDQ#_Qce|-=ZL3{maZ3m!VPEPJ#VYQ19MrfaL6G}>Kj%(Dw6?$g0l&Q9-gD1> zp7WgNJli=_ciBph*W>XN@ZWJfp4GhNFRY&b{V_oDsEKPwdHz0f>oKeI5?hbCbivmb z2Ny27`MO0{-x#dF`lg$1HiFk&8(fsUDfso9g3*gE3Ep_~{A*7sEiJ0ZRej7a{_vY? zo%Owg|N9=9+dH4{JFdC8_jdKZsn@UGm-XJj`-Z!|+B=W;wa$j#X)693x%^k=-d|Ji zFMqv$fz*e0%ng3J@Q-Dj#@;n+6Nz%97KqWbh?rb1@S1OfAXtOqIaGJkM`u@t&9G@za%| zza4p=`O<;cJf5S5BL9cL9*?Kh?dlh1KE=3pi9yO4TX;x^WbA?r!~TNw=9ESAuQsms zcwRb#ggyNK690Gcf7o9bv^mAi=4q}WAxMYE@b*i`5B-Hbp0-mKx*63N8LOv+^rQYQ zU-*vHw*$T84>A zg7(K}dOY^LV)CS;#HICldOGm%ULt=9Qmv7BZ~`m0dpuh@wrtT;b1VFM%BU#&s&4KL zTE0^!(~k9tZoOZ+7e}g>+q2torpt>e?#%b-%|Gv9`3dkyb_)?Vfgf$3e%@E%GqN{OVnH5Zsn; zyZCT7kf`sCq@O%MwiYL%=?7xfZPs)4K2j5^*wTu;i1l1#bph~*TU+gVO}%w%hi$96 zdt{&!>Bk;vV~ix+{wM0LcjA?w>fZgCI|1ZlEYt#Pvt2vVZIvcHp5FZ}^sV<$t7fj% zsMQg-p3|*u_Q#_n|2C=gTeQaL7~7>;A{nLCqy3=We)m&GNb9=w&&X<7x%PpFRP$1< z@*q91&jm1*%X($pmCN=vy(eQi)M{z2)xj}7C&OGy!)|M%KGRwwtzDR&Tj|t={oo=AKT6H%xfS8KwLWfLUK6(#6TDrITdzf~id-4F zA~N@if5WY?ZoQye`<%d;TN#Eq+0zAf3N?IEFfw5=?(T%OS+|}8OQx9fD}tcu*U4-3 zw6rBloIo96b6|uPeMt~(j*DNjz^}02m$`Qe>*p}5^v_^V%SOXry~_!_u!UVzswdZ! 
z4#9PBaA`&PY(0HLMI(!=kwg~P1!cL#67Pu1}12CH!bt2q0ypM@bIeVQ%-Q57%+b@AR?Kn&g>Icy zZpYVr;y9Ed{x9U)0)4X}nRB3U3z+$Z5T7MMm-I0C3)!ET{JK#?lyx34PGBiTpPEFE z8)|C|U;3hh*+OmAcHL|%&{pjfU_Aigt?rAa8~iNS_@O!z0$y z+Ny5*ni7Q?qvGa<{D`(H=4l}BYl-xkzEko%PsmcO-Wa!@janO=zy-9}P&+Mqa-PT9 zkt`RsdV5}st|IsoWkMOvt&xcmHQJ`2g6vC z`PAcyTH8}MR7}Ys{KFf;(`||rA1jU54@lwZ6mCcvOT)6R!VR@XMc9Cd)In;Vw!c=a z08yj^h=$svC;4WBxy&27*(b;S7z(LurePN zsYX6qhT(;o7~R^+Is;E0lSxfjdxWx|-cY;HZ=BRnJI8Mvr>)ZcYayl{`yo|NatuX% z6pgbI{m>ESC);Ult800kgadE7AWP7JxVhfnPPkRE6qb1qEeLCy_4mRzaqG zHTt0}7fTiZH&VuMnA`Me?hrtj^@1=Z6%7!y*JYmGq0x(Mx5RV(GWTvruFu7F5Lew6 zH#a)$({bxSdRBhJ2Y=D6PxbztiF8C_`Hrh^-PxG%whzV}B**X=KI7@R{z8z)J_fX- z;qz!j;n+Y}`vEYpXVY2&l&DESsH+m`(z)aSVc4j2YkMNa+Sf@A+9f`L{dT(Ih2o~zlH{FVl$v8Z>Y4rD9@7|t+%(AHT1VTPBQ<)4!wP2S>|MR=V3g=S7i-D z^5J;cCt}Q~;#gwU??kK(ddo)b2W>32arO)4&@Rtdnb_+HR<~uE2&+81R376+J{qF6 zC%RwT>|Vu3^a!@klEqzW|4`Mz!;^iAoTnrw+2&BSz=vhdC=)W?BRrm!E34bG6Je8L z5FU50e`2&`7!Z&yQWU347$c~iQ8ABwsY

cCam5!j}jO7Ps19?sV&U#*qxz<7n$K z89}Pt{>?ZRUq`kgvbs?t97g3|@GU1+}{xgZ|vgO_(~$&PFYtusdJW;;tM zPe{;vL?jDztG%PTjeV1?@+{SE!fMy8(j7G-Hb3^m4Qhuvmv35g zYai@0^ggWE&9L1;e`T|z@R7C95+h(=Dpkt_e(n_W2Xp6^|y-{ulDs8boD}JsIGWsfU%tSudv3DeWQ zIK;NYoj1$(_|!blW5TJ?V&Q5LO0&LDhnDEj>$}wwPx*dK7JDvaNKg50JW0mWmaIt6 z0vwxD6P8Nixo+aDTw>ws@l=?-Me!56y_86e$Tq;{^K!x_V1*ZR_PxqWtUn%CG!v1#~l93^rcIy zq^qIl7o7u;{ysv&J-41N6k_!s|DgU()GK%D{cr2-pRgNk(%UoVrW2#f?DfY%=GInb`>Ky}h=skU}exklTWv67gkLFd@F)~UAY zz^sy*(FSwV9(THvAI1B7^dsB!$?Y9mknNOAXYQ>*Y?~5K&FGDr^|r@;>?CPpeNB#M z4$l6HB5+DRf+C1;WWBGa62d2KsAk~joG=f*K0LCczO|#pH_kWZKb>DJ=ZXR@uvV_%Hy-VbU@)OA= zdyr%NW?067TYnHoQq}(Qa{u||s7uI`HAY0Oiz>1sB5Oo>5v9BH^p@RP^Bj_Mxgu*p zvE{~B%-WU+ZPf01o1P^s_8^!o#%w?MC8|5&R)56m zOtAhI=#{pfN3t-_IizFC&JEoog0oCiF_FHe3Evu9!W4Y#F(%|`Ch7)gT)0$I(2LwxQ+_}nGsiPXHH zyFd;NxIO1qJ*^$A-Yo9zMt(Xhe6c@v!^OvfL(i~ z&}ooyTOE6>g*3UvzEEUTcWe6*IdrQF$)`;!4-2K#bql#DVa;c^QyZO<)8HG}E$c+u zTc^X*+hKvBi~jC7W-$icpGbunhaS8{R)f$W5zYFjesF&;w?6^Va(%svAZVO<8K6m? z!Tmjvo?kJ~z8XPeW#9SbVXfucWRMv}UcH3SG0{s(d$*sc(AC^5n5n%Xq0J0HJ4N%~4ySv~h1qTVJH6 z(Ky_`@K9fO!_Ao692V?}(vw;A#OkTDwkU~72}0ZLYA}$^%X*F-%Zo;5rQ|%FVSuGj zTv&!H+1|G_0878xzgLUOSX=c%L#R6`j6iI)h-`c?cp><95F%QNL$dfm!c|#Xt#xgqvr7CoJP^X~+ov0_1MU{*FR%d1+ zVJifvy_1oP-mAR6tZ$vym+FNIR$9w1#Vv#eZXMk>qug&4OJ-zDYrybYw^y)d8k8cN6!Qm)l$1bmYM;_%6gNf7Ic*`x&4II1d=HI9y2v;0R`+erGucb&m+i$ zWyjlRAe4Y^o9#k+=C1BFL|KY&t&!@lQo0nR>KjC%f>O^_9!AO|)&Lum_56covdo@4 zRJqjq4RgQdA>eh!@f!(KGaYbLT_`CKC!vB;7;z@+#l?sRh{`f}8K}aD9DD^6s#e!s zCJGJTiO)L@QpxUYjb88j8Xr*5vGVe^H`R#QC;8f%Vy3g9RxeNXRPQ!d%l10zmXWph z4iM0O&_UP3_Bg-=y7i$9tb?eoH~K4`7oyyk%?W#&36j%XbT zQ!~3U>+C?h44tK!O>{x`w(04)T_x6xfkdcRyYoS^S~Ctv*!&q`^Y#Ld*8DvQh4PFe zEU&Wi#I0xI-Y#c{>>Vl6?l78`dFmJ6xFZt20QMM1GvZH?)$E@06&{XKYFu3u zVQ&&`%`Av0y|}7tyM3o$j2}#`bxA>@zD}XR6p5G;_db`fHXsSeqz9eAEZ}6DWQV0S zJIZ7vz{-=Ug)pFAVcI=)RwP*c4!W4S%B}~4MyPsM?YLVIye{%v5ysZGsL;yebxVl@ z``2C;8n?JS`v$C_x6Mj)l_B7dux{uo$$Fz!M&PDfFNMxpJX?3r1ieoH?29&W}^dYPPl;s&u@85P0db6;hh`G4aOxU2i(RwkCd!_`H-fW(F{ 
z6SJO;Sude8Iy;H-vm>8S$mr8nd0p6>16LbW=D;FjvN>?8G0r}D#1Ii)>k}q6GXv82 zRGycmR2s1kMv8m0{-;JVz2-L5i5>Y7YX_B~)6tqch=t{C{s{P^d^Y!8vivsV8M$C>*^Xf5Nxk^U}E8*A@Nv@$ANi-eY}j9J~<+P+cQO8J_gzTO>`osi2}LQXBK zX=~pqS?r5g&t?mVD3Rgk#@ZW zL`nJ)Nm(CS#OzY1D{~uzTr-FA_HSU^q0g9L>BA{fRjOn__#bq^?Nw{c!w|Dq5{e~fr%8OJ3W%cI%>bLQOL)}geC%v zN`w~pt1=#wB>C}DTnFur@SU?ol1)EUQ6~1a zroFx36&=~~AuloQx9lkp?KCVjEZRN?KIgj;nx0>=kR2>hVIXg;zDHWTvz*RihFhqo zV`UhLnPxG^LFTZ0{SH_stlxXU!oHL}O1E14(%)2zTl3%-i3~!0fkF#G7(-9R8l8ZTepGL?2Asf$ zf03>vDs(5XhafActi?%|nEM?Vq|Nq2VW}Z4gV(FK*dM>eWTk&rA<)6P%s#Xj()=0{ zFGgk21GwaK?KpPpNLtUcf5tj;0x$iU9bDoBLtuo^$z4xlhwyv2tWw4141&B+m>c-#s<;6KnH)`2Qf*uWyHAA5Oh5sbp1w!#e_s!EE!OO%;GJtdK;3eMuKi`7TLX4g zEjoaJWjcFqEvLQPX%8hoc!gd1AqC6i$>y~E!iE~~DckLK_I@UDvyhy>c6kv`JlQ|U z#|ZaB5N}AH8=N=xw>TK<1opdiQdK%|JTZ!geJK}-dkTmFM;)DbpP6y5rj7gVjyzDyw4ozLg_j70Q z{jQ8HJ>fM0X?oi0yt0^*{(S)EwB^#AxDA!wB6WYwZA=p?urY#N)(Komyo~8kk8}Jb z`>)`A`EdR+jBV3;A!_YYb7J3L%BE~iyAvfOI;%XI>hj$rA531f_1JoXFhVU)^9kC$ zh;+$*_y_su6#`WS#I@$%xT%k-)O%IxfFfTBt53LqDGBt&i%+J0l5m)J!<^<-b@;$cwB@$DG4kqBz`Ksn~+!VeO>iZPF6QK{2zC_429#2>w z%CO;0)Vpe^O-M0q2eUt6N;ZR=K=L78_A12=1-jj+H4^n(qE^OQUN3My@i8#ZbogO7 zSl!wx{v2Pg6H+UD^cXVY{PHEqs2u4UQfHn94#KMZR$J(q z5{y3I-ET^@pO2h7JklLjT%vL=S530@f#M7wo)x>nMb@*j6NOh4M_O?tsWwJC>y!<) zBfsgBkAUx@H-vKvhg<(%QOy+FT?q7|x`$&E1`dY{3{fJMI)MYK^1CYp6@-y`=p~nt z5@c~1sq0~+7UQn>+K|tI?4Jsv_CWvuju_+I&-Yw-OfaWaWWbQ*m^FeN_6@8#SKW4v zEJ$H9z|qnZ*x_<^a3R%JX*m&2;B4s;V=ZH&5QT+NsCGzYByxA89PLxQ8IE~FzF8}2 zOa{)drcudS;s4X$=w?2SX8w`4krb#o=|?%6xn2gG^1Uc3B%h}kJlmQo{j+?9b}Wuz z&mO6p7xX&~UDF*;Q8cy4OXyTjQRnp2!s>!KMfrk_gza~+Ol7xcT;h_}=R=otyf z?ncEmTJssQHbmM|r1Ush0QJGgJbsy!1qF59H)_kZM0selmcC1kVA@3#R=pQox<)z* z7#3IoO;cwLnis|+5 z>FLtDDXN4jWJ#T-ZU0G`kFR7`jwATD}$ea@^ak}Ep z2Jw#y?O)m)v)+tbIBrfmOZ9JR$PuTMsiA{LQDY+HZ_Km+t_Ul^>5PW_P5nKq#q06N zjb{7DkaR7IQZB~ErL#53vZ)g&q8cK4tlQ_1_9*M%q>wp~pDdIr!`UKpz?<|**bgd& z%5nnY52#1>!F3?86AhJQOI3Egs~{vDhpCtoan|ci`$f*t?$(L3O6T$61b$7?>NXKh 
z1V33*L3Bn4D7PyCMLE7gw7<|+#zG5P(@NQws$q2AkCneb>|MEt6!EeCF4*oc^WTfeDugk+=5Ki35?)fh2|hYbX9A z3}ofP&Z#~q2dS<0dPrKX1E-3k0lj#G-J4!)a{?EsUi>20i><^*tPknMN0HD+$tV9m zdXeMfg+40(_Nr;K8~PP8aM@g}Hys($mQNkYpy8t3de0uVfUZKXN1Aw}1VD(mCA4H#&z8Jx)jV zJFdQ94^+U_;D?8+3qGfZ30jeI0oL;s-l_S?bNeIZ)Z{U~WYt#QhWLEP$@T(NG`e+3 znHa&k?Z2|6hq{uQHKV+_?KaKa^C@QtvW4c%@*uWmEJbCa!>~G~$r?RXJe&g^WBDQX>pO&BG%MCanXOuXCld+5d5sojdwTCVA%CLZ>L}BlZONx8G?79ek5aQ0V}?kMZ~}jm z{3_4(By;se##X%^2YBBLuaeQbkwwytI%fUP`C_&aDp-#$jKqsGsMkOLB;cmbX&3eJ zPL8wZ!!)HYoM0couIcg{)87{|wO{w596Gs)isR-|+>?z0-1Tl-GlgO{t}KI;OtW7? zi8v&k&skF>85(%31b5w{$&+(Cf=S2VCB_L>XP0hq`hJEKXy!(*4!jIb%Fx*dR|c)eA8y)?R|FEfp)D`t;A(%&P9xk)S|4q1_)vOX z^75et39MF9`{!V#&dj*yWoA|H#s>^5gV6JIT~8EuGV#~ir>CyiVb6!WK}T^Oec>H} zfhdpc>j=skz6IA|!>8kXg^j9CPgt?=8nAbHBJ@fGm7n#Tc78im?&yly(@+GkYbt#OP~*R#a0Td^Ijg@~GM?dZxF|J9tUWuppU3TYz{5AC?6&VG8Y@WJQh!P2DRYAsYUFmMUKh&r=?$QRXBzkqwGiAuX%hWi(!jezM@tn z2)5ZimM|NS?0#Bvn(857?0UEZp^eL<5o<<;)18{(Z+YjABREb$@%!N@tby!T&5c2N zkU19w7caZX=D~xQMy+kwV3G44g6iTG>+-c5;cH9QWeb_x_5Wg=5r5DaSG~*HhQyz5 zKXECe0=2c~_XM77rYlgVwB}{UK0&u2uDDT=)!JZJs-{DqYAqYcsXZ!M7p9Dfxk2Vl zoZ=wd%C6|@3)pizBVugWm0WI*KAy@_Gs<*pWt-HSLfh54T7_n!Bn$lhZ7AMYtFfD? 
z7T@)60!qJKaJA(0vT0t%GSF*h_>CiF3anO@tu!jxM%tywP}B`fXO#7+8uF84OqD0U zb4Ga{r(DYNKh&1z4foA*Sw~MPJf9hg(lZ(qJ7v|l=|2>rFW%Iw6{{zXM>^(XCveq$L*E%S{ z!!1IfN4``c-nM-7%3~n8qqni7!jrl5i}v>anHKj5x+it#Z!2Wn@>Zhrse|;f!=a1$20jYzvi!ZVi#;xsc_!kY#X2$g(^|i;{}% z$a0=VQD`||CKUP@NtC<=!^roGxkMq~8sxiSdTMUN!T*7NyLO+z7m)}rxFEnA6(1bZc6Xq$7a_}6;p?yC`pZCdj7qCkX z)$(I%$#5drgOKo`sWl66d=P8BC_Nv{H;T>fAf9#^fwtM5N6;m^g?7&@Xg0pn5w-yq z`gVIV6h3O5k3saSWpMKYg?Q1N?;q3?E0XsUYQcB*&g4~6-~)6Wf@6p@$`|NeZMy(#a`X<(0zk5!?d(=zW!bEz)l|3-nD`xmB?wopO z(x`SwiJMr+t$8+@hcH98FVtbfrCw0Z^GJ!|fPri;O z)k)RZKOo?4gqaN^Kvf4eqxH9hLcR+I*ptkA38P_8SObZ4=~;i}5JkJ=GvCBr^zMDZ z^NcqbqJWtp+_S`C|F+M0R`{(VpBIZM|_GUKV4ZnZ;H||BeUqj0erv4koM*vh#A^ z_wLL_LOl^}dJmiFy!S*Z?L~c(JkoybH5pk671}#gkg4)y_V_@nI=yoK&wiK%?|qd<2V{0-(2ZL2nIzJie69KJd}`TcltvoQDH2swmA@eC zYn)%@-yg5%xb#bP*1K_UCuG@Hj$TKyWFwE43V2YaO&zZ_3nh%4z1}Eo_2Tt}5-qcZ z7$~-_JKb1M7LSXkV|n7_JVFkaX79jwlKBl;7@6vlj0}PgX(>6)HI`FJZj6O$jL9M= z@acDCw8@6rWiJ|6Gp=QSah&0U(v9(r%jOgr6Qiw@2Wwsto6{OSZDckS551HumgJ5YG|_JeMb?bF22k&yEyW zsNqMg*J71#HZGf&r?u<^vZ4)a?k2$Dr#^3E(oc1nM;N@a#$+|~lY-^r=-r~j$l$Eg z9`#PIdEC=`Jg2{2*B&jNu07eDCtr;GaC_|@SpX+e+T849lGN(7aXQYg*W-@Xlf7bi z-*h8HQ?1~&`APoL(JQ=zFp=2kX|?7j-sHcTX zH_nPwzO>WE>!8l+6bYhwmpXMT2Q1NjM_4{lYmbOLP9VVtBsxZ?rMsIzoKS~##U2Fh z_CGn!1vx_4(5K^7KH3z^+&Mb2V zIkes=Ymn1_9r=QJ8R6x@OSZ4JgMygjK}^p0mL*4$T#)$_%?fV9Pk$aadtBTE;>htE z@^i@f4MxaVM>&CcX$fg9KO_SP>N5V|P|ri3N79fQT|@2c9wW^5J9$M6D&?*NLDdzp z=ZAgP4$f%nrB!EV6N1HML*0*pkqPA-pbePlCH3bWxiL6f~#{eQfyS$S&XLWP0}>L{0S~ zXFb2>1ZKm&m;*lOb@5AsJCPk!K`6B+6G4w{4&0b5uwKso1N=Caq|8Y~q7NEx_oHBD zkJ-V?-NfuG2+AI5uAehx^t|$N@vVW6gDpU9Bpc;$GjuQ_+uPfE{XWx?z46ynQN0@n zk=ur<%{)n8s&~uYh&II&nsy4zhh{=;K!LXsR=*sgNTjEoMRde`9bVw|Ih?2HXF(={ ze)E+)Vwx3|Cob^Kbt&{|&<5fBpKXxdaPB6zf3~^+Kj?B5fHZB+TZB2+=Tb1I`@l9d{BZ4c@y&jF`omtr+2e2d^ z@Fek;$N9z{0goO}6&w)O0%waEK_$ngysKr6r)E?jd1jBm%ydAGa8RIYhF@NKY9=^W z+%&U1e{{|TXYdSJl_*>BLQr~6L{OYQPFF{r`lpwxqSgxrZfTqB4n!;b^c=KyW_<;M 
zx^mGIUp?iZ)UxZ2L=GN9GK#1j(U&nf)(hbkoUas@ukebk&Sv`YoBu!D&}M{wn4Udr3B8|p1qpyzeg`$FN67a&tM#;qN7*8A*5^P`r{YmPdc z5G5q;TXXq;sU3lhw3j}QnGc_+RO~)EN_dieJrOx|;{Y;ita^8}=8Ag_ObxM>=6&7Z z!KWhju0_TW#Xo1A!myUU2hfwW{Ek%7_I&?ERSgNr@)LGlN0rAWPI^{FwE+X zn0wIK?DN%K-!`Yt+q_Yju&kV5B+DAIFa?~-J3&exEv1i8XZzO4fj(uyltX>@v@6za zIw$i^vj0wBbKQ>ohi*qWXX&Ne3|G!MEReO@6AM;`M4b}ae=y9F(MTI=sg5ku#msdH{UEUrDd9twE3zwup=?iDTTjaZw6XAt%I zm~N8pDY}oO_D>3EVV<=~oV+ilBMz$0Z^5q1ds6QGN-lm&?!6-SHfWK2Et92X8Z-yy zYu{PSX%5WLnxA1F%z>EJe4L6oUu%{l-{!z2hG7oO(VCxCUzg$1 zJ#e+w%t;c`>b2&FD3+;&2aa1XrJZsyiU@^hb>YR2)1IQ?wQm$JKJ3RhF<(j{PvyFu z1DQTA>Hk7=te9DreNQ%6JkZ4TV5DavdPEHvuPfyG*LA!OSeNV@tEb6|zUiB z=P78gOwK#nYS6_4md=l;<5Eatx6LiMxD%h^s!00c4_c!Y7iTYuD32;h3&)DH$N|bsOxd7GaD_c9#6)LNu! zUF3~fZ7t4ih3Z0yY%xbU`y(Mt*%$3cMKE9PM@au%(sXnQ8z*l9HVF*i+gmm+=V}Bl z0Ta_oTbw1?|32O}>FJ!>sJ$cK+bIId=Iql{!ko8A4Hy=ql^sX~z)_F2WVL3@nlQ4) z$i`5W){M`G$Sn?uo!+Xg%E;&Q6gj7$qHrF+&s>%5cQFk2I)OCfFtlw_m z_cG)$6?<6LS0P4ot3T_PqLPoLhjnTM^j!f@d-8x@k4Il@wG(*cPKra@xy(n1eH90` zP#CmFjo=g+rQ_pK zZc6$uReF4QdSmG1q+iVi01?_=JQ@BAU_nI%KGE7sE9Pn~aza?m-)S;i{>6oYawjuQ zeH-$OGWn=f69+&N+0n%28!AgMYr`KTqUv}#)h>)y&ZpAKx5!eGJWQQ$W{kM3JrcAB zuqUz&-IqJ{tj?=-4jO$<9Z(qSH_9nm@Y$kO_7fDPAzcCEQsX*o@AbGXMzvL=^z;wf zWMa~F4@OqY8h5xRsx#ldV18~jBu}+wKZr5_D-&(Ab1cktAg#&)g`oWi^#c*$_)vCA z#OjYS(6Ahy%k`Eq$VD>lm1DN)<`yuv^es6?I&2(ysAhMxMbc##&{U(4MZ;Of9<^3A zCA$T-zC{r zC_`EHPM$$^(24S1!m^Y%+nL5&cA$BX|Hg>+=oYmd+3ho7jrv$)n3}j{L%HZh zR&23-r!rDv7DQJ}Lan5DCbi$oGXHGW7fu;M_RRO;z!>|9Nf-u2FJ*Dd{;*OKEPB9q z1{0N&E_q6}==4&3hl;ydW%xvT70p~TSj~|UmrQ~`bC8l#aY-<@7WDUIHIQr$UInU_ zD7EmJIl(O$G$Qn3vT+*EN@Ehwla0f9R%tDA)SDXonS-CYRb@VQ19=%ah$K)i%)IJG z+QkO$oHk0hskJPx_yYhdSx58jMXIX&HMuQuFvba>z{Ye zzHGMtItOT?Wabe0I8}HgxCiT){W1g$&m}pXf$P_dI{@*3ylw}Z4Zl4fh3#w0~ zTozZX&0@_;2ijhxM)wvRJ$DOE<9xsWQ`ffxNkc67Ip$u;!O%nh;595FEFb|#tygmr zaJhXS12KahdkOEHau{_Q7Y{C39GX^5V~elXt(Ozld-3*95qr-J6NLs`M+1)N`Yu9m zxe|I~h@~}*Rs`C2`=VGml|0KbCSf;|f;>CcK5VEJ`*^ouSD?M0l8BB++M{#b$!s0$ zj(rQkA&FK*1m&J~)s?QCpELc|@1vRMS6yfuUqd|j(JQFi^99@vr}V?kt5*|F7KQdD 
zC-jBq;T~!S$gbR8*nVuklnr$lM^n#%uMn1bdF*!C^F+t^V?vA|zj`<6Rir1Zm&HT? z`z{6nCgynBLl?A3Yy;CfqeuAB78@mO7FW9cKO_vQ*^#+X>WSQ?AVGXb(3W6d8zuRf zkvF#tfFG*| z?TgkhBgI5@$y^L%N7>&Vsvi-Cgh@aEgV~XGngr>k|Y!7Nvf75KY$UvXSQ z_A(|B@QWN9FSBp@yAovkF%Te_p(Vl;EEZda&Lte6A^S*PQ;j<8TWKySFGwD1g@e+m&7s|wr=T9+wrlmE(zyQy|1iG$Hi5>0MTQ!R=r zZW(oRHxrZnR#WZwhsCu>TtibWddZ=DcS_usn`&A4Lvi1hxO19nzc(yyg~XlMRQr$o zL;0x3lO5Ys`*-53uC>w$ExInHdTV`O`m8u@jw`3sNRV&=loc-^Mo%fYWS#~mMS%YT z2dpoc&nk1;&X=h->9g;jCh(hZ39KSMp~&t>id>G{yJzY&xm-k;wJVPxPkOF5Z-uwH zE%{dZ{5*As8CTrcKYd2_eGY2)+INr%ic2RZ_^7QhZ56pYo)WAEdWDL$H?t?R-^4@PwqUABF0b$Nt684qjW3;k8aN+`5Hft_|9 zIuXtZM$Bski(h5sM!Il`mErF)3KJ<5;_Lm@?=*%cB}cOfy%Tc(NjT zRQCZUt@$;2(HKJBY7C7``Y4)xU&S*Y%DsS_3#Q5)l>Ue_B6ntm)3r{A)7}`GYf#6e zQ^B`2U>grl8-WoZ)CK(>qJ>>@ggd)lntotf-t94lylL}%e##volrshJh@FfQVb z9skszasJei#tZ;*miA<~U2~a0$6*w0#;6r^xI+zI3g+ODEhY(obV!_n0mhuK$#l5w zAS|hNGGmBSo)S1ZR*(}x=d*JTJ3B{F*qjr#+9yPp9TXcnsQ>JXKm*kOMIps8u+qsoXd1lI#eeoGRMJ|a z%kfCHpIW6~evB=K1KXue;4R|K(l8NXVVRP-9?v$(cc_ooCWF_E&Zd5OVdP6tDFuWflY5&y_KzT@E7=Dnrq{_u-bsUNN+fxS zN=`l2#s>>3ISTe}!Fs8~R8f{CFT4FLm>svod6adx69u5p^xFw+`6irBX~SBc9O`~| z5jD9d7r0|SZ>`v5?IyTxH`1uKN<(DjoDSzoRNl?t#8_+@y=+_DCn|B~Ouv)za%rjY zN3kf$bpKi9MPD#~B&k*C2kgWp=t-z9IC&94QXt1OO#~cJHzm#kLi%A%%nV zW2hguJ>C5p;eO3=zXtxG(m2v1Ip21_7P?^4T&H&AdTg&yBE zIE#OEix1ACNTl2CAAd<&n)Z`*XlXdI<~yxjz6Xp^=DE1~Nsc2A&FVtxl0CFcb@tGO zO*yz8u^t&M3_)Ug=HV6fh?<_WnCz7AHaacNKDwG-*E9a-Fxt5}KFuoqDml~~zqwEr zGdd%oigpR4%L%+qTyBn)C5egNjCPRKtR^joU+(Q;AZN#4B|Fr{qd#@}uU|g-$CWaw zF0O^>hv)Gvt%K6&0ym7L?kFvlc?UU zqq~qlVkYf&;C&=0x)vpzBvPC9{^z}t=?I6MYz=A021q!XTU zE-*ocy0|0}+O?<%CfU0#5!$gxi{RvBwoi$LHZOWrTHw}RfR_6cd4OAcQQNFb3-R1& ze}e%r2b1b5Iia;fVp@q2TC7>&igtTq{|<9co)W1BRed<%nNqXxiJz zK&zd|KN=(4Kx)0v6|>8UiKeg1OD*dOw=SEbrZhpHn>`}j*XcL>H0hzqcOqh> z4rkE^_gZ!HxGJSB@&sgy`*F&QljSj49!JPSGxvp)&9P7~9`X0AL+@zq%$7u~XR?LJ z=GlDiW$au-ou1U}^^@D_GNi{iFA{1qYNv_C$2f7C2d$WK^fXTaual;Ee7vSi^Nip% zD%5UF$W$;jG3yO3jDMUo4+_u6$;!qavtC)#NGMgeCDPJsj9uZIz5(Z|9g|`U9&AiU 
znm7$d5+e&6W3{!J@hE5BEi*S2&=ulkRNC4%OBRoa6zB56K9cXqY!PM3j6%XkkC?o7eq z$mVVyVGH00DsE&xQn5YCVY`P$RjGIy&Ng7tauJV7H1< zcRua*p&_;GY5QLsZ2=(TxMlz8@yRlr-@3_fUFcH(_0%rxu6+TO%XaWLx2WpEB)YIx zR`eQA7`S|pEVW`ileuEN`2B2taW9j;4s@S^#EA>m38*D_P2wwGEr&3#WB4lYD3wKl zCpjV78k>_Hw?^*0%3(|qUXF9J1wwAzm5h;z^%miRs;ITBEw!v$Yx$)_U>RhrZIj!n z(GzyflM-w+7KYljmfOjr{KpnaAoRv9WfAj0!7V5A^l2?(V$q(gn}h$QSXBdjiq#bB zGQWS(?Hoi>%w)mUX}P*i)WlDYw8JlwqseYjL{jo_3IpDLI}wxCoHjhoQI6(LHN_=4pB{P5P6Z zsM>}q*$=;+Yau`j0xqT280^~j+|=weMsF-(s;q2x!FX*He}g5WenM<$Akvtil--MP!Y7_ryYbm%orL zka^1Ce0_0r#kDxu2#!Y@Utd0W{vx+_YZ{~e`F<{p#EtzkMtiHI(_(Rtu36r0m$mIF z^hZb6glm@VNCp*=AOFP*C*k)8bL6K>$dA_Y1G*h)?xlwW+!bAP9V0nJi-yP$M5%wG z5I0P0V&nkBo^t}x1vz+}=>%3cP_o;81a2Zxe`FjNCQ)m$tVN$GBbwegzubS2i99_5 z0;wNXk&$gu?g;xhP>nl+RkBS_6%^rPa56TR;a3VQ#XdB0l_)L-<%h81k3=3?iq8f% z1ocw~<@S5z8r&LhCv0_OL?LZz!foM|CE1MUx!G}v2X#Y#`*J2&Hd+t72`Nv>?bK7E z5c6Qv+N6F9;5o%<*)Q}{Sq}+kP`HY%jNHfQ_1KftMaQUc=WnJR&e1@@M7OT!ucg@| zrwf_Xniqj-bR#3oiLgl#EJQgpOjoz4K9_)S#xvzLKEue#T#^<$Ff>XrT4s-wi+nIaacnrKgo}@v@>>9Px4Jtv{glwFt)Bd z5|i_J+@QFTm4sd2Q;@KBg0WWEUspd13s=Is6CGq?^3HgD2Zwjd)JZ4ZuX|_u`yyV= zDA68`c+EY%`n?HnccOZesGs56FVFn^r?QUVnYo~{h7~9io&pgRJED3HUoGjj^}b+g zE1R_v^9r1uS?&P3KqgI`{3H)@5)u<4&PDY^r+h2n(1z+rm8Rq~Rl34`S?=p@UZA}w zA#aly=h*QS=397h<###U=dy~cn&qJKyy`w{C%*)g^Z$4|-?)Y^qfQ=k%(uP9CFa`& z#wq68BaE@mCgV)|jlT`e^&)~7q@Glf4bqy@Ls+F()7qeMf1VR4x<&*vJ1(cW3po>XfadDZffhy5bzXZRXuAc) zpdSSL8A9tybHCphZSF5NxJs@7B`hRyO7^6L_2h#xL|`{XH}99Qhn&g7qt>Gj6Yl!9 zJOv6_UliThJV~szO6!(D%6CEr`4zkvVQ+V6=du&^`c3Kt^SAG#0K#dv-{Y*qwi&ca zk0Mj{pY}WNAbh0?D(G1uJ_Iz zp+rjcH}taBGLp||O7$}HsVpIA(E`!$Ed;kdb7>>URo5XhXzuN<9i^d9Ur=uEgX@LU zMND@Bf0{?fQ@(A`1YS?c3+~m&4+myPh^yp~G_Lj-!9$So&|NG_|NXwb8&pEqRC*WX zxkr~e<%g84(wo)y#nJ{Mcxb(L=T8|iO2&BQfkgUdKXd8V?!1dQ`N4q9`I6Qf4{caH zTiw~Fht}!Z^c}i)2S+Rxjg~y^^d0#e!Z996b+?}WN>C3yyC^Pczmsu6)~4$14!7YR zHK5q}%~w%t%C{FO4-abWkLsIrv`c|MyNO*Y@g=FzK9aGB=19r(Kv$Q|)HToz-Qgj0 zAS+WK=ZpG#obKG(AKZt7oQ=Q!0kF474Y!#4Nph{qb!N`?WLB!LqLh2sb5LmC@|p9K 
z9Ff$2Uq}sx*06E~hwj^&cNtebSeX1UzS=pc{_coB=5dc!Ht81EGSZPYf5N*-@81=# z-Wi|#PlH`=)&S)$#$s^3-u`wO;ab9{=EBDi9_L0EHTNagplr^=A&2Kd_54&l52)vU z_54IV_o?Rq_xcQ}1t5JWVSVF&1;E9J6fd5LWpmmH7gzgrxJ>4&c0xy+!T_!18G%i@ zbTOJ|ClC<$D9y0e{Cy!O>A(dfs2M!%N&;dR{Fb6K*pU0LP&vv~4%j2#O;qXx{E|^@ z%+I;$dkp!O^slINTcRa>m79LwQ2IG4y?ZeIM+7*~M8Pvj5S*GshrZ@!=RWAcd^bxV z<@1v%%UHyWufB95gN92Z`G98yE$asZGg@WGa(P_E@QPk_V_i$bp7G zv5s6?Cs!cs5Vv}|p&q-EK{9ZI%tm9J^z1N6#0T++tSx;!ijcjLZVh&E_OLF(HQ_)U zM~9z0s;=?{I+forBUNy?a68heQ{}gOGTAAK%AIgZ1>^ZyH*Z{QCZyrQgvwV=Ex)e3 z{7E~K2DO%(pysN|;)c}Riw|Pto|RoGy_g}rkRfG~WPu-O*H)FWtf_8S@*|z&(%fn0 zbPKPLqwZ~x$te<(*H{oL`wf}bLi^j_ zCp?#*p4oHj43PG66s#^6ro2{rl%*&k7c^$Xgs5mn^5$CK=G72N=+9>RyHIdIU#Xk> z_uTS?#4IO9>Fx*b{BX%H)V;=~!S>ca2-RmpEclv|*^+!?dw9nF`(Q$Lj!KxT62t)Y znBvVo^LtF`hbnfw0n=D@B(lT{5xQ5o2WsH|86o9W9~>_lQ#6bmm^lWwE*w;1pr5ca zZ=n1W08F&6rio)}?6Z000NM3>1;~ut1e&lgw!Ma0zvxMJwDtq|%DgpEF1rz1+N?S} zRw645jo;CMI}GRdg?AjpI-IX;#Q5x>XH(}uf5MN2Xxw5@$}I+))hz~B9nSbR+fxFf z{nDB@<lCNCMvFus8b}Z+|&^<=aVEIo0r9VexrS5g>=9;BaUlg zWk=k*0aZnw_J4m;I}A7Hxkcsr@7E{iR1<%dG|c2jGwaDtb+1TQI#!t_P;^gn{EI4)U*Rl6K|RDN@NinQ57K9gktjkKD)*WXexv2Vk~IP!;Kwy?Q-dl^*KU?k=JK z^lPnXnBC54cb-T7%!j~3mb})|GdL)nyei{=8rJD-YK)5?7nKQ4Y7MEe5z$&GJ%q^P z?fDHU1On6&P5TbY2s{4s2*0^T;qreS;f=2Gr@0O;`ag~ELg)W{p^TJz_vWaWw2?BdZ8ir+4RYSfP4`@SaE1Y%W7U&$y($k>v%RW z#+)ZVc>Ci@4&3b=h{9>-=EPmx?_47DJRp-MB97e6Ce!UM@A_?e`kqpm+88dS>C#zj zRV?;)bsvvA^HzyJTG`2YPU?YR6?+jpqM?J!S7-fBVBwbqqvbOYu@1Nch7LqR2emuB z07PLC+P)TP8ag}I-0w@ywD8`KpTda3xWTiBgitx{GC^#Syn` zmf)92zaUS*e&tt!EcRMnF8a7*{0h)vjECF58cMKFAvok}BxB>3=LU6k+{zc`esuJd z=C{=0MjX?QbuREPDb=m~R>a_Z?a}tEU$*Jxs2#7u&nxamK#y>T zV(z@+Z20;^=M`V;0Ke0LmER{%sMDA-?7ZShWQ_m!^NJ(MGIUEF8z516T-+cI1^t|0PI7iSheb6zpVk;%dHimyYt4xLvVJlY`V6(1wzbLSNwz1u%) zfHgLBUXdH)N;n|ZhWEpH+M~@Q7m^xj|9|X#2|$$9`u7&NpQ(gQ7KGusYo;# z8FWBMR7@)iQ9vjJf|`r=tu{ou@z&Nq>1;)KYWaWd1Ru>q@|8Kj%co_W({QOT|U>w8h z_MLx*W*BC7OpCZz_0L{ll+A}17*Rp3FEEZ*7Z?GWFEGN63ykCc|GB{UK5l4Z1m3{@ 
zb>am^_;gIcfG?Tne}S{D*&31SWj-gn!1x|+02qVwswjOjK{a?Wgc&Hp56t<4)&U<|sRkA4a0;9T$hh_N8<{ONg z^09@oqOZKccrF&Cm~h;9QnV=q*QT+=`}GTr*Dh9z)nB{NI2sq`3fzP7zj>i?6>rCN zXjoo3vIu*d#tu5e>gRF7!+@h)p)N4uQe)T3Yk8-UA1X{>tLF@fJB{?m zl}7E4D~*Hck1LG>2yvzHOhQ~~JcAHd8aolI4l)Tc&g9T7H&qm;^FN34)mBuS%1bwA( zl9>&zG|GV!?O9%FOkhCusjoCfsf53BrST|)3i3+h7s?V;e?(a<^>5PxriauYQeFY| zdto8hPI{$LQN!S@`nwR(SM|r{m18GZ<(0>x{f1nt=5{U2lwET>nP+ zQe@ipSnIp6u7*k*#Nc=6C|=Iy{y|X+WUbf_V06gwr#iWamzNsTkhB5!6<*}-RZV2W zgUI6~mm2x~h8=GI?WfcE&f50}`MKk}4^ZD}#F)uDjn^cO&PG|~#jWl%7QyRe-&s6< zoC|vW?lg9mI(p1IjgZ1Nbn}gFF2*-mmF!YqAqUATe1`P&jno)*Zi#xA+?P1%x0aHX z8^P{BGX0xMElmj)%Tf~!SlAJoY^_@TOhXqqf~cnk9*~`2lZR?op}3YWy-`7v{6OJe zfAtT)cslx4qF}|9hOuDD;KYvU>%kHz^McZN;_RvJW)uW=oXg3L_l>q-^^ z`;=^{wk4wgK_KNd%Pi19YHUy3S@!)3XUxTrRhELj1u+*loMqcM&EmwX4bH?J^UiR@ zLGM5RQuNe116^xDxiV;qs!&J8Ff{{ck6ym0!^mI z;LaZR=K=)(T@WQ?k@Ct9|tPjV|iJsN2AdJ zDj{lu*Dhlt(N`Prmf?Js5xfx*UCVzb(|Uu~OBd=B_g7@2R3$tMp##6stu+(uzWXrh z>Fs*Gm!tQ^l5oQV6=_HJ*zjkJUHz}V2N>}>aQ{8QvtS2{v8H=92`4}oK$!2I;(wm1 zA*K4KpFN%T1Sd4dyazEGUakJ=xTh<}Q2UBISJD( z?pWK#1lvZJCGm%Ok#W%Mnst6JAXFI|lp6SIYTQ20_uREii1J_qW!#xtenAG6R%>eD zE*a*%D5$FbJ_y_Sk2xEKq%|RGK8oF%mOc1s?2qJ)n;LQSo$XHCQah{E{N_!m%QSPA$mss3`Y#SqRT+EVqa9%WqgkilLD*_#XuR5$2 z`ep>~gZ5$YetSSdwC4aesI3`+@1(|k?s*z~tNMzh5mVe6_$mkrk-qBa?tp_I-Je#Z zJ*`N4T9NiNNlTA=9qdNu4xdo7;Oy_7B3Y2iLl!Ru*Qs%@LW2*QMj0M5PPfIIb~Hnu^H#<=%p1WN#Al(6xV(Q<{S*h8Wt91l>Ml>jW|{+s zajaZ2d3}hyR4TUdu4H5!?z(pg@1wQ%M7We!YT`b=W$Hop%OwhpM&QvK_98NNFn$C$ z8hqDs^j7s)_Ev6pfC*dfi{1R7fGG6T2(eR=74Te|!$Tq*Q3!8!6oQ&`R7OwUx zgJZ?QGoWH;#gb^wnT)Y+5Mu0Uz_Ufc0%KXDVI@f&zxy59lq6{@D`t7GkKh54fOGI2 zSfIsm;Whw#xV@SY-+^`}tN?FDTwm&q=DM*VcsQ8%l>zT0oRULihYgjACcy)#nGlVcUbS|fi$Ei+nvTpGjmnt->Mq`& zfU%?cd85&3!SvCLSrFV{M>A@s+r-Uuo5(4*KX%uG*%CY8BC%r>yjVl@Fs>@}w4cHc z&l@QGoR(!o5bHEKUbi}{P{o7CyfO&(&Bq2#371p2xG)#@D^0fMR3``z_QSo3vCq5D zsJ?)&d2GTFXZSJ)-zI!H!>;IQj`0V-7-y*%%+$Bg&;M#^uQn+k&%(wcs{|reRnNiI zm!aaj;I*w19|A@| zX(r5PTpOTet7*IbCWJSiX~7|1HY5krk4pDO;N}|yu2AXkeZe;v4Lt*ay;I@%3mlH> 
zkw6znxzILoGY-SaNOS{!CGgOaTC1z|zwf2z7;xBJ%Z$UK6|aAqYne{IzsT@4wV<*g zPThOG=r%5BYP|HryFkepjC>cPuOS;1;T#T)=6a;-N?EclRh{3(in%Dpu9cD`VZb;x zA6co{Kc3qpIGJwS^tEm2Qq~2idKd?1Ts7xRzfO*reQbxiGr%gR0u;)7sf$83Np-DXmH3&_1oxqp__Rlvz`$zz`}gdg|3>Sm>4L0uYis*8+AfsmeKOdY+oV2c&eYbxGv}zwg$8MExfIH!rVMnzuzNoD`2X}Z3T=tFT1H$&bASR1UeJ) znxx{*M!ito%pTwH7u!Z$Lzr6UtAmTPJcnr)ZnZhR%fk@EQT>BuQM@DZ>jiOCH39-~ zgX0giIFR*Jd&HEVTl``&A|Dv85@I#m*)_Y0iCu|}WdocEb1ZJVS9%Z$+miESN6Ki_ z*BI(4$MPjDi(8cH z44#<}zBQcxWKL>)I@zA-8NSW<(B8tfpN|`o!MN+{w@Lc<^w-)pY5ldfP1IitS&32o zAJt!*TrKt2UH~D4p90%nD3`jtQMMnPlSFhy^GJ2ElcWXeVkv&9Zi2elN2O(oZOL{dl`3PeLt5OFa!-^hW7o=X zsEp-dHz%$$Ik!RjBB%_q2$wLi_;4KEm4t&)m>D3WJz}+0A4RR_W0^j~yeg7jABj+{ zYBn8`=1eZzE-<%i<7JLa+@bso=$>`fl)QQ?o^XL~PH3IQ8J>pKPTRHwXse@0*U4Ah zdX>y$CB5jI1N|k~%)nzF_1S39j$^x|{@K8k4j8IJ?yuGiRc) z;Pv{?FmU-7il)&{s`Gpn1%M0K>yVU~CA&x9RAeqTNmeFt|Xr8zA)ne4wi>g^=)kc_MV{nb|=nF zqju{b#){F@Yx}vaonoE)8N7Q;70*)_cY{WGAox z6Y4Or$+7KTFv>QuiMc`b^P+(*s&M}R-4+J z?Di>Sig$Wt22&vk#r+D|10)5eM%)@1>3si4a$HJENR(=EKgS{#Ys43Q-@Pas6UU`s zH5PJItpDssj08t_S=_eG$U}Dh8TCIZ&j5Fq&34nXrh+nz^oI5zJzaXUI;V5tSdkKU z%o%qK*_aw}y?doj+}C}7#=Tw)8=IRqlxO&DD8Gw9B*H^-_&>-KDI;xRqt&Sk5P^@Y zVumvorc`%kbyI;m zc}o#;g}5MU%XC<(o95Y;Mj!?H^5-4uLtw+>D^?lStgZI*u=+32PcSru*MAHPW|;cJ z^gtS0zN0$fJkJ>b6#+`~ZIJY@--s9t9IgYE_FD#iXRE&zftxvHqclv{E9m5atejMT zu$d#Va&mOq%+a^T^^x+)hEI*dt{ha?2pbj@ls0gp#8(I^PSzQSe+dJe)L89{`Ce8+ z{eBGRK<($VJy;1zHMf#rIV`dNauzv`^1Q!b#&*fa$>iSfrsuS}TUY&I1UtNPZ`f7; zqC}PX4bu8<{qob>#E&pR^*-DS#?U=oo}|QJtQ6oPUsC~YC8MFj98gtyVRchK_pmMO zQ>d_hTe$jwrK@b-iQ82rO+%*EhDpr2VI2OqG$fJ#A<*Sly)U3)xO4f`W#mZ_|1*`} zeyAu@i;8eICA1-4Z!h37QD)3#qFfa&Z6eq3t=A5#wh+%~988ca%#3yVJITAl<*niJ zmY<#d~Fht}LGZAm9v$^gH18 zQ}h<}a-$(^3kWkBg14a4;Uw~$fy{>~m-oaS7#y%ok+@=kDSVg0T5b1xv90xnB<0)q z%v1QvJ4L|r>coz(K&W3{hz)k&`6=ueex%j~19-koJb%WkTs#b$0(7Gc)cEM^0-`(6 z58WOoL3jAeM&!l;65*dgYFHqo@WA-@*O0ps#G6FWu`^|M^F4`G!*NTTkqP;z#LOoP&jtL6Le1D%{lq*@SrW=`vZ~y z+}}S@#-Maof;+fF#LGial(Sq5wHaU9h@}<7A_!T8v>1F&7@zJuy23i 
z!SZ_o7__sm${ONx>E@+7ENwA3Fw&UaDAl!mICe9-1MvxQKy>rH-oa1#?(q(G9c#c* zcvNFhDr$PWZ%le%op;anx?reQk?NN4v}g5B>=*1O;cr1UUf&zt&YAd$Z7B}4Vf&(< zx0s*etW=Y>*H^a~jR!+zJU^JqU9-7|avxIR0{g-0;Q6p&e>m)*t7^Mq%=Yr`yOVez zB7S#L`_Sr{VdxgM(`60~uW$b}m`fyVnn0v(9z=l_5S&r3-HnqE7Dxs!!@>*;G$)pr zpEB|m@LTxat;YHS1hp1$p}8cf`=)F%`qy$_8kaO(affWz--#N9qF`%T&e825?(_Kk zaEBE^yoM3->w9=$*6R$#)xa!hed0T{jz)Mkd0s_fz3U+!H~jNk1)aux=&0;TxvH~81%=w8 zoS{jmol2AkG@zobo-sBj!JY06Q{|=GcD9G1yevI5oP|ZfnHo;NDEisy7YnqK=wC;F zAN>_#cw`X1jr48uJd|4HV+pBMO$4@TBB@@+-NO0aA74p7-soMG8o0M@1-Xpp%{r15 z>gb0%uvwu8ftRcyR^KF4WV*skRge{)5x-VHPoaL~c#03)^cdb^ zRnq5~&|}PpMO8OlYg^hw`kz4iPD~HJ z3xTw}eGLL$z|Mlg>^|0+w*|9X=&r`Q?m)&aeCJ3fG$AQjGV7@$_YbI4-P6;#%;NhL zS(z8Y%VC!XRB+V>_s27E+kS;eUWU8&&@-WL+JAxMo=(dme1By^OiZHTIV^f!!06jD z7jx~E#OvHxwxA_BaJT?x3PxcNM{j}JC`i;DOBMw7^7KaaMAFahFJbSuo4a&iSFqfL zXg)^cCu_$L(s= z@qX>|yv`QsfA`ha<9NXAcYo=k4*^66z zMr0uGtRWZhcBpS<5!-4QV*9FL^&&*>+ZS=)o?;KIdWgLp3v;~2w(}`biDk)t-+M>_ z0W=Pqr6#@)NqCAaV-LLV-Hs*)?801Y6ef#$G`#IgMb6a;!JZDj-mgYC4Y%?B#NGT6fgg)cKc#QOF#Tu6<1rX}J0h#4*k z8?@5_;p^s%C!irPmJqrDad3mm0X?p&LZg~UsDfK0SQ_Hn_+Kym0~(?b z%VJ&J5N?J83HT&bLeZ)xMCu~NgI>qp>QUQY)Cwr)1QM)JD^?z8?6K%so^HO|t|V`8 zGer0X!YZFyYzXxRU$_ALv4G=Tg3Eq_XHjv9(cIpHh?pbaUfYs~kXDY7wo3B$@;VNY zo`JSaQ*oYe8*bu8Q>WMtJ9~etfe%Wx8cMOh5-tyKLeExte|f^u7ka5&__o6c8{ROQ zF>peGpXj!YLDq&r-U)c?J;knuEUDBDBN)yGp-cKUp_Z{0Lqc~D{y(u$a-aZ2VTY0Z zpE__RJ_)?hbwjsEHe|wiH28umQ8{>-OLpg{B85nD)VKnCUFJZZi!WB;dk%wJGasXU zlM9>q_p1F=QO0kFYGI@!QfQe{&C|f>3QvVRM*(X^m2SIPgWhmLj1G!3|r@|8%rqeV43*x62UEx#Pbp87zSsB84`^<3)3z(L<6PWzAS z3E8&~b<@Qcp2=>|BwN%T@h*h^IO7}6IFP^c4P@EKfuQ`PQs_k{0yzqK(}9-L>k)n2 zn1}HnCZhc*-j}eZka`(ox*@mOjf}~?B^=fTb}37n+<4Nrl)GWv8A<%wc2f`_CfNdt zdz;fEFWF0=-xV#rz7G5X;d2z>u47P)55l3hA^p7Y)Wp5>&NYb+bu}|e(!QILXwuHv zfghZIL{57GP5LL2eyOgflS%*1aNf)1`k~k#8!R-i+L*!BMj9|0{yci&C~pd`z*AzK z;Pme20~)Z#^D~sR&|z=rh`t_6@+3K6r4cy{41^YynhA|n7m(+kU|--xK%{13i^n$^ zm@Odo&ak?MzvL@TC!T1WUNlZG8fQ&jxG8ax=hBp12H)wNeM`BTlxu@bym*Ui4?g>K z$Zbi?X4&f2#?%6G>5%C}x+Jcy5V!%Ju*SP!Uk^Jpz7XYDIM>06d*t5N;kNsBVzU5? 
zh5~v13}Ry~Axj5~v1xHxI8HU=Wb;Rq!L;gQN$g`d@ZGA9U2!o`k$p^fJq6U%QlG)M zoqog<7LdAFyrn_ME-&@&XAkKCR-=QUqp*)Wfg3dGv+KZZDsy=OC@z}QT=|RB6YJ(p zN56pfG~F+To~nu5i|r2GM6i~9+x=*B^Y~h_T{aJ!U-Jm$ZE!?fmf_un>PJTjto1Ft z7D5K8z)AKQ?|#Yy%obw>u4pfj>C|(LUda=iI1HQ$Z}dL@hVbJA)D)yQI7fqeuSokx z>V0N&y(c;N(s7e7c0SK1t(EnOdRzl0vEGwd?^0DXD|?l!UA%0J3RW{y3I+_+@N`|n znYxBKAC=0T01TlsrHqaJ_z310`4BKaVZ5nU-+7<`+Wo_Q=O{~*FGgAHz8=aF>+7s6 z@xJ!b(!pm@9!b6?N%I7k?<-}=^nIc%(|zwM%S_*^%2MEaQCSjvJCvo=w?$bhd>f?2 zTM6oX_bEH%Il{L_*-5qUMr9{oe3i<+j&`@Qvm5vdl%3tfH$&Okk9?WRUQ4@E+4s_( zpzH@}AEfMuX}2qT9qnC}-AB7s*&As;j#KF_ueXWzMrFtHOZbi`yOs9C$}SHq?o;+C z`qwJEo%Sb`J(l+M$}UeJu2uE~`ma*<4K- zsO;-#-=*w_Y2U2ub+kXC>^|DpDSIRBwk9gN&8_nY0Hf`*hly#!LU1w12Mb1+>>G zJFm3(URCxA+V?7ZCGF2B`!d=$DElhf?^E_Qw69V2b+q58?CWW-RQ3(DyOn)2?FF>s zLrUt0z!z?QOJ8sAO6>SrHog;rePd;c30C+}|08_jl@DGa>$qL{oT+@K!{?at2~$2Z z;q!&^`Ei_C8rJ-_B?2|nQeJ8lA}&x74>6*5f*n3(%4eJMiG@#r^7)JM!G)NPS16y` zl}`eEvXswq<&y-TH03i-`9SGU$0X&Gr+f<5WHlHlk;-R?^pOJ) zJkL>{-IXUtB6!-BXRz|*kOa>z%Cj-Wg;kAUY`Iq9^H}T?&!N%hW15hL)~!W=njiJdiizMqdUT} zKgr^GMi|ZQMA+h z9MaFnxjSsVPpf{x4ixG~Vabb^IB}`tmtWjxQnw^lMKzI&OFRCFB}wxb~g zn>c@(2tplDv(}9w2B~Ik)o!9oMev_<9Si~?fU`IBd=`xxvtbra%TEplaoGDKz7Li@zX$@egQTACO3Up= zZq&}i>QH|F@d~Ql`v&wGQ8%i={SHj4C;*$VC;)GzwfYM3`~HbLl)7@XP~-VzRf~Gkw3@E%^Q%5IZ246`HreLk__qZLn$cxe8L=Jq@RNHN^ zcTmbqgP_dR%U4l!Vns$P{G}ASFG=VM{i>FcL7|>OylZFdg`ek3En4>U<`F)d3dfff z!=a}VS@)BA_#-{xGb~AksNjrkSFV0v5JuLR zTtEG~TE=9c1|JM)coeSUKL}knQ)Gaybi5zsea}Ay7uJhd_#2ULS`o55_~uIis&~1} zH!wR^Ne`U%4x=!@-Oh*{kLHLd5Xa#BZ!PYUQY_jTC&VoBTNz{ppdprhiFbI6{$+@4 z^mohu7vX=!;v`<2vb(&WVKJQP+nvli0qB7E+>s&pPGQJQ-y=FC-VD)c;(a%_O`}3^ zX8n-Y>RWSTn;aRc-c)+2;3PrS&sKsJ5DUqdAbk&>ERJh-26EYsOABWfg8|;IFZeF; zL&9u3&7`T~t~hB{-i|L!pn$O_PPDb6oOqHY=YMz-EY*e0FuxpSp)Dm!k?H4iT9TfC z6x!eL=c^Keg{ZaNvfYcnwtTN%h?fV3W5c1edf^DX7~+~8s<#}me|ZEFApQOTcOKRI z1EId@we7p&71Xn89_0J0C<}27XUCf)xe@)Mm zhqsMS_v-f|rV?_gJPRfP*LD-WrH>GG!!5G>$`K0A7PAG;*ovtlONXC~cycfp{a4SeQ`Qb0RQ3I6wh4&s?1OH7B2ROL#)e2Mv78MgowmBW^U|CwSg 
zI9?>?!zUMclp(ZO%z>*IF-ZM2GKZGD7+xqta*(c2T&v@iz&{jqV&>0yxd@*v!}D~R zc9iCk`7_*(a!X{X#HSR0%u%H%L`X5}!w$}nN`>`Y_?AmIV_pT^6z*=6OX^vR<;bx} z#wii`@MC_gQBe&*&Ry#^4McUbt z>U>>B5ul=$ykNd6WG-Tw*tfJy*?gPJ8`@mn$=ermYhCXw!{mE1^2vgm^}{+)A&S%F z8SxzaA88*ZJtp7co9QrdGWmBh3UZtIuDD_He;CFUv%j(Sb7U`3bg{RcEHArkgKWnp zPA&C0Oq}1GpNY@u)#p5vb@CEVUZ;u=!!T-?_@91x>{;xy|4e!3HkVh5HmcgdHOQ+B zd3wOVB5>`*o@DmT7LJ+tk7&;C^zqLI=EZ1f6)4w@K1>doF1ARs7F$X)>wBoUpt-!0 z)prSMmGz}YRrc=^Ia+Yk%)=PTQ7cd8V74O@Dtd;tX-~w8IdXhZV-CkJww;sB0cKt; z?P;iAdm_Fk<~J8y=O{={6v+C>kz<=0Bd$7SIVQgUO#5kFQm*8GP8+V6`M2~_)(S@r zH^yC#w*^SY8EO`&GI_2>aB|AO?ppW}?^l1P-(HxzYNKp7oaZ^?w=O~98}Ca9fbj-w3c;!?P2=Pbb4jWea<7-t_fJE$~ljjegaHI@8* zuGV9at;pZH2IZy%m%k+KR;wE5_4rJMhpS+*O1mSpJ6^l7wr6Y^ypT?{|glN$AsMI8R@m?7Cy z*(248G99*AXd!GR3X4#^reqO=Z)|$yO_X6@PU8_5%m(C;Y`? z98`ZNhMnl+Le2cz-x6`+U+ef)qUFybPVdh@hwDYlUjV-0PUp`|Z~v_goKXLq0sn_v zfTgPWPrW+#_y0F*knnm-%U^)y^#08BC${K+)aUQ@0G8Ahe`bkvwb;Y;J9~ehVilup z{L`bhC04j-u`3&Ol(qaStFEtJvh;@EEnB|Adt=SYn^vv9`S-W{;g5e>bL(xluf5~W zyVl)(&%O8E|K|tRKlsqYkNoAYf7|frV~;=aLdi$Ms-+RCAgAYGC^6@91`szPx`2346ziRyYn{U7S z{^+r$AAbDlIG&#rmVm&Z;C3O__MsiZI(D*!M|AGeHS&yZQPJId^z3zJZ+oA<{bKqL z7#Mrj*>Qu;89XF@=&<1<&OPt^gbPL{jv9U8n53Lpvvc$E=M)rPc~#Ne;*!#<%gWuJ zYvxtVzxJ{jmtT=R^VH$jEm*i{@qe5C|91KRGXEW8lbtE!#=BC}CZuOvG%<71|G6K;4G;5&D;Kpm(b@dN!OB+d8o2$}5-Z!dZ))RS)y9qca;?(e z+{XR;Htr_^TE~B~jhow=TvOM=a1(5No+)dbR#=={GH;4I$6Z)bO#i9nd1Vu4U70uA zom!k?sm*}7haQ>SyJl3Q3UO;{8V-#15(VKIeBACDpbm;kpHD% ztvOr_!wS*~>14V}sQ+(QS$RWcRV5a5fj3m%aD8Rh;9#uimM&ci#>dB3W>zl4>MV9( zLgutetkXDoRV|m6o2co^uJ87>V`iT>MW7qxs9l zcM1OnVZO`IfBbq@KJ&2x<{yq$<8O%cAEJE()|nYH8;6E;IPs;!Z*IyjDVsHa7h$bT znPpr5iSk>!q&fB!#G(AOZA=GN!Te^UEQG%_xhgil5|o?Y+vd+9*XNWSsvjkKWhkpa zyT>6eSNZ%JFxUAEN`sp#ZqrxMBOog#{!m*g@umEjc;a9);?9xjxJoziAir`X-xMCo zO>U@i&2pFzr6H8e@VmT)o^kLg^z+LC=8RSD=ZEaXG~kg7j45CB&mjx3{rxh4KPA6r z8B-A6+D$x&AIlX|V(e+opKj(yI+zdfW_s*$yGG8PJCYK6^~=R?kUaxGJ1z6^lK~kS zLvnM6C`TrmjYBmZ;mn(zH+NQEb3?hP1$44#r#>NW!$o;jX!Ae-hXl9{J@ 
zie<$+O3Mma2N^kKvzzNVZ&sNnr>vQ=X*tExiE;(X63Cx<3*^cCV2Y7`zCd2gM~usx zC6Go;$#HwiauI;vxUxcR178X}W#!E-)JI-!ah{(?mPffVN{VHyDIN`T+{}mRnMNR< zKpcT^$*+HPxpU3*TkJ*N~AbCHI% zhCy|(%%>$@Szb=AJ-@7EE>fs$`t^}fAZzj}5AiTumK6M0ygDIyDZ=qv02yy4xOVtjS-BV692KFx> zC`)SL+nhW%*jDJa&o0T$vzO$XDHKA3(F-eNPgL%dic(BeisOo-NH=$G4tNW$vIvkj za4_m&AZ_IZd9!lhSyWi88LjDT$tSODZecN4fmk3Nm37Y5o)RDq9{W}2uV#ikv;4AA z>E{)?3&_fHcTSni{U6GwWZ&!3s&vapI%9+fBf7$$zuOFx>}}YjXOu} zVEV_GB@gU3(4M2IVD0#)MhPdz?=ujrD=I3PC+i!j+f;+9fD_WS^qi9>I3a#c*&NT@ zykd8`JvQ$e^t{4+d*0kq_x!U?n!FXBv-0d_r_2GO=m&Ytjff4!J~wY}i5^1xivE~s z`eL;}Xot<;4Y~ak=ahHuHqI)^j2OSA*89Lk=}QM%Kw|R98C+MD&$t+kYyJ%E^KyYz zKBK&FZfQ|owmF{6s5o!LjLCULc{$~IGm_I&r^w)RJ=zQ|%~hj};@PkL&oV9T0z|uE z0htk9tfEV9nCLR1gXl8ZcCce@htf5Wp-~2VPqYxv!BoDfa(e;x_x#*Ck9bQ5i|ByD zEnP!J*AX_+by#>~r@F9%p|#f1keTf=WxUBKD>_s}@9H3;-GI66MKs=1i=G^LkmV7l zb#`c0gSkzY7tu3V^c)*idq!#G%&zf5nDJ)z!P$+zEqGaag^6D74x(3XsOXi|Ui3n~ zy@o~CbvqbY8(|9)whaM0*iBxm|NUZVYW*Vujcb90)CVLp972AuRSV zVb6^a_N;JWAJIwJ%X&6;uZuc(Ms3$pOV2RTGuI}1W_1!hN5DQgs_~4vt_M5U0$*D@ zVFOQXpxrjPh1Q!fWjr=Sh|e{x(SyOC++E<&PSMMN+{R)Pj3xL9E}901;xjUDg%(D2?PV$_9TUdWI_efOpTb7TiqU4LnbXbI*rN z^(`dEFZ!GgqDSr|(IYEU^cXQw^suA}MUBEI360RwvxDgAjsnfyK=T=*r=_!qtm|?x zq84FnPj0j$)KfI-E*f0e$DNBPO=d9!(3ygm-wS>@H-m( z*8QQ@(#Nbk#*1wAFSj4V&2^J{0vG!F&+E2l{=FVci-PPi@WZa}Z9qTmf^7 ziEDT}5$+BV;h-~oSXiy4eSl~`EI8g0WktC#;C~dtqK0)Xl{`Sc;76SrQ9@^$ZQ$qG zLJY}m$u}aRornOw5yLq0w8Xm!i=xM2Zh9LPfuBXzX$Q3LP}#mMvQM=Wp~KphM)=vy zx;qa`#Uz-&X#A(bz5wPW?T+XYgfTBfbQz{-W_`a?g708(+Dy{}_Q^128dauMmf4du-WXMP#=)+&ol7HThG!~Vv8z$mG8Qbn7Gobw zXOmXhE;^!JplzZ)W!o%^tZiH7t?09Nz_48qMsy2ByXk~B(@}Jr+@;avzRCNci=mAj z<}zK5YX5#?@i5g2%=q_I3Q#+REr)U`htQX5ZQ2p87130pOoT69YpUDxuUn_ zQgO0!_o8xtt=vOvak2i6mU5$qv=?W(pA={2J|WH=@whnCvO(}&TGCmHF;dlGh-?QY zul~9YdE70;dXrb2U&{jGqkCJ?XCS*RYncz34_8osZ$9jLKjs=3GtUS$XUCgmE`Csm zKfvtNWk&b0iq2U!(HY}H=Q266&tzHgD9ghBz6)c98)HUpNA!F2{hPb$aRcK8#-AvR z_vpWJK2bdVGkCi8q&z(sggn_N(eG{O_j3G~^E<{djET`06ZLppXX3l)86ob1c~9f3 
z+FvKcMOz0x(Pb^|kNxqEXNA}W^Mj7>g#F{KXlL&r5I;E{Mq=Cre>l9(l+Nfgop0`tX=!f}?Puxy*~dF`p6Zy=+ua#_i$LEB z7hRG&-5lCzt!sBMs5U^i3$}k%b_DnlE^IG%C`Gz+d%M+G-3@ipWq7Buw&`Tu$g$0a zbe)9lG2q-OKy(@w8gB^=U>l3KbY{JV0!P4Ms$NxDE%Uv}kG4?G-PfDAwbWJgpa9X` zvRt<5c+_Qdf6O!P5uk6l=w^u*=9-`sOt$n66TRILqIYgM`k77iZkgLNqla}AcDDy> z7VPwAVbwbVrwuK|Lan}3pP2~(qr_t@wpPPfYwA}K4tpKO%uX$SH^Z;uLo-GjXjAci zJ9n)3@Y}g##fSgtbH}|c{>O1^ERcIAE4e~Cz6hs@ML|>Y{N>}2vnZLlSTB;Lq(dL& zh4_27OzkNv!{@1az}Ff)EvL{uuB2=VK8uU;blSOyS)5mf576Y2;_{NBykvJ-Q3^h1 zA?-UuILgZl=M?K4vPxQt6rZpJBAg7FiCKi`9su8rl3Y(w-Z+SkF3On;#Ki-FlPDwd zlkK-;R2Z1yDRLK%o$t=eDw$T8o0nVw8Q>o@Ma}UXLR`hXr?_)nkhD-LWu==t11S`d zLA3?><41&wS0+9ePAzN7(@7_b!-oydErKXB#WKZRhJUHRdx|T4 znjsSKcV2~C^rxRnno=Pe0#b{?fSjVjYul3SD9)WyT3DQ1;z7Azf?JdTWtJ3@ zYvM&pqy5}(OU|}#=R!P0o?Q%4hUDyPGLCBT&`7~Bwm(O1E{KCv{7m|&HmK?wJRT7| z4HYEYq*y?1sSt&f_>nch_R1O&SE?F<$jnp9COb{M3@nTC&_mi(a&6F*qC7~lcLq*| z#FQBb;b!i%nk0(o7JTT>K7rOe$HKOhQhZHeSxGUaNN96t6Xnx{J=p99StZSVLwq3n z+exB}OD*Gy%E_RdB1p6^qYEe~*ClkxMq5d|{Cdlavel^(BCCKB-&C}L>5Q)$ws=tW z+RVH%DeEnsjk-r2qSkIBMOlTjuhRV6)7|P+NVpk#Y8ZeZ zHG`izj(-T_)BN`PjRU`N;GgG!+2@Ac^Zq%v#zDSyx&7t*SE-av{TO}*jM?At`6li7 zOcYTt!7v?R+QGDk34sZNv1&*^y1DtuFrJMu!NP?md$-{<}(+@4uc6n^n}5JOyW;E7{3yRI9F+S zJs@eRhGF;;80JHL)n~!nsoi%262JRkSl$CLjJFAfH0_6B_!}?`j{;N3ACqTNdma9( zG!L>P>_73dw8^xsyG`&3WAncn{~t~NkK*%>=HE7cTX);?e{=uuoVJ;2QR5KjlxAWk2Qb-;c|GBga!8?p4^F z`n1r01Ka;hnWt<5dKG`lCSd>1;PYS3^?!MjC_PJkpIHKexa<=5VF5Su`uW%Q(QN(m z`^s|n{R&pyRd<}Wq(zkyu1Eas_r7(nT?L=;f7LI_D@nsAPcyzl$KQzf2ktp?Pj~V0 zJ-&M`J9Ykx?v^PgVZk}+tt;NLi|n_qd~4W=@n1RoUfmBu3|*u!gL*O}*)LYbaWx*h zH7wPzNW%gRb2Xf);dBi%HFRm1pkb_rJv9v1Fib9He24hT$6ensj*@ z`t#VYS^wLu_YH0>*J)e8e>gPY)^rEybsfLi z_#3Q^(D^qGQ}K@^|9jwK{rgVm>(lxEqmaBIu5Ee5@sK|;A()@K|N0NBf-;z0IQNI` zZSgkD6R^JnxE~+zjj$gCOza}W+E9xa1vnFC1onOjCr1kLD&k}Tz6*gu82sx1KZ9W$ z5sf_#m|BZP*a3fr*$2Dmj-4JDYXHgxd=#bt`8)ymz4mVc{A&+|5A*;D==72C3Ae#8 z|60I)y_A0p;8bnrTPXo&LLPzqg8&ol_*RE~6yQfN39uglwDbXufrtY*tS@kY{|La_ zXa_!P0Vl^mHy!-503V@&o$!kOLL7vBCg58zM__*k@ZtfGFSN6W>3~~d!jOM0;Q6sQ 
zzX5v!U>XeZPY3)X1Q1#9=h=!s#0fEjc0dmrKn3g-fCq-)ObhIX0s9RV;u+Xu0H?w* zpXq>mVBUfMUcgUaNLM_j*i0D4AuQE)!i_NG|3Sd-V44sg7k5MyjKFybJ76D}AlTyp zhr(E4$FTyj0)~0s2smJbqBj;W2Znj(0$!`_w*bDO?S}z7KnN6ue8K=zV8|;M;4y7) z0{q?izyon=0Y8CZo?R1!xOybcC&AwhxCw?h)B=7B!#Ljo?j8*}5#sCx9Cx7*czi~< z0AJMheSnk3DE}XZ?1%P{Dm?z;A4!9Ag7SK5s{=pUrUxb});0WLg zSg7=b|31KQr=pASD9i%*HvuM%1OBiRUID{8nhE&Dc=RjyHv+D9sd6^}-j<3s0ROds z8`D%in*oE<(H`J$1-vvv;XDIyaTeMq{3`)(pNf9Xw17Kc$g5p|6E0Tq(*b);L){~e z9dH^<5Zf)_gD_#RKLY5Ojy{XCG)}*b%<~FeXpYPWUNI zI{bZrz4H}acEB7M;sZTiVu7|3rWYuF5-uoI?U``GRjLoB1CA}ijvw-L0w&A_-$Kx* z083%mmI&8t`xAg~!mzA&09O|yAH=x@FsekwCmaRCx+W|s1>cwt;MXv+T>(CDHTnqr zISws*Y&`NPQk0c&BHC*kjZr|SI{!0%Rqk7y5suiOOMVSg3y z;Z+zvVSfbh8yLw$z@4|N_OlD{lC=uA8GvK%K%R)71o(Rx#=iydTNsx69bo01s%;RK zuETL7#3wxaZUN!0MGOLb1||k}!lCyGF$ne%fSv!0eg`}B$B9ue%(LhLoIzW!Xz>A# zc~F&`1o$LO1>$c8-1IPb2m3+5l)oT8?3sXnfMHo{0N>a4I>2s!Rry2#7Q(O}6#=&U zo9ahaz*$dWzChYsz=)?6U0ngU!dNlS)&h=rM&U{LCQKCkgPuix+dvEKcEGG17{_2I zjM{~B4X_gqg5lUgSOY_Tt^(}voSMh&fSY$?tU!Fi8}^|6z`hJ{6AbyY8L(zAaD)FU zLKylJwtrsPqX4gld53WT--GeNUI+NlKHvcRBY< zF@T+4K_7v=D_|uI=U>7VuPXl=0eifTx`%&H!1*vsU|$88^akn(_DsNThta-aj|co1 zWgk9fJ{?ULVv>iR@x8MKA90-!M2Z}(zSaiQmfTe&@fXe^@4fj;2!|r&45gI5Rlop2VMtD;TJM zW=K0cZ*d=d5bp36cLm<(DL~$4{JO1hgGZ*@h4+{*^(!wHVRo1# z7|b&G|J1+1g9nT8^pwI*mn%U zd*HUh@fFg-J#QY}z5`HKE#2@ngcbjNWgO3!;qw+WT0ci(W4CtCJ78^M`uu$4&);Ik z-*+HC{{YIDZkd06ei8f;XEF0Xj&S}EN5*G*{)&#leH`T<*e%^Cg!#YE^5yRs%EvJ5%PyS_?hRsp0#%00g#YS z{#XzHrK$@VfL{~cKQA2?k-qN$V%&)6$p69Hh9;;8(%tmZf&2PCvQNfkIMRLY7-cje zd6saT(0`;BWofCPM~G{xb`o3TAw%F8r_xmibHE zm^t%Xrhm24DUSE9U0fGfIi?by1AZnDb_`>S)NT3cBOV6KzAEkNiH7lHwy|jWz-^aJ%@&J|d$&C~mHBO1 z$@~`^#uvwqZ4j~*Ux!!1diOTMLwXw`P#e$=)}7p#aTuS*rual2B}qPEAo`VG|Ni|& zTwEOVcf^a)qelye1NwwhQzh>vO`0SwzW8Es$t9PF%P+rNEXo}x=FFKRuDtR}F?a4< zQC0@s6c|4iE?g+?_KXn^Uprbnz9>;t&q)zCUX?6vDRGF~%aX*?RU^e;DpSR6RkOsV zHKk(rO%ueyd#@6)TMaR2haraUHpG}+hB)^HLyX#Qh#4;#BKcK_$=))=r1uR`4m0&b zLtOT$A!gSZV%Fz|sH&fUfBMs(#I3j9D%P%DE7q-BC+@%hep$a8Hf#`2 
zKKY~^%bt1W8S%=_4dVK*4e?~7A)b5gIr04S&x@B{dPy8QbV$7V>Z{_dx8A}FgYSy3 zu<(548$*2X!3VNk)YsRG@4os>eDs4M8XFtMv17+%TOnHmhTvN=6W<4DG}cu(x3U3^ z2c5lBBigCaS&TM@ipz{iV!m;;SYxafPa2PkSB*pFcq+Ogecw#XucbnaUWISy4fwV~ z`Z7 z9@2k=^z}&p71AH|OFv`)*6;W=N4kaO314o#M*x+I^oL^!Z4KPHSv_X^|C`+n)| zNPiyErz8C=q_05wl}LXd(m#&$TakVb(xc4P?;!ohNZ;6;eh@UyOh68W$l-U$;eO<> z3psp%92z4G@y$>}d^gDuN3S--vDJq7;ZZ~UbjWB<-vjB-LHaRBKN;y~BmFfQwIL3yHpDxR8lvG)OL{BPlLL`R-vjCUApHQOAB6N5Mi`JS8sh3n zhPd%+`X>_Vm5eCT-=2kM@C7@NHc%qdPH^09*hXgA*OF1dp{tV z%KX#PQpcoa_v#WIg>XcO)e#0{BmEc$Ge}O$?$M=7vEZWRIH1J9QEkIitVrDQlW#+}(cQWt>|vd@Y= zCmr-N|72N&$+hfJQ9yw%@K5GHG%*7#&PYkikVHrvP5uM_Z0FDr+%yHu+l|7rolWoDagii4(Y0eCAd0cNvWv_9peEi0YbU20u z+JfW9WM^b&q^4vjEHQ+rUuyD*vB^V%ETI?a0P-r;iQ!6zXJ0TRynQ>1CBg*^iDE|D z7&N4oaL4)Q#t&~FXziSez~r%L%Ogf4C%1%WXODGEOzadAd>%?j&SvhZ&X)4CliBVj zCUy)6Nn&{N7&KQ%aP^P%n~fHbn$#hp)5PrLj4_F5Yi20Pdz2LR(P%U0dYIl8ym1z*tKhy*uQ_jTpJwu@O|;&haZX~M~;Ywh6eHZ z=bwvjzWz$C4Zi>Wd-3znKa17J4Dl$|3Wt938)t{~!9;gHCOY)XRkJbC&BsJ{6DGPp ziwNThG1S;9CK-Fh)y6AgwegO4)c6?lcgs8@hG0U>j6!-$wyOpq{dq{Ag!E}he;LwW zjr6}m`a6*RF{FPU>ECajXMS6!fPKHMQ~p=3Q@RWF>)yRPIuW*|qxpaLsQyES3^|)N zd-v{9-TR!~JG@h;GqE0y?i<^`|G@r3BEoHFb%eWnpMC=eB5p)@?;&wR;zZBBXU7id z6*?pW5ju73cxKN&{bPr82#q~cd)V!ML&AGUhFC)SBJPkOz1jye|1+Wjy1><;UugUGarh4C)4xYxPlO^# zFSt>DT+glp`^60z5;qX|qx`eu;$q_B`ou~4dg5otuTLN86pL-#mqJ9Wm8G!&p6o|| zGY;b;o#-uPf&$BPF%!lj?D+p*`UN00R_yZH0aoo21_QF>|B2tJsQ-geS3TXK}D406qe8svBPwPF*y$FzvV~%FkgJ#oYs%L4ToX#x8HTwUDGh{ zJpI*IUw!iBmtTI0x#z<#zWCx}!1urV?z^w>U3%o5ciwq)&z?PAoU@J_HEI-EK5JiI z6=6R3x{~$wkMNIm&5615fB^&g0g_h~FQ3jY>axo&8*?6hh};8m#GCLPZEV}N&G_`w zPYukc2IMXjP#N#P|Gt6mZ-e2W!N528yFQ<<9<+Y&^2;yZxqbWg#bd{g9gcihPd#C} z!9>6i7uIX2Hnfw+3_Am+7fd(EDuy>THT?)XCeG8wKN|c#fIc{ad7OX#`2&a_@RIny z@WKlQWTXc0Gd6GDEb)d6$@t`xPmF^H4;qju$aqYHH3I#QAkAHmKKkgwO`A3?#Ii4? 
zy1IH|X=$l*<;s@x#0-DW-?IB>vt^UXIUo~R#*JL{&duFiP-?YE_yG?@l#9%)CL`|j(n zzivXi`w4mae*E!A;G{m30A~fxE)0lJ+5+Bn5XTvn8bHg;Ip9%jN{CDo$c_ZqvFWSTs(z0X6 z4oL&s2joo#>i}{l*QOa+vq)?8;0|B!$^ABFcKkxKOcJr!}o%QU50Ts zXc+LUVf1~yt@ZrPzqAe^T9jFIMk4Xn1 zdC7J}{-2Bv@|`?q9kJc8{d3&KH=9IgBJiK^wxVGy?oVC_Tt|Wi$dipy&_(U1UHma;80PrT_K*3krI7#F;xXoyE&fSlZj-6o@R96g<1H1Wq+yc#n8b5Rc~+0Qt(oQw{( zcmFyu+Z7@C&v4EQOv`b`|99~b^ik6Au3@Bth7{1S;S2v5r?h{KklRi{O!N;w{9s_7 zX=O-*X-qm!RwpJc{&f8G(@(~sEq56AFGw?PyLycA$C4z8*rlHs#?+4tV^W=AWRQk8 zb$>>i8D;ioj!Edx9Fqp1AFw~4gZ9Ke{R{TlPC|pkAN74BWEXugt}h`i91jUi8VK3$ z*{<8tVYV-}b+)DV_dQ`eFmIG`uV=JzXZeMa1`=?2yZ{NPn<3Hwj&hsbK2m3GQ1G7HZALusw3Gv6c zXVm^F*LZmTDC5ubMjQ8lhC4t5e>oUGXMl$37~`|>WHf0o`!i`U`!mO+p?kphorZB1 zX#oH2TMT0&#wq;vpA3JD_p<$?udU>Ih<(na!=#0E!1=(xPE1+|i9g55C#xLBU$09v z9s&*bgND1^3jgx23}a>kXz&@vG|<5Q%rPkqW0KjQNrTy+NrTy+IVQzEE%86r7Jv3L z&M#PJH5<}knznS9eZ`*++DQX{n-*VSJhot@@z-k;jRz}0!!@Ig?;Afe-avOI4cVXp zdu+xuzy8cIiT&AuzQ8dl0b>&9XLC#%xXm#7Dg4`xOVPw1>-!teJ@?$cSfeZ<9UK=4 z*=9%w*F1!zg?^lC&2<^WINx&3z~57q7Z@8CU0^&48vX(r)`Ny;ZY?ps`wETcG=BA< zb1v#6f%7wHFxMCZx2nW#`OopfZ2u!ijvT-K2$MKgWB1*KpkAT*bMK zzpd9N7+XNYlZ!?g8x|xQkAQ}@{rL|VlUC1l7&l%u)>u}UY*f!lHWueP4X@`?7ucUk1N$?_q+2nrl7>~J;mWbb?+QRezSCHgn<8l-o?K&aem2(_=VE@t z?}YZxF@tj$*AX=9*RMAoe)wU7>pIhz^+EV+{RDDZo6v* zEgNI}sWi#>J!n`78diXYrD#W0c}|Ha$0Uc}8sl;F`~TD4`3FZ;U2!}VC`_lVOlLY1 zX4=V4{!p}o#M;`Z0m6V93dMA6Gm+9lh>--6@M9r_0g{MVKtMpTMNsp0mqOMid6X(p zl?EK847Qa4q|~-1T04_sQKtN8YLVOT=kDI{5<(J!o#`LmnVXy4eed3L&pqedbI$j^ zU6sr1KlD5Kzw^#JuZITc)Txm6Ij}Cld2~0Ik3YU(-r2v|ytZbnX?i%*yp+^uY?9OG zt+GkXVM}rjk4ugon>EI)DoM-%LamP>e|w_pGqP6~jd{J3?DKLcYp(rwgU&I0yS%*o z5VWF?=s;Cfm7POSzB}PVI537 z9=E<1K8^ANrlO*veGc8tqq{YM9&FO{*rcBHS#zLt!RxNx8Va9u)@HwKr@7&V8_d+H zQ;p7M82q|$IBe$6pKsQzS!4BDDIaTh@P)dX3%qAx9Mn4xqdR_efiOL9iq{wx%b|C zjm{RCwQJWJVM=D>#*LO9;(Peb9M}ukCy*;DF${CSo+5|rh4ARr)#J>O?a+VT9NJoO z$;Xf`;A5cA*rd&s-+}xd<1gj@RtXAy2>Fq$Z&2L-O`U_Omkt|!X7rniMmk}5Jz>HG z>-Xq9jJ2Q84y~*ab8waS;5V{IoP%%2TA&Z;wAE*=1N*bnXKYeXpMSf%7C&%{<8^oK 
z?YG~)QD=H=9)z)zx@>et(dbN)QBKR;c;k&mzO|(VI`B7wl{vWT=0Qt;RAkEajcxti zGtk7R3qPWTw?&p%%}*6e;_hWXR$yUh7>=ZC+ELZ{A++KNuQIox#9O-5(e>^iV#AoKV>ZXVDJ{m4A?BsQQ$ zAJ7r>!L11`GKg>D_8wwy;%CXrRpmd?E#zOa6w!IHW9X?6akFH}61xt}0lTW)vdPcS zx9_tC%)wRW;No6lN_=eO9y)1}A?D`fi}&%V@VRA69#`(^Hk~tEM-0&y`jLAovP8a_ zIYApaXPv#X`a5>)Sd*QdZFD-tt^=QQkDCLnlPk9-PEXJeXm>4gM9hF(;RmvIF25oA zf!jW>+M8nSr{=vz5J0+kxEy`nyLYdh1Nun@&RtDIk;4{}#dCeTKqwo~_zjNnK)7aQ(_dwtG?S143 z#HUu-wjXMbTq4?E(R(XIG`L4^@tdL0i~ocDmp(qmYxIp4T3C1Ng5=ct62eKWZI!Ou z{S8}#d?Rzvj~<|xtO;_$8j#Q8b65DEepw^dgZ*6d{h8>-Zh{l`wpjX|oYDqmm^ru# zZOjW9LKjx7SYc$d4EuDjK%1-FgPp+^QIH>EA}X{quiDyLa*Uq7mgs6}XO5PB=l_e} z%bA0-i*7D%4z%za+oZkQ#tFz2du@=v(8_D*XKkDh0j`7GBTMic9g@x1Bs%&+dprF> z`Nu{C*Q2X7VP53er%ah*bS~G*x8r%R^w;Kp zq+d4v)&9Mlxxjx~HxF7j4_bJNeL)wftPSt64%klglr_T^fhB-JF&Fr*G5TU>E>Ya^ zKTE%pfB6d6!zXHYYl44||HHh%<*3LO?STUatlfnN=qvIL{pf+@ajS;ER+$OTU?}`_ z`A^x2_WTcXVDH3#!Tuo!>__aK@R*nr-m?z)0&X36jm^i_V#gFySX@JM{-kMV9#;N? zve8-2-D{_}!4_HPHP^5f$UJ;zj>x*?=%RdtR;?BLDrYRNW^aPNp30YW0cQ~Ymz6O% z`@lJkYc8fnn@iKeg%?r=nTw+rE^N^W-P4Iy|Jm1{T-!g{U%zClr(ZOl$~CsGKi7YN z?i#w+>wU?#IjsS@ z75|osJYUgQ)Ji|3sw}uUuNo}822tsp4^($u7RE|dESRA^|Jy}HMcE`l~*WG?kNu%@STV~*&ng%>_ymWd|CVyKI4DN4}?{Ld4jcp z@o;_tj0$`Q+#igMa;3@Z{WM|##h~Si7iO~mvab>6GA@2JJW0;q`c8=+6Mwsl*mo*CVgEYyx3|pphx05hK|Y)^{gm*olL_4P zKG6W~%NYoA`(VCK9>IIT8MD;B*-<9b*i#h07Rb-dgAPXv>yHl%E$lxpJym9N+F;J) z*2CNT^33>e5UVH0#H$Hxl!x!0C@zb3D z=4jy#^2+1_W6EcO`RZ9Z^N@5bNCWbi*GV3Ae&6Z@XK9efPB>i};|otIP+lP~NC#sh z=d={?HZT(KLU5{GKb_MfW_a==W9Bsr^Aru7=RqgHY$Y4bUR7VGS z6L>)(zYf*}#sO9a&IG;zP6oyVwgE;4zO9ec*0}vO3}9aisX5(!T!qX2Kx~ z^xS10wt#pa|D1V43p}7b5-YK|A$S$oEtnzLb8QkYL?^&UzzS=GQ8^wkSkOj;-Kw<9 zKf0x7d&_12XQGSP9pVCFJ9vWafgfjE|7Z?9k!x`R^3%v87(TgarxV~zV13|Z)kzu{ zEVns<<4@7pfBqNmwA-LG{7YmIJ;djyg$^qGph5?HVja3GM_=(RW(rofTr_}}b}E=B<)z;8JR;B=xZdE~mJtaa6-;bSXb zQLcPT8GKl`Zk@Gz*j@HN>_4&qKafSnh87As@xi;t%&B*eOD8(XqjUm!q;xf~lYioI z@*L!J;W6=*bkW9J=q5C|^$D&GYw_BKTyy%pW9GxZ9={}y(h2Zu{$RO zkI_YUo1IV2G4H=8=j4*CbzsA&QKN2EzIPGx5RN%}`t<3)(evA8)~s1J53DniHh1A- 
zA9xNw;6L#lvH(AzzjL8aU3WyX72In(jL!?D*xjoSR%NDo-;M%U!h^{+_{$T*aP-K`gStF zSl9)ptJ)_n2$z{5Sq{rbx~M@p^VCyJl?JcG`}M`e#RtH6$kFS8f5TdUuW4Vl63Ma27OO}(f10NNs}g7+Mt2T zHRu4>AvgF**bB}jDii4`cNSQT8NGl$3Vd)m zUi@V+75sM1;b{%p+p(d~j_>3dzEYus%6RNY`i=x^8#t@V+>trf3tlsCY_f7M+pcnK zJ3iyc)>!-B6&=n8)-wl2&pX;&F@7OAC*<)O#yj4g)}|0+pqn3-Lx9k(0lq(gT z)uQh%i{x=^ZEfxI!f4LRMtz`jA&+m{wylHR5eD#~`k4LPbI;lLb|ZxD zG}!fYdl>61n|?ev7~?){ zEi#P{=lmI`Q`oYpikpa;$RQk&Tk{Q_>r`?6aSHeWW1(xM*!3NZ{Z)Gwc$Yv?0b-j|6br$Zir zm<|51PV7PWL6nz%e3$jNh(Cxuh<)&Ni8tz>US;TmI6OymuzvdP8H;mc!{9^9s}ZxS zW~7ZL@!j#;i5rNWmKNP($GETAm^#I^dd}NaS6ACJBH(JQCGz6 zpJZXOFTg+T3)XId$#H&wd!5YvqH2^m*>cqKlNjoe5_ zqOyYU@UoJ^d9%Z_ADlnDaCX@p1>v%rD{e^-l@*kiJXjQVzp*(o{g#_E(no#abD>by z{CNw)_E%?n+1ss&`*dfxsBnRPFmsXZC!~@!uW0T9jZ*YL?!1x;{ifoKqHz0%!H@5% z)UEvf=)|ImqS8<)e@CVlgvXax%$`*=FFmxNWK3b9e(ZB(`hx|f;iB|WStHu}?s#*1 z|A8wm8$D}82U@d6v`<>MXN_?D9F;pJcg(nP<0svdpP&5a)7PIIKT=xseb&-5FIpGf z5#19#96b>Y#fHU->nrP5)^Dn>tKU<9u>M4SYkgb&;D*$OVGWrLISu&@#SN7WD;qX7 z)HUpB(2s;$pCQE?;+?K*t4oOwj)&sI;u-PGcy>G|o*U1PPm34Fy@nl)dm8sO9&9|^ zc%reTv9--)5 z9)F*I&_C>-@LT*=|Fqxci+Fvpq^BmuOZ7tDFfYT)^s>DiFW1ZWrg_EQ9Iw)=@m6~4 zy-nKo>%1M_9&ev_&^zp%@LIf9@3hzEr9=ithD1^$p~$dEMkF(m9m$F0M)D)mBE^vt zk+w)`G&7nX-DG*RFM3dXYKgW*Q(}W-Lt?2hIF=F1jAh4iV)?OYvEtaASY@mxwlcOp P24SC;PdV`a$$|d@Alj9? literal 0 HcmV?d00001 diff --git a/lib/python3.4/site-packages/pip/_vendor/distlib/util.py b/lib/python3.4/site-packages/pip/_vendor/distlib/util.py new file mode 100644 index 0000000..1aa4cdd --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/distlib/util.py @@ -0,0 +1,1593 @@ +# +# Copyright (C) 2012-2016 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. 
+# +import codecs +from collections import deque +import contextlib +import csv +from glob import iglob as std_iglob +import io +import json +import logging +import os +import py_compile +import re +import shutil +import socket +import ssl +import subprocess +import sys +import tarfile +import tempfile +import textwrap + +try: + import threading +except ImportError: + import dummy_threading as threading +import time + +from . import DistlibException +from .compat import (string_types, text_type, shutil, raw_input, StringIO, + cache_from_source, urlopen, urljoin, httplib, xmlrpclib, + splittype, HTTPHandler, HTTPSHandler as BaseHTTPSHandler, + BaseConfigurator, valid_ident, Container, configparser, + URLError, match_hostname, CertificateError, ZipFile) + +logger = logging.getLogger(__name__) + +# +# Requirement parsing code for name + optional constraints + optional extras +# +# e.g. 'foo >= 1.2, < 2.0 [bar, baz]' +# +# The regex can seem a bit hairy, so we build it up out of smaller pieces +# which are manageable. +# + +COMMA = r'\s*,\s*' +COMMA_RE = re.compile(COMMA) + +IDENT = r'(\w|[.-])+' +EXTRA_IDENT = r'(\*|:(\*|\w+):|' + IDENT + ')' +VERSPEC = IDENT + r'\*?' 
+ +RELOP = '([<>=!~]=)|[<>]' + +# +# The first relop is optional - if absent, will be taken as '~=' +# +BARE_CONSTRAINTS = ('(' + RELOP + r')?\s*(' + VERSPEC + ')(' + COMMA + '(' + + RELOP + r')\s*(' + VERSPEC + '))*') + +DIRECT_REF = '(from\s+(?P.*))' + +# +# Either the bare constraints or the bare constraints in parentheses +# +CONSTRAINTS = (r'\(\s*(?P' + BARE_CONSTRAINTS + '|' + DIRECT_REF + + r')\s*\)|(?P' + BARE_CONSTRAINTS + '\s*)') + +EXTRA_LIST = EXTRA_IDENT + '(' + COMMA + EXTRA_IDENT + ')*' +EXTRAS = r'\[\s*(?P' + EXTRA_LIST + r')?\s*\]' +REQUIREMENT = ('(?P' + IDENT + r')\s*(' + EXTRAS + r'\s*)?(\s*' + + CONSTRAINTS + ')?$') +REQUIREMENT_RE = re.compile(REQUIREMENT) + +# +# Used to scan through the constraints +# +RELOP_IDENT = '(?P' + RELOP + r')\s*(?P' + VERSPEC + ')' +RELOP_IDENT_RE = re.compile(RELOP_IDENT) + +def parse_requirement(s): + + def get_constraint(m): + d = m.groupdict() + return d['op'], d['vn'] + + result = None + m = REQUIREMENT_RE.match(s) + if m: + d = m.groupdict() + name = d['dn'] + cons = d['c1'] or d['c2'] + if not d['diref']: + url = None + else: + # direct reference + cons = None + url = d['diref'].strip() + if not cons: + cons = None + constr = '' + rs = d['dn'] + else: + if cons[0] not in '<>!=': + cons = '~=' + cons + iterator = RELOP_IDENT_RE.finditer(cons) + cons = [get_constraint(m) for m in iterator] + rs = '%s (%s)' % (name, ', '.join(['%s %s' % con for con in cons])) + if not d['ex']: + extras = None + else: + extras = COMMA_RE.split(d['ex']) + result = Container(name=name, constraints=cons, extras=extras, + requirement=rs, source=s, url=url) + return result + + +def get_resources_dests(resources_root, rules): + """Find destinations for resources files""" + + def get_rel_path(base, path): + # normalizes and returns a lstripped-/-separated path + base = base.replace(os.path.sep, '/') + path = path.replace(os.path.sep, '/') + assert path.startswith(base) + return path[len(base):].lstrip('/') + + + destinations = {} + for 
base, suffix, dest in rules: + prefix = os.path.join(resources_root, base) + for abs_base in iglob(prefix): + abs_glob = os.path.join(abs_base, suffix) + for abs_path in iglob(abs_glob): + resource_file = get_rel_path(resources_root, abs_path) + if dest is None: # remove the entry if it was here + destinations.pop(resource_file, None) + else: + rel_path = get_rel_path(abs_base, abs_path) + rel_dest = dest.replace(os.path.sep, '/').rstrip('/') + destinations[resource_file] = rel_dest + '/' + rel_path + return destinations + + +def in_venv(): + if hasattr(sys, 'real_prefix'): + # virtualenv venvs + result = True + else: + # PEP 405 venvs + result = sys.prefix != getattr(sys, 'base_prefix', sys.prefix) + return result + + +def get_executable(): +# The __PYVENV_LAUNCHER__ dance is apparently no longer needed, as +# changes to the stub launcher mean that sys.executable always points +# to the stub on OS X +# if sys.platform == 'darwin' and ('__PYVENV_LAUNCHER__' +# in os.environ): +# result = os.environ['__PYVENV_LAUNCHER__'] +# else: +# result = sys.executable +# return result + return os.path.normcase(sys.executable) + + +def proceed(prompt, allowed_chars, error_prompt=None, default=None): + p = prompt + while True: + s = raw_input(p) + p = prompt + if not s and default: + s = default + if s: + c = s[0].lower() + if c in allowed_chars: + break + if error_prompt: + p = '%c: %s\n%s' % (c, error_prompt, prompt) + return c + + +def extract_by_key(d, keys): + if isinstance(keys, string_types): + keys = keys.split() + result = {} + for key in keys: + if key in d: + result[key] = d[key] + return result + +def read_exports(stream): + if sys.version_info[0] >= 3: + # needs to be a text stream + stream = codecs.getreader('utf-8')(stream) + # Try to load as JSON, falling back on legacy format + data = stream.read() + stream = StringIO(data) + try: + jdata = json.load(stream) + result = jdata['extensions']['python.exports']['exports'] + for group, entries in result.items(): + for 
k, v in entries.items(): + s = '%s = %s' % (k, v) + entry = get_export_entry(s) + assert entry is not None + entries[k] = entry + return result + except Exception: + stream.seek(0, 0) + + def read_stream(cp, stream): + if hasattr(cp, 'read_file'): + cp.read_file(stream) + else: + cp.readfp(stream) + + cp = configparser.ConfigParser() + try: + read_stream(cp, stream) + except configparser.MissingSectionHeaderError: + stream.close() + data = textwrap.dedent(data) + stream = StringIO(data) + read_stream(cp, stream) + + result = {} + for key in cp.sections(): + result[key] = entries = {} + for name, value in cp.items(key): + s = '%s = %s' % (name, value) + entry = get_export_entry(s) + assert entry is not None + #entry.dist = self + entries[name] = entry + return result + + +def write_exports(exports, stream): + if sys.version_info[0] >= 3: + # needs to be a text stream + stream = codecs.getwriter('utf-8')(stream) + cp = configparser.ConfigParser() + for k, v in exports.items(): + # TODO check k, v for valid values + cp.add_section(k) + for entry in v.values(): + if entry.suffix is None: + s = entry.prefix + else: + s = '%s:%s' % (entry.prefix, entry.suffix) + if entry.flags: + s = '%s [%s]' % (s, ', '.join(entry.flags)) + cp.set(k, entry.name, s) + cp.write(stream) + + +@contextlib.contextmanager +def tempdir(): + td = tempfile.mkdtemp() + try: + yield td + finally: + shutil.rmtree(td) + +@contextlib.contextmanager +def chdir(d): + cwd = os.getcwd() + try: + os.chdir(d) + yield + finally: + os.chdir(cwd) + + +@contextlib.contextmanager +def socket_timeout(seconds=15): + cto = socket.getdefaulttimeout() + try: + socket.setdefaulttimeout(seconds) + yield + finally: + socket.setdefaulttimeout(cto) + + +class cached_property(object): + def __init__(self, func): + self.func = func + #for attr in ('__name__', '__module__', '__doc__'): + # setattr(self, attr, getattr(func, attr, None)) + + def __get__(self, obj, cls=None): + if obj is None: + return self + value = 
self.func(obj) + object.__setattr__(obj, self.func.__name__, value) + #obj.__dict__[self.func.__name__] = value = self.func(obj) + return value + +def convert_path(pathname): + """Return 'pathname' as a name that will work on the native filesystem. + + The path is split on '/' and put back together again using the current + directory separator. Needed because filenames in the setup script are + always supplied in Unix style, and have to be converted to the local + convention before we can actually use them in the filesystem. Raises + ValueError on non-Unix-ish systems if 'pathname' either starts or + ends with a slash. + """ + if os.sep == '/': + return pathname + if not pathname: + return pathname + if pathname[0] == '/': + raise ValueError("path '%s' cannot be absolute" % pathname) + if pathname[-1] == '/': + raise ValueError("path '%s' cannot end with '/'" % pathname) + + paths = pathname.split('/') + while os.curdir in paths: + paths.remove(os.curdir) + if not paths: + return os.curdir + return os.path.join(*paths) + + +class FileOperator(object): + def __init__(self, dry_run=False): + self.dry_run = dry_run + self.ensured = set() + self._init_record() + + def _init_record(self): + self.record = False + self.files_written = set() + self.dirs_created = set() + + def record_as_written(self, path): + if self.record: + self.files_written.add(path) + + def newer(self, source, target): + """Tell if the target is newer than the source. + + Returns true if 'source' exists and is more recently modified than + 'target', or if 'source' exists and 'target' doesn't. + + Returns false if both exist and 'target' is the same age or younger + than 'source'. Raise PackagingFileError if 'source' does not exist. + + Note that this test is not very accurate: files created in the same + second will have the same "age". 
+ """ + if not os.path.exists(source): + raise DistlibException("file '%r' does not exist" % + os.path.abspath(source)) + if not os.path.exists(target): + return True + + return os.stat(source).st_mtime > os.stat(target).st_mtime + + def copy_file(self, infile, outfile, check=True): + """Copy a file respecting dry-run and force flags. + """ + self.ensure_dir(os.path.dirname(outfile)) + logger.info('Copying %s to %s', infile, outfile) + if not self.dry_run: + msg = None + if check: + if os.path.islink(outfile): + msg = '%s is a symlink' % outfile + elif os.path.exists(outfile) and not os.path.isfile(outfile): + msg = '%s is a non-regular file' % outfile + if msg: + raise ValueError(msg + ' which would be overwritten') + shutil.copyfile(infile, outfile) + self.record_as_written(outfile) + + def copy_stream(self, instream, outfile, encoding=None): + assert not os.path.isdir(outfile) + self.ensure_dir(os.path.dirname(outfile)) + logger.info('Copying stream %s to %s', instream, outfile) + if not self.dry_run: + if encoding is None: + outstream = open(outfile, 'wb') + else: + outstream = codecs.open(outfile, 'w', encoding=encoding) + try: + shutil.copyfileobj(instream, outstream) + finally: + outstream.close() + self.record_as_written(outfile) + + def write_binary_file(self, path, data): + self.ensure_dir(os.path.dirname(path)) + if not self.dry_run: + with open(path, 'wb') as f: + f.write(data) + self.record_as_written(path) + + def write_text_file(self, path, data, encoding): + self.ensure_dir(os.path.dirname(path)) + if not self.dry_run: + with open(path, 'wb') as f: + f.write(data.encode(encoding)) + self.record_as_written(path) + + def set_mode(self, bits, mask, files): + if os.name == 'posix' or (os.name == 'java' and os._name == 'posix'): + # Set the executable bits (owner, group, and world) on + # all the files specified. 
+ for f in files: + if self.dry_run: + logger.info("changing mode of %s", f) + else: + mode = (os.stat(f).st_mode | bits) & mask + logger.info("changing mode of %s to %o", f, mode) + os.chmod(f, mode) + + set_executable_mode = lambda s, f: s.set_mode(0o555, 0o7777, f) + + def ensure_dir(self, path): + path = os.path.abspath(path) + if path not in self.ensured and not os.path.exists(path): + self.ensured.add(path) + d, f = os.path.split(path) + self.ensure_dir(d) + logger.info('Creating %s' % path) + if not self.dry_run: + os.mkdir(path) + if self.record: + self.dirs_created.add(path) + + def byte_compile(self, path, optimize=False, force=False, prefix=None): + dpath = cache_from_source(path, not optimize) + logger.info('Byte-compiling %s to %s', path, dpath) + if not self.dry_run: + if force or self.newer(path, dpath): + if not prefix: + diagpath = None + else: + assert path.startswith(prefix) + diagpath = path[len(prefix):] + py_compile.compile(path, dpath, diagpath, True) # raise error + self.record_as_written(dpath) + return dpath + + def ensure_removed(self, path): + if os.path.exists(path): + if os.path.isdir(path) and not os.path.islink(path): + logger.debug('Removing directory tree at %s', path) + if not self.dry_run: + shutil.rmtree(path) + if self.record: + if path in self.dirs_created: + self.dirs_created.remove(path) + else: + if os.path.islink(path): + s = 'link' + else: + s = 'file' + logger.debug('Removing %s %s', s, path) + if not self.dry_run: + os.remove(path) + if self.record: + if path in self.files_written: + self.files_written.remove(path) + + def is_writable(self, path): + result = False + while not result: + if os.path.exists(path): + result = os.access(path, os.W_OK) + break + parent = os.path.dirname(path) + if parent == path: + break + path = parent + return result + + def commit(self): + """ + Commit recorded changes, turn off recording, return + changes. 
+ """ + assert self.record + result = self.files_written, self.dirs_created + self._init_record() + return result + + def rollback(self): + if not self.dry_run: + for f in list(self.files_written): + if os.path.exists(f): + os.remove(f) + # dirs should all be empty now, except perhaps for + # __pycache__ subdirs + # reverse so that subdirs appear before their parents + dirs = sorted(self.dirs_created, reverse=True) + for d in dirs: + flist = os.listdir(d) + if flist: + assert flist == ['__pycache__'] + sd = os.path.join(d, flist[0]) + os.rmdir(sd) + os.rmdir(d) # should fail if non-empty + self._init_record() + +def resolve(module_name, dotted_path): + if module_name in sys.modules: + mod = sys.modules[module_name] + else: + mod = __import__(module_name) + if dotted_path is None: + result = mod + else: + parts = dotted_path.split('.') + result = getattr(mod, parts.pop(0)) + for p in parts: + result = getattr(result, p) + return result + + +class ExportEntry(object): + def __init__(self, name, prefix, suffix, flags): + self.name = name + self.prefix = prefix + self.suffix = suffix + self.flags = flags + + @cached_property + def value(self): + return resolve(self.prefix, self.suffix) + + def __repr__(self): + return '' % (self.name, self.prefix, + self.suffix, self.flags) + + def __eq__(self, other): + if not isinstance(other, ExportEntry): + result = False + else: + result = (self.name == other.name and + self.prefix == other.prefix and + self.suffix == other.suffix and + self.flags == other.flags) + return result + + __hash__ = object.__hash__ + + +ENTRY_RE = re.compile(r'''(?P(\w|[-.])+) + \s*=\s*(?P(\w+)([:\.]\w+)*) + \s*(\[\s*(?P\w+(=\w+)?(,\s*\w+(=\w+)?)*)\s*\])? 
+ ''', re.VERBOSE) + + +def get_export_entry(specification): + m = ENTRY_RE.search(specification) + if not m: + result = None + if '[' in specification or ']' in specification: + raise DistlibException('Invalid specification ' + '%r' % specification) + else: + d = m.groupdict() + name = d['name'] + path = d['callable'] + colons = path.count(':') + if colons == 0: + prefix, suffix = path, None + else: + if colons != 1: + raise DistlibException('Invalid specification ' + '%r' % specification) + prefix, suffix = path.split(':') + flags = d['flags'] + if flags is None: + if '[' in specification or ']' in specification: + raise DistlibException('Invalid specification ' + '%r' % specification) + flags = [] + else: + flags = [f.strip() for f in flags.split(',')] + result = ExportEntry(name, prefix, suffix, flags) + return result + + +def get_cache_base(suffix=None): + """ + Return the default base location for distlib caches. If the directory does + not exist, it is created. Use the suffix provided for the base directory, + and default to '.distlib' if it isn't provided. + + On Windows, if LOCALAPPDATA is defined in the environment, then it is + assumed to be a directory, and will be the parent directory of the result. + On POSIX, and on Windows if LOCALAPPDATA is not defined, the user's home + directory - using os.expanduser('~') - will be the parent directory of + the result. + + The result is just the directory '.distlib' in the parent directory as + determined above, or with the name specified with ``suffix``. 
+ """ + if suffix is None: + suffix = '.distlib' + if os.name == 'nt' and 'LOCALAPPDATA' in os.environ: + result = os.path.expandvars('$localappdata') + else: + # Assume posix, or old Windows + result = os.path.expanduser('~') + # we use 'isdir' instead of 'exists', because we want to + # fail if there's a file with that name + if os.path.isdir(result): + usable = os.access(result, os.W_OK) + if not usable: + logger.warning('Directory exists but is not writable: %s', result) + else: + try: + os.makedirs(result) + usable = True + except OSError: + logger.warning('Unable to create %s', result, exc_info=True) + usable = False + if not usable: + result = tempfile.mkdtemp() + logger.warning('Default location unusable, using %s', result) + return os.path.join(result, suffix) + + +def path_to_cache_dir(path): + """ + Convert an absolute path to a directory name for use in a cache. + + The algorithm used is: + + #. On Windows, any ``':'`` in the drive is replaced with ``'---'``. + #. Any occurrence of ``os.sep`` is replaced with ``'--'``. + #. ``'.cache'`` is appended. 
+ """ + d, p = os.path.splitdrive(os.path.abspath(path)) + if d: + d = d.replace(':', '---') + p = p.replace(os.sep, '--') + return d + p + '.cache' + + +def ensure_slash(s): + if not s.endswith('/'): + return s + '/' + return s + + +def parse_credentials(netloc): + username = password = None + if '@' in netloc: + prefix, netloc = netloc.split('@', 1) + if ':' not in prefix: + username = prefix + else: + username, password = prefix.split(':', 1) + return username, password, netloc + + +def get_process_umask(): + result = os.umask(0o22) + os.umask(result) + return result + +def is_string_sequence(seq): + result = True + i = None + for i, s in enumerate(seq): + if not isinstance(s, string_types): + result = False + break + assert i is not None + return result + +PROJECT_NAME_AND_VERSION = re.compile('([a-z0-9_]+([.-][a-z_][a-z0-9_]*)*)-' + '([a-z0-9_.+-]+)', re.I) +PYTHON_VERSION = re.compile(r'-py(\d\.?\d?)') + + +def split_filename(filename, project_name=None): + """ + Extract name, version, python version from a filename (no extension) + + Return name, version, pyver or None + """ + result = None + pyver = None + m = PYTHON_VERSION.search(filename) + if m: + pyver = m.group(1) + filename = filename[:m.start()] + if project_name and len(filename) > len(project_name) + 1: + m = re.match(re.escape(project_name) + r'\b', filename) + if m: + n = m.end() + result = filename[:n], filename[n + 1:], pyver + if result is None: + m = PROJECT_NAME_AND_VERSION.match(filename) + if m: + result = m.group(1), m.group(3), pyver + return result + +# Allow spaces in name because of legacy dists like "Twisted Core" +NAME_VERSION_RE = re.compile(r'(?P[\w .-]+)\s*' + r'\(\s*(?P[^\s)]+)\)$') + +def parse_name_and_version(p): + """ + A utility method used to get name and version from a string. + + From e.g. a Provides-Dist value. + + :param p: A value in a form 'foo (1.0)' + :return: The name and version as a tuple. 
+ """ + m = NAME_VERSION_RE.match(p) + if not m: + raise DistlibException('Ill-formed name/version string: \'%s\'' % p) + d = m.groupdict() + return d['name'].strip().lower(), d['ver'] + +def get_extras(requested, available): + result = set() + requested = set(requested or []) + available = set(available or []) + if '*' in requested: + requested.remove('*') + result |= available + for r in requested: + if r == '-': + result.add(r) + elif r.startswith('-'): + unwanted = r[1:] + if unwanted not in available: + logger.warning('undeclared extra: %s' % unwanted) + if unwanted in result: + result.remove(unwanted) + else: + if r not in available: + logger.warning('undeclared extra: %s' % r) + result.add(r) + return result +# +# Extended metadata functionality +# + +def _get_external_data(url): + result = {} + try: + # urlopen might fail if it runs into redirections, + # because of Python issue #13696. Fixed in locators + # using a custom redirect handler. + resp = urlopen(url) + headers = resp.info() + if headers.get('Content-Type') != 'application/json': + logger.debug('Unexpected response for JSON request') + else: + reader = codecs.getreader('utf-8')(resp) + #data = reader.read().decode('utf-8') + #result = json.loads(data) + result = json.load(reader) + except Exception as e: + logger.exception('Failed to get external data for %s: %s', url, e) + return result + +_external_data_base_url = 'https://www.red-dove.com/pypi/projects/' + +def get_project_data(name): + url = '%s/%s/project.json' % (name[0].upper(), name) + url = urljoin(_external_data_base_url, url) + result = _get_external_data(url) + return result + +def get_package_data(name, version): + url = '%s/%s/package-%s.json' % (name[0].upper(), name, version) + url = urljoin(_external_data_base_url, url) + return _get_external_data(url) + + +class Cache(object): + """ + A class implementing a cache for resources that need to live in the file system + e.g. shared libraries. 
This class was moved from resources to here because it + could be used by other modules, e.g. the wheel module. + """ + + def __init__(self, base): + """ + Initialise an instance. + + :param base: The base directory where the cache should be located. + """ + # we use 'isdir' instead of 'exists', because we want to + # fail if there's a file with that name + if not os.path.isdir(base): + os.makedirs(base) + if (os.stat(base).st_mode & 0o77) != 0: + logger.warning('Directory \'%s\' is not private', base) + self.base = os.path.abspath(os.path.normpath(base)) + + def prefix_to_dir(self, prefix): + """ + Converts a resource prefix to a directory name in the cache. + """ + return path_to_cache_dir(prefix) + + def clear(self): + """ + Clear the cache. + """ + not_removed = [] + for fn in os.listdir(self.base): + fn = os.path.join(self.base, fn) + try: + if os.path.islink(fn) or os.path.isfile(fn): + os.remove(fn) + elif os.path.isdir(fn): + shutil.rmtree(fn) + except Exception: + not_removed.append(fn) + return not_removed + + +class EventMixin(object): + """ + A very simple publish/subscribe system. + """ + def __init__(self): + self._subscribers = {} + + def add(self, event, subscriber, append=True): + """ + Add a subscriber for an event. + + :param event: The name of an event. + :param subscriber: The subscriber to be added (and called when the + event is published). + :param append: Whether to append or prepend the subscriber to an + existing subscriber list for the event. + """ + subs = self._subscribers + if event not in subs: + subs[event] = deque([subscriber]) + else: + sq = subs[event] + if append: + sq.append(subscriber) + else: + sq.appendleft(subscriber) + + def remove(self, event, subscriber): + """ + Remove a subscriber for an event. + + :param event: The name of an event. + :param subscriber: The subscriber to be removed. 
+ """ + subs = self._subscribers + if event not in subs: + raise ValueError('No subscribers: %r' % event) + subs[event].remove(subscriber) + + def get_subscribers(self, event): + """ + Return an iterator for the subscribers for an event. + :param event: The event to return subscribers for. + """ + return iter(self._subscribers.get(event, ())) + + def publish(self, event, *args, **kwargs): + """ + Publish a event and return a list of values returned by its + subscribers. + + :param event: The event to publish. + :param args: The positional arguments to pass to the event's + subscribers. + :param kwargs: The keyword arguments to pass to the event's + subscribers. + """ + result = [] + for subscriber in self.get_subscribers(event): + try: + value = subscriber(event, *args, **kwargs) + except Exception: + logger.exception('Exception during event publication') + value = None + result.append(value) + logger.debug('publish %s: args = %s, kwargs = %s, result = %s', + event, args, kwargs, result) + return result + +# +# Simple sequencing +# +class Sequencer(object): + def __init__(self): + self._preds = {} + self._succs = {} + self._nodes = set() # nodes with no preds/succs + + def add_node(self, node): + self._nodes.add(node) + + def remove_node(self, node, edges=False): + if node in self._nodes: + self._nodes.remove(node) + if edges: + for p in set(self._preds.get(node, ())): + self.remove(p, node) + for s in set(self._succs.get(node, ())): + self.remove(node, s) + # Remove empties + for k, v in list(self._preds.items()): + if not v: + del self._preds[k] + for k, v in list(self._succs.items()): + if not v: + del self._succs[k] + + def add(self, pred, succ): + assert pred != succ + self._preds.setdefault(succ, set()).add(pred) + self._succs.setdefault(pred, set()).add(succ) + + def remove(self, pred, succ): + assert pred != succ + try: + preds = self._preds[succ] + succs = self._succs[pred] + except KeyError: + raise ValueError('%r not a successor of anything' % succ) + 
try: + preds.remove(pred) + succs.remove(succ) + except KeyError: + raise ValueError('%r not a successor of %r' % (succ, pred)) + + def is_step(self, step): + return (step in self._preds or step in self._succs or + step in self._nodes) + + def get_steps(self, final): + if not self.is_step(final): + raise ValueError('Unknown: %r' % final) + result = [] + todo = [] + seen = set() + todo.append(final) + while todo: + step = todo.pop(0) + if step in seen: + # if a step was already seen, + # move it to the end (so it will appear earlier + # when reversed on return) ... but not for the + # final step, as that would be confusing for + # users + if step != final: + result.remove(step) + result.append(step) + else: + seen.add(step) + result.append(step) + preds = self._preds.get(step, ()) + todo.extend(preds) + return reversed(result) + + @property + def strong_connections(self): + #http://en.wikipedia.org/wiki/Tarjan%27s_strongly_connected_components_algorithm + index_counter = [0] + stack = [] + lowlinks = {} + index = {} + result = [] + + graph = self._succs + + def strongconnect(node): + # set the depth index for this node to the smallest unused index + index[node] = index_counter[0] + lowlinks[node] = index_counter[0] + index_counter[0] += 1 + stack.append(node) + + # Consider successors + try: + successors = graph[node] + except Exception: + successors = [] + for successor in successors: + if successor not in lowlinks: + # Successor has not yet been visited + strongconnect(successor) + lowlinks[node] = min(lowlinks[node],lowlinks[successor]) + elif successor in stack: + # the successor is in the stack and hence in the current + # strongly connected component (SCC) + lowlinks[node] = min(lowlinks[node],index[successor]) + + # If `node` is a root node, pop the stack and generate an SCC + if lowlinks[node] == index[node]: + connected_component = [] + + while True: + successor = stack.pop() + connected_component.append(successor) + if successor == node: break + component 
= tuple(connected_component) + # storing the result + result.append(component) + + for node in graph: + if node not in lowlinks: + strongconnect(node) + + return result + + @property + def dot(self): + result = ['digraph G {'] + for succ in self._preds: + preds = self._preds[succ] + for pred in preds: + result.append(' %s -> %s;' % (pred, succ)) + for node in self._nodes: + result.append(' %s;' % node) + result.append('}') + return '\n'.join(result) + +# +# Unarchiving functionality for zip, tar, tgz, tbz, whl +# + +ARCHIVE_EXTENSIONS = ('.tar.gz', '.tar.bz2', '.tar', '.zip', + '.tgz', '.tbz', '.whl') + +def unarchive(archive_filename, dest_dir, format=None, check=True): + + def check_path(path): + if not isinstance(path, text_type): + path = path.decode('utf-8') + p = os.path.abspath(os.path.join(dest_dir, path)) + if not p.startswith(dest_dir) or p[plen] != os.sep: + raise ValueError('path outside destination: %r' % p) + + dest_dir = os.path.abspath(dest_dir) + plen = len(dest_dir) + archive = None + if format is None: + if archive_filename.endswith(('.zip', '.whl')): + format = 'zip' + elif archive_filename.endswith(('.tar.gz', '.tgz')): + format = 'tgz' + mode = 'r:gz' + elif archive_filename.endswith(('.tar.bz2', '.tbz')): + format = 'tbz' + mode = 'r:bz2' + elif archive_filename.endswith('.tar'): + format = 'tar' + mode = 'r' + else: + raise ValueError('Unknown format for %r' % archive_filename) + try: + if format == 'zip': + archive = ZipFile(archive_filename, 'r') + if check: + names = archive.namelist() + for name in names: + check_path(name) + else: + archive = tarfile.open(archive_filename, mode) + if check: + names = archive.getnames() + for name in names: + check_path(name) + if format != 'zip' and sys.version_info[0] < 3: + # See Python issue 17153. 
If the dest path contains Unicode, + # tarfile extraction fails on Python 2.x if a member path name + # contains non-ASCII characters - it leads to an implicit + # bytes -> unicode conversion using ASCII to decode. + for tarinfo in archive.getmembers(): + if not isinstance(tarinfo.name, text_type): + tarinfo.name = tarinfo.name.decode('utf-8') + archive.extractall(dest_dir) + + finally: + if archive: + archive.close() + + +def zip_dir(directory): + """zip a directory tree into a BytesIO object""" + result = io.BytesIO() + dlen = len(directory) + with ZipFile(result, "w") as zf: + for root, dirs, files in os.walk(directory): + for name in files: + full = os.path.join(root, name) + rel = root[dlen:] + dest = os.path.join(rel, name) + zf.write(full, dest) + return result + +# +# Simple progress bar +# + +UNITS = ('', 'K', 'M', 'G','T','P') + + +class Progress(object): + unknown = 'UNKNOWN' + + def __init__(self, minval=0, maxval=100): + assert maxval is None or maxval >= minval + self.min = self.cur = minval + self.max = maxval + self.started = None + self.elapsed = 0 + self.done = False + + def update(self, curval): + assert self.min <= curval + assert self.max is None or curval <= self.max + self.cur = curval + now = time.time() + if self.started is None: + self.started = now + else: + self.elapsed = now - self.started + + def increment(self, incr): + assert incr >= 0 + self.update(self.cur + incr) + + def start(self): + self.update(self.min) + return self + + def stop(self): + if self.max is not None: + self.update(self.max) + self.done = True + + @property + def maximum(self): + return self.unknown if self.max is None else self.max + + @property + def percentage(self): + if self.done: + result = '100 %' + elif self.max is None: + result = ' ?? 
%' + else: + v = 100.0 * (self.cur - self.min) / (self.max - self.min) + result = '%3d %%' % v + return result + + def format_duration(self, duration): + if (duration <= 0) and self.max is None or self.cur == self.min: + result = '??:??:??' + #elif duration < 1: + # result = '--:--:--' + else: + result = time.strftime('%H:%M:%S', time.gmtime(duration)) + return result + + @property + def ETA(self): + if self.done: + prefix = 'Done' + t = self.elapsed + #import pdb; pdb.set_trace() + else: + prefix = 'ETA ' + if self.max is None: + t = -1 + elif self.elapsed == 0 or (self.cur == self.min): + t = 0 + else: + #import pdb; pdb.set_trace() + t = float(self.max - self.min) + t /= self.cur - self.min + t = (t - 1) * self.elapsed + return '%s: %s' % (prefix, self.format_duration(t)) + + @property + def speed(self): + if self.elapsed == 0: + result = 0.0 + else: + result = (self.cur - self.min) / self.elapsed + for unit in UNITS: + if result < 1000: + break + result /= 1000.0 + return '%d %sB/s' % (result, unit) + +# +# Glob functionality +# + +RICH_GLOB = re.compile(r'\{([^}]*)\}') +_CHECK_RECURSIVE_GLOB = re.compile(r'[^/\\,{]\*\*|\*\*[^/\\,}]') +_CHECK_MISMATCH_SET = re.compile(r'^[^{]*\}|\{[^}]*$') + + +def iglob(path_glob): + """Extended globbing function that supports ** and {opt1,opt2,opt3}.""" + if _CHECK_RECURSIVE_GLOB.search(path_glob): + msg = """invalid glob %r: recursive glob "**" must be used alone""" + raise ValueError(msg % path_glob) + if _CHECK_MISMATCH_SET.search(path_glob): + msg = """invalid glob %r: mismatching set marker '{' or '}'""" + raise ValueError(msg % path_glob) + return _iglob(path_glob) + + +def _iglob(path_glob): + rich_path_glob = RICH_GLOB.split(path_glob, 1) + if len(rich_path_glob) > 1: + assert len(rich_path_glob) == 3, rich_path_glob + prefix, set, suffix = rich_path_glob + for item in set.split(','): + for path in _iglob(''.join((prefix, item, suffix))): + yield path + else: + if '**' not in path_glob: + for item in 
std_iglob(path_glob): + yield item + else: + prefix, radical = path_glob.split('**', 1) + if prefix == '': + prefix = '.' + if radical == '': + radical = '*' + else: + # we support both + radical = radical.lstrip('/') + radical = radical.lstrip('\\') + for path, dir, files in os.walk(prefix): + path = os.path.normpath(path) + for fn in _iglob(os.path.join(path, radical)): + yield fn + + + +# +# HTTPSConnection which verifies certificates/matches domains +# + +class HTTPSConnection(httplib.HTTPSConnection): + ca_certs = None # set this to the path to the certs file (.pem) + check_domain = True # only used if ca_certs is not None + + # noinspection PyPropertyAccess + def connect(self): + sock = socket.create_connection((self.host, self.port), self.timeout) + if getattr(self, '_tunnel_host', False): + self.sock = sock + self._tunnel() + + if not hasattr(ssl, 'SSLContext'): + # For 2.x + if self.ca_certs: + cert_reqs = ssl.CERT_REQUIRED + else: + cert_reqs = ssl.CERT_NONE + self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file, + cert_reqs=cert_reqs, + ssl_version=ssl.PROTOCOL_SSLv23, + ca_certs=self.ca_certs) + else: + context = ssl.SSLContext(ssl.PROTOCOL_SSLv23) + context.options |= ssl.OP_NO_SSLv2 + if self.cert_file: + context.load_cert_chain(self.cert_file, self.key_file) + kwargs = {} + if self.ca_certs: + context.verify_mode = ssl.CERT_REQUIRED + context.load_verify_locations(cafile=self.ca_certs) + if getattr(ssl, 'HAS_SNI', False): + kwargs['server_hostname'] = self.host + self.sock = context.wrap_socket(sock, **kwargs) + if self.ca_certs and self.check_domain: + try: + match_hostname(self.sock.getpeercert(), self.host) + logger.debug('Host verified: %s', self.host) + except CertificateError: + self.sock.shutdown(socket.SHUT_RDWR) + self.sock.close() + raise + +class HTTPSHandler(BaseHTTPSHandler): + def __init__(self, ca_certs, check_domain=True): + BaseHTTPSHandler.__init__(self) + self.ca_certs = ca_certs + self.check_domain = check_domain + + 
def _conn_maker(self, *args, **kwargs): + """ + This is called to create a connection instance. Normally you'd + pass a connection class to do_open, but it doesn't actually check for + a class, and just expects a callable. As long as we behave just as a + constructor would have, we should be OK. If it ever changes so that + we *must* pass a class, we'll create an UnsafeHTTPSConnection class + which just sets check_domain to False in the class definition, and + choose which one to pass to do_open. + """ + result = HTTPSConnection(*args, **kwargs) + if self.ca_certs: + result.ca_certs = self.ca_certs + result.check_domain = self.check_domain + return result + + def https_open(self, req): + try: + return self.do_open(self._conn_maker, req) + except URLError as e: + if 'certificate verify failed' in str(e.reason): + raise CertificateError('Unable to verify server certificate ' + 'for %s' % req.host) + else: + raise + +# +# To prevent against mixing HTTP traffic with HTTPS (examples: A Man-In-The- +# Middle proxy using HTTP listens on port 443, or an index mistakenly serves +# HTML containing a http://xyz link when it should be https://xyz), +# you can use the following handler class, which does not allow HTTP traffic. +# +# It works by inheriting from HTTPHandler - so build_opener won't add a +# handler for HTTP itself. 
+# +class HTTPSOnlyHandler(HTTPSHandler, HTTPHandler): + def http_open(self, req): + raise URLError('Unexpected HTTP request on what should be a secure ' + 'connection: %s' % req) + +# +# XML-RPC with timeouts +# + +_ver_info = sys.version_info[:2] + +if _ver_info == (2, 6): + class HTTP(httplib.HTTP): + def __init__(self, host='', port=None, **kwargs): + if port == 0: # 0 means use port 0, not the default port + port = None + self._setup(self._connection_class(host, port, **kwargs)) + + + class HTTPS(httplib.HTTPS): + def __init__(self, host='', port=None, **kwargs): + if port == 0: # 0 means use port 0, not the default port + port = None + self._setup(self._connection_class(host, port, **kwargs)) + + +class Transport(xmlrpclib.Transport): + def __init__(self, timeout, use_datetime=0): + self.timeout = timeout + xmlrpclib.Transport.__init__(self, use_datetime) + + def make_connection(self, host): + h, eh, x509 = self.get_host_info(host) + if _ver_info == (2, 6): + result = HTTP(h, timeout=self.timeout) + else: + if not self._connection or host != self._connection[0]: + self._extra_headers = eh + self._connection = host, httplib.HTTPConnection(h) + result = self._connection[1] + return result + +class SafeTransport(xmlrpclib.SafeTransport): + def __init__(self, timeout, use_datetime=0): + self.timeout = timeout + xmlrpclib.SafeTransport.__init__(self, use_datetime) + + def make_connection(self, host): + h, eh, kwargs = self.get_host_info(host) + if not kwargs: + kwargs = {} + kwargs['timeout'] = self.timeout + if _ver_info == (2, 6): + result = HTTPS(host, None, **kwargs) + else: + if not self._connection or host != self._connection[0]: + self._extra_headers = eh + self._connection = host, httplib.HTTPSConnection(h, None, + **kwargs) + result = self._connection[1] + return result + + +class ServerProxy(xmlrpclib.ServerProxy): + def __init__(self, uri, **kwargs): + self.timeout = timeout = kwargs.pop('timeout', None) + # The above classes only come into play if a 
timeout + # is specified + if timeout is not None: + scheme, _ = splittype(uri) + use_datetime = kwargs.get('use_datetime', 0) + if scheme == 'https': + tcls = SafeTransport + else: + tcls = Transport + kwargs['transport'] = t = tcls(timeout, use_datetime=use_datetime) + self.transport = t + xmlrpclib.ServerProxy.__init__(self, uri, **kwargs) + +# +# CSV functionality. This is provided because on 2.x, the csv module can't +# handle Unicode. However, we need to deal with Unicode in e.g. RECORD files. +# + +def _csv_open(fn, mode, **kwargs): + if sys.version_info[0] < 3: + mode += 'b' + else: + kwargs['newline'] = '' + return open(fn, mode, **kwargs) + + +class CSVBase(object): + defaults = { + 'delimiter': str(','), # The strs are used because we need native + 'quotechar': str('"'), # str in the csv API (2.x won't take + 'lineterminator': str('\n') # Unicode) + } + + def __enter__(self): + return self + + def __exit__(self, *exc_info): + self.stream.close() + + +class CSVReader(CSVBase): + def __init__(self, **kwargs): + if 'stream' in kwargs: + stream = kwargs['stream'] + if sys.version_info[0] >= 3: + # needs to be a text stream + stream = codecs.getreader('utf-8')(stream) + self.stream = stream + else: + self.stream = _csv_open(kwargs['path'], 'r') + self.reader = csv.reader(self.stream, **self.defaults) + + def __iter__(self): + return self + + def next(self): + result = next(self.reader) + if sys.version_info[0] < 3: + for i, item in enumerate(result): + if not isinstance(item, text_type): + result[i] = item.decode('utf-8') + return result + + __next__ = next + +class CSVWriter(CSVBase): + def __init__(self, fn, **kwargs): + self.stream = _csv_open(fn, 'w') + self.writer = csv.writer(self.stream, **self.defaults) + + def writerow(self, row): + if sys.version_info[0] < 3: + r = [] + for item in row: + if isinstance(item, text_type): + item = item.encode('utf-8') + r.append(item) + row = r + self.writer.writerow(row) + +# +# Configurator functionality +# + +class 
Configurator(BaseConfigurator): + + value_converters = dict(BaseConfigurator.value_converters) + value_converters['inc'] = 'inc_convert' + + def __init__(self, config, base=None): + super(Configurator, self).__init__(config) + self.base = base or os.getcwd() + + def configure_custom(self, config): + def convert(o): + if isinstance(o, (list, tuple)): + result = type(o)([convert(i) for i in o]) + elif isinstance(o, dict): + if '()' in o: + result = self.configure_custom(o) + else: + result = {} + for k in o: + result[k] = convert(o[k]) + else: + result = self.convert(o) + return result + + c = config.pop('()') + if not callable(c): + c = self.resolve(c) + props = config.pop('.', None) + # Check for valid identifiers + args = config.pop('[]', ()) + if args: + args = tuple([convert(o) for o in args]) + items = [(k, convert(config[k])) for k in config if valid_ident(k)] + kwargs = dict(items) + result = c(*args, **kwargs) + if props: + for n, v in props.items(): + setattr(result, n, convert(v)) + return result + + def __getitem__(self, key): + result = self.config[key] + if isinstance(result, dict) and '()' in result: + self.config[key] = result = self.configure_custom(result) + return result + + def inc_convert(self, value): + """Default converter for the inc:// protocol.""" + if not os.path.isabs(value): + value = os.path.join(self.base, value) + with codecs.open(value, 'r', encoding='utf-8') as f: + result = json.load(f) + return result + +# +# Mixin for running subprocesses and capturing their output +# + +class SubprocessMixin(object): + def __init__(self, verbose=False, progress=None): + self.verbose = verbose + self.progress = progress + + def reader(self, stream, context): + """ + Read lines from a subprocess' output stream and either pass to a progress + callable (if specified) or write progress information to sys.stderr. 
+ """ + progress = self.progress + verbose = self.verbose + while True: + s = stream.readline() + if not s: + break + if progress is not None: + progress(s, context) + else: + if not verbose: + sys.stderr.write('.') + else: + sys.stderr.write(s.decode('utf-8')) + sys.stderr.flush() + stream.close() + + def run_command(self, cmd, **kwargs): + p = subprocess.Popen(cmd, stdout=subprocess.PIPE, + stderr=subprocess.PIPE, **kwargs) + t1 = threading.Thread(target=self.reader, args=(p.stdout, 'stdout')) + t1.start() + t2 = threading.Thread(target=self.reader, args=(p.stderr, 'stderr')) + t2.start() + p.wait() + t1.join() + t2.join() + if self.progress is not None: + self.progress('done.', 'main') + elif self.verbose: + sys.stderr.write('done.\n') + return p diff --git a/lib/python3.4/site-packages/pip/_vendor/distlib/version.py b/lib/python3.4/site-packages/pip/_vendor/distlib/version.py new file mode 100644 index 0000000..d3dcfa0 --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/distlib/version.py @@ -0,0 +1,742 @@ +# -*- coding: utf-8 -*- +# +# Copyright (C) 2012-2016 The Python Software Foundation. +# See LICENSE.txt and CONTRIBUTORS.txt. +# +""" +Implementation of a flexible versioning scheme providing support for PEP-440, +setuptools-compatible and semantic versioning. 
+""" + +import logging +import re + +from .compat import string_types + +__all__ = ['NormalizedVersion', 'NormalizedMatcher', + 'LegacyVersion', 'LegacyMatcher', + 'SemanticVersion', 'SemanticMatcher', + 'UnsupportedVersionError', 'get_scheme'] + +logger = logging.getLogger(__name__) + + +class UnsupportedVersionError(ValueError): + """This is an unsupported version.""" + pass + + +class Version(object): + def __init__(self, s): + self._string = s = s.strip() + self._parts = parts = self.parse(s) + assert isinstance(parts, tuple) + assert len(parts) > 0 + + def parse(self, s): + raise NotImplementedError('please implement in a subclass') + + def _check_compatible(self, other): + if type(self) != type(other): + raise TypeError('cannot compare %r and %r' % (self, other)) + + def __eq__(self, other): + self._check_compatible(other) + return self._parts == other._parts + + def __ne__(self, other): + return not self.__eq__(other) + + def __lt__(self, other): + self._check_compatible(other) + return self._parts < other._parts + + def __gt__(self, other): + return not (self.__lt__(other) or self.__eq__(other)) + + def __le__(self, other): + return self.__lt__(other) or self.__eq__(other) + + def __ge__(self, other): + return self.__gt__(other) or self.__eq__(other) + + # See http://docs.python.org/reference/datamodel#object.__hash__ + def __hash__(self): + return hash(self._parts) + + def __repr__(self): + return "%s('%s')" % (self.__class__.__name__, self._string) + + def __str__(self): + return self._string + + @property + def is_prerelease(self): + raise NotImplementedError('Please implement in subclasses.') + + +class Matcher(object): + version_class = None + + dist_re = re.compile(r"^(\w[\s\w'.-]*)(\((.*)\))?") + comp_re = re.compile(r'^(<=|>=|<|>|!=|={2,3}|~=)?\s*([^\s,]+)$') + num_re = re.compile(r'^\d+(\.\d+)*$') + + # value is either a callable or the name of a method + _operators = { + '<': lambda v, c, p: v < c, + '>': lambda v, c, p: v > c, + '<=': lambda v, 
c, p: v == c or v < c, + '>=': lambda v, c, p: v == c or v > c, + '==': lambda v, c, p: v == c, + '===': lambda v, c, p: v == c, + # by default, compatible => >=. + '~=': lambda v, c, p: v == c or v > c, + '!=': lambda v, c, p: v != c, + } + + def __init__(self, s): + if self.version_class is None: + raise ValueError('Please specify a version class') + self._string = s = s.strip() + m = self.dist_re.match(s) + if not m: + raise ValueError('Not valid: %r' % s) + groups = m.groups('') + self.name = groups[0].strip() + self.key = self.name.lower() # for case-insensitive comparisons + clist = [] + if groups[2]: + constraints = [c.strip() for c in groups[2].split(',')] + for c in constraints: + m = self.comp_re.match(c) + if not m: + raise ValueError('Invalid %r in %r' % (c, s)) + groups = m.groups() + op = groups[0] or '~=' + s = groups[1] + if s.endswith('.*'): + if op not in ('==', '!='): + raise ValueError('\'.*\' not allowed for ' + '%r constraints' % op) + # Could be a partial version (e.g. for '2.*') which + # won't parse as a version, so keep it as a string + vn, prefix = s[:-2], True + if not self.num_re.match(vn): + # Just to check that vn is a valid version + self.version_class(vn) + else: + # Should parse as a version, so we can create an + # instance for the comparison + vn, prefix = self.version_class(s), False + clist.append((op, vn, prefix)) + self._parts = tuple(clist) + + def match(self, version): + """ + Check if the provided version matches the constraints. + + :param version: The version to match against this instance. + :type version: Strring or :class:`Version` instance. 
+ """ + if isinstance(version, string_types): + version = self.version_class(version) + for operator, constraint, prefix in self._parts: + f = self._operators.get(operator) + if isinstance(f, string_types): + f = getattr(self, f) + if not f: + msg = ('%r not implemented ' + 'for %s' % (operator, self.__class__.__name__)) + raise NotImplementedError(msg) + if not f(version, constraint, prefix): + return False + return True + + @property + def exact_version(self): + result = None + if len(self._parts) == 1 and self._parts[0][0] in ('==', '==='): + result = self._parts[0][1] + return result + + def _check_compatible(self, other): + if type(self) != type(other) or self.name != other.name: + raise TypeError('cannot compare %s and %s' % (self, other)) + + def __eq__(self, other): + self._check_compatible(other) + return self.key == other.key and self._parts == other._parts + + def __ne__(self, other): + return not self.__eq__(other) + + # See http://docs.python.org/reference/datamodel#object.__hash__ + def __hash__(self): + return hash(self.key) + hash(self._parts) + + def __repr__(self): + return "%s(%r)" % (self.__class__.__name__, self._string) + + def __str__(self): + return self._string + + +PEP440_VERSION_RE = re.compile(r'^v?(\d+!)?(\d+(\.\d+)*)((a|b|c|rc)(\d+))?' + r'(\.(post)(\d+))?(\.(dev)(\d+))?' 
+ r'(\+([a-zA-Z\d]+(\.[a-zA-Z\d]+)?))?$') + + +def _pep_440_key(s): + s = s.strip() + m = PEP440_VERSION_RE.match(s) + if not m: + raise UnsupportedVersionError('Not a valid version: %s' % s) + groups = m.groups() + nums = tuple(int(v) for v in groups[1].split('.')) + while len(nums) > 1 and nums[-1] == 0: + nums = nums[:-1] + + if not groups[0]: + epoch = 0 + else: + epoch = int(groups[0]) + pre = groups[4:6] + post = groups[7:9] + dev = groups[10:12] + local = groups[13] + if pre == (None, None): + pre = () + else: + pre = pre[0], int(pre[1]) + if post == (None, None): + post = () + else: + post = post[0], int(post[1]) + if dev == (None, None): + dev = () + else: + dev = dev[0], int(dev[1]) + if local is None: + local = () + else: + parts = [] + for part in local.split('.'): + # to ensure that numeric compares as > lexicographic, avoid + # comparing them directly, but encode a tuple which ensures + # correct sorting + if part.isdigit(): + part = (1, int(part)) + else: + part = (0, part) + parts.append(part) + local = tuple(parts) + if not pre: + # either before pre-release, or final release and after + if not post and dev: + # before pre-release + pre = ('a', -1) # to sort before a0 + else: + pre = ('z',) # to sort after all pre-releases + # now look at the state of post and dev. + if not post: + post = ('_',) # sort before 'a' + if not dev: + dev = ('final',) + + #print('%s -> %s' % (s, m.groups())) + return epoch, nums, pre, post, dev, local + + +_normalized_key = _pep_440_key + + +class NormalizedVersion(Version): + """A rational version. 
+ + Good: + 1.2 # equivalent to "1.2.0" + 1.2.0 + 1.2a1 + 1.2.3a2 + 1.2.3b1 + 1.2.3c1 + 1.2.3.4 + TODO: fill this out + + Bad: + 1 # mininum two numbers + 1.2a # release level must have a release serial + 1.2.3b + """ + def parse(self, s): + result = _normalized_key(s) + # _normalized_key loses trailing zeroes in the release + # clause, since that's needed to ensure that X.Y == X.Y.0 == X.Y.0.0 + # However, PEP 440 prefix matching needs it: for example, + # (~= 1.4.5.0) matches differently to (~= 1.4.5.0.0). + m = PEP440_VERSION_RE.match(s) # must succeed + groups = m.groups() + self._release_clause = tuple(int(v) for v in groups[1].split('.')) + return result + + PREREL_TAGS = set(['a', 'b', 'c', 'rc', 'dev']) + + @property + def is_prerelease(self): + return any(t[0] in self.PREREL_TAGS for t in self._parts if t) + + +def _match_prefix(x, y): + x = str(x) + y = str(y) + if x == y: + return True + if not x.startswith(y): + return False + n = len(y) + return x[n] == '.' + + +class NormalizedMatcher(Matcher): + version_class = NormalizedVersion + + # value is either a callable or the name of a method + _operators = { + '~=': '_match_compatible', + '<': '_match_lt', + '>': '_match_gt', + '<=': '_match_le', + '>=': '_match_ge', + '==': '_match_eq', + '===': '_match_arbitrary', + '!=': '_match_ne', + } + + def _adjust_local(self, version, constraint, prefix): + if prefix: + strip_local = '+' not in constraint and version._parts[-1] + else: + # both constraint and version are + # NormalizedVersion instances. + # If constraint does not have a local component, + # ensure the version doesn't, either. 
+ strip_local = not constraint._parts[-1] and version._parts[-1] + if strip_local: + s = version._string.split('+', 1)[0] + version = self.version_class(s) + return version, constraint + + def _match_lt(self, version, constraint, prefix): + version, constraint = self._adjust_local(version, constraint, prefix) + if version >= constraint: + return False + release_clause = constraint._release_clause + pfx = '.'.join([str(i) for i in release_clause]) + return not _match_prefix(version, pfx) + + def _match_gt(self, version, constraint, prefix): + version, constraint = self._adjust_local(version, constraint, prefix) + if version <= constraint: + return False + release_clause = constraint._release_clause + pfx = '.'.join([str(i) for i in release_clause]) + return not _match_prefix(version, pfx) + + def _match_le(self, version, constraint, prefix): + version, constraint = self._adjust_local(version, constraint, prefix) + return version <= constraint + + def _match_ge(self, version, constraint, prefix): + version, constraint = self._adjust_local(version, constraint, prefix) + return version >= constraint + + def _match_eq(self, version, constraint, prefix): + version, constraint = self._adjust_local(version, constraint, prefix) + if not prefix: + result = (version == constraint) + else: + result = _match_prefix(version, constraint) + return result + + def _match_arbitrary(self, version, constraint, prefix): + return str(version) == str(constraint) + + def _match_ne(self, version, constraint, prefix): + version, constraint = self._adjust_local(version, constraint, prefix) + if not prefix: + result = (version != constraint) + else: + result = not _match_prefix(version, constraint) + return result + + def _match_compatible(self, version, constraint, prefix): + version, constraint = self._adjust_local(version, constraint, prefix) + if version == constraint: + return True + if version < constraint: + return False +# if not prefix: +# return True + release_clause = 
constraint._release_clause + if len(release_clause) > 1: + release_clause = release_clause[:-1] + pfx = '.'.join([str(i) for i in release_clause]) + return _match_prefix(version, pfx) + +_REPLACEMENTS = ( + (re.compile('[.+-]$'), ''), # remove trailing puncts + (re.compile(r'^[.](\d)'), r'0.\1'), # .N -> 0.N at start + (re.compile('^[.-]'), ''), # remove leading puncts + (re.compile(r'^\((.*)\)$'), r'\1'), # remove parentheses + (re.compile(r'^v(ersion)?\s*(\d+)'), r'\2'), # remove leading v(ersion) + (re.compile(r'^r(ev)?\s*(\d+)'), r'\2'), # remove leading v(ersion) + (re.compile('[.]{2,}'), '.'), # multiple runs of '.' + (re.compile(r'\b(alfa|apha)\b'), 'alpha'), # misspelt alpha + (re.compile(r'\b(pre-alpha|prealpha)\b'), + 'pre.alpha'), # standardise + (re.compile(r'\(beta\)$'), 'beta'), # remove parentheses +) + +_SUFFIX_REPLACEMENTS = ( + (re.compile('^[:~._+-]+'), ''), # remove leading puncts + (re.compile('[,*")([\]]'), ''), # remove unwanted chars + (re.compile('[~:+_ -]'), '.'), # replace illegal chars + (re.compile('[.]{2,}'), '.'), # multiple runs of '.' + (re.compile(r'\.$'), ''), # trailing '.' +) + +_NUMERIC_PREFIX = re.compile(r'(\d+(\.\d+)*)') + + +def _suggest_semantic_version(s): + """ + Try to suggest a semantic form for a version for which + _suggest_normalized_version couldn't come up with anything. + """ + result = s.strip().lower() + for pat, repl in _REPLACEMENTS: + result = pat.sub(repl, result) + if not result: + result = '0.0.0' + + # Now look for numeric prefix, and separate it out from + # the rest. 
+ #import pdb; pdb.set_trace() + m = _NUMERIC_PREFIX.match(result) + if not m: + prefix = '0.0.0' + suffix = result + else: + prefix = m.groups()[0].split('.') + prefix = [int(i) for i in prefix] + while len(prefix) < 3: + prefix.append(0) + if len(prefix) == 3: + suffix = result[m.end():] + else: + suffix = '.'.join([str(i) for i in prefix[3:]]) + result[m.end():] + prefix = prefix[:3] + prefix = '.'.join([str(i) for i in prefix]) + suffix = suffix.strip() + if suffix: + #import pdb; pdb.set_trace() + # massage the suffix. + for pat, repl in _SUFFIX_REPLACEMENTS: + suffix = pat.sub(repl, suffix) + + if not suffix: + result = prefix + else: + sep = '-' if 'dev' in suffix else '+' + result = prefix + sep + suffix + if not is_semver(result): + result = None + return result + + +def _suggest_normalized_version(s): + """Suggest a normalized version close to the given version string. + + If you have a version string that isn't rational (i.e. NormalizedVersion + doesn't like it) then you might be able to get an equivalent (or close) + rational version from this function. + + This does a number of simple normalizations to the given string, based + on observation of versions currently in use on PyPI. Given a dump of + those version during PyCon 2009, 4287 of them: + - 2312 (53.93%) match NormalizedVersion without change + with the automatic suggestion + - 3474 (81.04%) match when using this suggestion method + + @param s {str} An irrational version string. + @returns A rational version string, or None, if couldn't determine one. 
+ """ + try: + _normalized_key(s) + return s # already rational + except UnsupportedVersionError: + pass + + rs = s.lower() + + # part of this could use maketrans + for orig, repl in (('-alpha', 'a'), ('-beta', 'b'), ('alpha', 'a'), + ('beta', 'b'), ('rc', 'c'), ('-final', ''), + ('-pre', 'c'), + ('-release', ''), ('.release', ''), ('-stable', ''), + ('+', '.'), ('_', '.'), (' ', ''), ('.final', ''), + ('final', '')): + rs = rs.replace(orig, repl) + + # if something ends with dev or pre, we add a 0 + rs = re.sub(r"pre$", r"pre0", rs) + rs = re.sub(r"dev$", r"dev0", rs) + + # if we have something like "b-2" or "a.2" at the end of the + # version, that is pobably beta, alpha, etc + # let's remove the dash or dot + rs = re.sub(r"([abc]|rc)[\-\.](\d+)$", r"\1\2", rs) + + # 1.0-dev-r371 -> 1.0.dev371 + # 0.1-dev-r79 -> 0.1.dev79 + rs = re.sub(r"[\-\.](dev)[\-\.]?r?(\d+)$", r".\1\2", rs) + + # Clean: 2.0.a.3, 2.0.b1, 0.9.0~c1 + rs = re.sub(r"[.~]?([abc])\.?", r"\1", rs) + + # Clean: v0.3, v1.0 + if rs.startswith('v'): + rs = rs[1:] + + # Clean leading '0's on numbers. + #TODO: unintended side-effect on, e.g., "2003.05.09" + # PyPI stats: 77 (~2%) better + rs = re.sub(r"\b0+(\d+)(?!\d)", r"\1", rs) + + # Clean a/b/c with no version. E.g. "1.0a" -> "1.0a0". Setuptools infers + # zero. 
+ # PyPI stats: 245 (7.56%) better + rs = re.sub(r"(\d+[abc])$", r"\g<1>0", rs) + + # the 'dev-rNNN' tag is a dev tag + rs = re.sub(r"\.?(dev-r|dev\.r)\.?(\d+)$", r".dev\2", rs) + + # clean the - when used as a pre delimiter + rs = re.sub(r"-(a|b|c)(\d+)$", r"\1\2", rs) + + # a terminal "dev" or "devel" can be changed into ".dev0" + rs = re.sub(r"[\.\-](dev|devel)$", r".dev0", rs) + + # a terminal "dev" can be changed into ".dev0" + rs = re.sub(r"(?![\.\-])dev$", r".dev0", rs) + + # a terminal "final" or "stable" can be removed + rs = re.sub(r"(final|stable)$", "", rs) + + # The 'r' and the '-' tags are post release tags + # 0.4a1.r10 -> 0.4a1.post10 + # 0.9.33-17222 -> 0.9.33.post17222 + # 0.9.33-r17222 -> 0.9.33.post17222 + rs = re.sub(r"\.?(r|-|-r)\.?(\d+)$", r".post\2", rs) + + # Clean 'r' instead of 'dev' usage: + # 0.9.33+r17222 -> 0.9.33.dev17222 + # 1.0dev123 -> 1.0.dev123 + # 1.0.git123 -> 1.0.dev123 + # 1.0.bzr123 -> 1.0.dev123 + # 0.1a0dev.123 -> 0.1a0.dev123 + # PyPI stats: ~150 (~4%) better + rs = re.sub(r"\.?(dev|git|bzr)\.?(\d+)$", r".dev\2", rs) + + # Clean '.pre' (normalized from '-pre' above) instead of 'c' usage: + # 0.2.pre1 -> 0.2c1 + # 0.2-c1 -> 0.2c1 + # 1.0preview123 -> 1.0c123 + # PyPI stats: ~21 (0.62%) better + rs = re.sub(r"\.?(pre|preview|-c)(\d+)$", r"c\g<2>", rs) + + # Tcl/Tk uses "px" for their post release markers + rs = re.sub(r"p(\d+)$", r".post\1", rs) + + try: + _normalized_key(rs) + except UnsupportedVersionError: + rs = None + return rs + +# +# Legacy version processing (distribute-compatible) +# + +_VERSION_PART = re.compile(r'([a-z]+|\d+|[\.-])', re.I) +_VERSION_REPLACE = { + 'pre': 'c', + 'preview': 'c', + '-': 'final-', + 'rc': 'c', + 'dev': '@', + '': None, + '.': None, +} + + +def _legacy_key(s): + def get_parts(s): + result = [] + for p in _VERSION_PART.split(s.lower()): + p = _VERSION_REPLACE.get(p, p) + if p: + if '0' <= p[:1] <= '9': + p = p.zfill(8) + else: + p = '*' + p + result.append(p) + result.append('*final') 
+ return result + + result = [] + for p in get_parts(s): + if p.startswith('*'): + if p < '*final': + while result and result[-1] == '*final-': + result.pop() + while result and result[-1] == '00000000': + result.pop() + result.append(p) + return tuple(result) + + +class LegacyVersion(Version): + def parse(self, s): + return _legacy_key(s) + + @property + def is_prerelease(self): + result = False + for x in self._parts: + if (isinstance(x, string_types) and x.startswith('*') and + x < '*final'): + result = True + break + return result + + +class LegacyMatcher(Matcher): + version_class = LegacyVersion + + _operators = dict(Matcher._operators) + _operators['~='] = '_match_compatible' + + numeric_re = re.compile('^(\d+(\.\d+)*)') + + def _match_compatible(self, version, constraint, prefix): + if version < constraint: + return False + m = self.numeric_re.match(str(constraint)) + if not m: + logger.warning('Cannot compute compatible match for version %s ' + ' and constraint %s', version, constraint) + return True + s = m.groups()[0] + if '.' in s: + s = s.rsplit('.', 1)[0] + return _match_prefix(version, s) + +# +# Semantic versioning +# + +_SEMVER_RE = re.compile(r'^(\d+)\.(\d+)\.(\d+)' + r'(-[a-z0-9]+(\.[a-z0-9-]+)*)?' 
+ r'(\+[a-z0-9]+(\.[a-z0-9-]+)*)?$', re.I) + + +def is_semver(s): + return _SEMVER_RE.match(s) + + +def _semantic_key(s): + def make_tuple(s, absent): + if s is None: + result = (absent,) + else: + parts = s[1:].split('.') + # We can't compare ints and strings on Python 3, so fudge it + # by zero-filling numeric values so simulate a numeric comparison + result = tuple([p.zfill(8) if p.isdigit() else p for p in parts]) + return result + + m = is_semver(s) + if not m: + raise UnsupportedVersionError(s) + groups = m.groups() + major, minor, patch = [int(i) for i in groups[:3]] + # choose the '|' and '*' so that versions sort correctly + pre, build = make_tuple(groups[3], '|'), make_tuple(groups[5], '*') + return (major, minor, patch), pre, build + + +class SemanticVersion(Version): + def parse(self, s): + return _semantic_key(s) + + @property + def is_prerelease(self): + return self._parts[1][0] != '|' + + +class SemanticMatcher(Matcher): + version_class = SemanticVersion + + +class VersionScheme(object): + def __init__(self, key, matcher, suggester=None): + self.key = key + self.matcher = matcher + self.suggester = suggester + + def is_valid_version(self, s): + try: + self.matcher.version_class(s) + result = True + except UnsupportedVersionError: + result = False + return result + + def is_valid_matcher(self, s): + try: + self.matcher(s) + result = True + except UnsupportedVersionError: + result = False + return result + + def is_valid_constraint_list(self, s): + """ + Used for processing some metadata fields + """ + return self.is_valid_matcher('dummy_name (%s)' % s) + + def suggest(self, s): + if self.suggester is None: + result = None + else: + result = self.suggester(s) + return result + +_SCHEMES = { + 'normalized': VersionScheme(_normalized_key, NormalizedMatcher, + _suggest_normalized_version), + 'legacy': VersionScheme(_legacy_key, LegacyMatcher, lambda self, s: s), + 'semantic': VersionScheme(_semantic_key, SemanticMatcher, + _suggest_semantic_version), +} + 
+_SCHEMES['default'] = _SCHEMES['normalized'] + + +def get_scheme(name): + if name not in _SCHEMES: + raise ValueError('unknown scheme name: %r' % name) + return _SCHEMES[name] diff --git a/lib/python3.4/site-packages/pip/_vendor/distlib/w32.exe b/lib/python3.4/site-packages/pip/_vendor/distlib/w32.exe new file mode 100644 index 0000000000000000000000000000000000000000..f27573a1eac23fc0ddbc2e056e3c099d32da1681 GIT binary patch literal 85504 zcmeFae|%I$mN$I+N0LtH*BqZg>PMaYv}HE1#9c?y{rEApA_72`#tyED;4~&I}7UNdkXHpr=a}i>Vlu# zyZX*6v$L}blc@KMTJzh}Ve9$SZ&l&{KL0ho*_mg~|CPUgeSUzypFaO7?Z{$_U=_Sg9FX!+X8s;}QKazPp9NtK1qv*} zlYR>P&A&#A@PtDUwzmtyq`yfT`8ngGziFE`L8vrI8woZPzEZk#qlB*s&*DLJQC+FA zNBk=ggjHA8ufAQnT@Wt490_gs&BSjPek1;QQS_B2n9zMC5_<6?oTu;`@y`nkSJtnq zzXNf+uBfwc3qE>A03qlleD!y(xfg)xIfwucIKH_TRtRv{BlZ9P_x}P1mS{bJQ1KtV zf{|C=<`uNw9a8o@A<`y~kMv8|gytY6VB~p`w9`zGCWQ8m5`?f2atRh8uW zh>x~~wxLd{+~a4rQM9k8$k!K6Ly19Tdd#GOie^)w7(tD4aAuL*T_krEoxb1d?hiv8 zP_b!dQK(Z5yBGy=^qmvi&YTGC0)9U`f1dLtT0YlUu}sgY(_AkdK|-Bpw3JcliLLuW zx2e8fLe&OTny)vwk~eKY>oxLn(d5ddLf9I%pgc&6!*Ap*Bsu7f^vg~oZw>(@(q&79 z)@jNbm#r~7)^g#MGQ$FLOahd<&1*JzccQ6>^pq)%2i4}n6ZnE?ptamaO^sS!ILXMH zMTMM*mKVDG;k9^e^oKL6jl7wNhGaRFO@*$tavG58R(D(IJzmu@sA|);!WQbC#extF z@|=>H(}A3!J`VT!0K}+#MF41L)%aPxhXgiPl?rEQC9?q*?l<1|BXKqBL(9bMg}En* zlg}d?aDHPZhS&#YVlhwrCo{3nOkBfWHWL@}#QV)eOX&46ULjUqxEvi8{k#%op_qIs zYo@=ZrUs2zV@M#%$a5hL!Q4=YWB3s;Xn;b;LTvs{VlUWHU_yakexlnz8p8P|Ui5DM zS%}ZjHjbtjJv|siw9D+yN6?2CgYR0Ix7{nevI{`fXk>kQw88;BA?^|C!0o9@VwyA| z?C`Tw5dB~fO34YZ0pkNM2JgDtBK;)Luktdsp$v#?Q0a8kDc4O4$``xa>Xf1ss;Q~D^JWXk{2N(OUJz z@I2jTsTJzfW|=YtgVEjet}A_oL*R3s`>cr8jfVCa!ltOKzc&w;TkT4{yWv&HH>@5=o}?Z*;Yuf_}bPJ$Xa1>_oU z!S5t$hcqrQQ9sb-16|}K&>c9IupP2RDPK)gy2=q?&tME4YS_?GkmP3@6BuH+5E)!d zjNMc*V>hph{UZ$Rff{xO!-;aRxQ4xt);z)rQR^d~Y#Ov>>*kknztR7I%L;4Pk2d+RhtHI1uIp|V--7z1H z!UjW=LZNrF$C1B-#@fRoq^LtPXp*DJ4vZaR8OuX`#;s6`**1ia-A<}g^~mdy$Zc% zxOco+g#gQ&3_^MX z?EOiC5MVwqF~DBo;eX}NUm;6?HRB24TQOW~)Ix;va%nk{!;;lFrgYjpva`M?RVAhD 
z)RcLGAdL^vn=QKp1%k*dP7g%|*(1|1NeiJ4AwM2$2a!wa_|`y8=ofPk;{w3mN~G;y zh(}OMcskdKS_aTqjiXA;?isBAN-682a#pU8CWa`)zCtD{$r+}R9_*r6Vqv%-Mye9Sd|MA_I=h*C3* zcTId0l|oTkdrD3#^_`G%2v6-%)pvqp)oPFG&&GV+Yruo1hVG4cts4!2)Tq&k5x#D_ z{%OqDwTT0CH6{VNh5@=bfGN%esEpF!66DKbyw*_8#E#mFzz&j{edwu_E;$gpfMPq? zZy*+C(}Srr&t$(t*Fd*PN*iMNEcCHdEX}aO0#nWJ4tyel{X_DT%RKw=66zlgHjNrA zWPl9zJBYv$o=Ouh=?WyHxDp`?EY4dF&O)YSREF z;ONj!!hZ#a!P*zjR#>pDcQ9S-3Afz4->iyWNC?rAzvDHt|(2j1BaAd%@h(i zRNxtO|Cca3g|7HLf6>sd=T@`-MF8xd5f>A%?0{e{tL-?#|6#V|tOmumXtx$R+$UR? z7dp=zumMII*P5jquw4D7RIf0LbhQ~Pf37fl{g@iD=fwvXxMvVE*iT#0Fel#|Ng zMGpDvWgP_jUs{&?&DV^ zerRR%dSh8f^!mY-^gPd>19J(0&)OpFYM$Feo)&b01n*RufCV`}K zt-=n5mx(Q3BUU@-IZ=;gpQm9!eCQQK)EL*$oKa4&hkhssmuVulaK0uTl?ez^t2wC9@~bLm6NgyA-2ZOh8{0;y)#*Ma}{( zR@4C&%dU8{8ROD>!p=}JY0(=2?(SFR?o!z${jgNdl`b!p^W`fzb`jXIpmQ*?=MeJ4 z819n|xduUZ8@n0#F`DKx2h3AJm3WzdfwX);dI$@d;ei-43L1!Yo~@j+nW8q74NC@; zc^m4Aj7c$D49*HdU8&C@W8^*Dv6ogW^tsL@?(sRa>|6Vx`S;l8>61xkge_5@15YDw zwW)4w7t_jtepSv(+LJm$>2X#=Xwefr=THSbd#H8~BM7;R--%K%3o5??B=@3viPDvJ ztm0SN!Q#dd?La2kQMAAwpN^20q227zP85Bp*7o%uut{^Y z_uIcs({dku0?Q@k)2Z#BruUa$ZEuvuX~#TA*SWOb#%r@=%lzv-@7I5WJR}eDM6DxE zqgwOvjbF0)Lv+R;LR|j>r$uajftPNN9G4vzFX@XvMW3|^O->ZG+YYp!OG8P~Nsm*R zm|bZ$UTQH)y`PV+=m4|K9tW|N3$z6ebZXlnJ+pCkmTWV84zH)Z{`8pzPN4vkF)xW6 z?=^-gb&h(_P0AE$C-Ar-$*uX0s1Kl?k_L4+a*GY*%E|b z^Yz($11nZy(FB?FlT5w{t%*ss&w)816m1^61rbTH`vxm7Drp9Z$RiLXKK9ec<&bTd zJ-##F`wwL1^SuuV+7vaK?`ajzrxn@72&0Ld@ZP0#eTiYyqEYKv^a=C=DCO4k5j^l?vHq%luBq?~7{nsP?0BA-}bYRdo{RfIDA>;(Fc`!ISC zdbs##dvuKSOX-OfT=Ra4OR50Q&;e2ebtH z5aJ$Svo9AgT4!?6ckzaQ$I!=U6_cQ5NBLPARuxL!B+ymo6lh4~72Qr1{RnuKs%|Wp z_;fqfy3%mT7ZC)in~kEqQZ`3{Df{P$?g+4_FjAV;Wphx`k&R6{MSl-y1MF*pGApQ* zO)|WKMNVV0FkK^#HDGKuNB+nAXygZROGCSexOpfe>iZ?;;cKBW-2J5`hcF|QM+IOo zgXMA!BKKC^Mmh&&c?BsAI#a)9#~g&4_EHS(A;_P2qyPij!#wrEclyzT53HLj~6Irw$kVamp z4c69!pp89Qf@y9pYREpGm4tGzy9kOcy*e3@$pRD+wy-Ox!k$hk!_Ve(m_MKi`+Wy3 zQaY64A>9_vf+->pqPnoLUt@La>nfGI<#azg{0-Jmy?E$X!X&6Uv`n??6s)Q@KxG8M z>pPj|I|Xp1p<5vM-x2&fcqG?eny-spiuG3qYXQk|nojb`MN`iVG}g`;0+dpAAwBa7 
z=s9^3JxlWGS(b}u-L^b0e`U|&ugNa{N^|npbO(Q3VaMy&KTVsBm*x|8&g5_V&SJbV z8tj}y-+Z+kOWR0tf&X?;;?9})ilr6cnQ0f=VrhjuHEFz2BPTFP+~Mf;3g%MW$ZMn~ zSPLMqbunrbgg(sMgvii)?O0aiZA4HhBLIv8#Cn1W5pL6Z2_cgJf>F!`tQ|Yr(3*j8 z?g~gi2G^i^x~H=tL+jj0DPg;x%_a=NoloX)>-&_jJ)o9Q8k>4SLRxsj(iIr%Un0DE z+75zOomQp2faZZ`w}0&wHhmr*TmgszK-6dl+7oG-Jwn5R_O0^S)b`KPHG2llLmUtl z%$#JadKI!?-*mPS88*GW66X63RyPydF=e!}LPuaFpus4ivRDQQ*|2uu_4D-BPiq=n zi?TON(B3XO)P6QiX?HDV8dOr#BB#)Zs_9uuX92odGuV<^0bqPk61ffIls1>2tpG3# zY)zFWJZ1&Z%54SUBxOlX5|D$}9AnkgGG1VQ(<-O2#W0H6QBjU)4*EH|y_vUVMBXY4 zNKWN!Zo}AQ9!_NT-vP(8cSkPlY3}3I7)+pX2h=5f>XL-Iq*u3*RZ5v#Ad#&}N!%_K zD~WpP5+$)gx`=Is`bg^8%Mf(8#c#I?!YdYZ1U=h*QmGoS=$Wr%LW>}1HO-6e!}DtD zJO{+)_Yobvf6L1P7#}$-Hc!I0xTj4?REt0TM*)qoMPUVEb3cL#8ztRRDa}_{zI27c zvZRZ#J`f{2kfYR>5F#0VMamCp2G zAhckaEOP@RWl0NlBS(P!*^|s7!}gm6GBEDz^I+EO<;fi*lUYOW$VYl0cgv5@ZJ}J})hG0?=^^uVq7R|u5L`E*gTrxxzU{PSC zm0Qt^u~w*bYE@2kqg|b?Zgi*{C#jV#zTgVGLPtMFtph;mKFLjWP)>F}ajzYqRX~@A zYZWp22DrhsN7TNnG+$b#37VItOO?+M+vR!Dit|dswP8%O0)11!o)}idV)G-ZT*jm(eu$0^nCn1dhR|- z&wWSmtlRczg1@3Y{Pp-D{)!yruczAh>pxz_>(@Vx^x~!Y?9nQ_8YRdAUxX1a5Vo?{sVg4DQj+i8B}c!c5z;6j)*5+akb^YaYK^YHT|#nm zlDo}M|5yyuBMAOb%V0Cs{uT{v(%5;**(>V5idOzrIXikoPB?oh8qEIIBS@?5NGnIa za8A&3XkyzX?Mj=Ywu9wkN`k7S?@gS>D2%ucmX?YdFr%Ewv5fs0*|8*AtUB0(2nMln z`YPUPF-9xXwnNX*_8)qM)v$DtIANPmT~p|a;#bs#-5)M7S7Ar_b-#KigJA$FG&w=S zZY@mNt9Meq4y~$pwTdMkn7R`ER{O8o#i=4pn_5pkH@9f(9PSb_e%EN}ojwCoC}nGl?6!B@&e^Prf%HXAt-@h`m(jP+(eJg_nR*yaA6>%K>H%#T1~R5r^(f~C zrIpbegs6R)ws321w0tWp>Af13jtQ8FbF~i`6@@}Ak+zNAEekYXLOX#{<+mFf3_&W? 
zFzk&a;f>Es5(1orYuU^6?dvmZ*FmNm{L>GE!A+%itouoqe3?@A1m+WhR1l&!`wJFv z%0aT^h}sfNsctdyC|)^;b*d9F>=Dp|@w;@XQyRl}1ZuQ*wL|Qm%xG9sNGtvgRkQwv zszOdxuU6?m1usWV+AlRqtDR!=%?KpXzRBhxDLx)CM`)Mw);PK(nR#}oE@3ahNp6(q zK!x0gSjDFXwmL%j##*Pb)@7{CWphylbjw-}gKDVOy%-q;_)siFrpt``CK>nT8~4p% zc}c(nc40}Nrds=)w(Z=<-TiP`Cb}Zzol(tV*%}goh+S9c4rut$e;nUvw%^?TznRwErLNTSEI8y!KG7RfR5=u z3Fos+Ecj@PiJ4ip!vUr>y`+P%?*tN^16k`YBqx2j!+jGdIWb+c@>mlyw zCjKsn-bZIJh~yrif@$v;-9J{bd?`4u)H$$ozChiat8QM~x%Cj3JHK=5O^|~5v5JuE zEDR0VY&kKM_pJ3HBX1WFq#9!`;KNSlApmTXV;EE`)=-z-;Xqb|jrD8T*XVph8i1OQ zJpyt%P%P*G`a%$jJ!L1G$OEK@&7C=;glY)$-$5D;k8*Y`3guc+riDFGD3D=12;1bf zU>fH|$&nHKL0ja*ON%8)NMXI&3%Zp>c0)~vLm)fx7sR^zQPK;DUrC%s!E6f(;^jEP zF7{6ZLn&b6#xAh&7qDJ|X1Mn>Cr}Tqq7a3^s-q zuWIwF-2C#HUzO(9kIb*-WzsX!Qbs@mHyRc0pq{mpQg%>p zh^0}+BiBG0xpeTnW{2FF>^Wj{yCA@fM0xzi`?Q?gg$OqFV+cbDW!Fk*yfEH3VUole zCO(r`vuo}&iRERm=OE;0$~&|&Ql_EkY+&Hnj1P>XJ5#;7JSERZFCLQT;tS;&;J`DH zWO*iavleoqfJp^DmI(YfdX zyOOZi%hl{I4`NQ>JLoyFu5zz{)j0y#PqcM-I_gDiC=Rgoppv^-jEl~9A$Bg0U6qWL ze%NkU{n1%_lLIU{JU}sOWt@E|o`9 zG+9Kjd(gaRPLOYCu#$9sU~kt)@D!m(&G&kWV46ozjG*Z8-$B85)7UeE6g01j`VfWT zaiSPA>ObX(XfjvjNLuzSWK01O^XAyvn`U!^{+BS7nkN0qeDRS3+JTYFJ=4blJk%=Z z*ba$%=H*rg#69<=yN}mI%N?}4JyC0LYL!mU+4@Y(`fVS=gCy&ErELcG_TYfVC&I(4 z$0H`b@L;G7Y&F-CJHR@@uk?^3YH|UYzyfk;wTP7@lnDWaxZ(bCu+v;9-3q#{;2RTW zil3)AAjH>d-okb^^~_PF%aR;JMy@&ek`wk-82a7En`pbEjn{m{R1P~hBNnO23^XyO z1c%4@deP< zD?eYFBuReC*iBVN+`kOP{Ve1vfD`?8WT;`2W^+(|MK%44-w#%W`;5HVpqNyNgk_K+ z&2zW4Iv2ETOLG8MUFoC+lEYg@OZ9pNKTImXAB|9&zoqsZoBJd1FcpE`-bT1!##upj zoQqD79S5v`2ecFGr96BbpbxNpYLe~pxDauiO$SbMX)_Ta>rM0>}hh4J%f-sTlZ`EHA%onOu$Zf`eS}PD38bH zx&fDe=x!$>8x2uQoAgs zCJM0aSZ%95n7y3AryoR#*&JmiDos8y)S^EGcb~>KGy7Ukm8Sot+I=-S3_8>ZH z2r-UoKa}oZ&tSk(E9{8v_pY^K&0V?Hib5=UmgWFvubmQcrh~8IFx>q84m|bqlrXac zWamM1ze?O5>cErGf%91cY02gJuaaPo04&*8(NWP)o6rr6>(*ytU5>yOJw0l_4*h$m zV?z}EBl~gyeffIo%UQfH8@{eocdo_k+w9KY)7QO2iM+47cwetm%Qp}Hoz6aInD&oh zm_Zf)o6i1Q)M{jBH;^OQ*)=4wFY*07oqg0lp|k&NxU=8DI;kjh_FDETLg?(b^3Kiy zLjWWJ>X)h?QJa!`o 
z8b|d06}H;=w+L}J99;U7)~)mfRNGhgZP5ZA~rvW031>tlotoswFF6Bs5ro`G{b{ZA?7X@ z;{$ulf#@8*o6bh#8yz!9oiZc1GT9?rz4pQf=R7E9(8hNGCM=t|yO5km)ws}$2&1Q# zV=S_ZFIwiQIeIQll9etOHnB6I2I}nOD3jVMS9&ICOSqN~4=z=GJ?fHfXrGB}Cb*;& z2y8UoF@ftobc6;>>7eF3r#Cq~C5_nnub;}SfKc-pgIUQ0WEz6hZ59Dq>qOOR$c4Y@T<;=5!$GZ^zd0c~}WcYn@U_ zYk6^@we4u4#WyJKZ>tR3gVOj)IY&Ei<|yf3<a#G>@0W z1)=-DU{$+DJ8%s97A|c3NQ4{tFd8FXc?u6 z(fOE1(=u86(yXu$tzwH&Ex(pw%t|-%4z z_X~|;GuabCt5s|!vkm4!S&g17IjzxQ*KLgj=d{{h^1|y-YcU(?^PCWsV<;FirdG?G z31ai_@G%ExeOheu=YhxZpHZT(uhMg9U4C?)g**gDDeYDyb=tCY8T{9OV)$I3*~hpe zrg0(Fa+YaW!pH&cFhROb%jgHGl~OKtkFunfI&IlnMr)PPvM@bOpYEycmD72`6KE&H zbPm0~jCv{>0PuFe&j&%JK9~4l1;kFz(=yDud#RFEJ+0LO;^Cp|>jfJusH7M!{|#8`I~7D zcsFOW7hn>K36~?Q{tq|`CEIN$bgw4%mtTb{O`qR#>s4t=!n!H5b*v_~+?q3ezLK!X zsjo}t7oE_@oH_9r_OvhtJ6LEAuNjuhMYX-h7>&!=@$bcadidNFc>}k}vr?Y(L9$6H zRXx~J>%}yh%v4*BA__}5C|5C}q4~j|xMIGwi?C4) zmrW%UG=I^&uH-BpgMKasee!O(ogd#F%WeXcc{uCKkHNK0y^?FpaahpQkA@2@^uB|s z6R7_<^%y!1EfXB<2PA56y(R7X3SigeFbiRt7Q}gxuk_!C$1P25;$tJ$NqeY~$& zN@MS|xlJC|^lc+h)aNK=8!=eI<1M~9fcd+2R{7Rmf7O`}l}~Jvv*=h$h7z+Vaf{rq z9cu4#*xKpk!s`v?&$jlsRZ2hoED&dzaGw(#3V^k+ywGlPM(J6I6E81d3t?@WR|tE< z1uFuqEE}8)qRX~pvkT%NT489|PIkea+?$(^;GP5Pu2I_3l*)5PUCe6;S!$&XfvG}P zr}tufxLYcJsFe`B(&ml&&oy;&!{Yn(Z&laSl~#Q!Hs|o(%{=X5vyFoJyvq*QI`$JJ z9Vh>2lU4hulnY_Qk6(KdxQIgbI2xObk8<=B-`5{%5$v#IE-^<5u1Lar)#`0w-GtHxt?4MXgVe>hfb4fV5ex! 
z#Oy<=f|t2G3M@McmVt^g?F?w*Rx zv^X#-9EvYtUtqwW|JcE{#ANn?Q>LC1G!l4gP)J&V9% z>7;JvE9Y$Dwh%%FZ*wE>2&$aaCy)%ipD&MTwj5x&i>VvqngJKfg}rrX@~2nfJ^+l( z=pCT+Js{ACQaW8o$FRs43?@4L%#K_^!a9*p{*q3jrcS|a3HHW<^GWY+S4j8-noK~3 zAx-xDDN~dE67Qk1>?@cZ;sKdIZ4p1Nol^Hj62 z@6%H%l1x3-O=!*DNRFL!Nl$cIw`qSAF9YY#nxFQwl!Rw5%!-EquK>wpu)P=i6!VQEqV{si?O2G}OF zF?A_^sA4J%27(vG=Hf^vrq%Grz6EeFm``k@_JZ;X=Y|TX4K|&`07FM`)rM2pox2Qb zj&=kuaP3o22zQSo*S^`(3JqogxaI{=jmUd|54w+EuPzxdJ?QMaV7=~dS8z0Fi9;Uk zXEzdM)D@-}7zf6eHaO&5Ozb7m&wiLpl5Lsr25%T(G{vsao88#0>*DCRTj0aI5IFtN zE`hkcjUTJO4+t1&II0k5Cc?rV`#^R$;C6qeayAAi}?ctF%FjoY~(< zQwj^FqBtEfsoz_UMFd>WoB$15uzP8vJV6{chMDjTnPDLQen>oqqU3y3OMsg&>sBN6 zyD}f_gwW%7ndGct>_cJ#`Uc-R0sCE{N@D+c#1D0WUHDK61okN2!(D)gyr2uHNw9l{ zz}RR6|0iAGF{;MMF5s+Ye*jZy>qQsYq#=yoC-VPD18G9jZ0@slxw6q^lM6L(ZdpOvKaQ93$}Xl?PT4ei zWt0it$LJ_V40-}v0x;o(CIx6`h8*YJZAk){f++*-{ff<3(=fDXIlQ~H_8J_Z#w5}i zeQ7%w=!w;T+0@QM?d$C3*=qEb49)Mr`6pO&ZRgvBHHxZS!(vb*V#8+MV#0N8F~r!t-_W>avHi2_k34DpphP z(KnYN&HQen?|aIyCNjUNef1S(aK<*jBlNwfESma$kiO@Z{WSGWD-Qk2vY)5ETj_gB z*=}nxKQan>PT33ejl)H&L5N_iUkxkx@u0eSH8cw+LuHhfoxsK& z7=8wu###xT7=e~a+n;z3=c3c+6l}oUM!FILT{vf&oCyQHfje!qv*L?we)h=C5D9S9 zQci`W{IxK!&06QsT zKpB)fdhG&C6>O&);iYuQ;3Tlt4)nO`M2(>Q0DI0yP-Vyv+CK;(DrEj&nEudy-U*kA`j21q0s7=bG&h2y9)v>V1HqMUGTQ!#@v)cx10)*i3 z`4oc6o!yWswS?h`Ws8k>N4~MnP1Vk7?6Gx&Fztl9-}4b)+H9yRbxW}6;TDr(Igxz# z;_90&zy$(YBw#1#1C;RrdIpXH z4zPwc8VKY8T*z~Z%7k>%T}2cg+VAINx%fV;a%X}IT6N89i$3+f@(Q$!fx$96(jjG@ zIl7ZhKk0X3ae{*K`Usyq;KnUhdA)?M*YxY5cMt!jdpV3eMsU79x3AaT>p3H5?7tb9 zbywuH?9@85M=yZEw97TBSLII3XWmy!s1&}ZrA*&ME3M?10EbXo$4y6#pg6Es^#fe= zfM#Al6%0+Fkmy!s!>C|J65DZ40H(fgm=jcUk_9Au)=QYkHwe>SsFD9Mu8zOKh50jK z`ZJ8lq|AV5W+_ZB|3k{!ho;Vr-+g7R#U7j*J<<)Sj zc7{)&N+<1-e5u0}m~#CFgEOB9;XG}(V&ra`k(#!dMp`?8o~7%)fA6Vu8LR zH3OqXJT2CnK?TjXCO{D;=L1U5EEtOWeXd5_yoKv5T51Q?+C?$AQ{O8zV%Fnx$z#s=T!J?PV@Ij)lw@zLG8(Z{ zt903lI$lENwwLi}l;&4TGb*J5b`OkLYQBu#22Je}v!V_)W&or7*AjROymFuI+mCDcI_P>*s!m~lKqw~U10j0@K~0FSGS#zIHUROW zPSqRZ=mAV)C+b%f-^Jo#8%py&1x)Xbb#ulC(xsdm} 
z8Goc*^WBNKx%eDR89$eUrXVZkltDelvAP09U8yygK>LD%MtBP=`{Ula7p|H1zB z7S%f&bn8!;3-&5wXu)1(I0*V0_LLXRlCHs(X6o>%e%ka2!hqd)*wV7lYX43;lB4C; z!p4Hema6lOcb>0osWRj({p~Q-KMm;tgRIh8Y}*3=WxYvN1&8qQY&mFZnyr8)$&W;a zQ@U{j1DJB>39NFWz7rJtId(+MDbfTOIZsTG$7-?1U|hb2eNurYKInwx9^eT^9g9}Sp->FMta$07UOUKI21tGj7=4qG8vDmc;(q)*} z<24OmR`Uxl`m)0X5@|TVFs0KsXwmaAvhSs1j$8_|5NFlxcnZnRD>gp|8Fw3?!;2NH z7zJvJ2W>x+~`niyGzC~?1U z&}~$T%~SAEDMl9HMLW5JnDATUWAKH`8*vQ?`?8Rj;~I59I#RWE3iH|L`!B2b?o7WTf6ILkbPz?gf((g-^E;iE=+oWK*8F|S8loX+ve>?-lL03+K{2T z`@kk+3&OY)l2jE`(a@gB^f$oBzgJGdAQ-j~Vit_=z`7S2^&QD%+hU>*t#G6|F3WQ+64q|k}8lr)&x}dAjN*gBO zXoD|z`W7@a?f817H(*K`>Fq)?sCBaCbk73atJ0Mk$MYZ{@HhY(8;zN}Q3Bk%am!9= zL$@@ew4n>$h-P~bbGy=$?S>Y!H}V=YsJWP zC~9<6+++DpjoNvQK-Y1i>!{Sh={iu=b|vSUaPUJ5xKgGU+Rvah1X_fakPhNX z9?;j(8m!J^#cn~nRXJeQKSiXtC#$HPUv&yU(j}z)L+maukUQASYC)*QbOYxMzk7b{ zYHUvAYLx@-Lo9t4KR@Qu{4;LR{Drr$TyR4<==jd@F*+1iUg%olha+n*3Q3-aOrC|K zdqwT$Nk>4E_TRi{AgT9_2O5a z==TcxsAK?0oa#*ub-rEwv2)2{|I%P>a2c#r#?cP;d#F~h)e#=YUO+I|pN8|yf>jzF zwuI2_yzq?tgv<&Doe2Hs@1Y6Ie^_AEz=}*Od|Wf6wQI+0ou0w<4o^aS@Cj&W+^PT? 
z>nEjlmd4ful+Q%vQ_(ZHVx^oGzvp_Kl8<4`6y#j&AYmT~)~fo;PCsQ7KDsOH#gXNg zDN%{r>*1JYvmG)DBM0wByB0>&Q061}ZNsk_ zzfJf__^rWjHGW$8!`*86PrLDEcr(0v@b1C87w=xY`|$3=JAros?*Y6AqV_1NF4%f) zi4->ss`MfJ-ouX(ZWs}8bMSVm@yM`Q`(zAgynrKMM+z2~XAVR+l@27^pm4Yivd^D> zY5?UfT!T`KGarJ}13$(}^oF4}Icv5_5w(dQNq`{W?W+)O#_u`&S}Saq9BWx%Q@-Hb z`;v3-_ajgfZAqBnw)(=Ve92p(Wd!8*5pbvhYM5-ZFKo(})Ux7(WNR6b1Gohcq&U$~x!eJNfplE7`(zE)1oxIL#R^jjX;&``j*e`lIu8SU{U1XBi#UMsBH?3y9FBhnLCV->W-JG0ueS9dzOKL=78fY|7695- znrgh(hzF*p>{mSMX!0w{k+tEAJ%}I%N^L9gzwA&ysxYzimqZhhd+S^*5MQJu76c&f z7~VY8wp+}$9j#@6`cWVPk~E8eN>>g@W3&bK;poW}t;Bd!#{w*eJ}+ov7?*Ien~UbG zfL81|Bh9E$XR|P7&~#jGBJR~f5T|T|I5IZKk9@&HcpV1$pcY%fx0d+TSx?i1a_4K< zD*FU8DqO+O-DxY zO6QsY3!*(>R3E-8%?fVl`vE-#qZrO5cXwRzBnB1~!R*pS3WN7orG)D-JNDY`Fw5;7 z!=H3>NXOpM6sF^XS}z}<>FWJSHGa11BiiS|R7Pu8H`~?C4t2A${S()y_ULBUv(G;J z5*?DmfC=EDw_l@?y*1a;;_}`spNVdrjL-c7k8>byk$fg~Sf{CH5=J|COvugmAa<{cK^IQ16*fZ*DjeG&n&Is*17Qmcc9DV#tM2IgQ5?{1= z4s94$)b901AIYZlaKSC?ub8ER#Mb#1%pkG<@jj+w>z&ca05YLGKguRhIJx1#>lgE> z+f*YSv1r+vq2wB5s%ASN`pUubB*4H(fDoCGIM5~zR7iZ==A&ZB#UZD0VftiO>$WsP ztBz`|*YsOVHpU{|K=Usk*nRji9@ZYEm}qkjWwITO z-d3QT8^0l|abeD6S9EcKo~?X4euI-f3!;m=TCnQx6oh%6cj`M+cYK_Cik}T}h7OTz zX6%D(`e2FF5~ge}SSw9dwm>#7(l&&WVtMo#Q!J+fxC7@WA8t>IB|vbKnx};OgI|M= zF#$$hQi(8RqhFXuDFf`clz#^SsS8555uXBbH$X|Tr&d6a3_*Vw;)?=qCHuFj*RWWD z6|q2P$Wvlz6a*WvW#EA9%&r}PebrnB=yR08^YWOHAa;Q0CzZhg zc?=dWa7Q0b)DKc#OfnM6sVxykY^p7PpDH+)Q=FwduVk2q=28f$XDR_^^kA%0lT`+S-OOYTdB`9Hhck;?;;eH zziiEE#ywLI06~1Q0zKt${27EtpgH_U)TF8#XnuUS7~b;H|MZ(}qZeVMMkl*Wd~q%| z6>&wNudB6j4iX*Zp2@&wS>Q>m{~UuMrj@jfcN7h0;1an`2adbj!G!v_76Wau`Q&sV zu)DDo52vg1ojCLuRm*|swmR*qI<=VAIc?S(<*&5U{0wIN1$4^Bg|^9d+*-N5pU`hM z1Tj(!rbp*bd5K1pMu)?DgM1dFHX9ARz^Ya|VDiPCDyGgdRn-Hes`P&=Eg?A(Bew!E zZ7-w_Nqzkr-Vt&`>=Sf^!IzL))n%~R(4us2z$r#o$7w4=XmfTSIxlX(^vt)5%{9~% z1?co_0{pIn+6i3l+FXIims&s@x`cM_^i#?u{O=yH7m$&(br%{jLJg$Jrwlp+p6n%~ zwTe)g*-!jz>%Rh6T@k9*%BW`;AM{tOh1>P#$kv}mBZozdV2VTs%17-7D0d&2Z{Ils z^}`h-J7@B@b7wK$bf-W!3~3FnI&I;67ye#=ZxW)tI*u+PrHqtzLw4)J{7Ee>#pk?8 
zh?@!2qZ**F@=3$Aw9Fo$v`-D*Ylp}l%X{+>;cr2w1l7sZv8#;_Qau_h67*sa8+jGU z9r|?I*C}W^XI8hG{ep6_tFih{dNA6RJG;mV!d^sNM{28jO!zztpzVUOG}<@>Rj6E2 zInbY-$zxyQ9BR^V@pA;x$hCA`CKP`1w3!RN2;;GprBjiq`<$`xJkxFiI;pVLqO_B| zFES(KFG(8-(I*Q>yfn+p{BGu>O0VHQptQp-Z?QBQdLBvSoqgAIChn>*oi^CRN&w{A*eq3ZHV_U-|2nB-f-a)i@d`!9wh>$+bIEq6N zly?VFMJYH)O)0SA_wYocgx81SU~j?yIN#I76KN(k)Zr}{NR!qfuE;T7CC|_0vFCQD zjC+2Bv`+4dLB|fsBL#m~ME{faew}iyAf>}9bF)JqtRF3^%r$Wt^AA zZsGxCV7C#*q?_u$n@gP~bk%llz&y-%9EbUq@WXt3=1KeL=3&0EZ0Zm(9WM|u`k!E* z<8)Z{qcBILsIubL)C0K3OU~E6CaV<@lanOo2OvhDrhQfPwYbNbUZ=t^2O@Eb@@C0) zldO~TBc0%(#Z2UWnqagMB3uXV4Eu3#>BhmOaT@39R4dyKB9b+U!-LA#}fPqIIJe$VeZX@fd65J)6WkvmkxQ#X~f!(wfBo)TXR51(O5uK4zcn*Vx>4t zfc~nMMroD=R4_wg~JNDhHNedQ(6TBms-8a9s#3pPqH%CkaLvlF2_#Ho4c-Kn-NK%TmC>7MBuF0 zfSQpymeB>EDZ5v)Gj+&e{d2I5YcpU#M z6Em&-2XKw2RH-nVG^?mnDy+oO7<`D4!${5CP2lnU2r5wu+T*_;3R~j;if|K;Z-mm0 z|6(Y~8rKkR{PuErCOoIqebo5cmH~AiQGzb4t{DQgpW&Aa#&1K6#%1EiZCT>}H*l%J zQBy|Z*AehjR4*a!PU5}vLcEkncyl>k`ZD4Uj*-G6G17PZuUJhsetUzor19I3RNnaQ z8Y#c=+YMsm4~RD_L~iBrP6G3+k;lzXMndPCZ5V$RNhV8N@qZZ#7sUS&;a1z6Yvd`7 z4_IcwNv*@0%nKGJ^WF=XIc^SdwF&VQ@z&Ua7UL2UuOf)mVzZClcZ^Ek^zsNh5d;Z^aSGa@KH?#zhYaH7*15;us)it4Fncq%V|% zlbSvQO9WUz%vxi=)9m)uM#;h?Y%tikkNW*x_zNhsAvy$5ISk;rLLQxLP}B~(fH+ni zCq;t4>PFA#(|GEaD{LK%0Z<4?t5=Xv{{0+C;;Z0KSz2 z*cW$3-UGtomI(dPgBTzF(UL-ZG+@xs$RI{8BK$EN`z?_2O@{QhV=g4VwqQWFQ}=sJ z9}7NGr0p>bn#YHMCPt2Lxo8uNddcpz9W;Ndosi*%pX6RjIJhKC zP`FM$!$s&qsn9w`#pLvhInh!1H2zu|>R@o?6lvR%PRY_08t>(Bg{i4#R#Zo6FIFEn z1p}9#pDo>-ZsR;{$yucDi~3F%byg$^10Qg2FHqu-?%^rj0ny6P-fk={mX;d4_iOyw zX({&V-W%{2{^0zAc_?PtUl(x=cPydL;Hqy}2bii)&$yux&}m-4eq+L#MJfB9L{Z>5 zZC9Gzjo4a(ra*+0X*Smt`UEl&Cclbw9M-_Lq?raiPxA}nojHmZ@j4VGXLeNc{&pvh zASd+6BS)8A1vj&Ns3&C~DF*{LdKp_jHg*#(ZQrly`8Xe}xV-qIT!pX+$Y2VXBN&vf zG=p{^-$&)>e9sl-+Oipg)PBV zL%3onH{|#;tY>Y=LH>vIN7G=mz}^3Y@+20f*aCswJWNA}_HKL!WExiwJ3RczVf_Sl zeG={mLvZX37;5xm+yN;%hc6c37R>+Nj)uwo!mI4cCpZ)9XSblP7aXI(l{X_Um!ZF? 
z3JFtjrfEr+KAyb}v?ezg*{ndENpi|$<)rTBJEbk@VpoG0{zq3HrXtKm;NEV8S;rM4 zmublcz!?fPGV8x+sF}^5#dwy&1&>a&{1)RQm^_s&MWNV7nu}L*;q}RrAOou`4In1N z2dkr#%!7|M^N%{TE}UIxJ$&@6`gkF5wnVSP69?LG(%`H`{sHU(XO7X~fXVDz!UcYR zjCT_Kg9yea19yMYw-3%Cc&5hn~QlU=CWe*V^$D=zwnb`Zxoxi;e%a? zy+O^p27msf62n{pO2r`)zh*OLWoTdFkHWTNeT0*(V)H6Md%lvKvpH#Evmf8ME*$CF z_m-t@!1-o`e33QxeLm!b(FT7YUysntqSDk8<~VJE!`GV zHbT?pwfbkZEa_{h`bJqb8_s{EQ1qL`{q25s6sr?D!f>q?r80}&v{k{)$?G{JZhZ=f z0UIetJdUBXHc+pyvq{Y^*Mo4x_ zcN3e$ZT&XTxsU3P!Tv2s<`7PD7ad00>FdkB0P%nFjD*A@Zrgg%r?Za|?bN^ytA*7JbS-gNgDaY2YiDglJcHP&6)Iu@5;JWh>< zKYc5YGU@0Pvb`19r>tliG{$0&<_}u0nWYH#Hbf&c0F5@}NWQ-r=TmVU&cb;CYr-}U z#dQ>Mg@7~r;)83z{`mAE(qYx71|30kCDN3qs6&8ulX4PSqJ0GsKVRH4*7lL@ZIgZl z^iQztrdADSFRN4YHC`=Sr)4*xFrRg*%huVnk0J)CW_$mi*6eQdo4>8u?G!h%W(%lh zblt6Hsz0-jQ#;5M86`1HR%}GI&L*qn(H`Z(zGsYuOTF!Fsv7S9r%L_Z>O2jF_J7)Y z6M!hI^?&>zptyo%iDo$>F6B1BFf+`s3<4@BisG6JqJW|Z2s18S6$~s5F}-zfS(fEe zwq7fjHWQcB%v{pStea+5gIH3TQc~~l^EvOlFd&-M{r>L%_y6|7nRDK=KF@QW^PJ^5 z?>Tv8As0uEoI0W<=6$7=I0S5HY3TrD+hd2E$|aTTz_d*J?&DYJpNcJJZkyW%!#avAU)c^MP}Tz9P!i zJX_#(Ohu3;Z79rb7@5e5DKxj2Uo8bX%N}R?4j_edADl%x`7K?G5(!8TDJsUQ+Tsb$ z_wJQSGQX+aPuqm^V??j{Ms}&1);UvV2YD%vg;~Mk$Iw#iv z!`+>3!<-wO18Rbt6{|pj&$Ja$*JfTUjN-Tt53izAHdn4te!Hy)W1QE5cdL!^UBojo zM>?lbul9zv8rvItL+HW9)_{@DM7S}2t2xsu0`IkFT4CvR*L}9Yz=CibEaU`2Bb^=8 z#EKmm`MG7}RWG9pZ#0~dhi~y}nbA>1kRLW7qrIwILi7gQ?hrk9MMgODuFyUnsylRV z)4Y?o71-(Q491enKTV(&L}iS}=dtrE7wB=k)|?wkaFSKT1suE7%5~IME08ea0#xKL za1)sXV_$#k{K|QUGf+nrx}ttwzs_k{f(_u%H+U2^ZdB2dcTgfh+d!~|yb490p?S2J zoG;0bL&z`)@*V*76F;+c9f8T639J$x#$~@LY`mY{&wLv<7x-YKk*Zw5_x;#Q;Mz7_ zh&{O2c@Q?bjpH^oQe)pOt)PN&-+X`(OU1i#3-lJ;#kL#tl*e<-=az-V&*Qb|CyxbP z(9O-$g}fg&9$U|M4g4Jg|Sfkm*sFZ*c|$ zl53|#`JHe(UwDK~2>rF(MWOA4;JDO^U~Fh$NoClLkK+U@ai?CL(tHZzh_6))iN_SLvtg?EL4r}7JPl4SXr`U9}4B&pIt;DI(FWls_o@=>FSuHk33$M zSN_$WxI2>Do12$8xrLR1XLQaU1{kW{;W0o^vr=L12!{Cr%#3C!M7sOF8-q_*~ z4RvD|VXmQCiqTX{q2L(bt<)_gVZX^gL2rC2+z8qwY(S%W$!}T|{1?D~jMOh3Zb-j0 z2Lnzwc|u(zrjF1rQSmpwMQQ1#%jixvALH+1XZXA5dHGu%Y8 
z4ffDEGahD|Rd|>BrBQfw%|m{Eh?Czkgh>6;n{a6QrEjn#3Ej`H_Cvl~+hTbPK}$BD zlfS#=?*{(<;}m`~tRMOouS0xX_~7}|l5J1GFHENJT{;Ke(Eel;RZ;)5wZ8smGxJjQ zKYDll551}X8A6g)HSxW-CGvsx=eZ{_yMgwH=yI_W){;dkzA7D53aFGJ1j9$K#bBj_sy~>}faeI`nhI^s(M@tap%b7yHmM?-cqX^oc9wj)( z%1f&p0)mOel8l;6p!}g{Ij1;Skb#q`nyoJ7j|>W9N~NeALp@i<1PYO38XlKP)*c+Dt^%5t>!{K@5w z?zQ|Wc&h~QD}SWMtt(#!xBQC4=xCQwRM5!axB=^zu7sdb`3y#`3QoNVZPf@=7uvT~!ig)r@QXtNmn=i}dl!-982*a#&+vL_5wTVEKJUO&%|ute4er`7 zM-!KS$u_1g!jVDV+esww(nXnrPGzZ*vDHf+W1+sqKS5StlR$Hq-h-Y+>j{Y{*~}Lx z2bZ-5K0I0g2YbV%iVrB#p+j#DMTP9Ra0rDW5eVDH#Pbf9e~OHyj_4%dr+(-kgxlG~ zxA+3O3zF$|#K0Fp050TWKMlUkhml-k{B>~K`z~Iz4|eIIP58z5ofgi*!>>>_va2hV z;aG4aMU$NU<(LtxjdOjMY~Bd3iY6ILWSAOb$s*#Vik@;KU~p>_a$P}ck3v+k)@YBI z#(o2)9JTyW{vD>WMe7N$DE}I6HyS267kxtJIm{^39eoK_!R#KtMODxpy(RsjJNo(; z%A-tsHg!jVQg^flVJbNav6RhnC%M`JC|NPGMlhfjow1i>6E2>Eig1xoYK$mdaTp#o zMpDpv?v+A~Y+ZhdH6sNMbVbY`lF-FPqL5IoL^RLON@A)?lB>%DJsNTn%`Q7Tt5 zlZt`Xeb_B5xwK^8!k^Z{Z-J@wB~{B)4z4Sak{ntdignNxTAo_EV>KIE9x^EtmUz0f zJiCzNa(I@+K+E$W9Ii@-J_j#S%fmD(dthCLtX3*hjB?lVJjTXCEzcJ~M{0SP3ZLh= zvcqK$wOXExqHUxB)jXfL$8b~gL`gLdGv%s&|0S}$jw){lK3=k!S?`toC**{v2$ZUx zzd>@4=T~uL#7zMQkx-4*4Y6Fct5P455j3BNnA7w)EOF5RWLP3IEGip{_k(!Ht%LY2 zAB9LRzCzBZLU)cC|mjZaMV%8ItGFpyT$`pB-|wf1Coca_M^duC$5157~qr<`B#+58FgH zm>Do>2vhgGVU{`y3kqXYy-EI1_4911MKl$OFOEDc3U8%2LS0upa1WW|wj!L<>&IOr zjvJGwU8wL$p7vvfXY#a*%1s@1AoC_mIy4R`8(VR+^QomAuw$eroG-#zq~nr~dMXw6 z^6t!rKnP$aJPF*b&-QmO;8w5i80t*u%YM1<-EmR1dxGc~GG~(4+a%t~$ zA`DnK->Zq^%!RY!gDXF6zZmO-15@8C@xtw*xEyD|X{>jZBQC%FysLMT*V0)JoxH*v7qlJkE{hINbZt0=WnU}LyzBfdSgw1vb2>p$wLrzNcy?vyy~%x}BLhSS-7lsz`@W;~t^ z=bK_5cE!JUeC>^cEmTf8gbk0_>*!LA?jhrha_&Q?L#leYFFvv?YTS^9?0 zjI}&W7rXFrc;fQYV6n2rXec;e?eecUC}ARAR4Kv*q$dHR+k)^7&ghz8=NYUnSDd)& z$;XYc6zX9=nC#5rgiC(yAP*b&um>eO&qLDk9LPX`11dIG71&%6#%Ba)z*4x&u$}X3 zlz=Z>aP#kexLi;*usR$Ipw!jj)htVGVp!qB>J3+{cCV5vR`?eD!hIv!6c6{f6;hRF z65mi~4XZPbPgf6N#rJl$1{bru21IeP@;M31PgACI7LciRJg3&2(POmDvjDD0^S zfx?XJwYX8mX>9kdyw7KGi^jHMxmWrJ3`1J)X(XIqBCfXSd=HNb)7IYjp+s{mzd2Xg 
zf!=uBA?Ij;H5%M;;)5$M;RVT?II2PqKaYMm=)vvdiSFzpcmv&pFmGPgW4DcTTGaCF5!hPR^-gUZiP&c4q3K~LVT63wsn z=lk`jyya)##e2Yi1wOd1O@15u2G{eGaRTE_dY}dNnFtAkO%tGOTj$dNM1|kE3eFON zUgw=%Q#tuzDK|kTQiOmO^O_ZSS%1j$D%pjF^gUSP!zmv=&f@@4RV>?$D_Q!S#sx<~ z(L*JUN^H1blp}x!*Uves!3G)#;{Za)C-VGyrwQd2nY|s&o$Vzs&~AyMgUS-eg^F#~ z%LOy%q1gQCKpLDW4>9}k--@DL4dYuJ3#MW*?_;jZ#?ccg`wWe)X zmzx+$+o*$qX6l{|&x7|xOM5!J(>>L1~QtErvmTAa@Ib$i(->wj`jwoK zI{ZvvztRL|2)qhL!K)wzUO5>y{A4HLXVyggEXYN`6!;b7z^`C7{0a(kOScpv&(iJu zO>nHm&f0j~WOUXGQmn0ec?C^MXZe*D_$4^b7k%!F`^9(Qa06U%e)E#HrTE^qz$+em z{_qWfr@bkUa_HAT-f?O41bo;2I-08Jak$6+p(Q)D2sbZoDVX48O9ut|^gzbGo;z-Nr}>UC<;}35QRCZ0^SyvhT5>QK!#UWQ?1}b1%gcEw8yVsh z-Sg6J;rv?Ke$JE9ZsYt&+WyW%w2jVv@>S<7lXjrZoFk+?+?got(atz&k8?&y zd!jQ`+G);S(w^$IK8Dk&-u#?6$Dws&hvOvwiKf>oM)tS zF`XYv=TbTkO6PJqcS$Fez|L2t^IDmr&bXEmL#N+(;p^9AYTorTUd(#elv zoe#k&L2kjfB{G1Y@HrPqXB#^6rPH6zsnW@%W#@S5)X_OiIs@tKFP+`#442NHbOuSM zp3bh)X`u5K=`_>nC!OJRdP}E`&Y!0f|5!TDOXmPO&%h}`9>li~Wk3?0?@H%zI?JSU zG@YBJa~z$|O6Np6SHlTOrj1S1W@jH$(%9-PkF3DC4f7CXjJ#@W+^?7x!)Kc6gH3}I zT|T2#AHCIkvCF5Q^qFO_hAx#pBb^5N)Gf%j_$+D%2W4Bm(_HCVsX6&sy{Ec-s_~s0 z%5P)!&T{#jRek)e-nlNHwtGC_dvq$yOS-t1Dd^V{*fmZK%E}u23kJ;+I+~u=e z^$E9nKj`vVp!(RX-VeKcvQ(c~tM@9G&z-8z0IT;CE}sF?XVxHV=sM{$(rKiRhJBsA z1SKQ<)v5f^1s!o>S;_u*vp=qzM5lGJ=pCg5$5#!o?ws#|sgFO-3=JC}=xEOQChmy$ zS`rsn5=u2QwZh#w{_=-vm^yB+w^6$oQ#k_h-6y`FuEep2-2FHV-V0*Xt0FJq5WPH8 zX>62F1iE%H`$i{EVJq1Um3fD~;sBa*1rDmelnKD$)pW1mnJ`6V*x!1AXL?=;8-Kz6 zJoJ?~qS7B_@~AVEhEB2g6l*oqfB{c(vKm1Skv`=9R-E;>1Htkb&_y_E>-Lg39X+3i zXINa7@BM4x0N@?m6UysuO6FJLM!&vpz#9p?b;s#< zoR%wqR}ZI0#3+mcQADMLUj+|?=z!$pu<<&FS8{SiV|*YjZ*r>|1;PfZn+k!!DCZ%J zJ1}m%;N(RxIH_PvY5ExqY_OIT@}9F>8W)mWgJk3K?iaHSOCYIRTL};U#L_< z<22jgUiV|FuB`jZQ|i|}KW?_aUg{N7O|+MSsL@b*+-{OHeF<^%wt?hx@^#NHG; zR@%IV(Iz(y%rat^i7&EmJ_^^``mK~iaQ=u4)O{Pyv&_r2eZzGY9>;`8)zx}=w$8a9 z;nM;pEYUl&;o_c?MXm!c$^nnE7#FM%?8fSGuyUJ7IQ+FZdbmpyTMSJeH4;P8#5O~d zmyB;Ax14Zy9TK*|&2YYpAaO9?_OZTe-?qe~;x%>ZS5$SE@1i7xU=~)KPy4d(J&?Hy zQHgt4qEbGVDR~VsC&(@y>fRu1NXKPs 
z)!d_-=z|TqH<=+0z#6By_vanAy(1hZawO>0V;^p>KHPa<$?h%!{At~n*Q{j6mCp;oj)9^ordCL9h5sB`>XsbZsi{B>Lj$v5?oaWukqE+7eCU<4Z}^P&1=Y3(WGQo z7t+uDs?s)47T|^;T!q6^+&Oh-^Q4D50ak9J$mhNwetGV?4 zP7mMbyaFq}!vG)M8~kzF!+niq&>j!pwO)bw-(odxw+>%4)J`CZz(%+ zs;JCk*QK2RUfBKb!=a8I$mVMRfJC2O+S!Qy>oQ7s+9LuE!gmkze;fI4!?*kui#3R@ z55E#cBGv+D`bosTEA0pAMa<+DcF&|~o>NhzZL02s4NDN!XuNCLh<0gB$&QWjzhcCt z-ms!}Dm+57#Vo9K`QS!4e7qPXSb%TT$Yks~YyOTs5tPP`qOH-d+_^=^ zdg$8h%zaJl-GE(ZK!&3REnnv~wtO+WQKPRcd*Rvz*B;9$+yH|+xQ>LpV>xAS`Wl!~ z$!m1!2udMFl_1aR5yfu?s;@9 zf|M=~0^$E1j~-K+NSGv;X)tqO?t^(8W)sX|m~$|Vz7)bA#sHH5GXW+WW&zA{m?vOf zh1n1DE6fdF2@wF(8^!`N2xbC|9p)aGM_{(V9D{Mfh_8`9j149UW*m$irWj^9%;PZ6 z!)$>$2=f`t1(+6?hxx;Fg$aghu7aokX z)kNv@x=pn0Ui)4X;O2Pfv(_W{%2q!CCigR#gZKm`CogVJradVyXL5Rez7R7+zL+d> zM4m{;UvH5nvhXK_PO*tOFq83zXkvsIF8YaH!tCY_5=Jpd#KDK*gGIcE5ko|*^cjfX zVepGXj1UowoMYh{B;)qW%I}w#o-SY~r`t!SW;vw0zneQcD=TL*Bj*W+$QE{y2}sk0 z4kizOgjXl?YAKpYdpPnQB9igv9!CvL24sUp{{})(*GQEpqGgOLQ_8G?hf(N(MTlU2 zy^s&qrty~!m~vt3U~=&50Hp|HBA|!|JUU6)H0hTip`Ib*P6amkaHZmxDQ1XCp!r<* zq#=(ygl3B=c*{l%Vm?dez*sj_5^PQ4_ZNXtybNe9e2(OvX`Zx(X99d8-PFB|Wq24SKPxq$Rcg}IVa z*%C%_4^yf9qW;#VjFm7YA_v0NTh=aXT&wTkn)=R04x~Uj;*nZYfU6EAV&yjC6SZeDpSeR<^)8M$i z`kRfi8ZPn5I&?43_y@{kOJW=SBjttGl$Q&hRO@~g^2$S=4$v*Pm}Hu$TgRYdPlEK&#ce9-|DF!~Qt~^^zlLL#e^O zG#Br5Q;M^9Vo#@1hP{U>-PJVYu3B1a9}VRwh!6!zhFY$yvy=bc$pH1MaFR>UlK7|x|*#5A~_S!@{*txLrX*c#_V)GTO<$G-M134S)+vpFwBP{dV zHb9E}cH2ZJ*r#@4JO=ws*mGe&0sAi4#jvYk&x6ecoN=%dW!qr?$TpK9aUKaj;;l-B z7|{cNImJ9{f#%by|uk;$Ie~5-`=x#-#h!?J@DSaLx+#NfArV~AAWTF;}f5J`tQ$9 zp8EXs7iYfw>T75D*@|=De0#p~yYGLv@Z-fxRX_cFx%wB}8|Bf+)62VY6Q8Ednzv}# z%Ga;;4L7#A>E^cn?b_ec;nv$a>N*8<4(!skTld@V=+U!R?>_pV;1ENo(PR#@Si>VC zqim^@Ca0xmOqrTFZF<&>?3~=0dHHt7tl4ws-ZggI_z5W!uO9xldGi+({I~J{ZS(m&=XK zz%_2@H_A9JHHS}{>#rwz@kG}jXDw-hK671vLWcB_{(Yn$(@?TgP~ zjK8=ht6yRdal!o$1?gI2a`XS>QFBx>1OxmuZw$8wkt zXPBJra_*zT-Vbk?ZtueZEo0=n`He#PX~=8CvkB*MHctm1ipR}|-aNtQo^+&_R zrcX|vF)3YE_~4vbHSQsk?K$*LPS3S#p3#mej(h-?;K-7BBsj8VMMmf5Wy(5C&6_Oi 
zFMU#;BQ;NrlbS7^C`TZFfqa>#KwiuTCR-$?&K1Z5cAn^$K1n2`W{c!hyCW|R0eJf5 zWlGoZsgAsSm*1%LwCr@(%W#M4O3cZYp~()HooZ)Vfph}#1>yGh+Mnh2rON%7$`aq2`oP>AHt!Q+D%qKWYFf#rigzRIU@ zVc|W63ky+z=RJk@++Enl+Z!b;TC~U;^GAJQQsH96?%vIuG^!BkQAkqKVkFdcFI-%R zR073JojTRWr^%e$+}XLgSLFX^yYZ`c6W7&^ORd0QNCN5*0s*fB6&VuTJ1q;$Sz9l| zGiRjdIPB5jt5kbxmM$(YFDFlznXMa?>d2lv6+E8@>BFaAYGzh?n$Di1%gIg8)}_x$ zpX{)wPRb(FkI1HjVHw~bU23*2H5ZZ65TC684AR>zPF{Lynl2+RX9i-+n7)b1h4X2oX0Y z7Y$0Krb;oypD`m9v;)3K6Uvw<#R07X

OXP&)nf^INfn@Yjmk!&Z!Yw)omj5_8RgdSy- znCXRWA#P)QjK$P(znJ8~jU$6HC*^S(vq8kjaga7)KxN9j9C>-@=v=^E0PpyK)$wQwxAN$r1>D z3`SL25u!~a_$KD0IkM9GVIVm;bq3H7>ph2Z)Yu>Og zEbAXGjA{yu#*C49C)?8oU`)m_Y9qdLz9R?7kQ{s`B++SUd2&xrxaXjp)HD~BkV7xh z(+y2Er##^lv99rHx-0WxxX(2wUwa!EH*9d+AZ?@`=aFpB!+$(5OCB(2RCH3j9K!q8 z;;!a`@co>>l0Z3`*%)AnLyZQcr{>B{KjK~KRyX)$s<`E$W`@trCC%=s#|_cqsEl8i zBZQc&ro^z&kxK%P!jh0SeypvRqh2Gjr%Jk|#m$+FDKn;$s33wQ){`n%5B7}Dk4>NC zm@*|jF9~ENV|**(h)qmhrX7R&tYi!>I5ibvo`cd;XQf}^|3#ztY(!1X%Dk(7V03m` za&BgJOpXKjo<+Xy)bjTlX3rXtje%gAI08y$rGrcB6&mGU$DrHWUc_V7(s#CbpmY%r#qo}Lz;jVT3(hnSs^E^hqwwp*JTK9w`3_%!S?c2AAU z)>3!z9tB8cD-iF=np4{-+NS3)q-FCEB8O$D+@g{|{J>HeNIno54CkB1K;p()E~7tN z7&tps%7eJJlzH94mWzZ1ONy!SMuZBd@RavA{(7Aae+&`?3J9Aj+K@BU2U5IqePy} zC5fzydlFGXbPSdfOw46STq51gz=5c<%rq%!uy!GisVxEAguRb4y-Rc<_VaG<5JZS^ zFKOr|H@uLa3*MZ(ewaTx^3YJyC1;QF=$GZlpGwgBIx;dah8E&|S=z94m8p`MTU>V9 zkPJpbA3#W5*|BXy%6JZnNleXEYXBS>sB(f!?!e=(=l`b^Xau|le`R~)rFPq6iDFqR zG)@j7tpSFnAYP8-&zlb)BN@PMS<(30Qu==7>#$hWqFuWLs zh?1&=nvUC?+4nsCnPVsKWvYD1{}AESQr~jIE)#_0HcTL4x@wdhiL&LV5&Z{iE}Vc zm@_b^U_OO84s#6VFw6m%eK5OW%3$7v*#xrz<_VaGVHkfo%wm{A7&}ZB%tV;sFi9|j zU~DiwVRSG}A*j1P=}srt|+Dq);3$6@xtyb7}pW;skDOcqQU%s80gFoR%X zVZvbyFg;-cVLHIHfoTC#{Q>fX`4r|b4CC*H{VL1`m8P1Y?HrgQ-3S z`0!|O%a{M)PuWFVo!tKHJK?MkE6)wk9`fn&BQ`PmC}@nb^;NZDTAcs9?RV8jElYRR z=RXh2eIV6hkqf9(rQJ37tqeG3`X~mGO@r|LoLHLIf>PAv=z~ zb03VR&lrxr-uNC)rL+Zbt z-$?-sN$_||9c_0^M_S-{U$hjDf7b=7$A2lBjxsw5h1{J!zh33!to!Eds~Ytyi4u-;W2ne52A(7nTGhtTYEp1jd##UdmoVFA?|c5uRFqYFg6$u7ggbG@af&V zx9H!$zZf-Yl*r7?6b^?&6f$OEA^r=+!w)|!o__ji@!D&ziL$aX@zF;giC=&Hm2(0a zOGd^^CEr?z`wtYh5Wah7&psqy4=LxX3tLpro;`0KBb-+bR3B0fTte_(y!+tgJksKQ z_H4d858+FH`3_%2_~7TfQk@~gXV0tjIful`9@qQqLq1hZpOJz5`4lkzfkPP?hfuzJ zm-%O8WWgVC3YdR2!ucSMjL-CZvM%Ah8s#6_Bi~U7^FPV*<#P$;I+C$RK@uj=!+*MiKx| z72khZ6x@2yfkTL~6w#6YGmiwB)d=#v>b*nj0-ir0<1!rS&P9hSRY)$^0095NwZjio zRUaxWV>sxe{4!#DRkaLee9*UiRh1%xCH zV_Cr|&%&reAv(GlPq;l`Bx_v7W;o-yr(-@1nF9DLg~9{I6Q&7_FHA>;eC`LMgZV8! 
zHy8}q--$1slas|=cikmn(|COO^5x>$XP*_DHf@qT{^5roiZ8$XvWCxdx!kf5GIZnC zCgk+L<^E0ClCfPBY(Kv}V|tD#KowUVsPNb1d(g}j-8c&xlT||!_J+R+VFCro$M07O1 z$&Zhhv>Q5fs2DkNB+g_RBgT&(FBYWr6H}&45!0qk6EkMa5P5mnlMMMWfBt;&q$5f^ zcUOdXWr0-`O^Fjrr^ko~bE3tgc{cIJLW_8zFkU>eaFW>iaIV;M|3Gng?R3%oEk*R) zsfeIGiip~+h|v9t2tTNZaqlT2=9nS|o>0WllZwcP8F5+>W4~6!wp`WvE#os8zGf2O)Qivl+e;nyQL;5d}z8vY#BmG6U^nJQwycZ2# zAB+80cI>}ehB4?f*n_nTjqv@}LOC8Jlz$Hu%9)u$IlDqAl`je9;*lEo`?Nv&0HoI= zeI(K+B0ch|%tZRRt%Y)5kWd~QDwKcC6v~bj$nzzke0iicJq901UP#{<>9J_K5$jHe zAb%<`h*5Mm2qiK|DC33-<*u1Rd1!@Dw!9>iV@GP!i&oeR+!o`VPWV2c2b!%>90KQ8 z;_L*a^_@Zq$`#7cdxSD`l~7i^ER>h_3FXL1xAZ!sHzNHYq@RTJbC7-+(yv4MSCIZK zq~DA5D09W9NdG0$SJtHOiEpn5B8N=ma4&LLj~sR*htH8iWot!zAEby2LltpxrXnt_ zP{dC!DdO@GTwtTf{#4w8^u3Ti3h9R-{bZz{h4hP&er0P#JQbvfO+yv2bEYB=tx&|L zFDatpNNsu_q$dS#Li$^fz7x`SMf#pdAK6+Fqk|MNbEqPg&O}}-6tVp!MVvTNo4&4S zNKqem&zP9L(S4(1rPrW4x_1c-2~UfsKO>k`<%f9Die zgxGil<3Apb*w~aFw=+Wj{yJB9LSjr@LKL!(i;0fv8=caV5xR5<=;R8IPqalRM90L& zAUz@L-LprJNEeKZl8~67J^gO+Z=2E^5tu_@Kqp;i02$Bx6B6R15>jrxv7JA{5uv*) zLe~_ekBVjnF$pQR+<0Raze zc{urP!n@re^Y5D&n-G^6o7nHR_BXkTKnW?gckeX__%r_)S%gNllyHB5 zz&FrO<{xBD1c?*l5)vg65=M>wfIlTR$VLzn5))I}_v?4#jW+^`n(!23;}lz;I217$ z;kWhcC$a2M6CUI1?Q3qOk4uP61eN=Blvv){&&3{5mgwkUPhW3+R7zq>Vtibp3ME8{ z`ozZ=`o{F}@@O{L6+l|W$3k4W!c)Tf_%&_p;n8{kAS8&12~l9A+VE&os6M2rr_T-X z2#o0qUN#tFVrs)vQu;;@v9@aBZA1w%Da<`Swzm9~81mf^Ys*GWYz&Wy0=p(?$w5ET z2MlIw-n!M0l$gXQD|oFY7~$G8z^`>!Vl?O&?-oq@$I2M+H-*Gl(Nx@nW&Wt&gjg_Q zd|ardUa(vKsOb1;OWo9HA9bI^I+;VhDrE&#zJ*bzFt{j#DOAm$8P63UkNQSwXe?JK zqUw$3u02lC#&g4l4U^-M1?l~3#vV`4i4==7CZ2rqNwIF-IyqK&<&{_Dc<}YtUl$xJd{FkZcogG} zjTkHJ-o0BKJa|x!4bGfCDNdh0EzX=dBPuE?#JO|l#P{Ewmt%t;fBaGW^2;w`#U(|& zgt5Yr%WiVEPbYMACUkUYm&+%kqnnG4?tXN1Pl?vbCK05(C59^d#7yO=SfP9>UQ)h9 z|6SY9h(72Lll+k$o$d0TNN+@X8`38r{aB=*iS+j({bNY~GSa_;^e1cjnZL#aBkDeZ;&v~S-YjYx=gQmfeBze}G!eeS?}dtLkX{_Q*6(b2C}tJ^RhZWqwKOP6k4 z`n2}*y}c#g+jr{RtsCOD_UqWEN1q;|L%LKy#fQOodPMzedDDcJkA59PuV`XVPU^n#8UyH-|NGCdCpEWED&&5O- 
z523^FpL!Z0HAd`m8wW=1()NbUlK*c!SHu2VWfNX*p}47I$Bvy~(*(kFZQ8UcWfcg?{Ia^XJch_3gLcevQ88 z^f%vp^Cj$)7cN{lk9p~tPe1+irM-LimS7)1kMQtta6W5aZoOeX+>$~%{2uzTuGul) z(6wvV&ag=MP~&;lm2n6l6T6!5D%5XOL#~OE0}Nf9uw*^D*p;D=I1)lAD_wyKLFAB#f&G z+wCwt+O};Qg0+o1gM))fJO9|&*zU|1?%xIf4?p~%U@WtubLY;Xpy9H!XU{5F)2*TN zp+kq1k3arc!ioBku(NJXojRp_^2sOioj7R*V;<=S&t3TLyYH&NcbAc;^XH#`R=)rK z`yW2|;Dgt`_~MI`DC;Mt0iF+_y@djXWlZ~f;m3GNL0L71@N0&2)eP%IGlZYCqOH04 zPNU&LoA77`Fe{otohzEr{51QIz`twPuBE8Q0PsXHaoM?Zr^JE$f%PVZb%1py$uESF zZ31mr+DDHbRcP2YSRb_6KG=4c=G}MSl{l~t(f$?I1J>%fgj@6A{WAQk8ICqgCE@k9 z-*y)-UR2NxN&YvR&0WdIG^_)xn<;O+@rHsrS1@LlZJ2eyI>EY{G=zQMzI_VY%JJjJ zrJFcV29V#_PFP2TZOgZcl72!_q7EvG;T=Wk^EU5MRg~UkxSx5yqCEShqWpOA;!(5} zw*TvSB>Zo^_122Oz`#(9;g>;PDyR>M17!~DfOI83tP8?UH|zhi&pwleZql6eCcNYY z;zGDdfAZ*>bBYrCp`zH{$NAz|f;T~D5V8+A>{gUJfJ4`9iW2a;qAdFqx`FEIRo4SN z;z;-*>lMuqe$8lgq1k_=ovaJ)(82DyrJR3Hcmbx-?MFKHroZCw@Ls1XQ*v=?h8p4D8?p`MvU(qK08P0xzX(?yipOI(KMv23} z6(t@x!~utm-?+!Ps{1>HJaQFe+Wqv?PYU{(I)*rCM#H0_I?-@($K&$l%gT}09#huO zOHdw}8KwL)$0h;0^D9Le@r9xcJ*6m#!~y5*%k~VO3D?>)WfIymWl~qP1GeX0;3u}} zU!iAf00#*_>U$~HE&?Fei-`;6A#Du@+T?rkb$vWE{vxlFmrfqoq&z)4Tv_XgP#({Z zlsFK9@#VM|z=cE7Nkth1m<9lcn)XZ_T<_g1V0+-Z)=mgNx=gAKtCc`x}NZEYFHLu_*z z9vUvJ1NH~*b)w-yoA6UkZeAFzy!bb(@+@#z4;-GftMKQaSCokrz`=qGNAAYto_AeM`)fnQSnfiEWZN(iAx`_jytp#Dq%kwPC zi+5R-XXXHhSrN*Qm1mWY(42`w3UGjqO&R6Zo+*>qo}PN$j6Bnbd7Nu7yzH zuP>L{5q^yCmxhLh24IX*OgtzTX_IG&2gf|Li3|PM*J|T3hOxipn1Rphg<;B;1!2ld zz~KepumL!{xiUw&a30Ka9Zxy%oPjzqvwsE-+8CqTTWaF^^ryVg_}^l&bY)#=hV7Mb z6E?!G;X>I@dCz$bvZDKSbxS}blIyi@t?qr(bua35pH_6(WCry=+u?(SP9I0#r4zB`-P4j^R+5+1%abSC^Upso@zHRgeKkCY6M2hiokw0#b{3nIGT`tA zaFFd8I8Y{O?fD7Fq*ZxQ%0sy}#XM z3l=PpIJomyeO&6-3EwFb*cN{4`DpixHDe66=TcXDu9xnb;~&D$x*srL0P8_{=9y>Y z*o$KttPd!&X3eU>;aYfHt4;_HWzr_fq~B`Kz=2N#v~I|&9qELmV%K)s_|PQB<%eNx_ zHuIw)Pic8mw!Zh+B<0OTHuTTx7=!)ubMs@Atq)95K6!VGa`DF>kD|V>d;I}>DV5^l z;vcX_NxA#(yMMg%&O4Q?tSm`)%qt}?5eK#luKN%+;!z(Lu0gOLCf{?6#J0lqi0zn9 z(vM>#_7lXVfd?e`=`+^X?_K|4UTC{dV2$$R-~RTuUkNAnjY`8dt>JLT9d{_$yC&;_ zeFk}+V;>CbN&o|`CaRuY+K}i@Y0jm 
zFLsRW6dG}5-n@CT9*6_wD%O^j(W6I8f7StU&dnem(OiYYov)L5vPLcJ%aHi34 zpsn$hRwr6JVf!HLnoU09oPm7BaUknXTi*cuRJXN&xt>SLPvE^2Du4jcu$b+6+qP{I z2hxY@hG-uWht$+mWze8OlDD9fm++I;q&aaR+-wJwKlJAuf&9a<_I9)bwgI*Y=1Un2 z*ndSn#s2>OHq`qXtUEaYG<81I>s-$x#}670a{QywmGGQAc~ZiIx>Yc?;I3lX9#G#3 z_)YQv`zX?x@oD%@94JRgQ_BCBUw&D6@x>QqANa3-{Y$3f8Ug6!glrp+K5`yl-wD5a z0W|d<*Q>v2M|e5@q5MaCT*h~{H`;`S^-j3}K9yq$=t-n(MO&Bs8)Xgon>x2C{@LdnMDL1JnyxvE`ukk5ucOE7Vnjvh&i#)`(aPPhM zDv((U`)T)wuxW;AC}${(_>g}%C!!(j#B25H)m&qI0-w$PUf7AFgkKx~gWko&L6eIb zE*cKBNpH#~^xblvK%Qb>>rP+7%6G!gy3xiE)a#J%$xEa;+Yn^N!+_(Dz+MBtJO5Kg zxYtKRbwa$j#y)P`I0gH1C4Xx)cefdjyhL87Kl=c}uIWcGM|$i}z%Jou8QT08ea|xT zCHo-a!8S-bQGQU?u|IOJ6Vi6emMzL7k31szne^5S@h0!E?Qq=0ck&GLV_yK7`7}PQ z{-bm!?0ni09`}Bc@N0%RXy#gV!aT4iTINF@V;bT{c*$GaAv=kS20O;5PyIoDSD~)` zmFqufUvmD7{=1mCkp8qaJZNip&?ZeOU)UCCSU2=zJy3SCowCj-i>OPW4vM&t?kMAr z$eCJ*>$)WT8vkRg(3NzexmKNUe9!R@@uFUihP*|4&z?O}?ve&<1N%;nUnu{`2kej7cap}ObCUL~2aW}_df+={K4mTC80HjG zuK}F@)36f{$^Y)WQJ>GXcdgyJ+vIh=GY;#5JWo0kNAf!O=vRypoTw}ItK4Jpd-^8A z_gl{mh`>Dv|C5GgaPI^6Y5eI&DMaV8efI2X(M0(*YWD0?_&%pnwc(QQ|2f;!m7j;p z#@8<>56_>pwfwaF-QX@{_INLI*{A-DUEqRKQEd;G?dh_cxNKjS-O*)7x$FX$t*EvK zZSK?3!PsE_;65$x@8S5ChWvchu_9`EAq-Z*m5VU$58tU#slOhG>>7MnmOti#di41T z>FMc1G2V~GntwF>^)MWB-vm>`r_B4`Rf;kTx}E_asya5V1yiRG4BgH+tPOwoy#(o6 zFc8-cI1S!}x!p{(=_<-Ij5&XVye&sxa1L_m1jgrUuue7&W8hfC4}uB6c;oyO=s2h+ z<65W<>$lW-Q(t`S2SquARXKc*#OGVYzuI#vWcXT?Sw;Ct8Oiw)VW81;7_Kq@uaK)h zK;|BW+>>nxD z*%wh(b1cho3d88;GZwlk>O84yqmGCAPU@(r-=W^0IyOF6LmvJeIS0TTG#m4UY3zU5 zuW_EsaygDBO>b$PNQ z`xMMyQ!&mRNjNlESpOUY6BhPAZ>*dl*R-i~=9;`#K4F*u7^wH8{#^1SV4$vzIv(;P zbyU=KQcpMrZ3NFgSDkcZKaO>mYxQ@t&Wz$gi zO8qf)e(wCpy)4wxP}fHt6Zx^nHktIIRzAl;ntqY}D#vNs_)UX_DY&l8wZOGl&!o;5 z-_j`y(Z<|iAU}?*$B)>*FWUt7(vTnP>FGMMeCShBv92)E9S)XFKBvux`ZnrFs4t|R zYRf~pzo|1k`!jAPKM9>DVBmfpwh8KN`DplaT!DK;vI`0drg02Pqrt&-6VieY*Vn0Q zqK<>QGU}PA-=Ln1IwtBis3W6(Y2IYj{PXTig)m5dgx*!teKSAz@+kvoIJTj2hl8-N z{&(CPrBL7XGGO4ECfCNPVJ140?!E ze0MpLvVik_j?alVVId7@zrQw9>J6!{qVASDL+YMayYz)@6V#7TS6B!gl|};=<}RbD 
zyTwz(|7=_Mwl^E{e;V5&!Y5mzy$*fOFF6Q@yDU;PcPA@Mh11{_>w%xcE~Y5ZNfoAde9IK(uwtOt>@^@e@UGw zb#=vnf%?*V&lU2I<(HI{q(1)m<0GjX;&_~Jkmj6Mb56~5Q^G-7P#!Ui5B0a)2cWfy zhWL^3T)fth)sbUttXE`Xy=4aJaR2@HOSwn6%l?n@pS(bNkQZ4tVd29z@$Knj$~UJ! zM4PC`k7yI*M?MV|tnokR<6P(9nl5R~`4!rtoNuvh5+<#F-0OyQ@%D-&rSi-%<@^^P z*776T1ohYC$43{8Vet+2|B$U5Z+B$*&_#S66~$5AY^j^ZnF`8%_uVJwcZ7*@k?nwR zkOr(D(&h)}3FY9E)0NkXqUBxyjUU&{?Wer_k6fkV3- zXQ@l1UIO2lDqMdh3^QlWlys*&U_Z#bH6CDDlnYv0ML$suz04Hwaz4gLJX?o*o_=di z)seIU?U&;cpgq*_aE%@x{41;r>etXO%W^m#Al+F$4aa00qiHt!s-Hp6zag*H&{uqe zvN;v_tw-`7+aUAe*pBq(S}u8zYw6T$uze6tZS9J6K)JwlgoU)A+@{PVoZGi=zaIs+ z`mMqdF55r!x2gD!ZY0ZQo~&oWfO7&AoGYLV9z0mWMi^)qhi!oI$Tu7(QC@IgBG$s6 zM82FWT+burFXa#R(NAN3C$T2aKA$p&x&@qBAa%@a7le-w>7cFga=c8P3dilhVHFC! z-m(dwEMMbi(v^mA(6BuAM>r!v$_DOLCGO-o))#3_yeX5h_VU#4EL)b(av*D@{BH;j zZ48WW4itRvDA$TPF65dM`SDMbEz2jqTK`L4BEHO%hBA+Bj%}NGlE=7zhkGyZU4WDi ze-Lk$&8H*ruTOV(`m+vbD0?aUSkE{Ug8cUl*29({53Zm5(TCrf`29h{_Vrux?W%B9 zcf>cl9yQ_`=7badKr0~0{>lLJLKG${Q4s1AjuflabIH;=2nkl9*AeFmIvdK zr#Xhx#xq)*qAW|nyoqxrt|7dSVT}R%I`L@ZV?q4^%VJxjT%sYrGA*A}Oiwrm-l`7C zq@U~ON7!2^o7j#BE5{Dx?Za=bQ&!}tW5da=`5WhtoWF9ez`2SZbETDli@H|M$q768 zpXENYAYP7fe!v{;QNYYRI2Y&Ki)#^__i^57#=Mhj)ErY0Zptk_oWpPq^~yhHD$K+E zsep*(nhw_yIA^@p=( zq@J5HjC44)`$=U>VT7Dda_r7=JLe6YJIzn;FUv?isVEyTuf=!XN~3^^`lTOMJU7X-Zkmy8LYC8s+7?`^){@kIfmV?0@}f7Qb0bk8fAxSdH?Fd_%d- zF&FED^3)v`y4ja&X=yXQR{x}(Htsr!ev;+0KjJ)rc~dTEuak-maK&doNtSW=YMT(R)@plykFuva|=<^A!R=9Q^X}6b7ej{JmGoixc?A$TcjoB zWzNF=4^z_fYXaSaEV6qz#87C25BS@@1xN(GATMKIyN?baR1SxU5>xr|8w-< z=;IXHnYX^V{sw(VeOG-Cy-lB_pR8Y_->LsVU#)K(bX(B(LBoQl2md4Z+2Ad~hk`#1 z{yx|@q-#h-$i$HJkb6T`g}fH>VaS&uKZN)i{0+Sgp@t~KK*ODe6vIqIfuY#2%W&G@ z85$EhG;~hrGodesz7~2g^lYdwnv7$O(~TRAUl@CvVoe{Jem41=yPMaWM}>_K%Ltnh zc7K?crG=%V38YJ2OSBT89Xs0-mt^)mElLjFNS8JH-`p< zb`Q0L_6VX5X;e!alLIwDz&TxYgQ5mUt%(A1kWv#N%jW5e^{ez- z^{s+JgQ9~51H4mS@o zPctt!Hw)_(W(vy*n;ljbwkPb?o$@FU@KBIZZjAMrxOj)=VxVUdF(Cq%A~+!nbv z@^s|c$jZoHBZH%&qJ~E;0(HM7eQiQ;Gj9idoPMbOA^l_er}b@uBZ4=Dyb%&<=w}#f 
z$S`;q?=TuoiKZ`1ZOkjfh5*`@)(+MOtWQ`SsLhTM<5Aw$2*0SgQLCfYM;(g7aa&>& z@@%8O1ylXm`i1)E^}F?_(b|kb1A=A+tqfWnbSmhZpq9big2x0e41O-;V93WI)gc27 zYYZn1}M*(B=_v+rPrU3O^ryIoyCl{Nf`f zMHEFWjd&%ZEaL5mqYOa^2pbrU34(e`5FccY{HS9MWGgKJ- zLPNkU!t|*r+MH;9+kDdOG`A0%7PcyEQ`j3}yTbN`9SA!d_ET6#O9X0nfMpO`_HfH+ z%Q(wK%d?h^mQ9vd(Y^z$y-<@w(6&cg$64p2ZGUV%Wj$kcTF+T4trx6S)@p0N@Im28 zXz`=N_lMsck&YVmK->5}a#~bz)IXvQ66RciyXJ%#r(dbxsMiGtL5|G{zB_n%@XFw| z!5;;m4n7y`9pW1j5YjJXe#pxqZ-;~$h8Z4(WVk)_@z7^Ow}l>ptolCm=g`)u;eN&; z#u3H|#>tQmD~&siM~&W)0GlAYPNR)inSL>~GT&_OfSTwjxxJrxi1|))3Th?KJm37P z`F-;_^S72CEtf4@t#^jc34c7iQAGO)TLb_s#Mv^iM(c+JoeJ(8G9)B7Bsp}uX_sj) zYWI-oD9ZT6)ETWP-aN&;*8Hw{LfE#jiIzf3u63pLd+WpqOJuVsYt+!Fzel}9yqDv= z4_Jfsh58Npu0i(&y%4lF$QjfuI2jV5U&zpqA44V^+8P^~+M6Ox{VgTdJ>g9wLL){+ z*rLP}I5!Y!p9}g|&?`Y(gWd?*9<&RoKMDFaNC^rIP6^I~TzNM5m*6fTks)`Y2iO#{ zJ>;8^%OPHdSi?xe-wj(0yA9_Iw}o~MO$!}u9B-UrTw#0(GV?oQU-JO-aB~ST=or>D zY)#lRknus50?Pm^3fu_%VQtdy(I3~JMc)$;WC-dPG&*Qn(7d2!K~Ds|9JD*=Ltyc9 z(2c?4f`1HtJ0!|5$#B^)6;kQA@k^tZX@q$kxUD0k#~l_udX1ZzZyD}~fHfRBjSHF> zlopg5WDhzPbUf&c<(%b$rP|_UZDDPL_8JITY_Nt~W37Y0cjK&S)-0sFAd(nuj(ry4%qZ9JC(s;pGWvoVTIC53w6OrrC(}5~~J%3T)FADrcfxjs57X|*Jz<-zm{|7%;Q!oGk literal 0 HcmV?d00001 diff --git a/lib/python3.4/site-packages/pip/_vendor/distlib/w64.exe b/lib/python3.4/site-packages/pip/_vendor/distlib/w64.exe new file mode 100644 index 0000000000000000000000000000000000000000..fd37d9ee468d5fcab4ccd51d4e85c48607efd310 GIT binary patch literal 94208 zcmeFadwf*I`3HP9yGxc_IE%0lE|C=$MdKwlUJ{q+9@vGmvQbdcsGw*t(uzvMS-}dD zcs6a8aa(%P+E!buw#7@Uy~s_)gaApn21E^_f`U5B0S%zZBA4@ipExMZ$obR1ibN}5nciii(y5qk4?$^9`-sP={-sk<{ecs@;*L&~1fA(D$=I7`5(oLUv z@l&2#rXHJ=`M>SVdnVnAxU#i!Qc!$vo^+Y`-aF}fe6JrjYtrrb-ZJ&*B)dqTnJ)js z^!GmTz3zuqbEwV#HiD@kNt*rBe$wL~4xFAT(OLn$)60C+IIC;JpaFd#{d7tXT^iZNoW63{NW z@$K_3K<(Acuc<xc+6LflnC3jhOiq>Oe|0n5npjI zF~KY`)p*e7<?9Obt( zZ&Bkjd@ePv`P?_D`XMiKzB?M-vaeNkq-0NowM7yWPReWB!>o%HX1M5-YCn;D1bBQ63(&zfb6dXEE4q}V&J*>3 zt+Th+>~ya|)*NQxcau}87#jV)KlN4xk`4L 
zo~5X=ike5Mam_^Qput7Hg6V2vuCEr$wHBFJYd^qFB9?1t9zZPFS<+^D9!A|rBD7Cg zy^I>bngkh4T|@f|7MxhRrYDR-Ad`W0i+oe5STtKKFC(j@4HNoW60^4LglX1QbV|j2 zo69s~dfwcO_7a{~P?WXuKcjHTPGS-Bz#``GKV$1y-DR34kTq`(Ixsz-iwe^TUHtb% zsR_@ot;(}edG%K)Y2F9} zL+EP>y#!W*&VPw2H1m}a;8G=w%nM>NERRXQSc;R1*Z7AA7MOnl*%k*48U?NzlZ^Fo4PG?jS zkDF?GR$6TqSb*Pw22m90Hz9osXpn&kSAxUM!OYJC^I7P%SPv$D9`-#<{-rtT$&dMF zA;t9gQ1#8xfVjG>RC6Y-J#A#mE4Qn9qeEV~gOK$ySb0fTFj4Iy2HiiBxS;Jbi~G*W zG6pGZm%Or-f0`#qV?bEnkfq2gLsB)$?vErced7X~^ad@}lD05=C&)IKp0C@ev+`$= z!`h>{DD-;-RsCR=nt0SV6|1CLiC=Q#MrUcEO`6JIrcrLg#81GedpXAJ<1}kJHLbGK zM5O+X`9o{V@-?}(EJw>x%_h~~TJyPK2a%&bxKzS36ipDjIR}*k4VTlD28vEZSTlyP zI(bTxf^2(yu5VnL;Aa6+woS0&eCoU^MU@YxOtU(!Eeg=O3RIVAzJLZMQUO-8i+_+S z5Rsz-5!Gc;Df(%(zE~O~25_8P?EHB&V906Ay<}C$rwSRw9iTdAi8^1U>qPt3S@?Z* zFuZY?7?o``J=-yJK2}05WLQUv`^jSYPj|E}JPu$ADRm^a%j-&?oItyO2-w=0l+@ zG5Ve&WmE)B{1ZitGpw#(&B+kVdLo!SUo{xo0h*`7==5?LHEJwDjSL{yr_=61T1jJA zZ!@t^huO)*)U4{GFRSd7+Px!^P$(7gu!yT>Tg2X!NjZ%YeNzw(CuX>^K|DO^1nLr= z3(<+7u~a~Q3^?GYz>gvziP8udb#Ww-A4CZd3^pp2ZI8sU_Eltid1F7q{dOJ$xnd~_ z9?kDLX+`SQy&=r3kX%9~hZVL#g`Dvtrx$b3+OC4x;+0w8-=M)uN38)HB{h$qS>S`Q<&$0#8`(s1d=qUq|w?WhITq zD2PiTFd%Es|1ow=m8A%^Ukf3ng#;vUJt7=83LOlDFIC$JnwCzCO+5C4@3QT0Z_txeuM*nL6I&Gk; zH(^QHh!uT-gs4MhP0b~Z&FMbTSt5UCD>1=0Hvn3+H4H=ZBL#fV^gNPIgvevIr(4dz z%XagX*j7kD-Lqcs-qkKZmprqOB3Aees9~A%yxc2V1(tc&C<<=oKNVQt%s))Wx23;# zq7C9_G?wJ0)S&9$H2(r2yb8Hh%=>PYiQ#7vY|Sp!zqjV_w`EY5C{8;QMBLrf%2429 zkjip_W-ZIyk*S&NTQe=oSY7GmHRHiz*=`N8eE!`C^IOJ>rR$=>-$)7dJ^-BXJOgbH zW#;3<9|m&J)h&o$gk1jK7|JW0EQ*w?_!tAeZvc9EE_FwuXnq#8eHgVdF*5eok~Rfv z-SiyvL(s>VzA0~8a(xgaY%LTaZ$sF|I$`ex*>N!^{9&S2z5KGFHpwQ(Z0M@RIhrDv zykNuFp$OLRk?VH}O3R;vo=ndR%`_jKv^t7GBy#LoD|dp(eN^P8#EubRT_p97i!e^{ zQdR!~U_k>={ikc@@v<|~P+4{~%lMQMZdaLXdLp8J2Omk9@oW=pCBO3u>h}j$4&ogm zJ{s|e|5(%+@pJ9<2s=g;F;2C<#XWsu(V=gz@q0|kMMT1hd^bLPr4X?_=UYjIl=K{u zmWApMXtIT!S;Hg=(dgMN&VfTLq*xh~k)eROHG`kwYkEFyBC^C3K!ntM3qh-aj4+H< zO_Q}VS&cQig~FINTe1={k~Jae*^*O4))padA&IR^mQjku3$V6Y#)9NF^I2m6Pf6oy 
zn4KhQgb%Xn5%pd!_(p^^0Y9&d0YyLpcC8~~0Sjc{9aw6nX9eJd3{!$QxP`?XGHViY zTLAJML;!va{SgC15_&^%nFCO~<){7Qtt>H={Wsd$M-E*^d^xK z0957Bae(FlXvEXJ{1_;CH6~f&aqP@uO#EdMJu%-8BQmL_ppi0=jJ` zw%6BvwHP79Hf;cO1ijS^cYhi2w?1&Ls-HGBcZ6*$*-1Ly+;a(GB;RSz_9>WJ@K_iY z3;E@}-IEKT@Su#L^$W6VeMUcJH5syk0CyJ54RWj!+38Zs8ZfRU+HH}l1+Gwimo`;y zAaOqI|2!y9_$tJ!1t$=m--GZI=YYn}!=By(Ybdt7OHo9> zw!o_veE|GJ>F@qBInGLR8wbOzB{`f{ZI5m;54y?1f=GOp*OF$!>_`RpCHp$UJ|be7 zMZ|I|@fMn$=x9~{+SGC)Y#j_AOQ`REoY6x;egut+XnmZ5Jh_b4B#3ca1>_+H3&0kB z4KY1S44DsMS7j~GRvW4D05L9AWzc0KY&IklA(RN+8nPZFn+np<*QwCMcz|$dk^h!z zdF6Z6{?;f7A)!^YjUYDP4Mpp9rwy5H@=AwloGtX^2*m4vFjfUwlj*tV z4N{6QV5hti8S=^|dFAFYO|%jq!t~?_z@LF3C%u|3LJ-R&c?*Ex6G{+y z$R$Op_APekRW7hc7($5m@`NghC?3rzn(ty8lcNx3e%PkY0pDR? z6y4+QT4&eB1Cg+Os7)!S{?Vu~b2O^=v{iNmU9MWqqq2!^Bd4}BCz@q$P|ZfwZ?8Fm zg+799NVbDKVmlWe;U}@&EGUz=0F_C+STFoKvc zV>k~$c3ehOoDIkL`=T7f8bIGVf$^c2tOes)U3XGQRUam*o{5L&~Q@cOX z_kB@vIF2%U$`i}xgnNS70=E|- z3hN+%kADLzdthx%V`UD41pNgt1n1}{Y?@O)krn-bkIV*+U{hr85I6gr5Y-|08qzK6 z*mwK3ra<2g_=I9HnAh}nWWtJ>+hR=WQVMkK3Z4gsib1jzpMaAI8t&o^rWQo^mh912(e^Rmf&OJr(BYl@ zSabYI-zy05h0taol#JHP2Vy(5MV)*!QdZ-CiLZiqXbChQ9yLuq3<8r?^wV6cXgA~% z80b+n(Npx_RP;UQK+AX#m52yU42NxnUJX5O;392ueUhxd_M68jp=nmnD6R zZ$erAAK(Xug#&rrFonAonMZ>~*V3Bg2JjV(h8%Arj#$I{NV zovWRMY?7X_@ldrAsx}FKjNvS$zb-H#MP0}li^MJ5kH5=knLgNZuH+}4MJ|$I z`o&HqX^INtD8@kz1i^%mRV>U{6dDJ{H4=?d82cbhgAs-n$=*1tq>Vq1wv5p#i}?fq zaVMv;iF8zNgs$R@QWA4+6RkL}B5NVaFQfkA&Hz$doI{Y<-yt=&INb>)oIn%TJRY_Vy zT?u{sI;vB*o?r)W5$932y29~$3c~UEUOJoVE0YE)*n_SLb}>^NVfJ8YMNxMJ`zXQ= zsw}y@TCP8gM#5D;R>ScrtvJ$ZhxqKjT-NIVf@%l;XSC$8iK&RcQ+{MQin56(DXc$< zu>O!kl4HN3kl&^agBDI_Tw(Th*xq8c)BX}i-oskmVySBWz5Al$RK5K4`Dj=>6C?f# zAzm)^dFTBb{`^{O>ro~elObE&VL2^3}gNR?0QF}szS`_I7%27w!a%;8zB9P zi8ej+fG7TI6o(k6H@j&hASx+YhJ^(453Sz{rg%#Z!&X#L)Uw>G`Ac?|4SnEJHF2$r zDHvO0O*XUP^FW>nk*CS+A3DFl_yppQy^%#)+)!ZpvAHdIhCRrVgg2Fa=)Zjabk&5V zYQICZ?`0Dqop;uhSy-EYaj5>V<_!9aw2LX=zvrYjPWMaEVML1y6O(NJv?(A`H3-aB z*jp5{Jp+2ps{#k<6PH_o*n4o?GQaO}i?zNI&bniugtX4+{7sQfdJPH%ah`0kyBkh_ 
zvm9wwt6GK0G4A{^itxF9B2;H+1SlIteTk_qF#CAn4U~DUl{qe*nY{{DCCPX|oNo+5 zZ)$w7P1Vlo{tJ_n83zf!D$@{P7yZP z7ABcC7k^J{&Wie-(S9viU=U^)@)KGQn{ClIzy+|2-U@{id}K_qB-NHLgq={*ocvXS zlI1^wjsL z#1{Id%??`0SJ-JKx6^RF*&maoZ-XhkIZI*fXbg5pIo6ES`kiv@OArj_p8_p3K!In{C>ada^(}M^ENx!}Q~h=%XRjss9@agjl=2-JSdkRbTT(c9J+! zpsOVy*Av&#w+E6-MX3&B5t6f_59ptr);~R|Z_kp~e4cI0LqpL!(TI|38dZAo2JKos zIY1kaF3+RBhw8`s$@RlQi`^|!Blz)U=w*OW22C|u6k@INnyvvx5yc8c?DGLeVY*}< zO3H>TulX!*zEffE7!D-l(eTq_uXmu?5GmX^HkJ%WFQ!4Vr!a%cDqs+l{TAOlk=C(X z|4Xb26zkWwaJB9)m+Kc;Iq9hak*yaEU^>viX5LRUAZ9ZMV4PuGgChSFML;#8$o>g} zSI~sJQDmS|gd%?xMIcg&B8{SmT%TigHUP!k-vA|8RwCCQ5{5O{0YcEQSPQr}wrFoH zCYqNh);M;fi0PaLT?_1cPCQx=4~-1QqpnCiijFqm`o~n=Ul(cFU+DuoV zK#xxE4ireWnA=EV@+m##Uf@n#>*kAqIwDZoEL7$hOBLDRqmYXF!RQxZg@cd+q<~m)a$dg=EeX|&?beT*;K>Qnhi3ccfg?qH8uZjvzad0y!%k0I**rB7 za>Ky^(@aOn*!pm4`_I4ANd7_^I7xii=Ez#gUB5>h;=$$VLuTxl0cv#?iH{KWAQqBm z3L=pADxfsu&=>X!2zaf(!@3%&vU(T68?U!o&csNNN#}ut=7C@|H6E%pJp%zpNgGR< z9`hvvCE`;}&mjb{Kf3G9Xr6w;gcQA*FE61M5;SYr35q>}4R@22it5|$MA{HmV4S--h!O%%a z_F`XX1M0~>%@-gGV3Dm8$9*4sP7FTGb_5dvmp?}(c1aqK~sO$jJY|Lb%9Sa+xc z^I1{SSV6uGKt==@8pj)ci>B(4Gw3x$2HIY{fL|teHV@TwI z+Hq9!=NE`1wX-2~>Md^lgl|!skQLL`2jPdc$g(~tGa&ahLwW@lS&s=c_I#iHqRLD7LDfd^NQ!w`A*MFguj-;nuz z*o>_61K_1nXu-T1rIumiw5mogNVDjR02oMiC~c`f`xLtH4|?l%8TsM(j8l@)kIzLG zYUekibJKGajKLWx)SO@ei-}(7}{$! z1sc8`xOx<=6K~tOH`n2>t`PVh2`$>A9aO#hq@VCR%VH zR`C4(6piQSSf5<$lZTJUXZgpokZ+EC0dkkM_ohv6J??xsi)XvV);p`Di|v31iyYek zco5L;q(2q(r&2#?*RIzOIgm0dQ6v#<;*r-I&Z}`U#gdP(P8YVHV0f@}qeAT2+ozk~(D8H6x2i!#et{s*sPsqUctjyc~(*xu^jvG_c- zYLi$^KYbb%z|mV>lDUp{0+#<03K?&4AVLbq9X>#o4agpV;PR#mY5u*@eD*l?#kMQv z+lG^E#>c-`8$clU2gq`L0Z1qk7x4P`%Juyb4aNnO$x|31k>?x|C+2~+z9F!IxVFMT z0=eRw-;5rGGyMG5QNZ*}`U~I&Z*ya%kr6@57Mx8SR#?m%%-J0KHxwh<1^u)9qXum? 
zrj`%CFr@4G`;b6e*SMq%k`goyeaKpCmIpw>`N0}=6t8<41J>_wOD)Ps(2Uel*NVRs z+E}OIGjU^GH+{gVo+RZhUPtDw)%p0t{XiWSyG|*cW;*d~bhsLvN#Y|#cKo$QL~tk< z#p$I}6>aDui81={vhiRzR*i%3nCV^O*4oU#ZF;_Z7E_bo4?`mXW$kll)na5SB#Jey zSONTZMVUmcgyru!NU?U%sVKV{$Q**BqfX3}>G>n2=~G;0+zzLa|9k}d$R0Y+epr-k z?gNH3NBqaAmF^Hc$<1#@m7Zf@vg^mr1)|c1*!0N^3zrf{a`GEQ;VG&Wn`Ff*tyrlQ zn`6b!vtr_0&dEnwv4vLN01;!gK%13tY9GPCo~DF^Gj}yU{#JDN3DV!BIf7*V7?6rt zVl+#%WfxP`k)39LF!O}7h2#X-2aBmeRaEr6%{ zC#b9qM=Jwq2lS_~RIZXB)}X|%Q6fF7gV7dk8&d_ay+-RaUO#ysyU79A&@nx;z^DHy zrY9Sd9d`~(f;e%4sfy{f41%woOAylzrd`bPyE(-$$19!@BEgwtzunC9NX_!BL0Wo_ ztwog0H-jiM1{JIe3kG2oN?cvQZx(I0qIuI3qRMgSeZy#LaNbTI*n*vKWjFef1klsU zxA$w|;w|JFc7%ViM_@ny9Mm^GKm0Q>%zaqwvWVY|I64da*UOLmf#!#$!bK+|iThpP zsV@1EpCOG7FLwtiuQTl5Fn{_QIr{C{yJ~M&{p~dasmxh`y<`u`gu}rs_Na*) zy{iA6nlR-#XgFU@JzBY?-rRKQj7c| zjOCk36P=lY&Gb3*qW#z)R@mJ!qOrTf6mmW3Rhe}RdFc;L=r>XR8+^Xs_XliL7>{lj zdRUroB?{+T_2&M6t+)SE*km9i0fo7y=iaAj)OHAGFx+jA8hute2%^tF_=v5h)065l zmu82vm|CTd$MIrGV_lgzFUebDotI22kbl#73_cT8%6MoG5wJPxV%suzx3@Bv^w54J^y+Nsfk6t&(P0#XyFmGK&Sp& z`A9SX^mux;I$;mwbBt^&NxIs~k18GDp#uQNpGzc)!YuP)($7yM<(g5e(5^bQl zPS}~7v+BP768Nq8gf#v@py5Bh5==3FEkLLcmIg9Bu zoU2wLS8NGZvwiqe7+VWOBhBlO~Um23~U_`MK93HM<7{(V<~n=Cwz&KxS7mV|7x+5xMaLQk$zU> z8$9V-`-`|kWZfT{TCxZJRj^!>i&dhcBM4V5@`XCC#%H$k85k_dT^|0^FM3prK?u^J z`UCLk)|@IFn{XCZp+;CJuo~-mNyPt=0y7eOSDxI2Cfi#=d;*|T;Wuf+l0t05P>Fp4y{{UJdf^*<7dqCODjtLDj&Nk(zniCUGlLc%l z2*eBGfR*SW5Q!iVp%0Sz>f3h~FySbZ48svl+@pa@kQ_S%h~OX;^o-AIB4 z@(IC8McPCF=JiGd!v6LLDtPVZM5B{zkTE`)2nKBg{QeWx)$8rv8gWdbRuqr=IG zDq6sg9v1CJOE^a;821|I;^2GS#%xilkncpQ-YE>GEg(6K&|TRYI-C(lwU2n z?vJkZDFt5AEib|Jb~OdGe12gsj31YnT}?OyjX&h-YI4i5I4Y1wj{ebUY_{!I$;xiW@~(+tN!rA!*IO>)nAy7QYqsmz0C^)*0ED+q``kr{eNzLrZqDZSa`N;+_hO3ZyN^ zz94+EDek3UD-AYd$Qu9)TLtbPqA1(IFA`n*Ps#NgP*PrjVl-u#n7PDC$>EI57_{&9 z=ztp)GGOnFF5z+bE2!~_Zm95$)NUN^CJmMVKLrln9@_zMk|C^?bhUct#3WGZ;$L1& zCGFU&N0U%oHqoVxpebMtqFDY&D%PJj!9I<)fzyfZF400(bg(YUWNn;SV8g8+ch)g^ ziLFPu=@m&3>SHg&o~Wl>HP_24c7|8m(*NLSxdX|8nlshz-TFt_TE9q9lNxtJa)KXS 
zSl@S5xGC8$T4qJE5wU9ntE;P*EmPyya7aG*?iDb#wPcF=D{6ACg8yZJnLW&5B^0`EaBweFnc|0_``2; z+Hf5-74xbG`nQDOrDHBJvL_}SKJecRyC&`oKHbBPXMi1ti5-Jhh#k{kl5FCkb_4f% z1ZTFPP2$XFQ6`mj^kLfvpt)z*whXpiJt01$`t<*SZ)eV0!BYBOPJxTTcQ`lH|8H|{ z$d$yo58@*@_cjp#=iYz-aqg*nBk$qduT^F^_rkuMdmb=hac=UjAwD~gFGDTCxyg3K zA3-c$kgTQf0|;Y*-;cI>nK#<&W!}B*(Q$lrFh2M8(|x)3C05(fOZa&R5%)e1-1}S< zyoibw@CYfji{P76jmUZ z90ZmFzrW?W93!T<94EhIxLKa15&@+j^@FL5tmH^M%tOX}}*@!CYa1Z|7sc z*@J8{4%cpSvsRwUhT1*ZMaO^7Qe|RwIE0eACKo&sy`3r?b%|uCl8AgFBBwPt-frg| zh~TRxE9y5to$um&6j7=uaA)c3jdMtA?o z1^V4Cav8M5Cmj$ub6&)ABS~u3A<9QYJw|bOK2Zv+KkUUKodBO*T9ku zgKjAgBhVrNO;U10Kai>k@uJ~5g#6Ba7^kd1fiRTw2unp0`R~30RjtW%>Yu`8_0i+r zNs#RC>8e~Byx6rIy3bsct;0Gqmpm|(%V-%La2U6(c}?Dq>>&G$PR6f9aCw$oe;Ku2 zzf;>0TsrsSQs_PaB$RbnX+NdNYqtg)?wy4b`@!IuxvaUneYs6ruJ1n`VMmQw>G%^n z48`A}$P+rSiKd1U$#;@OI)dLXIu^FNmdW!6|HjY=ZMg7eu}TsFd$P$fityYe=nT4 z(H2U~>JBypi~EOShqXI11<*hl8qlUeD6VM8$t0tWYOX_-6jdoxYGHZ4Fq4ToS5Tco z)EU-W$Et7~RjJMunQ4h6D&B_|DUKB`7B_QD&uhy_+K^)nBsf=1e;;yX!y<%H$losi z?8gW~wfpKHSn&3b(P@fimj+!c$1X)Cpn=){QK&$euY9HU%A9C+ZKT-cFr2lsi(Mze zRk$|(VFmjRfWwB$t_3$$L6Zp ziW@)khlUd&8#!9{qf55N!!W@oX|-Zx0wfw47(Q4_-o@6zD;i0U@!en07^69+=P#HV zgsRIHzpveaaTQ`*eFvp#!)q5;=4hjW4P!DbFAY^T1TXGy_{07Wqq&qFb#Aoh7)2l$ zt0&9q^!5irdF@(G+Sn9sUH_J^<^1{TmUm=;JT~6S$i|IR0ifb06%lcf4ep zSEmsPN9>l6{Xl$Cv6}oV%GB>%LyJ?($4%WK|C1nR34*+d^6Ze(`JW7o4`2EC^uxK ziSvqtAjtu^zlE`q+2?T-0D)QpKWJ#-i{Bmxgxt ziI!z%wBd&0D~t$HvAd^!)L9)LWL%D^Qd`#1UH}GY=Su^iEkYoA|M~^U0@TwGBQKNo zDW#!b*b}or*R$kDNZ>a{XL=(3&_maGFbm>FyoExGv@wB11{MCVG*=ip%Xw{>-V>)4 zIru*Ay55ZUAC}UF>jb}kb8luR;=4o;oLWjMFplV{d!r7v*LW55u^U;*^O1-%-QoO2 z#IVQ2daDjZkoHJ@ePxeJ(QUVqp9bVOON671vaidWSE23gpjvfYH)(IY5lxirfwRE{ zJ+#mK1h{s3c^$ z7ORqBXUa&RlMJrklHJ1OsL~0063iCjX3&jRBVVCr=2mkZ!hmW#sXS+>{L}aFHjQ%LdhdVtyWfmFkbi_>8r}&*VD-PZ|#4KZA!qdftA(4o3z} z;w{cN2pbZxl=x9AY>&6$Q~^GbI7=bnKpZ09gXO8lojy9;aZ0&B-qv{j85Hxk&kw;r zwZE!g-ECI4OfaRKV7$hT&_zQxFyubS}rvMl4e0eNBQu5=qQ)RM^fN%rH1S_$?|6=vBb} z3~UytsLf%X`P5sYfuO}5TKn5^&mENFZDiK!OAwRc`r`wgIrbNT?B{as+&m}dmF 
zqPMvGZ!Kyt`mw@X#NpsszvtmaVST*M)aKCVw04Km6_RY)E%b3k2ZULk&-BzZ5I568 zYSkA|J9Zrc=;ZjOvJJS|WZ4}K!|rfCtojZgBm*G|K)-W0gm4n`9}JnW^25_u9HBh3 z1-m)Dzu-TyC>CO$LZnbx`4Th=&k4Utm)*zvPir}~5x=W8Sr2q6uzkV+<#*L}cdVjU zRYtVwO<&PY8pJ8kV-U?$T)QLHuY$0sio>C`qF2o?uYd}9zF$ve(ca;A1&ka$Wsf>3 z>;ftI-DwYz{4|*BPN9dOKhMY)#nl!E*}uSVnkyA^y;^rdSa2Ryk;d}Z8{{W)HL8|0 zk^>phr)3_zr^h@f9@hEp+JZ-n<#)H4-v%=g!H+p&P%*EsxhLIk_ zNO5@s<$%?{0yA_{f&upj*L)WB3|zItLp;r}2^9mbw70g(9Ku_>cDBoPirmb^1*dQ5 z7T}x2GBcV53~4m?D zAUd`f;B~+gSj`Cjn3c-!?Jf8l)kbx24bSgzZDQ;nBPP>ei6+H+S!G|y4?Km+e%N$)0YvpH8z;4hIm0|yjH9jco`%c07XJiXw;x=JI zBR@?T;)J>u&$aOT9orc3O*rV%LkO^6N&Ie*52nsZv+SiidjW_zaMjw(r=ffF`#4w| z>4xdkF!G=OmR}W})6$5FagzbQXJ+CW8=phRwIzEL+|oj6ufK5e32bADT67ty+7E6v z6ohfLK7OC1&AFNl-q$q&F0i+uSEF5=^E$gRaaXgvec~=zi9((+XYk7atm1Fc#+Vyn zA@La_a3>-<1VyqmHy>tYV#6oTUD%;L{}ycw{us>;Ca zVyjjm9L@0`j}~?XX5om2+fZCIhm$`3<@KoRZ^l{XcFrkG>q>uw7!>Ig(+~m$;;@h zqDsYzx*UD~3Xm?N)UIGDR#YyMT4*h$i~;=Wp7uc#$QS^0;9O$_pOyyT+@1n_1x~@z zYsfklpF!)#vhNZ>VAW`&5pZd6%?-Qxm2cvWtg3=;7zA*91gQ`vU$V>+nWtqAW~(a| zmxqed*NoOdCJeC&w%pL^vN!l0>6XfAvds$w+Pn+%eM{ZW!Q552|_SE zML{1eelj0ZwNj6K?pejshCf(#!Jky@^!#k)X5(jO=v6p4Pwv{-*^n4`aL+S?5Op7 z7xb_7OE|(awR3Czqoe(?5t?vgU@0ic{2!okFjq1^;6I~%Jc=W*)E;Y$9!q1ct(6Cj{4jxfOw<_8{$+O_`j#5|jJp<4BM<|-Y3l&Z$Fsm^>M zQojvu#XrSb2eX<&!W^88q35Ha=t?wt33Y(>mcK^Q%2*$wAh&aH&+*2uESES7`2n)q zLP(-Z-{81#Qu(&ycn-KWWQqJ|+QG4JwwikkVJRtL9rnC`@O}6BBWQb81v>)&c16F7 zR?x@b8cGWsFKPHY;WinE^1a?wa+v0AAm;6GV8v)m9VUozqf9YFCTtKz8UFsl!05Aap z2B?XNuwsoyGuSe6$>+Yli%76@N75LGLmc$qZ)5fhjDbML{%;U@XxHLR0oQo1HhFx1 zZ6XkIxxBWOpPWY6F)<2Urd&#dD+W&$(|9m)kp+Y_Q<}g5_SKtdI;?(RP!jzl2O~|K z%D`dVhm!qh!S$VUY_)wPBuhR@Uo#_VLNA|i4A=napEwghCCV>>{loHEA-4MNM(5dd zEec@7dIZtRaXV>;Au-*9T0`^AC|SdBEycjy;d@Vv?ckc{SW)5ZCI) zM7w%Ihv@aRQW(nUTdeNDJA9yqAnr8bNgyPRqjadOSEE_AienG|R()W${w8Hjy^n6=3J9?oFG z=U}A==xm+j5lfb0&SQ|3XzdqE>YG1!X72^U`xIhcp4ttb?R+2E#sj2n@htBeZny+g3_g#%zKa!!JL#v;Lf+onrozt=}=ddq`Qmw_eQ|bBk{s& z^cL=Z3!&KrmY7N*LT%B<-b@77{NLuw%gTSZ2jj-0VMi2w3;5GEJ`O0dM1bfxtPdYu 
zfjIdfB%kv8P&Al$irew=?^T$g$deB#{zK7LVvD8376T-Wg7(m3+K~^K`|tGP#lAof zK_sbj(7JVzhyM_QNE(Fm%ADQrNzwzOX^x@*BbfM;1wi3mq$}(MfHQ*g@Rj^xAwluSG!!=rD2@S&FBSin(Y60C zQ1tAXI8P>GDu(iLt&JPophF~Xw_BRW*3Ttr@m*>aHeBbQF@e_QJ zbGJg|75Z&}W)ZwZT#2*nf^-!KOpZ>_GHNcACmf*RFjLLHhtlyi!0vwZPD*|w(Tk1FKjDZTN9F8Yk zK6mSz8~_FBG%9(`r+M@HDY@x#*!EFwgpq?fZY>+(ktoR3$>_sQCX=)HELf&39AAb| zi$t@`ix6Z|@(GBhI0*;@SqRQ!6;oTp6#^Cl@htKc)4>J|{~ULLiJgO!uZm(m;-P)Q znWmnhrPcVq5${@!Y4SHkCkoHHK&(?MLmQb!F^H~F-`gM!2>#4{8ja%Ug&vf>8;)Uc z-t6Pw13hme*>X;so@eHRqqXwCwZa4aPW)k|$v*(ULaAM=i49|l)r|LlHFK+I2HNp$ zXal3)2meSULDqeq=OU>^%E#c0Cl&vOXg{TPd%);VCNu|vE|pE+$8vdKBDxg-ujt4C zoIWmsTT&<|MNpG>Qm|nW%x+cKuD)kXrM1e^7N6P>YM1MEbQy}7a{MnKH6`|y7G=31 z7##74)xUAIHXdrFSsligwm=6hrP5jj0ND=zz$7_0v0NM>?y_L0jxXNDSHpcDtZ*n^ zq}??<>c#7IR>Vsda2 zUTHXp-B>GGj=hBSrr<(+kS%VEFK(6V|3nF_MNrz9CbTF8x@Z(xFejd0a^)P;APvo%z-o)5ZE&yk|IQp3*d5l^a}o7mQj& z<4Wy;B^lqm^x7@04Z3E#O4Ta3I<3<`PMgqo!myLKkzsKUEM3yX-HrRQUBQ8+fzrk8 zQLkX~!@qywL|iU!n*Fp8`;qIRDr3!b0zw3=75&HiF_KgB&%M+zt82I0yX5HcwVQj6&AKGS**xPUKd z?jaGLi!f^zMrd6os{%|m^lGrS8?Os3l(a!gcheF~Fm1GW)tPeMq*npQ1;Na{LEI^% z_cq@Z(k*|l8;x}!;}TRZ##V;XXQ@>(CQ!^=!n5x!OgVfk$-ffzbisd_%$owlCgoTS zC=FmFe}pdyjigx9^Y-Fiw!)gk+lhFqA_DOOdkKgey9Z9Of(%;*9G*9G6R@I^c$f0O z=_Sxe!s2~F5_H*mVVK%PO+mLD&hG#Y)9zKDH(}raWl8cyk-bynV|x5=o6-7#ykmq2>9j z;qSW9xI|tFH5j)O`6z6(@=94mFr<+3+T}CC+YEH(3$Z!rpFJA z;!Zl0?2jTv?@h$x+=Ht1<6>0D#i*LRIM6<$TK`(4eJ#?~dBgg;EIcTx0cPBNtM+fw z*2^o`;R(c|RuBX=2?HaaZq3-ZX21|Fz-<)kfoAQDYJXM~kDxbU?4q6Mr^u04<`m&_ zjAcs=k?%HMY-z(vN-15EBf@roVp+gdQ8m@k-4U_xfbA$UW=FWH8CH6?@Pbucs(p&9 zOR>vZp1eY_>-##@CnEONNXaHLS?YAO@ZfW_j$kvzBT1!Do@v74AcVeG1w=mo4C{wj zd(P@1Ks@J&b~!JGSt|>#=858KJlJ3G*g+P58FIRH)8Y7XwExY2z5cYA7CgoyKk`SA zHw>e$nmf9hvbD>|tvYw9BL^65Ag{NsrYudaW=PO^RyDl%fT^w~hn8<8IYf$s+-sYX z&ge8NV29p~d~*6ryV&AA*L~T9dUzc)3;$a5Av&P9V2@EE9B%1f=6!MmGyb<;nAayz zgLr8n2lnbFuNqhOMab=z1#{pla04V3^21d+k(i3^LT96rKL#fIwzmv6T)NDLhn~=& zBo{bxyD6=J(!7*5 z4qyc+eIKQBO1A)m6BSZBDK!;+Aza6)hj3krKC(_gTAshFL7?9qKX9-tY=> 
zre@|yOpvN4v-(Vum>{G%dL}6|K~sAtNKZl>q=^RNY|)+x%C;s52CAMe{*Ut$%t;X@ zpa~{vg7DNT{GKsEK-^%f8wC%~42-@lrf0r2J-zcS=BLv%7g+Oyxu9w3nI>y~n)=L7 zB;6n3H%0gb!Xr6A{S7%Z31LoY`zWo0(%J!+m?4_JX_&}}1(`KLE+h{8U-PpD^V4a< zi?z;;6+?^Bj$6oxdcPSh_+}Kpv(Z~Pp;RiXpl`wO;#u?xK4nuoH3-*kVtQx}JZND` zA|{K+!FrM!y|hMP0IRD+W1VL0EPTKxArk%;Z7KLgg3h3=e;>#kh+}elK+D84uw~*I z;H-FG=stWo4IN++&`}Ybh!S}D8Sv6+7JRtQVZj%)H4uQ*T+ra{3aW8&c=BpoX5D1tO=#OYece^t0JAdtn1ErCsIrZ=+tWY`n*om#{|JuONZ6WcdM>$( ze$Wct&_N}BX@D%ikoM*^*l>HoZ1OkPe1%&+H*Um_{$PBId^Z~B$}4WN1>qSJ^siem zL@8=lSR=0aI_SrXZ2$7|kGG&}2yRQWabWfqo0JEEycC{HXaPcUF`EQkXEPZ^)I|L} z(v8t6?zyTYTJt&;qVjBCKteHgV;<5<@nqCI2=~QQ0EQI62FGQXw;_e{TPA%^@%nja z@tZYZe?m{`N@Gz8>ixV3`bSB%2scBYB1lv`C=)5#sC3zFTL0SqxZaer-xz=>JQZsD zdzGa@*Zzh&LY9%eQGBs2;TljgMBB~LR^uwVGkHxR@bD)b@_O~uumvtOlbGOzWsG72 z&w&RRil|Ld;Le7Jm5m5VR9d`e*{-5>$X2gAl+Uf?Lpjyb6!OSwG#z zXRal(Ol9c)GRR=`%1N5;!Gxb69mbezy;<=y9GWas*`Fwl8o|Z0!ee;qO`AZ6FC#k5 z+~^QmZ^Q&L&muVaV}GJPoSRTkjsHy4#k<}jhWQk`4w|xh7z@pT&N;=JxkOYHhH#4+ zK&KIWAOMz2Dg~CuNtn>|v4!ga5xF*uZE+4jm`DQep56(|#3B%nc70zt-w5d^PDRgX zaUN*Slzadsja~YQllT!IT>kU6I9tw04c^FIqJ{>8Di`Z+nak@v890Gd5mFx{zyu|a2Fx#NKPGFanS}uHw zeM{JWh;H}k$DK9FI+LG*_z=UhI=KH$zp*68x=|cIGG%&N?y%P1r$`V=5SjXapHAE$ z60aFakEs!7;dMY+{9#A@w!C23Y@b*DsnFB0zRoV4;X7NqptRDajgwcD$}3cx|M&t= z5pJd2aCspBs?BkP?YqK7TcTe9u&mO8=v!)H(wVDio2hlV!uItdk6q)aOFm}@UEt|O z2W&A({F)Rj%?@zh;j0{AA;zFpXV52amBWRPbJak^;*;`%@oJZ0kKYF0xQE@~bgTQJ zWw1G6x54|%>oDN@Rbb+D4F2}F2eusM!bswVdx6-(+2Hx%rjH$Opke9SMDRR!n6)aU z1=_`>H`}!H=#@PJr3xiqJccm*HEU?ESwnlx8ro|#v`EoAKsOe*drD%_ zd0(!fVc|)8Ijw<&i*`jji8N~9egy`KwNpwL!_WCGtsJak6en^Mx54qV4%iQS&SJj( zI{@ssqHi^MYz~U+I!oVhqJOvlpxSxJj5D#Lw$nwNE9X!x$S}OZF6~;qHD_gcm0&s5ANoCjZc@e8Il=pCmcp(Eg5=>J}+mthkfap$d#NSNq~J8u+$ zyts231>S`>`Adrc5Ojb2Yv*DQ-Yt?>2B*03!&x!4VIm$2S!aBV-vmT{-Pc6(iwB{| z(%`U>6B=T+M!bcM0w0m_r_Ll^v3Q7DwS8%9810!X?S}0v0rI|} z-;u&z41Pg}MKCo?{l?EH2t8RZzjPN+f-C(%xXJs2?pvIrkOVM$e38gU2_~3f{KHo= zUGRGlRO#56%mvnOyX<-cRqz8K>&M~!qd(0Q?;i=*wdA(n^{Umq`YKtUx_7J3;{-;6 
z#=LAP9&Brj?bL2xo(=tJm3dnHdpL}}U;L|sU=3vqY{3<=Z&JH=hD&yY$GoL+NY^RU zn+7k5$!gOO`F|Dp z3p4rWSox(r`9Bx=m6`l(nM`qoCHyCWJkkK@zgOu4-jh&wA$$Z%um*HT~r z0+SHHIpt)kjWhK-anh~9qYt3@MBV%gD2H{Lp23KVU$^Vh##2X^oA1!)JYkzugdj_z zSE;ZVDv4=dp%um4g(BH_13mz+IeE=tj}gG~XnLJ`+pb-L_oZ$|5M|D#CnV_*SQ~`` z%hLr0nVzT81!#p>Wmx`$$c8^;ItL!GTXC%imlH%0A+m!ns>B!NUlDz;pPF&Aoq22e z{pwH&>~rKn2L~Cb0{zKEQ-}*D19>CvPi0OvBM6KA8wvp2DhA^HX)>5WK0bn4(SFsT z_&{T8xTqDvwuFnk4m>p7m{pBYz#C^%G)aZrREYnGS{;ogVl2mnk-zuoDfF~NJ6&4< z7~;>)s22rr65gf_CCJX8Ok9~6W^jUWBtpEf4~Wmd)C(d^Te!y%hI2}EWJS??04l3` zV%#x^EQJKrMWjzBjd78p9k4PwhJy>*NvNbeGJI)Hx)pvv+@e6<2y`gd-v^E&n#`?^ z&zN=^^5#@y8A0(|^el#yjG_qknI?Iqn|AwZjQUHpsEOWjF12=`R4emv{peEqmRI)pjfvE3MFF=s2j@%(iZ8^qs5 zM`=s!Yy>l0(1H~n#{s-i0+WRP6F2ly1rAzc&8uAtA(&+~;5uBEyrRi)$!pvAAV|i< z&mH~kMi$3Mwe zd7%OaOJ_bo=B3cGvfHV;5tRN2f`%4?*B;zq7OFZ}%)UPBIDP}w)Wa={s4dF}cM@`^`Y z-jj4*BpY^xZNmL*Jp9@iu4=>^5|iW=vHk(%Doxx$lQ0WvrAv-Y0=X#o*-6^ussI@s z&pryloW7T&zsJ()AEv)groSmWHFJCd)Arx%AZanx>~J<=8tSb_-nU+P zmEpM?sGVoaI4hOce3Co=jNjpW>jpF-6|LKmN_Gh`{RZC+$wLUiSx^OC5LtL};+A|H zX-fBoVhj&_0Gn8Dn2l2WxEMg5flF{DTL+d`Yp1RL^kX%pQ zZy8zj)I6<9PtBKO*~Fky(e#6>sfBWVGa6cT4#70lY!%xhol#_Am+DlDHmf_18(tuS z5RiIS;M2%4TZ8y1ylj{uQ0i#WX0QM?`4HN9(@l^Ax$-KIiv)_BO}{XY;l~a?IKwdw zW}&IYkSdNg1bx$tYZc+fO?iP(u7Qiv>?)cB{_iu=2*dq&XfR6G1~}1H$1gXs;+JPI zn5183CIneONP~qH5gt*&dLGRt`pBE#zrrFP<3cgD)Czv2X+uEEu8`lHoHB-Ds_=1( zw0;#;2s=V-G7ia5CLi18gD(8a9}UMvt>%bUYv4`<_wNU@#nY~EGSWZkR{Zd(yY>Va z>TG%*Ht`hY(W8ECV>SUd5vb?e>)V##H!C0*(VJ6PpKW+53!a;a3B`Jf{KFcbtg$h#Ujz<%Ec}SiCbD( zKmtM`5ZtR&rYP8Gh-qWHmF>>dG}@-NiDoWkF0D>0mDMGjqD`sH^Z$O&`TZ^z#KxI- z-v9f5KmYl`xzE|p^E~G{`-aLM=fzn2$D*5t`#^R>8x5@kqef_L@Uc~5vhTO0UsWJJ zSD(?fcq7t*8KdgZ-M6#rw?f4;vqE+F6yiN_`wQLgQ||e`?iC5c+(9ZYzz~+|R9yQ* zW}#Xoe;gOg7&yne{4$WH@^2(_E2u1xA~y`#bo^C|D~yI1m2qH58aAOFf7Q>Wyx1B% z7?h7a7=UM5~SM(WTlq z*|_|8c|RiT>dD8Cbj+ICpO1SPjcAmxV6(zr&tIxlQf#(B5waa^n*(99bi0KX^+X{i z4sPzxMs4{R?!HajmRE| z?`}lMONwhee)C?TA5<+<95-qUe#BZgc9voW-?-@ISFnWg!7bAPkl 
zS6u=Y{FYvO{^ZS-`lUz()DLyr?79YJac7IcO({5dOCfO7m%>mJvYm}~%_8R^`l-KC zdH97`IqIjH(e$+IiW3qw8-0Y8@pPzs1DgdJtjOTmxMW_%;y^4g)<^1qHkTb(3~-Ia zZ;Y!yeurVBLB+{foJ>;dGj9ad|6Oa^)gRRwOZ^AbARhwj4{IyG`n}K)rE>nv;m>UT zq$O8>ZBj$?jQacG(euvz-Y(cx&?d&a@2tN8GTXezhX>(s{FU9Pc+?zodV!hj>xX;p zQeiuwe&YNIj?BXcb7LIqHjbZE+JG|(n9ar9L_`cGAlo_w z%SmKR3v==uLkj%{6X)YmMYcWz%i}Bh)__Ql4Y60+gs15~5=R8ea@rBd$$-%9Qp#ALt=r9=i-5gHKq8(@ulun+nfo`?7wW3?FVN%RAiS@q0}}5S@A(V~8@=vgXgM(00Y7QgI1KSQ zRwVA@%-xQYI7j(aUmh^!S%-{toMyD&?6TsWQk?%x^gEt-=oC)x_^C5IIM0cGy1EnS zZaiLT#Z7jhxXJEyEYA3Op8gGG?RC$K=+m&v{2HnvBys@~!CRMg@E*upZJvg;=Oh+d zxq1h=#VvHT6s(#6D|}vXX;)%`U3XUNx>saGPbEp#sv8jsy;__mJ#MYysuJ$!Oc4Sc zz;?HfP`u{gL0xNAI$Y>wc@=7cn{fcB;^$!OG=qB@5_l35lRRN-uUym}uibI!se9Ur zw1L_7tq3R3+d|Jj2TnvRs{td2eV*YRl_UeV9+t4SV2w=}kLuW8;^o-cVB>*3yGoztcxO&>fh=BQ#mMhErSTN56?KMsxr< zgN7Ug*l@qN6Ep0*y2V;`F{41Z41G`x#2v1YwLrVaODU!>#S4NMWw zY>Raj*02P+TMo<>Uk}N#D9Q12_rXg|*e-`XY{7Y;C(&+Qw~NPhxP3c~_wWPwcps}C zuJ!a8j-`$Scz4gk>L)jn_A{v=9PXymNR*rM0} z0DiE@TbCa(iE_fP@^~h)MqvUDbXD&1V+OmihSSZ?eTHhkA@|+wz{WCMa*pkXOMgB^ z3ST)hg#l+lp&gk-p-w+_W(tRbkk%9?kiu{{sLbM0wOzfj!!laJCxFe28qt!X5J;Qa z7{~4pm9a{N4QCs{@=`pGn1e-CU7}fBofy38L8QBs7i(DHkb;izZlRQ?*^ zp6q$x4zpSHJO?fAI^2r>sf>7OBBN{A#~Q|oI@qRAz>DGP>g>mm7|D1VgHqLr|DfSv zvAtk&r6p;lz=D~fA!8oxE+xjw| z?yjs$Z0PKn>kaN{T&GrWx$vg?n~|K?wei%Ku{+?`C5Vh29PB0FbTm%RS+{&=UA`RU zliQdMJ_nj=hV5U0?n}*}FCI}%t!oVXZO_1olaLti9rQd#f9RCX)i*`n38T+Vk?H=u zK5Xz$tNsYLM8*$XGR{xWrZHfVibd)uvwfqS(K5>9-;WX(GCoh%BFdgdlp|^AWJEcT z220B*zke3O;o-eKc#S;8|J)CcC$p!tlJTG+$BW^ZD8ShFfPHHK>^#Q@v*F<=5&J^W z<)z09&{{0UlbnSE2jViza!e0cdEXtjG6q_o!xVlkj;%Lz!a))V`Lkcw-SBz zK)k=GPP)Da^gM3>F}0_JMYCcdvx2YFRTrw~265O`-ThJ>8oC$+)H*BoS)3Py);@rl z#C3eB0NKyGx|`STXBu~st6lbu>NF*G6~5`}!YQIo4jl9z#I>V#sG9q6njLEboUhMU zrxe%=D2GgEU~5lJI=YmHJzs$AG~AA+_Sp~MG--S5b0Eau5ue+nCea&>N%XCdJJIkL z>vPyXKeNtL2NSC_%=3W2^g2iN$`H03l|Ne+$J!IVTNJ}<6+i$^9^6)oWrgQC7J)G1 ztT0$BBg)3>fat1scFtYOz)rkqJJ=CF&*HLGD+?lFtvU}CERtw62K)7cVfI8~X;0rO zcp9ec)%(E@8!pvz)Hq=J5Lo` 
z5P6<|glM4Ki9Kl_ppTvij3=UrzjTnOF!v8(>yOVlfl6-tG1tF*_9B)1i!q&fZK|7R zD>mY7qwMG5WM2TzX!lR}z%DRPXQK+Q;o7bX}^jg#f=uh}8M)gLzyr-}d zt5FeLjq17$M|F(FDs}b*3sU>fVnK>o+j>39hNc(mQFZMobpiKX51>YmhtN0mu1A$S zS7KJNCLPz*U^(hLT!At$1M-56fOP(sOA7B!*8Nlu6w)}(HL zg)z(wQf^$_*{F)s2BSDpNatXDgf{^ySax5>oHFL!-b}9kB^l>_FgJ!thr~SGVPvi9 z2Y*=Csm1bD&Bd2GYOd^;f_2t3JSNG@Q=3q*z{pqg>@lw(^A|>-H*|cawzEE$7F2iI z63-4S@o;O-TRNk8#Rab5wp!`gkHVu?dZsE3R(iNSQ?2xDR+{P7svQWb zR(rTYP}y`D-p=J}&wAyK)gBJs9H>bh+cDe`oX0K1;lcJm{`js$Yt;y}445A}8f2|J zj$F^f*kF!#3b#H2t|`^+Q!003b3PX|kp~x;OGU9NMh5O!)(y)>j+)}P@51%em{!Mv z5&EPJYaQ0@@t9WPbdrOw@Ad9d%}jdT)9|65-K=h^&txTZVwYMv@=xygId0)|rd0Pq z6UB9-CU4rwRi9g{rh^(t5AvT&Ca>Pi*tmMLIttz$@!hOdVbEBg!%?Q0YdqN~?wqd0 zc6ki)GkiH&1mvPrwpx^WoFUzY(?|4It_{n7LgY2}zcx6*QcXFzx^N{EM>cY1DnuT{s#O>^ zU!gEz-W#h{uVBvta<~3*43EtvsbhLcN-sx&dnGS(aW%Q7z=jFd>lWoz)=ouZTR3nI zm#4_yD;usc*QbtNH&yv|bo>(cpjKb#&2di$^nMjbtyL9Z37VC}y?m3dDj{ zAVx`r-$krrSj4JRi&%)Y^*_-0^~Cd#F7bHa z9>Wr;UUxBG#=%QhT z{ti|o-lCl2aa@cqjioGHNvGGTPTjyMiP|_uFmsJs$5JIgQD81+9af(1@%5zYrNsCI zwW4*5*D|xpDLOv{>b?r}_O z=m65wlr^(+Ham{x$uTD!F((j>xtLYGS1o6~>v{A9IhMSYBV818%-H!XW_<)l1j6GO z_-*HoP@c9?XpPYM@W2cD(89^Z*|TQEul5({zyOGoi(06+KRfyoWC~;ApPtO$VqS15YXr zZEoP>Wv^B?rMNOGoAO;r+X@jYjs!)j7rpWDy>)pwLZB=k>sTKM9WIs*Qf|2K6crX7 zLhAV*7^;S_`Y)hC3sZlD7I3oW*(>AEb9VzM_rnSmg0xfi_0Pcv9Y-6ESf(-@f3()$ z1;?#yvXL5^>ziq0g{+M{w!f7%u{N@H;uRC3#`(Fn$%IXf-KGt^nhrX2D7aUF4JAe) z$T&Tvi>@YDjNu|Y1AkJPP=5f`IZ*p;s2e>YRaq*6Rkg(O%U0wh0#u(ui|teoFQ|2? 
zEw38uQqFo#&)KWhx|Fk?6OcN-bM3lv^obAsIpp0+ZW#dwu|$R9F}of+fLIypl`6z* zu2faf8#2s}YZ))BY#QJiwT)#888%=WdmqqrRpOnnLub+~L>kmEl~FfL!mp(vk^B!t z^<25%uVJ)f<;)f2Ng;pCRjSW4Wx7)l#)=3s#ADg{>?W#+*-ccv!sSim8pe9<(Q6BI zkH?~@>S3mDFvdyMJDt@v?A~(d8BoB&x_)3Rx;>Z`VmH`#R>-rj%G+bT1X{fvsb!B9 z_cuLx`W`gyKf^I$42Cii?Pw+NK+zrVaAu)LR3A2IY0;M~l za(rh9lEpm;1a!le|Bl#s7^U7+YY5o}!tl)fHWWI{tc@PYco=eJFFb#X38UT$S9CCi zXB4!RHlNohy!R4e=El2bruWRLvv|Ps+Jw-j*8$H0EEECH$Dv2{qize3!1Dv*`5Rsw z;I*%~e|cP_M2?T2TSRn+_@LXf4RrbH*)r5}Dhg+1h(C7e1LJEd$z6qU1+=GOY#<9> zMsuDi!gHDS?Zx3i($Cnb;y_S8xe=cgX$cEqWBcnGEb)%(S%{!`5+HUBFBEnZ}SZy<4$fKxKTyvOT3p; zb0x1r{S}PiG9fnEu}Xx^VPLifhZ|}F?m#lAa0XV&L~BhoT+`;BNUiSG$ccoi?;viN>BzQa6quV+0DFsM064w%ZEb9u%hA0F}8^P$RsOQEx24*U>Km#f>s zaV2<<-HAb9SnTe^4#AaKA*carXR8<*-rez40J})oQp95fxU9pty#n`j)b7TtsRc8G zm!o5birRrL=2!H*ts}7V41x@EcB$nA7sMq|FD2_cqcG*h^~mf&vrGe47mqRyw6tO5X~#{ccKSs+-J+GA_i7`t68o7 z--HZ8-r};}hlfJ8)MdSxTZ`~4MdH}ye3=&qJ=fg_L}EzN#bft6cn&N^O`+GwvFL#{ zay?h*m2cK(iF~dPRYBT7n;i#waE_hSdAVEyBH|A82{ls{2p#X ze_clW9oeGKakz^w2>1%4ZGFw@VO1+&drjr9bqii%=XyJAJb%8CG82NB^rWg$PpNv_ zQv*i~9dO@PH1c|{XAI1E@A@5&%XGM3JM>Zxya@4y=5txC?k6=)jDI~^@@~e9EO^~` z{zvS6dbsn%l>s|HzLe*g1zTk`-dX(-Zgphcw0Z|Hlo2)%XWz~Rj+$9PkVmYL;odIb zz#Hh1KO;TwFEZ)z9E7$SF6xX?A(zcK1Vat1H)iOYuAdhbAnkzXW5kciFdU3cNkB1q zC)t|52i6yT3{eGkA=$mrduEA;|Ta*IfYtL*(#O`k^M$S~UaG28+L8+^kAi90Hy=;_QhxkUzl-!*~`{ zx(9juya{=_L#uM7KO*H)4kl03HY-HVmn;r=BD|5GIK{=7dIHscq;WHJ@3pIN-KyUD zonmbL#$xyD6K{4{;;}<|jwQ&i*1q&}a0b(8FCVfW{slK}BtnvA1H#tDn>Pe**@`B_ z{aQF=#tXLG&WROimY@o>AFtz9A`!n7W)W{`h;3!ROxgQ2M8cOPXh}nu=?)}d6JLP` zkKKW4_bT=UpLoA#?I2MrL;#Ush6bVGWS;`1)7{_NYJ}V4s;#hw@$`Wk5jb41 zKG!a&;oR!UcvCOgrf2ch#Yky2$v8}bs?6hWRkQX8=xT@Hrzc7de{Q=H}4vb8TIh1al_*QOOb+17vMIt@ntuhRymVPE0(`)X*Q9fzk z?N3`bHXR`>d5x69$ZyV?9!0Nvx@vTA&Y?Ob$n*SFWZ2{|&t|QO^la1`o9BM5iT2#C zHL;!=rRnHdt}PNhi?qh+Dbtz^&(&Hp+mo#|S)SQilkb_LHSwNFT2tZ~uQlbKIHjqs zpa?{3{R--PX+7FH;R)0FJE?D{^&6;f$|eQX8>v60^_!@#)B3H{zpeGP)bG{$z0|*` z^@ph6r1c!9c^=mKI_mG#dJpxhw7!x03axLV-lg>@Qo@t3^+D9n(fTmzGqgUEdWY8A 
zsE^nBXzI_^dK})wnO3cjr@pJ!CsH4z^^`@=$t*=XH^q1wwVt~qJV&*@ocbeLUqStT ztzSWXt=6xh{u!;mllqNXpHKaIt=~xf8m-?%eU;X4rM_J2YpE~P`n}X=Y5gJUXK4Kf z>YZADg!*w>Uq^kc)_bTQsP&E1M{0c&^&wi%c}|be`XK6m%v5{`qu!(Sk<@>v^)~7c zX?-;HyR<%*`mI_YPyG{GpGf@%t>-JcT(R=>o-szru7@CZ>RN}sBgMb(Y}@XW7Mn8j4JQ0 zyymjr!I=j3OZ8U|FRtS87XENw8d~EnT!U_@1an1JW6MA*@I zk}o-^`iiX45(|3CI8`rOSYKu`@YHF^=h_Ezm?RT|K8RJ%n#7pAQ7g8{^W9!KKaa<)z_|AGuJ3!2Y=5y-X8y<0~R$4}^M?Cd);1>v=rwMnSz@_9M z9D6d;&I?OP*t>9yNp!H&%P2+rmF(`3cJ@v=_{RATvB%$}|8ml=jtzG*>3<~*!p zs)1~zG0Gvz(J*d+kD;`!K z@$lyeA0AhvSMNghqoDZLdTzrM4dlfvGk9Jc8);36+1rES>x#gm&{?<)6DvD*UYrsI z5>Wbl1>w!v$SKTZVNVhAea-ZLCf~EX`JQO!i*`*I@o}%#dX=BZ$2-9!=6fRZJ(iiv z%w`pd(vEVA2e?!YC!bO|Jln|O3?qlxA1+a=BB(>Nn7?YQgT+e-Ixa`hJBzZAu(~Lo z+lt(JTMfK7L;XnTF_VD><(ZHej1?-E z=^aLP4i>D-pF#}iuE%e~pn`i<7NqjUz0Rz(@FX9G=e*t|kYn!i5R=~FL~|p1eUuXWzgf3nkqt7#YhRnsphp^e6>W~{PcaRHNg2<$)yVtlk!UuK?_7qP zMqI#LPkZwrGL>3=JQ#g^Cyq&aE>W)i!S{x`(en6Ciz{m74ME3uhGE?amTDX3XQ!Mi z{Z~$p#Jgs2tE~IBa<3m=Hq#GnkMPW!O9s@UH*>_*$#bbTWx<4Znh$uA-&KRlAkN}^ z@s5;uz$^%<>-&bQae``kSj;tHXo~SNs(*FTK-~0+ccGU5eQ}hd`c;HK{5HsGYM52| znhhw_kE0x%lDN$0)G1dVh$$v=u<7#}tPlI{`1B9i*${}y`O%pm)Q;mI-8o&Oj)RQb zPjq=8;+V?Gq%agdTSKpx(5Xbpo-q)SB57D+>f19-C7!)ev8@y^tqH(Ms&+_l5lo$gDpRUgxMvz#Y(1cDOxmus4@ zdr7V7+97Z0XVRLt^p_^B$RW480Zlj{0(#cDNj-8X?TUnuHekt6N3; zlZ39guf{UAU9kIHP=FgSVCVkExGL1$m)o+e+8sSay5rstM2)A=fz=w^{OadAAM`jA z5X0q8yu#&7a0Car)1XBk-w_<<#@w78J#x(^*gow_$?TGL0Q-MHZvnbe#Kq_wZkE)aqoo5kxsE=HcE zntAqVtBgAAlTkR~gM6v<1T|;a0z81(D3NVOcI{{gA!|&opM74fU@#DY>Iy=~N)MBo^Jy(fgi<3GE&_FnB+vD_m9vEXxC^+~qXia0|UwSzre*ZteK_6dvq| zn9@ag!4_-m>8!>5ft;f5j9oumNx=qS-sY09v-F!EUfC}PErVw{s7n2=#;@jVqPk(j ziFbMN;DYBqjE2{+@S)r0b*X%*0@$HxQm4{AH)E-_g&*|E7&=Rh)#(!J8yYULo=I)R zOVOx4Tpi?jW^Jn&Z7Rml+F6`J{k}7H*q?^ejakAKgbScgVutNBSAB=xr#?=KST9BT zUdkOl?p`O)EosdWB9zww*pA+}G#=~Awbpezs`1wvCo*u%(dN+2LFcA}q8+##v(I(N zu*?PXde!NPp2MFvhxMkR!+eAFffn3g-c;U*+tf#+!%|Xt zV;o*VaL&cLf?AHk+SaNCx_ZZ*>Rgr5cd7?_<{N3W$V$z&n_H*B+|nE`Zqq3k!sD57 zW~U2$oLc3RS__Nvai<{u=lfsG0h^UQQ6YvKFwuaQ88F{~*BfxP0UtJCtpPtY;P(a$ 
z3e)ipG~ieRUTnZC4OnEr3Inb&;6?*JZ@_O1=*NR&_&OV~j{%1oaGU`zHei+kT?SlZ zz$Xp3(}3?9u+f0+IveR2aHs(%8t`fZUT?r#4fvn|HyQ9{1MW585d(f>z+Vj5zKc$; zmjQ>$1`OddD?Z=5M*jL(wW-~3E+3K+;_VAHjIGk}nM@5^ z2a0<`+gA*0+r71ans5I&d?q2g$yY&@|rNS+Wgi914#av;7D#ahuwW%}d!y9e=)lOjk_hBG!w=MQ3nDw;iF= zg-@|zEe z%Am`}FGI`+#|y=Eu*pFjrEo10^I$514^qEC#n2oV-3ydUHo_H%>kWU!unz_|%=qaq z2kvu~d#;hD4XL?R{B*Y=-C~t0@hQO{W7HuE;8KMAwShCFQe!=++q#H1`+rEKzPnBo(8SpYw|z}Y6E#R%9IBDQm2%0qlbFq0d^ zi8ho3@-rLibWRm194SEzrDaC{HivX5oYD~k@fxmjm-1xh_b6|E7a<06AQ%3~t$Co; zh7?K=cCMm2&q$*XP*Y1@FkbC4A3ja&nySz|8z% z9%vWMY4VJC4*zeF50f5~Z?RrFOq@*qwM9XWm+zVzCO<}@zA(!hb3a>^5=|FNTU&A2 zlnhdiO`MwfJ4&4IjnBmA?DBIV(rP=!w)0f;VH9c&6aTYMk0py`_FqYFj5oa!$f%Zq z1&FH@ak|03LU8T$l4O?6W{#Qo$9dyBd;Bwjc@ZS79O=4HhRGq*M2R$Wu{kv}zekD- zyy>---^IvP=9jKjS-y)^ZNXYI7j-0Sty~p@DMu#M^o(pJPsEBbvVPEY4(l(w`nSAi;%=ehy zzp!JvKNh5oVpD@QcQDKuhFSM&U}>1c4EGIRYyaVfIngjN?Fk{U{eT`pf_g;p1gJH(vocd-PW-RupZ_St5zs)e)40Auj z9D7u|mmB6V!;A?D^)-I3-LE&yhYT~%BkSv`EU7G!x<{D?-BpkfN{PlI*yvL-{^9Bm@4F+f8T<lYI`hMmh$@ca4&O zh4{UY!A?sh8R2xlwI5;+78xyK8;kh<^Z9bX29C7+i$=Yq|61C&ZE|8;=I{MgiPQgD z`d=!V|19F{{(NItCYt~JFs3@2KQsJ)q(QXGedcoh|7{7dEcO2BTlN3(ubG2{%Ph@* zewMTQGsCwn)&Gp`KPmxCsVn}>6lrSKhZ*k({9}r>kJkRrj@o8lVWL^Db$lnH`KwsE ztg>qPO@COi^5*JQHLGt~v-Z|MuDk8_JMO&e?t9kXd*A&V9(eGfhaY+Lv5kNF^W#ta z<;lNp+Wgeh&pi9wmaW^Kf8oWKUfy22W9KWoUfsQC@4nYw-~YyegNF{k`PSR-y!+mf zzrFv#haY|XN!_QPeSY+dFTe8Cf8B8G@85je_}%wE{P@%H6HPz=^6Sap@H(x<&%a$j z`@o(_t4z(IqDM4vM>=G88~FQ0S8m6=&*cE4fK zjf$&Y5t$t{r?I6bG?VF z5ax&7WwqF6;Qg>+tA)7+X0wk;Ppey*Z)s)5_Ldg@alojB`PNqEKejSIF!Ren|CxX1qTWw8{y56hHrJu2#y@Ns$KMELKf%6(F+0Ch z=s6c=el&AjXnuCpkK`Pe{y6hginQ_#^Cb9XZhxGIjIXNS=Ap>G#*Pl#7cOv@lPZ z>nf)6^xP7c>0o!ybC&_xRCl3@Jk?#KGTvTNTEIL=&n}(o&F9>!O5NF|UdE?T}&2VYiWiI<%SHXhZjN%eEg%lscX*(UANXP0z_PpGQ#pOEW zOyp=XMpL83D2&7$nB`AX;i8etl@hW>p|O63NI|fq=H?iWL3t-xH2T7~O01A(s_nLB#a@SH-U;%d&P z83prmi`_0eJmH&^?aD5+C6|^Km)Z)7Y-!ow0l8eZjQ}WY%4CwEwbg7 z=gxJzvac#s1qB{v7STZWJjjgbp=K9Yp?lbbdh*SOH%k9ho)kT6ZxJ2akQS^LHadoHKl)(cu>sxbn%$GFNt~iv6F8wwXVQ 
zN`nsUX=+Ws%Sc$ur!T#VQeXc$xr)WUevE9HIb+f=y^`tcU!^=`zz|!uK?QTiH#AZ> z-G9F!U|nHh@j{i~2;C|hbOxLjuDRs28Q?Vk*`@Q`^K*+_Wwz+t1t@t1dA8j7C9dnv zX)|~WJg>^NnT0YNh@u?idWDFBVw;~kzu0KX`intm-TR~0Mrec1*Nl0w>5l1tzGL5) z#V>D}bnCpzhN5Zx23SC1(%3m{Kg#A@0^z!D4%V63kd)0SE0|wWn44K>)FX4s&x@Ng zEw?Z?yDWFkMKe>TD`&Ho8D5g3YZ=Y6-}_~n;a!46mz)sMC9b3BGR=A@w6lB`=5=tEWv&vcvSmXOGk_72!t(NgGJXktLQo^tg%yF$f4lcppw9>_8FG; zexm&}zl`vxU=g*eqlj_==5!EIcmpVETErpZ%J`q?FT|@*2aNc`6>fnFH`7nSARM}f z@|b?QwijJS;NY>Ayp0fC!E2EvURbB;d?PO&9f^y>N1;D_5C(iv!#jxHIlDyfxK~7P%gdru zZAeK+bO1c)r`4gYwsZv#a=`x?9l#6dr-iAWi6zia1X}J<{AIjKMFfcm7gSE5hyYI_ zMupdgpKj;Pmxl9&I0w~)4K(xXfHOSpCN}frPjLGS6yqig?;b3==X4U?<3dIEXzZYfe(Ym~I0$v25ns4Z-jL6J@Y8J^(u0~~_!&bz)FLxXmv)f%K+$ECRpmLuyf_|b zhoH)7aiHQII=d*&7v6*X2-nt(muDCm@nUA?`Q>oeS-deW*=vCyW}_QS{82 zDtgYy5Iy6jh@O^Ip-YU;mqZ|CQE5WvkvCDu8$;%6E#%=N!AbLF*(b%Of{E)Ra zv?L^}V{9;FB%JZUe^1m8Jx6tK)KqJ}F+Wyaf=5!J-Zbd8^y(;jxgtfc8QmeT-9#@- zXQ9I&F3Jhwfy@}W8k}YFuLh@kwnKU2VYwe=lzUeQcmO^CUzGVqlkd#8{#OW*2i0Jt zUq45PB{26iWIsH-y$DCR@KLPUn$!E_N+JBQZT>vNpQU$*=GYK!bP#=9&x$@d&xk&8Pm4a5O(M*w8%j_&XgmTHt!5mr zEymjqOHkLEar6xmVR4u@Ep4v1S+}4#i+@ya$5smQA(R;xf~dM6st@Y4TRYZuI22eL zP|_~TKf^DUaXRT983Y-I0>2}{@5oVIOQK>sioS6b#C(P~5ftgDiReZN+En`TOy`b05Y`_xr@xFGzHrVHKTG7CV=!8aRve#F8GC zv0W&qE|k-pP?R;4w_CgFwy)GLR+m|nRmf!|{aCe4R1Ii~A&)al?= zSMZAMoRVjly=^z;XcuG{^?DTQbwh^h%=~`-DIq?9syFgms<0>oDVbH8UHyN3BpW%94^^8=K(OsaC%jOQ83Xrn5l0e&JN4l7i6=(;o7 zj?TAs%uw}YXYintpXf9yI5uinXmVdyXYe>2^<|jolGN$e;Krc3_J`Wl`WZ5>zf@*5N=Fgqb}`^{OdBhQ)%mPDj!uiE5daW)~7m_SR&9qB94fsI+cWGg=APd zSVRZb+np&3!N3!6l$Hfu$27O)r(*=)uNe zr=g`t=-N=1rz}inDqJSw>6Q=^L#$1QL0^X{0c*3}XRurGnIfglpa1Mb*1Y=9K4i^@ z|5x@Q_qF(L_pPyDo-Rqx8>C^xH%lyTH$B&Po)7aVdhSo;TPBs}=B5=~Rm$njjUq|S zYZx1cF{!s(#pNz7#bDIjP8<|4E4#ooskn4H2Em27MojbJvnaO|!_B1PqO#(`+$2|N zVKN4KMsve8i*lxy6ci;DyMgC7h}W0TZ-VcIxsd5r z3E6!m0$je#GO4f(VslA$A?DyOrwJ_5rb}s3Sye&4`AFL9KBa3$KIcYLz?a$dud`Fc z12O-xUZ!Y)4vVYP z3v;m^bBj^>=`af?BoJ#-2DvASsJADfFLfbfnUb7NeF!8Mc~+34>LRRchz~W-AvHp5 
zP~nZr4Zff9F{7%2k9FjsPt9@#a?)Lv&*Ue%^YV~M7*nYjr{(Gl&Sr?@qMRvt^d!Vo zVq=iV5-!9D|Fop^>=MmoNZM#E5k~C;i~kQkKZC|m4}LQ2t{5TSxWe{k9!`Rrq$fJ{ zIhNMu7Q0fyOxH6*rlVW-GyCy%p??!MU;F>)|HFa*aNz7Yz%thj$}CS9vMD`=h9VLw z04fv;>%?kbN+4heRFDBX7?At0>BhY`rW@mBSVAil(_mFVJMPQIuv;nG_k*JS7$}<2 z+$nRCVMe8|!b~&lryG#9Fyp@zifMD%mGR7nvO%F5SC7=8>!k1}9rRxTMVyx!a2X(J z!8DU{uL5K|*x#d8&M{S^%ntw(zlWii-eXYow*`tcaf*@d??KT$5=VJocl@pKX>D$u{{OFLT>9c|d&z@!Rr(yjzYG3v=j%V(hwIns z^EW<|?hW5+em?k}hU7QD3RV{X#*cLbKh|%)U+k|s-E$2XZNPyBv>C9M0V53aj<#uFN^qXzuYfO`$N!GJ3a=*weYx&C+S zv(9Nb?r0tV|Fk>#@VmSi-8TMa{S*76=KH3fG%PXbjXk&RaINY8_n5=Z`04m z>Hkm1a^}MZ8nZL+9{iui{NI=U8AZSt)t^xWZ2vwS{vV3zf4N9(7^dqFi(fkq)5OCV z@MSH#ZdpHr#gBfXH4i+Z;nMr-PEwb+xI)2=@W11s_aCxp=hJ?dp3+u{27LA`{o4)y z&%yt}2ai73Lwxa|=Rs`tX!UvHU;KayF%<*UsqbI;zD;DlfA#yLTKUHK?yVyq*ZnNS z$i+J3A){YZYd~COsQS|n8~wfEh=EEN^>gDW&9AqU{ylJs@%?DTd&G$MpM`8&h-d3I zhV<|glLI)^{jYy0<*!5S#~!@>fTJgf0jB_2}VW>s0e;Y8O zueOf_JOD*{j{$bF2~h<g!pETKeihc;I3_am%P)xTDu-$n=*bp`h zFq#VbSiq4`=R%(dXorf0-T}B5Dh_(WDd%gx$Bw+tU&;p(e1WbTp{Nn&!P>hrC2}4i#nV~PS3$Yn$4#L&}j(0$opicy> zgkn5ZfbSdn4*?e>>p05+ubc!Kga0hRjTb?Np(or8#XQ;zI5P$L3;k@spP`6Im#l0(MK&@k9dVrVH^S?DGMizgU-x7Xd$+3E6HBJOO{b1bcX(KMB}>md0}+;Nws% zM^6AQz6|@?;im$y+vVCm67U363GAByubrdqU4XSv#EtL??Av=7ezpR-t~7WEcz!12 z0rv5L#ZdHL0=OB9yu$roV!xp$T%WDwhcMzQ9Zw{nbFP+W!tS}c%tius%7Y*B2e92d z-~>J47N~gQ26$mU&d))g2>6Pj-vyXbpzYHDk3-FYpC-T!*P?7dPgsfx%JtB@03U^7 z9`W6Rm&&qTmGpcwW} zz|ckD1N32l>#0XwM7U}(=*0EJgp-yaPoZ}L9)a2bKZNPakRI$a0Ebth%tMd+*2HWm z@|)!^Puu|(20V|*wC*f|xelK9u&EOT{ z=>@nMY9nxa2C(urwp1cj%^I@&;1vnjw@yq~x1d4cW1f2MY#?t|K z-zMZQ!fpUe->mtc0r(446YNg{mOQ0#AdGk#@`-Yk2Ez$dq( zEr5O#V6R%;#t{zQq3xpqzkdaMhMyk+Z{CG=ANZ^S{Q6bo1@y-NuilL_sL&Sz#_mOa zKu_p^Qtb%fy-+2v-vBt`HPA>ufHhF7Xb+h2I_QEv3vkGO@D}<+z@MNtLEi*8`VDM1 zgdX?(iA$hJBhM`GtigKJ8-zR;Ks_P%=|7A%nvi?(X-~*K>eLf*e>wGp+!Ibc;X*^t zec{~eO?$$>7<%sSe$&tsa$hq25O%@-N$R;bnR^_mC*)pW>Iu2GmwH0((IuXQ+#gGO zLhi$)o{)PisVD4f=(*2wf}s~@@wHQa{DnVYG{mP9UT~ZJ#E-?p0Ww{L8XvAFz=`)Ks370&xXBej$DAcB&gY3yBO^< zm}tl;E(1 
zzLaMe8fgjE93p0j@cL6DF~{sbNQ%hE1n z0KO)ge_K8(JZ=9$_*ez+i2qM_kBrwIl)34RgAezAV!!fBcZ54;A1CpaMpKCLe{lbS zhi2?=I(e|7mS*rr{x)-a(@Et_|KM-^`X;HI75^E9M?2HMx+D6Cc2@B#+!#6ITVZ^) z$|z3utzA;*UooKqRh5tOhu#9EMB@yQ?)2vyj`6gmmcU+C2n&=yR3MZUs;{J+he6q( z{*j!=#l;C6lo!*dPZ!r;f4#!Zl<_rd)`&m<`Oo5+XP!|q{_eZ)iZ8zS!Yk(`Tpr$p zI&{;PKuY=zB{#_D^0td5+rQnOcTKTaf-K70j`vKqH)KK*u37TUPnpj_v*f-n@XFAy zc{lF+V&=@5+w;=m9tX_4CiU7WQa+n07q1+${qmWaVu^fc`?E6{-!{C0(Fn8r`-u~q zgp%SL&?t7KA|G|oo~>fK_Vt52IswF#rW~# zh24((V^dNT@1{nIz`Tn<#Q6Lv{4<~1qC9gt$q8&V{_jcJnirI`AO6lG8|Ka=9}ucVk;C&g9A zq*%IisaUykrC7Umt+?ZkJH%ag-6htqUoSRn*dQKxn>K9{&p!LCs>@z{@kR00 zE1Sf!@1%IPQHobzeO2t+w@2$vluT& zip%9xah<$Y+$qB4T7q%F8q9xe!hQ*apVx@CQihA05q>?w zKZ)?Q2>&L+AN2`87~vB$u+y>xW3Dy0BW05ie?s`3jp+L#{1Jrz7~wxh_fL3lRRga3OCVDdfFVh5YNaLhe|LI5!LV#o^}g=zL_fL-_Ux z-vQx+5gzqVBRVnC)>+64M+!M-s*u-TE94z(g?w(akZ&Ju4lg>14AC9^oql2s?#A1M z+e!~Y@*8p2uMEFT$dM&NPQ6LUYu5|8_9-DZ?-la!CqChA2!9^Jry=}R2w#rys}cTT zgnt_0Uq<*n2#++^eu(g2Abg`Y{JFTfWinzYKn#CC438j&U5MdR#LyTn#rGqn_;IQf z$FG&*#9AqS-Ymthhov`sPlO+a@DmVz8p6*-_yq{R0^#opmpBV5#WPc-*m8jxvt zaHPN)|0&Qo9GNla(8EO+*$ns8^rYm}35Y&9$v$DCJ@Z_87&NGVKf^sGJ<*YTD;W)6r4M?VV}mDP*1#JuD6MGyWu%gvqtcagjiQCh$+i zKQbX5EKW~OO;G&KsO&w-5RVCC?e^LGcOns)}g({67B?sL@_;e0tBhq-G2U<*wG#QgF2_cF=--X zIW8_Kso6aPPCSy-=G^d}LM7f)i5b770NcW@(5Z6>QIQU2YK)@0^ zhIg8hnUp>u0kY3|tE^qpWUZJ`ht5SURE1XmmHylk#L-&Gw)%1vVnVC_ z-0$~O%>LZ8Y134HWO43A-oD3UC?Qi+k?5N8Eewz2bof9uN;d{IKdPJpJ_3sz3Pr^Un+R72c_ROx%P1#wPR? zcJ10F4jee3`UXco`$T;9*=OSD(W9cFp+Ouwc1(Q#-M6Z5@Y7E}iQj(vO{_g3#b)#s z4*%*?&yMJahVFbcbSRf==Axm)#NWYN(9k_9!sRn!qQ-Y4@dQn9yDmkpb_C= z)^kE(?$K|+kRk9J9@ck6%!nA#tN+mG5xs*)gu_Fp(9k};`VER6(J?r>k6~f6MGqYj z6ciGTFz0mc6xt)Q`_K^`gMxxP1O$YfW3~2Z*S^Ql-W`L2X)z+KZ$zLaus{5c7}2{! 
z0ORi#>DL9Ojst=_bcn$?px>aL{=MLeAiZHm`Z2w_4jB+LVnoak;E(i&#>5PaiRl-k z=<9{ghOb{gWfV)SxVt$J9?@4;$^zKdM|;x`{Ue;{tL6j+y5}Mb$|7v|{V#of2#p@Q zK#$$40-$Hg|2MufQU7~s6W@+d5qf5V_C{JRVOy zX#MofH{ZN($BrFKCQh6<8u2indO>xE3Wp*t%-3K;QBNMzts7KtsP34n7~Ry=^b7Q8 zIL{jYDDe9r%3vJhIRE_f2NOTwrSO04wbvx(NG0%-Teogicw-Jpe);8>^3b6}5_1a5 zAH$%JK>MQzbN}Yen{V8*Wy_7|_9a(VR!%7?DRHb`y*dN^YT|YdR805o-A7|?O}@k0Mk;s3)AKS=at)(#jjU<`P;`s=U1mYCD^^7-JwgYs{G`xIB|ENH-dvMd2z#J}JzDF?o&i^W-C5B{UgkY@c)`A2&ssm%N!B>twHQ~sGBlq<@H zNejz3<&1LAvSC7&3x3l-mGWZHkP7*q1b&l-@rR_m;0;YfEXo4r$z}9*<(>8D*?bCr z)WvHt$3F)7u$twJbxT`xP~Ls>#FQ&S@}KT(7Z{fHjPG~JQIt{A@R5`$pdlGFZ2G&e zk2C7OgUj7#ASUYPpMREUXId!IU@DW2w(`WJ#g~p>fBjV+-gd8iWKpWT``QWe_Tof^ z*kxZzITMexPOXzNoizN-D9@0Yab|gDorLntI%zP<0n76+$P>%-DXg=#L4(2{`MnBr z7yVJMSCJOhhlC~#gp_;Ab!$3I`J${-mOk14jC^e2IQfu!yu7dMLPY}!xT0RlOAQ(_ zK9MpFI5|Osw>*;uqdX61orF4d5bC6UprP+pUAH2drLAaC`1kMM-&_Bqjb}T5T7Izn zvOO^KgXMu{vz!or)O)h_jvV>;b>rlt3&+a`LBqYEfnPT2&pDuBHtP5ppGip?%<@bc z%<{}SY2+U8{S_(CAr0WaZJU%+P*34I&=&rv@0I+ctgU8$h-J>C!=#0I!1lm5PfS_} zi9hSfXP4ULlQ$&DKZAxxK*Iwrjeps!eiFNoIK_4Q6>J4Q6>} zofQ3o!v92T{8`S}zM!AwRiwdGt?4k!iZ311lLmfUmRum8S~ONZd3}QXQ#oi@Fkb%D z__cfw#hEl@f(ERy$yq+-nROD&vmIrDby7U)B(~3Hoit>-lmj&Wt?Q*I;*b9Qsxf26 z^hY11igd7EB&5uc4)%EnNek`R)|&k?y0N`wpMl@=6&J|o7GEGYgNDC=hK-=%#k-2- zkKaOg&f=>Fo%4|=@ob+#gW1Oz^0E%xn*XdXO!*%>cI;s0g{fFxi8pa0?j|j)`&r*} zT*G>kZ57)#elIVJm)k(Yvx~>dO^Xub6QH4WdA<#G(%SiUx$2sUaz#Ostelr5m*hBP zwfi#p@q33PY4Gi1sPc?DiC=Lf z%8Co+OQ2yZXn5vE(15bQ@=O|7o>?c|g?g1VtRW3oPn3Vi2Mu`+xi}|T(Lg-e$6)(x z_A$nweZzN}{Ikwr8_a$L)y9n*<>QY(F4-?Nm6;!ezgJE$FDnn-mo0Zxo-bcq3K~!r zo(2tnHOe#VqzBv=%6m&Mly{a)kn4&PGFSq&O)0u9SR!;PSUU+X@`l_(qdPOE>2 zKV(74KjgZKwA^&lP4bB+o>26dG!UMN4$?%~Vpz}Nr{&J7^JOh)cmXu1@(dbSCz<7W z1L~ypr4!^GC5iHnpkXy=xEVAoha4@_&r*F`|YIsbjRKD)m2Gydu6z;KGO&2@Vnu_me6^;f zb)L}7I)P>3AARrbdeYm+V0o@F%5$r5zuW&I{>*!))5(00fBMs(RNsqz8q5#K1q&8< zY4|-lelJgm59_37SSS6X@(ddIwZZGQtfI&#?6ui}x>F7rGDK!(W=ibMknGo$m6b`C z%OzK>TBXW0rhHVr!@f{kw2=2~8(3GdFU0;R#{>LWo)13imM4F~c;lf3s(tqDW87tw 
z=QSt`{Mtg+w8LD56@C2~Ff06r4I6eIw)Pa^>5T^Tqg6gU_~3(b%9JS*dy8aEO^w8w zl3cfLox+FXd-9t!uq|LaLAj#h7=|>kKBXM8EhLY2RXgOHj}ial^0kK-H1{!37TCvN zd1jq-cX>L||A*r*%>AvzQs`334`h8U#{FZk59SG!VF@>hHz6d-grs@$!^2|EPw>c zdzE}~c_P|p-N#`2{P>MYa?2m*$q!zCP9Fd1r?-&bXFdOby_B-5s_F;qQIgA+E&J)R z%Py0Jg@uam7*{G;A`L7TocAGaq@y)0oP%IHOu1(tiDiZJ5!*4Iq#gT6Y$r%d8y{Hk z^T*7ue|G+daiQ#Pz#Qc#H{5W;DdLHJqe`(%n=}j^I#gosn#u>Z8I*bUeM~xtFY%|$ zlTMBe2w5IjMpz!qJRzhEvTtIxJsf*;{0w>dr{+IdwkZFQr3bKI>}{4)tcc4+ix#PT zAPuZnF}Eyd&z`O9nFpl7RHVTi_i{|hJ~ri^coI^ENSi5Nv}d1+eQwkxw_@(;JnS>P zg=2_+1%Jvt6=jL@&7_IAan2cg?^O9sN=lN6iHQ=sQ&c|ConcHG2u-;%^TaGCEFZ+( zgp?zW87Nom2Qu%>`3>OD<+c#0v-wp03A$Hd37|iCSjF=E(n~KX8pt2c8=`zD8nUyq zWm;OAk}a&sEBwi8@|?5~Z+YwxSv8iRgsaAiOBZ{ zG4JF7(p>Z5y3W~rs{dfhAp1WiUy08rpM0Y5LEcLAEx4;#l?UXvguE#kU>ij~(?1o> zq=EG)dCL0#si&TjPd@piY6JiJ*T1T8oFf3AJgD2QKpVLTxbKABAAmI1JfpC7OHF zKTH#|QJhh-lom2JKn_jFzX~S)!AZ_N_r34?p7WgNJm)#j^L)?y zUUUuejm$wm^MHBDm>?&N0r4!(yTo_uWsDdP)^qju$D$j(2~K#k$gebo#@MOX(%dTT^wHAq?0@ll zGktJ+(e=gkff9bBo3wUYKLMFyt&Q>*S~-S(#>UwY;5x`XvINhWL(&=TqGKYokI)~L ze{@82JjNRn`bCU=$&w{T`*N*(JDx{Nu0xiPb$(|JfOdzE$VYndHfXo>(}weZwf1}u zxnvEZAIw4cg#JL+u^vUo1l}Gzc+l+Hwadyg{C1gsBRk9;>?V$p8R}y#kj~8L)kPEK zJG7JTg`ViT2>mY82bZrmCe)!l(Y79BjC<%C^deh_q&w-0qh0ptz6r{Ef$A%tHvXge zW&K~R-<#38xkTj4YCiF~~=!M?}-px ze@ah`#DC}mYbW*#`VTo^J!0*I$M~G^o^ikyaO1!+Iv-t&9+OXDaSiqPPSZ|5to%o1 zW2~IJV>fT3C9=*j*Dw~yJbb2)$hzd{vTTGwjTP%Edn|5cZGyfVrBCPr_8|N(Gi|W< zfqfdcolKh+mqY8j`Xh9= zQN4~2hvmTSl&vA1rch1_%gJFmH7xH5%XnB;hGlD5nnF2=vQ+dWlxHhXaG%x`-HLrn zMxL)}D{7>lR8|z+TvU!GjzLt$#sgK|xQVuki+YvG3$i&%s)}U*oXy#DSS@Y&Jx9jPyEZ0bUhmA>!te`qcEr}WZE+2>t~lhw%vo~!F;D3{3IIJZI=2RIqA(AkRLf_Z}%A3tx*9~70- z^GKw#y8c?})6(Id+8joIq9gH_pn>c#jL_!4EWLVOI`^3L=4(_8W(wX2&IP_gOdNdZ z?F+{AsqZ&|^~TZDiYp`)d#Z*8Y$tqA)<^U@YZ1B{TNXQob9^T~Caem~6RZu4hy4p+ zRNy<{{$OmRYjs|~$MFH=gEq)tsAK(QUBl0%UF>Lh687KPPK6o^pWtO*SL_zN53KB! 
zq3GFhzXM`L<-xX(KZuP$h6XbFoeWP{zs~&qq{%#8ZE*?W;iP3}gm;}T;HHa21Gq1H zAc*aQ`8s(7?*(TpSNZm6nT)fh$bU`B&aH+HM+@VR4Gb-;Kd(PuZ)4hE&cx(h`_S+W z(E#oVes1MaG=Q~%@gR?2RA8OpgpX>DDD{S85@#J(+@(R!-_@};lN}A%9Pk9&`sGaz zS*#CyhBzXLw#i_x;KyKoQF&x93m6Sp9~cwz_}O3CTQ9oyv4b4G$hwN1=Il2|3wID# zCKl)^o(blwXX(t7nqyHKkjLsV@~Hj$c22OD26-HV)5U3DcuG=nh3Y6Bw2hoolEB-* zNWcrhsSf_A>4uo$=?{!)J|)akG_aqCIRR!%a{b4y(0?#Dw6?a^VS|z#9mGxG1&R1N zSQ8irSQ$7I_y#x`7!%kA7#aA|w`&UdhfmcSXt44q+|^;<)E8|Z9YDsmAxG(e7RLXL zEtLkm>wst=rb%oZj0JfF=K@~=YqS_w*#5@HAE7BbC!9Ra5+9UC#vj{EI7Cv#_MPMA%sVGEC&tL5<^=Lc8n0m||M=s?If&`P zWBe=4MeA=dH=)UmPjqY;i{Cz5Wrp58ZqEJf#1(neoB+Q@9(Qkgl;+1>|D{{8x8t-g zEaI<~l~{G_EjF!pDwu8CwpsrUn$U~P1L%MUj1RmyKX}soe(x&ti>K$T{Zw(NI$e%q{CwaUhUwMWv%E}ZWJ&*2CB z$G<}s;0N@NP1LFDj!L$oYwf7^IS?-PHO+hL2eQ_K%YZG(&NABfZTs!~FX$k{59k;> zHqghqh3>#FRrOy`#=g5=isOUI{x}Vm2ri-LObzi@XjrpmjpaM~fOU|%oeaQQL^&K9HcI6JNQj37a1g$4z9ucfKC^?Vhqp=+y^c2 z0=SUauLGKALdar<4xNxDR4I0Q?!yMo` z==h9CMp~B4g~|Vef^W z3$XfOg8im#Qk?#e<$IL>i~$+li|%8b^-c)n?~LMM&!`UKClgIPYf?19%>25Me!C`J zm*RS6chbo7y74ygLV1#M>?ShTxqITegOzbZZ#n zES-KL+8B76_CFJ6{g3Jxko;X>9^xA#uP@67StPrTeT|j!t#-=~RC?9b!L`UVHk`9( z+?+y}eN%oDJ`*v7qcUqguYH|L&OVNTAJ7(a4ZTE0Ub&Z)lzV~?{A$4@v%iHO@2fnB zZekumE4Bl&-S^9v%(IOJ+ps3|-|!#tU-1?2Rm$WmJukYzTJg!D9r>r-?4||Q#yKw^ zY`18p4t#NZFJck+efXU-<#!UJ#-@U9^cD#p1|RBY&#f`k5l!z&#S+sYj)2bw{}?CM zAnYL0>)(CY+FSS^_#XH^*t+-|xfh->)PWy9Uvw~jdiRXQxzSox-QNB-@=%fPbTwo7mp8l2 zfv4u#{oF6MeA&G9T88HJ#=t_-vn$qCLw_MR=xuB+#shsCrG@XTe)KeSQxCk*c{`t2vxz5hFMc*_x3HK|_fpTMsku2t%CX5| zj;Xv>cGqw{`5vyFq(EjOQKWK;edC(mg;`WM;(co$(mAe~X5Y3pS?3BzylZWW-e|;8 z!oF$k!{L=W(ls9muT0pttWAxTC)4Rv{j${?5-s(s8`85%);2dhn66ows!yixtFNtT zZcH~Wf8xHH#`*`7>H7QE%_vFKCmU*)r_$~p24|JbxWBBV;$t68B+8#?UYoZ6MR=37 zJy5t$ccxP{YxTE=8*DuZrSj&~nzd>r^>|ft?K=Gv-ilOu)D$1vi)IIvy~|K_Bs$AF zJ2Tyx{h6N3;Y@F)FVmkH$P8wNx`(@)vMt%R?AGkgY%+ zV0I`woQ?U@{35^DPxz&NnLpE??a%kC{KfuKf2H5#xA<-TR)448?sxj#{(is5KkWDW z1OA{tk&4+et4U?><4V!3I#qFiw76O-Q~dlDF^-w)N[^-]+) +-(?P\d+[^-]*) +(-(?P\d+[^-]*))? 
+-(?P\w+\d+(\.\w+\d+)*) +-(?P\w+) +-(?P\w+(\.\w+)*) +\.whl$ +''', re.IGNORECASE | re.VERBOSE) + +NAME_VERSION_RE = re.compile(r''' +(?P[^-]+) +-(?P\d+[^-]*) +(-(?P\d+[^-]*))?$ +''', re.IGNORECASE | re.VERBOSE) + +SHEBANG_RE = re.compile(br'\s*#![^\r\n]*') +SHEBANG_DETAIL_RE = re.compile(br'^(\s*#!("[^"]+"|\S+))\s+(.*)$') +SHEBANG_PYTHON = b'#!python' +SHEBANG_PYTHONW = b'#!pythonw' + +if os.sep == '/': + to_posix = lambda o: o +else: + to_posix = lambda o: o.replace(os.sep, '/') + + +class Mounter(object): + def __init__(self): + self.impure_wheels = {} + self.libs = {} + + def add(self, pathname, extensions): + self.impure_wheels[pathname] = extensions + self.libs.update(extensions) + + def remove(self, pathname): + extensions = self.impure_wheels.pop(pathname) + for k, v in extensions: + if k in self.libs: + del self.libs[k] + + def find_module(self, fullname, path=None): + if fullname in self.libs: + result = self + else: + result = None + return result + + def load_module(self, fullname): + if fullname in sys.modules: + result = sys.modules[fullname] + else: + if fullname not in self.libs: + raise ImportError('unable to find extension for %s' % fullname) + result = imp.load_dynamic(fullname, self.libs[fullname]) + result.__loader__ = self + parts = fullname.rsplit('.', 1) + if len(parts) > 1: + result.__package__ = parts[0] + return result + +_hook = Mounter() + + +class Wheel(object): + """ + Class to build and install from Wheel files (PEP 427). + """ + + wheel_version = (1, 1) + hash_kind = 'sha256' + + def __init__(self, filename=None, sign=False, verify=False): + """ + Initialise an instance using a (valid) filename. 
+ """ + self.sign = sign + self.should_verify = verify + self.buildver = '' + self.pyver = [PYVER] + self.abi = ['none'] + self.arch = ['any'] + self.dirname = os.getcwd() + if filename is None: + self.name = 'dummy' + self.version = '0.1' + self._filename = self.filename + else: + m = NAME_VERSION_RE.match(filename) + if m: + info = m.groupdict('') + self.name = info['nm'] + # Reinstate the local version separator + self.version = info['vn'].replace('_', '-') + self.buildver = info['bn'] + self._filename = self.filename + else: + dirname, filename = os.path.split(filename) + m = FILENAME_RE.match(filename) + if not m: + raise DistlibException('Invalid name or ' + 'filename: %r' % filename) + if dirname: + self.dirname = os.path.abspath(dirname) + self._filename = filename + info = m.groupdict('') + self.name = info['nm'] + self.version = info['vn'] + self.buildver = info['bn'] + self.pyver = info['py'].split('.') + self.abi = info['bi'].split('.') + self.arch = info['ar'].split('.') + + @property + def filename(self): + """ + Build and return a filename from the various components. 
+ """ + if self.buildver: + buildver = '-' + self.buildver + else: + buildver = '' + pyver = '.'.join(self.pyver) + abi = '.'.join(self.abi) + arch = '.'.join(self.arch) + # replace - with _ as a local version separator + version = self.version.replace('-', '_') + return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver, + pyver, abi, arch) + + @property + def exists(self): + path = os.path.join(self.dirname, self.filename) + return os.path.isfile(path) + + @property + def tags(self): + for pyver in self.pyver: + for abi in self.abi: + for arch in self.arch: + yield pyver, abi, arch + + @cached_property + def metadata(self): + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + wrapper = codecs.getreader('utf-8') + with ZipFile(pathname, 'r') as zf: + wheel_metadata = self.get_wheel_metadata(zf) + wv = wheel_metadata['Wheel-Version'].split('.', 1) + file_version = tuple([int(i) for i in wv]) + if file_version < (1, 1): + fn = 'METADATA' + else: + fn = METADATA_FILENAME + try: + metadata_filename = posixpath.join(info_dir, fn) + with zf.open(metadata_filename) as bf: + wf = wrapper(bf) + result = Metadata(fileobj=wf) + except KeyError: + raise ValueError('Invalid wheel, because %s is ' + 'missing' % fn) + return result + + def get_wheel_metadata(self, zf): + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + metadata_filename = posixpath.join(info_dir, 'WHEEL') + with zf.open(metadata_filename) as bf: + wf = codecs.getreader('utf-8')(bf) + message = message_from_file(wf) + return dict(message) + + @cached_property + def info(self): + pathname = os.path.join(self.dirname, self.filename) + with ZipFile(pathname, 'r') as zf: + result = self.get_wheel_metadata(zf) + return result + + def process_shebang(self, data): + m = SHEBANG_RE.match(data) + if m: + end = m.end() + shebang, data_after_shebang = data[:end], data[end:] + # Preserve any 
arguments after the interpreter + if b'pythonw' in shebang.lower(): + shebang_python = SHEBANG_PYTHONW + else: + shebang_python = SHEBANG_PYTHON + m = SHEBANG_DETAIL_RE.match(shebang) + if m: + args = b' ' + m.groups()[-1] + else: + args = b'' + shebang = shebang_python + args + data = shebang + data_after_shebang + else: + cr = data.find(b'\r') + lf = data.find(b'\n') + if cr < 0 or cr > lf: + term = b'\n' + else: + if data[cr:cr + 2] == b'\r\n': + term = b'\r\n' + else: + term = b'\r' + data = SHEBANG_PYTHON + term + data + return data + + def get_hash(self, data, hash_kind=None): + if hash_kind is None: + hash_kind = self.hash_kind + try: + hasher = getattr(hashlib, hash_kind) + except AttributeError: + raise DistlibException('Unsupported hash algorithm: %r' % hash_kind) + result = hasher(data).digest() + result = base64.urlsafe_b64encode(result).rstrip(b'=').decode('ascii') + return hash_kind, result + + def write_record(self, records, record_path, base): + with CSVWriter(record_path) as writer: + for row in records: + writer.writerow(row) + p = to_posix(os.path.relpath(record_path, base)) + writer.writerow((p, '', '')) + + def write_records(self, info, libdir, archive_paths): + records = [] + distinfo, info_dir = info + hasher = getattr(hashlib, self.hash_kind) + for ap, p in archive_paths: + with open(p, 'rb') as f: + data = f.read() + digest = '%s=%s' % self.get_hash(data) + size = os.path.getsize(p) + records.append((ap, digest, size)) + + p = os.path.join(distinfo, 'RECORD') + self.write_record(records, p, libdir) + ap = to_posix(os.path.join(info_dir, 'RECORD')) + archive_paths.append((ap, p)) + + def build_zip(self, pathname, archive_paths): + with ZipFile(pathname, 'w', zipfile.ZIP_DEFLATED) as zf: + for ap, p in archive_paths: + logger.debug('Wrote %s to %s in wheel', p, ap) + zf.write(p, ap) + + def build(self, paths, tags=None, wheel_version=None): + """ + Build a wheel from files in specified paths, and use any specified tags + when determining the 
name of the wheel. + """ + if tags is None: + tags = {} + + libkey = list(filter(lambda o: o in paths, ('purelib', 'platlib')))[0] + if libkey == 'platlib': + is_pure = 'false' + default_pyver = [IMPVER] + default_abi = [ABI] + default_arch = [ARCH] + else: + is_pure = 'true' + default_pyver = [PYVER] + default_abi = ['none'] + default_arch = ['any'] + + self.pyver = tags.get('pyver', default_pyver) + self.abi = tags.get('abi', default_abi) + self.arch = tags.get('arch', default_arch) + + libdir = paths[libkey] + + name_ver = '%s-%s' % (self.name, self.version) + data_dir = '%s.data' % name_ver + info_dir = '%s.dist-info' % name_ver + + archive_paths = [] + + # First, stuff which is not in site-packages + for key in ('data', 'headers', 'scripts'): + if key not in paths: + continue + path = paths[key] + if os.path.isdir(path): + for root, dirs, files in os.walk(path): + for fn in files: + p = fsdecode(os.path.join(root, fn)) + rp = os.path.relpath(p, path) + ap = to_posix(os.path.join(data_dir, key, rp)) + archive_paths.append((ap, p)) + if key == 'scripts' and not p.endswith('.exe'): + with open(p, 'rb') as f: + data = f.read() + data = self.process_shebang(data) + with open(p, 'wb') as f: + f.write(data) + + # Now, stuff which is in site-packages, other than the + # distinfo stuff. + path = libdir + distinfo = None + for root, dirs, files in os.walk(path): + if root == path: + # At the top level only, save distinfo for later + # and skip it for now + for i, dn in enumerate(dirs): + dn = fsdecode(dn) + if dn.endswith('.dist-info'): + distinfo = os.path.join(root, dn) + del dirs[i] + break + assert distinfo, '.dist-info directory expected, not found' + + for fn in files: + # comment out next suite to leave .pyc files in + if fsdecode(fn).endswith(('.pyc', '.pyo')): + continue + p = os.path.join(root, fn) + rp = to_posix(os.path.relpath(p, path)) + archive_paths.append((rp, p)) + + # Now distinfo. Assumed to be flat, i.e. os.listdir is enough. 
+ files = os.listdir(distinfo) + for fn in files: + if fn not in ('RECORD', 'INSTALLER', 'SHARED', 'WHEEL'): + p = fsdecode(os.path.join(distinfo, fn)) + ap = to_posix(os.path.join(info_dir, fn)) + archive_paths.append((ap, p)) + + wheel_metadata = [ + 'Wheel-Version: %d.%d' % (wheel_version or self.wheel_version), + 'Generator: distlib %s' % __version__, + 'Root-Is-Purelib: %s' % is_pure, + ] + for pyver, abi, arch in self.tags: + wheel_metadata.append('Tag: %s-%s-%s' % (pyver, abi, arch)) + p = os.path.join(distinfo, 'WHEEL') + with open(p, 'w') as f: + f.write('\n'.join(wheel_metadata)) + ap = to_posix(os.path.join(info_dir, 'WHEEL')) + archive_paths.append((ap, p)) + + # Now, at last, RECORD. + # Paths in here are archive paths - nothing else makes sense. + self.write_records((distinfo, info_dir), libdir, archive_paths) + # Now, ready to build the zip file + pathname = os.path.join(self.dirname, self.filename) + self.build_zip(pathname, archive_paths) + return pathname + + def install(self, paths, maker, **kwargs): + """ + Install a wheel to the specified paths. If kwarg ``warner`` is + specified, it should be a callable, which will be called with two + tuples indicating the wheel version of this software and the wheel + version in the file, if there is a discrepancy in the versions. + This can be used to issue any warnings to raise any exceptions. + If kwarg ``lib_only`` is True, only the purelib/platlib files are + installed, and the headers, scripts, data and dist-info metadata are + not written. + + The return value is a :class:`InstalledDistribution` instance unless + ``options.lib_only`` is True, in which case the return value is ``None``. 
+ """ + + dry_run = maker.dry_run + warner = kwargs.get('warner') + lib_only = kwargs.get('lib_only', False) + + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + data_dir = '%s.data' % name_ver + info_dir = '%s.dist-info' % name_ver + + metadata_name = posixpath.join(info_dir, METADATA_FILENAME) + wheel_metadata_name = posixpath.join(info_dir, 'WHEEL') + record_name = posixpath.join(info_dir, 'RECORD') + + wrapper = codecs.getreader('utf-8') + + with ZipFile(pathname, 'r') as zf: + with zf.open(wheel_metadata_name) as bwf: + wf = wrapper(bwf) + message = message_from_file(wf) + wv = message['Wheel-Version'].split('.', 1) + file_version = tuple([int(i) for i in wv]) + if (file_version != self.wheel_version) and warner: + warner(self.wheel_version, file_version) + + if message['Root-Is-Purelib'] == 'true': + libdir = paths['purelib'] + else: + libdir = paths['platlib'] + + records = {} + with zf.open(record_name) as bf: + with CSVReader(stream=bf) as reader: + for row in reader: + p = row[0] + records[p] = row + + data_pfx = posixpath.join(data_dir, '') + info_pfx = posixpath.join(info_dir, '') + script_pfx = posixpath.join(data_dir, 'scripts', '') + + # make a new instance rather than a copy of maker's, + # as we mutate it + fileop = FileOperator(dry_run=dry_run) + fileop.record = True # so we can rollback if needed + + bc = not sys.dont_write_bytecode # Double negatives. Lovely! 
+ + outfiles = [] # for RECORD writing + + # for script copying/shebang processing + workdir = tempfile.mkdtemp() + # set target dir later + # we default add_launchers to False, as the + # Python Launcher should be used instead + maker.source_dir = workdir + maker.target_dir = None + try: + for zinfo in zf.infolist(): + arcname = zinfo.filename + if isinstance(arcname, text_type): + u_arcname = arcname + else: + u_arcname = arcname.decode('utf-8') + # The signature file won't be in RECORD, + # and we don't currently don't do anything with it + if u_arcname.endswith('/RECORD.jws'): + continue + row = records[u_arcname] + if row[2] and str(zinfo.file_size) != row[2]: + raise DistlibException('size mismatch for ' + '%s' % u_arcname) + if row[1]: + kind, value = row[1].split('=', 1) + with zf.open(arcname) as bf: + data = bf.read() + _, digest = self.get_hash(data, kind) + if digest != value: + raise DistlibException('digest mismatch for ' + '%s' % arcname) + + if lib_only and u_arcname.startswith((info_pfx, data_pfx)): + logger.debug('lib_only: skipping %s', u_arcname) + continue + is_script = (u_arcname.startswith(script_pfx) + and not u_arcname.endswith('.exe')) + + if u_arcname.startswith(data_pfx): + _, where, rp = u_arcname.split('/', 2) + outfile = os.path.join(paths[where], convert_path(rp)) + else: + # meant for site-packages. 
+ if u_arcname in (wheel_metadata_name, record_name): + continue + outfile = os.path.join(libdir, convert_path(u_arcname)) + if not is_script: + with zf.open(arcname) as bf: + fileop.copy_stream(bf, outfile) + outfiles.append(outfile) + # Double check the digest of the written file + if not dry_run and row[1]: + with open(outfile, 'rb') as bf: + data = bf.read() + _, newdigest = self.get_hash(data, kind) + if newdigest != digest: + raise DistlibException('digest mismatch ' + 'on write for ' + '%s' % outfile) + if bc and outfile.endswith('.py'): + try: + pyc = fileop.byte_compile(outfile) + outfiles.append(pyc) + except Exception: + # Don't give up if byte-compilation fails, + # but log it and perhaps warn the user + logger.warning('Byte-compilation failed', + exc_info=True) + else: + fn = os.path.basename(convert_path(arcname)) + workname = os.path.join(workdir, fn) + with zf.open(arcname) as bf: + fileop.copy_stream(bf, workname) + + dn, fn = os.path.split(outfile) + maker.target_dir = dn + filenames = maker.make(fn) + fileop.set_executable_mode(filenames) + outfiles.extend(filenames) + + if lib_only: + logger.debug('lib_only: returning None') + dist = None + else: + # Generate scripts + + # Try to get pydist.json so we can see if there are + # any commands to generate. If this fails (e.g. because + # of a legacy wheel), log a warning but don't give up. 
+ commands = None + file_version = self.info['Wheel-Version'] + if file_version == '1.0': + # Use legacy info + ep = posixpath.join(info_dir, 'entry_points.txt') + try: + with zf.open(ep) as bwf: + epdata = read_exports(bwf) + commands = {} + for key in ('console', 'gui'): + k = '%s_scripts' % key + if k in epdata: + commands['wrap_%s' % key] = d = {} + for v in epdata[k].values(): + s = '%s:%s' % (v.prefix, v.suffix) + if v.flags: + s += ' %s' % v.flags + d[v.name] = s + except Exception: + logger.warning('Unable to read legacy script ' + 'metadata, so cannot generate ' + 'scripts') + else: + try: + with zf.open(metadata_name) as bwf: + wf = wrapper(bwf) + commands = json.load(wf).get('extensions') + if commands: + commands = commands.get('python.commands') + except Exception: + logger.warning('Unable to read JSON metadata, so ' + 'cannot generate scripts') + if commands: + console_scripts = commands.get('wrap_console', {}) + gui_scripts = commands.get('wrap_gui', {}) + if console_scripts or gui_scripts: + script_dir = paths.get('scripts', '') + if not os.path.isdir(script_dir): + raise ValueError('Valid script path not ' + 'specified') + maker.target_dir = script_dir + for k, v in console_scripts.items(): + script = '%s = %s' % (k, v) + filenames = maker.make(script) + fileop.set_executable_mode(filenames) + + if gui_scripts: + options = {'gui': True } + for k, v in gui_scripts.items(): + script = '%s = %s' % (k, v) + filenames = maker.make(script, options) + fileop.set_executable_mode(filenames) + + p = os.path.join(libdir, info_dir) + dist = InstalledDistribution(p) + + # Write SHARED + paths = dict(paths) # don't change passed in dict + del paths['purelib'] + del paths['platlib'] + paths['lib'] = libdir + p = dist.write_shared_locations(paths, dry_run) + if p: + outfiles.append(p) + + # Write RECORD + dist.write_installed_files(outfiles, paths['prefix'], + dry_run) + return dist + except Exception: # pragma: no cover + logger.exception('installation failed.') 
+ fileop.rollback() + raise + finally: + shutil.rmtree(workdir) + + def _get_dylib_cache(self): + global cache + if cache is None: + # Use native string to avoid issues on 2.x: see Python #20140. + base = os.path.join(get_cache_base(), str('dylib-cache'), + sys.version[:3]) + cache = Cache(base) + return cache + + def _get_extensions(self): + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + arcname = posixpath.join(info_dir, 'EXTENSIONS') + wrapper = codecs.getreader('utf-8') + result = [] + with ZipFile(pathname, 'r') as zf: + try: + with zf.open(arcname) as bf: + wf = wrapper(bf) + extensions = json.load(wf) + cache = self._get_dylib_cache() + prefix = cache.prefix_to_dir(pathname) + cache_base = os.path.join(cache.base, prefix) + if not os.path.isdir(cache_base): + os.makedirs(cache_base) + for name, relpath in extensions.items(): + dest = os.path.join(cache_base, convert_path(relpath)) + if not os.path.exists(dest): + extract = True + else: + file_time = os.stat(dest).st_mtime + file_time = datetime.datetime.fromtimestamp(file_time) + info = zf.getinfo(relpath) + wheel_time = datetime.datetime(*info.date_time) + extract = wheel_time > file_time + if extract: + zf.extract(relpath, cache_base) + result.append((name, dest)) + except KeyError: + pass + return result + + def is_compatible(self): + """ + Determine if a wheel is compatible with the running system. + """ + return is_compatible(self) + + def is_mountable(self): + """ + Determine if a wheel is asserted as mountable by its metadata. + """ + return True # for now - metadata details TBD + + def mount(self, append=False): + pathname = os.path.abspath(os.path.join(self.dirname, self.filename)) + if not self.is_compatible(): + msg = 'Wheel %s not compatible with this Python.' % pathname + raise DistlibException(msg) + if not self.is_mountable(): + msg = 'Wheel %s is marked as not mountable.' 
% pathname + raise DistlibException(msg) + if pathname in sys.path: + logger.debug('%s already in path', pathname) + else: + if append: + sys.path.append(pathname) + else: + sys.path.insert(0, pathname) + extensions = self._get_extensions() + if extensions: + if _hook not in sys.meta_path: + sys.meta_path.append(_hook) + _hook.add(pathname, extensions) + + def unmount(self): + pathname = os.path.abspath(os.path.join(self.dirname, self.filename)) + if pathname not in sys.path: + logger.debug('%s not in path', pathname) + else: + sys.path.remove(pathname) + if pathname in _hook.impure_wheels: + _hook.remove(pathname) + if not _hook.impure_wheels: + if _hook in sys.meta_path: + sys.meta_path.remove(_hook) + + def verify(self): + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + data_dir = '%s.data' % name_ver + info_dir = '%s.dist-info' % name_ver + + metadata_name = posixpath.join(info_dir, METADATA_FILENAME) + wheel_metadata_name = posixpath.join(info_dir, 'WHEEL') + record_name = posixpath.join(info_dir, 'RECORD') + + wrapper = codecs.getreader('utf-8') + + with ZipFile(pathname, 'r') as zf: + with zf.open(wheel_metadata_name) as bwf: + wf = wrapper(bwf) + message = message_from_file(wf) + wv = message['Wheel-Version'].split('.', 1) + file_version = tuple([int(i) for i in wv]) + # TODO version verification + + records = {} + with zf.open(record_name) as bf: + with CSVReader(stream=bf) as reader: + for row in reader: + p = row[0] + records[p] = row + + for zinfo in zf.infolist(): + arcname = zinfo.filename + if isinstance(arcname, text_type): + u_arcname = arcname + else: + u_arcname = arcname.decode('utf-8') + if '..' 
in u_arcname: + raise DistlibException('invalid entry in ' + 'wheel: %r' % u_arcname) + + # The signature file won't be in RECORD, + # and we don't currently don't do anything with it + if u_arcname.endswith('/RECORD.jws'): + continue + row = records[u_arcname] + if row[2] and str(zinfo.file_size) != row[2]: + raise DistlibException('size mismatch for ' + '%s' % u_arcname) + if row[1]: + kind, value = row[1].split('=', 1) + with zf.open(arcname) as bf: + data = bf.read() + _, digest = self.get_hash(data, kind) + if digest != value: + raise DistlibException('digest mismatch for ' + '%s' % arcname) + + def update(self, modifier, dest_dir=None, **kwargs): + """ + Update the contents of a wheel in a generic way. The modifier should + be a callable which expects a dictionary argument: its keys are + archive-entry paths, and its values are absolute filesystem paths + where the contents the corresponding archive entries can be found. The + modifier is free to change the contents of the files pointed to, add + new entries and remove entries, before returning. This method will + extract the entire contents of the wheel to a temporary location, call + the modifier, and then use the passed (and possibly updated) + dictionary to write a new wheel. If ``dest_dir`` is specified, the new + wheel is written there -- otherwise, the original wheel is overwritten. + + The modifier should return True if it updated the wheel, else False. + This method returns the same value the modifier returns. 
+ """ + + def get_version(path_map, info_dir): + version = path = None + key = '%s/%s' % (info_dir, METADATA_FILENAME) + if key not in path_map: + key = '%s/PKG-INFO' % info_dir + if key in path_map: + path = path_map[key] + version = Metadata(path=path).version + return version, path + + def update_version(version, path): + updated = None + try: + v = NormalizedVersion(version) + i = version.find('-') + if i < 0: + updated = '%s+1' % version + else: + parts = [int(s) for s in version[i + 1:].split('.')] + parts[-1] += 1 + updated = '%s+%s' % (version[:i], + '.'.join(str(i) for i in parts)) + except UnsupportedVersionError: + logger.debug('Cannot update non-compliant (PEP-440) ' + 'version %r', version) + if updated: + md = Metadata(path=path) + md.version = updated + legacy = not path.endswith(METADATA_FILENAME) + md.write(path=path, legacy=legacy) + logger.debug('Version updated from %r to %r', version, + updated) + + pathname = os.path.join(self.dirname, self.filename) + name_ver = '%s-%s' % (self.name, self.version) + info_dir = '%s.dist-info' % name_ver + record_name = posixpath.join(info_dir, 'RECORD') + with tempdir() as workdir: + with ZipFile(pathname, 'r') as zf: + path_map = {} + for zinfo in zf.infolist(): + arcname = zinfo.filename + if isinstance(arcname, text_type): + u_arcname = arcname + else: + u_arcname = arcname.decode('utf-8') + if u_arcname == record_name: + continue + if '..' in u_arcname: + raise DistlibException('invalid entry in ' + 'wheel: %r' % u_arcname) + zf.extract(zinfo, workdir) + path = os.path.join(workdir, convert_path(u_arcname)) + path_map[u_arcname] = path + + # Remember the version. + original_version, _ = get_version(path_map, info_dir) + # Files extracted. Call the modifier. + modified = modifier(path_map, **kwargs) + if modified: + # Something changed - need to build a new wheel. 
+ current_version, path = get_version(path_map, info_dir) + if current_version and (current_version == original_version): + # Add or update local version to signify changes. + update_version(current_version, path) + # Decide where the new wheel goes. + if dest_dir is None: + fd, newpath = tempfile.mkstemp(suffix='.whl', + prefix='wheel-update-', + dir=workdir) + os.close(fd) + else: + if not os.path.isdir(dest_dir): + raise DistlibException('Not a directory: %r' % dest_dir) + newpath = os.path.join(dest_dir, self.filename) + archive_paths = list(path_map.items()) + distinfo = os.path.join(workdir, info_dir) + info = distinfo, info_dir + self.write_records(info, workdir, archive_paths) + self.build_zip(newpath, archive_paths) + if dest_dir is None: + shutil.copyfile(newpath, pathname) + return modified + +def compatible_tags(): + """ + Return (pyver, abi, arch) tuples compatible with this Python. + """ + versions = [VER_SUFFIX] + major = VER_SUFFIX[0] + for minor in range(sys.version_info[1] - 1, - 1, -1): + versions.append(''.join([major, str(minor)])) + + abis = [] + for suffix, _, _ in imp.get_suffixes(): + if suffix.startswith('.abi'): + abis.append(suffix.split('.', 2)[1]) + abis.sort() + if ABI != 'none': + abis.insert(0, ABI) + abis.append('none') + result = [] + + arches = [ARCH] + if sys.platform == 'darwin': + m = re.match('(\w+)_(\d+)_(\d+)_(\w+)$', ARCH) + if m: + name, major, minor, arch = m.groups() + minor = int(minor) + matches = [arch] + if arch in ('i386', 'ppc'): + matches.append('fat') + if arch in ('i386', 'ppc', 'x86_64'): + matches.append('fat3') + if arch in ('ppc64', 'x86_64'): + matches.append('fat64') + if arch in ('i386', 'x86_64'): + matches.append('intel') + if arch in ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'): + matches.append('universal') + while minor >= 0: + for match in matches: + s = '%s_%s_%s_%s' % (name, major, minor, match) + if s != ARCH: # already there + arches.append(s) + minor -= 1 + + # Most specific - our Python 
version, ABI and arch + for abi in abis: + for arch in arches: + result.append((''.join((IMP_PREFIX, versions[0])), abi, arch)) + + # where no ABI / arch dependency, but IMP_PREFIX dependency + for i, version in enumerate(versions): + result.append((''.join((IMP_PREFIX, version)), 'none', 'any')) + if i == 0: + result.append((''.join((IMP_PREFIX, version[0])), 'none', 'any')) + + # no IMP_PREFIX, ABI or arch dependency + for i, version in enumerate(versions): + result.append((''.join(('py', version)), 'none', 'any')) + if i == 0: + result.append((''.join(('py', version[0])), 'none', 'any')) + return set(result) + + +COMPATIBLE_TAGS = compatible_tags() + +del compatible_tags + + +def is_compatible(wheel, tags=None): + if not isinstance(wheel, Wheel): + wheel = Wheel(wheel) # assume it's a filename + result = False + if tags is None: + tags = COMPATIBLE_TAGS + for ver, abi, arch in tags: + if ver in wheel.pyver and abi in wheel.abi and arch in wheel.arch: + result = True + break + return result diff --git a/lib/python3.4/site-packages/pip/_vendor/html5lib/__init__.py b/lib/python3.4/site-packages/pip/_vendor/html5lib/__init__.py new file mode 100644 index 0000000..9484fdc --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/html5lib/__init__.py @@ -0,0 +1,25 @@ +""" +HTML parsing library based on the WHATWG "HTML5" +specification. The parser is designed to be compatible with existing +HTML found in the wild and implements well-defined error recovery that +is largely compatible with modern desktop web browsers. 
+ +Example usage: + +import html5lib +f = open("my_document.html") +tree = html5lib.parse(f) +""" + +from __future__ import absolute_import, division, unicode_literals + +from .html5parser import HTMLParser, parse, parseFragment +from .treebuilders import getTreeBuilder +from .treewalkers import getTreeWalker +from .serializer import serialize + +__all__ = ["HTMLParser", "parse", "parseFragment", "getTreeBuilder", + "getTreeWalker", "serialize"] + +# this has to be at the top level, see how setup.py parses this +__version__ = "1.0b8" diff --git a/lib/python3.4/site-packages/pip/_vendor/html5lib/constants.py b/lib/python3.4/site-packages/pip/_vendor/html5lib/constants.py new file mode 100644 index 0000000..d938e0a --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/html5lib/constants.py @@ -0,0 +1,3102 @@ +from __future__ import absolute_import, division, unicode_literals + +import string + +EOF = None + +E = { + "null-character": + "Null character in input stream, replaced with U+FFFD.", + "invalid-codepoint": + "Invalid codepoint in stream.", + "incorrectly-placed-solidus": + "Solidus (/) incorrectly placed in tag.", + "incorrect-cr-newline-entity": + "Incorrect CR newline entity, replaced with LF.", + "illegal-windows-1252-entity": + "Entity used with illegal number (windows-1252 reference).", + "cant-convert-numeric-entity": + "Numeric entity couldn't be converted to character " + "(codepoint U+%(charAsInt)08x).", + "illegal-codepoint-for-numeric-entity": + "Numeric entity represents an illegal codepoint: " + "U+%(charAsInt)08x.", + "numeric-entity-without-semicolon": + "Numeric entity didn't end with ';'.", + "expected-numeric-entity-but-got-eof": + "Numeric entity expected. Got end of file instead.", + "expected-numeric-entity": + "Numeric entity expected but none found.", + "named-entity-without-semicolon": + "Named entity didn't end with ';'.", + "expected-named-entity": + "Named entity expected. 
Got none.", + "attributes-in-end-tag": + "End tag contains unexpected attributes.", + 'self-closing-flag-on-end-tag': + "End tag contains unexpected self-closing flag.", + "expected-tag-name-but-got-right-bracket": + "Expected tag name. Got '>' instead.", + "expected-tag-name-but-got-question-mark": + "Expected tag name. Got '?' instead. (HTML doesn't " + "support processing instructions.)", + "expected-tag-name": + "Expected tag name. Got something else instead", + "expected-closing-tag-but-got-right-bracket": + "Expected closing tag. Got '>' instead. Ignoring ''.", + "expected-closing-tag-but-got-eof": + "Expected closing tag. Unexpected end of file.", + "expected-closing-tag-but-got-char": + "Expected closing tag. Unexpected character '%(data)s' found.", + "eof-in-tag-name": + "Unexpected end of file in the tag name.", + "expected-attribute-name-but-got-eof": + "Unexpected end of file. Expected attribute name instead.", + "eof-in-attribute-name": + "Unexpected end of file in attribute name.", + "invalid-character-in-attribute-name": + "Invalid character in attribute name", + "duplicate-attribute": + "Dropped duplicate attribute on tag.", + "expected-end-of-tag-name-but-got-eof": + "Unexpected end of file. Expected = or end of tag.", + "expected-attribute-value-but-got-eof": + "Unexpected end of file. Expected attribute value.", + "expected-attribute-value-but-got-right-bracket": + "Expected attribute value. 
Got '>' instead.", + 'equals-in-unquoted-attribute-value': + "Unexpected = in unquoted attribute", + 'unexpected-character-in-unquoted-attribute-value': + "Unexpected character in unquoted attribute", + "invalid-character-after-attribute-name": + "Unexpected character after attribute name.", + "unexpected-character-after-attribute-value": + "Unexpected character after attribute value.", + "eof-in-attribute-value-double-quote": + "Unexpected end of file in attribute value (\").", + "eof-in-attribute-value-single-quote": + "Unexpected end of file in attribute value (').", + "eof-in-attribute-value-no-quotes": + "Unexpected end of file in attribute value.", + "unexpected-EOF-after-solidus-in-tag": + "Unexpected end of file in tag. Expected >", + "unexpected-character-after-solidus-in-tag": + "Unexpected character after / in tag. Expected >", + "expected-dashes-or-doctype": + "Expected '--' or 'DOCTYPE'. Not found.", + "unexpected-bang-after-double-dash-in-comment": + "Unexpected ! after -- in comment", + "unexpected-space-after-double-dash-in-comment": + "Unexpected space after -- in comment", + "incorrect-comment": + "Incorrect comment.", + "eof-in-comment": + "Unexpected end of file in comment.", + "eof-in-comment-end-dash": + "Unexpected end of file in comment (-)", + "unexpected-dash-after-double-dash-in-comment": + "Unexpected '-' after '--' found in comment.", + "eof-in-comment-double-dash": + "Unexpected end of file in comment (--).", + "eof-in-comment-end-space-state": + "Unexpected end of file in comment.", + "eof-in-comment-end-bang-state": + "Unexpected end of file in comment.", + "unexpected-char-in-comment": + "Unexpected character in comment found.", + "need-space-after-doctype": + "No space after literal string 'DOCTYPE'.", + "expected-doctype-name-but-got-right-bracket": + "Unexpected > character. Expected DOCTYPE name.", + "expected-doctype-name-but-got-eof": + "Unexpected end of file. 
Expected DOCTYPE name.", + "eof-in-doctype-name": + "Unexpected end of file in DOCTYPE name.", + "eof-in-doctype": + "Unexpected end of file in DOCTYPE.", + "expected-space-or-right-bracket-in-doctype": + "Expected space or '>'. Got '%(data)s'", + "unexpected-end-of-doctype": + "Unexpected end of DOCTYPE.", + "unexpected-char-in-doctype": + "Unexpected character in DOCTYPE.", + "eof-in-innerhtml": + "XXX innerHTML EOF", + "unexpected-doctype": + "Unexpected DOCTYPE. Ignored.", + "non-html-root": + "html needs to be the first start tag.", + "expected-doctype-but-got-eof": + "Unexpected End of file. Expected DOCTYPE.", + "unknown-doctype": + "Erroneous DOCTYPE.", + "expected-doctype-but-got-chars": + "Unexpected non-space characters. Expected DOCTYPE.", + "expected-doctype-but-got-start-tag": + "Unexpected start tag (%(name)s). Expected DOCTYPE.", + "expected-doctype-but-got-end-tag": + "Unexpected end tag (%(name)s). Expected DOCTYPE.", + "end-tag-after-implied-root": + "Unexpected end tag (%(name)s) after the (implied) root element.", + "expected-named-closing-tag-but-got-eof": + "Unexpected end of file. Expected end tag (%(name)s).", + "two-heads-are-not-better-than-one": + "Unexpected start tag head in existing head. Ignored.", + "unexpected-end-tag": + "Unexpected end tag (%(name)s). Ignored.", + "unexpected-start-tag-out-of-my-head": + "Unexpected start tag (%(name)s) that can be in head. Moved.", + "unexpected-start-tag": + "Unexpected start tag (%(name)s).", + "missing-end-tag": + "Missing end tag (%(name)s).", + "missing-end-tags": + "Missing end tags (%(name)s).", + "unexpected-start-tag-implies-end-tag": + "Unexpected start tag (%(startName)s) " + "implies end tag (%(endName)s).", + "unexpected-start-tag-treated-as": + "Unexpected start tag (%(originalName)s). Treated as %(newName)s.", + "deprecated-tag": + "Unexpected start tag %(name)s. Don't use it!", + "unexpected-start-tag-ignored": + "Unexpected start tag %(name)s. 
Ignored.", + "expected-one-end-tag-but-got-another": + "Unexpected end tag (%(gotName)s). " + "Missing end tag (%(expectedName)s).", + "end-tag-too-early": + "End tag (%(name)s) seen too early. Expected other end tag.", + "end-tag-too-early-named": + "Unexpected end tag (%(gotName)s). Expected end tag (%(expectedName)s).", + "end-tag-too-early-ignored": + "End tag (%(name)s) seen too early. Ignored.", + "adoption-agency-1.1": + "End tag (%(name)s) violates step 1, " + "paragraph 1 of the adoption agency algorithm.", + "adoption-agency-1.2": + "End tag (%(name)s) violates step 1, " + "paragraph 2 of the adoption agency algorithm.", + "adoption-agency-1.3": + "End tag (%(name)s) violates step 1, " + "paragraph 3 of the adoption agency algorithm.", + "adoption-agency-4.4": + "End tag (%(name)s) violates step 4, " + "paragraph 4 of the adoption agency algorithm.", + "unexpected-end-tag-treated-as": + "Unexpected end tag (%(originalName)s). Treated as %(newName)s.", + "no-end-tag": + "This element (%(name)s) has no end tag.", + "unexpected-implied-end-tag-in-table": + "Unexpected implied end tag (%(name)s) in the table phase.", + "unexpected-implied-end-tag-in-table-body": + "Unexpected implied end tag (%(name)s) in the table body phase.", + "unexpected-char-implies-table-voodoo": + "Unexpected non-space characters in " + "table context caused voodoo mode.", + "unexpected-hidden-input-in-table": + "Unexpected input with type hidden in table context.", + "unexpected-form-in-table": + "Unexpected form in table context.", + "unexpected-start-tag-implies-table-voodoo": + "Unexpected start tag (%(name)s) in " + "table context caused voodoo mode.", + "unexpected-end-tag-implies-table-voodoo": + "Unexpected end tag (%(name)s) in " + "table context caused voodoo mode.", + "unexpected-cell-in-table-body": + "Unexpected table cell start tag (%(name)s) " + "in the table body phase.", + "unexpected-cell-end-tag": + "Got table cell end tag (%(name)s) " + "while required end tags are 
missing.", + "unexpected-end-tag-in-table-body": + "Unexpected end tag (%(name)s) in the table body phase. Ignored.", + "unexpected-implied-end-tag-in-table-row": + "Unexpected implied end tag (%(name)s) in the table row phase.", + "unexpected-end-tag-in-table-row": + "Unexpected end tag (%(name)s) in the table row phase. Ignored.", + "unexpected-select-in-select": + "Unexpected select start tag in the select phase " + "treated as select end tag.", + "unexpected-input-in-select": + "Unexpected input start tag in the select phase.", + "unexpected-start-tag-in-select": + "Unexpected start tag token (%(name)s in the select phase. " + "Ignored.", + "unexpected-end-tag-in-select": + "Unexpected end tag (%(name)s) in the select phase. Ignored.", + "unexpected-table-element-start-tag-in-select-in-table": + "Unexpected table element start tag (%(name)s) in the select in table phase.", + "unexpected-table-element-end-tag-in-select-in-table": + "Unexpected table element end tag (%(name)s) in the select in table phase.", + "unexpected-char-after-body": + "Unexpected non-space characters in the after body phase.", + "unexpected-start-tag-after-body": + "Unexpected start tag token (%(name)s)" + " in the after body phase.", + "unexpected-end-tag-after-body": + "Unexpected end tag token (%(name)s)" + " in the after body phase.", + "unexpected-char-in-frameset": + "Unexpected characters in the frameset phase. Characters ignored.", + "unexpected-start-tag-in-frameset": + "Unexpected start tag token (%(name)s)" + " in the frameset phase. Ignored.", + "unexpected-frameset-in-frameset-innerhtml": + "Unexpected end tag token (frameset) " + "in the frameset phase (innerHTML).", + "unexpected-end-tag-in-frameset": + "Unexpected end tag token (%(name)s)" + " in the frameset phase. Ignored.", + "unexpected-char-after-frameset": + "Unexpected non-space characters in the " + "after frameset phase. 
Ignored.", + "unexpected-start-tag-after-frameset": + "Unexpected start tag (%(name)s)" + " in the after frameset phase. Ignored.", + "unexpected-end-tag-after-frameset": + "Unexpected end tag (%(name)s)" + " in the after frameset phase. Ignored.", + "unexpected-end-tag-after-body-innerhtml": + "Unexpected end tag after body(innerHtml)", + "expected-eof-but-got-char": + "Unexpected non-space characters. Expected end of file.", + "expected-eof-but-got-start-tag": + "Unexpected start tag (%(name)s)" + ". Expected end of file.", + "expected-eof-but-got-end-tag": + "Unexpected end tag (%(name)s)" + ". Expected end of file.", + "eof-in-table": + "Unexpected end of file. Expected table content.", + "eof-in-select": + "Unexpected end of file. Expected select content.", + "eof-in-frameset": + "Unexpected end of file. Expected frameset content.", + "eof-in-script-in-script": + "Unexpected end of file. Expected script content.", + "eof-in-foreign-lands": + "Unexpected end of file. Expected foreign content", + "non-void-element-with-trailing-solidus": + "Trailing solidus not allowed on element %(name)s", + "unexpected-html-element-in-foreign-content": + "Element %(name)s not allowed in a non-html context", + "unexpected-end-tag-before-html": + "Unexpected end tag (%(name)s) before html.", + "XXX-undefined-error": + "Undefined error (this sucks and should be fixed)", +} + +namespaces = { + "html": "http://www.w3.org/1999/xhtml", + "mathml": "http://www.w3.org/1998/Math/MathML", + "svg": "http://www.w3.org/2000/svg", + "xlink": "http://www.w3.org/1999/xlink", + "xml": "http://www.w3.org/XML/1998/namespace", + "xmlns": "http://www.w3.org/2000/xmlns/" +} + +scopingElements = frozenset([ + (namespaces["html"], "applet"), + (namespaces["html"], "caption"), + (namespaces["html"], "html"), + (namespaces["html"], "marquee"), + (namespaces["html"], "object"), + (namespaces["html"], "table"), + (namespaces["html"], "td"), + (namespaces["html"], "th"), + (namespaces["mathml"], "mi"), + 
(namespaces["mathml"], "mo"), + (namespaces["mathml"], "mn"), + (namespaces["mathml"], "ms"), + (namespaces["mathml"], "mtext"), + (namespaces["mathml"], "annotation-xml"), + (namespaces["svg"], "foreignObject"), + (namespaces["svg"], "desc"), + (namespaces["svg"], "title"), +]) + +formattingElements = frozenset([ + (namespaces["html"], "a"), + (namespaces["html"], "b"), + (namespaces["html"], "big"), + (namespaces["html"], "code"), + (namespaces["html"], "em"), + (namespaces["html"], "font"), + (namespaces["html"], "i"), + (namespaces["html"], "nobr"), + (namespaces["html"], "s"), + (namespaces["html"], "small"), + (namespaces["html"], "strike"), + (namespaces["html"], "strong"), + (namespaces["html"], "tt"), + (namespaces["html"], "u") +]) + +specialElements = frozenset([ + (namespaces["html"], "address"), + (namespaces["html"], "applet"), + (namespaces["html"], "area"), + (namespaces["html"], "article"), + (namespaces["html"], "aside"), + (namespaces["html"], "base"), + (namespaces["html"], "basefont"), + (namespaces["html"], "bgsound"), + (namespaces["html"], "blockquote"), + (namespaces["html"], "body"), + (namespaces["html"], "br"), + (namespaces["html"], "button"), + (namespaces["html"], "caption"), + (namespaces["html"], "center"), + (namespaces["html"], "col"), + (namespaces["html"], "colgroup"), + (namespaces["html"], "command"), + (namespaces["html"], "dd"), + (namespaces["html"], "details"), + (namespaces["html"], "dir"), + (namespaces["html"], "div"), + (namespaces["html"], "dl"), + (namespaces["html"], "dt"), + (namespaces["html"], "embed"), + (namespaces["html"], "fieldset"), + (namespaces["html"], "figure"), + (namespaces["html"], "footer"), + (namespaces["html"], "form"), + (namespaces["html"], "frame"), + (namespaces["html"], "frameset"), + (namespaces["html"], "h1"), + (namespaces["html"], "h2"), + (namespaces["html"], "h3"), + (namespaces["html"], "h4"), + (namespaces["html"], "h5"), + (namespaces["html"], "h6"), + (namespaces["html"], "head"), 
+ (namespaces["html"], "header"), + (namespaces["html"], "hr"), + (namespaces["html"], "html"), + (namespaces["html"], "iframe"), + # Note that image is commented out in the spec as "this isn't an + # element that can end up on the stack, so it doesn't matter," + (namespaces["html"], "image"), + (namespaces["html"], "img"), + (namespaces["html"], "input"), + (namespaces["html"], "isindex"), + (namespaces["html"], "li"), + (namespaces["html"], "link"), + (namespaces["html"], "listing"), + (namespaces["html"], "marquee"), + (namespaces["html"], "menu"), + (namespaces["html"], "meta"), + (namespaces["html"], "nav"), + (namespaces["html"], "noembed"), + (namespaces["html"], "noframes"), + (namespaces["html"], "noscript"), + (namespaces["html"], "object"), + (namespaces["html"], "ol"), + (namespaces["html"], "p"), + (namespaces["html"], "param"), + (namespaces["html"], "plaintext"), + (namespaces["html"], "pre"), + (namespaces["html"], "script"), + (namespaces["html"], "section"), + (namespaces["html"], "select"), + (namespaces["html"], "style"), + (namespaces["html"], "table"), + (namespaces["html"], "tbody"), + (namespaces["html"], "td"), + (namespaces["html"], "textarea"), + (namespaces["html"], "tfoot"), + (namespaces["html"], "th"), + (namespaces["html"], "thead"), + (namespaces["html"], "title"), + (namespaces["html"], "tr"), + (namespaces["html"], "ul"), + (namespaces["html"], "wbr"), + (namespaces["html"], "xmp"), + (namespaces["svg"], "foreignObject") +]) + +htmlIntegrationPointElements = frozenset([ + (namespaces["mathml"], "annotation-xml"), + (namespaces["svg"], "foreignObject"), + (namespaces["svg"], "desc"), + (namespaces["svg"], "title") +]) + +mathmlTextIntegrationPointElements = frozenset([ + (namespaces["mathml"], "mi"), + (namespaces["mathml"], "mo"), + (namespaces["mathml"], "mn"), + (namespaces["mathml"], "ms"), + (namespaces["mathml"], "mtext") +]) + +adjustForeignAttributes = { + "xlink:actuate": ("xlink", "actuate", namespaces["xlink"]), + 
"xlink:arcrole": ("xlink", "arcrole", namespaces["xlink"]), + "xlink:href": ("xlink", "href", namespaces["xlink"]), + "xlink:role": ("xlink", "role", namespaces["xlink"]), + "xlink:show": ("xlink", "show", namespaces["xlink"]), + "xlink:title": ("xlink", "title", namespaces["xlink"]), + "xlink:type": ("xlink", "type", namespaces["xlink"]), + "xml:base": ("xml", "base", namespaces["xml"]), + "xml:lang": ("xml", "lang", namespaces["xml"]), + "xml:space": ("xml", "space", namespaces["xml"]), + "xmlns": (None, "xmlns", namespaces["xmlns"]), + "xmlns:xlink": ("xmlns", "xlink", namespaces["xmlns"]) +} + +unadjustForeignAttributes = dict([((ns, local), qname) for qname, (prefix, local, ns) in + adjustForeignAttributes.items()]) + +spaceCharacters = frozenset([ + "\t", + "\n", + "\u000C", + " ", + "\r" +]) + +tableInsertModeElements = frozenset([ + "table", + "tbody", + "tfoot", + "thead", + "tr" +]) + +asciiLowercase = frozenset(string.ascii_lowercase) +asciiUppercase = frozenset(string.ascii_uppercase) +asciiLetters = frozenset(string.ascii_letters) +digits = frozenset(string.digits) +hexDigits = frozenset(string.hexdigits) + +asciiUpper2Lower = dict([(ord(c), ord(c.lower())) + for c in string.ascii_uppercase]) + +# Heading elements need to be ordered +headingElements = ( + "h1", + "h2", + "h3", + "h4", + "h5", + "h6" +) + +voidElements = frozenset([ + "base", + "command", + "event-source", + "link", + "meta", + "hr", + "br", + "img", + "embed", + "param", + "area", + "col", + "input", + "source", + "track" +]) + +cdataElements = frozenset(['title', 'textarea']) + +rcdataElements = frozenset([ + 'style', + 'script', + 'xmp', + 'iframe', + 'noembed', + 'noframes', + 'noscript' +]) + +booleanAttributes = { + "": frozenset(["irrelevant"]), + "style": frozenset(["scoped"]), + "img": frozenset(["ismap"]), + "audio": frozenset(["autoplay", "controls"]), + "video": frozenset(["autoplay", "controls"]), + "script": frozenset(["defer", "async"]), + "details": frozenset(["open"]), 
+ "datagrid": frozenset(["multiple", "disabled"]), + "command": frozenset(["hidden", "disabled", "checked", "default"]), + "hr": frozenset(["noshade"]), + "menu": frozenset(["autosubmit"]), + "fieldset": frozenset(["disabled", "readonly"]), + "option": frozenset(["disabled", "readonly", "selected"]), + "optgroup": frozenset(["disabled", "readonly"]), + "button": frozenset(["disabled", "autofocus"]), + "input": frozenset(["disabled", "readonly", "required", "autofocus", "checked", "ismap"]), + "select": frozenset(["disabled", "readonly", "autofocus", "multiple"]), + "output": frozenset(["disabled", "readonly"]), +} + +# entitiesWindows1252 has to be _ordered_ and needs to have an index. It +# therefore can't be a frozenset. +entitiesWindows1252 = ( + 8364, # 0x80 0x20AC EURO SIGN + 65533, # 0x81 UNDEFINED + 8218, # 0x82 0x201A SINGLE LOW-9 QUOTATION MARK + 402, # 0x83 0x0192 LATIN SMALL LETTER F WITH HOOK + 8222, # 0x84 0x201E DOUBLE LOW-9 QUOTATION MARK + 8230, # 0x85 0x2026 HORIZONTAL ELLIPSIS + 8224, # 0x86 0x2020 DAGGER + 8225, # 0x87 0x2021 DOUBLE DAGGER + 710, # 0x88 0x02C6 MODIFIER LETTER CIRCUMFLEX ACCENT + 8240, # 0x89 0x2030 PER MILLE SIGN + 352, # 0x8A 0x0160 LATIN CAPITAL LETTER S WITH CARON + 8249, # 0x8B 0x2039 SINGLE LEFT-POINTING ANGLE QUOTATION MARK + 338, # 0x8C 0x0152 LATIN CAPITAL LIGATURE OE + 65533, # 0x8D UNDEFINED + 381, # 0x8E 0x017D LATIN CAPITAL LETTER Z WITH CARON + 65533, # 0x8F UNDEFINED + 65533, # 0x90 UNDEFINED + 8216, # 0x91 0x2018 LEFT SINGLE QUOTATION MARK + 8217, # 0x92 0x2019 RIGHT SINGLE QUOTATION MARK + 8220, # 0x93 0x201C LEFT DOUBLE QUOTATION MARK + 8221, # 0x94 0x201D RIGHT DOUBLE QUOTATION MARK + 8226, # 0x95 0x2022 BULLET + 8211, # 0x96 0x2013 EN DASH + 8212, # 0x97 0x2014 EM DASH + 732, # 0x98 0x02DC SMALL TILDE + 8482, # 0x99 0x2122 TRADE MARK SIGN + 353, # 0x9A 0x0161 LATIN SMALL LETTER S WITH CARON + 8250, # 0x9B 0x203A SINGLE RIGHT-POINTING ANGLE QUOTATION MARK + 339, # 0x9C 0x0153 LATIN SMALL LIGATURE OE + 65533, # 
0x9D UNDEFINED + 382, # 0x9E 0x017E LATIN SMALL LETTER Z WITH CARON + 376 # 0x9F 0x0178 LATIN CAPITAL LETTER Y WITH DIAERESIS +) + +xmlEntities = frozenset(['lt;', 'gt;', 'amp;', 'apos;', 'quot;']) + +entities = { + "AElig": "\xc6", + "AElig;": "\xc6", + "AMP": "&", + "AMP;": "&", + "Aacute": "\xc1", + "Aacute;": "\xc1", + "Abreve;": "\u0102", + "Acirc": "\xc2", + "Acirc;": "\xc2", + "Acy;": "\u0410", + "Afr;": "\U0001d504", + "Agrave": "\xc0", + "Agrave;": "\xc0", + "Alpha;": "\u0391", + "Amacr;": "\u0100", + "And;": "\u2a53", + "Aogon;": "\u0104", + "Aopf;": "\U0001d538", + "ApplyFunction;": "\u2061", + "Aring": "\xc5", + "Aring;": "\xc5", + "Ascr;": "\U0001d49c", + "Assign;": "\u2254", + "Atilde": "\xc3", + "Atilde;": "\xc3", + "Auml": "\xc4", + "Auml;": "\xc4", + "Backslash;": "\u2216", + "Barv;": "\u2ae7", + "Barwed;": "\u2306", + "Bcy;": "\u0411", + "Because;": "\u2235", + "Bernoullis;": "\u212c", + "Beta;": "\u0392", + "Bfr;": "\U0001d505", + "Bopf;": "\U0001d539", + "Breve;": "\u02d8", + "Bscr;": "\u212c", + "Bumpeq;": "\u224e", + "CHcy;": "\u0427", + "COPY": "\xa9", + "COPY;": "\xa9", + "Cacute;": "\u0106", + "Cap;": "\u22d2", + "CapitalDifferentialD;": "\u2145", + "Cayleys;": "\u212d", + "Ccaron;": "\u010c", + "Ccedil": "\xc7", + "Ccedil;": "\xc7", + "Ccirc;": "\u0108", + "Cconint;": "\u2230", + "Cdot;": "\u010a", + "Cedilla;": "\xb8", + "CenterDot;": "\xb7", + "Cfr;": "\u212d", + "Chi;": "\u03a7", + "CircleDot;": "\u2299", + "CircleMinus;": "\u2296", + "CirclePlus;": "\u2295", + "CircleTimes;": "\u2297", + "ClockwiseContourIntegral;": "\u2232", + "CloseCurlyDoubleQuote;": "\u201d", + "CloseCurlyQuote;": "\u2019", + "Colon;": "\u2237", + "Colone;": "\u2a74", + "Congruent;": "\u2261", + "Conint;": "\u222f", + "ContourIntegral;": "\u222e", + "Copf;": "\u2102", + "Coproduct;": "\u2210", + "CounterClockwiseContourIntegral;": "\u2233", + "Cross;": "\u2a2f", + "Cscr;": "\U0001d49e", + "Cup;": "\u22d3", + "CupCap;": "\u224d", + "DD;": "\u2145", + "DDotrahd;": 
"\u2911", + "DJcy;": "\u0402", + "DScy;": "\u0405", + "DZcy;": "\u040f", + "Dagger;": "\u2021", + "Darr;": "\u21a1", + "Dashv;": "\u2ae4", + "Dcaron;": "\u010e", + "Dcy;": "\u0414", + "Del;": "\u2207", + "Delta;": "\u0394", + "Dfr;": "\U0001d507", + "DiacriticalAcute;": "\xb4", + "DiacriticalDot;": "\u02d9", + "DiacriticalDoubleAcute;": "\u02dd", + "DiacriticalGrave;": "`", + "DiacriticalTilde;": "\u02dc", + "Diamond;": "\u22c4", + "DifferentialD;": "\u2146", + "Dopf;": "\U0001d53b", + "Dot;": "\xa8", + "DotDot;": "\u20dc", + "DotEqual;": "\u2250", + "DoubleContourIntegral;": "\u222f", + "DoubleDot;": "\xa8", + "DoubleDownArrow;": "\u21d3", + "DoubleLeftArrow;": "\u21d0", + "DoubleLeftRightArrow;": "\u21d4", + "DoubleLeftTee;": "\u2ae4", + "DoubleLongLeftArrow;": "\u27f8", + "DoubleLongLeftRightArrow;": "\u27fa", + "DoubleLongRightArrow;": "\u27f9", + "DoubleRightArrow;": "\u21d2", + "DoubleRightTee;": "\u22a8", + "DoubleUpArrow;": "\u21d1", + "DoubleUpDownArrow;": "\u21d5", + "DoubleVerticalBar;": "\u2225", + "DownArrow;": "\u2193", + "DownArrowBar;": "\u2913", + "DownArrowUpArrow;": "\u21f5", + "DownBreve;": "\u0311", + "DownLeftRightVector;": "\u2950", + "DownLeftTeeVector;": "\u295e", + "DownLeftVector;": "\u21bd", + "DownLeftVectorBar;": "\u2956", + "DownRightTeeVector;": "\u295f", + "DownRightVector;": "\u21c1", + "DownRightVectorBar;": "\u2957", + "DownTee;": "\u22a4", + "DownTeeArrow;": "\u21a7", + "Downarrow;": "\u21d3", + "Dscr;": "\U0001d49f", + "Dstrok;": "\u0110", + "ENG;": "\u014a", + "ETH": "\xd0", + "ETH;": "\xd0", + "Eacute": "\xc9", + "Eacute;": "\xc9", + "Ecaron;": "\u011a", + "Ecirc": "\xca", + "Ecirc;": "\xca", + "Ecy;": "\u042d", + "Edot;": "\u0116", + "Efr;": "\U0001d508", + "Egrave": "\xc8", + "Egrave;": "\xc8", + "Element;": "\u2208", + "Emacr;": "\u0112", + "EmptySmallSquare;": "\u25fb", + "EmptyVerySmallSquare;": "\u25ab", + "Eogon;": "\u0118", + "Eopf;": "\U0001d53c", + "Epsilon;": "\u0395", + "Equal;": "\u2a75", + "EqualTilde;": 
"\u2242", + "Equilibrium;": "\u21cc", + "Escr;": "\u2130", + "Esim;": "\u2a73", + "Eta;": "\u0397", + "Euml": "\xcb", + "Euml;": "\xcb", + "Exists;": "\u2203", + "ExponentialE;": "\u2147", + "Fcy;": "\u0424", + "Ffr;": "\U0001d509", + "FilledSmallSquare;": "\u25fc", + "FilledVerySmallSquare;": "\u25aa", + "Fopf;": "\U0001d53d", + "ForAll;": "\u2200", + "Fouriertrf;": "\u2131", + "Fscr;": "\u2131", + "GJcy;": "\u0403", + "GT": ">", + "GT;": ">", + "Gamma;": "\u0393", + "Gammad;": "\u03dc", + "Gbreve;": "\u011e", + "Gcedil;": "\u0122", + "Gcirc;": "\u011c", + "Gcy;": "\u0413", + "Gdot;": "\u0120", + "Gfr;": "\U0001d50a", + "Gg;": "\u22d9", + "Gopf;": "\U0001d53e", + "GreaterEqual;": "\u2265", + "GreaterEqualLess;": "\u22db", + "GreaterFullEqual;": "\u2267", + "GreaterGreater;": "\u2aa2", + "GreaterLess;": "\u2277", + "GreaterSlantEqual;": "\u2a7e", + "GreaterTilde;": "\u2273", + "Gscr;": "\U0001d4a2", + "Gt;": "\u226b", + "HARDcy;": "\u042a", + "Hacek;": "\u02c7", + "Hat;": "^", + "Hcirc;": "\u0124", + "Hfr;": "\u210c", + "HilbertSpace;": "\u210b", + "Hopf;": "\u210d", + "HorizontalLine;": "\u2500", + "Hscr;": "\u210b", + "Hstrok;": "\u0126", + "HumpDownHump;": "\u224e", + "HumpEqual;": "\u224f", + "IEcy;": "\u0415", + "IJlig;": "\u0132", + "IOcy;": "\u0401", + "Iacute": "\xcd", + "Iacute;": "\xcd", + "Icirc": "\xce", + "Icirc;": "\xce", + "Icy;": "\u0418", + "Idot;": "\u0130", + "Ifr;": "\u2111", + "Igrave": "\xcc", + "Igrave;": "\xcc", + "Im;": "\u2111", + "Imacr;": "\u012a", + "ImaginaryI;": "\u2148", + "Implies;": "\u21d2", + "Int;": "\u222c", + "Integral;": "\u222b", + "Intersection;": "\u22c2", + "InvisibleComma;": "\u2063", + "InvisibleTimes;": "\u2062", + "Iogon;": "\u012e", + "Iopf;": "\U0001d540", + "Iota;": "\u0399", + "Iscr;": "\u2110", + "Itilde;": "\u0128", + "Iukcy;": "\u0406", + "Iuml": "\xcf", + "Iuml;": "\xcf", + "Jcirc;": "\u0134", + "Jcy;": "\u0419", + "Jfr;": "\U0001d50d", + "Jopf;": "\U0001d541", + "Jscr;": "\U0001d4a5", + "Jsercy;": "\u0408", + 
"Jukcy;": "\u0404", + "KHcy;": "\u0425", + "KJcy;": "\u040c", + "Kappa;": "\u039a", + "Kcedil;": "\u0136", + "Kcy;": "\u041a", + "Kfr;": "\U0001d50e", + "Kopf;": "\U0001d542", + "Kscr;": "\U0001d4a6", + "LJcy;": "\u0409", + "LT": "<", + "LT;": "<", + "Lacute;": "\u0139", + "Lambda;": "\u039b", + "Lang;": "\u27ea", + "Laplacetrf;": "\u2112", + "Larr;": "\u219e", + "Lcaron;": "\u013d", + "Lcedil;": "\u013b", + "Lcy;": "\u041b", + "LeftAngleBracket;": "\u27e8", + "LeftArrow;": "\u2190", + "LeftArrowBar;": "\u21e4", + "LeftArrowRightArrow;": "\u21c6", + "LeftCeiling;": "\u2308", + "LeftDoubleBracket;": "\u27e6", + "LeftDownTeeVector;": "\u2961", + "LeftDownVector;": "\u21c3", + "LeftDownVectorBar;": "\u2959", + "LeftFloor;": "\u230a", + "LeftRightArrow;": "\u2194", + "LeftRightVector;": "\u294e", + "LeftTee;": "\u22a3", + "LeftTeeArrow;": "\u21a4", + "LeftTeeVector;": "\u295a", + "LeftTriangle;": "\u22b2", + "LeftTriangleBar;": "\u29cf", + "LeftTriangleEqual;": "\u22b4", + "LeftUpDownVector;": "\u2951", + "LeftUpTeeVector;": "\u2960", + "LeftUpVector;": "\u21bf", + "LeftUpVectorBar;": "\u2958", + "LeftVector;": "\u21bc", + "LeftVectorBar;": "\u2952", + "Leftarrow;": "\u21d0", + "Leftrightarrow;": "\u21d4", + "LessEqualGreater;": "\u22da", + "LessFullEqual;": "\u2266", + "LessGreater;": "\u2276", + "LessLess;": "\u2aa1", + "LessSlantEqual;": "\u2a7d", + "LessTilde;": "\u2272", + "Lfr;": "\U0001d50f", + "Ll;": "\u22d8", + "Lleftarrow;": "\u21da", + "Lmidot;": "\u013f", + "LongLeftArrow;": "\u27f5", + "LongLeftRightArrow;": "\u27f7", + "LongRightArrow;": "\u27f6", + "Longleftarrow;": "\u27f8", + "Longleftrightarrow;": "\u27fa", + "Longrightarrow;": "\u27f9", + "Lopf;": "\U0001d543", + "LowerLeftArrow;": "\u2199", + "LowerRightArrow;": "\u2198", + "Lscr;": "\u2112", + "Lsh;": "\u21b0", + "Lstrok;": "\u0141", + "Lt;": "\u226a", + "Map;": "\u2905", + "Mcy;": "\u041c", + "MediumSpace;": "\u205f", + "Mellintrf;": "\u2133", + "Mfr;": "\U0001d510", + "MinusPlus;": "\u2213", + 
"Mopf;": "\U0001d544", + "Mscr;": "\u2133", + "Mu;": "\u039c", + "NJcy;": "\u040a", + "Nacute;": "\u0143", + "Ncaron;": "\u0147", + "Ncedil;": "\u0145", + "Ncy;": "\u041d", + "NegativeMediumSpace;": "\u200b", + "NegativeThickSpace;": "\u200b", + "NegativeThinSpace;": "\u200b", + "NegativeVeryThinSpace;": "\u200b", + "NestedGreaterGreater;": "\u226b", + "NestedLessLess;": "\u226a", + "NewLine;": "\n", + "Nfr;": "\U0001d511", + "NoBreak;": "\u2060", + "NonBreakingSpace;": "\xa0", + "Nopf;": "\u2115", + "Not;": "\u2aec", + "NotCongruent;": "\u2262", + "NotCupCap;": "\u226d", + "NotDoubleVerticalBar;": "\u2226", + "NotElement;": "\u2209", + "NotEqual;": "\u2260", + "NotEqualTilde;": "\u2242\u0338", + "NotExists;": "\u2204", + "NotGreater;": "\u226f", + "NotGreaterEqual;": "\u2271", + "NotGreaterFullEqual;": "\u2267\u0338", + "NotGreaterGreater;": "\u226b\u0338", + "NotGreaterLess;": "\u2279", + "NotGreaterSlantEqual;": "\u2a7e\u0338", + "NotGreaterTilde;": "\u2275", + "NotHumpDownHump;": "\u224e\u0338", + "NotHumpEqual;": "\u224f\u0338", + "NotLeftTriangle;": "\u22ea", + "NotLeftTriangleBar;": "\u29cf\u0338", + "NotLeftTriangleEqual;": "\u22ec", + "NotLess;": "\u226e", + "NotLessEqual;": "\u2270", + "NotLessGreater;": "\u2278", + "NotLessLess;": "\u226a\u0338", + "NotLessSlantEqual;": "\u2a7d\u0338", + "NotLessTilde;": "\u2274", + "NotNestedGreaterGreater;": "\u2aa2\u0338", + "NotNestedLessLess;": "\u2aa1\u0338", + "NotPrecedes;": "\u2280", + "NotPrecedesEqual;": "\u2aaf\u0338", + "NotPrecedesSlantEqual;": "\u22e0", + "NotReverseElement;": "\u220c", + "NotRightTriangle;": "\u22eb", + "NotRightTriangleBar;": "\u29d0\u0338", + "NotRightTriangleEqual;": "\u22ed", + "NotSquareSubset;": "\u228f\u0338", + "NotSquareSubsetEqual;": "\u22e2", + "NotSquareSuperset;": "\u2290\u0338", + "NotSquareSupersetEqual;": "\u22e3", + "NotSubset;": "\u2282\u20d2", + "NotSubsetEqual;": "\u2288", + "NotSucceeds;": "\u2281", + "NotSucceedsEqual;": "\u2ab0\u0338", + "NotSucceedsSlantEqual;": 
"\u22e1", + "NotSucceedsTilde;": "\u227f\u0338", + "NotSuperset;": "\u2283\u20d2", + "NotSupersetEqual;": "\u2289", + "NotTilde;": "\u2241", + "NotTildeEqual;": "\u2244", + "NotTildeFullEqual;": "\u2247", + "NotTildeTilde;": "\u2249", + "NotVerticalBar;": "\u2224", + "Nscr;": "\U0001d4a9", + "Ntilde": "\xd1", + "Ntilde;": "\xd1", + "Nu;": "\u039d", + "OElig;": "\u0152", + "Oacute": "\xd3", + "Oacute;": "\xd3", + "Ocirc": "\xd4", + "Ocirc;": "\xd4", + "Ocy;": "\u041e", + "Odblac;": "\u0150", + "Ofr;": "\U0001d512", + "Ograve": "\xd2", + "Ograve;": "\xd2", + "Omacr;": "\u014c", + "Omega;": "\u03a9", + "Omicron;": "\u039f", + "Oopf;": "\U0001d546", + "OpenCurlyDoubleQuote;": "\u201c", + "OpenCurlyQuote;": "\u2018", + "Or;": "\u2a54", + "Oscr;": "\U0001d4aa", + "Oslash": "\xd8", + "Oslash;": "\xd8", + "Otilde": "\xd5", + "Otilde;": "\xd5", + "Otimes;": "\u2a37", + "Ouml": "\xd6", + "Ouml;": "\xd6", + "OverBar;": "\u203e", + "OverBrace;": "\u23de", + "OverBracket;": "\u23b4", + "OverParenthesis;": "\u23dc", + "PartialD;": "\u2202", + "Pcy;": "\u041f", + "Pfr;": "\U0001d513", + "Phi;": "\u03a6", + "Pi;": "\u03a0", + "PlusMinus;": "\xb1", + "Poincareplane;": "\u210c", + "Popf;": "\u2119", + "Pr;": "\u2abb", + "Precedes;": "\u227a", + "PrecedesEqual;": "\u2aaf", + "PrecedesSlantEqual;": "\u227c", + "PrecedesTilde;": "\u227e", + "Prime;": "\u2033", + "Product;": "\u220f", + "Proportion;": "\u2237", + "Proportional;": "\u221d", + "Pscr;": "\U0001d4ab", + "Psi;": "\u03a8", + "QUOT": "\"", + "QUOT;": "\"", + "Qfr;": "\U0001d514", + "Qopf;": "\u211a", + "Qscr;": "\U0001d4ac", + "RBarr;": "\u2910", + "REG": "\xae", + "REG;": "\xae", + "Racute;": "\u0154", + "Rang;": "\u27eb", + "Rarr;": "\u21a0", + "Rarrtl;": "\u2916", + "Rcaron;": "\u0158", + "Rcedil;": "\u0156", + "Rcy;": "\u0420", + "Re;": "\u211c", + "ReverseElement;": "\u220b", + "ReverseEquilibrium;": "\u21cb", + "ReverseUpEquilibrium;": "\u296f", + "Rfr;": "\u211c", + "Rho;": "\u03a1", + "RightAngleBracket;": "\u27e9", + 
"RightArrow;": "\u2192", + "RightArrowBar;": "\u21e5", + "RightArrowLeftArrow;": "\u21c4", + "RightCeiling;": "\u2309", + "RightDoubleBracket;": "\u27e7", + "RightDownTeeVector;": "\u295d", + "RightDownVector;": "\u21c2", + "RightDownVectorBar;": "\u2955", + "RightFloor;": "\u230b", + "RightTee;": "\u22a2", + "RightTeeArrow;": "\u21a6", + "RightTeeVector;": "\u295b", + "RightTriangle;": "\u22b3", + "RightTriangleBar;": "\u29d0", + "RightTriangleEqual;": "\u22b5", + "RightUpDownVector;": "\u294f", + "RightUpTeeVector;": "\u295c", + "RightUpVector;": "\u21be", + "RightUpVectorBar;": "\u2954", + "RightVector;": "\u21c0", + "RightVectorBar;": "\u2953", + "Rightarrow;": "\u21d2", + "Ropf;": "\u211d", + "RoundImplies;": "\u2970", + "Rrightarrow;": "\u21db", + "Rscr;": "\u211b", + "Rsh;": "\u21b1", + "RuleDelayed;": "\u29f4", + "SHCHcy;": "\u0429", + "SHcy;": "\u0428", + "SOFTcy;": "\u042c", + "Sacute;": "\u015a", + "Sc;": "\u2abc", + "Scaron;": "\u0160", + "Scedil;": "\u015e", + "Scirc;": "\u015c", + "Scy;": "\u0421", + "Sfr;": "\U0001d516", + "ShortDownArrow;": "\u2193", + "ShortLeftArrow;": "\u2190", + "ShortRightArrow;": "\u2192", + "ShortUpArrow;": "\u2191", + "Sigma;": "\u03a3", + "SmallCircle;": "\u2218", + "Sopf;": "\U0001d54a", + "Sqrt;": "\u221a", + "Square;": "\u25a1", + "SquareIntersection;": "\u2293", + "SquareSubset;": "\u228f", + "SquareSubsetEqual;": "\u2291", + "SquareSuperset;": "\u2290", + "SquareSupersetEqual;": "\u2292", + "SquareUnion;": "\u2294", + "Sscr;": "\U0001d4ae", + "Star;": "\u22c6", + "Sub;": "\u22d0", + "Subset;": "\u22d0", + "SubsetEqual;": "\u2286", + "Succeeds;": "\u227b", + "SucceedsEqual;": "\u2ab0", + "SucceedsSlantEqual;": "\u227d", + "SucceedsTilde;": "\u227f", + "SuchThat;": "\u220b", + "Sum;": "\u2211", + "Sup;": "\u22d1", + "Superset;": "\u2283", + "SupersetEqual;": "\u2287", + "Supset;": "\u22d1", + "THORN": "\xde", + "THORN;": "\xde", + "TRADE;": "\u2122", + "TSHcy;": "\u040b", + "TScy;": "\u0426", + "Tab;": "\t", + "Tau;": 
"\u03a4", + "Tcaron;": "\u0164", + "Tcedil;": "\u0162", + "Tcy;": "\u0422", + "Tfr;": "\U0001d517", + "Therefore;": "\u2234", + "Theta;": "\u0398", + "ThickSpace;": "\u205f\u200a", + "ThinSpace;": "\u2009", + "Tilde;": "\u223c", + "TildeEqual;": "\u2243", + "TildeFullEqual;": "\u2245", + "TildeTilde;": "\u2248", + "Topf;": "\U0001d54b", + "TripleDot;": "\u20db", + "Tscr;": "\U0001d4af", + "Tstrok;": "\u0166", + "Uacute": "\xda", + "Uacute;": "\xda", + "Uarr;": "\u219f", + "Uarrocir;": "\u2949", + "Ubrcy;": "\u040e", + "Ubreve;": "\u016c", + "Ucirc": "\xdb", + "Ucirc;": "\xdb", + "Ucy;": "\u0423", + "Udblac;": "\u0170", + "Ufr;": "\U0001d518", + "Ugrave": "\xd9", + "Ugrave;": "\xd9", + "Umacr;": "\u016a", + "UnderBar;": "_", + "UnderBrace;": "\u23df", + "UnderBracket;": "\u23b5", + "UnderParenthesis;": "\u23dd", + "Union;": "\u22c3", + "UnionPlus;": "\u228e", + "Uogon;": "\u0172", + "Uopf;": "\U0001d54c", + "UpArrow;": "\u2191", + "UpArrowBar;": "\u2912", + "UpArrowDownArrow;": "\u21c5", + "UpDownArrow;": "\u2195", + "UpEquilibrium;": "\u296e", + "UpTee;": "\u22a5", + "UpTeeArrow;": "\u21a5", + "Uparrow;": "\u21d1", + "Updownarrow;": "\u21d5", + "UpperLeftArrow;": "\u2196", + "UpperRightArrow;": "\u2197", + "Upsi;": "\u03d2", + "Upsilon;": "\u03a5", + "Uring;": "\u016e", + "Uscr;": "\U0001d4b0", + "Utilde;": "\u0168", + "Uuml": "\xdc", + "Uuml;": "\xdc", + "VDash;": "\u22ab", + "Vbar;": "\u2aeb", + "Vcy;": "\u0412", + "Vdash;": "\u22a9", + "Vdashl;": "\u2ae6", + "Vee;": "\u22c1", + "Verbar;": "\u2016", + "Vert;": "\u2016", + "VerticalBar;": "\u2223", + "VerticalLine;": "|", + "VerticalSeparator;": "\u2758", + "VerticalTilde;": "\u2240", + "VeryThinSpace;": "\u200a", + "Vfr;": "\U0001d519", + "Vopf;": "\U0001d54d", + "Vscr;": "\U0001d4b1", + "Vvdash;": "\u22aa", + "Wcirc;": "\u0174", + "Wedge;": "\u22c0", + "Wfr;": "\U0001d51a", + "Wopf;": "\U0001d54e", + "Wscr;": "\U0001d4b2", + "Xfr;": "\U0001d51b", + "Xi;": "\u039e", + "Xopf;": "\U0001d54f", + "Xscr;": 
"\U0001d4b3", + "YAcy;": "\u042f", + "YIcy;": "\u0407", + "YUcy;": "\u042e", + "Yacute": "\xdd", + "Yacute;": "\xdd", + "Ycirc;": "\u0176", + "Ycy;": "\u042b", + "Yfr;": "\U0001d51c", + "Yopf;": "\U0001d550", + "Yscr;": "\U0001d4b4", + "Yuml;": "\u0178", + "ZHcy;": "\u0416", + "Zacute;": "\u0179", + "Zcaron;": "\u017d", + "Zcy;": "\u0417", + "Zdot;": "\u017b", + "ZeroWidthSpace;": "\u200b", + "Zeta;": "\u0396", + "Zfr;": "\u2128", + "Zopf;": "\u2124", + "Zscr;": "\U0001d4b5", + "aacute": "\xe1", + "aacute;": "\xe1", + "abreve;": "\u0103", + "ac;": "\u223e", + "acE;": "\u223e\u0333", + "acd;": "\u223f", + "acirc": "\xe2", + "acirc;": "\xe2", + "acute": "\xb4", + "acute;": "\xb4", + "acy;": "\u0430", + "aelig": "\xe6", + "aelig;": "\xe6", + "af;": "\u2061", + "afr;": "\U0001d51e", + "agrave": "\xe0", + "agrave;": "\xe0", + "alefsym;": "\u2135", + "aleph;": "\u2135", + "alpha;": "\u03b1", + "amacr;": "\u0101", + "amalg;": "\u2a3f", + "amp": "&", + "amp;": "&", + "and;": "\u2227", + "andand;": "\u2a55", + "andd;": "\u2a5c", + "andslope;": "\u2a58", + "andv;": "\u2a5a", + "ang;": "\u2220", + "ange;": "\u29a4", + "angle;": "\u2220", + "angmsd;": "\u2221", + "angmsdaa;": "\u29a8", + "angmsdab;": "\u29a9", + "angmsdac;": "\u29aa", + "angmsdad;": "\u29ab", + "angmsdae;": "\u29ac", + "angmsdaf;": "\u29ad", + "angmsdag;": "\u29ae", + "angmsdah;": "\u29af", + "angrt;": "\u221f", + "angrtvb;": "\u22be", + "angrtvbd;": "\u299d", + "angsph;": "\u2222", + "angst;": "\xc5", + "angzarr;": "\u237c", + "aogon;": "\u0105", + "aopf;": "\U0001d552", + "ap;": "\u2248", + "apE;": "\u2a70", + "apacir;": "\u2a6f", + "ape;": "\u224a", + "apid;": "\u224b", + "apos;": "'", + "approx;": "\u2248", + "approxeq;": "\u224a", + "aring": "\xe5", + "aring;": "\xe5", + "ascr;": "\U0001d4b6", + "ast;": "*", + "asymp;": "\u2248", + "asympeq;": "\u224d", + "atilde": "\xe3", + "atilde;": "\xe3", + "auml": "\xe4", + "auml;": "\xe4", + "awconint;": "\u2233", + "awint;": "\u2a11", + "bNot;": "\u2aed", + 
"backcong;": "\u224c", + "backepsilon;": "\u03f6", + "backprime;": "\u2035", + "backsim;": "\u223d", + "backsimeq;": "\u22cd", + "barvee;": "\u22bd", + "barwed;": "\u2305", + "barwedge;": "\u2305", + "bbrk;": "\u23b5", + "bbrktbrk;": "\u23b6", + "bcong;": "\u224c", + "bcy;": "\u0431", + "bdquo;": "\u201e", + "becaus;": "\u2235", + "because;": "\u2235", + "bemptyv;": "\u29b0", + "bepsi;": "\u03f6", + "bernou;": "\u212c", + "beta;": "\u03b2", + "beth;": "\u2136", + "between;": "\u226c", + "bfr;": "\U0001d51f", + "bigcap;": "\u22c2", + "bigcirc;": "\u25ef", + "bigcup;": "\u22c3", + "bigodot;": "\u2a00", + "bigoplus;": "\u2a01", + "bigotimes;": "\u2a02", + "bigsqcup;": "\u2a06", + "bigstar;": "\u2605", + "bigtriangledown;": "\u25bd", + "bigtriangleup;": "\u25b3", + "biguplus;": "\u2a04", + "bigvee;": "\u22c1", + "bigwedge;": "\u22c0", + "bkarow;": "\u290d", + "blacklozenge;": "\u29eb", + "blacksquare;": "\u25aa", + "blacktriangle;": "\u25b4", + "blacktriangledown;": "\u25be", + "blacktriangleleft;": "\u25c2", + "blacktriangleright;": "\u25b8", + "blank;": "\u2423", + "blk12;": "\u2592", + "blk14;": "\u2591", + "blk34;": "\u2593", + "block;": "\u2588", + "bne;": "=\u20e5", + "bnequiv;": "\u2261\u20e5", + "bnot;": "\u2310", + "bopf;": "\U0001d553", + "bot;": "\u22a5", + "bottom;": "\u22a5", + "bowtie;": "\u22c8", + "boxDL;": "\u2557", + "boxDR;": "\u2554", + "boxDl;": "\u2556", + "boxDr;": "\u2553", + "boxH;": "\u2550", + "boxHD;": "\u2566", + "boxHU;": "\u2569", + "boxHd;": "\u2564", + "boxHu;": "\u2567", + "boxUL;": "\u255d", + "boxUR;": "\u255a", + "boxUl;": "\u255c", + "boxUr;": "\u2559", + "boxV;": "\u2551", + "boxVH;": "\u256c", + "boxVL;": "\u2563", + "boxVR;": "\u2560", + "boxVh;": "\u256b", + "boxVl;": "\u2562", + "boxVr;": "\u255f", + "boxbox;": "\u29c9", + "boxdL;": "\u2555", + "boxdR;": "\u2552", + "boxdl;": "\u2510", + "boxdr;": "\u250c", + "boxh;": "\u2500", + "boxhD;": "\u2565", + "boxhU;": "\u2568", + "boxhd;": "\u252c", + "boxhu;": "\u2534", + 
"boxminus;": "\u229f", + "boxplus;": "\u229e", + "boxtimes;": "\u22a0", + "boxuL;": "\u255b", + "boxuR;": "\u2558", + "boxul;": "\u2518", + "boxur;": "\u2514", + "boxv;": "\u2502", + "boxvH;": "\u256a", + "boxvL;": "\u2561", + "boxvR;": "\u255e", + "boxvh;": "\u253c", + "boxvl;": "\u2524", + "boxvr;": "\u251c", + "bprime;": "\u2035", + "breve;": "\u02d8", + "brvbar": "\xa6", + "brvbar;": "\xa6", + "bscr;": "\U0001d4b7", + "bsemi;": "\u204f", + "bsim;": "\u223d", + "bsime;": "\u22cd", + "bsol;": "\\", + "bsolb;": "\u29c5", + "bsolhsub;": "\u27c8", + "bull;": "\u2022", + "bullet;": "\u2022", + "bump;": "\u224e", + "bumpE;": "\u2aae", + "bumpe;": "\u224f", + "bumpeq;": "\u224f", + "cacute;": "\u0107", + "cap;": "\u2229", + "capand;": "\u2a44", + "capbrcup;": "\u2a49", + "capcap;": "\u2a4b", + "capcup;": "\u2a47", + "capdot;": "\u2a40", + "caps;": "\u2229\ufe00", + "caret;": "\u2041", + "caron;": "\u02c7", + "ccaps;": "\u2a4d", + "ccaron;": "\u010d", + "ccedil": "\xe7", + "ccedil;": "\xe7", + "ccirc;": "\u0109", + "ccups;": "\u2a4c", + "ccupssm;": "\u2a50", + "cdot;": "\u010b", + "cedil": "\xb8", + "cedil;": "\xb8", + "cemptyv;": "\u29b2", + "cent": "\xa2", + "cent;": "\xa2", + "centerdot;": "\xb7", + "cfr;": "\U0001d520", + "chcy;": "\u0447", + "check;": "\u2713", + "checkmark;": "\u2713", + "chi;": "\u03c7", + "cir;": "\u25cb", + "cirE;": "\u29c3", + "circ;": "\u02c6", + "circeq;": "\u2257", + "circlearrowleft;": "\u21ba", + "circlearrowright;": "\u21bb", + "circledR;": "\xae", + "circledS;": "\u24c8", + "circledast;": "\u229b", + "circledcirc;": "\u229a", + "circleddash;": "\u229d", + "cire;": "\u2257", + "cirfnint;": "\u2a10", + "cirmid;": "\u2aef", + "cirscir;": "\u29c2", + "clubs;": "\u2663", + "clubsuit;": "\u2663", + "colon;": ":", + "colone;": "\u2254", + "coloneq;": "\u2254", + "comma;": ",", + "commat;": "@", + "comp;": "\u2201", + "compfn;": "\u2218", + "complement;": "\u2201", + "complexes;": "\u2102", + "cong;": "\u2245", + "congdot;": "\u2a6d", + 
"conint;": "\u222e", + "copf;": "\U0001d554", + "coprod;": "\u2210", + "copy": "\xa9", + "copy;": "\xa9", + "copysr;": "\u2117", + "crarr;": "\u21b5", + "cross;": "\u2717", + "cscr;": "\U0001d4b8", + "csub;": "\u2acf", + "csube;": "\u2ad1", + "csup;": "\u2ad0", + "csupe;": "\u2ad2", + "ctdot;": "\u22ef", + "cudarrl;": "\u2938", + "cudarrr;": "\u2935", + "cuepr;": "\u22de", + "cuesc;": "\u22df", + "cularr;": "\u21b6", + "cularrp;": "\u293d", + "cup;": "\u222a", + "cupbrcap;": "\u2a48", + "cupcap;": "\u2a46", + "cupcup;": "\u2a4a", + "cupdot;": "\u228d", + "cupor;": "\u2a45", + "cups;": "\u222a\ufe00", + "curarr;": "\u21b7", + "curarrm;": "\u293c", + "curlyeqprec;": "\u22de", + "curlyeqsucc;": "\u22df", + "curlyvee;": "\u22ce", + "curlywedge;": "\u22cf", + "curren": "\xa4", + "curren;": "\xa4", + "curvearrowleft;": "\u21b6", + "curvearrowright;": "\u21b7", + "cuvee;": "\u22ce", + "cuwed;": "\u22cf", + "cwconint;": "\u2232", + "cwint;": "\u2231", + "cylcty;": "\u232d", + "dArr;": "\u21d3", + "dHar;": "\u2965", + "dagger;": "\u2020", + "daleth;": "\u2138", + "darr;": "\u2193", + "dash;": "\u2010", + "dashv;": "\u22a3", + "dbkarow;": "\u290f", + "dblac;": "\u02dd", + "dcaron;": "\u010f", + "dcy;": "\u0434", + "dd;": "\u2146", + "ddagger;": "\u2021", + "ddarr;": "\u21ca", + "ddotseq;": "\u2a77", + "deg": "\xb0", + "deg;": "\xb0", + "delta;": "\u03b4", + "demptyv;": "\u29b1", + "dfisht;": "\u297f", + "dfr;": "\U0001d521", + "dharl;": "\u21c3", + "dharr;": "\u21c2", + "diam;": "\u22c4", + "diamond;": "\u22c4", + "diamondsuit;": "\u2666", + "diams;": "\u2666", + "die;": "\xa8", + "digamma;": "\u03dd", + "disin;": "\u22f2", + "div;": "\xf7", + "divide": "\xf7", + "divide;": "\xf7", + "divideontimes;": "\u22c7", + "divonx;": "\u22c7", + "djcy;": "\u0452", + "dlcorn;": "\u231e", + "dlcrop;": "\u230d", + "dollar;": "$", + "dopf;": "\U0001d555", + "dot;": "\u02d9", + "doteq;": "\u2250", + "doteqdot;": "\u2251", + "dotminus;": "\u2238", + "dotplus;": "\u2214", + "dotsquare;": 
"\u22a1", + "doublebarwedge;": "\u2306", + "downarrow;": "\u2193", + "downdownarrows;": "\u21ca", + "downharpoonleft;": "\u21c3", + "downharpoonright;": "\u21c2", + "drbkarow;": "\u2910", + "drcorn;": "\u231f", + "drcrop;": "\u230c", + "dscr;": "\U0001d4b9", + "dscy;": "\u0455", + "dsol;": "\u29f6", + "dstrok;": "\u0111", + "dtdot;": "\u22f1", + "dtri;": "\u25bf", + "dtrif;": "\u25be", + "duarr;": "\u21f5", + "duhar;": "\u296f", + "dwangle;": "\u29a6", + "dzcy;": "\u045f", + "dzigrarr;": "\u27ff", + "eDDot;": "\u2a77", + "eDot;": "\u2251", + "eacute": "\xe9", + "eacute;": "\xe9", + "easter;": "\u2a6e", + "ecaron;": "\u011b", + "ecir;": "\u2256", + "ecirc": "\xea", + "ecirc;": "\xea", + "ecolon;": "\u2255", + "ecy;": "\u044d", + "edot;": "\u0117", + "ee;": "\u2147", + "efDot;": "\u2252", + "efr;": "\U0001d522", + "eg;": "\u2a9a", + "egrave": "\xe8", + "egrave;": "\xe8", + "egs;": "\u2a96", + "egsdot;": "\u2a98", + "el;": "\u2a99", + "elinters;": "\u23e7", + "ell;": "\u2113", + "els;": "\u2a95", + "elsdot;": "\u2a97", + "emacr;": "\u0113", + "empty;": "\u2205", + "emptyset;": "\u2205", + "emptyv;": "\u2205", + "emsp13;": "\u2004", + "emsp14;": "\u2005", + "emsp;": "\u2003", + "eng;": "\u014b", + "ensp;": "\u2002", + "eogon;": "\u0119", + "eopf;": "\U0001d556", + "epar;": "\u22d5", + "eparsl;": "\u29e3", + "eplus;": "\u2a71", + "epsi;": "\u03b5", + "epsilon;": "\u03b5", + "epsiv;": "\u03f5", + "eqcirc;": "\u2256", + "eqcolon;": "\u2255", + "eqsim;": "\u2242", + "eqslantgtr;": "\u2a96", + "eqslantless;": "\u2a95", + "equals;": "=", + "equest;": "\u225f", + "equiv;": "\u2261", + "equivDD;": "\u2a78", + "eqvparsl;": "\u29e5", + "erDot;": "\u2253", + "erarr;": "\u2971", + "escr;": "\u212f", + "esdot;": "\u2250", + "esim;": "\u2242", + "eta;": "\u03b7", + "eth": "\xf0", + "eth;": "\xf0", + "euml": "\xeb", + "euml;": "\xeb", + "euro;": "\u20ac", + "excl;": "!", + "exist;": "\u2203", + "expectation;": "\u2130", + "exponentiale;": "\u2147", + "fallingdotseq;": "\u2252", + 
"fcy;": "\u0444", + "female;": "\u2640", + "ffilig;": "\ufb03", + "fflig;": "\ufb00", + "ffllig;": "\ufb04", + "ffr;": "\U0001d523", + "filig;": "\ufb01", + "fjlig;": "fj", + "flat;": "\u266d", + "fllig;": "\ufb02", + "fltns;": "\u25b1", + "fnof;": "\u0192", + "fopf;": "\U0001d557", + "forall;": "\u2200", + "fork;": "\u22d4", + "forkv;": "\u2ad9", + "fpartint;": "\u2a0d", + "frac12": "\xbd", + "frac12;": "\xbd", + "frac13;": "\u2153", + "frac14": "\xbc", + "frac14;": "\xbc", + "frac15;": "\u2155", + "frac16;": "\u2159", + "frac18;": "\u215b", + "frac23;": "\u2154", + "frac25;": "\u2156", + "frac34": "\xbe", + "frac34;": "\xbe", + "frac35;": "\u2157", + "frac38;": "\u215c", + "frac45;": "\u2158", + "frac56;": "\u215a", + "frac58;": "\u215d", + "frac78;": "\u215e", + "frasl;": "\u2044", + "frown;": "\u2322", + "fscr;": "\U0001d4bb", + "gE;": "\u2267", + "gEl;": "\u2a8c", + "gacute;": "\u01f5", + "gamma;": "\u03b3", + "gammad;": "\u03dd", + "gap;": "\u2a86", + "gbreve;": "\u011f", + "gcirc;": "\u011d", + "gcy;": "\u0433", + "gdot;": "\u0121", + "ge;": "\u2265", + "gel;": "\u22db", + "geq;": "\u2265", + "geqq;": "\u2267", + "geqslant;": "\u2a7e", + "ges;": "\u2a7e", + "gescc;": "\u2aa9", + "gesdot;": "\u2a80", + "gesdoto;": "\u2a82", + "gesdotol;": "\u2a84", + "gesl;": "\u22db\ufe00", + "gesles;": "\u2a94", + "gfr;": "\U0001d524", + "gg;": "\u226b", + "ggg;": "\u22d9", + "gimel;": "\u2137", + "gjcy;": "\u0453", + "gl;": "\u2277", + "glE;": "\u2a92", + "gla;": "\u2aa5", + "glj;": "\u2aa4", + "gnE;": "\u2269", + "gnap;": "\u2a8a", + "gnapprox;": "\u2a8a", + "gne;": "\u2a88", + "gneq;": "\u2a88", + "gneqq;": "\u2269", + "gnsim;": "\u22e7", + "gopf;": "\U0001d558", + "grave;": "`", + "gscr;": "\u210a", + "gsim;": "\u2273", + "gsime;": "\u2a8e", + "gsiml;": "\u2a90", + "gt": ">", + "gt;": ">", + "gtcc;": "\u2aa7", + "gtcir;": "\u2a7a", + "gtdot;": "\u22d7", + "gtlPar;": "\u2995", + "gtquest;": "\u2a7c", + "gtrapprox;": "\u2a86", + "gtrarr;": "\u2978", + "gtrdot;": "\u22d7", 
+ "gtreqless;": "\u22db", + "gtreqqless;": "\u2a8c", + "gtrless;": "\u2277", + "gtrsim;": "\u2273", + "gvertneqq;": "\u2269\ufe00", + "gvnE;": "\u2269\ufe00", + "hArr;": "\u21d4", + "hairsp;": "\u200a", + "half;": "\xbd", + "hamilt;": "\u210b", + "hardcy;": "\u044a", + "harr;": "\u2194", + "harrcir;": "\u2948", + "harrw;": "\u21ad", + "hbar;": "\u210f", + "hcirc;": "\u0125", + "hearts;": "\u2665", + "heartsuit;": "\u2665", + "hellip;": "\u2026", + "hercon;": "\u22b9", + "hfr;": "\U0001d525", + "hksearow;": "\u2925", + "hkswarow;": "\u2926", + "hoarr;": "\u21ff", + "homtht;": "\u223b", + "hookleftarrow;": "\u21a9", + "hookrightarrow;": "\u21aa", + "hopf;": "\U0001d559", + "horbar;": "\u2015", + "hscr;": "\U0001d4bd", + "hslash;": "\u210f", + "hstrok;": "\u0127", + "hybull;": "\u2043", + "hyphen;": "\u2010", + "iacute": "\xed", + "iacute;": "\xed", + "ic;": "\u2063", + "icirc": "\xee", + "icirc;": "\xee", + "icy;": "\u0438", + "iecy;": "\u0435", + "iexcl": "\xa1", + "iexcl;": "\xa1", + "iff;": "\u21d4", + "ifr;": "\U0001d526", + "igrave": "\xec", + "igrave;": "\xec", + "ii;": "\u2148", + "iiiint;": "\u2a0c", + "iiint;": "\u222d", + "iinfin;": "\u29dc", + "iiota;": "\u2129", + "ijlig;": "\u0133", + "imacr;": "\u012b", + "image;": "\u2111", + "imagline;": "\u2110", + "imagpart;": "\u2111", + "imath;": "\u0131", + "imof;": "\u22b7", + "imped;": "\u01b5", + "in;": "\u2208", + "incare;": "\u2105", + "infin;": "\u221e", + "infintie;": "\u29dd", + "inodot;": "\u0131", + "int;": "\u222b", + "intcal;": "\u22ba", + "integers;": "\u2124", + "intercal;": "\u22ba", + "intlarhk;": "\u2a17", + "intprod;": "\u2a3c", + "iocy;": "\u0451", + "iogon;": "\u012f", + "iopf;": "\U0001d55a", + "iota;": "\u03b9", + "iprod;": "\u2a3c", + "iquest": "\xbf", + "iquest;": "\xbf", + "iscr;": "\U0001d4be", + "isin;": "\u2208", + "isinE;": "\u22f9", + "isindot;": "\u22f5", + "isins;": "\u22f4", + "isinsv;": "\u22f3", + "isinv;": "\u2208", + "it;": "\u2062", + "itilde;": "\u0129", + "iukcy;": 
"\u0456", + "iuml": "\xef", + "iuml;": "\xef", + "jcirc;": "\u0135", + "jcy;": "\u0439", + "jfr;": "\U0001d527", + "jmath;": "\u0237", + "jopf;": "\U0001d55b", + "jscr;": "\U0001d4bf", + "jsercy;": "\u0458", + "jukcy;": "\u0454", + "kappa;": "\u03ba", + "kappav;": "\u03f0", + "kcedil;": "\u0137", + "kcy;": "\u043a", + "kfr;": "\U0001d528", + "kgreen;": "\u0138", + "khcy;": "\u0445", + "kjcy;": "\u045c", + "kopf;": "\U0001d55c", + "kscr;": "\U0001d4c0", + "lAarr;": "\u21da", + "lArr;": "\u21d0", + "lAtail;": "\u291b", + "lBarr;": "\u290e", + "lE;": "\u2266", + "lEg;": "\u2a8b", + "lHar;": "\u2962", + "lacute;": "\u013a", + "laemptyv;": "\u29b4", + "lagran;": "\u2112", + "lambda;": "\u03bb", + "lang;": "\u27e8", + "langd;": "\u2991", + "langle;": "\u27e8", + "lap;": "\u2a85", + "laquo": "\xab", + "laquo;": "\xab", + "larr;": "\u2190", + "larrb;": "\u21e4", + "larrbfs;": "\u291f", + "larrfs;": "\u291d", + "larrhk;": "\u21a9", + "larrlp;": "\u21ab", + "larrpl;": "\u2939", + "larrsim;": "\u2973", + "larrtl;": "\u21a2", + "lat;": "\u2aab", + "latail;": "\u2919", + "late;": "\u2aad", + "lates;": "\u2aad\ufe00", + "lbarr;": "\u290c", + "lbbrk;": "\u2772", + "lbrace;": "{", + "lbrack;": "[", + "lbrke;": "\u298b", + "lbrksld;": "\u298f", + "lbrkslu;": "\u298d", + "lcaron;": "\u013e", + "lcedil;": "\u013c", + "lceil;": "\u2308", + "lcub;": "{", + "lcy;": "\u043b", + "ldca;": "\u2936", + "ldquo;": "\u201c", + "ldquor;": "\u201e", + "ldrdhar;": "\u2967", + "ldrushar;": "\u294b", + "ldsh;": "\u21b2", + "le;": "\u2264", + "leftarrow;": "\u2190", + "leftarrowtail;": "\u21a2", + "leftharpoondown;": "\u21bd", + "leftharpoonup;": "\u21bc", + "leftleftarrows;": "\u21c7", + "leftrightarrow;": "\u2194", + "leftrightarrows;": "\u21c6", + "leftrightharpoons;": "\u21cb", + "leftrightsquigarrow;": "\u21ad", + "leftthreetimes;": "\u22cb", + "leg;": "\u22da", + "leq;": "\u2264", + "leqq;": "\u2266", + "leqslant;": "\u2a7d", + "les;": "\u2a7d", + "lescc;": "\u2aa8", + "lesdot;": "\u2a7f", + 
"lesdoto;": "\u2a81", + "lesdotor;": "\u2a83", + "lesg;": "\u22da\ufe00", + "lesges;": "\u2a93", + "lessapprox;": "\u2a85", + "lessdot;": "\u22d6", + "lesseqgtr;": "\u22da", + "lesseqqgtr;": "\u2a8b", + "lessgtr;": "\u2276", + "lesssim;": "\u2272", + "lfisht;": "\u297c", + "lfloor;": "\u230a", + "lfr;": "\U0001d529", + "lg;": "\u2276", + "lgE;": "\u2a91", + "lhard;": "\u21bd", + "lharu;": "\u21bc", + "lharul;": "\u296a", + "lhblk;": "\u2584", + "ljcy;": "\u0459", + "ll;": "\u226a", + "llarr;": "\u21c7", + "llcorner;": "\u231e", + "llhard;": "\u296b", + "lltri;": "\u25fa", + "lmidot;": "\u0140", + "lmoust;": "\u23b0", + "lmoustache;": "\u23b0", + "lnE;": "\u2268", + "lnap;": "\u2a89", + "lnapprox;": "\u2a89", + "lne;": "\u2a87", + "lneq;": "\u2a87", + "lneqq;": "\u2268", + "lnsim;": "\u22e6", + "loang;": "\u27ec", + "loarr;": "\u21fd", + "lobrk;": "\u27e6", + "longleftarrow;": "\u27f5", + "longleftrightarrow;": "\u27f7", + "longmapsto;": "\u27fc", + "longrightarrow;": "\u27f6", + "looparrowleft;": "\u21ab", + "looparrowright;": "\u21ac", + "lopar;": "\u2985", + "lopf;": "\U0001d55d", + "loplus;": "\u2a2d", + "lotimes;": "\u2a34", + "lowast;": "\u2217", + "lowbar;": "_", + "loz;": "\u25ca", + "lozenge;": "\u25ca", + "lozf;": "\u29eb", + "lpar;": "(", + "lparlt;": "\u2993", + "lrarr;": "\u21c6", + "lrcorner;": "\u231f", + "lrhar;": "\u21cb", + "lrhard;": "\u296d", + "lrm;": "\u200e", + "lrtri;": "\u22bf", + "lsaquo;": "\u2039", + "lscr;": "\U0001d4c1", + "lsh;": "\u21b0", + "lsim;": "\u2272", + "lsime;": "\u2a8d", + "lsimg;": "\u2a8f", + "lsqb;": "[", + "lsquo;": "\u2018", + "lsquor;": "\u201a", + "lstrok;": "\u0142", + "lt": "<", + "lt;": "<", + "ltcc;": "\u2aa6", + "ltcir;": "\u2a79", + "ltdot;": "\u22d6", + "lthree;": "\u22cb", + "ltimes;": "\u22c9", + "ltlarr;": "\u2976", + "ltquest;": "\u2a7b", + "ltrPar;": "\u2996", + "ltri;": "\u25c3", + "ltrie;": "\u22b4", + "ltrif;": "\u25c2", + "lurdshar;": "\u294a", + "luruhar;": "\u2966", + "lvertneqq;": "\u2268\ufe00", + 
"lvnE;": "\u2268\ufe00", + "mDDot;": "\u223a", + "macr": "\xaf", + "macr;": "\xaf", + "male;": "\u2642", + "malt;": "\u2720", + "maltese;": "\u2720", + "map;": "\u21a6", + "mapsto;": "\u21a6", + "mapstodown;": "\u21a7", + "mapstoleft;": "\u21a4", + "mapstoup;": "\u21a5", + "marker;": "\u25ae", + "mcomma;": "\u2a29", + "mcy;": "\u043c", + "mdash;": "\u2014", + "measuredangle;": "\u2221", + "mfr;": "\U0001d52a", + "mho;": "\u2127", + "micro": "\xb5", + "micro;": "\xb5", + "mid;": "\u2223", + "midast;": "*", + "midcir;": "\u2af0", + "middot": "\xb7", + "middot;": "\xb7", + "minus;": "\u2212", + "minusb;": "\u229f", + "minusd;": "\u2238", + "minusdu;": "\u2a2a", + "mlcp;": "\u2adb", + "mldr;": "\u2026", + "mnplus;": "\u2213", + "models;": "\u22a7", + "mopf;": "\U0001d55e", + "mp;": "\u2213", + "mscr;": "\U0001d4c2", + "mstpos;": "\u223e", + "mu;": "\u03bc", + "multimap;": "\u22b8", + "mumap;": "\u22b8", + "nGg;": "\u22d9\u0338", + "nGt;": "\u226b\u20d2", + "nGtv;": "\u226b\u0338", + "nLeftarrow;": "\u21cd", + "nLeftrightarrow;": "\u21ce", + "nLl;": "\u22d8\u0338", + "nLt;": "\u226a\u20d2", + "nLtv;": "\u226a\u0338", + "nRightarrow;": "\u21cf", + "nVDash;": "\u22af", + "nVdash;": "\u22ae", + "nabla;": "\u2207", + "nacute;": "\u0144", + "nang;": "\u2220\u20d2", + "nap;": "\u2249", + "napE;": "\u2a70\u0338", + "napid;": "\u224b\u0338", + "napos;": "\u0149", + "napprox;": "\u2249", + "natur;": "\u266e", + "natural;": "\u266e", + "naturals;": "\u2115", + "nbsp": "\xa0", + "nbsp;": "\xa0", + "nbump;": "\u224e\u0338", + "nbumpe;": "\u224f\u0338", + "ncap;": "\u2a43", + "ncaron;": "\u0148", + "ncedil;": "\u0146", + "ncong;": "\u2247", + "ncongdot;": "\u2a6d\u0338", + "ncup;": "\u2a42", + "ncy;": "\u043d", + "ndash;": "\u2013", + "ne;": "\u2260", + "neArr;": "\u21d7", + "nearhk;": "\u2924", + "nearr;": "\u2197", + "nearrow;": "\u2197", + "nedot;": "\u2250\u0338", + "nequiv;": "\u2262", + "nesear;": "\u2928", + "nesim;": "\u2242\u0338", + "nexist;": "\u2204", + "nexists;": 
"\u2204", + "nfr;": "\U0001d52b", + "ngE;": "\u2267\u0338", + "nge;": "\u2271", + "ngeq;": "\u2271", + "ngeqq;": "\u2267\u0338", + "ngeqslant;": "\u2a7e\u0338", + "nges;": "\u2a7e\u0338", + "ngsim;": "\u2275", + "ngt;": "\u226f", + "ngtr;": "\u226f", + "nhArr;": "\u21ce", + "nharr;": "\u21ae", + "nhpar;": "\u2af2", + "ni;": "\u220b", + "nis;": "\u22fc", + "nisd;": "\u22fa", + "niv;": "\u220b", + "njcy;": "\u045a", + "nlArr;": "\u21cd", + "nlE;": "\u2266\u0338", + "nlarr;": "\u219a", + "nldr;": "\u2025", + "nle;": "\u2270", + "nleftarrow;": "\u219a", + "nleftrightarrow;": "\u21ae", + "nleq;": "\u2270", + "nleqq;": "\u2266\u0338", + "nleqslant;": "\u2a7d\u0338", + "nles;": "\u2a7d\u0338", + "nless;": "\u226e", + "nlsim;": "\u2274", + "nlt;": "\u226e", + "nltri;": "\u22ea", + "nltrie;": "\u22ec", + "nmid;": "\u2224", + "nopf;": "\U0001d55f", + "not": "\xac", + "not;": "\xac", + "notin;": "\u2209", + "notinE;": "\u22f9\u0338", + "notindot;": "\u22f5\u0338", + "notinva;": "\u2209", + "notinvb;": "\u22f7", + "notinvc;": "\u22f6", + "notni;": "\u220c", + "notniva;": "\u220c", + "notnivb;": "\u22fe", + "notnivc;": "\u22fd", + "npar;": "\u2226", + "nparallel;": "\u2226", + "nparsl;": "\u2afd\u20e5", + "npart;": "\u2202\u0338", + "npolint;": "\u2a14", + "npr;": "\u2280", + "nprcue;": "\u22e0", + "npre;": "\u2aaf\u0338", + "nprec;": "\u2280", + "npreceq;": "\u2aaf\u0338", + "nrArr;": "\u21cf", + "nrarr;": "\u219b", + "nrarrc;": "\u2933\u0338", + "nrarrw;": "\u219d\u0338", + "nrightarrow;": "\u219b", + "nrtri;": "\u22eb", + "nrtrie;": "\u22ed", + "nsc;": "\u2281", + "nsccue;": "\u22e1", + "nsce;": "\u2ab0\u0338", + "nscr;": "\U0001d4c3", + "nshortmid;": "\u2224", + "nshortparallel;": "\u2226", + "nsim;": "\u2241", + "nsime;": "\u2244", + "nsimeq;": "\u2244", + "nsmid;": "\u2224", + "nspar;": "\u2226", + "nsqsube;": "\u22e2", + "nsqsupe;": "\u22e3", + "nsub;": "\u2284", + "nsubE;": "\u2ac5\u0338", + "nsube;": "\u2288", + "nsubset;": "\u2282\u20d2", + "nsubseteq;": "\u2288", + 
"nsubseteqq;": "\u2ac5\u0338", + "nsucc;": "\u2281", + "nsucceq;": "\u2ab0\u0338", + "nsup;": "\u2285", + "nsupE;": "\u2ac6\u0338", + "nsupe;": "\u2289", + "nsupset;": "\u2283\u20d2", + "nsupseteq;": "\u2289", + "nsupseteqq;": "\u2ac6\u0338", + "ntgl;": "\u2279", + "ntilde": "\xf1", + "ntilde;": "\xf1", + "ntlg;": "\u2278", + "ntriangleleft;": "\u22ea", + "ntrianglelefteq;": "\u22ec", + "ntriangleright;": "\u22eb", + "ntrianglerighteq;": "\u22ed", + "nu;": "\u03bd", + "num;": "#", + "numero;": "\u2116", + "numsp;": "\u2007", + "nvDash;": "\u22ad", + "nvHarr;": "\u2904", + "nvap;": "\u224d\u20d2", + "nvdash;": "\u22ac", + "nvge;": "\u2265\u20d2", + "nvgt;": ">\u20d2", + "nvinfin;": "\u29de", + "nvlArr;": "\u2902", + "nvle;": "\u2264\u20d2", + "nvlt;": "<\u20d2", + "nvltrie;": "\u22b4\u20d2", + "nvrArr;": "\u2903", + "nvrtrie;": "\u22b5\u20d2", + "nvsim;": "\u223c\u20d2", + "nwArr;": "\u21d6", + "nwarhk;": "\u2923", + "nwarr;": "\u2196", + "nwarrow;": "\u2196", + "nwnear;": "\u2927", + "oS;": "\u24c8", + "oacute": "\xf3", + "oacute;": "\xf3", + "oast;": "\u229b", + "ocir;": "\u229a", + "ocirc": "\xf4", + "ocirc;": "\xf4", + "ocy;": "\u043e", + "odash;": "\u229d", + "odblac;": "\u0151", + "odiv;": "\u2a38", + "odot;": "\u2299", + "odsold;": "\u29bc", + "oelig;": "\u0153", + "ofcir;": "\u29bf", + "ofr;": "\U0001d52c", + "ogon;": "\u02db", + "ograve": "\xf2", + "ograve;": "\xf2", + "ogt;": "\u29c1", + "ohbar;": "\u29b5", + "ohm;": "\u03a9", + "oint;": "\u222e", + "olarr;": "\u21ba", + "olcir;": "\u29be", + "olcross;": "\u29bb", + "oline;": "\u203e", + "olt;": "\u29c0", + "omacr;": "\u014d", + "omega;": "\u03c9", + "omicron;": "\u03bf", + "omid;": "\u29b6", + "ominus;": "\u2296", + "oopf;": "\U0001d560", + "opar;": "\u29b7", + "operp;": "\u29b9", + "oplus;": "\u2295", + "or;": "\u2228", + "orarr;": "\u21bb", + "ord;": "\u2a5d", + "order;": "\u2134", + "orderof;": "\u2134", + "ordf": "\xaa", + "ordf;": "\xaa", + "ordm": "\xba", + "ordm;": "\xba", + "origof;": "\u22b6", + 
"oror;": "\u2a56", + "orslope;": "\u2a57", + "orv;": "\u2a5b", + "oscr;": "\u2134", + "oslash": "\xf8", + "oslash;": "\xf8", + "osol;": "\u2298", + "otilde": "\xf5", + "otilde;": "\xf5", + "otimes;": "\u2297", + "otimesas;": "\u2a36", + "ouml": "\xf6", + "ouml;": "\xf6", + "ovbar;": "\u233d", + "par;": "\u2225", + "para": "\xb6", + "para;": "\xb6", + "parallel;": "\u2225", + "parsim;": "\u2af3", + "parsl;": "\u2afd", + "part;": "\u2202", + "pcy;": "\u043f", + "percnt;": "%", + "period;": ".", + "permil;": "\u2030", + "perp;": "\u22a5", + "pertenk;": "\u2031", + "pfr;": "\U0001d52d", + "phi;": "\u03c6", + "phiv;": "\u03d5", + "phmmat;": "\u2133", + "phone;": "\u260e", + "pi;": "\u03c0", + "pitchfork;": "\u22d4", + "piv;": "\u03d6", + "planck;": "\u210f", + "planckh;": "\u210e", + "plankv;": "\u210f", + "plus;": "+", + "plusacir;": "\u2a23", + "plusb;": "\u229e", + "pluscir;": "\u2a22", + "plusdo;": "\u2214", + "plusdu;": "\u2a25", + "pluse;": "\u2a72", + "plusmn": "\xb1", + "plusmn;": "\xb1", + "plussim;": "\u2a26", + "plustwo;": "\u2a27", + "pm;": "\xb1", + "pointint;": "\u2a15", + "popf;": "\U0001d561", + "pound": "\xa3", + "pound;": "\xa3", + "pr;": "\u227a", + "prE;": "\u2ab3", + "prap;": "\u2ab7", + "prcue;": "\u227c", + "pre;": "\u2aaf", + "prec;": "\u227a", + "precapprox;": "\u2ab7", + "preccurlyeq;": "\u227c", + "preceq;": "\u2aaf", + "precnapprox;": "\u2ab9", + "precneqq;": "\u2ab5", + "precnsim;": "\u22e8", + "precsim;": "\u227e", + "prime;": "\u2032", + "primes;": "\u2119", + "prnE;": "\u2ab5", + "prnap;": "\u2ab9", + "prnsim;": "\u22e8", + "prod;": "\u220f", + "profalar;": "\u232e", + "profline;": "\u2312", + "profsurf;": "\u2313", + "prop;": "\u221d", + "propto;": "\u221d", + "prsim;": "\u227e", + "prurel;": "\u22b0", + "pscr;": "\U0001d4c5", + "psi;": "\u03c8", + "puncsp;": "\u2008", + "qfr;": "\U0001d52e", + "qint;": "\u2a0c", + "qopf;": "\U0001d562", + "qprime;": "\u2057", + "qscr;": "\U0001d4c6", + "quaternions;": "\u210d", + "quatint;": "\u2a16", + 
"quest;": "?", + "questeq;": "\u225f", + "quot": "\"", + "quot;": "\"", + "rAarr;": "\u21db", + "rArr;": "\u21d2", + "rAtail;": "\u291c", + "rBarr;": "\u290f", + "rHar;": "\u2964", + "race;": "\u223d\u0331", + "racute;": "\u0155", + "radic;": "\u221a", + "raemptyv;": "\u29b3", + "rang;": "\u27e9", + "rangd;": "\u2992", + "range;": "\u29a5", + "rangle;": "\u27e9", + "raquo": "\xbb", + "raquo;": "\xbb", + "rarr;": "\u2192", + "rarrap;": "\u2975", + "rarrb;": "\u21e5", + "rarrbfs;": "\u2920", + "rarrc;": "\u2933", + "rarrfs;": "\u291e", + "rarrhk;": "\u21aa", + "rarrlp;": "\u21ac", + "rarrpl;": "\u2945", + "rarrsim;": "\u2974", + "rarrtl;": "\u21a3", + "rarrw;": "\u219d", + "ratail;": "\u291a", + "ratio;": "\u2236", + "rationals;": "\u211a", + "rbarr;": "\u290d", + "rbbrk;": "\u2773", + "rbrace;": "}", + "rbrack;": "]", + "rbrke;": "\u298c", + "rbrksld;": "\u298e", + "rbrkslu;": "\u2990", + "rcaron;": "\u0159", + "rcedil;": "\u0157", + "rceil;": "\u2309", + "rcub;": "}", + "rcy;": "\u0440", + "rdca;": "\u2937", + "rdldhar;": "\u2969", + "rdquo;": "\u201d", + "rdquor;": "\u201d", + "rdsh;": "\u21b3", + "real;": "\u211c", + "realine;": "\u211b", + "realpart;": "\u211c", + "reals;": "\u211d", + "rect;": "\u25ad", + "reg": "\xae", + "reg;": "\xae", + "rfisht;": "\u297d", + "rfloor;": "\u230b", + "rfr;": "\U0001d52f", + "rhard;": "\u21c1", + "rharu;": "\u21c0", + "rharul;": "\u296c", + "rho;": "\u03c1", + "rhov;": "\u03f1", + "rightarrow;": "\u2192", + "rightarrowtail;": "\u21a3", + "rightharpoondown;": "\u21c1", + "rightharpoonup;": "\u21c0", + "rightleftarrows;": "\u21c4", + "rightleftharpoons;": "\u21cc", + "rightrightarrows;": "\u21c9", + "rightsquigarrow;": "\u219d", + "rightthreetimes;": "\u22cc", + "ring;": "\u02da", + "risingdotseq;": "\u2253", + "rlarr;": "\u21c4", + "rlhar;": "\u21cc", + "rlm;": "\u200f", + "rmoust;": "\u23b1", + "rmoustache;": "\u23b1", + "rnmid;": "\u2aee", + "roang;": "\u27ed", + "roarr;": "\u21fe", + "robrk;": "\u27e7", + "ropar;": "\u2986", 
+ "ropf;": "\U0001d563", + "roplus;": "\u2a2e", + "rotimes;": "\u2a35", + "rpar;": ")", + "rpargt;": "\u2994", + "rppolint;": "\u2a12", + "rrarr;": "\u21c9", + "rsaquo;": "\u203a", + "rscr;": "\U0001d4c7", + "rsh;": "\u21b1", + "rsqb;": "]", + "rsquo;": "\u2019", + "rsquor;": "\u2019", + "rthree;": "\u22cc", + "rtimes;": "\u22ca", + "rtri;": "\u25b9", + "rtrie;": "\u22b5", + "rtrif;": "\u25b8", + "rtriltri;": "\u29ce", + "ruluhar;": "\u2968", + "rx;": "\u211e", + "sacute;": "\u015b", + "sbquo;": "\u201a", + "sc;": "\u227b", + "scE;": "\u2ab4", + "scap;": "\u2ab8", + "scaron;": "\u0161", + "sccue;": "\u227d", + "sce;": "\u2ab0", + "scedil;": "\u015f", + "scirc;": "\u015d", + "scnE;": "\u2ab6", + "scnap;": "\u2aba", + "scnsim;": "\u22e9", + "scpolint;": "\u2a13", + "scsim;": "\u227f", + "scy;": "\u0441", + "sdot;": "\u22c5", + "sdotb;": "\u22a1", + "sdote;": "\u2a66", + "seArr;": "\u21d8", + "searhk;": "\u2925", + "searr;": "\u2198", + "searrow;": "\u2198", + "sect": "\xa7", + "sect;": "\xa7", + "semi;": ";", + "seswar;": "\u2929", + "setminus;": "\u2216", + "setmn;": "\u2216", + "sext;": "\u2736", + "sfr;": "\U0001d530", + "sfrown;": "\u2322", + "sharp;": "\u266f", + "shchcy;": "\u0449", + "shcy;": "\u0448", + "shortmid;": "\u2223", + "shortparallel;": "\u2225", + "shy": "\xad", + "shy;": "\xad", + "sigma;": "\u03c3", + "sigmaf;": "\u03c2", + "sigmav;": "\u03c2", + "sim;": "\u223c", + "simdot;": "\u2a6a", + "sime;": "\u2243", + "simeq;": "\u2243", + "simg;": "\u2a9e", + "simgE;": "\u2aa0", + "siml;": "\u2a9d", + "simlE;": "\u2a9f", + "simne;": "\u2246", + "simplus;": "\u2a24", + "simrarr;": "\u2972", + "slarr;": "\u2190", + "smallsetminus;": "\u2216", + "smashp;": "\u2a33", + "smeparsl;": "\u29e4", + "smid;": "\u2223", + "smile;": "\u2323", + "smt;": "\u2aaa", + "smte;": "\u2aac", + "smtes;": "\u2aac\ufe00", + "softcy;": "\u044c", + "sol;": "/", + "solb;": "\u29c4", + "solbar;": "\u233f", + "sopf;": "\U0001d564", + "spades;": "\u2660", + "spadesuit;": "\u2660", + 
"spar;": "\u2225", + "sqcap;": "\u2293", + "sqcaps;": "\u2293\ufe00", + "sqcup;": "\u2294", + "sqcups;": "\u2294\ufe00", + "sqsub;": "\u228f", + "sqsube;": "\u2291", + "sqsubset;": "\u228f", + "sqsubseteq;": "\u2291", + "sqsup;": "\u2290", + "sqsupe;": "\u2292", + "sqsupset;": "\u2290", + "sqsupseteq;": "\u2292", + "squ;": "\u25a1", + "square;": "\u25a1", + "squarf;": "\u25aa", + "squf;": "\u25aa", + "srarr;": "\u2192", + "sscr;": "\U0001d4c8", + "ssetmn;": "\u2216", + "ssmile;": "\u2323", + "sstarf;": "\u22c6", + "star;": "\u2606", + "starf;": "\u2605", + "straightepsilon;": "\u03f5", + "straightphi;": "\u03d5", + "strns;": "\xaf", + "sub;": "\u2282", + "subE;": "\u2ac5", + "subdot;": "\u2abd", + "sube;": "\u2286", + "subedot;": "\u2ac3", + "submult;": "\u2ac1", + "subnE;": "\u2acb", + "subne;": "\u228a", + "subplus;": "\u2abf", + "subrarr;": "\u2979", + "subset;": "\u2282", + "subseteq;": "\u2286", + "subseteqq;": "\u2ac5", + "subsetneq;": "\u228a", + "subsetneqq;": "\u2acb", + "subsim;": "\u2ac7", + "subsub;": "\u2ad5", + "subsup;": "\u2ad3", + "succ;": "\u227b", + "succapprox;": "\u2ab8", + "succcurlyeq;": "\u227d", + "succeq;": "\u2ab0", + "succnapprox;": "\u2aba", + "succneqq;": "\u2ab6", + "succnsim;": "\u22e9", + "succsim;": "\u227f", + "sum;": "\u2211", + "sung;": "\u266a", + "sup1": "\xb9", + "sup1;": "\xb9", + "sup2": "\xb2", + "sup2;": "\xb2", + "sup3": "\xb3", + "sup3;": "\xb3", + "sup;": "\u2283", + "supE;": "\u2ac6", + "supdot;": "\u2abe", + "supdsub;": "\u2ad8", + "supe;": "\u2287", + "supedot;": "\u2ac4", + "suphsol;": "\u27c9", + "suphsub;": "\u2ad7", + "suplarr;": "\u297b", + "supmult;": "\u2ac2", + "supnE;": "\u2acc", + "supne;": "\u228b", + "supplus;": "\u2ac0", + "supset;": "\u2283", + "supseteq;": "\u2287", + "supseteqq;": "\u2ac6", + "supsetneq;": "\u228b", + "supsetneqq;": "\u2acc", + "supsim;": "\u2ac8", + "supsub;": "\u2ad4", + "supsup;": "\u2ad6", + "swArr;": "\u21d9", + "swarhk;": "\u2926", + "swarr;": "\u2199", + "swarrow;": "\u2199", 
+ "swnwar;": "\u292a", + "szlig": "\xdf", + "szlig;": "\xdf", + "target;": "\u2316", + "tau;": "\u03c4", + "tbrk;": "\u23b4", + "tcaron;": "\u0165", + "tcedil;": "\u0163", + "tcy;": "\u0442", + "tdot;": "\u20db", + "telrec;": "\u2315", + "tfr;": "\U0001d531", + "there4;": "\u2234", + "therefore;": "\u2234", + "theta;": "\u03b8", + "thetasym;": "\u03d1", + "thetav;": "\u03d1", + "thickapprox;": "\u2248", + "thicksim;": "\u223c", + "thinsp;": "\u2009", + "thkap;": "\u2248", + "thksim;": "\u223c", + "thorn": "\xfe", + "thorn;": "\xfe", + "tilde;": "\u02dc", + "times": "\xd7", + "times;": "\xd7", + "timesb;": "\u22a0", + "timesbar;": "\u2a31", + "timesd;": "\u2a30", + "tint;": "\u222d", + "toea;": "\u2928", + "top;": "\u22a4", + "topbot;": "\u2336", + "topcir;": "\u2af1", + "topf;": "\U0001d565", + "topfork;": "\u2ada", + "tosa;": "\u2929", + "tprime;": "\u2034", + "trade;": "\u2122", + "triangle;": "\u25b5", + "triangledown;": "\u25bf", + "triangleleft;": "\u25c3", + "trianglelefteq;": "\u22b4", + "triangleq;": "\u225c", + "triangleright;": "\u25b9", + "trianglerighteq;": "\u22b5", + "tridot;": "\u25ec", + "trie;": "\u225c", + "triminus;": "\u2a3a", + "triplus;": "\u2a39", + "trisb;": "\u29cd", + "tritime;": "\u2a3b", + "trpezium;": "\u23e2", + "tscr;": "\U0001d4c9", + "tscy;": "\u0446", + "tshcy;": "\u045b", + "tstrok;": "\u0167", + "twixt;": "\u226c", + "twoheadleftarrow;": "\u219e", + "twoheadrightarrow;": "\u21a0", + "uArr;": "\u21d1", + "uHar;": "\u2963", + "uacute": "\xfa", + "uacute;": "\xfa", + "uarr;": "\u2191", + "ubrcy;": "\u045e", + "ubreve;": "\u016d", + "ucirc": "\xfb", + "ucirc;": "\xfb", + "ucy;": "\u0443", + "udarr;": "\u21c5", + "udblac;": "\u0171", + "udhar;": "\u296e", + "ufisht;": "\u297e", + "ufr;": "\U0001d532", + "ugrave": "\xf9", + "ugrave;": "\xf9", + "uharl;": "\u21bf", + "uharr;": "\u21be", + "uhblk;": "\u2580", + "ulcorn;": "\u231c", + "ulcorner;": "\u231c", + "ulcrop;": "\u230f", + "ultri;": "\u25f8", + "umacr;": "\u016b", + "uml": 
"\xa8", + "uml;": "\xa8", + "uogon;": "\u0173", + "uopf;": "\U0001d566", + "uparrow;": "\u2191", + "updownarrow;": "\u2195", + "upharpoonleft;": "\u21bf", + "upharpoonright;": "\u21be", + "uplus;": "\u228e", + "upsi;": "\u03c5", + "upsih;": "\u03d2", + "upsilon;": "\u03c5", + "upuparrows;": "\u21c8", + "urcorn;": "\u231d", + "urcorner;": "\u231d", + "urcrop;": "\u230e", + "uring;": "\u016f", + "urtri;": "\u25f9", + "uscr;": "\U0001d4ca", + "utdot;": "\u22f0", + "utilde;": "\u0169", + "utri;": "\u25b5", + "utrif;": "\u25b4", + "uuarr;": "\u21c8", + "uuml": "\xfc", + "uuml;": "\xfc", + "uwangle;": "\u29a7", + "vArr;": "\u21d5", + "vBar;": "\u2ae8", + "vBarv;": "\u2ae9", + "vDash;": "\u22a8", + "vangrt;": "\u299c", + "varepsilon;": "\u03f5", + "varkappa;": "\u03f0", + "varnothing;": "\u2205", + "varphi;": "\u03d5", + "varpi;": "\u03d6", + "varpropto;": "\u221d", + "varr;": "\u2195", + "varrho;": "\u03f1", + "varsigma;": "\u03c2", + "varsubsetneq;": "\u228a\ufe00", + "varsubsetneqq;": "\u2acb\ufe00", + "varsupsetneq;": "\u228b\ufe00", + "varsupsetneqq;": "\u2acc\ufe00", + "vartheta;": "\u03d1", + "vartriangleleft;": "\u22b2", + "vartriangleright;": "\u22b3", + "vcy;": "\u0432", + "vdash;": "\u22a2", + "vee;": "\u2228", + "veebar;": "\u22bb", + "veeeq;": "\u225a", + "vellip;": "\u22ee", + "verbar;": "|", + "vert;": "|", + "vfr;": "\U0001d533", + "vltri;": "\u22b2", + "vnsub;": "\u2282\u20d2", + "vnsup;": "\u2283\u20d2", + "vopf;": "\U0001d567", + "vprop;": "\u221d", + "vrtri;": "\u22b3", + "vscr;": "\U0001d4cb", + "vsubnE;": "\u2acb\ufe00", + "vsubne;": "\u228a\ufe00", + "vsupnE;": "\u2acc\ufe00", + "vsupne;": "\u228b\ufe00", + "vzigzag;": "\u299a", + "wcirc;": "\u0175", + "wedbar;": "\u2a5f", + "wedge;": "\u2227", + "wedgeq;": "\u2259", + "weierp;": "\u2118", + "wfr;": "\U0001d534", + "wopf;": "\U0001d568", + "wp;": "\u2118", + "wr;": "\u2240", + "wreath;": "\u2240", + "wscr;": "\U0001d4cc", + "xcap;": "\u22c2", + "xcirc;": "\u25ef", + "xcup;": "\u22c3", + "xdtri;": 
"\u25bd", + "xfr;": "\U0001d535", + "xhArr;": "\u27fa", + "xharr;": "\u27f7", + "xi;": "\u03be", + "xlArr;": "\u27f8", + "xlarr;": "\u27f5", + "xmap;": "\u27fc", + "xnis;": "\u22fb", + "xodot;": "\u2a00", + "xopf;": "\U0001d569", + "xoplus;": "\u2a01", + "xotime;": "\u2a02", + "xrArr;": "\u27f9", + "xrarr;": "\u27f6", + "xscr;": "\U0001d4cd", + "xsqcup;": "\u2a06", + "xuplus;": "\u2a04", + "xutri;": "\u25b3", + "xvee;": "\u22c1", + "xwedge;": "\u22c0", + "yacute": "\xfd", + "yacute;": "\xfd", + "yacy;": "\u044f", + "ycirc;": "\u0177", + "ycy;": "\u044b", + "yen": "\xa5", + "yen;": "\xa5", + "yfr;": "\U0001d536", + "yicy;": "\u0457", + "yopf;": "\U0001d56a", + "yscr;": "\U0001d4ce", + "yucy;": "\u044e", + "yuml": "\xff", + "yuml;": "\xff", + "zacute;": "\u017a", + "zcaron;": "\u017e", + "zcy;": "\u0437", + "zdot;": "\u017c", + "zeetrf;": "\u2128", + "zeta;": "\u03b6", + "zfr;": "\U0001d537", + "zhcy;": "\u0436", + "zigrarr;": "\u21dd", + "zopf;": "\U0001d56b", + "zscr;": "\U0001d4cf", + "zwj;": "\u200d", + "zwnj;": "\u200c", +} + +replacementCharacters = { + 0x0: "\uFFFD", + 0x0d: "\u000D", + 0x80: "\u20AC", + 0x81: "\u0081", + 0x81: "\u0081", + 0x82: "\u201A", + 0x83: "\u0192", + 0x84: "\u201E", + 0x85: "\u2026", + 0x86: "\u2020", + 0x87: "\u2021", + 0x88: "\u02C6", + 0x89: "\u2030", + 0x8A: "\u0160", + 0x8B: "\u2039", + 0x8C: "\u0152", + 0x8D: "\u008D", + 0x8E: "\u017D", + 0x8F: "\u008F", + 0x90: "\u0090", + 0x91: "\u2018", + 0x92: "\u2019", + 0x93: "\u201C", + 0x94: "\u201D", + 0x95: "\u2022", + 0x96: "\u2013", + 0x97: "\u2014", + 0x98: "\u02DC", + 0x99: "\u2122", + 0x9A: "\u0161", + 0x9B: "\u203A", + 0x9C: "\u0153", + 0x9D: "\u009D", + 0x9E: "\u017E", + 0x9F: "\u0178", +} + +encodings = { + '437': 'cp437', + '850': 'cp850', + '852': 'cp852', + '855': 'cp855', + '857': 'cp857', + '860': 'cp860', + '861': 'cp861', + '862': 'cp862', + '863': 'cp863', + '865': 'cp865', + '866': 'cp866', + '869': 'cp869', + 'ansix341968': 'ascii', + 'ansix341986': 'ascii', + 
'arabic': 'iso8859-6', + 'ascii': 'ascii', + 'asmo708': 'iso8859-6', + 'big5': 'big5', + 'big5hkscs': 'big5hkscs', + 'chinese': 'gbk', + 'cp037': 'cp037', + 'cp1026': 'cp1026', + 'cp154': 'ptcp154', + 'cp367': 'ascii', + 'cp424': 'cp424', + 'cp437': 'cp437', + 'cp500': 'cp500', + 'cp775': 'cp775', + 'cp819': 'windows-1252', + 'cp850': 'cp850', + 'cp852': 'cp852', + 'cp855': 'cp855', + 'cp857': 'cp857', + 'cp860': 'cp860', + 'cp861': 'cp861', + 'cp862': 'cp862', + 'cp863': 'cp863', + 'cp864': 'cp864', + 'cp865': 'cp865', + 'cp866': 'cp866', + 'cp869': 'cp869', + 'cp936': 'gbk', + 'cpgr': 'cp869', + 'cpis': 'cp861', + 'csascii': 'ascii', + 'csbig5': 'big5', + 'cseuckr': 'cp949', + 'cseucpkdfmtjapanese': 'euc_jp', + 'csgb2312': 'gbk', + 'cshproman8': 'hp-roman8', + 'csibm037': 'cp037', + 'csibm1026': 'cp1026', + 'csibm424': 'cp424', + 'csibm500': 'cp500', + 'csibm855': 'cp855', + 'csibm857': 'cp857', + 'csibm860': 'cp860', + 'csibm861': 'cp861', + 'csibm863': 'cp863', + 'csibm864': 'cp864', + 'csibm865': 'cp865', + 'csibm866': 'cp866', + 'csibm869': 'cp869', + 'csiso2022jp': 'iso2022_jp', + 'csiso2022jp2': 'iso2022_jp_2', + 'csiso2022kr': 'iso2022_kr', + 'csiso58gb231280': 'gbk', + 'csisolatin1': 'windows-1252', + 'csisolatin2': 'iso8859-2', + 'csisolatin3': 'iso8859-3', + 'csisolatin4': 'iso8859-4', + 'csisolatin5': 'windows-1254', + 'csisolatin6': 'iso8859-10', + 'csisolatinarabic': 'iso8859-6', + 'csisolatincyrillic': 'iso8859-5', + 'csisolatingreek': 'iso8859-7', + 'csisolatinhebrew': 'iso8859-8', + 'cskoi8r': 'koi8-r', + 'csksc56011987': 'cp949', + 'cspc775baltic': 'cp775', + 'cspc850multilingual': 'cp850', + 'cspc862latinhebrew': 'cp862', + 'cspc8codepage437': 'cp437', + 'cspcp852': 'cp852', + 'csptcp154': 'ptcp154', + 'csshiftjis': 'shift_jis', + 'csunicode11utf7': 'utf-7', + 'cyrillic': 'iso8859-5', + 'cyrillicasian': 'ptcp154', + 'ebcdiccpbe': 'cp500', + 'ebcdiccpca': 'cp037', + 'ebcdiccpch': 'cp500', + 'ebcdiccphe': 'cp424', + 'ebcdiccpnl': 'cp037', + 
'ebcdiccpus': 'cp037', + 'ebcdiccpwt': 'cp037', + 'ecma114': 'iso8859-6', + 'ecma118': 'iso8859-7', + 'elot928': 'iso8859-7', + 'eucjp': 'euc_jp', + 'euckr': 'cp949', + 'extendedunixcodepackedformatforjapanese': 'euc_jp', + 'gb18030': 'gb18030', + 'gb2312': 'gbk', + 'gb231280': 'gbk', + 'gbk': 'gbk', + 'greek': 'iso8859-7', + 'greek8': 'iso8859-7', + 'hebrew': 'iso8859-8', + 'hproman8': 'hp-roman8', + 'hzgb2312': 'hz', + 'ibm037': 'cp037', + 'ibm1026': 'cp1026', + 'ibm367': 'ascii', + 'ibm424': 'cp424', + 'ibm437': 'cp437', + 'ibm500': 'cp500', + 'ibm775': 'cp775', + 'ibm819': 'windows-1252', + 'ibm850': 'cp850', + 'ibm852': 'cp852', + 'ibm855': 'cp855', + 'ibm857': 'cp857', + 'ibm860': 'cp860', + 'ibm861': 'cp861', + 'ibm862': 'cp862', + 'ibm863': 'cp863', + 'ibm864': 'cp864', + 'ibm865': 'cp865', + 'ibm866': 'cp866', + 'ibm869': 'cp869', + 'iso2022jp': 'iso2022_jp', + 'iso2022jp2': 'iso2022_jp_2', + 'iso2022kr': 'iso2022_kr', + 'iso646irv1991': 'ascii', + 'iso646us': 'ascii', + 'iso88591': 'windows-1252', + 'iso885910': 'iso8859-10', + 'iso8859101992': 'iso8859-10', + 'iso885911987': 'windows-1252', + 'iso885913': 'iso8859-13', + 'iso885914': 'iso8859-14', + 'iso8859141998': 'iso8859-14', + 'iso885915': 'iso8859-15', + 'iso885916': 'iso8859-16', + 'iso8859162001': 'iso8859-16', + 'iso88592': 'iso8859-2', + 'iso885921987': 'iso8859-2', + 'iso88593': 'iso8859-3', + 'iso885931988': 'iso8859-3', + 'iso88594': 'iso8859-4', + 'iso885941988': 'iso8859-4', + 'iso88595': 'iso8859-5', + 'iso885951988': 'iso8859-5', + 'iso88596': 'iso8859-6', + 'iso885961987': 'iso8859-6', + 'iso88597': 'iso8859-7', + 'iso885971987': 'iso8859-7', + 'iso88598': 'iso8859-8', + 'iso885981988': 'iso8859-8', + 'iso88599': 'windows-1254', + 'iso885991989': 'windows-1254', + 'isoceltic': 'iso8859-14', + 'isoir100': 'windows-1252', + 'isoir101': 'iso8859-2', + 'isoir109': 'iso8859-3', + 'isoir110': 'iso8859-4', + 'isoir126': 'iso8859-7', + 'isoir127': 'iso8859-6', + 'isoir138': 'iso8859-8', + 
'isoir144': 'iso8859-5', + 'isoir148': 'windows-1254', + 'isoir149': 'cp949', + 'isoir157': 'iso8859-10', + 'isoir199': 'iso8859-14', + 'isoir226': 'iso8859-16', + 'isoir58': 'gbk', + 'isoir6': 'ascii', + 'koi8r': 'koi8-r', + 'koi8u': 'koi8-u', + 'korean': 'cp949', + 'ksc5601': 'cp949', + 'ksc56011987': 'cp949', + 'ksc56011989': 'cp949', + 'l1': 'windows-1252', + 'l10': 'iso8859-16', + 'l2': 'iso8859-2', + 'l3': 'iso8859-3', + 'l4': 'iso8859-4', + 'l5': 'windows-1254', + 'l6': 'iso8859-10', + 'l8': 'iso8859-14', + 'latin1': 'windows-1252', + 'latin10': 'iso8859-16', + 'latin2': 'iso8859-2', + 'latin3': 'iso8859-3', + 'latin4': 'iso8859-4', + 'latin5': 'windows-1254', + 'latin6': 'iso8859-10', + 'latin8': 'iso8859-14', + 'latin9': 'iso8859-15', + 'ms936': 'gbk', + 'mskanji': 'shift_jis', + 'pt154': 'ptcp154', + 'ptcp154': 'ptcp154', + 'r8': 'hp-roman8', + 'roman8': 'hp-roman8', + 'shiftjis': 'shift_jis', + 'tis620': 'cp874', + 'unicode11utf7': 'utf-7', + 'us': 'ascii', + 'usascii': 'ascii', + 'utf16': 'utf-16', + 'utf16be': 'utf-16-be', + 'utf16le': 'utf-16-le', + 'utf8': 'utf-8', + 'windows1250': 'cp1250', + 'windows1251': 'cp1251', + 'windows1252': 'cp1252', + 'windows1253': 'cp1253', + 'windows1254': 'cp1254', + 'windows1255': 'cp1255', + 'windows1256': 'cp1256', + 'windows1257': 'cp1257', + 'windows1258': 'cp1258', + 'windows936': 'gbk', + 'x-x-big5': 'big5'} + +tokenTypes = { + "Doctype": 0, + "Characters": 1, + "SpaceCharacters": 2, + "StartTag": 3, + "EndTag": 4, + "EmptyTag": 5, + "Comment": 6, + "ParseError": 7 +} + +tagTokenTypes = frozenset([tokenTypes["StartTag"], tokenTypes["EndTag"], + tokenTypes["EmptyTag"]]) + + +prefixes = dict([(v, k) for k, v in namespaces.items()]) +prefixes["http://www.w3.org/1998/Math/MathML"] = "math" + + +class DataLossWarning(UserWarning): + pass + + +class ReparseException(Exception): + pass diff --git a/lib/python3.4/site-packages/pip/_vendor/html5lib/filters/__init__.py 
b/lib/python3.4/site-packages/pip/_vendor/html5lib/filters/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/lib/python3.4/site-packages/pip/_vendor/html5lib/filters/_base.py b/lib/python3.4/site-packages/pip/_vendor/html5lib/filters/_base.py new file mode 100644 index 0000000..c7dbaed --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/html5lib/filters/_base.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import, division, unicode_literals + + +class Filter(object): + def __init__(self, source): + self.source = source + + def __iter__(self): + return iter(self.source) + + def __getattr__(self, name): + return getattr(self.source, name) diff --git a/lib/python3.4/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.py b/lib/python3.4/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.py new file mode 100644 index 0000000..fed6996 --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.py @@ -0,0 +1,20 @@ +from __future__ import absolute_import, division, unicode_literals + +from . import _base + +try: + from collections import OrderedDict +except ImportError: + from ordereddict import OrderedDict + + +class Filter(_base.Filter): + def __iter__(self): + for token in _base.Filter.__iter__(self): + if token["type"] in ("StartTag", "EmptyTag"): + attrs = OrderedDict() + for name, value in sorted(token["data"].items(), + key=lambda x: x[0]): + attrs[name] = value + token["data"] = attrs + yield token diff --git a/lib/python3.4/site-packages/pip/_vendor/html5lib/filters/inject_meta_charset.py b/lib/python3.4/site-packages/pip/_vendor/html5lib/filters/inject_meta_charset.py new file mode 100644 index 0000000..ca33b70 --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/html5lib/filters/inject_meta_charset.py @@ -0,0 +1,65 @@ +from __future__ import absolute_import, division, unicode_literals + +from . 
import _base + + +class Filter(_base.Filter): + def __init__(self, source, encoding): + _base.Filter.__init__(self, source) + self.encoding = encoding + + def __iter__(self): + state = "pre_head" + meta_found = (self.encoding is None) + pending = [] + + for token in _base.Filter.__iter__(self): + type = token["type"] + if type == "StartTag": + if token["name"].lower() == "head": + state = "in_head" + + elif type == "EmptyTag": + if token["name"].lower() == "meta": + # replace charset with actual encoding + has_http_equiv_content_type = False + for (namespace, name), value in token["data"].items(): + if namespace is not None: + continue + elif name.lower() == 'charset': + token["data"][(namespace, name)] = self.encoding + meta_found = True + break + elif name == 'http-equiv' and value.lower() == 'content-type': + has_http_equiv_content_type = True + else: + if has_http_equiv_content_type and (None, "content") in token["data"]: + token["data"][(None, "content")] = 'text/html; charset=%s' % self.encoding + meta_found = True + + elif token["name"].lower() == "head" and not meta_found: + # insert meta into empty head + yield {"type": "StartTag", "name": "head", + "data": token["data"]} + yield {"type": "EmptyTag", "name": "meta", + "data": {(None, "charset"): self.encoding}} + yield {"type": "EndTag", "name": "head"} + meta_found = True + continue + + elif type == "EndTag": + if token["name"].lower() == "head" and pending: + # insert meta into head (if necessary) and flush pending queue + yield pending.pop(0) + if not meta_found: + yield {"type": "EmptyTag", "name": "meta", + "data": {(None, "charset"): self.encoding}} + while pending: + yield pending.pop(0) + meta_found = True + state = "post_head" + + if state == "in_head": + pending.append(token) + else: + yield token diff --git a/lib/python3.4/site-packages/pip/_vendor/html5lib/filters/lint.py b/lib/python3.4/site-packages/pip/_vendor/html5lib/filters/lint.py new file mode 100644 index 0000000..8884696 --- /dev/null 
+++ b/lib/python3.4/site-packages/pip/_vendor/html5lib/filters/lint.py @@ -0,0 +1,90 @@ +from __future__ import absolute_import, division, unicode_literals + +from . import _base +from ..constants import cdataElements, rcdataElements, voidElements + +from ..constants import spaceCharacters +spaceCharacters = "".join(spaceCharacters) + + +class LintError(Exception): + pass + + +class Filter(_base.Filter): + def __iter__(self): + open_elements = [] + contentModelFlag = "PCDATA" + for token in _base.Filter.__iter__(self): + type = token["type"] + if type in ("StartTag", "EmptyTag"): + name = token["name"] + if contentModelFlag != "PCDATA": + raise LintError("StartTag not in PCDATA content model flag: %(tag)s" % {"tag": name}) + if not isinstance(name, str): + raise LintError("Tag name is not a string: %(tag)r" % {"tag": name}) + if not name: + raise LintError("Empty tag name") + if type == "StartTag" and name in voidElements: + raise LintError("Void element reported as StartTag token: %(tag)s" % {"tag": name}) + elif type == "EmptyTag" and name not in voidElements: + raise LintError("Non-void element reported as EmptyTag token: %(tag)s" % {"tag": token["name"]}) + if type == "StartTag": + open_elements.append(name) + for name, value in token["data"]: + if not isinstance(name, str): + raise LintError("Attribute name is not a string: %(name)r" % {"name": name}) + if not name: + raise LintError("Empty attribute name") + if not isinstance(value, str): + raise LintError("Attribute value is not a string: %(value)r" % {"value": value}) + if name in cdataElements: + contentModelFlag = "CDATA" + elif name in rcdataElements: + contentModelFlag = "RCDATA" + elif name == "plaintext": + contentModelFlag = "PLAINTEXT" + + elif type == "EndTag": + name = token["name"] + if not isinstance(name, str): + raise LintError("Tag name is not a string: %(tag)r" % {"tag": name}) + if not name: + raise LintError("Empty tag name") + if name in voidElements: + raise LintError("Void element 
reported as EndTag token: %(tag)s" % {"tag": name}) + start_name = open_elements.pop() + if start_name != name: + raise LintError("EndTag (%(end)s) does not match StartTag (%(start)s)" % {"end": name, "start": start_name}) + contentModelFlag = "PCDATA" + + elif type == "Comment": + if contentModelFlag != "PCDATA": + raise LintError("Comment not in PCDATA content model flag") + + elif type in ("Characters", "SpaceCharacters"): + data = token["data"] + if not isinstance(data, str): + raise LintError("Attribute name is not a string: %(name)r" % {"name": data}) + if not data: + raise LintError("%(type)s token with empty data" % {"type": type}) + if type == "SpaceCharacters": + data = data.strip(spaceCharacters) + if data: + raise LintError("Non-space character(s) found in SpaceCharacters token: %(token)r" % {"token": data}) + + elif type == "Doctype": + name = token["name"] + if contentModelFlag != "PCDATA": + raise LintError("Doctype not in PCDATA content model flag: %(name)s" % {"name": name}) + if not isinstance(name, str): + raise LintError("Tag name is not a string: %(tag)r" % {"tag": name}) + # XXX: what to do with token["data"] ? + + elif type in ("ParseError", "SerializeError"): + pass + + else: + raise LintError("Unknown token type: %(type)s" % {"type": type}) + + yield token diff --git a/lib/python3.4/site-packages/pip/_vendor/html5lib/filters/optionaltags.py b/lib/python3.4/site-packages/pip/_vendor/html5lib/filters/optionaltags.py new file mode 100644 index 0000000..fefe0b3 --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/html5lib/filters/optionaltags.py @@ -0,0 +1,205 @@ +from __future__ import absolute_import, division, unicode_literals + +from . 
import _base + + +class Filter(_base.Filter): + def slider(self): + previous1 = previous2 = None + for token in self.source: + if previous1 is not None: + yield previous2, previous1, token + previous2 = previous1 + previous1 = token + yield previous2, previous1, None + + def __iter__(self): + for previous, token, next in self.slider(): + type = token["type"] + if type == "StartTag": + if (token["data"] or + not self.is_optional_start(token["name"], previous, next)): + yield token + elif type == "EndTag": + if not self.is_optional_end(token["name"], next): + yield token + else: + yield token + + def is_optional_start(self, tagname, previous, next): + type = next and next["type"] or None + if tagname in 'html': + # An html element's start tag may be omitted if the first thing + # inside the html element is not a space character or a comment. + return type not in ("Comment", "SpaceCharacters") + elif tagname == 'head': + # A head element's start tag may be omitted if the first thing + # inside the head element is an element. + # XXX: we also omit the start tag if the head element is empty + if type in ("StartTag", "EmptyTag"): + return True + elif type == "EndTag": + return next["name"] == "head" + elif tagname == 'body': + # A body element's start tag may be omitted if the first thing + # inside the body element is not a space character or a comment, + # except if the first thing inside the body element is a script + # or style element and the node immediately preceding the body + # element is a head element whose end tag has been omitted. + if type in ("Comment", "SpaceCharacters"): + return False + elif type == "StartTag": + # XXX: we do not look at the preceding event, so we never omit + # the body element's start tag if it's followed by a script or + # a style element. 
+ return next["name"] not in ('script', 'style') + else: + return True + elif tagname == 'colgroup': + # A colgroup element's start tag may be omitted if the first thing + # inside the colgroup element is a col element, and if the element + # is not immediately preceeded by another colgroup element whose + # end tag has been omitted. + if type in ("StartTag", "EmptyTag"): + # XXX: we do not look at the preceding event, so instead we never + # omit the colgroup element's end tag when it is immediately + # followed by another colgroup element. See is_optional_end. + return next["name"] == "col" + else: + return False + elif tagname == 'tbody': + # A tbody element's start tag may be omitted if the first thing + # inside the tbody element is a tr element, and if the element is + # not immediately preceeded by a tbody, thead, or tfoot element + # whose end tag has been omitted. + if type == "StartTag": + # omit the thead and tfoot elements' end tag when they are + # immediately followed by a tbody element. See is_optional_end. + if previous and previous['type'] == 'EndTag' and \ + previous['name'] in ('tbody', 'thead', 'tfoot'): + return False + return next["name"] == 'tr' + else: + return False + return False + + def is_optional_end(self, tagname, next): + type = next and next["type"] or None + if tagname in ('html', 'head', 'body'): + # An html element's end tag may be omitted if the html element + # is not immediately followed by a space character or a comment. + return type not in ("Comment", "SpaceCharacters") + elif tagname in ('li', 'optgroup', 'tr'): + # A li element's end tag may be omitted if the li element is + # immediately followed by another li element or if there is + # no more content in the parent element. + # An optgroup element's end tag may be omitted if the optgroup + # element is immediately followed by another optgroup element, + # or if there is no more content in the parent element. 
+ # A tr element's end tag may be omitted if the tr element is + # immediately followed by another tr element, or if there is + # no more content in the parent element. + if type == "StartTag": + return next["name"] == tagname + else: + return type == "EndTag" or type is None + elif tagname in ('dt', 'dd'): + # A dt element's end tag may be omitted if the dt element is + # immediately followed by another dt element or a dd element. + # A dd element's end tag may be omitted if the dd element is + # immediately followed by another dd element or a dt element, + # or if there is no more content in the parent element. + if type == "StartTag": + return next["name"] in ('dt', 'dd') + elif tagname == 'dd': + return type == "EndTag" or type is None + else: + return False + elif tagname == 'p': + # A p element's end tag may be omitted if the p element is + # immediately followed by an address, article, aside, + # blockquote, datagrid, dialog, dir, div, dl, fieldset, + # footer, form, h1, h2, h3, h4, h5, h6, header, hr, menu, + # nav, ol, p, pre, section, table, or ul, element, or if + # there is no more content in the parent element. + if type in ("StartTag", "EmptyTag"): + return next["name"] in ('address', 'article', 'aside', + 'blockquote', 'datagrid', 'dialog', + 'dir', 'div', 'dl', 'fieldset', 'footer', + 'form', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', + 'header', 'hr', 'menu', 'nav', 'ol', + 'p', 'pre', 'section', 'table', 'ul') + else: + return type == "EndTag" or type is None + elif tagname == 'option': + # An option element's end tag may be omitted if the option + # element is immediately followed by another option element, + # or if it is immediately followed by an optgroup + # element, or if there is no more content in the parent + # element. 
+ if type == "StartTag": + return next["name"] in ('option', 'optgroup') + else: + return type == "EndTag" or type is None + elif tagname in ('rt', 'rp'): + # An rt element's end tag may be omitted if the rt element is + # immediately followed by an rt or rp element, or if there is + # no more content in the parent element. + # An rp element's end tag may be omitted if the rp element is + # immediately followed by an rt or rp element, or if there is + # no more content in the parent element. + if type == "StartTag": + return next["name"] in ('rt', 'rp') + else: + return type == "EndTag" or type is None + elif tagname == 'colgroup': + # A colgroup element's end tag may be omitted if the colgroup + # element is not immediately followed by a space character or + # a comment. + if type in ("Comment", "SpaceCharacters"): + return False + elif type == "StartTag": + # XXX: we also look for an immediately following colgroup + # element. See is_optional_start. + return next["name"] != 'colgroup' + else: + return True + elif tagname in ('thead', 'tbody'): + # A thead element's end tag may be omitted if the thead element + # is immediately followed by a tbody or tfoot element. + # A tbody element's end tag may be omitted if the tbody element + # is immediately followed by a tbody or tfoot element, or if + # there is no more content in the parent element. + # A tfoot element's end tag may be omitted if the tfoot element + # is immediately followed by a tbody element, or if there is no + # more content in the parent element. + # XXX: we never omit the end tag when the following element is + # a tbody. See is_optional_start. + if type == "StartTag": + return next["name"] in ['tbody', 'tfoot'] + elif tagname == 'tbody': + return type == "EndTag" or type is None + else: + return False + elif tagname == 'tfoot': + # A tfoot element's end tag may be omitted if the tfoot element + # is immediately followed by a tbody element, or if there is no + # more content in the parent element. 
+ # XXX: we never omit the end tag when the following element is + # a tbody. See is_optional_start. + if type == "StartTag": + return next["name"] == 'tbody' + else: + return type == "EndTag" or type is None + elif tagname in ('td', 'th'): + # A td element's end tag may be omitted if the td element is + # immediately followed by a td or th element, or if there is + # no more content in the parent element. + # A th element's end tag may be omitted if the th element is + # immediately followed by a td or th element, or if there is + # no more content in the parent element. + if type == "StartTag": + return next["name"] in ('td', 'th') + else: + return type == "EndTag" or type is None + return False diff --git a/lib/python3.4/site-packages/pip/_vendor/html5lib/filters/sanitizer.py b/lib/python3.4/site-packages/pip/_vendor/html5lib/filters/sanitizer.py new file mode 100644 index 0000000..b206b54 --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/html5lib/filters/sanitizer.py @@ -0,0 +1,12 @@ +from __future__ import absolute_import, division, unicode_literals + +from . import _base +from ..sanitizer import HTMLSanitizerMixin + + +class Filter(_base.Filter, HTMLSanitizerMixin): + def __iter__(self): + for token in _base.Filter.__iter__(self): + token = self.sanitize_token(token) + if token: + yield token diff --git a/lib/python3.4/site-packages/pip/_vendor/html5lib/filters/whitespace.py b/lib/python3.4/site-packages/pip/_vendor/html5lib/filters/whitespace.py new file mode 100644 index 0000000..dfc60ee --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/html5lib/filters/whitespace.py @@ -0,0 +1,38 @@ +from __future__ import absolute_import, division, unicode_literals + +import re + +from . 
import _base +from ..constants import rcdataElements, spaceCharacters +spaceCharacters = "".join(spaceCharacters) + +SPACES_REGEX = re.compile("[%s]+" % spaceCharacters) + + +class Filter(_base.Filter): + + spacePreserveElements = frozenset(["pre", "textarea"] + list(rcdataElements)) + + def __iter__(self): + preserve = 0 + for token in _base.Filter.__iter__(self): + type = token["type"] + if type == "StartTag" \ + and (preserve or token["name"] in self.spacePreserveElements): + preserve += 1 + + elif type == "EndTag" and preserve: + preserve -= 1 + + elif not preserve and type == "SpaceCharacters" and token["data"]: + # Test on token["data"] above to not introduce spaces where there were not + token["data"] = " " + + elif not preserve and type == "Characters": + token["data"] = collapse_spaces(token["data"]) + + yield token + + +def collapse_spaces(text): + return SPACES_REGEX.sub(' ', text) diff --git a/lib/python3.4/site-packages/pip/_vendor/html5lib/html5parser.py b/lib/python3.4/site-packages/pip/_vendor/html5lib/html5parser.py new file mode 100644 index 0000000..40f3d09 --- /dev/null +++ b/lib/python3.4/site-packages/pip/_vendor/html5lib/html5parser.py @@ -0,0 +1,2724 @@ +from __future__ import absolute_import, division, unicode_literals +from pip._vendor.six import with_metaclass + +import types + +from . import inputstream +from . import tokenizer + +from . import treebuilders +from .treebuilders._base import Marker + +from . import utils +from . 
import constants +from .constants import spaceCharacters, asciiUpper2Lower +from .constants import specialElements +from .constants import headingElements +from .constants import cdataElements, rcdataElements +from .constants import tokenTypes, ReparseException, namespaces +from .constants import htmlIntegrationPointElements, mathmlTextIntegrationPointElements +from .constants import adjustForeignAttributes as adjustForeignAttributesMap +from .constants import E + + +def parse(doc, treebuilder="etree", encoding=None, + namespaceHTMLElements=True): + """Parse a string or file-like object into a tree""" + tb = treebuilders.getTreeBuilder(treebuilder) + p = HTMLParser(tb, namespaceHTMLElements=namespaceHTMLElements) + return p.parse(doc, encoding=encoding) + + +def parseFragment(doc, container="div", treebuilder="etree", encoding=None, + namespaceHTMLElements=True): + tb = treebuilders.getTreeBuilder(treebuilder) + p = HTMLParser(tb, namespaceHTMLElements=namespaceHTMLElements) + return p.parseFragment(doc, container=container, encoding=encoding) + + +def method_decorator_metaclass(function): + class Decorated(type): + def __new__(meta, classname, bases, classDict): + for attributeName, attribute in classDict.items(): + if isinstance(attribute, types.FunctionType): + attribute = function(attribute) + + classDict[attributeName] = attribute + return type.__new__(meta, classname, bases, classDict) + return Decorated + + +class HTMLParser(object): + """HTML parser. Generates a tree structure from a stream of (possibly + malformed) HTML""" + + def __init__(self, tree=None, tokenizer=tokenizer.HTMLTokenizer, + strict=False, namespaceHTMLElements=True, debug=False): + """ + strict - raise an exception when a parse error is encountered + + tree - a treebuilder class controlling the type of tree that will be + returned. 
Built in treebuilders can be accessed through + html5lib.treebuilders.getTreeBuilder(treeType) + + tokenizer - a class that provides a stream of tokens to the treebuilder. + This may be replaced for e.g. a sanitizer which converts some tags to + text + """ + + # Raise an exception on the first error encountered + self.strict = strict + + if tree is None: + tree = treebuilders.getTreeBuilder("etree") + self.tree = tree(namespaceHTMLElements) + self.tokenizer_class = tokenizer + self.errors = [] + + self.phases = dict([(name, cls(self, self.tree)) for name, cls in + getPhases(debug).items()]) + + def _parse(self, stream, innerHTML=False, container="div", + encoding=None, parseMeta=True, useChardet=True, **kwargs): + + self.innerHTMLMode = innerHTML + self.container = container + self.tokenizer = self.tokenizer_class(stream, encoding=encoding, + parseMeta=parseMeta, + useChardet=useChardet, + parser=self, **kwargs) + self.reset() + + while True: + try: + self.mainLoop() + break + except ReparseException: + self.reset() + + def reset(self): + self.tree.reset() + self.firstStartTag = False + self.errors = [] + self.log = [] # only used with debug mode + # "quirks" / "limited quirks" / "no quirks" + self.compatMode = "no quirks" + + if self.innerHTMLMode: + self.innerHTML = self.container.lower() + + if self.innerHTML in cdataElements: + self.tokenizer.state = self.tokenizer.rcdataState + elif self.innerHTML in rcdataElements: + self.tokenizer.state = self.tokenizer.rawtextState + elif self.innerHTML == 'plaintext': + self.tokenizer.state = self.tokenizer.plaintextState + else: + # state already is data state + # self.tokenizer.state = self.tokenizer.dataState + pass + self.phase = self.phases["beforeHtml"] + self.phase.insertHtmlElement() + self.resetInsertionMode() + else: + self.innerHTML = False + self.phase = self.phases["initial"] + + self.lastPhase = None + + self.beforeRCDataPhase = None + + self.framesetOK = True + + @property + def documentEncoding(self): + 
"""The name of the character encoding + that was used to decode the input stream, + or :obj:`None` if that is not determined yet. + + """ + if not hasattr(self, 'tokenizer'): + return None + return self.tokenizer.stream.charEncoding[0] + + def isHTMLIntegrationPoint(self, element): + if (element.name == "annotation-xml" and + element.namespace == namespaces["mathml"]): + return ("encoding" in element.attributes and + element.attributes["encoding"].translate( + asciiUpper2Lower) in + ("text/html", "application/xhtml+xml")) + else: + return (element.namespace, element.name) in htmlIntegrationPointElements + + def isMathMLTextIntegrationPoint(self, element): + return (element.namespace, element.name) in mathmlTextIntegrationPointElements + + def mainLoop(self): + CharactersToken = tokenTypes["Characters"] + SpaceCharactersToken = tokenTypes["SpaceCharacters"] + StartTagToken = tokenTypes["StartTag"] + EndTagToken = tokenTypes["EndTag"] + CommentToken = tokenTypes["Comment"] + DoctypeToken = tokenTypes["Doctype"] + ParseErrorToken = tokenTypes["ParseError"] + + for token in self.normalizedTokens(): + new_token = token + while new_token is not None: + currentNode = self.tree.openElements[-1] if self.tree.openElements else None + currentNodeNamespace = currentNode.namespace if currentNode else None + currentNodeName = currentNode.name if currentNode else None + + type = new_token["type"] + + if type == ParseErrorToken: + self.parseError(new_token["data"], new_token.get("datavars", {})) + new_token = None + else: + if (len(self.tree.openElements) == 0 or + currentNodeNamespace == self.tree.defaultNamespace or + (self.isMathMLTextIntegrationPoint(currentNode) and + ((type == StartTagToken and + token["name"] not in frozenset(["mglyph", "malignmark"])) or + type in (CharactersToken, SpaceCharactersToken))) or + (currentNodeNamespace == namespaces["mathml"] and + currentNodeName == "annotation-xml" and + token["name"] == "svg") or + (self.isHTMLIntegrationPoint(currentNode) 
and + type in (StartTagToken, CharactersToken, SpaceCharactersToken))): + phase = self.phase + else: + phase = self.phases["inForeignContent"] + + if type == CharactersToken: + new_token = phase.processCharacters(new_token) + elif type == SpaceCharactersToken: + new_token = phase.processSpaceCharacters(new_token) + elif type == StartTagToken: + new_token = phase.processStartTag(new_token) + elif type == EndTagToken: + new_token = phase.processEndTag(new_token) + elif type == CommentToken: + new_token = phase.processComment(new_token) + elif type == DoctypeToken: + new_token = phase.processDoctype(new_token) + + if (type == StartTagToken and token["selfClosing"] + and not token["selfClosingAcknowledged"]): + self.parseError("non-void-element-with-trailing-solidus", + {"name": token["name"]}) + + # When the loop finishes it's EOF + reprocess = True + phases = [] + while reprocess: + phases.append(self.phase) + reprocess = self.phase.processEOF() + if reprocess: + assert self.phase not in phases + + def normalizedTokens(self): + for token in self.tokenizer: + yield self.normalizeToken(token) + + def parse(self, stream, encoding=None, parseMeta=True, useChardet=True): + """Parse a HTML document into a well-formed tree + + stream - a filelike object or string containing the HTML to be parsed + + The optional encoding parameter must be a string that indicates + the encoding. 
If specified, that encoding will be used, + regardless of any BOM or later declaration (such as in a meta + element) + """ + self._parse(stream, innerHTML=False, encoding=encoding, + parseMeta=parseMeta, useChardet=useChardet) + return self.tree.getDocument() + + def parseFragment(self, stream, container="div", encoding=None, + parseMeta=False, useChardet=True): + """Parse a HTML fragment into a well-formed tree fragment + + container - name of the element we're setting the innerHTML property + if set to None, default to 'div' + + stream - a filelike object or string containing the HTML to be parsed + + The optional encoding parameter must be a string that indicates + the encoding. If specified, that encoding will be used, + regardless of any BOM or later declaration (such as in a meta + element) + """ + self._parse(stream, True, container=container, encoding=encoding) + return self.tree.getFragment() + + def parseError(self, errorcode="XXX-undefined-error", datavars={}): + # XXX The idea is to make errorcode mandatory. 
+ self.errors.append((self.tokenizer.stream.position(), errorcode, datavars)) + if self.strict: + raise ParseError(E[errorcode] % datavars) + + def normalizeToken(self, token): + """ HTML5 specific normalizations to the token stream """ + + if token["type"] == tokenTypes["StartTag"]: + token["data"] = dict(token["data"][::-1]) + + return token + + def adjustMathMLAttributes(self, token): + replacements = {"definitionurl": "definitionURL"} + for k, v in replacements.items(): + if k in token["data"]: + token["data"][v] = token["data"][k] + del token["data"][k] + + def adjustSVGAttributes(self, token): + replacements = { + "attributename": "attributeName", + "attributetype": "attributeType", + "basefrequency": "baseFrequency", + "baseprofile": "baseProfile", + "calcmode": "calcMode", + "clippathunits": "clipPathUnits", + "contentscripttype": "contentScriptType", + "contentstyletype": "contentStyleType", + "diffuseconstant": "diffuseConstant", + "edgemode": "edgeMode", + "externalresourcesrequired": "externalResourcesRequired", + "filterres": "filterRes", + "filterunits": "filterUnits", + "glyphref": "glyphRef", + "gradienttransform": "gradientTransform", + "gradientunits": "gradientUnits", + "kernelmatrix": "kernelMatrix", + "kernelunitlength": "kernelUnitLength", + "keypoints": "keyPoints", + "keysplines": "keySplines", + "keytimes": "keyTimes", + "lengthadjust": "lengthAdjust", + "limitingconeangle": "limitingConeAngle", + "markerheight": "markerHeight", + "markerunits": "markerUnits", + "markerwidth": "markerWidth", + "maskcontentunits": "maskContentUnits", + "maskunits": "maskUnits", + "numoctaves": "numOctaves", + "pathlength": "pathLength", + "patterncontentunits": "patternContentUnits", + "patterntransform": "patternTransform", + "patternunits": "patternUnits", + "pointsatx": "pointsAtX", + "pointsaty": "pointsAtY", + "pointsatz": "pointsAtZ", + "preservealpha": "preserveAlpha", + "preserveaspectratio": "preserveAspectRatio", + "primitiveunits": 
"primitiveUnits", + "refx": "refX", + "refy": "refY", + "repeatcount": "repeatCount", + "repeatdur": "repeatDur", + "requiredextensions": "requiredExtensions", + "requiredfeatures": "requiredFeatures", + "specularconstant": "specularConstant", + "specularexponent": "specularExponent", + "spreadmethod": "spreadMethod", + "startoffset": "startOffset", + "stddeviation": "stdDeviation", + "stitchtiles": "stitchTiles", + "surfacescale": "surfaceScale", + "systemlanguage": "systemLanguage", + "tablevalues": "tableValues", + "targetx": "targetX", + "targety": "targetY", + "textlength": "textLength", + "viewbox": "viewBox", + "viewtarget": "viewTarget", + "xchannelselector": "xChannelSelector", + "ychannelselector": "yChannelSelector", + "zoomandpan": "zoomAndPan" + } + for originalName in list(token["data"].keys()): + if originalName in replacements: + svgName = replacements[originalName] + token["data"][svgName] = token["data"][originalName] + del token["data"][originalName] + + def adjustForeignAttributes(self, token): + replacements = adjustForeignAttributesMap + + for originalName in token["data"].keys(): + if originalName in replacements: + foreignName = replacements[originalName] + token["data"][foreignName] = token["data"][originalName] + del token["data"][originalName] + + def reparseTokenNormal(self, token): + self.parser.phase() + + def resetInsertionMode(self): + # The name of this method is mostly historical. (It's also used in the + # specification.) 
+ last = False + newModes = { + "select": "inSelect", + "td": "inCell", + "th": "inCell", + "tr": "inRow", + "tbody": "inTableBody", + "thead": "inTableBody", + "tfoot": "inTableBody", + "caption": "inCaption", + "colgroup": "inColumnGroup", + "table": "inTable", + "head": "inBody", + "body": "inBody", + "frameset": "inFrameset", + "html": "beforeHead" + } + for node in self.tree.openElements[::-1]: + nodeName = node.name + new_phase = None + if node == self.tree.openElements[0]: + assert self.innerHTML + last = True + nodeName = self.innerHTML + # Check for conditions that should only happen in the innerHTML + # case + if nodeName in ("select", "colgroup", "head", "html"): + assert self.innerHTML + + if not last and node.namespace != self.tree.defaultNamespace: + continue + + if nodeName in newModes: + new_phase = self.phases[newModes[nodeName]] + break + elif last: + new_phase = self.phases["inBody"] + break + + self.phase = new_phase + + def parseRCDataRawtext(self, token, contentType): + """Generic RCDATA/RAWTEXT Parsing algorithm + contentType - RCDATA or RAWTEXT + """ + assert contentType in ("RAWTEXT", "RCDATA") + + self.tree.insertElement(token) + + if contentType == "RAWTEXT": + self.tokenizer.state = self.tokenizer.rawtextState + else: + self.tokenizer.state = self.tokenizer.rcdataState + + self.originalPhase = self.phase + + self.phase = self.phases["text"] + + +def getPhases(debug): + def log(function): + """Logger that records which phase processes each token""" + type_names = dict((value, key) for key, value in + constants.tokenTypes.items()) + + def wrapped(self, *args, **kwargs): + if function.__name__.startswith("process") and len(args) > 0: + token = args[0] + try: + info = {"type": type_names[token['type']]} + except: + raise + if token['type'] in constants.tagTokenTypes: + info["name"] = token['name'] + + self.parser.log.append((self.parser.tokenizer.state.__name__, + self.parser.phase.__class__.__name__, + self.__class__.__name__, + 
function.__name__, + info)) + return function(self, *args, **kwargs) + else: + return function(self, *args, **kwargs) + return wrapped + + def getMetaclass(use_metaclass, metaclass_func): + if use_metaclass: + return method_decorator_metaclass(metaclass_func) + else: + return type + + class Phase(with_metaclass(getMetaclass(debug, log))): + """Base class for helper object that implements each phase of processing + """ + + def __init__(self, parser, tree): + self.parser = parser + self.tree = tree + + def processEOF(self): + raise NotImplementedError + + def processComment(self, token): + # For most phases the following is correct. Where it's not it will be + # overridden. + self.tree.insertComment(token, self.tree.openElements[-1]) + + def processDoctype(self, token): + self.parser.parseError("unexpected-doctype") + + def processCharacters(self, token): + self.tree.insertText(token["data"]) + + def processSpaceCharacters(self, token): + self.tree.insertText(token["data"]) + + def processStartTag(self, token): + return self.startTagHandler[token["name"]](token) + + def startTagHtml(self, token): + if not self.parser.firstStartTag and token["name"] == "html": + self.parser.parseError("non-html-root") + # XXX Need a check here to see if the first start tag token emitted is + # this token... If it's not, invoke self.parser.parseError(). 
+ for attr, value in token["data"].items(): + if attr not in self.tree.openElements[0].attributes: + self.tree.openElements[0].attributes[attr] = value + self.parser.firstStartTag = False + + def processEndTag(self, token): + return self.endTagHandler[token["name"]](token) + + class InitialPhase(Phase): + def processSpaceCharacters(self, token): + pass + + def processComment(self, token): + self.tree.insertComment(token, self.tree.document) + + def processDoctype(self, token): + name = token["name"] + publicId = token["publicId"] + systemId = token["systemId"] + correct = token["correct"] + + if (name != "html" or publicId is not None or + systemId is not None and systemId != "about:legacy-compat"): + self.parser.parseError("unknown-doctype") + + if publicId is None: + publicId = "" + + self.tree.insertDoctype(token) + + if publicId != "": + publicId = publicId.translate(asciiUpper2Lower) + + if (not correct or token["name"] != "html" + or publicId.startswith( + ("+//silmaril//dtd html pro v0r11 19970101//", + "-//advasoft ltd//dtd html 3.0 aswedit + extensions//", + "-//as//dtd html 3.0 aswedit + extensions//", + "-//ietf//dtd html 2.0 level 1//", + "-//ietf//dtd html 2.0 level 2//", + "-//ietf//dtd html 2.0 strict level 1//", + "-//ietf//dtd html 2.0 strict level 2//", + "-//ietf//dtd html 2.0 strict//", + "-//ietf//dtd html 2.0//", + "-//ietf//dtd html 2.1e//", + "-//ietf//dtd html 3.0//", + "-//ietf//dtd html 3.2 final//", + "-//ietf//dtd html 3.2//", + "-//ietf//dtd html 3//", + "-//ietf//dtd html level 0//", + "-//ietf//dtd html level 1//", + "-//ietf//dtd html level 2//", + "-//ietf//dtd html level 3//", + "-//ietf//dtd html strict level 0//", + "-//ietf//dtd html strict level 1//", + "-//ietf//dtd html strict level 2//", + "-//ietf//dtd html strict level 3//", + "-//ietf//dtd html strict//", + "-//ietf//dtd html//", + "-//metrius//dtd metrius presentational//", + "-//microsoft//dtd internet explorer 2.0 html strict//", + "-//microsoft//dtd internet explorer 
2.0 html//", + "-//microsoft//dtd internet explorer 2.0 tables//", + "-//microsoft//dtd internet explorer 3.0 html strict//", + "-//microsoft//dtd internet explorer 3.0 html//", + "-//microsoft//dtd internet explorer 3.0 tables//", + "-//netscape comm. corp.//dtd html//", + "-//netscape comm. corp.//dtd strict html//", + "-//o'reilly and associates//dtd html 2.0//", + "-//o'reilly and associates//dtd html extended 1.0//", + "-//o'reilly and associates//dtd html extended relaxed 1.0//", + "-//softquad software//dtd hotmetal pro 6.0::19990601::extensions to html 4.0//", + "-//softquad//dtd hotmetal pro 4.0::19971010::extensions to html 4.0//", + "-//spyglass//dtd html 2.0 extended//", + "-//sq//dtd html 2.0 hotmetal + extensions//", + "-//sun microsystems corp.//dtd hotjava html//", + "-//sun microsystems corp.//dtd hotjava strict html//", + "-//w3c//dtd html 3 1995-03-24//", + "-//w3c//dtd html 3.2 draft//", + "-//w3c//dtd html 3.2 final//", + "-//w3c//dtd html 3.2//", + "-//w3c//dtd html 3.2s draft//", + "-//w3c//dtd html 4.0 frameset//", + "-//w3c//dtd html 4.0 transitional//", + "-//w3c//dtd html experimental 19960712//", + "-//w3c//dtd html experimental 970421//", + "-//w3c//dtd w3 html//", + "-//w3o//dtd w3 html 3.0//", + "-//webtechs//dtd mozilla html 2.0//", + "-//webtechs//dtd mozilla html//")) + or publicId in + ("-//w3o//dtd w3 html strict 3.0//en//", + "-/w3c/dtd html 4.0 transitional/en", + "html") + or publicId.startswith( + ("-//w3c//dtd html 4.01 frameset//", + "-//w3c//dtd html 4.01 transitional//")) and + systemId is None + or systemId and systemId.lower() == "http://www.ibm.com/data/dtd/v11/ibmxhtml1-transitional.dtd"): + self.parser.compatMode = "quirks" + elif (publicId.startswith( + ("-//w3c//dtd xhtml 1.0 frameset//", + "-//w3c//dtd xhtml 1.0 transitional//")) + or publicId.startswith( + ("-//w3c//dtd html 4.01 frameset//", + "-//w3c//dtd html 4.01 transitional//")) and + systemId is not None): + self.parser.compatMode = "limited quirks" + + 
self.parser.phase = self.parser.phases["beforeHtml"] + + def anythingElse(self): + self.parser.compatMode = "quirks" + self.parser.phase = self.parser.phases["beforeHtml"] + + def processCharacters(self, token): + self.parser.parseError("expected-doctype-but-got-chars") + self.anythingElse() + return token + + def processStartTag(self, token): + self.parser.parseError("expected-doctype-but-got-start-tag", + {"name": token["name"]}) + self.anythingElse() + return token + + def processEndTag(self, token): + self.parser.parseError("expected-doctype-but-got-end-tag", + {"name": token["name"]}) + self.anythingElse() + return token + + def processEOF(self): + self.parser.parseError("expected-doctype-but-got-eof") + self.anythingElse() + return True + + class BeforeHtmlPhase(Phase): + # helper methods + def insertHtmlElement(self): + self.tree.insertRoot(impliedTagToken("html", "StartTag")) + self.parser.phase = self.parser.phases["beforeHead"] + + # other + def processEOF(self): + self.insertHtmlElement() + return True + + def processComment(self, token): + self.tree.insertComment(token, self.tree.document) + + def processSpaceCharacters(self, token): + pass + + def processCharacters(self, token): + self.insertHtmlElement() + return token + + def processStartTag(self, token): + if token["name"] == "html": + self.parser.firstStartTag = True + self.insertHtmlElement() + return token + + def processEndTag(self, token): + if token["name"] not in ("head", "body", "html", "br"): + self.parser.parseError("unexpected-end-tag-before-html", + {"name": token["name"]}) + else: + self.insertHtmlElement() + return token + + class BeforeHeadPhase(Phase): + def __init__(self, parser, tree): + Phase.__init__(self, parser, tree) + + self.startTagHandler = utils.MethodDispatcher([ + ("html", self.startTagHtml), + ("head", self.startTagHead) + ]) + self.startTagHandler.default = self.startTagOther + + self.endTagHandler = utils.MethodDispatcher([ + (("head", "body", "html", "br"), 
self.endTagImplyHead) + ]) + self.endTagHandler.default = self.endTagOther + + def processEOF(self): + self.startTagHead(impliedTagToken("head", "StartTag")) + return True + + def processSpaceCharacters(self, token): + pass + + def processCharacters(self, token): + self.startTagHead(impliedTagToken("head", "StartTag")) + return token + + def startTagHtml(self, token): + return self.parser.phases["inBody"].processStartTag(token) + + def startTagHead(self, token): + self.tree.insertElement(token) + self.tree.headPointer = self.tree.openElements[-1] + self.parser.phase = self.parser.phases["inHead"] + + def startTagOther(self, token): + self.startTagHead(impliedTagToken("head", "StartTag")) + return token + + def endTagImplyHead(self, token): + self.startTagHead(impliedTagToken("head", "StartTag")) + return token + + def endTagOther(self, token): + self.parser.parseError("end-tag-after-implied-root", + {"name": token["name"]}) + + class InHeadPhase(Phase): + def __init__(self, parser, tree): + Phase.__init__(self, parser, tree) + + self.startTagHandler = utils.MethodDispatcher([ + ("html", self.startTagHtml), + ("title", self.startTagTitle), + (("noscript", "noframes", "style"), self.startTagNoScriptNoFramesStyle), + ("script", self.startTagScript), + (("base", "basefont", "bgsound", "command", "link"), + self.startTagBaseLinkCommand), + ("meta", self.startTagMeta), + ("head", self.startTagHead) + ]) + self.startTagHandler.default = self.startTagOther + + self. 
endTagHandler = utils.MethodDispatcher([ + ("head", self.endTagHead), + (("br", "html", "body"), self.endTagHtmlBodyBr) + ]) + self.endTagHandler.default = self.endTagOther + + # the real thing + def processEOF(self): + self.anythingElse() + return True + + def processCharacters(self, token): + self.anythingElse() + return token + + def startTagHtml(self, token): + return self.parser.phases["inBody"].processStartTag(token) + + def startTagHead(self, token): + self.parser.parseError("two-heads-are-not-better-than-one") + + def startTagBaseLinkCommand(self, token): + self.tree.insertElement(token) + self.tree.openElements.pop() + token["selfClosingAcknowledged"] = True + + def startTagMeta(self, token): + self.tree.insertElement(token) + self.tree.openElements.pop() + token["selfClosingAcknowledged"] = True + + attributes = token["data"] + if self.parser.tokenizer.stream.charEncoding[1] == "tentative": + if "charset" in attributes: + self.parser.tokenizer.stream.changeEncoding(attributes["charset"]) + elif ("content" in attributes and + "http-equiv" in attributes and + attributes["http-equiv"].lower() == "content-type"): + # Encoding it as UTF-8 here is a hack, as really we should pass + # the abstract Unicode string, and just use the + # ContentAttrParser on that, but using UTF-8 allows all chars + # to be encoded and as a ASCII-superset works. 
+ data = inputstream.EncodingBytes(attributes["content"].encode("utf-8")) + parser = inputstream.ContentAttrParser(data) + codec = parser.parse() + self.parser.tokenizer.stream.changeEncoding(codec) + + def startTagTitle(self, token): + self.parser.parseRCDataRawtext(token, "RCDATA") + + def startTagNoScriptNoFramesStyle(self, token): + # Need to decide whether to implement the scripting-disabled case + self.parser.parseRCDataRawtext(token, "RAWTEXT") + + def startTagScript(self, token): + self.tree.insertElement(token) + self.parser.tokenizer.state = self.parser.tokenizer.scriptDataState + self.parser.originalPhase = self.parser.phase + self.parser.phase = self.parser.phases["text"] + + def startTagOther(self, token): + self.anythingElse() + return token + + def endTagHead(self, token): + node = self.parser.tree.openElements.pop() + assert node.name == "head", "Expected head got %s" % node.name + self.parser.phase = self.parser.phases["afterHead"] + + def endTagHtmlBodyBr(self, token): + self.anythingElse() + return token + + def endTagOther(self, token): + self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) + + def anythingElse(self): + self.endTagHead(impliedTagToken("head")) + + # XXX If we implement a parser for which scripting is disabled we need to + # implement this phase. 
+ # + # class InHeadNoScriptPhase(Phase): + class AfterHeadPhase(Phase): + def __init__(self, parser, tree): + Phase.__init__(self, parser, tree) + + self.startTagHandler = utils.MethodDispatcher([ + ("html", self.startTagHtml), + ("body", self.startTagBody), + ("frameset", self.startTagFrameset), + (("base", "basefont", "bgsound", "link", "meta", "noframes", "script", + "style", "title"), + self.startTagFromHead), + ("head", self.startTagHead) + ]) + self.startTagHandler.default = self.startTagOther + self.endTagHandler = utils.MethodDispatcher([(("body", "html", "br"), + self.endTagHtmlBodyBr)]) + self.endTagHandler.default = self.endTagOther + + def processEOF(self): + self.anythingElse() + return True + + def processCharacters(self, token): + self.anythingElse() + return token + + def startTagHtml(self, token): + return self.parser.phases["inBody"].processStartTag(token) + + def startTagBody(self, token): + self.parser.framesetOK = False + self.tree.insertElement(token) + self.parser.phase = self.parser.phases["inBody"] + + def startTagFrameset(self, token): + self.tree.insertElement(token) + self.parser.phase = self.parser.phases["inFrameset"] + + def startTagFromHead(self, token): + self.parser.parseError("unexpected-start-tag-out-of-my-head", + {"name": token["name"]}) + self.tree.openElements.append(self.tree.headPointer) + self.parser.phases["inHead"].processStartTag(token) + for node in self.tree.openElements[::-1]: + if node.name == "head": + self.tree.openElements.remove(node) + break + + def startTagHead(self, token): + self.parser.parseError("unexpected-start-tag", {"name": token["name"]}) + + def startTagOther(self, token): + self.anythingElse() + return token + + def endTagHtmlBodyBr(self, token): + self.anythingElse() + return token + + def endTagOther(self, token): + self.parser.parseError("unexpected-end-tag", {"name": token["name"]}) + + def anythingElse(self): + self.tree.insertElement(impliedTagToken("body", "StartTag")) + self.parser.phase = 
self.parser.phases["inBody"] + self.parser.framesetOK = True + + class InBodyPhase(Phase): + # http://www.whatwg.org/specs/web-apps/current-work/#parsing-main-inbody + # the really-really-really-very crazy mode + def __init__(self, parser, tree): + Phase.__init__(self, parser, tree) + + # Keep a ref to this for special handling of whitespace in

+            self.processSpaceCharactersNonPre = self.processSpaceCharacters
+
+            self.startTagHandler = utils.MethodDispatcher([
+                ("html", self.startTagHtml),
+                (("base", "basefont", "bgsound", "command", "link", "meta",
+                  "script", "style", "title"),
+                 self.startTagProcessInHead),
+                ("body", self.startTagBody),
+                ("frameset", self.startTagFrameset),
+                (("address", "article", "aside", "blockquote", "center", "details",
+                  "details", "dir", "div", "dl", "fieldset", "figcaption", "figure",
+                  "footer", "header", "hgroup", "main", "menu", "nav", "ol", "p",
+                  "section", "summary", "ul"),
+                 self.startTagCloseP),
+                (headingElements, self.startTagHeading),
+                (("pre", "listing"), self.startTagPreListing),
+                ("form", self.startTagForm),
+                (("li", "dd", "dt"), self.startTagListItem),
+                ("plaintext", self.startTagPlaintext),
+                ("a", self.startTagA),
+                (("b", "big", "code", "em", "font", "i", "s", "small", "strike",
+                  "strong", "tt", "u"), self.startTagFormatting),
+                ("nobr", self.startTagNobr),
+                ("button", self.startTagButton),
+                (("applet", "marquee", "object"), self.startTagAppletMarqueeObject),
+                ("xmp", self.startTagXmp),
+                ("table", self.startTagTable),
+                (("area", "br", "embed", "img", "keygen", "wbr"),
+                 self.startTagVoidFormatting),
+                (("param", "source", "track"), self.startTagParamSource),
+                ("input", self.startTagInput),
+                ("hr", self.startTagHr),
+                ("image", self.startTagImage),
+                ("isindex", self.startTagIsIndex),
+                ("textarea", self.startTagTextarea),
+                ("iframe", self.startTagIFrame),
+                (("noembed", "noframes", "noscript"), self.startTagRawtext),
+                ("select", self.startTagSelect),
+                (("rp", "rt"), self.startTagRpRt),
+                (("option", "optgroup"), self.startTagOpt),
+                (("math"), self.startTagMath),
+                (("svg"), self.startTagSvg),
+                (("caption", "col", "colgroup", "frame", "head",
+                  "tbody", "td", "tfoot", "th", "thead",
+                  "tr"), self.startTagMisplaced)
+            ])
+            self.startTagHandler.default = self.startTagOther
+
+            self.endTagHandler = utils.MethodDispatcher([
+                ("body", self.endTagBody),
+                ("html", self.endTagHtml),
+                (("address", "article", "aside", "blockquote", "button", "center",
+                  "details", "dialog", "dir", "div", "dl", "fieldset", "figcaption", "figure",
+                  "footer", "header", "hgroup", "listing", "main", "menu", "nav", "ol", "pre",
+                  "section", "summary", "ul"), self.endTagBlock),
+                ("form", self.endTagForm),
+                ("p", self.endTagP),
+                (("dd", "dt", "li"), self.endTagListItem),
+                (headingElements, self.endTagHeading),
+                (("a", "b", "big", "code", "em", "font", "i", "nobr", "s", "small",
+                  "strike", "strong", "tt", "u"), self.endTagFormatting),
+                (("applet", "marquee", "object"), self.endTagAppletMarqueeObject),
+                ("br", self.endTagBr),
+            ])
+            self.endTagHandler.default = self.endTagOther
+
+        def isMatchingFormattingElement(self, node1, node2):
+            if node1.name != node2.name or node1.namespace != node2.namespace:
+                return False
+            elif len(node1.attributes) != len(node2.attributes):
+                return False
+            else:
+                attributes1 = sorted(node1.attributes.items())
+                attributes2 = sorted(node2.attributes.items())
+                for attr1, attr2 in zip(attributes1, attributes2):
+                    if attr1 != attr2:
+                        return False
+            return True
+
+        # helper
+        def addFormattingElement(self, token):
+            self.tree.insertElement(token)
+            element = self.tree.openElements[-1]
+
+            matchingElements = []
+            for node in self.tree.activeFormattingElements[::-1]:
+                if node is Marker:
+                    break
+                elif self.isMatchingFormattingElement(node, element):
+                    matchingElements.append(node)
+
+            assert len(matchingElements) <= 3
+            if len(matchingElements) == 3:
+                self.tree.activeFormattingElements.remove(matchingElements[-1])
+            self.tree.activeFormattingElements.append(element)
+
+        # the real deal
+        def processEOF(self):
+            allowed_elements = frozenset(("dd", "dt", "li", "p", "tbody", "td",
+                                          "tfoot", "th", "thead", "tr", "body",
+                                          "html"))
+            for node in self.tree.openElements[::-1]:
+                if node.name not in allowed_elements:
+                    self.parser.parseError("expected-closing-tag-but-got-eof")
+                    break
+            # Stop parsing
+
+        def processSpaceCharactersDropNewline(self, token):
+            # Sometimes (start of 
, , and